package org.apache.lucene.search;
import java.io.IOException;
import java.util.Objects;
import java.util.Set;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.TermStates;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.similarities.Similarity;
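/**
 * A Query that matches documents containing a term. This may be combined with
 * other terms with a {@link BooleanQuery}.
 */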
public class TermQuery extends Query {
private final Term term;
private final TermStates perReaderTermState;
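/** Weight for a single term; holds the per-reader term states and the similarity scorer. */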
final class TermWeight extends Weight {
private final Similarity similarity;
private final Similarity.SimScorer simScorer;
private final TermStates termStates;
private final ScoreMode scoreMode;
public TermWeight(IndexSearcher searcher, ScoreMode scoreMode,
float boost, TermStates termStates) throws IOException {
super(TermQuery.this);
if (scoreMode.needsScores() && termStates == null) {
throw new IllegalStateException("termStates are required when scores are needed");
}
this.scoreMode = scoreMode;
this.termStates = termStates;
this.similarity = searcher.getSimilarity();
final CollectionStatistics collectionStats;
final TermStatistics termStats;
if (scoreMode.needsScores()) {
collectionStats = searcher.collectionStatistics(term.field());
termStats = termStates.docFreq() > 0 ? searcher.termStatistics(term, termStates.docFreq(), termStates.totalTermFreq()) : null;
} else {
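// scores are not needed: substitute dummy statistics (all set to 1) instead of looking up the real ones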
collectionStats = new CollectionStatistics(term.field(), 1, 1, 1, 1);
termStats = new TermStatistics(term.bytes(), 1, 1);
}
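// termStats == null means the term does not occur in the index, so no scorer will be produced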
if (termStats == null) {
this.simScorer = null;
} else {
this.simScorer = similarity.scorer(boost, collectionStats, termStats);
}
}
@Override
public void extractTerms(Set<Term> terms) {
terms.add(getTerm());
}
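/**
 * Returns the match positions of this term in the given document, falling back to the
 * default implementation when the field was indexed without positions.
 */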
@Override
public Matches matches(LeafReaderContext context, int doc) throws IOException {
TermsEnum te = getTermsEnum(context);
if (te == null) {
return null;
}
if (context.reader().terms(term.field()).hasPositions() == false) {
return super.matches(context, doc);
}
return MatchesUtils.forField(term.field(), () -> {
PostingsEnum pe = te.postings(null, PostingsEnum.OFFSETS);
if (pe.advance(doc) != doc) {
return null;
}
return new TermMatchesIterator(getQuery(), pe);
});
}
@Override
public String toString() {
return "weight(" + TermQuery.this + ")";
}
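/**
 * Creates a {@link TermScorer} over this segment's postings, or returns null when the
 * term is absent from the segment.
 */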
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
assert termStates == null || termStates.wasBuiltFor(ReaderUtil.getTopLevelContext(context)) :
"The top-reader used to create Weight is not the same as the current reader's top-reader (" + ReaderUtil.getTopLevelContext(context);
final TermsEnum termsEnum = getTermsEnum(context);
if (termsEnum == null) {
return null;
}
LeafSimScorer scorer = new LeafSimScorer(simScorer, context.reader(), term.field(), scoreMode.needsScores());
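// when only the top scores are needed, use the impacts API so non-competitive blocks can be skipped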
if (scoreMode == ScoreMode.TOP_SCORES) {
return new TermScorer(this, termsEnum.impacts(PostingsEnum.FREQS), scorer);
} else {
return new TermScorer(this, termsEnum.postings(null, scoreMode.needsScores() ? PostingsEnum.FREQS : PostingsEnum.NONE), scorer);
}
}
@Override
public boolean isCacheable(LeafReaderContext ctx) {
return true;
}
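/**
 * Returns a {@link TermsEnum} positioned at this weight's {@link Term}, or null if the
 * term does not exist in the given context.
 */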
private TermsEnum getTermsEnum(LeafReaderContext context) throws IOException {
assert termStates != null;
assert termStates.wasBuiltFor(ReaderUtil.getTopLevelContext(context)) :
"The top-reader used to create Weight is not the same as the current reader's top-reader (" + ReaderUtil.getTopLevelContext(context);
final TermState state = termStates.get(context);
if (state == null) {
assert termNotInReader(context.reader(), term) : "no termstate found but term exists in reader term=" + term;
return null;
}
final TermsEnum termsEnum = context.reader().terms(term.field()).iterator();
termsEnum.seekExact(term.bytes(), state);
return termsEnum;
}
private boolean termNotInReader(LeafReader reader, Term term) throws IOException {
return reader.docFreq(term) == 0;
}
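/** Explains how the given document's score against this term was computed. */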
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
TermScorer scorer = (TermScorer) scorer(context);
if (scorer != null) {
int newDoc = scorer.iterator().advance(doc);
if (newDoc == doc) {
float freq = scorer.freq();
LeafSimScorer docScorer = new LeafSimScorer(simScorer, context.reader(), term.field(), true);
Explanation freqExplanation = Explanation.match(freq, "freq, occurrences of term within document");
Explanation scoreExplanation = docScorer.explain(doc, freqExplanation);
return Explanation.match(
scoreExplanation.getValue(),
"weight(" + getQuery() + " in " + doc + ") ["
+ similarity.getClass().getSimpleName() + "], result of:",
scoreExplanation);
}
}
return Explanation.noMatch("no matching term");
}
}
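/** Constructs a query for the term <code>t</code>. */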
public TermQuery(Term t) {
term = Objects.requireNonNull(t);
perReaderTermState = null;
}
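/**
 * Expert: constructs a TermQuery that reuses the provided {@link TermStates} instead of
 * rebuilding them against the searcher, as long as they were built for the same
 * top-level reader.
 */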
public TermQuery(Term t, TermStates states) {
assert states != null;
term = Objects.requireNonNull(t);
perReaderTermState = Objects.requireNonNull(states);
}
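/** Returns the term of this query. */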
public Term getTerm() {
return term;
}
@Override
public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
final IndexReaderContext context = searcher.getTopReaderContext();
final TermStates termState;
if (perReaderTermState == null
|| perReaderTermState.wasBuiltFor(context) == false) {
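// no TermStates were provided, or they were built for a different top-level reader: rebuild them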
termState = TermStates.build(context, term, scoreMode.needsScores());
} else {
termState = this.perReaderTermState;
}
return new TermWeight(searcher, scoreMode, boost, termState);
}
@Override
public void visit(QueryVisitor visitor) {
if (visitor.acceptField(term.field())) {
visitor.consumeTerms(this, term);
}
}
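/** Prints a user-readable version of this query. */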
@Override
public String toString(String field) {
StringBuilder buffer = new StringBuilder();
if (!term.field().equals(field)) {
buffer.append(term.field());
buffer.append(":");
}
buffer.append(term.text());
return buffer.toString();
}
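/** Returns the {@link TermStates} passed to the constructor, or null if none were passed. */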
public TermStates getTermStates() {
return perReaderTermState;
}
@Override
public boolean equals(Object other) {
return sameClassAs(other) &&
term.equals(((TermQuery) other).term);
}
@Override
public int hashCode() {
return classHash() ^ term.hashCode();
}
}