package org.apache.lucene.codecs.idversion;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.TreeMap;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsReaderBase;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.fst.PairOutputs.Pair;
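
/**
 * Reads the terms dictionary and terms index written by {@link VersionBlockTreeTermsWriter}.
 * Each field's terms index is an FST whose outputs are {@link Pair}s of block output bytes
 * and the maximum version of any term in that block, so an ID lookup can stop early when no
 * newer version can exist.
 *
 * <p>A minimal wiring sketch (hypothetical; in practice the owning {@code PostingsFormat}
 * builds this reader inside its {@code fieldsProducer} method, and {@code state} is the
 * {@link SegmentReadState} it receives):
 *
 * <pre class="prettyprint">
 * PostingsReaderBase postingsReader = new IDVersionPostingsReader();
 * boolean success = false;
 * try {
 *   FieldsProducer fields = new VersionBlockTreeTermsReader(postingsReader, state);
 *   success = true;
 *   // ... use fields.terms(field), fields.checkIntegrity(), etc.
 * } finally {
 *   if (!success) {
 *     IOUtils.closeWhileHandlingException(postingsReader);
 *   }
 * }
 * </pre>
 *
 * @lucene.experimental
 */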
public final class VersionBlockTreeTermsReader extends FieldsProducer {
  // Input for the main terms dictionary file
  final IndexInput in;

  // Reads the postings for terms-dict entries, producing postings enums on demand
  final PostingsReaderBase postingsReader;

  private final TreeMap<String,VersionFieldReader> fields = new TreeMap<>();
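
  /** Sole constructor; opens the terms dictionary and terms index files and
   *  loads the per-field metadata. */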
public VersionBlockTreeTermsReader(PostingsReaderBase postingsReader, SegmentReadState state) throws IOException {
this.postingsReader = postingsReader;
String termsFile = IndexFileNames.segmentFileName(state.segmentInfo.name,
state.segmentSuffix,
VersionBlockTreeTermsWriter.TERMS_EXTENSION);
in = state.directory.openInput(termsFile, state.context);
boolean success = false;
IndexInput indexIn = null;
try {
int termsVersion = CodecUtil.checkIndexHeader(in, VersionBlockTreeTermsWriter.TERMS_CODEC_NAME,
VersionBlockTreeTermsWriter.VERSION_START,
VersionBlockTreeTermsWriter.VERSION_CURRENT,
state.segmentInfo.getId(), state.segmentSuffix);
String indexFile = IndexFileNames.segmentFileName(state.segmentInfo.name,
state.segmentSuffix,
VersionBlockTreeTermsWriter.TERMS_INDEX_EXTENSION);
indexIn = state.directory.openInput(indexFile, state.context);
int indexVersion = CodecUtil.checkIndexHeader(indexIn, VersionBlockTreeTermsWriter.TERMS_INDEX_CODEC_NAME,
VersionBlockTreeTermsWriter.VERSION_START,
VersionBlockTreeTermsWriter.VERSION_CURRENT,
state.segmentInfo.getId(), state.segmentSuffix);
if (indexVersion != termsVersion) {
throw new CorruptIndexException("mixmatched version files: " + in + "=" + termsVersion + "," + indexIn + "=" + indexVersion, indexIn);
}
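
      // Fully verify the index file's checksum before reading from it.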
CodecUtil.checksumEntireFile(indexIn);
postingsReader.init(in, state);
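
      // NOTE: checksumming the entire terms dict on open would be too costly, so we
      // only validate the structure of the checksum footer (FOOTER_MAGIC + algorithmID);
      // that is cheap and still detects some forms of corruption such as truncation.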
CodecUtil.retrieveChecksum(in);
seekDir(in);
seekDir(indexIn);
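
      // Read per-field details: the directory holds numFields followed by one
      // metadata block per field.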
final int numFields = in.readVInt();
if (numFields < 0) {
throw new CorruptIndexException("invalid numFields: " + numFields, in);
}
for(int i=0;i<numFields;i++) {
final int field = in.readVInt();
final long numTerms = in.readVLong();
assert numTerms >= 0;
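        // The root block's FST output: its serialized bytes plus the maximum
        // version of any term under it, combined into a Pair below.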
final int numBytes = in.readVInt();
final BytesRef code = new BytesRef(new byte[numBytes]);
in.readBytes(code.bytes, 0, numBytes);
code.length = numBytes;
final long version = in.readVLong();
final Pair<BytesRef,Long> rootCode = VersionBlockTreeTermsWriter.FST_OUTPUTS.newPair(code, version);
final FieldInfo fieldInfo = state.fieldInfos.fieldInfo(field);
assert fieldInfo != null: "field=" + field;
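        // Each unique ID term occurs exactly once, in exactly one document, so
        // sumTotalTermFreq, sumDocFreq and docCount all collapse to numTerms.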
final long sumTotalTermFreq = numTerms;
final long sumDocFreq = numTerms;
assert numTerms <= Integer.MAX_VALUE;
final int docCount = (int) numTerms;
final int longsSize = in.readVInt();
BytesRef minTerm = readBytesRef(in);
BytesRef maxTerm = readBytesRef(in);
if (docCount < 0 || docCount > state.segmentInfo.maxDoc()) {
throw new CorruptIndexException("invalid docCount: " + docCount + " maxDoc: " + state.segmentInfo.maxDoc(), in);
}
if (sumDocFreq < docCount) {
throw new CorruptIndexException("invalid sumDocFreq: " + sumDocFreq + " docCount: " + docCount, in);
}
if (sumTotalTermFreq < sumDocFreq) {
throw new CorruptIndexException("invalid sumTotalTermFreq: " + sumTotalTermFreq + " sumDocFreq: " + sumDocFreq, in);
}
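        // File pointer in the index file where this field's terms index FST begins.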
final long indexStartFP = indexIn.readVLong();
VersionFieldReader previous = fields.put(fieldInfo.name,
new VersionFieldReader(this, fieldInfo, numTerms, rootCode, sumTotalTermFreq, sumDocFreq, docCount,
indexStartFP, longsSize, indexIn, minTerm, maxTerm));
if (previous != null) {
throw new CorruptIndexException("duplicate field: " + fieldInfo.name, in);
}
}
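
      // Each field's index FST was fully loaded on-heap above, so the index
      // file itself can now be closed.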
indexIn.close();
success = true;
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(indexIn, this);
}
}
}
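
  /** Reads a vInt length followed by that many bytes into a new {@link BytesRef}. */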
private static BytesRef readBytesRef(IndexInput in) throws IOException {
BytesRef bytes = new BytesRef();
bytes.length = in.readVInt();
bytes.bytes = new byte[bytes.length];
in.readBytes(bytes.bytes, 0, bytes.length);
return bytes;
}
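
  /** Reads the long directory offset stored just before the codec footer, then
   *  seeks {@code input} to that offset. */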
private void seekDir(IndexInput input) throws IOException {
input.seek(input.length() - CodecUtil.footerLength() - 8);
long dirOffset = input.readLong();
input.seek(dirOffset);
}
@Override
public void close() throws IOException {
try {
IOUtils.close(in, postingsReader);
} finally {
fields.clear();
}
}
@Override
public Iterator<String> iterator() {
return Collections.unmodifiableSet(fields.keySet()).iterator();
}
@Override
public Terms terms(String field) throws IOException {
assert field != null;
return fields.get(field);
}
@Override
public int size() {
return fields.size();
}
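
  // for debugging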
String brToString(BytesRef b) {
if (b == null) {
return "null";
} else {
try {
return b.utf8ToString() + " " + b;
} catch (Throwable t) {
return b.toString();
}
}
}
@Override
public long ramBytesUsed() {
long sizeInBytes = postingsReader.ramBytesUsed();
    for (VersionFieldReader reader : fields.values()) {
sizeInBytes += reader.ramBytesUsed();
}
return sizeInBytes;
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> resources = new ArrayList<>();
resources.addAll(Accountables.namedAccountables("field", fields));
resources.add(Accountables.namedAccountable("delegate", postingsReader));
return Collections.unmodifiableList(resources);
}
@Override
public void checkIntegrity() throws IOException {
CodecUtil.checksumEntireFile(in);
postingsReader.checkIntegrity();
}
@Override
public String toString() {
return getClass().getSimpleName() + "(fields=" + fields.size() + ",delegate=" + postingsReader.toString() + ")";
}
}