package org.apache.lucene.index;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.codecs.NormsProducer;
import org.apache.lucene.util.ByteBlockPool;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.IntBlockPool;
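
/**
 * This class is passed each token produced by the analyzer on each field during
 * indexing. It stores these tokens in a hash table and allocates separate byte
 * streams per token. Consumers of this class, e.g. {@link FreqProxTermsWriter}
 * and {@link TermVectorsConsumer}, write their own byte streams under each term.
 */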
abstract class TermsHash {

  final TermsHash nextTermsHash;

  final IntBlockPool intPool;
  final ByteBlockPool bytePool;
  ByteBlockPool termBytePool;
  final Counter bytesUsed;

  final DocumentsWriterPerThread.DocState docState;

  final boolean trackAllocations;

  TermsHash(final DocumentsWriterPerThread docWriter, boolean trackAllocations, TermsHash nextTermsHash) {
    this.docState = docWriter.docState;
    this.trackAllocations = trackAllocations;
    this.nextTermsHash = nextTermsHash;
    // Charge allocations against the writer's shared counter only when asked to.
    this.bytesUsed = trackAllocations ? docWriter.bytesUsed : Counter.newCounter();
    intPool = new IntBlockPool(docWriter.intBlockAllocator);
    bytePool = new ByteBlockPool(docWriter.byteBlockAllocator);

    if (nextTermsHash != null) {
      // We are the primary TermsHash; the chained (secondary) consumer shares our term byte pool.
      termBytePool = bytePool;
      nextTermsHash.termBytePool = bytePool;
    }
  }

  public void abort() {
    try {
      reset();
    } finally {
      if (nextTermsHash != null) {
        nextTermsHash.abort();
      }
    }
  }
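
  // Clear all state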
  void reset() {
    // we don't reuse the blocks, so drop everything and don't zero-fill
    intPool.reset(false, false);
    bytePool.reset(false, false);
  }
  void flush(Map<String,TermsHashPerField> fieldsToFlush, final SegmentWriteState state,
      Sorter.DocMap sortMap, NormsProducer norms) throws IOException {
    if (nextTermsHash != null) {
      Map<String,TermsHashPerField> nextChildFields = new HashMap<>();
      for (final Map.Entry<String,TermsHashPerField> entry : fieldsToFlush.entrySet()) {
        nextChildFields.put(entry.getKey(), entry.getValue().nextPerField);
      }
      nextTermsHash.flush(nextChildFields, state, sortMap, norms);
    }
  }
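
  // Creates the per-field consumer that inverts tokens for the given field.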
  abstract TermsHashPerField addField(FieldInvertState fieldInvertState, FieldInfo fieldInfo);
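
  // Per-document lifecycle hooks; the base implementations just forward down the chain.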
  void finishDocument() throws IOException {
    if (nextTermsHash != null) {
      nextTermsHash.finishDocument();
    }
  }

  void startDocument() throws IOException {
    if (nextTermsHash != null) {
      nextTermsHash.startDocument();
    }
  }
}