class org.apache.cassandra.streaming.compress.CompressedInputStream$Reader extends org.apache.cassandra.utils.WrappedRunnable
minor version: 0
major version: 59
flags: (0x0020) ACC_SUPER
this_class: org.apache.cassandra.streaming.compress.CompressedInputStream$Reader
super_class: org.apache.cassandra.utils.WrappedRunnable
{
private final java.io.InputStream source;
descriptor: Ljava/io/InputStream;
flags: (0x0012) ACC_PRIVATE, ACC_FINAL
private final java.util.Iterator<org.apache.cassandra.io.compress.CompressionMetadata$Chunk> chunks;
descriptor: Ljava/util/Iterator;
flags: (0x0012) ACC_PRIVATE, ACC_FINAL
Signature: Ljava/util/Iterator<Lorg/apache/cassandra/io/compress/CompressionMetadata$Chunk;>;
private final java.util.concurrent.BlockingQueue<byte[]> dataBuffer;
descriptor: Ljava/util/concurrent/BlockingQueue;
flags: (0x0012) ACC_PRIVATE, ACC_FINAL
Signature: Ljava/util/concurrent/BlockingQueue<[B>;
final org.apache.cassandra.streaming.compress.CompressedInputStream this$0;
descriptor: Lorg/apache/cassandra/streaming/compress/CompressedInputStream;
flags: (0x1010) ACC_FINAL, ACC_SYNTHETIC
void <init>(java.io.InputStream, org.apache.cassandra.streaming.compress.CompressionInfo, java.util.concurrent.BlockingQueue<byte[]>);
descriptor: (Lorg/apache/cassandra/streaming/compress/CompressedInputStream;Ljava/io/InputStream;Lorg/apache/cassandra/streaming/compress/CompressionInfo;Ljava/util/concurrent/BlockingQueue;)V
flags: (0x0000)
Code:
stack=2, locals=5, args_size=5
start local 0 start local 2 start local 3 start local 4 0: aload 0
aload 1
putfield org.apache.cassandra.streaming.compress.CompressedInputStream$Reader.this$0:Lorg/apache/cassandra/streaming/compress/CompressedInputStream;
1: aload 0
invokespecial org.apache.cassandra.utils.WrappedRunnable.<init>:()V
2: aload 0
aload 2
putfield org.apache.cassandra.streaming.compress.CompressedInputStream$Reader.source:Ljava/io/InputStream;
3: aload 0
aload 3
getfield org.apache.cassandra.streaming.compress.CompressionInfo.chunks:[Lorg/apache/cassandra/io/compress/CompressionMetadata$Chunk;
invokestatic com.google.common.collect.Iterators.forArray:([Ljava/lang/Object;)Lcom/google/common/collect/UnmodifiableIterator;
putfield org.apache.cassandra.streaming.compress.CompressedInputStream$Reader.chunks:Ljava/util/Iterator;
4: aload 0
aload 4
putfield org.apache.cassandra.streaming.compress.CompressedInputStream$Reader.dataBuffer:Ljava/util/concurrent/BlockingQueue;
5: return
end local 4 end local 3 end local 2 end local 0 LocalVariableTable:
Start End Slot Name Signature
0 6 0 this Lorg/apache/cassandra/streaming/compress/CompressedInputStream$Reader;
0 6 2 source Ljava/io/InputStream;
0 6 3 info Lorg/apache/cassandra/streaming/compress/CompressionInfo;
0 6 4 dataBuffer Ljava/util/concurrent/BlockingQueue<[B>;
Signature: (Ljava/io/InputStream;Lorg/apache/cassandra/streaming/compress/CompressionInfo;Ljava/util/concurrent/BlockingQueue<[B>;)V
MethodParameters:
Name Flags
this$0 final
source
info
dataBuffer
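For readability, here is an approximate Java source reconstruction of the fields and the constructor above, recovered from the bytecode, the Signature attributes, and the LocalVariableTable/MethodParameters entries. It is a sketch derived from this disassembly, not necessarily the verbatim original source; the runMayThrow() body is reconstructed after the end of the listing.

    // relevant imports implied by the descriptors above:
    // java.io.InputStream, java.util.Iterator, java.util.concurrent.BlockingQueue,
    // com.google.common.collect.Iterators
    class Reader extends WrappedRunnable
    {
        private final InputStream source;
        private final Iterator<CompressionMetadata.Chunk> chunks;
        private final BlockingQueue<byte[]> dataBuffer;

        Reader(InputStream source, CompressionInfo info, BlockingQueue<byte[]> dataBuffer)
        {
            // the synthetic this$0 field (assigned first in the bytecode) holds the
            // enclosing CompressedInputStream instance of this inner class
            this.source = source;
            this.chunks = Iterators.forArray(info.chunks);
            this.dataBuffer = dataBuffer;
        }

        // protected void runMayThrow() throws Exception { ... }  -- see sketch below
    }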
protected void runMayThrow();
descriptor: ()V
flags: (0x0004) ACC_PROTECTED
Code:
stack=5, locals=6, args_size=1
start local 0 0: goto 20
1: StackMap locals:
StackMap stack:
aload 0
getfield org.apache.cassandra.streaming.compress.CompressedInputStream$Reader.chunks:Ljava/util/Iterator;
invokeinterface java.util.Iterator.next:()Ljava/lang/Object;
checkcast org.apache.cassandra.io.compress.CompressionMetadata$Chunk
astore 2
start local 2 2: aload 2
getfield org.apache.cassandra.io.compress.CompressionMetadata$Chunk.length:I
iconst_4
iadd
istore 3
start local 3 3: iload 3
newarray 8
astore 1
start local 1 4: iconst_0
istore 4
start local 4 5: goto 18
6: StackMap locals: org.apache.cassandra.streaming.compress.CompressedInputStream$Reader byte[] org.apache.cassandra.io.compress.CompressionMetadata$Chunk int int
StackMap stack:
aload 0
getfield org.apache.cassandra.streaming.compress.CompressedInputStream$Reader.source:Ljava/io/InputStream;
aload 1
iload 4
iload 3
iload 4
isub
invokevirtual java.io.InputStream.read:([BII)I
istore 5
start local 5 7: iload 5
ifge 11
8: aload 0
getfield org.apache.cassandra.streaming.compress.CompressedInputStream$Reader.this$0:Lorg/apache/cassandra/streaming/compress/CompressedInputStream;
new java.io.EOFException
dup
ldc "No chunk available"
invokespecial java.io.EOFException.<init>:(Ljava/lang/String;)V
putfield org.apache.cassandra.streaming.compress.CompressedInputStream.readException:Ljava/io/IOException;
9: aload 0
getfield org.apache.cassandra.streaming.compress.CompressedInputStream$Reader.dataBuffer:Ljava/util/concurrent/BlockingQueue;
getstatic org.apache.cassandra.streaming.compress.CompressedInputStream.POISON_PILL:[B
invokeinterface java.util.concurrent.BlockingQueue.put:(Ljava/lang/Object;)V
10: return
11: StackMap locals: int
StackMap stack:
iload 4
iload 5
iadd
istore 4
end local 5 12: goto 18
13: StackMap locals: org.apache.cassandra.streaming.compress.CompressedInputStream$Reader byte[] org.apache.cassandra.io.compress.CompressionMetadata$Chunk int int
StackMap stack: java.io.IOException
astore 5
start local 5 14: getstatic org.apache.cassandra.streaming.compress.CompressedInputStream.logger:Lorg/slf4j/Logger;
ldc "Error while reading compressed input stream."
aload 5
invokeinterface org.slf4j.Logger.warn:(Ljava/lang/String;Ljava/lang/Throwable;)V
15: aload 0
getfield org.apache.cassandra.streaming.compress.CompressedInputStream$Reader.this$0:Lorg/apache/cassandra/streaming/compress/CompressedInputStream;
aload 5
putfield org.apache.cassandra.streaming.compress.CompressedInputStream.readException:Ljava/io/IOException;
16: aload 0
getfield org.apache.cassandra.streaming.compress.CompressedInputStream$Reader.dataBuffer:Ljava/util/concurrent/BlockingQueue;
getstatic org.apache.cassandra.streaming.compress.CompressedInputStream.POISON_PILL:[B
invokeinterface java.util.concurrent.BlockingQueue.put:(Ljava/lang/Object;)V
17: return
end local 5 18: StackMap locals:
StackMap stack:
iload 4
iload 3
if_icmplt 6
19: aload 0
getfield org.apache.cassandra.streaming.compress.CompressedInputStream$Reader.dataBuffer:Ljava/util/concurrent/BlockingQueue;
aload 1
invokeinterface java.util.concurrent.BlockingQueue.put:(Ljava/lang/Object;)V
end local 4 end local 3 end local 2 end local 1 20: StackMap locals: org.apache.cassandra.streaming.compress.CompressedInputStream$Reader
StackMap stack:
aload 0
getfield org.apache.cassandra.streaming.compress.CompressedInputStream$Reader.chunks:Ljava/util/Iterator;
invokeinterface java.util.Iterator.hasNext:()Z
ifne 1
21: return
end local 0 LocalVariableTable:
Start End Slot Name Signature
0 22 0 this Lorg/apache/cassandra/streaming/compress/CompressedInputStream$Reader;
4 20 1 compressedWithCRC [B
2 20 2 chunk Lorg/apache/cassandra/io/compress/CompressionMetadata$Chunk;
3 20 3 readLength I
5 20 4 bufferRead I
7 12 5 r I
14 18 5 e Ljava/io/IOException;
Exception table:
from to target type
6 10 13 Class java.io.IOException
11 12 13 Class java.io.IOException
Exceptions:
throws java.lang.Exception
}
SourceFile: "CompressedInputStream.java"
NestHost: org.apache.cassandra.streaming.compress.CompressedInputStream
InnerClasses:
public Chunk = org.apache.cassandra.io.compress.CompressionMetadata$Chunk of org.apache.cassandra.io.compress.CompressionMetadata
Reader = org.apache.cassandra.streaming.compress.CompressedInputStream$Reader of org.apache.cassandra.streaming.compress.CompressedInputStream
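The runMayThrow() bytecode amounts to the following loop: for each chunk, it reads chunk.length + 4 bytes (the compressed data plus a 4-byte checksum) from the source stream, looping over partial reads, then puts the filled buffer on dataBuffer; on end-of-stream or IOException it records the failure in the enclosing CompressedInputStream.readException, enqueues POISON_PILL, and returns. An approximate source reconstruction, again derived from the disassembly and the local variable names rather than the original file:

    protected void runMayThrow() throws Exception
    {
        while (chunks.hasNext())
        {
            CompressionMetadata.Chunk chunk = chunks.next();

            // each compressed chunk is followed by a 4-byte checksum
            int readLength = chunk.length + 4;
            byte[] compressedWithCRC = new byte[readLength];

            int bufferRead = 0;
            while (bufferRead < readLength)
            {
                try
                {
                    int r = source.read(compressedWithCRC, bufferRead, readLength - bufferRead);
                    if (r < 0)
                    {
                        // stream ended before the chunk was fully read: record the error
                        // (readException is a field of the enclosing CompressedInputStream,
                        // reached through the synthetic this$0 in the bytecode) and unblock
                        // the consumer with the poison pill
                        CompressedInputStream.this.readException = new EOFException("No chunk available");
                        dataBuffer.put(CompressedInputStream.POISON_PILL);
                        return;
                    }
                    bufferRead += r;
                }
                catch (IOException e)
                {
                    logger.warn("Error while reading compressed input stream.", e);
                    CompressedInputStream.this.readException = e;
                    dataBuffer.put(CompressedInputStream.POISON_PILL);
                    return;
                }
            }
            // hand the fully read chunk (compressed bytes + CRC) to the consumer
            dataBuffer.put(compressedWithCRC);
        }
    }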