package org.springframework.http.codec.multipart;
import java.io.IOException;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import org.synchronoss.cloud.nio.multipart.DefaultPartBodyStreamStorageFactory;
import org.synchronoss.cloud.nio.multipart.Multipart;
import org.synchronoss.cloud.nio.multipart.MultipartContext;
import org.synchronoss.cloud.nio.multipart.MultipartUtils;
import org.synchronoss.cloud.nio.multipart.NioMultipartParser;
import org.synchronoss.cloud.nio.multipart.NioMultipartParserListener;
import org.synchronoss.cloud.nio.multipart.PartBodyStreamStorageFactory;
import org.synchronoss.cloud.nio.stream.storage.StreamStorage;
import reactor.core.publisher.BaseSubscriber;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink;
import reactor.core.publisher.Mono;
import reactor.core.publisher.SignalType;
import org.springframework.core.ResolvableType;
import org.springframework.core.codec.DecodingException;
import org.springframework.core.codec.Hints;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferLimitException;
import org.springframework.core.io.buffer.DataBufferUtils;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;
import org.springframework.core.log.LogFormatUtils;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ReactiveHttpInputMessage;
import org.springframework.http.codec.HttpMessageReader;
import org.springframework.http.codec.LoggingCodecSupport;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
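/**
 * {@code HttpMessageReader} for parsing multipart requests into a stream of
 * {@link Part} objects, delegating the actual parsing to the Synchronoss NIO
 * Multipart library.
 */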
public class SynchronossPartHttpMessageReader extends LoggingCodecSupport implements HttpMessageReader<Part> {
private int maxInMemorySize = 256 * 1024;
private long maxDiskUsagePerPart = -1;
private int maxParts = -1;
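/**
 * Configure the maximum amount of memory allowed per part. When the limit is
 * exceeded, file parts are written to a temporary file while non-file parts
 * are rejected with {@code DataBufferLimitException}. Defaults to 256K.
 */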
public void setMaxInMemorySize(int byteCount) {
this.maxInMemorySize = byteCount;
}
public int getMaxInMemorySize() {
return this.maxInMemorySize;
}
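/**
 * Configure the maximum amount of disk space allowed per file part.
 * Defaults to -1, i.e. unlimited.
 */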
public void setMaxDiskUsagePerPart(long maxDiskUsagePerPart) {
this.maxDiskUsagePerPart = maxDiskUsagePerPart;
}
public long getMaxDiskUsagePerPart() {
return this.maxDiskUsagePerPart;
}
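/**
 * Specify the maximum number of parts allowed in a given multipart request.
 * Defaults to -1, i.e. unlimited.
 */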
public void setMaxParts(int maxParts) {
this.maxParts = maxParts;
}
public int getMaxParts() {
return this.maxParts;
}
@Override
public List<MediaType> getReadableMediaTypes() {
return MultipartHttpMessageReader.MIME_TYPES;
}
@Override
public boolean canRead(ResolvableType elementType, @Nullable MediaType mediaType) {
if (Part.class.equals(elementType.toClass())) {
if (mediaType == null) {
return true;
}
for (MediaType supportedMediaType : getReadableMediaTypes()) {
if (supportedMediaType.isCompatibleWith(mediaType)) {
return true;
}
}
}
return false;
}
@Override
public Flux<Part> read(ResolvableType elementType, ReactiveHttpInputMessage message, Map<String, Object> hints) {
return Flux.create(new SynchronossPartGenerator(message))
.doOnNext(part -> {
if (!Hints.isLoggingSuppressed(hints)) {
LogFormatUtils.traceDebug(logger, traceOn -> Hints.getLogPrefix(hints) + "Parsed " +
(isEnableLoggingRequestDetails() ?
LogFormatUtils.formatValue(part, !traceOn) :
"parts '" + part.name() + "' (content masked)"));
}
});
}
@Override
public Mono<Part> readMono(ResolvableType elementType, ReactiveHttpInputMessage message, Map<String, Object> hints) {
return Mono.error(new UnsupportedOperationException("Cannot read multipart request body into single Part"));
}
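/**
 * Subscribes to the request body, feeds each {@code DataBuffer} to the
 * Synchronoss {@code NioMultipartParser}, and relays completed parts to the
 * {@code FluxSink} through a {@link FluxSinkAdapterListener}.
 */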
private class SynchronossPartGenerator extends BaseSubscriber<DataBuffer> implements Consumer<FluxSink<Part>> {
private final ReactiveHttpInputMessage inputMessage;
private final LimitedPartBodyStreamStorageFactory storageFactory = new LimitedPartBodyStreamStorageFactory();
@Nullable
private NioMultipartParserListener listener;
@Nullable
private NioMultipartParser parser;
public SynchronossPartGenerator(ReactiveHttpInputMessage inputMessage) {
this.inputMessage = inputMessage;
}
@Override
public void accept(FluxSink<Part> sink) {
HttpHeaders headers = this.inputMessage.getHeaders();
MediaType mediaType = headers.getContentType();
Assert.state(mediaType != null, "No content type set");
int length = getContentLength(headers);
Charset charset = Optional.ofNullable(mediaType.getCharset()).orElse(StandardCharsets.UTF_8);
MultipartContext context = new MultipartContext(mediaType.toString(), length, charset.name());
this.listener = new FluxSinkAdapterListener(sink, context, this.storageFactory);
this.parser = Multipart
.multipart(context)
.usePartBodyStreamStorageFactory(this.storageFactory)
.forNIO(this.listener);
this.inputMessage.getBody().subscribe(this);
}
@Override
protected void hookOnNext(DataBuffer buffer) {
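// Copy the buffer's bytes for the synchronous parser, then release the buffer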
Assert.state(this.parser != null && this.listener != null, "Not initialized yet");
int size = buffer.readableByteCount();
this.storageFactory.increaseByteCount(size);
byte[] resultBytes = new byte[size];
buffer.read(resultBytes);
try {
this.parser.write(resultBytes);
}
catch (IOException ex) {
cancel();
int index = this.storageFactory.getCurrentPartIndex();
this.listener.onError("Parser error for part [" + index + "]", ex);
}
finally {
DataBufferUtils.release(buffer);
}
}
@Override
protected void hookOnError(Throwable ex) {
if (this.listener != null) {
int index = this.storageFactory.getCurrentPartIndex();
this.listener.onError("Failure while parsing part[" + index + "]", ex);
}
}
@Override
protected void hookOnComplete() {
if (this.listener != null) {
this.listener.onAllPartsFinished();
}
}
@Override
protected void hookFinally(SignalType type) {
try {
if (this.parser != null) {
this.parser.close();
}
}
catch (IOException ex) {
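// ignore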
}
}
private int getContentLength(HttpHeaders headers) {
long length = headers.getContentLength();
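// Fall back to -1 (unknown) when the content length does not fit in an int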
return (int) length == length ? (int) length : -1;
}
}
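/**
 * Wraps the default {@code PartBodyStreamStorageFactory} in order to enforce
 * the configured maxParts, maxInMemorySize, and maxDiskUsagePerPart limits
 * while part bodies are being written.
 */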
private class LimitedPartBodyStreamStorageFactory implements PartBodyStreamStorageFactory {
private final PartBodyStreamStorageFactory storageFactory = (maxInMemorySize > 0 ?
new DefaultPartBodyStreamStorageFactory(maxInMemorySize) :
new DefaultPartBodyStreamStorageFactory());
private int index = 1;
private boolean isFilePart;
private long partSize;
public int getCurrentPartIndex() {
return this.index;
}
@Override
public StreamStorage newStreamStorageForPartBody(Map<String, List<String>> headers, int index) {
this.index = index;
this.isFilePart = (MultipartUtils.getFileName(headers) != null);
this.partSize = 0;
if (maxParts > 0 && index > maxParts) {
throw new DecodingException("Too many parts (" + index + " allowed)");
}
return this.storageFactory.newStreamStorageForPartBody(headers, index);
}
public void increaseByteCount(long byteCount) {
this.partSize += byteCount;
if (maxInMemorySize > 0 && !this.isFilePart && this.partSize >= maxInMemorySize) {
throw new DataBufferLimitException("Part[" + this.index + "] " +
"exceeded the in-memory limit of " + maxInMemorySize + " bytes");
}
if (maxDiskUsagePerPart > 0 && this.isFilePart && this.partSize > maxDiskUsagePerPart) {
throw new DecodingException("Part[" + this.index + "] " +
"exceeded the disk usage limit of " + maxDiskUsagePerPart + " bytes");
}
}
public void partFinished() {
this.index++;
this.isFilePart = false;
this.partSize = 0;
}
}
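/**
 * Adapts Synchronoss parser callbacks to the {@code FluxSink}: each finished
 * part is turned into a {@link Part} and emitted, and the sink is completed
 * or failed at most once.
 */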
private static class FluxSinkAdapterListener implements NioMultipartParserListener {
private final FluxSink<Part> sink;
private final MultipartContext context;
private final LimitedPartBodyStreamStorageFactory storageFactory;
private final AtomicInteger terminated = new AtomicInteger();
FluxSinkAdapterListener(
FluxSink<Part> sink, MultipartContext context, LimitedPartBodyStreamStorageFactory factory) {
this.sink = sink;
this.context = context;
this.storageFactory = factory;
}
@Override
public void onPartFinished(StreamStorage storage, Map<String, List<String>> headers) {
HttpHeaders httpHeaders = new HttpHeaders();
httpHeaders.putAll(headers);
this.storageFactory.partFinished();
this.sink.next(createPart(storage, httpHeaders));
}
private Part createPart(StreamStorage storage, HttpHeaders httpHeaders) {
String filename = MultipartUtils.getFileName(httpHeaders);
if (filename != null) {
return new SynchronossFilePart(httpHeaders, filename, storage);
}
else if (MultipartUtils.isFormField(httpHeaders, this.context)) {
String value = MultipartUtils.readFormParameterValue(storage, httpHeaders);
return new SynchronossFormFieldPart(httpHeaders, value);
}
else {
return new SynchronossPart(httpHeaders, storage);
}
}
@Override
public void onError(String message, Throwable cause) {
if (this.terminated.getAndIncrement() == 0) {
this.sink.error(new DecodingException(message, cause));
}
}
@Override
public void onAllPartsFinished() {
if (this.terminated.getAndIncrement() == 0) {
this.sink.complete();
}
}
@Override
public void onNestedPartStarted(Map<String, List<String>> headersFromParentPart) {
}
@Override
public void onNestedPartFinished() {
}
}
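/**
 * Base class for the Part implementations below, holding the part name
 * (extracted from the headers) and the headers themselves.
 */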
private abstract static class AbstractSynchronossPart implements Part {
private final String name;
private final HttpHeaders headers;
AbstractSynchronossPart(HttpHeaders headers) {
Assert.notNull(headers, "HttpHeaders is required");
this.name = MultipartUtils.getFieldName(headers);
this.headers = headers;
}
@Override
public String name() {
return this.name;
}
@Override
public HttpHeaders headers() {
return this.headers;
}
@Override
public String toString() {
return "Part '" + this.name + "', headers=" + this.headers;
}
}
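/**
 * Part backed by a Synchronoss {@code StreamStorage}, exposing its content
 * as a {@code Flux<DataBuffer>}.
 */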
private static class SynchronossPart extends AbstractSynchronossPart {
private final StreamStorage storage;
SynchronossPart(HttpHeaders headers, StreamStorage storage) {
super(headers);
Assert.notNull(storage, "StreamStorage is required");
this.storage = storage;
}
@Override
public Flux<DataBuffer> content() {
return DataBufferUtils.readInputStream(
getStorage()::getInputStream, DefaultDataBufferFactory.sharedInstance, 4096);
}
protected StreamStorage getStorage() {
return this.storage;
}
}
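/**
 * File part that can additionally copy its storage to a destination file.
 */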
private static class SynchronossFilePart extends SynchronossPart implements FilePart {
private static final OpenOption[] FILE_CHANNEL_OPTIONS =
{StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE};
private final String filename;
SynchronossFilePart(HttpHeaders headers, String filename, StreamStorage storage) {
super(headers, storage);
this.filename = filename;
}
@Override
public String filename() {
return this.filename;
}
@Override
public Mono<Void> transferTo(Path dest) {
ReadableByteChannel input = null;
FileChannel output = null;
try {
input = Channels.newChannel(getStorage().getInputStream());
output = FileChannel.open(dest, FILE_CHANNEL_OPTIONS);
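// A channel wrapped around an InputStream has no known size, so transfer
// until transferFrom reports that no more bytes were written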
long size = (input instanceof FileChannel ? ((FileChannel) input).size() : Long.MAX_VALUE);
long totalWritten = 0;
while (totalWritten < size) {
long written = output.transferFrom(input, totalWritten, size - totalWritten);
if (written <= 0) {
break;
}
totalWritten += written;
}
}
catch (IOException ex) {
return Mono.error(ex);
}
finally {
if (input != null) {
try {
input.close();
}
catch (IOException ignored) {
}
}
if (output != null) {
try {
output.close();
}
catch (IOException ignored) {
}
}
}
return Mono.empty();
}
@Override
public String toString() {
return "Part '" + name() + "', filename='" + this.filename + "'";
}
}
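/**
 * Form field part whose value has already been read into a String.
 */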
private static class SynchronossFormFieldPart extends AbstractSynchronossPart implements FormFieldPart {
private final String content;
SynchronossFormFieldPart(HttpHeaders headers, String content) {
super(headers);
this.content = content;
}
@Override
public String value() {
return this.content;
}
@Override
public Flux<DataBuffer> content() {
byte[] bytes = this.content.getBytes(getCharset());
return Flux.just(DefaultDataBufferFactory.sharedInstance.wrap(bytes));
}
private Charset getCharset() {
String name = MultipartUtils.getCharEncoding(headers());
return (name != null ? Charset.forName(name) : StandardCharsets.UTF_8);
}
@Override
public String toString() {
return "Part '" + name() + "=" + this.content + "'";
}
}
}