org.springframework.http.codec.multipart.SynchronossPartHttpMessageReader

/*
 * Copyright 2002-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.http.codec.multipart;

import java.io.File;
import java.io.IOException;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.StandardOpenOption;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;

import org.synchronoss.cloud.nio.multipart.Multipart;
import org.synchronoss.cloud.nio.multipart.MultipartContext;
import org.synchronoss.cloud.nio.multipart.MultipartUtils;
import org.synchronoss.cloud.nio.multipart.NioMultipartParser;
import org.synchronoss.cloud.nio.multipart.NioMultipartParserListener;
import org.synchronoss.cloud.nio.stream.storage.StreamStorage;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink;
import reactor.core.publisher.Mono;

import org.springframework.core.ResolvableType;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferFactory;
import org.springframework.core.io.buffer.DataBufferUtils;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ReactiveHttpInputMessage;
import org.springframework.http.codec.HttpMessageReader;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

/**
 * {@code HttpMessageReader} for parsing {@code "multipart/form-data"} requests
 * to a stream of {@link Part}'s using the Synchronoss NIO Multipart library.
 *
 * <p>This reader can be provided to {@link MultipartHttpMessageReader} in order
 * to aggregate all parts into a Map.
 *
 * @author Sebastien Deleuze
 * @author Rossen Stoyanchev
 * @author Arjen Poutsma
 * @since 5.0
 * @see <a href="https://github.com/synchronoss/nio-multipart">Synchronoss NIO Multipart</a>
 * @see MultipartHttpMessageReader
 */
public class SynchronossPartHttpMessageReader implements HttpMessageReader<Part> {

	private final DataBufferFactory bufferFactory = new DefaultDataBufferFactory();


	@Override
	public List<MediaType> getReadableMediaTypes() {
		return Collections.singletonList(MediaType.MULTIPART_FORM_DATA);
	}

	@Override
	public boolean canRead(ResolvableType elementType, @Nullable MediaType mediaType) {
		return Part.class.equals(elementType.resolve(Object.class)) &&
				(mediaType == null || MediaType.MULTIPART_FORM_DATA.isCompatibleWith(mediaType));
	}

	@Override
	public Flux<Part> read(ResolvableType elementType, ReactiveHttpInputMessage message, Map<String, Object> hints) {
		return Flux.create(new SynchronossPartGenerator(message, this.bufferFactory));
	}

	@Override
	public Mono<Part> readMono(ResolvableType elementType, ReactiveHttpInputMessage message, Map<String, Object> hints) {
		return Mono.error(new UnsupportedOperationException(
				"This reader does not support reading a single element."));
	}


	/**
	 * Consume and feed input to the Synchronoss parser, then adapt parser
	 * output events to {@code Flux<Sink<Part>>}.
	 */
	private static class SynchronossPartGenerator implements Consumer<FluxSink<Part>> {

		private final ReactiveHttpInputMessage inputMessage;

		private final DataBufferFactory bufferFactory;

		SynchronossPartGenerator(ReactiveHttpInputMessage inputMessage, DataBufferFactory factory) {
			this.inputMessage = inputMessage;
			this.bufferFactory = factory;
		}

		@Override
		public void accept(FluxSink<Part> emitter) {
			HttpHeaders headers = this.inputMessage.getHeaders();
			MediaType mediaType = headers.getContentType();
			Assert.state(mediaType != null, "No content type set");

			int length = Math.toIntExact(headers.getContentLength());
			Charset charset = Optional.ofNullable(mediaType.getCharset()).orElse(StandardCharsets.UTF_8);
			MultipartContext context = new MultipartContext(mediaType.toString(), length, charset.name());

			NioMultipartParserListener listener = new FluxSinkAdapterListener(emitter, this.bufferFactory, context);
			NioMultipartParser parser = Multipart.multipart(context).forNIO(listener);

			this.inputMessage.getBody().subscribe(buffer -> {
				byte[] resultBytes = new byte[buffer.readableByteCount()];
				buffer.read(resultBytes);
				try {
					parser.write(resultBytes);
				}
				catch (IOException ex) {
					listener.onError("Exception thrown providing input to the parser", ex);
				}
				finally {
					DataBufferUtils.release(buffer);
				}
			}, (ex) -> {
				try {
					listener.onError("Request body input error", ex);
					parser.close();
				}
				catch (IOException ex2) {
					listener.onError("Exception thrown while closing the parser", ex2);
				}
			}, () -> {
				try {
					parser.close();
				}
				catch (IOException ex) {
					listener.onError("Exception thrown while closing the parser", ex);
				}
			});
		}
	}


	/**
	 * Listen for parser output and adapt to {@code Flux<Sink<Part>>}.
	 */
	private static class FluxSinkAdapterListener implements NioMultipartParserListener {

		private final FluxSink<Part> sink;

		private final DataBufferFactory bufferFactory;

		private final MultipartContext context;

		private final AtomicInteger terminated = new AtomicInteger(0);

		FluxSinkAdapterListener(FluxSink<Part> sink, DataBufferFactory bufferFactory, MultipartContext context) {
			this.sink = sink;
			this.bufferFactory = bufferFactory;
			this.context = context;
		}

		@Override
		public void onPartFinished(StreamStorage storage, Map<String, List<String>> headers) {
			HttpHeaders httpHeaders = new HttpHeaders();
			httpHeaders.putAll(headers);
			this.sink.next(createPart(storage, httpHeaders));
		}

		private Part createPart(StreamStorage storage, HttpHeaders httpHeaders) {
			String filename = MultipartUtils.getFileName(httpHeaders);
			if (filename != null) {
				return new SynchronossFilePart(httpHeaders, storage, this.bufferFactory, filename);
			}
			else if (MultipartUtils.isFormField(httpHeaders, this.context)) {
				String value = MultipartUtils.readFormParameterValue(storage, httpHeaders);
				return new SynchronossFormFieldPart(httpHeaders, this.bufferFactory, value);
			}
			else {
				return new DefaultSynchronossPart(httpHeaders, storage, this.bufferFactory);
			}
		}

		@Override
		public void onError(String message, Throwable cause) {
			if (this.terminated.getAndIncrement() == 0) {
				this.sink.error(new RuntimeException(message, cause));
			}
		}

		@Override
		public void onAllPartsFinished() {
			if (this.terminated.getAndIncrement() == 0) {
				this.sink.complete();
			}
		}

		@Override
		public void onNestedPartStarted(Map<String, List<String>> headersFromParentPart) {
		}

		@Override
		public void onNestedPartFinished() {
		}
	}


	private static abstract class AbstractSynchronossPart implements Part {

		private final HttpHeaders headers;

		private final DataBufferFactory bufferFactory;

		AbstractSynchronossPart(HttpHeaders headers, DataBufferFactory bufferFactory) {
			Assert.notNull(headers, "HttpHeaders is required");
			Assert.notNull(bufferFactory, "'bufferFactory' is required");
			this.headers = headers;
			this.bufferFactory = bufferFactory;
		}

		@Override
		public String name() {
			return MultipartUtils.getFieldName(this.headers);
		}

		@Override
		public HttpHeaders headers() {
			return this.headers;
		}

		protected DataBufferFactory getBufferFactory() {
			return this.bufferFactory;
		}
	}


	private static class DefaultSynchronossPart extends AbstractSynchronossPart {

		private final StreamStorage storage;

		DefaultSynchronossPart(HttpHeaders headers, StreamStorage storage, DataBufferFactory factory) {
			super(headers, factory);
			Assert.notNull(storage, "'storage' is required");
			this.storage = storage;
		}

		@Override
		public Flux<DataBuffer> content() {
			return DataBufferUtils.readInputStream(this.storage::getInputStream, getBufferFactory(), 4096);
		}

		protected StreamStorage getStorage() {
			return this.storage;
		}
	}


	private static class SynchronossFilePart extends DefaultSynchronossPart implements FilePart {

		private final String filename;

		public SynchronossFilePart(
				HttpHeaders headers, StreamStorage storage, DataBufferFactory factory, String filename) {
			super(headers, storage, factory);
			this.filename = filename;
		}

		@Override
		public String filename() {
			return this.filename;
		}

		@Override
		public Mono<Void> transferTo(File destination) {
			ReadableByteChannel input = null;
			FileChannel output = null;
			try {
				input = Channels.newChannel(getStorage().getInputStream());
				output = FileChannel.open(destination.toPath(), StandardOpenOption.WRITE);
				long size = (input instanceof FileChannel ? ((FileChannel) input).size() : Long.MAX_VALUE);
				long totalWritten = 0;
				while (totalWritten < size) {
					long written = output.transferFrom(input, totalWritten, size - totalWritten);
					if (written <= 0) {
						break;
					}
					totalWritten += written;
				}
			}
			catch (IOException ex) {
				return Mono.error(ex);
			}
			finally {
				if (input != null) {
					try {
						input.close();
					}
					catch (IOException ignored) {
					}
				}
				if (output != null) {
					try {
						output.close();
					}
					catch (IOException ignored) {
					}
				}
			}
			return Mono.empty();
		}
	}


	private static class SynchronossFormFieldPart extends AbstractSynchronossPart implements FormFieldPart {

		private final String content;

		SynchronossFormFieldPart(HttpHeaders headers, DataBufferFactory bufferFactory, String content) {
			super(headers, bufferFactory);
			this.content = content;
		}

		@Override
		public String value() {
			return this.content;
		}

		@Override
		public Flux<DataBuffer> content() {
			byte[] bytes = this.content.getBytes(getCharset());
			DataBuffer buffer = getBufferFactory().allocateBuffer(bytes.length);
			buffer.write(bytes);
			return Flux.just(buffer);
		}

		private Charset getCharset() {
			return Optional.ofNullable(MultipartUtils.getCharEncoding(headers()))
					.map(Charset::forName).orElse(StandardCharsets.UTF_8);
		}
	}

}
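
For reference, a minimal usage sketch follows; it is not part of the Spring source above. It assumes a WebFlux setup in which a ServerHttpRequest carrying a "multipart/form-data" body is already at hand (for example inside a WebFilter or a handler), and the names PartReadingExample, readParts and readPartMap are illustrative only.

package example;

import java.util.Collections;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import org.springframework.core.ResolvableType;
import org.springframework.http.codec.multipart.MultipartHttpMessageReader;
import org.springframework.http.codec.multipart.Part;
import org.springframework.http.codec.multipart.SynchronossPartHttpMessageReader;
import org.springframework.http.server.reactive.ServerHttpRequest;
import org.springframework.util.MultiValueMap;

// Hypothetical helper class for illustration; not part of Spring Framework.
public class PartReadingExample {

	// Streams the parts of a "multipart/form-data" request as the parser emits them.
	public static Flux<Part> readParts(ServerHttpRequest request) {
		SynchronossPartHttpMessageReader partReader = new SynchronossPartHttpMessageReader();
		return partReader.read(ResolvableType.forClass(Part.class), request, Collections.emptyMap());
	}

	// Aggregates all parts into a MultiValueMap by wrapping the part reader
	// in a MultipartHttpMessageReader, as the class Javadoc suggests.
	public static Mono<MultiValueMap<String, Part>> readPartMap(ServerHttpRequest request) {
		MultipartHttpMessageReader reader =
				new MultipartHttpMessageReader(new SynchronossPartHttpMessageReader());
		ResolvableType mapType = ResolvableType.forClassWithGenerics(
				MultiValueMap.class, String.class, Part.class);
		return reader.readMono(mapType, request, Collections.emptyMap());
	}
}

The first method returns individual parts as a Flux<Part>; the second delegates to MultipartHttpMessageReader, which is the aggregation path mentioned in the class Javadoc (note that readMono on SynchronossPartHttpMessageReader itself is unsupported).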