Browse Source

Fix ProtobufDecoder handling of split message size

This commit introduces a new readMessageSize(DataBuffer input) private
method, inspired by CodedInputStream#readRawVarint32(int, InputStream)
and adapted for DataBuffer using the MessageDecoderFunction fields, in
order to support use cases where the message size is split across
distinct chunks.

It also fixes end-of-stream handling by using
DataBuffer#readableByteCount instead of -1, which is only relevant for
InputStream.

Issue: SPR-17429
pull/2025/head
Sebastien Deleuze 6 years ago
parent
commit
91de8d265e
  1. 59
      spring-web/src/main/java/org/springframework/http/codec/protobuf/ProtobufDecoder.java
  2. 42
      spring-web/src/test/java/org/springframework/http/codec/protobuf/ProtobufDecoderTests.java

59
spring-web/src/main/java/org/springframework/http/codec/protobuf/ProtobufDecoder.java

@@ -172,6 +172,8 @@ public class ProtobufDecoder extends ProtobufCodecSupport implements Decoder<Message>
private int messageBytesToRead;
private int offset;
public MessageDecoderFunction(ResolvableType elementType, int maxMessageSize) {
this.elementType = elementType;
@@ -188,11 +190,9 @@ public class ProtobufDecoder extends ProtobufCodecSupport implements Decoder<Message>
do {
if (this.output == null) {
int firstByte = input.read();
if (firstByte == -1) {
throw new DecodingException("Cannot parse message size");
if (!readMessageSize(input)) {
return messages;
}
this.messageBytesToRead = CodedInputStream.readRawVarint32(firstByte, input.asInputStream());
if (this.messageBytesToRead > this.maxMessageSize) {
throw new DecodingException(
"The number of bytes to read from the incoming stream " +
@@ -235,6 +235,57 @@ public class ProtobufDecoder extends ProtobufCodecSupport implements Decoder<Message>
DataBufferUtils.release(input);
}
}
/**
 * Parse the message size as a varint from the input stream, updating the
 * {@code messageBytesToRead} and {@code offset} fields so that parsing can
 * resume with the next chunk when the varint is split across buffers.
 * Inspired by {@link CodedInputStream#readRawVarint32(int, java.io.InputStream)},
 * adapted for {@link DataBuffer}: end of input is detected via
 * {@link DataBuffer#readableByteCount()} rather than a {@code -1} read.
 *
 * @param input the current chunk to read the size prefix from
 * @return {@code true} when the message size was parsed successfully,
 * {@code false} when the varint is truncated in this chunk (the partial
 * state is kept in the fields so the next chunk continues where this one
 * stopped)
 * @throws DecodingException when the varint is malformed (longer than 10 bytes)
 * @see <a href="https://developers.google.com/protocol-buffers/docs/encoding#varints">Base 128 Varints</a>
 */
private boolean readMessageSize(DataBuffer input) {
	if (this.offset == 0) {
		// Fresh varint: fast path for the common single-byte size (< 128).
		if (input.readableByteCount() == 0) {
			return false;
		}
		int firstByte = input.read();
		if ((firstByte & 0x80) == 0) {
			this.messageBytesToRead = firstByte;
			return true;
		}
		this.messageBytesToRead = firstByte & 0x7f;
		this.offset = 7;
	}
	if (this.offset < 32) {
		for (; this.offset < 32; this.offset += 7) {
			if (input.readableByteCount() == 0) {
				// Varint split across chunks: keep offset/messageBytesToRead
				// so the next invocation resumes from here.
				return false;
			}
			final int b = input.read();
			// NOTE(review): sizes >= 2^31 wrap negative here, as in
			// CodedInputStream#readRawVarint32 -- assumed to be rejected by the
			// caller's maxMessageSize check; confirm.
			this.messageBytesToRead |= (b & 0x7f) << this.offset;
			if ((b & 0x80) == 0) {
				this.offset = 0;
				return true;
			}
		}
	}
	// Keep reading up to 64 bits; continuation bytes beyond 32 bits are
	// consumed but their payload discarded, matching readRawVarint32.
	for (; this.offset < 64; this.offset += 7) {
		if (input.readableByteCount() == 0) {
			return false;
		}
		final int b = input.read();
		if ((b & 0x80) == 0) {
			this.offset = 0;
			return true;
		}
	}
	this.offset = 0;
	throw new DecodingException("Cannot parse message size: malformed varint");
}
}
}

42
spring-web/src/test/java/org/springframework/http/codec/protobuf/ProtobufDecoderTests.java

@@ -128,7 +128,9 @@ public class ProtobufDecoderTests extends AbstractDecoderTestCase<ProtobufDecoder>
}
@Test
public void decodeSplitChunks() throws IOException {
public void decodeSplitChunks() {
Flux<DataBuffer> input = Flux.just(this.testMsg1, this.testMsg2)
.flatMap(msg -> Mono.defer(() -> {
DataBuffer buffer = this.bufferFactory.allocateBuffer();
@@ -158,6 +160,44 @@ public class ProtobufDecoderTests extends AbstractDecoderTestCase<ProtobufDecoder>
.verifyComplete());
}
@Test // SPR-17429
public void decodeSplitMessageSize() {
	// The payload built below is ~100 KB, so its length-delimited size prefix
	// is a multi-byte varint; raise the decoder limit so it is accepted.
	this.decoder.setMaxMessageSize(100009);
	StringBuilder builder = new StringBuilder();
	for (int i = 0; i < 10000; i++) {
		builder.append("azertyuiop");
	}
	Msg bigMessage = Msg.newBuilder().setFoo(builder.toString()).setBlah(secondMsg2).build();
	Flux<DataBuffer> input = Flux.just(bigMessage, bigMessage)
			// Serialize each message into its own buffer in delimited form
			// (size varint followed by the message bytes).
			.flatMap(msg -> Mono.defer(() -> {
				DataBuffer buffer = this.bufferFactory.allocateBuffer();
				try {
					msg.writeDelimitedTo(buffer.asOutputStream());
					return Mono.just(buffer);
				}
				catch (IOException e) {
					release(buffer);
					return Mono.error(e);
				}
			}))
			// Split each buffer after 2 bytes so the size varint itself is cut
			// across two chunks -- the exact case this test exercises (SPR-17429).
			.flatMap(buffer -> {
				int len = 2;
				Flux<DataBuffer> result = Flux.just(
						DataBufferUtils.retain(buffer.slice(0, len)),
						DataBufferUtils
								.retain(buffer.slice(len, buffer.readableByteCount() - len))
				);
				release(buffer);
				return result;
			});
	// Both messages must be reassembled despite the split size prefix.
	testDecode(input, Msg.class, step -> step
			.expectNext(bigMessage)
			.expectNext(bigMessage)
			.verifyComplete());
}
@Test
public void decodeMergedChunks() throws IOException {
DataBuffer buffer = this.bufferFactory.allocateBuffer();

Loading…
Cancel
Save