Commit

Decode percent encoding on-the-fly to avoid HttpData.usingBuffer (#22)

Motivation:

HttpData.usingBuffer may require copying data when the data is only present on disk. This is slow and defeats the purpose of moving the data to disk in the first place. Most large files will likely use multipart, where this is not an issue, but the possibility still makes alternative HttpData implementations awkward. Thus, usingBuffer should be avoided where possible.

Modification:

Alter HttpPostStandardRequestDecoder to decode data on-the-fly instead of all at once.
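
The tricky case in decoding on-the-fly is a percent escape that straddles a chunk boundary: a chunk may end in "%" or in "%" plus one hex digit, and those bytes have to be held back until the next chunk completes them, which is what the new "partial percent escape" check in the first hunk below does. The following is a minimal, self-contained sketch of that idea in plain Java (a hypothetical StreamingPercentDecoder, not the netty5 API); it assumes well-formed escapes, whereas the real decoder throws ErrorDataDecoderException on invalid hex.

import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

// Sketch of chunk-by-chunk percent decoding (plain JDK, not the netty5 API).
// Assumes well-formed escapes; the real decoder throws ErrorDataDecoderException on bad hex.
final class StreamingPercentDecoder {
    private final ByteArrayOutputStream out = new ByteArrayOutputStream();
    private byte[] pending = new byte[0]; // incomplete escape carried over from the previous chunk

    void offer(byte[] chunk) {
        // Prepend whatever was held back from the previous chunk.
        byte[] data = new byte[pending.length + chunk.length];
        System.arraycopy(pending, 0, data, 0, pending.length);
        System.arraycopy(chunk, 0, data, pending.length, chunk.length);

        // Do not consume a '%' that is not yet followed by its two hex digits.
        int limit = data.length;
        if (limit >= 1 && data[limit - 1] == '%') {
            limit -= 1;
        } else if (limit >= 2 && data[limit - 2] == '%') {
            limit -= 2;
        }

        for (int i = 0; i < limit; i++) {
            byte b = data[i];
            if (b == '%') {
                int hi = Character.digit(data[i + 1], 16);
                int lo = Character.digit(data[i + 2], 16);
                out.write((hi << 4) | lo);
                i += 2;
            } else if (b == '+') {
                out.write(' ');
            } else {
                out.write(b);
            }
        }
        pending = Arrays.copyOfRange(data, limit, data.length);
    }

    String finish() {
        // A well-formed value never ends in a partial escape, so nothing should be pending here.
        return out.toString(StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        String value = "va%20lue1";
        // Split the encoded value at every possible boundary, like the new testPercentDecode does.
        for (int split = 0; split <= value.length(); split++) {
            StreamingPercentDecoder decoder = new StreamingPercentDecoder();
            decoder.offer(value.substring(0, split).getBytes(StandardCharsets.US_ASCII));
            decoder.offer(value.substring(split).getBytes(StandardCharsets.US_ASCII));
            System.out.println(decoder.finish()); // prints "va lue1" for every split point
        }
    }
}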

Result:

usingBuffer is no longer called by the decoder; it is now unused outside the HttpData implementations themselves.

yawkat authored Sep 18, 2023
1 parent c40b9cc commit 0d5b0e4

Showing 2 changed files with 41 additions and 12 deletions.

@@ -499,10 +499,15 @@ private void parseBodyAttributesStandard() {
                 firstpos = currentpos;
                 currentStatus = MultiPartStatus.EPILOGUE;
             } else if (contRead && currentAttribute != null && currentStatus == MultiPartStatus.FIELD) {
+                // make sure we don't forward a partial percent escape
+                if (firstpos <= currentpos - 1 && undecodedChunk.getUnsignedByte(currentpos - 1) == '%') {
+                    currentpos--;
+                } else if (firstpos <= currentpos - 2 && undecodedChunk.getUnsignedByte(currentpos - 2) == '%') {
+                    currentpos -= 2;
+                }
                 // reset index except if to continue in case of FIELD getStatus
                 undecodedChunk.readerOffset(firstpos);
-                currentAttribute.addContent(undecodedChunk.readSplit(currentpos - firstpos),
-                        false);
+                currentAttribute.addContent(decodeAttribute(undecodedChunk.readSplit(currentpos - firstpos), charset), false);
                 currentpos = 0;
                 firstpos = currentpos;
             }
@@ -538,13 +543,7 @@ private void parseBodyAttributes() {
     }
 
     private void setFinalBuffer(Buffer buffer) throws IOException {
-        currentAttribute.addContent(buffer, true);
-        currentAttribute.usingBuffer(attrBuffer -> {
-            Buffer decodedBuf = decodeAttribute(attrBuffer, charset);
-            if (decodedBuf != null) { // override content only when ByteBuf needed decoding
-                currentAttribute.setContent(decodedBuf);
-            }
-        });
+        currentAttribute.addContent(decodeAttribute(buffer, charset), true);
         addHttpData(currentAttribute);
         currentAttribute = null;
     }
@@ -566,7 +565,7 @@ private static Buffer decodeAttribute(Buffer b, Charset charset) {
         ByteCursor cursor = b.openCursor();
         int firstEscaped = cursor.process(new UrlEncodedDetector());
         if (firstEscaped == -1) {
-            return null; // nothing to decode
+            return b; // nothing to decode
         }
 
         cursor = b.openCursor();
@@ -580,10 +579,13 @@ private static Buffer decodeAttribute(Buffer b, Charset charset) {
             }
             idx -= urlDecode.nextEscapedIdx - 1;
             buf.close();
+            String s = b.toString(charset);
+            b.close();
             throw new ErrorDataDecoderException(
-                    String.format("Invalid hex byte at index '%d' in string: '%s'", idx, b.toString(charset)));
+                    String.format("Invalid hex byte at index '%d' in string: '%s'", idx, s));
         }
 
+        b.close();
         return buf;
     }
 

@@ -16,13 +16,18 @@
 package io.netty.contrib.handler.codec.http.multipart;
 
 import io.netty5.buffer.Buffer;
+import io.netty5.buffer.BufferAllocator;
+import io.netty5.buffer.DefaultBufferAllocators;
+import io.netty5.handler.codec.http.DefaultHttpContent;
 import io.netty5.handler.codec.http.DefaultHttpRequest;
 import io.netty5.handler.codec.http.DefaultLastHttpContent;
+import io.netty5.handler.codec.http.HttpContent;
 import io.netty5.handler.codec.http.HttpMethod;
 import io.netty5.handler.codec.http.HttpRequest;
 import io.netty5.handler.codec.http.HttpVersion;
+import java.nio.charset.StandardCharsets;
 
 import io.netty5.handler.codec.http.LastHttpContent;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 
@@ -82,13 +87,35 @@ void testDecodeZeroAttributesWithAmpersandPrefix() {
         decoder.destroy();
     }
 
+    @Test
+    void testPercentDecode() {
+        String requestBody = "key1=va%20lue1";
+
+        HttpRequest request = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/upload");
+
+        BufferAllocator alloc = DefaultBufferAllocators.preferredAllocator();
+        for (int splitIndex = 0; splitIndex < requestBody.length(); splitIndex++) {
+            Buffer full = alloc.copyOf(requestBody, StandardCharsets.UTF_8);
+            HttpPostStandardRequestDecoder decoder = new HttpPostStandardRequestDecoder(httpDiskDataFactory(), request);
+            try (HttpContent<?> left = new DefaultHttpContent(full.readSplit(splitIndex))) {
+                decoder.offer(left);
+            }
+            try (LastHttpContent<?> right = new DefaultLastHttpContent(full)) {
+                decoder.offer(right);
+            }
+            assertEquals(1, decoder.getBodyHttpDatas().size());
+            assertMemoryAttribute(decoder.getBodyHttpData("key1"), "va lue1");
+            decoder.destroy();
+        }
+    }
+
     private static DefaultHttpDataFactory httpDiskDataFactory() {
         return new DefaultHttpDataFactory(false);
     }
 
     private static void assertMemoryAttribute(InterfaceHttpData data, String expectedValue) {
         assertEquals(InterfaceHttpData.HttpDataType.Attribute, data.getHttpDataType());
-        assertEquals(((MemoryAttribute) data).getValue(), expectedValue);
+        assertEquals(expectedValue, ((MemoryAttribute) data).getValue());
     }
 
 }
