Start adding max doc len validation (first to async)
cowtowncoder committed Sep 7, 2023
1 parent 414de9f commit ac4adfe
Showing 5 changed files with 122 additions and 4 deletions.
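
For context (not part of the diff below): the new limit only takes effect when a positive maximum is configured via StreamReadConstraints, the same way the new test sets it up. A minimal sketch of the caller-side configuration, using only API that appears in this commit's test code:

    JsonFactory f = JsonFactory.builder()
            .streamReadConstraints(StreamReadConstraints.builder()
                    .maxDocumentLength(10_000L) // example value; non-positive means "unlimited"
                    .build())
            .build();
    // Parsers created by this factory fail with StreamConstraintsException once the
    // accumulated input length exceeds the configured maximum.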
@@ -318,6 +318,19 @@ public long getMaxDocumentLength() {
return _maxDocLen;
}

/**
* Convenience method, basically same as:
*<pre>
* getMaxDocumentLength() > 0L
*</pre>
*
* @return {@code true} if this constraints instance has a limit for maximum
* document length to enforce; {@code false} otherwise.
*/
public boolean hasMaxDocumentLength() {
return _maxDocLen > 0L;
}

/**
* Accessor for maximum length of numbers to decode.
* see {@link Builder#maxNumberLength(int)} for details.
@@ -394,7 +407,7 @@ public void validateDocumentLength(long len) throws StreamConstraintsException
// Note: -1L used as marker for "unlimited"
&& (_maxDocLen > 0L)) {
throw _constructException(
"Document nesting depth (%d) exceeds the maximum allowed (%d, from %s)",
"Document length (%d) exceeds the maximum allowed (%d, from %s)",
len, _maxDocLen,
_constrainRef("getMaxDocumentLength"));
}
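The new hasMaxDocumentLength() accessor lets calling code skip the length check entirely when no limit is configured; a hypothetical caller-side guard (not part of this diff) could look like:

    StreamReadConstraints constraints = streamReadConstraints();
    if (constraints.hasMaxDocumentLength()) {
        // throws StreamConstraintsException if the running total exceeds the limit
        constraints.validateDocumentLength(_currInputProcessed);
    }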
@@ -21,8 +21,8 @@
* @since 2.14
*/
public class NonBlockingByteBufferJsonParser
- extends NonBlockingUtf8JsonParserBase
- implements ByteBufferFeeder
+ extends NonBlockingUtf8JsonParserBase
+ implements ByteBufferFeeder
{
private ByteBuffer _inputBuffer = ByteBuffer.wrap(NO_BYTES);

@@ -56,6 +56,9 @@ public void feedInput(final ByteBuffer byteBuffer) throws IOException {
// Time to update pointers first
_currInputProcessed += _origBufferLen;

// 06-Sep-2023, tatu: [core#1046] Enforce max doc length limit
streamReadConstraints().validateDocumentLength(_currInputProcessed);

// Also need to adjust row start, to work as if it extended into the past wrt new buffer
_currInputRowStart = start - (_inputEnd - _currInputRowStart);

@@ -48,6 +48,9 @@ public void feedInput(final byte[] buf, final int start, final int end) throws IOException
// Time to update pointers first
_currInputProcessed += _origBufferLen;

// 06-Sep-2023, tatu: [core#1046] Enforce max doc length limit
streamReadConstraints().validateDocumentLength(_currInputProcessed);

// Also need to adjust row start, to work as if it extended into the past wrt new buffer
_currInputRowStart = start - (_inputEnd - _currInputRowStart);

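Both feedInput() overrides now pass the running _currInputProcessed total to validateDocumentLength() each time the caller hands over a new chunk, so an oversized document is rejected at the feed call rather than after the whole payload has been buffered. A self-contained sketch of how this surfaces to a caller of the byte[]-based non-blocking parser (illustrative only: beyond the names shown in this diff it uses the standard jackson-core async API, and the 10 KB limit and 1,000-byte chunks are arbitrary example values):

    import java.nio.charset.StandardCharsets;

    import com.fasterxml.jackson.core.*;
    import com.fasterxml.jackson.core.async.ByteArrayFeeder;
    import com.fasterxml.jackson.core.exc.StreamConstraintsException;

    public class MaxDocLenAsyncDemo {
        public static void main(String[] args) throws Exception {
            JsonFactory f = JsonFactory.builder()
                    .streamReadConstraints(StreamReadConstraints.builder()
                            .maxDocumentLength(10_000L)
                            .build())
                    .build();

            // Build a ~25 KB JSON array, well past the 10 KB limit
            StringBuilder sb = new StringBuilder("[");
            int i = 0;
            while (sb.length() < 25_000) {
                sb.append(++i).append(",\n");
            }
            byte[] doc = sb.append("true]").toString().getBytes(StandardCharsets.UTF_8);

            try (JsonParser p = f.createNonBlockingByteArrayParser()) {
                // the non-blocking byte[] parser implements ByteArrayFeeder
                ByteArrayFeeder feeder = (ByteArrayFeeder) p;
                int offset = 0;
                JsonToken t;
                while ((t = p.nextToken()) != null) {
                    if (t == JsonToken.NOT_AVAILABLE) {
                        if (offset < doc.length) {
                            int end = Math.min(offset + 1_000, doc.length);
                            // feedInput() is where the new check runs; it throws once
                            // the accumulated length exceeds maxDocumentLength
                            feeder.feedInput(doc, offset, end);
                            offset = end;
                        } else {
                            feeder.endOfInput();
                        }
                    }
                    // a real caller would consume the token here
                }
            } catch (StreamConstraintsException e) {
                System.err.println("Rejected: " + e.getMessage());
            }
        }
    }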
@@ -0,0 +1,99 @@
package com.fasterxml.jackson.core.constraints;

import java.io.IOException;

import com.fasterxml.jackson.core.*;
import com.fasterxml.jackson.core.async.AsyncTestBase;
import com.fasterxml.jackson.core.exc.StreamConstraintsException;
import com.fasterxml.jackson.core.testsupport.AsyncReaderWrapper;

// [core#1046]: Add max-document-length constraints
public class LargeDocReadTest extends AsyncTestBase
{
private final JsonFactory JSON_F_DEFAULT = newStreamFactory();

private final JsonFactory JSON_F_DOC_10K = JsonFactory.builder()
.streamReadConstraints(StreamReadConstraints.builder().maxDocumentLength(10_000L).build())
.build();

// Tests with documents small enough not to hit any configured limit
public void testLargeDocBytes() throws Exception {
final String doc = generateJSON(StreamReadConstraints.defaults().getMaxNameLength() - 100);
try (JsonParser p = createParserUsingStream(JSON_F_DEFAULT, doc, "UTF-8")) {
consumeTokens(p);
}
}

public void testLargeDocChars() throws Exception {
final String doc = generateJSON(StreamReadConstraints.defaults().getMaxNameLength() - 100);
try (JsonParser p = createParserUsingReader(JSON_F_DEFAULT, doc)) {
consumeTokens(p);
}
}

public void testLargeDocWithSmallLimitBytes() throws Exception
{
final String doc = generateJSON(15_000);
try (JsonParser p = createParserUsingStream(JSON_F_DOC_10K, doc, "UTF-8")) {
consumeTokens(p);
fail("expected StreamConstraintsException");
} catch (StreamConstraintsException e) {
verifyMaxDocLen(JSON_F_DOC_10K, e);
}
}

public void testLargeDocWithSmallLimitChars() throws Exception
{
final String doc = generateJSON(15_000);
try (JsonParser p = createParserUsingReader(JSON_F_DOC_10K, doc)) {
consumeTokens(p);
fail("expected StreamConstraintsException");
} catch (StreamConstraintsException e) {
verifyMaxDocLen(JSON_F_DOC_10K, e);
}
}

public void testLargeDocWithSmallLimitAsync() throws Exception
{
final byte[] doc = utf8Bytes(generateJSON(25_000));

AsyncReaderWrapper p = asyncForBytes(JSON_F_DOC_10K, 1000, doc, 1);
try {
consumeAsync(p);
fail("expected StreamConstraintsException");
} catch (StreamConstraintsException e) {
verifyMaxDocLen(JSON_F_DOC_10K, e);
}
}

private void consumeTokens(JsonParser p) throws IOException {
while (p.nextToken() != null) {
;
}
}

private void consumeAsync(AsyncReaderWrapper w) throws IOException {
while (w.nextToken() != null) {
;
}
}

private String generateJSON(final int docLen) {
final StringBuilder sb = new StringBuilder();
sb.append("[");

int i = 0;
while (docLen > sb.length()) {
sb.append(++i).append(",\n");
}
sb.append("true ] ");
return sb.toString();
}

private void verifyMaxDocLen(JsonFactory f, StreamConstraintsException e) {
verifyException(e, "Document length");
verifyException(e, "exceeds the maximum allowed ("
+f.streamReadConstraints().getMaxDocumentLength()
);
}
}
@@ -19,7 +19,7 @@ public class AsyncReaderWrapperForByteArray extends AsyncReaderWrapper
private int _end;

public AsyncReaderWrapperForByteArray(JsonParser sr, int bytesPerCall,
- byte[] doc, int padding)
+ byte[] doc, int padding)
{
super(sr);
_bytesPerFeed = bytesPerCall;
