Skip to content

Commit

Permalink
Merge branch 'jetty-12.1.x' into fix/jetty-12.1.x/serlvet6-demos
Browse files Browse the repository at this point in the history
# Conflicts:
#	jetty-core/jetty-http/src/main/java/org/eclipse/jetty/http/content/CachingHttpContentFactory.java
  • Loading branch information
gregw committed Jan 13, 2025
2 parents f6dc52e + ced7f2c commit 0d5038a
Show file tree
Hide file tree
Showing 19 changed files with 478 additions and 34 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ public class CachingHttpContentFactory implements HttpContent.Factory
private final AtomicLong _cachedSize = new AtomicLong();
// NOTE(review): a second, duplicate declaration of _shrinking was removed here —
// merge-conflict residue (this file is listed as conflicted in the merge commit);
// a duplicate field declaration does not compile.
private final AtomicBoolean _shrinking = new AtomicBoolean();
private final ByteBufferPool.Sized _bufferPool;
private int _maxCachedFileSize = DEFAULT_MAX_CACHED_FILE_SIZE;
private int _maxCachedFiles = DEFAULT_MAX_CACHED_FILES;
private long _maxCacheSize = DEFAULT_MAX_CACHE_SIZE;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -163,6 +163,12 @@ private MultiBufferFileMappedHttpContent(HttpContent content, int maxBufferSize)

long contentLength = content.getContentLengthValue();
int bufferCount = Math.toIntExact(contentLength / maxBufferSize);
if (contentLength % maxBufferSize != 0)
{
if (bufferCount == Integer.MAX_VALUE)
throw new IOException("Cannot memory map Content as that would require over Integer.MAX_VALUE buffers: " + content);
bufferCount++;
}
_buffers = new ByteBuffer[bufferCount];
long currentPos = 0L;
long total = 0L;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,8 @@
import org.eclipse.jetty.util.resource.ResourceFactory;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
Expand All @@ -39,7 +41,7 @@ public class FileMappingHttpContentFactoryTest
public WorkDir workDir;

@Test
public void testMultiBufferFileMapped() throws Exception
public void testMultiBufferFileMappedOffsetAndLength() throws Exception
{
Path file = Files.writeString(workDir.getEmptyPathDir().resolve("file.txt"), "0123456789abcdefghijABCDEFGHIJ");
FileMappingHttpContentFactory fileMappingHttpContentFactory = new FileMappingHttpContentFactory(
Expand Down Expand Up @@ -76,6 +78,22 @@ public void testMultiBufferFileMapped() throws Exception
assertThat(writeToString(content, 25, -1), is("FGHIJ"));
}

@ParameterizedTest
@ValueSource(ints = {8, 10})
public void testMultiBufferFileMappedMaxBufferSizeRounding(int maxBufferSize) throws Exception
{
    // 30 bytes of content: with maxBufferSize 8 the length is not an exact
    // multiple of the buffer size, with 10 it is — both must serve the full content.
    Path dir = workDir.getEmptyPathDir();
    Path file = Files.writeString(dir.resolve("file.txt"), "0123456789abcdefghijABCDEFGHIJ");
    ResourceHttpContentFactory delegate = new ResourceHttpContentFactory(
        ResourceFactory.root().newResource(file.getParent()), MimeTypes.DEFAULTS, ByteBufferPool.SIZED_NON_POOLING);
    FileMappingHttpContentFactory factory = new FileMappingHttpContentFactory(delegate, 0, maxBufferSize);

    HttpContent content = factory.getContent("file.txt");

    assertThat(content.getContentLength().getValue(), is("30"));
    assertThat(content.getContentLengthValue(), is(30L));
    assertThat(writeToString(content, 0, -1), is("0123456789abcdefghijABCDEFGHIJ"));
}

private static String writeToString(HttpContent content, long offset, long length) throws IOException
{
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -464,9 +464,14 @@ private void configure(Map<Integer, Integer> settings, boolean local)
if (LOG.isDebugEnabled())
LOG.debug("Updating {} max header list size to {} for {}", local ? "decoder" : "encoder", value, this);
if (local)
{
parser.getHpackDecoder().setMaxHeaderListSize(value);
}
else
generator.getHpackEncoder().setMaxHeaderListSize(value);
{
HpackEncoder hpackEncoder = generator.getHpackEncoder();
hpackEncoder.setMaxHeaderListSize(Math.min(value, hpackEncoder.getMaxHeaderListSize()));
}
}
case SettingsFrame.ENABLE_CONNECT_PROTOCOL ->
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ public int getMaxHeaderListSize()

/**
 * Sets the maximum allowed header list size.
 *
 * @param maxHeaderListSize the maximum header list size in bytes; a
 *                          non-positive value resets the limit to
 *                          {@code HpackContext.DEFAULT_MAX_HEADER_LIST_SIZE}
 */
public void setMaxHeaderListSize(int maxHeaderListSize)
{
    if (maxHeaderListSize > 0)
        _maxHeaderListSize = maxHeaderListSize;
    else
        _maxHeaderListSize = HpackContext.DEFAULT_MAX_HEADER_LIST_SIZE;
}

public HpackContext getHpackContext()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1315,6 +1315,7 @@ public boolean handle(Request request, Response response, Callback callback)
}, httpConfig);
connector.getBean(AbstractHTTP2ServerConnectionFactory.class).setMaxFrameSize(17 * 1024);
http2Client.setMaxFrameSize(18 * 1024);
http2Client.setMaxRequestHeadersSize(2 * maxHeadersSize);

// Wait for the SETTINGS frame to be exchanged.
CountDownLatch settingsLatch = new CountDownLatch(1);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -447,6 +447,42 @@ public void onGoAway(Session session, GoAwayFrame frame)
assertTrue(goAwayLatch.await(5, TimeUnit.SECONDS));
}

@Test
public void testMaxHeaderListSizeCappedByClient() throws Exception
{
    // The server advertises a MAX_HEADER_LIST_SIZE, but the client is
    // configured with an even smaller limit, which must win.
    int maxHeaderListSize = 2 * 1024;
    CountDownLatch serverGoAwayLatch = new CountDownLatch(1);
    start(new ServerSessionListener()
    {
        @Override
        public Map<Integer, Integer> onPreface(Session session)
        {
            return Map.of(SettingsFrame.MAX_HEADER_LIST_SIZE, maxHeaderListSize);
        }

        @Override
        public void onGoAway(Session session, GoAwayFrame frame)
        {
            serverGoAwayLatch.countDown();
        }
    });
    http2Client.setMaxRequestHeadersSize(maxHeaderListSize / 2);

    Session session = newClientSession(new Session.Listener() {});
    HttpFields requestHeaders = HttpFields.build()
        .put("X-Large", "x".repeat(maxHeaderListSize - 256)); // 256 bytes to account for the other headers
    MetaData.Request request = newRequest("GET", requestHeaders);
    HeadersFrame headersFrame = new HeadersFrame(request, null, true);

    ExecutionException executionException = assertThrows(ExecutionException.class,
        () -> session.newStream(headersFrame, new Stream.Listener() {}).get(5, TimeUnit.SECONDS));
    // The HPACK context is compromised trying to encode the large header.
    Throwable cause = executionException.getCause();
    assertThat(cause, Matchers.instanceOf(HpackException.SessionException.class));

    assertTrue(serverGoAwayLatch.await(5, TimeUnit.SECONDS));
}

@Test
public void testMaxHeaderListSizeExceededByServer() throws Exception
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
Expand All @@ -41,6 +42,7 @@
import org.eclipse.jetty.util.annotation.ManagedOperation;
import org.eclipse.jetty.util.component.Dumpable;
import org.eclipse.jetty.util.component.DumpableCollection;
import org.eclipse.jetty.util.component.DumpableMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand All @@ -66,8 +68,11 @@ public class ArrayByteBufferPool implements ByteBufferPool, Dumpable
private final long _maxHeapMemory;
private final long _maxDirectMemory;
private final IntUnaryOperator _bucketIndexFor;
private final IntUnaryOperator _bucketCapacity;
private final AtomicBoolean _evictor = new AtomicBoolean(false);
private final AtomicLong _reserved = new AtomicLong();
private final ConcurrentMap<Integer, Long> _noBucketDirectAcquires = new ConcurrentHashMap<>();
private final ConcurrentMap<Integer, Long> _noBucketIndirectAcquires = new ConcurrentHashMap<>();
private boolean _statisticsEnabled;

/**
Expand Down Expand Up @@ -166,6 +171,7 @@ protected ArrayByteBufferPool(int minCapacity, int factor, int maxCapacity, int
_maxHeapMemory = maxMemory(maxHeapMemory);
_maxDirectMemory = maxMemory(maxDirectMemory);
_bucketIndexFor = bucketIndexFor;
_bucketCapacity = bucketCapacity;
}

private long maxMemory(long maxMemory)
Expand Down Expand Up @@ -213,7 +219,10 @@ public RetainableByteBuffer.Mutable acquire(int size, boolean direct)

// No bucket, return non-pooled.
if (bucket == null)
{
recordNoBucketAcquire(size, direct);
return RetainableByteBuffer.wrap(BufferUtil.allocate(size, direct));
}

bucket.recordAcquire();

Expand All @@ -232,6 +241,22 @@ public RetainableByteBuffer.Mutable acquire(int size, boolean direct)
return buffer;
}

/**
 * Records an acquire that no bucket could serve (so a non-pooled buffer
 * was returned), keyed by the capacity of the bucket index the requested
 * size maps to. Only recorded when statistics are enabled.
 *
 * @param size the requested buffer size
 * @param direct whether a direct buffer was requested
 */
private void recordNoBucketAcquire(int size, boolean direct)
{
    if (isStatisticsEnabled())
    {
        ConcurrentMap<Integer, Long> map = direct ? _noBucketDirectAcquires : _noBucketIndirectAcquires;
        int idx = _bucketIndexFor.applyAsInt(size);
        int key = _bucketCapacity.applyAsInt(idx);
        // merge() is the idiomatic (and equally atomic) form of the
        // null-checking compute() it replaces.
        map.merge(key, 1L, Long::sum);
    }
}

@Override
public boolean releaseAndRemove(RetainableByteBuffer buffer)
{
Expand Down Expand Up @@ -437,7 +462,9 @@ public long getAvailableHeapMemory()
/**
 * Empties the pool: clears both the direct and indirect buckets,
 * and resets the non-pooled acquisition statistics.
 */
public void clear()
{
    // The four operations are independent; group bucket clears and
    // statistics resets for readability.
    clearBuckets(_direct);
    clearBuckets(_indirect);
    _noBucketDirectAcquires.clear();
    _noBucketIndirectAcquires.clear();
}

private void clearBuckets(RetainedBucket[] buckets)
Expand All @@ -456,7 +483,10 @@ public void dump(Appendable out, String indent) throws IOException
indent,
this,
DumpableCollection.fromArray("direct", _direct),
DumpableCollection.fromArray("indirect", _indirect));
new DumpableMap("direct non-pooled acquisitions", _noBucketDirectAcquires),
DumpableCollection.fromArray("indirect", _indirect),
new DumpableMap("indirect non-pooled acquisitions", _noBucketIndirectAcquires)
);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import org.junit.jupiter.api.Test;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.lessThan;
Expand All @@ -38,6 +39,30 @@

public class ArrayByteBufferPoolTest
{
@Test
public void testDump()
{
    // Max pooled capacity is 100, so sizes 101..150 cannot be served by any
    // bucket and must show up in the non-pooled acquisition statistics.
    ArrayByteBufferPool pool = new ArrayByteBufferPool(0, 10, 100, Integer.MAX_VALUE, 200, 200);
    pool.setStatisticsEnabled(true);

    List<RetainableByteBuffer> acquired = new ArrayList<>();
    for (int size = 1; size <= 150; size++)
        acquired.add(pool.acquire(size, true));
    acquired.forEach(RetainableByteBuffer::release);

    String dump = pool.dump();
    assertThat(dump, containsString("direct non-pooled acquisitions size=5\n"));
    assertThat(dump, containsString("110: 10\n"));
    assertThat(dump, containsString("120: 10\n"));
    assertThat(dump, containsString("130: 10\n"));
    assertThat(dump, containsString("140: 10\n"));
    assertThat(dump, containsString("150: 10\n"));

    // Clearing the pool must also reset the statistics.
    pool.clear();
    assertThat(pool.dump(), containsString("direct non-pooled acquisitions size=0\n"));
}

@Test
public void testMaxMemoryEviction()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,6 @@ public class JettyBootstrapActivator implements BundleActivator
public static final String DEFAULT_JETTYHOME = "/jettyhome";

private ServiceRegistration<?> _registeredServer;
/* private PackageAdminServiceTracker _packageAdminServiceTracker;*/

/**
* Setup a new jetty Server, register it as a service.
Expand All @@ -75,10 +74,6 @@ public class JettyBootstrapActivator implements BundleActivator
@Override
public void start(final BundleContext context) throws Exception
{
// track other bundles and fragments attached to this bundle that we
// should activate, as OSGi will not call activators for them.
/* _packageAdminServiceTracker = new PackageAdminServiceTracker(context);*/

ServiceReference[] references = context.getAllServiceReferences("org.eclipse.jetty.http.HttpFieldPreEncoder", null);

if (references == null || references.length == 0)
Expand All @@ -96,14 +91,6 @@ public void start(final BundleContext context) throws Exception
@Override
public void stop(BundleContext context) throws Exception
{

/* if (_packageAdminServiceTracker != null)
{
_packageAdminServiceTracker.stop();
context.removeServiceListener(_packageAdminServiceTracker);
_packageAdminServiceTracker = null;
}
*/
try
{
if (_registeredServer != null)
Expand Down Expand Up @@ -194,7 +181,7 @@ else if (jettyHomeBundleSysProp != null)
}
if (jettyHomeBundle == null)
{
LOG.warn("Unable to find the jetty.home.bundle named {}", jettyHomeSysProp);
LOG.warn("Unable to find the jetty.home.bundle named {}", jettyHomeBundleSysProp);
return;
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,16 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.SeekableByteChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;
import java.util.regex.Matcher;
Expand Down Expand Up @@ -760,6 +763,26 @@ public void testBigger() throws Exception
assertThat(response.getContent(), containsString(" 400\tThis is a big file\n"));
}

@Test
public void testOver2GBFile() throws Exception
{
    // A length just past Integer.MAX_VALUE exercises the >2GB content paths.
    long hugeLength = (long)Integer.MAX_VALUE + 10L;
    generateFile(docRoot.resolve("huge.mkv"), hugeLength);

    String rawResponse = _local.getResponse("""
        GET /context/huge.mkv HTTP/1.1\r
        Host: local\r
        Connection: close\r
        \r
        """);
    HttpTester.Response response = HttpTester.parseResponse(rawResponse);

    assertThat(response.getStatus(), is(HttpStatus.OK_200));
    assertThat(response.getLongField(CONTENT_LENGTH), is(hugeLength));
}

@Test
public void testBrotliInitialCompressed() throws Exception
{
Expand Down Expand Up @@ -3939,6 +3962,37 @@ private void setupBigFiles(Path base) throws Exception
}
}

/**
 * Creates (or reuses) a file of exactly {@code size} bytes filled with
 * {@code 'x'} characters, writing it in 1 MiB chunks.
 *
 * @param staticFile the file to generate
 * @param size the desired file size in bytes
 * @throws Exception if the file cannot be written
 */
private void generateFile(Path staticFile, long size) throws Exception
{
    // Skip the (expensive) generation if a previous run already produced
    // a file of the right size.
    if (Files.exists(staticFile) && Files.size(staticFile) == size)
        return;

    byte[] buf = new byte[1024 * 1024]; // 1 MiB fill pattern
    Arrays.fill(buf, (byte)'x');
    ByteBuffer src = ByteBuffer.wrap(buf);

    try (SeekableByteChannel channel = Files.newByteChannel(staticFile, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING))
    {
        long remaining = size;
        while (remaining > 0)
        {
            ByteBuffer slice = src.slice();
            // Safe cast: the result is bounded by buf.length (1 MiB).
            slice.limit((int)Math.min(buf.length, remaining));
            // write() may be partial; trust its return value rather than
            // assuming the whole slice was written.
            remaining -= channel.write(slice);
        }
    }
}

private void setupQuestionMarkDir(Path base) throws IOException
{
boolean filesystemSupportsQuestionMarkDir = false;
Expand Down
Loading

0 comments on commit 0d5038a

Please sign in to comment.