Netty instantiates a set of request handler classes whenever a new connection is opened. This seems fine for something like a websocket, where the connection will stay open for the life of the session.
TL;DR:
If you reach the request volume where GC becomes a problem with the default HTTP handlers, it is time to scale out behind a proxy server anyway.
After Norman's answer I attempted a very bare-bones sharable HTTP codec/aggregator proof of concept to see whether this was worth pursuing.
My sharable decoder was a long way from RFC 7230 compliance, but it parsed enough of the request for my current project.
I then used httperf and VisualVM to get a sense of the difference in GC load. For my efforts I saw only about a 10% decrease in the GC rate; in other words, it really doesn't make much of a difference.
The only appreciable effect was roughly 5% fewer errors at 1000 req/sec with my sharable decoder than with the packaged, unshared HTTP codec + aggregator, and that only showed up once the 1000 req/sec load had been sustained for longer than 10 seconds.
In the end I'm not going to pursue it. The time needed to turn this into a fully HTTP-compliant decoder is not worth it for a benefit this small, which putting a proxy server in front solves anyway.
For reference, here is the combined sharable decoder/aggregator that I tried:
import java.util.concurrent.ConcurrentHashMap;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelId;
import io.netty.channel.ChannelInboundHandlerAdapter;
@Sharable
public class SharableHttpDecoder extends ChannelInboundHandlerAdapter {
private static final ConcurrentHashMap<ChannelId, SharableHttpRequest> MAP =
new ConcurrentHashMap<ChannelId, SharableHttpRequest>();
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg)
throws Exception
{
if (msg instanceof ByteBuf)
{
ByteBuf buf = (ByteBuf) msg;
ChannelId channelId = ctx.channel().id();
SharableHttpRequest request = MAP.get(channelId);
if (request == null)
{
request = new SharableHttpRequest(buf);
buf.release();
if (request.isComplete())
{
ctx.fireChannelRead(request);
}
else
{
MAP.put(channelId, request);
}
}
else
{
request.append(buf);
buf.release();
                if (request.isComplete())
                {
                    // remove the per-channel state so the next request on this
                    // channel starts from a clean slate
                    MAP.remove(channelId);
                    ctx.fireChannelRead(request);
                }
}
}
else
{
// TODO send 501
System.out.println("WTF is this? " + msg.getClass().getName());
ctx.fireChannelRead(msg);
}
}
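    // because the handler is shared, per-channel parse state lives in the static
    // MAP above; drop this channel's entry when the connection closes so entries
    // for half-parsed requests don't accumulate
    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception
    {
        MAP.remove(ctx.channel().id());
        ctx.fireChannelInactive();
    }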
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
throws Exception
{
System.out.println("Unable to handle request on channel: " +
ctx.channel().id().asLongText());
cause.printStackTrace(System.err);
// TODO send 500
ctx.fireExceptionCaught(cause);
ctx.close();
}
}
The resultant object created by the decoder for handling on the pipeline:
import java.util.Arrays;
import java.util.HashMap;
import io.netty.buffer.ByteBuf;
public class SharableHttpRequest
{
private static final byte SPACE = 32;
private static final byte COLON = 58;
    private static final byte CARRIAGE_RETURN = 13;
private HashMap<Header,String> myHeaders;
private Method myMethod;
private String myPath;
private byte[] myBody;
private int myIndex = 0;
public SharableHttpRequest(ByteBuf buf)
{
try
{
myHeaders = new HashMap<Header,String>();
final StringBuilder builder = new StringBuilder(8);
parseRequestLine(buf, builder);
while (parseNextHeader(buf, builder));
parseBody(buf);
}
catch (Exception e)
{
e.printStackTrace(System.err);
}
}
public String getHeader(Header name)
{
return myHeaders.get(name);
}
public Method getMethod()
{
return myMethod;
}
public String getPath()
{
return myPath;
}
public byte[] getBody()
{
return myBody;
}
public boolean isComplete()
{
return myIndex >= myBody.length;
}
public void append(ByteBuf buf)
{
int length = buf.readableBytes();
buf.getBytes(buf.readerIndex(), myBody, myIndex, length);
myIndex += length;
}
private void parseRequestLine(ByteBuf buf, StringBuilder builder)
{
int idx = buf.readerIndex();
int end = buf.writerIndex();
for (; idx < end; ++idx)
{
byte next = buf.getByte(idx);
// break on CR
            if (next == CARRIAGE_RETURN)
{
break;
}
// we need the method
else if (myMethod == null)
{
if (next == SPACE)
{
myMethod = Method.fromBuilder(builder);
builder.delete(0, builder.length());
builder.ensureCapacity(100);
}
else
{
builder.append((char) next);
}
}
// we need the path
else if (myPath == null)
{
if (next == SPACE)
{
myPath = builder.toString();
builder.delete(0, builder.length());
}
else
{
builder.append((char) next);
}
}
// don't need the version right now
}
idx += 2; // skip line endings
buf.readerIndex(idx);
}
private boolean parseNextHeader(ByteBuf buf, StringBuilder builder)
{
Header header = null;
int idx = buf.readerIndex();
int end = buf.writerIndex();
for (; idx < end; ++idx)
{
byte next = buf.getByte(idx);
// break on CR
            if (next == CARRIAGE_RETURN)
{
if (header != Header.UNHANDLED)
{
myHeaders.put(header,builder.toString());
builder.delete(0, builder.length());
}
break;
}
else if (header == null)
{
// we have the full header name
if (next == COLON)
{
header = Header.fromBuilder(builder);
builder.delete(0, builder.length());
}
// get header name as lower case for mapping purposes
else
{
builder.append(next > 64 && next < 91 ?
(char) ( next | 32 ) : (char) next);
}
}
// we don't care about some headers
else if (header == Header.UNHANDLED)
{
continue;
}
// skip initial spaces
else if (builder.length() == 0 && next == SPACE)
{
continue;
}
// get the header value
else
{
builder.append((char) next);
}
}
idx += 2; // skip line endings
buf.readerIndex(idx);
        if (buf.getByte(idx) == CARRIAGE_RETURN)
{
idx += 2; // skip line endings
buf.readerIndex(idx);
return false;
}
else
{
return true;
}
}
private void parseBody(ByteBuf buf)
{
int length = buf.readableBytes();
if (length == 0)
{
myBody = new byte[0];
myIndex = 1;
}
else
{
System.out.println("Content-Length: " + myHeaders.get(Header.CONTENT_LENGTH));
if (myHeaders.get(Header.CONTENT_LENGTH) != null)
{
int totalLength = Integer.valueOf(myHeaders.get(Header.CONTENT_LENGTH));
myBody = new byte[totalLength];
buf.getBytes(buf.readerIndex(), myBody, myIndex, length);
myIndex += length;
}
// TODO handle chunked
}
}
public enum Method
{
        GET(new char[]{'G', 'E', 'T'}),
        POST(new char[]{'P', 'O', 'S', 'T'}),
        UNHANDLED(new char[]{}); // could be expanded if needed
private char[] chars;
Method(char[] chars)
{
this.chars = chars;
}
public static Method fromBuilder(StringBuilder builder)
{
for (Method method : Method.values())
{
if (method.chars.length == builder.length())
{
boolean match = true;
for (int i = 0; i < builder.length(); i++)
{
if (method.chars[i] != builder.charAt(i))
{
match = false;
break;
}
}
if (match)
{
return method;
}
}
}
            // unknown methods map to UNHANDLED (mirrors Header.fromBuilder) so the
            // request-line parser can still advance past the method token
            return UNHANDLED;
}
}
public enum Header
{
        // names are stored lower-case because parseNextHeader lower-cases the
        // header-name bytes before matching
        HOST(new char[]{'h', 'o', 's', 't'}),
        CONNECTION(new char[]{'c', 'o', 'n', 'n', 'e', 'c', 't', 'i', 'o', 'n'}),
        IF_MODIFIED_SINCE(new char[]{
                'i', 'f', '-', 'm', 'o', 'd', 'i', 'f', 'i', 'e', 'd', '-',
                's', 'i', 'n', 'c', 'e'}),
        COOKIE(new char[]{'c', 'o', 'o', 'k', 'i', 'e'}),
        CONTENT_LENGTH(new char[]{
                'c', 'o', 'n', 't', 'e', 'n', 't', '-', 'l', 'e', 'n', 'g', 't', 'h'}),
        UNHANDLED(new char[]{}); // could be expanded if needed
private char[] chars;
Header(char[] chars)
{
this.chars = chars;
}
public static Header fromBuilder(StringBuilder builder)
{
for (Header header : Header.values())
{
if (header.chars.length == builder.length())
{
boolean match = true;
for (int i = 0; i < builder.length(); i++)
{
if (header.chars[i] != builder.charAt(i))
{
match = false;
break;
}
}
if (match)
{
return header;
}
}
}
return UNHANDLED;
}
}
}
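To sanity-check the parser outside of a pipeline, something along these lines can be run standalone. The class name and raw request string are just illustrative, and this assumes the sketch lives in the same package as SharableHttpRequest:
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.util.CharsetUtil;
public class SharableHttpRequestSketch
{
    public static void main(String[] args)
    {
        String raw = "GET /index.html HTTP/1.1\r\n" +
                "Host: localhost\r\n" +
                "Content-Length: 0\r\n" +
                "\r\n";
        ByteBuf buf = Unpooled.copiedBuffer(raw, CharsetUtil.US_ASCII);
        SharableHttpRequest request = new SharableHttpRequest(buf);
        buf.release();
        System.out.println(request.getMethod());   // GET
        System.out.println(request.getPath());     // /index.html
        System.out.println(request.getHeader(SharableHttpRequest.Header.HOST)); // localhost
        System.out.println(request.isComplete());  // true
    }
}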
A simple handler for testing:
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.util.CharsetUtil;
@Sharable
public class SharableHttpHandler extends SimpleChannelInboundHandler<SharableHttpRequest>
{
@Override
protected void channelRead0(ChannelHandlerContext ctx, SharableHttpRequest msg)
throws Exception
{
String message = "HTTP/1.1 200 OK\r\n" +
"Content-type: text/html\r\n" +
"Content-length: 42\r\n\r\n" +
"<html><body>Hello sharedworld</body><html>";
ByteBuf buffer = ctx.alloc().buffer(message.length());
buffer.writeCharSequence(message, CharsetUtil.UTF_8);
ChannelFuture flushPromise = ctx.channel().writeAndFlush(buffer);
flushPromise.addListener(ChannelFutureListener.CLOSE);
        // the write is asynchronous, so check for failure from a listener
        // instead of immediately after writeAndFlush() returns
        flushPromise.addListener(new ChannelFutureListener()
        {
            @Override
            public void operationComplete(ChannelFuture future)
            {
                if (!future.isSuccess())
                {
                    future.cause().printStackTrace(System.err);
                }
            }
        });
}
}
The full pipeline using these sharable handlers:
import tests.SharableHttpDecoder;
import tests.SharableHttpHandler;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.socket.SocketChannel;
public class ServerPipeline extends ChannelInitializer<SocketChannel>
{
private final SharableHttpDecoder decoder = new SharableHttpDecoder();
private final SharableHttpHandler handler = new SharableHttpHandler();
@Override
public void initChannel(SocketChannel channel)
{
ChannelPipeline pipeline = channel.pipeline();
pipeline.addLast(decoder);
pipeline.addLast(handler);
}
}
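For completeness, here is a minimal sketch of how an initializer like this can be bound with a standard Netty ServerBootstrap; the class name, port, and thread count below are arbitrary placeholders, not values from the test setup:
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
public class ServerMain
{
    public static void main(String[] args) throws Exception
    {
        // one acceptor loop, default-sized worker pool
        EventLoopGroup bossGroup = new NioEventLoopGroup(1);
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        try
        {
            ServerBootstrap bootstrap = new ServerBootstrap();
            bootstrap.group(bossGroup, workerGroup)
                    .channel(NioServerSocketChannel.class)
                    .childHandler(new ServerPipeline());
            // bind and block until the server socket is closed
            bootstrap.bind(8080).sync().channel().closeFuture().sync();
        }
        finally
        {
            bossGroup.shutdownGracefully();
            workerGroup.shutdownGracefully();
        }
    }
}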
The above was tested against this (more usual) unshared pipeline:
import static io.netty.handler.codec.http.HttpResponseStatus.OK;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.socket.SocketChannel;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.HttpServerCodec;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.util.CharsetUtil;
public class ServerPipeline extends ChannelInitializer<SocketChannel>
{
@Override
public void initChannel(SocketChannel channel)
{
ChannelPipeline pipeline = channel.pipeline();
pipeline.addLast(new HttpServerCodec());
pipeline.addLast(new HttpObjectAggregator(65536));
pipeline.addLast(new UnsharedHttpHandler());
}
class UnsharedHttpHandler extends SimpleChannelInboundHandler<FullHttpRequest>
{
@Override
public void channelRead0(ChannelHandlerContext ctx, FullHttpRequest request)
throws Exception
{
String message = "<html><body>Hello sharedworld</body><html>";
ByteBuf buffer = ctx.alloc().buffer(message.length());
buffer.writeCharSequence(message.toString(), CharsetUtil.UTF_8);
FullHttpResponse response = new DefaultFullHttpResponse(HTTP_1_1, OK, buffer);
response.headers().set(HttpHeaderNames.CONTENT_TYPE, "text/html; charset=UTF-8");
HttpUtil.setContentLength(response, response.content().readableBytes());
response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);
ChannelFuture flushPromise = ctx.writeAndFlush(response);
flushPromise.addListener(ChannelFutureListener.CLOSE);
}
}
}
For reference, here is Norman's answer that prompted the experiment:
While we always aim to produce as little garbage as possible in Netty, there are situations where this simply isn't possible. For example, the HTTP codecs keep per-connection state, so they can't be shared (even if they were thread-safe).
The only way around this would be to pool them, but I think there are other objects that are much more likely to cause GC problems, and for those we try to pool where it is easily possible.
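To make the pooling idea concrete: Netty's own io.netty.util.Recycler is the kind of mechanism such pooling would rely on. A bare sketch of the pattern, assuming Netty 4.1's Recycler API; the PooledThing class is made up for illustration:
import io.netty.util.Recycler;
public class PooledThing
{
    private static final Recycler<PooledThing> RECYCLER = new Recycler<PooledThing>()
    {
        @Override
        protected PooledThing newObject(Handle<PooledThing> handle)
        {
            return new PooledThing(handle);
        }
    };
    private final Recycler.Handle<PooledThing> handle;
    private PooledThing(Recycler.Handle<PooledThing> handle)
    {
        this.handle = handle;
    }
    // fetch a pooled (or freshly created) instance instead of calling new
    public static PooledThing newInstance()
    {
        return RECYCLER.get();
    }
    // reset any per-use state here, then hand the instance back to the pool
    public void recycle()
    {
        handle.recycle(this);
    }
}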