Skip to content

Commit

Permalink
Add some size checks to make code more robust and more clear (netty#11512)
Browse files Browse the repository at this point in the history

Motivation:

While it is technically impossible for a chunk to be larger than 64kb, it still makes the code easier to read and more robust to add some size checks to LzfDecoder.

Modifications:

Check the maximum length

Result:

More robust and easier to reason about code
  • Loading branch information
normanmaurer authored Jul 26, 2021
1 parent 8af59e4 commit 165a035
Showing 1 changed file with 19 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

import com.ning.compress.BufferRecycler;
import com.ning.compress.lzf.ChunkDecoder;
import com.ning.compress.lzf.LZFChunk;
import com.ning.compress.lzf.util.ChunkDecoderFactory;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
Expand Down Expand Up @@ -137,6 +138,15 @@ protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) t
}
chunkLength = in.readUnsignedShort();

// chunkLength can never exceed MAX_CHUNK_LEN: MAX_CHUNK_LEN is 64kb and readUnsignedShort can
// never return anything bigger either. Keep the check anyway so that, if a bug is ever
// introduced, decoding fails fast with a clear error instead of corrupting downstream state.
if (chunkLength > LZFChunk.MAX_CHUNK_LEN) {
throw new DecompressionException(String.format(
"chunk length exceeds maximum: %d (expected: =< %d)",
chunkLength, LZFChunk.MAX_CHUNK_LEN));
}

if (type != BLOCK_TYPE_COMPRESSED) {
break;
}
Expand All @@ -147,6 +157,15 @@ protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) t
}
originalLength = in.readUnsignedShort();

// originalLength can never exceed MAX_CHUNK_LEN: MAX_CHUNK_LEN is 64kb and readUnsignedShort can
// never return anything bigger either. Keep the check anyway so that, if a bug is ever
// introduced, decoding fails fast with a clear error instead of corrupting downstream state.
if (originalLength > LZFChunk.MAX_CHUNK_LEN) {
    // Bug fix: the format argument was `chunkLength` (copy-paste from the check above),
    // which would report the wrong value in the exception message. Report the value
    // that was actually tested: originalLength.
    throw new DecompressionException(String.format(
            "original length exceeds maximum: %d (expected: =< %d)",
            originalLength, LZFChunk.MAX_CHUNK_LEN));
}

currentState = State.DECOMPRESS_DATA;
// fall through
case DECOMPRESS_DATA:
Expand Down

0 comments on commit 165a035

Please sign in to comment.