Merge pull request from GHSA-qcwq-55hx-v3vh
* asserted that chunkSize must be within bounds (non-negative, and small enough not to trigger java.lang.OutOfMemoryError) * https://github.com/xerial/snappy-java-ghsa-qcwq-55hx-v3vh/pull/2 * advisory-fix-3 * added and renamed methods for happy and sad cases in SnappyTest.java * removed expected error for the happy case in unit testing * added another unit-test case in SnappyTest.java and fixed comments in SnappyInputStream.java * switched SnappyError to INVALID_CHUNK_SIZE * Updated unit tests * Resolved conflicts with another PR merge
This commit is contained in:
parent
820e2e074c
commit
3bf67857fc
|
@ -417,10 +417,21 @@ public class SnappyInputStream
|
|||
}
|
||||
}
|
||||
|
||||
// chunkSize is negative
|
||||
if (chunkSize < 0) {
|
||||
throw new SnappyError(SnappyErrorCode.INVALID_CHUNK_SIZE, "chunkSize is too big or negative : " + chunkSize);
|
||||
}
|
||||
|
||||
// extend the compressed data buffer size
|
||||
if (compressed == null || chunkSize > compressed.length) {
|
||||
// chunkSize exceeds limit
|
||||
try {
|
||||
compressed = new byte[chunkSize];
|
||||
}
|
||||
catch (java.lang.OutOfMemoryError e) {
|
||||
throw new SnappyError(SnappyErrorCode.INVALID_CHUNK_SIZE, e.getMessage());
|
||||
}
|
||||
}
|
||||
readBytes = 0;
|
||||
while (readBytes < chunkSize) {
|
||||
int ret = in.read(compressed, readBytes, chunkSize - readBytes);
|
||||
|
|
|
@ -26,6 +26,7 @@ package org.xerial.snappy;
|
|||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
|
@ -330,6 +331,53 @@ public class SnappyTest
|
|||
}
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
Tests happy cases for SnappyInputStream.read method
|
||||
- {0}
|
||||
*/
|
||||
@Test
|
||||
public void isValidChunkLengthForSnappyInputStreamIn()
|
||||
throws Exception {
|
||||
byte[] data = {0};
|
||||
SnappyInputStream in = new SnappyInputStream(new ByteArrayInputStream(data));
|
||||
byte[] out = new byte[50];
|
||||
in.read(out);
|
||||
}
|
||||
|
||||
/*
|
||||
Tests sad cases for SnappyInputStream.read method
|
||||
- Expects a java.lang.NegativeArraySizeException catched into a SnappyError
|
||||
- {-126, 'S', 'N', 'A', 'P', 'P', 'Y', 0, 0, 0, 0, 0, 0, 0, 0, 0,(byte) 0x7f, (byte) 0xff, (byte) 0xff, (byte) 0xff}
|
||||
*/
|
||||
@Test(expected = SnappyError.class)
|
||||
public void isInvalidChunkLengthForSnappyInputStreamInNegative()
|
||||
throws Exception {
|
||||
byte[] data = {-126, 'S', 'N', 'A', 'P', 'P', 'Y', 0, 0, 0, 0, 0, 0, 0, 0, 0,(byte) 0x7f, (byte) 0xff, (byte) 0xff, (byte) 0xff};
|
||||
SnappyInputStream in = new SnappyInputStream(new ByteArrayInputStream(data));
|
||||
byte[] out = new byte[50];
|
||||
in.read(out);
|
||||
}
|
||||
|
||||
/*
|
||||
Tests sad cases for SnappyInputStream.read method
|
||||
- Expects a java.lang.OutOfMemoryError
|
||||
- {-126, 'S', 'N', 'A', 'P', 'P', 'Y', 0, 0, 0, 0, 0, 0, 0, 0, 0,(byte) 0x7f, (byte) 0xff, (byte) 0xff, (byte) 0xff}
|
||||
*/
|
||||
@Test(expected = SnappyError.class)
|
||||
public void isInvalidChunkLengthForSnappyInputStreamOutOfMemory()
|
||||
throws Exception {
|
||||
byte[] data = {-126, 'S', 'N', 'A', 'P', 'P', 'Y', 0, 0, 0, 0, 0, 0, 0, 0, 0, (byte) 0x7f, (byte) 0xff, (byte) 0xff, (byte) 0xff};
|
||||
SnappyInputStream in = new SnappyInputStream(new ByteArrayInputStream(data));
|
||||
byte[] out = new byte[50];
|
||||
try {
|
||||
in.read(out);
|
||||
} catch (Exception ignored) {
|
||||
// Exception here will be catched
|
||||
// But OutOfMemoryError will not be caught, and will still be thrown
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
Tests happy cases for BitShuffle.shuffle method
|
||||
- double: 0, 10
|
||||
|
@ -386,5 +434,6 @@ public class SnappyTest
|
|||
@Test(expected = SnappyError.class)
|
||||
public void isTooLargeShortArrayInputLengthForBitShuffleShuffle() throws Exception {
|
||||
BitShuffle.shuffle(new short[Integer.MAX_VALUE / 2 + 1]);
|
||||
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue