Merge pull request #120 from linux-on-ibm-z/develop-s390x

Fix SnappyOutputStreamTest for big-endian platforms
Taro L. Saito 2015-06-30 10:41:59 +09:00
commit f38b91f18c
1 changed file with 5 additions and 1 deletion


@@ -31,6 +31,7 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.lang.ref.WeakReference;
+import java.nio.ByteOrder;
 import org.junit.Test;
 import org.xerial.snappy.buffer.BufferAllocatorFactory;
@@ -164,6 +165,9 @@ public class SnappyOutputStreamTest
         byte[] expectedCompressedData = compressAsChunks(orig, Integer.MAX_VALUE);
         // Hardcoding an expected compressed size here will catch regressions that lower the
         // compression quality:
-        assertEquals(91013, expectedCompressedData.length);
+        if (ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN)
+            assertEquals(90943, expectedCompressedData.length);
+        else
+            assertEquals(91013, expectedCompressedData.length);
         // The chunk size should not affect the size of the compressed output:
         int[] chunkSizes = new int[] {1, 100, 1023, 1024, 10000};
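
For context, the fix branches on java.nio.ByteOrder.nativeOrder() because, as the two hardcoded sizes above show, Snappy can produce slightly different (but equally valid) compressed output on big-endian and little-endian hosts. Below is a minimal standalone sketch of the same pattern, assuming snappy-java is on the classpath; the class name and the generated test data are illustrative, not part of this commit:

import java.nio.ByteOrder;
import org.xerial.snappy.Snappy;

public class EndiannessDemo {
    public static void main(String[] args) throws Exception {
        // Arbitrary repetitive input so the compressor has something to match.
        byte[] input = new byte[64 * 1024];
        for (int i = 0; i < input.length; i++) {
            input[i] = (byte) (i % 251);
        }
        byte[] compressed = Snappy.compress(input);
        // The exact compressed length may differ between byte orders,
        // which is why the test asserts a per-endianness expected size.
        System.out.println("native order      = " + ByteOrder.nativeOrder());
        System.out.println("compressed length = " + compressed.length);
    }
}

A test that pins an exact compressed size must account for this, which is why the assertion above checks 90943 on big-endian platforms (such as s390x) and 91013 elsewhere.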