package org.xerial.snappy;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.SystemUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.SnappyCodec;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import java.io.*;
import java.lang.reflect.Field;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * Verifies that data written by {@link SnappyHadoopCompatibleOutputStream} can be
 * decompressed by Hadoop's own {@link SnappyCodec}.
 *
 * <p>The Hadoop half of the round trip needs platform-specific libhadoop/libsnappy
 * native libraries; they are copied from test resources into the folder named by the
 * {@code XERIAL_SNAPPY_LIB} environment variable before the tests run. When the
 * platform is unsupported or the env var is unset, the Hadoop decompression part is
 * skipped with a warning instead of failing.
 */
public class SnappyHadoopCompatibleOutputStreamTest
{
    // Folder holding the extracted native libraries; stays null when setup was
    // skipped, which the test method uses as its "skip Hadoop codec" signal.
    private static File tempNativeLibFolder;

    @BeforeClass
    public static void loadHadoopNativeLibrary() throws Exception
    {
        final String libResourceFolder;
        Map<String, String> libraryNames = new LinkedHashMap<>();
        if (SystemUtils.IS_OS_LINUX) {
            libResourceFolder = "/lib/Linux";
            libraryNames.put("libhadoop.so", "libhadoop.so");
            // certain Linux systems need these shared library be copied before the JVM started, see build.sbt
            libraryNames.put("libsnappy.so", "libsnappy.so");
            libraryNames.put("libsnappy.so.1", "libsnappy.so");
        } else if (SystemUtils.IS_OS_MAC_OSX) {
            libResourceFolder = "/lib/MacOSX";
            libraryNames.put("libhadoop.dylib", "libhadoop.dylib");
            libraryNames.put("libsnappy.dylib", "libsnappy.dylib");
            libraryNames.put("libsnappy.1.dylib", "libsnappy.dylib");
        } else {
            return; // unsupported platform: leave tempNativeLibFolder null so the test skips
        }

        String testLibDir = System.getenv("XERIAL_SNAPPY_LIB");
        if (testLibDir == null) {
            // Fix: new File(null) used to throw NullPointerException here; skip instead,
            // consistent with how the test already handles a null tempNativeLibFolder.
            System.err.println("WARNING: XERIAL_SNAPPY_LIB is not set; skipping Hadoop native library setup");
            return;
        }

        tempNativeLibFolder = new File(testLibDir);
        if (!tempNativeLibFolder.mkdirs() && !tempNativeLibFolder.isDirectory()) {
            throw new IOException("could not create native library folder: " + tempNativeLibFolder);
        }

        for (Map.Entry<String, String> entry : libraryNames.entrySet()) {
            copyNativeLibraryToFS(libResourceFolder, entry.getValue(), entry.getKey());
        }

        System.setProperty("java.library.path", tempNativeLibFolder.getAbsolutePath());

        // credit: https://stackoverflow.com/questions/15409223/adding-new-paths-for-native-libraries-at-runtime-in-java
        // set sys_paths to null so that java.library.path will be re-evaluated next time it is needed
        // NOTE(review): this relies on a private JDK field and breaks under strong
        // encapsulation on recent JDKs — confirm the JDK version used by CI.
        final Field sysPathsField = ClassLoader.class.getDeclaredField("sys_paths");
        sysPathsField.setAccessible(true);
        sysPathsField.set(null, null);
    }

    /**
     * Extracts one native library from test resources into {@link #tempNativeLibFolder}.
     *
     * @param libResourceFolder classpath folder holding this platform's libraries
     * @param libraryName       resource file name to read
     * @param toLibraryName     file name to write inside the destination folder
     * @throws RuntimeException wrapping any {@link IOException}, with the resource name in the message
     */
    private static void copyNativeLibraryToFS(String libResourceFolder, String libraryName, String toLibraryName) {
        final String libraryResourceName = libResourceFolder + "/" + libraryName;
        final File libraryPath = new File(tempNativeLibFolder, toLibraryName);
        try (InputStream inputStream = SnappyHadoopCompatibleOutputStream.class.getResourceAsStream(libraryResourceName);
             FileOutputStream outputStream = new FileOutputStream(libraryPath)) {
            if (inputStream == null) {
                // Fix: previously a missing resource surfaced as an opaque NPE in IOUtils.copy.
                throw new FileNotFoundException("missing test resource: " + libraryResourceName);
            }
            IOUtils.copy(inputStream, outputStream);
            // fsync so the dynamic linker never sees a partially written library file
            outputStream.getFD().sync();
        } catch (IOException ex) {
            throw new RuntimeException("failed to copy native library " + libraryResourceName, ex);
        }
    }

    @AfterClass
    public static void cleanUpLibraryFolder()
    {
        // deleteQuietly tolerates a null folder (setup skipped) and deletion failures.
        FileUtils.deleteQuietly(tempNativeLibFolder);
    }

    /**
     * Round trip: compress with Xerial's Hadoop-compatible stream, decompress with
     * Hadoop's SnappyCodec, and check the text survives byte-for-byte.
     */
    @Test
    public void testXerialCompressionHadoopDecompressionCodec() throws Exception
    {
        String os = OSInfo.getOSName();
        String arch = OSInfo.getArchName();
        if (!((os.equals("Linux") || os.equals("Mac")) && arch.equals("x86_64"))) {
            // This test works only in Linux/Mac x86_64
            System.err.println("SnappyHadoopCompatibleOutputStreamTest works only in 64-bit Linux/Mac");
            return;
        }

        File inputFile = File.createTempFile("TEST_hadoop_compatibility", ".txt");
        File snappyFile = File.createTempFile("TEST_hadoop_compatibility", ".snappy");
        InputStream snappyInput = null;
        try {
            // Fix: the original built the fixture with String += in a 1024-iteration
            // loop — accidental O(n^2). StringBuilder produces the identical text.
            StringBuilder fixture = new StringBuilder();
            for (int i = 0; i < 1024; i++) {
                fixture.append("Some long long strings to be compressed. Some long long strings to be compressed.");
            }
            fixture.append("odd bytes"); // deliberately non-block-aligned tail
            final String text = fixture.toString();
            final byte[] bytes = text.getBytes("UTF-8");
            try (FileOutputStream outputStream = new FileOutputStream(inputFile)) {
                outputStream.write(bytes);
            }

            compress(inputFile, snappyFile);

            // Test using Hadoop's Snappy Codec
            if (tempNativeLibFolder != null) {
                SnappyCodec hadoopCodec = new SnappyCodec();
                hadoopCodec.setConf(new Configuration());
                snappyInput = hadoopCodec.createInputStream(new FileInputStream(snappyFile));
                byte[] buf = new byte[bytes.length];
                int byteRead = IOUtils.read(snappyInput, buf);
                String decompressed = new String(buf, 0, byteRead, "UTF-8");
                // Fix: JUnit's assertEquals takes (expected, actual); the original had
                // them swapped, which produces misleading failure messages.
                Assert.assertEquals(text, decompressed);
            } else {
                System.err.println("WARNING: no hadoop library for this platform. skip hadoop decompression test");
            }
        } finally {
            if (snappyInput != null) {
                snappyInput.close(); // also closes the underlying FileInputStream
            }
            inputFile.delete();
            snappyFile.delete();
        }
    }

    /**
     * Compresses {@code inputPath} into {@code outputPath} through
     * {@link SnappyHadoopCompatibleOutputStream}, copying in 64 KiB chunks.
     * Uses try-with-resources so the streams are closed on every exit path
     * (the original leaked the buffered streams on exceptions).
     */
    private void compress(File inputPath, File outputPath) throws Exception
    {
        try (InputStream inputStream = new BufferedInputStream(new FileInputStream(inputPath));
             OutputStream outputStream = new SnappyHadoopCompatibleOutputStream(new FileOutputStream(outputPath))) {
            int readCount;
            byte[] buffer = new byte[64 * 1024];
            while ((readCount = inputStream.read(buffer)) > 0) {
                outputStream.write(buffer, 0, readCount);
            }
        }
    }
}