List of usage examples for org.apache.commons.io FileUtils contentEquals
public static boolean contentEquals(File file1, File file2) throws IOException
From source file:com.linkedin.pinot.segments.v1.creator.BitmapInvertedIndexCreatorTest.java
@Test public void testMultiValue() throws IOException { boolean singleValue = false; String colName = "multi_value_col"; FieldSpec spec = new DimensionFieldSpec(colName, DataType.INT, singleValue); int numDocs = 20; int[][] data = new int[numDocs][]; int maxLength = 10; int cardinality = 10; File indexDirHeap = new File("/tmp/indexDirHeap"); FileUtils.forceMkdir(indexDirHeap);//from w w w. j a v a2s. c o m indexDirHeap.mkdirs(); File indexDirOffHeap = new File("/tmp/indexDirOffHeap"); FileUtils.forceMkdir(indexDirOffHeap); indexDirOffHeap.mkdirs(); File bitmapIndexFileOffHeap = new File(indexDirOffHeap, colName + V1Constants.Indexes.BITMAP_INVERTED_INDEX_FILE_EXTENSION); File bitmapIndexFileHeap = new File(indexDirHeap, colName + V1Constants.Indexes.BITMAP_INVERTED_INDEX_FILE_EXTENSION); // GENERATE RANDOM MULTI VALUE DATA SET Random r = new Random(); Map<Integer, Set<Integer>> postingListMap = new HashMap<>(); for (int i = 0; i < cardinality; i++) { postingListMap.put(i, new LinkedHashSet<Integer>()); } int totalNumberOfEntries = 0; for (int docId = 0; docId < numDocs; docId++) { int length = r.nextInt(maxLength); data[docId] = new int[length]; totalNumberOfEntries += length; for (int j = 0; j < length; j++) { data[docId][j] = r.nextInt(cardinality); postingListMap.get(data[docId][j]).add(docId); } LOGGER.debug("docId:" + docId + " dictId:" + data[docId]); } for (int i = 0; i < cardinality; i++) { LOGGER.debug("Posting list for " + i + " : " + postingListMap.get(i)); } // GENERATE BITMAP USING OffHeapCreator and validate OffHeapBitmapInvertedIndexCreator offHeapCreator = new OffHeapBitmapInvertedIndexCreator(indexDirOffHeap, cardinality, numDocs, totalNumberOfEntries, spec); for (int i = 0; i < numDocs; i++) { offHeapCreator.add(i, data[i]); } offHeapCreator.seal(); validate(colName, bitmapIndexFileOffHeap, cardinality, postingListMap); // GENERATE BITMAP USING HeapCreator and validate HeapBitmapInvertedIndexCreator heapCreator = new 
HeapBitmapInvertedIndexCreator(indexDirHeap, cardinality, numDocs, totalNumberOfEntries, spec); for (int i = 0; i < numDocs; i++) { heapCreator.add(i, data[i]); } heapCreator.seal(); validate(colName, bitmapIndexFileHeap, cardinality, postingListMap); // assert that the file sizes and contents are the same Assert.assertEquals(bitmapIndexFileHeap.length(), bitmapIndexFileHeap.length()); Assert.assertTrue(FileUtils.contentEquals(bitmapIndexFileHeap, bitmapIndexFileHeap)); FileUtils.deleteQuietly(indexDirHeap); FileUtils.deleteQuietly(indexDirOffHeap); }
From source file:de.tudarmstadt.ukp.dkpro.core.io.text.TokenizedTextWriterTest.java
@Test public void testNoSentences() throws IOException, UIMAException { File targetFile = new File(context.getTestOutputFolder(), "TokenizedTextWriterNoSentences.out"); File tokenized = new File("src/test/resources/tokenizedTexts/textNoSentences.txt"); String text = "This is the 1st sentence . Here is another sentence ."; AnalysisEngineDescription writer = createEngineDescription(TokenizedTextWriter.class, TokenizedTextWriter.PARAM_TARGET_LOCATION, targetFile, TokenizedTextWriter.PARAM_SINGULAR_TARGET, true, TokenizedTextWriter.PARAM_OVERWRITE, true, TokenizedTextWriter.PARAM_COVERING_TYPE, null); TestRunner.runTest("id", writer, "en", text); assertTrue(FileUtils.contentEquals(tokenized, targetFile)); }
From source file:dynamicrefactoring.domain.xml.writer.RefactoringWriterTest.java
/** * Crea una refactorizacion dinamica con la informacion que le pasamos y * cierta informacion que asume por defecto para las * MinimumInformationDefinition y escribe la refactorizacion a un fichero * comprobando que lo escrito es lo esperado. * /*from w w w .ja va 2 s .c o m*/ * @param refactoringName * nombre de la refactorizacion * @param keywords * palabras clave de la refactorizacion * @param categories * categorias de la refactorizacion * @throws XMLRefactoringWriterException * @throws IOException * @throws Exception * si se produce un error al escribir la definicin de la * refactorizacin. */ private void assertMinimumInformationDefinition(String refactoringName, Set<String> keywords, Set<Category> categories) throws XMLRefactoringWriterException, IOException { Preconditions.checkNotNull(keywords); Preconditions.checkNotNull(categories); Preconditions.checkNotNull(refactoringName); DynamicRefactoringDefinition.Builder rd = createRefactoringDefinition(refactoringName, DESCRIPCION, MOTIVACION); rd.inputs(addSimpleInputs()); rd.categories(categories); rd.keywords(keywords); addSimplePredicates(rd); writeRefactoring(rd.build()); assertTrue(FileUtils.contentEquals( new File(TestCaseRefactoringReader.TESTDATA_XML_READER_DIR + refactoringName + TestCaseRefactoringReader.XML_EXTENSION), //$NON-NLS-1$ new File(TESTDATA_XML_WRITER_DIR + refactoringName + TestCaseRefactoringReader.XML_EXTENSION))); //$NON-NLS-1$ }
From source file:net.pms.util.FileUtilTest.java
@Test public void testConvertFileFromUtf16ToUtf8_inputFileIsUTF16LE() throws Exception { File file_utf8le = FileUtils.toFile(CLASS.getResource("russian-utf16-le.srt")); File outputFile = new File(file_utf8le.getParentFile(), "output-utf8-from-utf16-le.srt"); outputFile.delete();//from w ww . j a va 2 s . com FileUtil.convertFileFromUtf16ToUtf8(file_utf8le, outputFile); File file_utf8 = FileUtils.toFile(CLASS.getResource("russian-utf8-without-bom.srt")); assertThat(FileUtils.contentEquals(outputFile, file_utf8)).isTrue(); outputFile.delete(); }
From source file:edu.unc.lib.dl.admin.collect.DepositBinCollector.java
/** * Moves a list of files to a destination location after verifying that they were safely copied * * @param targetFiles/*from ww w. j av a 2 s . co m*/ * @param destination * @throws IOException */ private void moveFiles(List<File> targetFiles, DepositInfo depositInfo, DepositBinConfiguration config) throws IOException { try { // Copy the specified files into the destination path for (File file : targetFiles) { FileUtils.copyFileToDirectory(file, depositInfo.dataDir, true); } // Verify that the original files all copied over to the destination for (File file : targetFiles) { File destinationFile = new File(depositInfo.dataDir, file.getName()); if (!FileUtils.contentEquals(file, destinationFile)) { throw new IOException("Copied file " + destinationFile.toString() + " did not match the original file " + file.getAbsolutePath()); } } } catch (Exception e) { FileUtils.deleteDirectory(depositInfo.depositDir); throw new IOException("Failed to copy bin files to deposit directory, aborting and cleaning up", e); } // Clean up the original copies of the files for (File file : targetFiles) { boolean success = file.delete(); if (!success) { log.warn("Failed to cleanup file {}", file.getAbsolutePath()); } } }
From source file:net.pms.util.FileUtilTest.java
@Test public void testConvertFileFromUtf16ToUtf8_inputFileIsUTF16BE() throws Exception { File file_utf8be = FileUtils.toFile(CLASS.getResource("russian-utf16-be.srt")); File outputFile = new File(file_utf8be.getParentFile(), "output-utf8-from-utf16-be.srt"); outputFile.delete();//from w w w .jav a 2 s. c o m FileUtil.convertFileFromUtf16ToUtf8(file_utf8be, outputFile); File file_utf8 = FileUtils.toFile(CLASS.getResource("russian-utf8-with-bom.srt")); assertThat(FileUtils.contentEquals(outputFile, file_utf8)).isTrue(); outputFile.delete(); }
From source file:edu.cornell.med.icb.goby.modes.TestReformatCompactReadsMode.java
/** * Validates that a subset of a compact reads file can be written. * * @throws IOException if there is a problem reading or writing to the files *///from w w w.j a va 2 s .c om @Test public void reformatSubsetOfCompactFile() throws IOException { final ReformatCompactReadsMode reformat = new ReformatCompactReadsMode(); final String inputFilename = "test-data/compact-reads/s_1_sequence_short_1_per_chunk.compact-reads"; reformat.setInputFilenames(inputFilename); reformat.setStartPosition(11); final String outputFilename = "test-results/reformat-test-subset.compact-reads"; reformat.setOutputFile(outputFilename); reformat.execute(); final File inputFile = new File(inputFilename); final File outputFile = new File(outputFilename); assertFalse("The reformatted file should not be the same as the original", FileUtils.contentEquals(inputFile, outputFile)); final ReadsReader reader = new ReadsReader(FileUtils.openInputStream(outputFile)); assertTrue("There should be reads in this file", reader.hasNext()); int numberOfEntries = 0; for (final Reads.ReadEntry entry : reader) { assertNotNull("Entry should not be null: " + numberOfEntries, entry); numberOfEntries++; } // we should have skipped the first entry assertEquals("There should be 72 entries in the test file", 72, numberOfEntries); }
From source file:jetbrains.buildServer.torrent.TorrentTransportTest.java
/**
 * Seeds an artifact through a local tracker plus several peers, downloads it
 * via the torrent transport, and verifies both the reported digest and that
 * the downloaded content matches the seeded artifact byte-for-byte.
 */
public void testDownloadAndSeed() throws IOException, NoSuchAlgorithmException, InterruptedException {
    setTorrentTransportEnabled();
    setDownloadHonestly(true);

    final File storageDir = new File(myTempDir, "storageDir");
    storageDir.mkdir();
    final File downloadDir = new File(myTempDir, "downloadDir");
    downloadDir.mkdir();
    final File torrentsDir = new File(myTempDir, "torrentsDir");
    torrentsDir.mkdir();

    final String fileName = "MyBuild.31.zip";
    final File artifactFile = new File(storageDir, fileName);
    // BUG FIX: renameTo()'s boolean result was previously ignored; it can fail
    // silently (e.g. across filesystems), leaving the artifact missing and the
    // later content assertion failing for the wrong reason.
    assertTrue(createTempFile(20250).renameTo(artifactFile));

    final File teamcityIvyFile = new File("agent/tests/resources/" + TorrentTransportFactory.TEAMCITY_IVY);
    myDownloadMap.put("/" + TorrentTransportFactory.TEAMCITY_IVY, teamcityIvyFile);
    final String ivyUrl = SERVER_PATH + TorrentTransportFactory.TEAMCITY_IVY;
    final File ivyFile = new File(myTempDir, TorrentTransportFactory.TEAMCITY_IVY);
    myTorrentTransport.downloadUrlTo(ivyUrl, ivyFile);

    Tracker tracker = new Tracker(6969);
    List<Client> clientList = new ArrayList<Client>();
    for (int i = 0; i < TorrentTransportFactory.MIN_SEEDERS_COUNT_TO_TRY; i++) {
        clientList.add(new Client());
    }
    try {
        tracker.start(true);
        myDirectorySeeder.start(new InetAddress[] { InetAddress.getLocalHost() }, tracker.getAnnounceURI(), 5);

        final Torrent torrent = Torrent.create(artifactFile, tracker.getAnnounceURI(), "testplugin");
        final File torrentFile = new File(torrentsDir, fileName + ".torrent");
        torrent.save(torrentFile);
        myDownloadMap.put("/.teamcity/torrents/" + fileName + ".torrent", torrentFile);

        // Start each peer seeding the artifact so the transport has enough seeders.
        for (Client client : clientList) {
            client.start(InetAddress.getLocalHost());
            client.addTorrent(SharedTorrent.fromFile(torrentFile, storageDir, true));
        }

        final File targetFile = new File(downloadDir, fileName);
        final String digest = myTorrentTransport.downloadUrlTo(SERVER_PATH + fileName, targetFile);
        assertEquals(torrent.getHexInfoHash(), digest);
        assertTrue(FileUtils.contentEquals(artifactFile, targetFile));
    } finally {
        // Always stop the peers and the tracker, even when an assertion fails.
        for (Client client : clientList) {
            client.stop();
        }
        tracker.stop();
    }
}
From source file:edu.rit.flick.util.FlickTest.java
private final void testForLosslessness() throws IOException, InterruptedException { Flick.main(VERBOSE_FLAG, originalFile.getPath()); Unflick.main(VERBOSE_FLAG, flickedFile.getPath(), unflickedFile.getPath()); if (!originalFile.exists()) { /*/*w w w. ja va 2s .co m*/ * By falling in here, we assume the test failed because the file * given as input was not found. Equally so, the flicked file will * not be found by the unflicker since no flicking would have been * able to occur. */ final String expectedUsageStatement = String.format("%s%n%s%n", new NoSuchFileException(originalFile.getPath(), null, AbstractFlickFile.FILE_NOT_FOUND_EXCEPTION_MESSAGE).getMessage().trim(), new NoSuchFileException(flickedFile.getPath(), null, AbstractFlickFile.FILE_NOT_FOUND_EXCEPTION_MESSAGE).getMessage().trim()); final Object[] errorStack = errContent.toString().split("\n"); final int eSl = errorStack.length; final String actualUsageStatement = String.format("%s\n%s\n", Arrays.copyOfRange(errorStack, eSl - 2, eSl)); assertEquals(expectedUsageStatement, actualUsageStatement); return; } final FileInputStream origFIS = new FileInputStream(originalFile); ByteBufferInputStream orig = ByteBufferInputStream.map(origFIS.getChannel()); final FileInputStream comAndDecomFIS = new FileInputStream(unflickedFile); ByteBufferInputStream comAndDecom = ByteBufferInputStream.map(comAndDecomFIS.getChannel()); if (!FileUtils.contentEquals(originalFile, unflickedFile)) { long position = 0; while (orig.available() > 0) { position++; final int o = orig.read(); final int c = comAndDecom.read(); assertEquals(format(FILES_DO_NOT_MATCH_ERROR_FORMAT, originalFile, unflickedFile, position), (char) o + "", (char) c + ""); } assertEquals(orig.available(), comAndDecom.available()); origFIS.close(); orig.close(); comAndDecomFIS.close(); comAndDecom.close(); orig = null; comAndDecom = null; } }
From source file:edu.cornell.med.icb.goby.modes.TestReformatCompactReadsMode.java
/** * Validates that setting a maximum read length will propertly exclude reads from * being written to the output./*w w w. j a va2 s. c o m*/ * * @throws IOException if there is a problem reading or writing to the files */ @Test public void excludeReadLengthsOf23() throws IOException { final ReformatCompactReadsMode reformat = new ReformatCompactReadsMode(); final String inputFilename = "test-data/compact-reads/s_1_sequence_short_1_per_chunk.compact-reads"; reformat.setInputFilenames(inputFilename); // there are no reads in the input file longer than 23 reformat.setMaxReadLength(23); final String outputFilename = "test-results/reformat-test-exclude-read-lengths.compact-reads"; reformat.setOutputFile(outputFilename); reformat.execute(); final File inputFile = new File(inputFilename); final File outputFile = new File(outputFilename); assertFalse("The reformatted file should not be the same as the original", FileUtils.contentEquals(inputFile, outputFile)); final ReadsReader reader = new ReadsReader(FileUtils.openInputStream(outputFile)); assertFalse("There should be no reads in this file", reader.hasNext()); }