List of usage examples for java.nio.file.Path.getFileName()
Path getFileName();
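Before the project-specific examples, a minimal standalone sketch of what getFileName() returns: the farthest element of the path as a one-element relative Path, or null for a path with no elements (such as a root). The paths used here are illustrative only.

import java.nio.file.Path;
import java.nio.file.Paths;

public class GetFileNameSketch {
    public static void main(String[] args) {
        Path p = Paths.get("/var/log/app/server.log");     // illustrative path
        System.out.println(p.getFileName());               // "server.log" (a relative, one-element Path)
        System.out.println(p.getFileName().toString());    // same text, as a String
        System.out.println(Paths.get("/").getFileName());  // null: a root path (e.g. "/" on Unix) has no file name
    }
}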
From source file:com.facebook.buck.jvm.java.JarDirectoryStepTest.java
@Test
public void shouldNotThrowAnExceptionWhenAddingDuplicateEntries() throws IOException {
    Path zipup = folder.newFolder("zipup");

    Path first = createZip(zipup.resolve("a.zip"), "example.txt");
    Path second = createZip(zipup.resolve("b.zip"), "example.txt", "com/example/Main.class");

    JarDirectoryStep step = new JarDirectoryStep(
        new ProjectFilesystem(zipup),
        Paths.get("output.jar"),
        ImmutableSortedSet.of(first.getFileName(), second.getFileName()),
        "com.example.Main",
        /* manifest file */ null);
    ExecutionContext context = TestExecutionContext.newInstance();

    int returnCode = step.execute(context).getExitCode();
    assertEquals(0, returnCode);

    Path zip = zipup.resolve("output.jar");
    assertTrue(Files.exists(zip));

    // "example.txt", "Main.class" and the MANIFEST.MF.
    assertZipFileCountIs(3, zip);
    assertZipContains(zip, "example.txt");
}
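The point of getFileName() in this test is to turn the absolute zip paths into single-element relative paths ("a.zip", "b.zip"), which the step evidently resolves against the ProjectFilesystem rooted at zipup. A minimal sketch of that relativization (the paths are illustrative, not taken from the test):

import java.nio.file.Path;
import java.nio.file.Paths;

public class RelativeNameSketch {
    public static void main(String[] args) {
        Path zipup = Paths.get("/tmp/junit/zipup");             // illustrative temp folder
        Path first = zipup.resolve("a.zip");                    // /tmp/junit/zipup/a.zip
        Path name = first.getFileName();                        // a.zip (relative, one element)
        System.out.println(zipup.resolve(name).equals(first));  // true: resolves back under zipup
    }
}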
From source file:com.streamsets.pipeline.lib.io.TestSingleLineLiveFileReader.java
@Test(expected = IOException.class)
public void testInvalidOffset() throws Exception {
    Path file = createFile(Arrays.asList("Hello"));
    LiveFile lf = new LiveFile(file);
    new SingleLineLiveFileReader(LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""),
        null, lf, Charset.defaultCharset(), 10, 10);
}
From source file:neembuu.uploader.zip.generator.NUZipFileGenerator.java
private void walkOverAllFiles() throws IOException {
    for (final Path uploadersDirectory : uploadersDirectories) {
        Files.walkFileTree(uploadersDirectory, new FileVisitor<Path>() {
            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
                exc.printStackTrace();
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                if (file.getFileName().toString().endsWith(".class")) {
                    visitClassFile(file, attrs, uploadersDirectory);
                }
                return FileVisitResult.CONTINUE;
            }
        });
    }
}
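The visitor above only needs the last path element, so the ".class" suffix test goes through getFileName(). For reference, a shorter equivalent (a sketch only, assuming Java 8+; "root" and the println stand in for uploadersDirectory and visitClassFile(...)) can use Files.walk:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

public class ClassFileWalkSketch {
    public static void main(String[] args) throws IOException {
        Path root = Paths.get("build/classes");  // illustrative directory
        // Walk the tree and keep regular files whose final path element ends with ".class".
        try (Stream<Path> paths = Files.walk(root)) {
            paths.filter(Files::isRegularFile)
                 .filter(p -> p.getFileName().toString().endsWith(".class"))
                 .forEach(p -> System.out.println("class file: " + p));
        }
    }
}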
From source file:org.fao.geonet.api.mapservers.GeoServerRest.java
/**
 * Create a coverage from a file.
 *
 * @param ws Name of the workspace to add the coverage in
 * @param cs Name of the coverage
 * @param f  A zip or a GeoTIFF {@link java.io.File} to upload.
 */
public boolean createCoverage(String ws, String cs, Path f, String metadataUuid, String metadataTitle,
        String metadataAbstract) throws IOException {
    String contentType = "image/tiff";
    if (f.getFileName().toString().toLowerCase().endsWith(".zip")) {
        contentType = "application/zip";
    }
    int status = sendREST(GeoServerRest.METHOD_PUT,
        "/workspaces/" + ws + "/coveragestores/" + cs + "/file.geotiff",
        null, f, contentType, false);
    createCoverageForStore(ws, cs, null, metadataUuid, metadataTitle, metadataAbstract);
    return status == 201;
}
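The extension check on getFileName() drives the Content-Type. A minimal sketch of the same decision; the Locale.ROOT form of toLowerCase is an assumption added here to keep the comparison locale-independent, the original uses the default locale:

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Locale;

public class ContentTypeSketch {
    static String contentTypeFor(Path f) {
        // Locale.ROOT avoids locale-sensitive case mapping (e.g. the Turkish dotless i).
        String name = f.getFileName().toString().toLowerCase(Locale.ROOT);
        return name.endsWith(".zip") ? "application/zip" : "image/tiff";
    }

    public static void main(String[] args) {
        System.out.println(contentTypeFor(Paths.get("/data/coverage.ZIP")));  // application/zip
        System.out.println(contentTypeFor(Paths.get("/data/coverage.tif")));  // image/tiff
    }
}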
From source file:com.streamsets.pipeline.lib.io.TestSingleLineLiveFileReader.java
@Test(expected = IllegalArgumentException.class)
public void testInvalidCharset1() throws Exception {
    Path file = createFile(Arrays.asList("Hello1\n", "Hello2\n"));
    LiveFile lf = new LiveFile(file);
    new SingleLineLiveFileReader(LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""),
        null, lf, Charset.forName("UTF-16"), 0, 10);
}
From source file:com.streamsets.pipeline.lib.io.TestSingleLineLiveFileReader.java
@Test(expected = IllegalArgumentException.class)
public void testInvalidCharset2() throws Exception {
    Path file = createFile(Arrays.asList("Hello1\n", "Hello2\n"));
    LiveFile lf = new LiveFile(file);
    new SingleLineLiveFileReader(LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""),
        null, lf, Charset.forName("UTF-32"), 0, 10);
}
From source file:com.streamsets.pipeline.lib.io.TestSingleLineLiveFileReader.java
@Test(expected = IllegalArgumentException.class)
public void testInvalidCharset3() throws Exception {
    Path file = createFile(Arrays.asList("Hello1\n", "Hello2\n"));
    LiveFile lf = new LiveFile(file);
    new SingleLineLiveFileReader(LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""),
        null, lf, Charset.forName("IBM500"), 0, 10);
}
From source file:com.scooter1556.sms.server.io.AdaptiveStreamingProcess.java
public void initialise() {
    // Stop transcode process if one is already running
    if (process != null) {
        process.destroy();
    }

    // Determine stream directory
    streamDirectory = new File(SettingsService.getInstance().getCacheDirectory().getPath() + "/streams/" + id);

    try {
        if (streamDirectory.exists()) {
            // Wait for process to finish
            if (process != null) {
                process.waitFor();
            }
            FileUtils.cleanDirectory(streamDirectory);
        } else {
            boolean success = streamDirectory.mkdirs();
            if (!success) {
                LogService.getInstance().addLogEntry(Level.ERROR, CLASS_NAME,
                    "Unable to create directory " + streamDirectory.getPath(), null);
                return;
            }
        }

        // Reset flags
        ended = false;

        // Setup post-processing of audio segments if required
        if (postProcessEnabled && audioTranscodes != null && mediaElement != null && transcoder != null) {
            // Setup thread pool for post-processing segments
            postProcessExecutor = Executors.newCachedThreadPool();

            // Setup directory watcher
            watcher = new DirectoryWatcher.Builder()
                .addDirectories(streamDirectory.getPath())
                .setPreExistingAsCreated(true)
                .build(new DirectoryWatcher.Listener() {
                    @Override
                    public void onEvent(DirectoryWatcher.Event event, final Path path) {
                        switch (event) {
                        case ENTRY_CREATE:
                            // Check if we are interested in this file
                            if (!FilenameUtils.getExtension(path.toString()).isEmpty()
                                    || !path.getFileName().toString().contains("audio")) {
                                break;
                            }

                            // Get the information we require
                            String[] segmentData = FilenameUtils.getBaseName(path.getFileName().toString()).split("-");

                            if (segmentData.length < 3) {
                                break;
                            }

                            // Variables
                            final int transcode = Integer.parseInt(segmentData[2]);

                            // Retrieve transcode format
                            if (audioTranscodes.length < transcode || mediaElement == null) {
                                break;
                            }

                            // Determine codec
                            AudioTranscode aTranscode = audioTranscodes[transcode];
                            String codec = aTranscode.getCodec();

                            if (codec.equals("copy")) {
                                codec = MediaUtils.getAudioStreamById(mediaElement.getAudioStreams(),
                                    aTranscode.getId()).getCodec();
                            }

                            final String format = TranscodeUtils.getFormatForAudioCodec(codec);

                            // Transcode
                            postProcessExecutor.submit(new Runnable() {
                                @Override
                                public void run() {
                                    postProcess(path.toString(), format);
                                }
                            });

                            break;

                        case ENTRY_MODIFY:
                            break;

                        case ENTRY_DELETE:
                            break;
                        }
                    }
                });

            // Start directory watcher
            watcher.start();
        }

        // Start transcoding
        start();
    } catch (Exception ex) {
        if (process != null) {
            process.destroy();
        }

        if (watcher != null) {
            watcher.stop();
        }

        if (postProcessExecutor != null && !postProcessExecutor.isTerminated()) {
            postProcessExecutor.shutdownNow();
        }

        ended = true;

        LogService.getInstance().addLogEntry(Level.ERROR, CLASS_NAME,
            "Error starting adaptive streaming process.", ex);
    }
}
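The only getFileName() work in the listener above is picking apart the segment file name. A distilled sketch of that parsing follows; the extensionless "<name>-audio-<index>" naming scheme is inferred from the contains("audio") and split("-") calls and is an assumption, not documented behaviour:

import java.nio.file.Path;
import java.nio.file.Paths;

public class SegmentNameSketch {
    public static void main(String[] args) {
        Path path = Paths.get("/cache/streams/42/stream-audio-2");  // hypothetical segment file, no extension
        String name = path.getFileName().toString();                // "stream-audio-2"
        // The original first strips any extension via FilenameUtils.getBaseName(...) from Commons IO.
        String[] segmentData = name.split("-");                     // ["stream", "audio", "2"]
        if (name.contains("audio") && segmentData.length >= 3) {
            int transcode = Integer.parseInt(segmentData[2]);       // 2: index into the audio transcodes
            System.out.println("post-process audio transcode #" + transcode);
        }
    }
}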
From source file:com.streamsets.pipeline.lib.io.TestSingleLineLiveFileReader.java
@Test
public void testValidCharsets() throws Exception {
    Path file = createFile(Arrays.asList("Hello1\n", "Hello2\n"));
    LiveFile lf = new LiveFile(file);
    new SingleLineLiveFileReader(LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""),
        null, lf, Charset.forName("US-ASCII"), 0, 10);
    new SingleLineLiveFileReader(LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""),
        null, lf, StandardCharsets.UTF_8, 0, 10);
    new SingleLineLiveFileReader(LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""),
        null, lf, Charset.forName("GBK"), 0, 10);
    new SingleLineLiveFileReader(LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""),
        null, lf, Charset.forName("ISO-8859-1"), 0, 10);
    new SingleLineLiveFileReader(LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""),
        null, lf, Charset.forName("shift_jis"), 0, 10);
    new SingleLineLiveFileReader(LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""),
        null, lf, Charset.forName("euc-jp"), 0, 10);
    new SingleLineLiveFileReader(LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""),
        null, lf, Charset.forName("euc-kr"), 0, 10);
    new SingleLineLiveFileReader(LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""),
        null, lf, Charset.forName("koi8-r"), 0, 10);
}
From source file:herddb.cli.HerdDBCLI.java
private static void describeRawFile(String tablespaceuuid, String tableName, String tablesmetadatafile,
        String rawfile, String mode) throws Exception {
    Path path = Paths.get(rawfile);
    switch (mode) {
    case "txlog": {
        try (FileCommitLog.CommitFileReader reader = CommitFileReader.openForDescribeRawfile(path)) {
            LogEntryWithSequenceNumber nextEntry = reader.nextEntry();
            while (nextEntry != null) {
                println(nextEntry.logSequenceNumber.ledgerId + "," + nextEntry.logSequenceNumber.offset + ","
                    + nextEntry.entry.toString());
                nextEntry = reader.nextEntry();
            }
        }
        break;
    }
    case "datapage": {
        if (tablesmetadatafile.isEmpty()) {
            throw new IllegalArgumentException("metadatafile option is required in order to analyze a datapage");
        }
        if (tableName.isEmpty()) {
            throw new IllegalArgumentException("table option is required in order to analyze a database");
        }
        if (tablespaceuuid.isEmpty()) {
            throw new IllegalArgumentException("tablespaceuuid option is required in order to analyze a database");
        }
        Path pathtablesmetadata = Paths.get(tablesmetadatafile);
        List<Table> tables = FileDataStorageManager.readTablespaceStructure(pathtablesmetadata, tablespaceuuid, null);
        println("File " + pathtablesmetadata.getFileName() + " contains the following table schemas:");
        for (Table t : tables) {
            println("Table: " + t.uuid + " - " + t.tablespace + "." + t.name);
        }
        Table table = tables.stream().filter(t -> t.name.equals(tableName)).findAny().orElse(null);
        if (table == null) {
            println("No such table " + tableName);
            return;
        }
        List<Record> records = FileDataStorageManager.rawReadDataPage(path);
        for (Record record : records) {
            StringBuilder line = new StringBuilder();
            DataAccessor dataAccessor = record.getDataAccessor(table);
            for (int i = 0; i < table.columns.length; i++) {
                Object value = dataAccessor.get(i);
                if (i > 0) {
                    line.append(',');
                }
                line.append(value);
            }
            println(line);
        }
        break;
    }
    case "tablecheckpoint": {
        TableStatus tableStatus = FileDataStorageManager.readTableStatusFromFile(path);
        println("TableName:" + tableStatus.tableName);
        println("Sequence Number:" + tableStatus.sequenceNumber.ledgerId + ", " + tableStatus.sequenceNumber.offset);
        println("Next Page Id:" + tableStatus.nextPageId);
        println("Next Primary key value:"
            + (tableStatus.nextPrimaryKeyValue != null ? Bytes.from_array(tableStatus.nextPrimaryKeyValue) : "null"));
        println("Active pages:" + tableStatus.activePages);
        break;
    }
    case "indexcheckpoint": {
        IndexStatus indexStatus = FileDataStorageManager.readIndexStatusFromFile(path);
        println("IndexName:" + indexStatus.indexName);
        println("Sequence Number:" + indexStatus.sequenceNumber.ledgerId + ", " + indexStatus.sequenceNumber.offset);
        println("Active pages:" + indexStatus.activePages);
        try {
            BLinkMetadata<Bytes> blinkMetadata = MetadataSerializer.INSTANCE.read(indexStatus.indexData);
            println("BLink Metadata: " + blinkMetadata);
            println("BLink Metadata nodes: " + blinkMetadata.nodesToStrings());
        } catch (IOException err) {
        }
        break;
    }
    case "tablespacemetadata": {
        TableSpace tableSpace = FileMetadataStorageManager.readTableSpaceMetadataFile(path);
        println("Name:" + tableSpace.name);
        println("UUID:" + tableSpace.uuid);
        println("Leader:" + tableSpace.leaderId);
        break;
    }
    case "tablesmetadata": {
        if (tablespaceuuid.isEmpty()) {
            throw new IllegalArgumentException("tablespaceuuid option is required in order to analyze a database");
        }
        List<Table> tables = FileDataStorageManager.readTablespaceStructure(path, tablespaceuuid, null);
        for (Table table : tables) {
            println("Table");
            println("Name: " + table.name);
            println("Tablespace: " + table.tablespace);
            println("Table UUID: " + table.uuid);
            for (Column c : table.columns) {
                println("Column : " + c.name + ", serialPosition: " + c.serialPosition + ", type "
                    + ColumnTypes.typeToString(c.type) + " (" + c.type + ")");
            }
        }
        break;
    }
    default:
        System.out.println("Unknown file type " + mode
            + " valid options are txlog, datapage, tablecheckpoint, indexcheckpoint, tablesmetadata");
    }
}