List of usage examples for java.nio.file.Path.toString()
String toString();
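Path.toString() returns the string representation of the path, rendered with the separator of the file system that created it. A minimal sketch of that behavior (the path below is purely illustrative, not taken from the examples that follow):

import java.nio.file.Path;
import java.nio.file.Paths;

public class PathToStringDemo {
    public static void main(String[] args) {
        // Hypothetical relative path used only for illustration.
        Path path = Paths.get("data", "input.csv");
        // toString() renders it with the platform separator:
        // "data/input.csv" on POSIX systems, "data\input.csv" on Windows.
        System.out.println(path.toString());
    }
}

Many of the examples below rely on exactly this: they inspect the rendered string with contains(), endsWith(), or substring() to decide how to handle a file.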
From source file:edu.umd.umiacs.clip.tools.io.AllFiles.java
public static Stream<CSVRecord> records(CSVFormat format, Path path) {
    try {
        // Dispatch on the textual form of the path: compressed files go to the
        // matching reader, and a '*' in the name is expanded over sibling files.
        String p = path.toString();
        if (!p.contains("*")) {
            return p.endsWith(".gz") ? GZIPFiles.records(format, path)
                    : p.endsWith(".bz2") ? BZIP2Files.records(format, path)
                    : overridenRecords(format, path);
        } else {
            File file = path.toFile();
            return Stream
                    .of(file.getParentFile().listFiles(
                            (dir, name) -> name.matches(file.getName().replace(".", "\\.").replace("*", ".+"))))
                    .sorted()
                    .flatMap(f -> records(format, f));
        }
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
From source file:edu.umd.umiacs.clip.tools.io.AllFiles.java
public static Stream<String> lines(Path path) {
    try {
        String p = path.toString();
        if (!p.contains("*")) {
            return p.endsWith(".gz") ? GZIPFiles.lines(path)
                    : p.endsWith(".bz2") ? BZIP2Files.lines(path)
                    : overridenLines(path);
        } else {
            File file = path.toFile();
            return Stream
                    .of(file.getParentFile().listFiles(
                            (dir, name) -> name.matches(file.getName().replace(".", "\\.").replace("*", ".+"))))
                    .sorted()
                    .flatMap(AllFiles::lines);
        }
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
From source file:ch.bender.evacuate.Helper.java
/**
 * If the given target is already present, the method retains this older version in a kind of
 * FIFO buffer (but persistent on disk). The given MaxBackups number indicates how many such
 * backups are kept.
 * <p>
 * This routine is valid for files and directories. With files, the numbering suffix is inserted
 * before the last dot in the file name; with directories, the number suffix is appended at the
 * end.
 * <p>
 * Example: the target is "Target.txt" and the following are already present:
 * <pre>
 * Target.txt
 * Target_01.txt
 * Target_02.txt
 * </pre>
 * Target_02.txt is renamed to Target_03.txt, Target_01.txt to Target_02.txt, and Target.txt to Target_01.txt.
 * <p>
 * If MaxBackups were 3, Target_02.txt would have been deleted instead of renamed.
 *
 * @param aTarget             the file or directory whose older versions are rotated
 * @param aMaxBackups         how many trashed versions to keep
 * @param aFailedPreparations collects targets whose preparation failed, keyed by path
 */
public static void prepareTrashChain(Path aTarget, int aMaxBackups, Map<Path, Throwable> aFailedPreparations) {
    myLog.debug("preparing trash chain for " + aTarget.toString());

    try {
        int i = aMaxBackups - 1;

        while (i > 0) {
            Path targetUpper = appendNumberSuffix(aTarget, i);
            Path targetLower = (i > 1) ? appendNumberSuffix(aTarget, i - 1) : aTarget;
            i--;

            if (Files.notExists(targetUpper) && Files.notExists(targetLower)) {
                continue;
            }

            if (Files.exists(targetUpper)) {
                myLog.info("There are already " + (i + 2) + " trashed versions of " + aTarget.toString()
                        + ". Deleting the oldest one");

                if (Files.isDirectory(targetUpper)) {
                    Helper.deleteDirRecursive(targetUpper);
                } else {
                    Files.delete(targetUpper);
                }
            }

            if (Files.notExists(targetLower)) {
                continue;
            }

            myLog.debug("Renaming " + targetLower.toString() + " to " + targetUpper.toString());
            Files.move(targetLower, targetUpper, StandardCopyOption.ATOMIC_MOVE);
        }
    } catch (Throwable e) {
        aFailedPreparations.put(aTarget, e);
    }
}
From source file:com.facebook.buck.artifact_cache.ArtifactUploader.java
/** Archive and compress 'pathsToIncludeInArchive' into 'out', using tar+zstandard. */
@VisibleForTesting
static void compress(ProjectFilesystem projectFilesystem, Collection<Path> pathsToIncludeInArchive, Path out)
        throws IOException {
    try (OutputStream o = new BufferedOutputStream(Files.newOutputStream(out));
            OutputStream z = new ZstdCompressorOutputStream(o);
            TarArchiveOutputStream archive = new TarArchiveOutputStream(z)) {
        archive.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
        for (Path path : pathsToIncludeInArchive) {
            boolean isRegularFile = !projectFilesystem.isDirectory(path);

            // Add a file entry.
            TarArchiveEntry e = new TarArchiveEntry(path.toString() + (isRegularFile ? "" : "/"));
            e.setMode((int) projectFilesystem.getPosixFileMode(path));
            e.setModTime(ZipConstants.getFakeTime());

            if (isRegularFile) {
                e.setSize(projectFilesystem.getFileSize(path));
                archive.putArchiveEntry(e);
                try (InputStream input = projectFilesystem.newFileInputStream(path)) {
                    ByteStreams.copy(input, archive);
                }
            } else {
                archive.putArchiveEntry(e);
            }
            archive.closeArchiveEntry();
        }
        archive.finish();
    }
}
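Tar entry names conventionally use '/' on every platform, while Path.toString() uses the host separator, so the entry names produced above may come out backslash-separated on Windows. A hedged sketch of normalizing the name first (the helper and sample path are assumptions for illustration, not part of the Buck code):

import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;

public class TarEntryNames {
    // Hypothetical helper: render a relative Path as a tar-style entry name.
    static String tarEntryName(Path path, boolean isDirectory) {
        String name = path.toString().replace(File.separatorChar, '/');
        return isDirectory ? name + "/" : name;
    }

    public static void main(String[] args) {
        Path sample = Paths.get("buck-out", "gen", "lib.jar");
        System.out.println(tarEntryName(sample, false)); // buck-out/gen/lib.jar
    }
}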
From source file:general.Main.java
/**
 * Loads all pre-build query types.
 */
private static void loadPreBuildQueryTypes() {
    try (DirectoryStream<Path> directoryStream = Files
            .newDirectoryStream(Paths.get("preBuildQueryTypeFiles"))) {
        for (Path filePath : directoryStream) {
            if (Files.isRegularFile(filePath)) {
                if (filePath.toString().endsWith(".preBuildQueryType")) {
                    String queryString = new String(readAllBytes(filePath));

                    OpenRDFQueryHandler queryHandler = new OpenRDFQueryHandler();
                    //queryHandler.setValidityStatus(1);
                    queryHandler.setQueryString(queryString);
                    if (queryHandler.getValidityStatus() != 1) {
                        logger.info("The Pre-build query " + filePath + " is no valid SPARQL");
                        continue;
                    }
                    ParsedQuery normalizedPreBuildQuery = queryHandler.getNormalizedQuery();
                    String queryTypeName = filePath.toString().substring(
                            filePath.toString().lastIndexOf("/") + 1, filePath.toString().lastIndexOf("."));
                    if (normalizedPreBuildQuery != null) {
                        queryTypes.put(normalizedPreBuildQuery, queryTypeName);
                    } else {
                        logger.info("Pre-build query " + queryTypeName + " could not be parsed.");
                    }
                }
                if (filePath.toString().endsWith(".tsv")) {
                    TsvParserSettings parserSettings = new TsvParserSettings();
                    parserSettings.setLineSeparatorDetectionEnabled(true);
                    parserSettings.setHeaderExtractionEnabled(true);
                    parserSettings.setSkipEmptyLines(true);
                    parserSettings.setReadInputOnSeparateThread(true);

                    ObjectRowProcessor rowProcessor = new ObjectRowProcessor() {
                        @Override
                        public void rowProcessed(Object[] row, ParsingContext parsingContext) {
                            if (row.length <= 1) {
                                logger.warn("Ignoring line without tab while parsing.");
                                return;
                            }
                            if (row.length == 5) {
                                queryTypeToToolMapping.put(new Tuple2<>(row[0].toString(), row[1].toString()),
                                        new Tuple2<>(row[2].toString(), row[3].toString()));
                                return;
                            }
                            logger.warn("Line with row length " + row.length
                                    + " found. Is the formatting of toolMapping.tsv correct?");
                        }
                    };

                    parserSettings.setProcessor(rowProcessor);

                    TsvParser parser = new TsvParser(parserSettings);
                    parser.parse(filePath.toFile());
                }
            }
        }
    } catch (IOException e) {
        logger.error("Could not read from directory inputData/queryType/premadeQueryTypeFiles", e);
    }
}
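The query type name above is extracted by slicing filePath.toString() at the last '/' and '.', which ties the code to the platform's separator. A separator-independent sketch of the same extraction via getFileName() (class, method, and sample path are illustrative assumptions, not from the project):

import java.nio.file.Path;
import java.nio.file.Paths;

public class QueryTypeNames {
    // Hypothetical helper: strip the extension from the last path element.
    static String queryTypeName(Path filePath) {
        String fileName = filePath.getFileName().toString();
        int dot = fileName.lastIndexOf('.');
        return dot >= 0 ? fileName.substring(0, dot) : fileName;
    }

    public static void main(String[] args) {
        Path sample = Paths.get("preBuildQueryTypeFiles", "exampleQuery.preBuildQueryType");
        System.out.println(queryTypeName(sample)); // exampleQuery
    }
}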
From source file:demo.utils.MiniClusterUtils.java
public static void startMiniCluster() {
    if (clusterLauncher != null) {
        throw new IllegalStateException("MiniClustrer is currently running");
    }
    File file = new File(System.getProperty("user.dir"));
    Path path = Paths.get(file.getAbsolutePath());
    Path parentPath = path.getParent();
    String[] resources = file.list();
    for (String resource : resources) {
        if (resource.equals("yaya-demo")) {
            parentPath = path;
            break;
        }
    }

    File miniClusterExe = new File(
            parentPath.toString() + "/yarn-test-cluster/build/install/yarn-test-cluster/bin/yarn-test-cluster");
    System.out.println(miniClusterExe.getAbsolutePath());
    if (!miniClusterExe.exists()) {
        logger.info("BUILDING MINI_CLUSTER");
        CommandProcessLauncher buildLauncher = new CommandProcessLauncher(
                path.toString() + "/build-mini-cluster");
        buildLauncher.launch();
    }
    Assert.isTrue(miniClusterExe.exists(), "Failed to find mini-cluster executable");

    clusterLauncher = new CommandProcessLauncher(miniClusterExe.getAbsolutePath());
    executor = Executors.newSingleThreadExecutor();
    executor.execute(new Runnable() {
        @Override
        public void run() {
            logger.info("STARTING MINI_CLUSTER");
            clusterLauncher.launch();
        }
    });
    try {
        Thread.sleep(2000);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
}
From source file:gndata.lib.config.AbstractConfig.java
/**
 * Loads a configuration of a certain type from a json file.
 * Note: this method should be used by subclasses in order to implement
 * a more specific load method.
 *
 * @param filePath Path to the json configuration file to read from.
 * @param cls      The class of the configuration type.
 *
 * @throws IOException
 */
protected static <T extends AbstractConfig> T load(String filePath, Class<T> cls) throws IOException {
    Path tmpPath = Paths.get(filePath);

    ObjectMapper mapper = new ObjectMapper().enable(ACCEPT_EMPTY_STRING_AS_NULL_OBJECT)
            .enable(ACCEPT_SINGLE_VALUE_AS_ARRAY).disable(FAIL_ON_UNKNOWN_PROPERTIES);

    try {
        T config = mapper.readValue(tmpPath.toFile(), cls);
        config.setFilePath(tmpPath.toString());
        return config;
    } catch (IOException e) {
        throw new IOException("Unable to read configuration file: " + filePath, e);
    }
}
From source file:oz.hadoop.yarn.api.utils.MiniClusterUtils.java
public static void startMiniCluster() {
    try {
        semaphore.acquire();
    } catch (InterruptedException e) {
        throw new IllegalStateException("Acquisition of semaphore is interrupted. Exiting");
    }
    if (clusterLauncher != null) {
        throw new IllegalStateException("MiniClustrer is currently running");
    }
    File file = new File("");
    Path path = Paths.get(file.getAbsolutePath());
    Path parentPath = path.getParent();

    File clusterConfiguration = new File(parentPath + "/yarn-test-cluster/src/main/resources");
    Assert.isTrue(clusterConfiguration.exists());
    ConfigUtils.addToClasspath(clusterConfiguration);

    File miniClusterExe = new File(
            parentPath.toString() + "/yarn-test-cluster/build/install/yarn-test-cluster/bin/yarn-test-cluster");
    System.out.println(miniClusterExe.getAbsolutePath());
    if (!miniClusterExe.exists()) {
        logger.info("BUILDING MINI_CLUSTER");
        CommandProcessLauncher buildLauncher = new CommandProcessLauncher(
                path.toString() + "/build-mini-cluster");
        buildLauncher.launch();
    }
    Assert.isTrue(miniClusterExe.exists(), "Failed to find mini-cluster executable");

    clusterLauncher = new CommandProcessLauncher(miniClusterExe.getAbsolutePath());
    executor = Executors.newSingleThreadExecutor();
    executor.execute(new Runnable() {
        @Override
        public void run() {
            logger.info("STARTING MINI_CLUSTER");
            clusterLauncher.launch();
            System.out.println("EXITING>>>>>>>>>");
        }
    });
    try {
        Thread.sleep(2000);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
}
From source file:io.github.retz.executor.FileManager.java
static void fetchPersistentFiles(List<String> files, String destination, boolean trustPVFiles)
        throws IOException {
    for (String file : files) {
        java.nio.file.Path path = Paths.get(file).getFileName();
        if (path == null) {
            throw new FileSystemException(destination);
        }
        File f = new File(FilenameUtils.concat(destination, path.toString()));
        LOG.info("Downloading: {} as {}", file, f);

        if (f.exists()) {
            LOG.debug("File already exists: {}", f);
            if (!trustPVFiles) {
                try {
                    boolean needsDecompression = needsDecompression(f, destination);
                    if (needsDecompression) {
                        decompress(f, destination);
                    } else {
                        LOG.info("File {} was correctly decompressed before. Skipping decompression.", file);
                    }
                } catch (ArchiveException e) {
                    LOG.error("ArchiveException on {}: {}", f, e.getMessage());
                    e.printStackTrace();
                }
            }
        } else if (file.startsWith("http")) {
            fetchHTTPFile(file, destination);
            decompress(f, destination);
        } else if (file.startsWith("hdfs://")) {
            fetchHDFSFile(file, destination);
            decompress(f, destination);
        } else if (file.startsWith("maprfs://")) {
            fetchHDFSFile(file, destination);
            decompress(f, destination);
        } else {
            LOG.error("Invalid URL scheme: {}", file);
        }
    }
}
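The null check above exists because getFileName() returns null for paths with no name elements, such as a bare root; only after that guard is toString() safe to call. A tiny illustration (the paths are made up):

import java.nio.file.Paths;

public class FileNameNullCheck {
    public static void main(String[] args) {
        System.out.println(Paths.get("/data/archive.tar.gz").getFileName()); // archive.tar.gz
        System.out.println(Paths.get("/").getFileName());                    // null: a root has no name element
    }
}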
From source file:com.basistech.rosette.api.RosetteAPITest.java
@Parameterized.Parameters
public static Collection<Object[]> data() throws URISyntaxException, IOException {
    File dir = new File("src/test/mock-data/response");
    Collection<Object[]> params = new ArrayList<>();
    try (DirectoryStream<Path> paths = Files.newDirectoryStream(dir.toPath())) {
        for (Path file : paths) {
            if (file.toString().endsWith(".json")) {
                params.add(new Object[] { file.getFileName().toString() });
            }
        }
    }
    return params;
}
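The same filtering can be pushed into the directory stream itself with a glob, which avoids the per-entry toString() check. A sketch under the assumption that only the ".json" extension matters (the directory mirrors the test layout above, but this is not the project's code):

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class JsonFileListing {
    public static void main(String[] args) throws IOException {
        Path dir = Paths.get("src/test/mock-data/response");
        // The glob is applied by the directory stream, so no string check is needed per entry.
        try (DirectoryStream<Path> paths = Files.newDirectoryStream(dir, "*.json")) {
            for (Path file : paths) {
                System.out.println(file.getFileName().toString());
            }
        }
    }
}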