Example usage for java.nio.file FileSystems getDefault

List of usage examples for java.nio.file FileSystems getDefault

Introduction

On this page you can find example usages of java.nio.file FileSystems.getDefault.

Prototype

public static FileSystem getDefault() 

Source Link

Document

Returns the default FileSystem .

Usage

From source file:com.linkedin.pinot.controller.helix.core.realtime.PinotLLCRealtimeSegmentManager.java

/**
 * Extract the segment metadata file from the tar-zipped segment file that is expected to be in the
 * directory for the table./*from www .  j  a  v a2  s .c  om*/
 * Segment tar-zipped file path: DATADIR/rawTableName/segmentName
 * We extract the metadata into a file into a file in the same level,as in: DATADIR/rawTableName/segmentName.metadata
 * @param rawTableName Name of the table (not including the REALTIME extension)
 * @param segmentNameStr Name of the segment
 * @return SegmentMetadataImpl if it is able to extract the metadata file from the tar-zipped segment file.
 */
protected SegmentMetadataImpl extractSegmentMetadata(final String rawTableName, final String segmentNameStr) {
    final String baseDir = StringUtil.join("/", _controllerConf.getDataDir(), rawTableName);
    final String segFileName = StringUtil.join("/", baseDir, segmentNameStr);
    final File segFile = new File(segFileName);
    SegmentMetadataImpl segmentMetadata;
    Path metadataPath = null;
    try {
        InputStream is = TarGzCompressionUtils.unTarOneFile(new FileInputStream(segFile),
                V1Constants.MetadataKeys.METADATA_FILE_NAME);
        metadataPath = FileSystems.getDefault().getPath(baseDir, segmentNameStr + ".metadata");
        Files.copy(is, metadataPath);
        segmentMetadata = new SegmentMetadataImpl(new File(metadataPath.toString()));
    } catch (Exception e) {
        throw new RuntimeException("Exception extacting and reading segment metadata for " + segmentNameStr, e);
    } finally {
        if (metadataPath != null) {
            FileUtils.deleteQuietly(new File(metadataPath.toString()));
        }
    }
    return segmentMetadata;
}

From source file:it.units.malelab.ege.util.DUMapper.java

/**
 * Reads {@code generations} NEAT population dump files and derives, for every node (ordered as
 * input, bias, hidden, output by innovation number), the mean per-individual usage and the
 * diversity of its connection sets in each generation.
 *
 * @param baseDir directory containing the population files
 * @param fileNamePattern {@link String#format} pattern mapping a 1-based generation index to a file name
 * @param generations number of generation files to read
 * @return a pair {diversities, usages}, each indexed as [generation][node]
 * @throws IOException if a population file cannot be read
 */
private static double[][][] getNeatData3(String baseDir, String fileNamePattern, int generations)
        throws IOException {
    List<List<Map<Integer, Multimap<Integer, Integer>>>> data = new ArrayList<>();
    Map<Integer, String> nodeTypesMap = new HashMap<>();
    for (int g = 0; g < generations; g++) {
        List<Map<Integer, Multimap<Integer, Integer>>> currentPopulation = new ArrayList<>();
        // try-with-resources: the reader was previously leaked if parsing threw.
        try (BufferedReader reader = Files.newBufferedReader(
                FileSystems.getDefault().getPath(baseDir, String.format(fileNamePattern, g + 1)))) {
            String line;
            boolean isInPopulation = false;
            Map<Integer, Multimap<Integer, Integer>> currentIndividual = null;
            while ((line = reader.readLine()) != null) {
                if (line.equals("[NEAT-POPULATION:SPECIES]")) {
                    isInPopulation = true;
                    continue;
                }
                if (!isInPopulation) {
                    continue;
                }
                if (line.startsWith("\"g\"")) {
                    // a new genome record starts: store the previously accumulated individual
                    if (currentIndividual != null) {
                        currentPopulation.add(currentIndividual);
                    }
                    currentIndividual = new HashMap<>();
                }
                if (line.startsWith("\"n\"")) {
                    // node record: pieces[4] = innovation number, pieces[3] = node type (i/b/h/o)
                    String[] pieces = line.split(",");
                    nodeTypesMap.put(Integer.parseInt(pieces[4]), pieces[3].replaceAll("\"", ""));
                    currentIndividual.put(Integer.parseInt(pieces[4]), HashMultimap.<Integer, Integer>create());
                } else if (line.startsWith("\"l\"")) {
                    // link record: pieces[3] = source node, pieces[4] = target node
                    String[] pieces = line.split(",");
                    int from = Integer.parseInt(pieces[3]);
                    int to = Integer.parseInt(pieces[4]);
                    if (currentIndividual.get(from) == null) {
                        currentIndividual.put(from, HashMultimap.<Integer, Integer>create());
                    }
                    if (currentIndividual.get(to) == null) {
                        currentIndividual.put(to, HashMultimap.<Integer, Integer>create());
                    }
                    // key 1 = outgoing connections, key -1 = incoming connections
                    currentIndividual.get(from).put(1, to);
                    currentIndividual.get(to).put(-1, from);
                }
            }
            // Bug fix: the last individual of each file was previously dropped, because it is
            // only saved when a following "g" line starts the next individual.
            if (currentIndividual != null) {
                currentPopulation.add(currentIndividual);
            }
        }
        data.add(currentPopulation);
    }
    // Build the node innovation-number order: inputs, then bias, hidden, outputs.
    String[] nodeTypes = new String[] { "i", "b", "h", "o" };
    List<Integer> nodeINs = new ArrayList<>();
    for (String nodeType : nodeTypes) {
        List<Integer> typeNodeINs = new ArrayList<>();
        for (Integer in : nodeTypesMap.keySet()) {
            if (nodeTypesMap.get(in).equals(nodeType)) {
                typeNodeINs.add(in);
            }
        }
        Collections.sort(typeNodeINs);
        nodeINs.addAll(typeNodeINs);
    }
    // Populate the per-generation usage and diversity arrays.
    double[][] usages = new double[generations][];
    double[][] diversities = new double[generations][];
    for (int g = 0; g < generations; g++) {
        usages[g] = new double[nodeINs.size()];
        diversities[g] = new double[nodeINs.size()];
        List<Map<Integer, Multimap<Integer, Integer>>> currentPopulation = data.get(g);
        int i = 0;
        for (int nodeIN : nodeINs) {
            double[] localUsages = new double[currentPopulation.size()];
            Multiset<Set<Integer>> froms = HashMultiset.create();
            Multiset<Set<Integer>> tos = HashMultiset.create();
            int c = 0;
            for (Map<Integer, Multimap<Integer, Integer>> currentIndividual : currentPopulation) {
                if (nodeTypesMap.get(nodeIN).equals("i") || nodeTypesMap.get(nodeIN).equals("b")) {
                    // input/bias nodes: "used" when they have at least one outgoing link
                    if (currentIndividual.containsKey(nodeIN)) {
                        localUsages[c] = currentIndividual.get(nodeIN).get(1).isEmpty() ? 0 : 1;
                        tos.add(new HashSet<>(currentIndividual.get(nodeIN).get(1)));
                    } else {
                        tos.add(Collections.<Integer>emptySet());
                    }
                } else if (nodeTypesMap.get(nodeIN).equals("h")) {
                    // hidden nodes: half usage for incoming links, half for outgoing links
                    if (currentIndividual.containsKey(nodeIN)) {
                        localUsages[c] = (currentIndividual.get(nodeIN).get(-1).isEmpty() ? 0 : 0.5)
                                + (currentIndividual.get(nodeIN).get(1).isEmpty() ? 0 : 0.5);
                        tos.add(new HashSet<>(currentIndividual.get(nodeIN).get(1)));
                        froms.add(new HashSet<>(currentIndividual.get(nodeIN).get(-1)));
                    } else {
                        tos.add(Collections.<Integer>emptySet());
                        froms.add(Collections.<Integer>emptySet());
                    }
                } else if (nodeTypesMap.get(nodeIN).equals("o")) {
                    // output nodes: "used" when they have at least one incoming link
                    if (currentIndividual.containsKey(nodeIN)) {
                        localUsages[c] = currentIndividual.get(nodeIN).get(-1).isEmpty() ? 0 : 1;
                        froms.add(new HashSet<>(currentIndividual.get(nodeIN).get(-1)));
                    } else {
                        froms.add(Collections.<Integer>emptySet());
                    }
                }
                c = c + 1;
            }
            usages[g][i] = StatUtils.mean(localUsages);
            if (nodeTypesMap.get(nodeIN).equals("i") || nodeTypesMap.get(nodeIN).equals("b")) {
                diversities[g][i] = Utils.multisetDiversity(tos, tos.elementSet());
            } else if (nodeTypesMap.get(nodeIN).equals("h")) {
                // NOTE(review): both calls use tos.elementSet(); the froms term arguably should
                // use froms.elementSet() — confirm against Utils.multisetDiversity's contract.
                diversities[g][i] = Utils.multisetDiversity(tos, tos.elementSet()) / 2
                        + Utils.multisetDiversity(froms, tos.elementSet()) / 2;
            } else if (nodeTypesMap.get(nodeIN).equals("o")) {
                diversities[g][i] = Utils.multisetDiversity(froms, tos.elementSet());
            }
            i = i + 1;
        }
    }
    return new double[][][] { diversities, usages };
}

From source file:com.spectralogic.ds3client.integration.Smoke_Test.java

/**
 * Verifies that a partially completed read job can be recovered by job id: reads the first
 * object on the original job, then fetches the second object through the recovered job.
 * All channels are closed and the bucket/output directory cleaned up afterwards.
 */
@Test
public void testRecoverReadJob()
        throws IOException, XmlProcessingException, JobRecoveryException, URISyntaxException {
    final String bucketName = "test_recover_read_job_bucket";
    final String book1 = "beowulf.txt";
    final String book2 = "ulysses.txt";
    final Path objPath1 = ResourceUtils.loadFileResource(RESOURCE_BASE_NAME + book1);
    final Path objPath2 = ResourceUtils.loadFileResource(RESOURCE_BASE_NAME + book2);
    final Ds3Object obj1 = new Ds3Object(book1, Files.size(objPath1));
    final Ds3Object obj2 = new Ds3Object(book2, Files.size(objPath2));

    final Path dirPath = FileSystems.getDefault().getPath("output");
    if (!Files.exists(dirPath)) {
        Files.createDirectory(dirPath);
    }

    try {
        HELPERS.ensureBucketExists(bucketName, envDataPolicyId);

        final Ds3ClientHelpers.Job putJob = HELPERS.startWriteJob(bucketName, Lists.newArrayList(obj1, obj2));
        putJob.transfer(new ResourceObjectPutter(RESOURCE_BASE_NAME));

        final Ds3ClientHelpers.Job readJob;
        // try-with-resources: the channels were previously leaked (never closed).
        try (final FileChannel channel1 = FileChannel.open(dirPath.resolve(book1), StandardOpenOption.WRITE,
                StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)) {
            readJob = HELPERS.startReadJob(bucketName, Lists.newArrayList(obj1, obj2));
            final GetObjectResponse readResponse1 = client
                    .getObject(new GetObjectRequest(bucketName, book1, channel1, readJob.getJobId().toString(), 0));

            assertThat(readResponse1, is(notNullValue()));
            assertThat(readResponse1.getStatusCode(), is(equalTo(200)));
        }

        // Interruption...
        final Ds3ClientHelpers.Job recoverJob = HELPERS.recoverReadJob(readJob.getJobId());

        try (final FileChannel channel2 = FileChannel.open(dirPath.resolve(book2), StandardOpenOption.WRITE,
                StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)) {
            final GetObjectResponse readResponse2 = client.getObject(
                    new GetObjectRequest(bucketName, book2, channel2, recoverJob.getJobId().toString(), 0));
            assertThat(readResponse2, is(notNullValue()));
            assertThat(readResponse2.getStatusCode(), is(equalTo(200)));
        }

    } finally {
        deleteAllContents(client, bucketName);
        for (final Path tempFile : Files.newDirectoryStream(dirPath)) {
            Files.delete(tempFile);
        }
        Files.delete(dirPath);
    }
}

From source file:de.cebitec.readXplorer.differentialExpression.plot.BaySeqGraphicsTopComponent.java

/**
 * Exports the currently displayed chart to an SVG file at the given location.
 * The export runs on a background thread; progress is reported through a progress handle
 * and completion through this component's observer registration.
 *
 * @param fileLocation path of the SVG file to write
 */
private void saveToSVG(String fileLocation) {
    svgExportProgressHandle = ProgressHandleFactory.createHandle("Save plot to svg file: " + fileLocation);
    final Path target = FileSystems.getDefault().getPath(fileLocation, "");
    final ChartExporter chartExporter = new ChartExporter(target, chartPanel.getChart());
    chartExporter.registerObserver(this);
    new Thread(chartExporter).start();
}

From source file:com.mycompany.trafficimportfileconverter2.Main2Controller.java

/**
 * Watches the output directory and removes entries from {@code myfiles} as the written files
 * are consumed (deleted) by Wide Orbit. Returns when every written file has been consumed,
 * when the watch key becomes invalid, or when the thread is interrupted.
 *
 * @throws IOException if the watch service cannot be created
 */
private void watchForConsumption() throws IOException {
    final WatchService watcher = FileSystems.getDefault().newWatchService();

    try {
        Path dir = getOutputDir().toPath();
        WatchKey key = dir.register(watcher, ENTRY_DELETE);

        for (;;) {

            if (Thread.interrupted()) {
                key.cancel();
                return;
            }
            try {
                key = watcher.take();
            } catch (InterruptedException x) {
                return;
            }

            for (WatchEvent<?> event : key.pollEvents()) {
                WatchEvent.Kind<?> kind = event.kind();

                // This key is registered only for ENTRY_DELETE events, but an OVERFLOW
                // event can occur regardless if events are lost or discarded.
                if (kind == OVERFLOW) {
                    continue;
                }

                // The filename is the context of the event.
                @SuppressWarnings("unchecked")
                WatchEvent<Path> ev = (WatchEvent<Path>) event;
                Path filepath = ev.context();
                String filename = filepath.toString();
                System.out.println("the filename was: " + filename);
                System.out.println(kind);
                Optional<String> res = findFile(filename);
                if (res.isPresent()) {
                    System.out.println("BEFORE REMOVAL: " + myfiles.toString());
                    System.out.println("removing: " + res.get());
                    removeFromFiles(res.get());
                    System.out.println("Removed. Now: " + myfiles.toString());
                    int dpi = findThisDP(res.get());
                    if (-1 != dpi) {
                        // mark the corresponding day as consumed in the UI
                        UI(() -> {
                            datePickers[dpi].setStyle("-fx-background-color: lightgreen");
                            dayLabels[dpi].setStyle("-fx-background-color: lightgreen");
                        });
                    }
                    log("Wide Orbit CONSUMED: " + filename);

                } else {
                    System.out.println("is present was false for: " + filename);
                    System.out.println(myfiles.toString());
                }
                // Reset the key -- this step is critical if you want to
                // receive further watch events.  If the key is no longer valid,
                // the directory is inaccessible so exit the loop.
                boolean valid = key.reset();
                if (!valid) {
                    return;
                }
                if (myfiles.isEmpty()) {
                    key.cancel();
                    log("ALL WRITTEN FILES CONSUMED.");
                    System.out.println("\n\n\n");

                    return;
                }
            } //end of events
        } //end of infinite loop

    } catch (IOException x) {
        System.err.println(x);
    } finally {
        // Bug fix: the WatchService was previously never closed (native resource leak).
        try {
            watcher.close();
        } catch (IOException ignored) {
            // best-effort close on the way out
        }
        // NOTE(review): unconditionally re-interrupting looks intentional (signals the caller
        // that the watch loop has ended) — confirm callers rely on this.
        Thread.currentThread().interrupt();
    }

}

From source file:it.units.malelab.ege.util.DUMapper.java

/**
 * Reads a GSGP population dump (one individual per line, whitespace-separated gene values,
 * {@code populationSize} lines per generation) and computes per-generation statistics:
 * usage of each gene (its value normalized by the individual's max gene, averaged over the
 * population) and diversity (normalized variance of the gene across the population).
 *
 * @param baseDir directory containing the dump file
 * @param fileName name of the dump file
 * @param generations number of generations to read
 * @param genotypeSize number of genes per individual
 * @param populationSize number of individuals per generation
 * @return a pair {diversities, usages}, each indexed as [generation][gene]
 * @throws IOException if the dump file cannot be read
 */
private static double[][][] getGsgpData(String baseDir, String fileName, int generations, int genotypeSize,
        int populationSize) throws IOException {
    double[][] usages = new double[generations][];
    double[][] diversities = new double[generations][];
    // try-with-resources: the reader was previously leaked if parsing threw.
    try (BufferedReader reader = Files.newBufferedReader(FileSystems.getDefault().getPath(baseDir, fileName))) {
        for (int g = 0; g < generations; g++) {
            usages[g] = new double[genotypeSize];
            diversities[g] = new double[genotypeSize];
            double[][] popGenes = new double[genotypeSize][];
            for (int i = 0; i < genotypeSize; i++) {
                popGenes[i] = new double[populationSize];
            }
            for (int p = 0; p < populationSize; p++) {
                String line = reader.readLine();
                String[] pieces = line.split("\\s");
                double[] genes = new double[genotypeSize];
                double maxGene = 0d;
                for (int i = 0; i < genotypeSize; i++) {
                    double gene = Double.parseDouble(pieces[i]);
                    genes[i] = gene;
                    maxGene = Math.max(maxGene, gene);
                    popGenes[i][p] = gene;
                }
                // NOTE(review): if all genes are <= 0, maxGene stays 0 and this divides by zero
                // (yielding NaN/Infinity) — presumably genes are non-negative; confirm.
                for (int i = 0; i < genotypeSize; i++) {
                    usages[g][i] = usages[g][i] + genes[i] / maxGene;
                }
            }
            for (int i = 0; i < genotypeSize; i++) {
                usages[g][i] = usages[g][i] / populationSize;
                diversities[g][i] = normalizedVar(popGenes[i]);
            }
        }
    }
    return new double[][][] { diversities, usages };
}

From source file:com.netflix.metacat.usermetadata.mysql.MysqlUserMetadataService.java

/**
 * Loads the user-metadata connection properties. The config location is taken from the
 * system property {@code METACAT_USERMETADATA_CONFIG_LOCATION} (default
 * "usermetadata.properties") and resolved first on the context classpath, then on the
 * file system; the properties file is read as UTF-8.
 *
 * @throws Exception if the config file cannot be located or read
 */
private void initProperties() throws Exception {
    final String configLocation = System.getProperty(METACAT_USERMETADATA_CONFIG_LOCATION,
            "usermetadata.properties");
    final URL url = Thread.currentThread().getContextClassLoader().getResource(configLocation);
    // Prefer the classpath resource; fall back to interpreting the location as a plain path.
    final Path filePath = (url != null)
            ? Paths.get(url.toURI())
            : FileSystems.getDefault().getPath(configLocation);
    Preconditions.checkState(filePath != null, "Unable to read from user metadata config file '%s'",
            configLocation);

    connectionProperties = new Properties();
    try (Reader reader = Files.newBufferedReader(filePath, Charsets.UTF_8)) {
        connectionProperties.load(reader);
    }
}

From source file:org.tinymediamanager.core.Utils.java

/**
 * <b>PHYSICALLY</b> deletes a file by moving it to the datasource backup folder<br>
 * DS\.backup\&lt;filename&gt;<br>
 * maintaining its originating directory.
 *
 * @param file
 *          the file to be deleted
 * @param datasource
 *          the data source (for the location of the backup folder)
 * @return true/false if successful
 */
public static boolean deleteFileWithBackup(Path file, String datasource) {
    String fn = file.toAbsolutePath().toString();
    // NOTE(review): plain String.startsWith may also match sibling folders sharing the
    // datasource as a name prefix (e.g. "/data2" vs "/data") — confirm datasource paths
    // always end with a separator or are otherwise unambiguous.
    if (!fn.startsWith(datasource)) { // safety
        LOGGER.warn("could not delete file '" + fn + "': datasource '" + datasource + "' does not match");
        return false;
    }
    if (Files.isDirectory(file)) {
        LOGGER.warn("could not delete file '" + fn + "': file is a directory!");
        return false;
    }

    // Inject the backup folder right after the datasource prefix.
    // Bug fix: the former String.replace() substituted EVERY occurrence of the datasource
    // string in the path, corrupting paths that contain it more than once.
    fn = datasource + FileSystems.getDefault().getSeparator() + Constants.BACKUP_FOLDER
            + fn.substring(datasource.length());

    // backup
    try {
        // create the backup folder hierarchy if missing
        Path backup = Paths.get(fn);
        if (!Files.exists(backup.getParent())) {
            Files.createDirectories(backup.getParent());
        }
        // overwrite any stale backup file by deleting it first
        Files.deleteIfExists(backup);
        return moveFileSafe(file, backup);
    } catch (IOException e) {
        LOGGER.warn("Could not delete file: " + e.getMessage());
        return false;
    }
}

From source file:org.parosproxy.paros.Constant.java

/**
 * Returns the absolute path for the given {@code directory}.
 * <p>/*from www . j  a  va 2s.  com*/
 * The path is terminated with a separator.
 * 
 * @param directory the directory whose path will be made absolute
 * @return the absolute path for the given {@code directory}, terminated with a separator
 * @since 2.4.0
 */
private static String getAbsolutePath(String directory) {
    String realPath = Paths.get(directory).toAbsolutePath().toString();
    String separator = FileSystems.getDefault().getSeparator();
    if (!realPath.endsWith(separator)) {
        realPath += separator;
    }
    return realPath;
}

From source file:org.tinymediamanager.core.Utils.java

/**
 * <b>PHYSICALLY</b> deletes a complete directory by moving it to the datasource backup folder<br>
 * DS\.backup\&lt;foldername&gt;<br>
 * maintaining its originating directory.
 *
 * @param folder
 *          the folder to be deleted
 * @param datasource
 *          the datasource of this folder
 * @return true/false if successful
 */
public static boolean deleteDirectorySafely(Path folder, String datasource) {
    folder = folder.toAbsolutePath();
    String fn = folder.toAbsolutePath().toString();
    if (!Files.isDirectory(folder)) {
        LOGGER.warn("Will not delete folder '" + folder + "': folder is a file, NOT a directory!");
        return false;
    }
    // NOTE(review): plain String.startsWith may also match sibling folders sharing the
    // datasource as a name prefix — confirm datasource paths are unambiguous.
    if (!folder.toString().startsWith(datasource)) { // safety
        LOGGER.warn("Will not delete folder '" + folder + "': datasource '" + datasource + "' does not match");
        return false;
    }

    // Inject the backup folder right after the datasource prefix.
    // Bug fix: the former String.replace() substituted EVERY occurrence of the datasource
    // string in the path, corrupting paths that contain it more than once.
    fn = datasource + FileSystems.getDefault().getSeparator() + Constants.BACKUP_FOLDER
            + fn.substring(datasource.length());

    // backup
    try {
        // create the backup folder hierarchy if missing
        Path backup = Paths.get(fn);
        if (!Files.exists(backup.getParent())) {
            Files.createDirectories(backup.getParent());
        }
        // overwrite any stale backup by deleting it first
        deleteDirectoryRecursive(backup);
        return moveDirectorySafe(folder, backup);
    } catch (IOException e) {
        LOGGER.warn("could not delete directory: " + e.getMessage());
        return false;
    }
}