Example usage for java.nio.file Files readAllLines

Introduction

On this page you can find usage examples for java.nio.file.Files.readAllLines.

Prototype

public static List<String> readAllLines(Path path, Charset cs) throws IOException 

Document

Read all lines from a file.
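
For reference, here is a minimal, self-contained sketch of a direct call (the file name below is hypothetical):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;

public class ReadAllLinesDemo {
    public static void main(String[] args) throws IOException {
        // Hypothetical input file; replace with a real path.
        Path path = Paths.get("example.txt");

        // Reads the entire file into memory, decodes it with the given
        // charset, and splits it into a list of lines.
        List<String> lines = Files.readAllLines(path, StandardCharsets.UTF_8);

        for (String line : lines) {
            System.out.println(line);
        }
    }
}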

Usage

From source file:nl.vumc.biomedbridges.galaxy.metadata.GalaxyWorkflowStepTest.java

/**
 * Common initialization for all unit tests.
 *
 * @throws ParseException when parsing fails.
 * @throws IOException when reading from file fails.
 */
@Before
public void setUp() throws ParseException, IOException {
    final String filePath = GALAXY_DIRECTORY + "TestWorkflow.ga";
    final String jsonContent = Joiner.on("\n").join(Files.readAllLines(Paths.get(filePath), Charsets.UTF_8));
    final JSONObject workflowJson = (JSONObject) new JSONParser().parse(jsonContent);
    final JSONObject stepsMapJson = (JSONObject) workflowJson.get("steps");
    workflowStep = new GalaxyWorkflowStep((JSONObject) stepsMapJson.get("2"));
}
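
The example above rejoins the lines into a single string with Guava's Joiner before parsing the JSON. As a rough JDK-only alternative (assuming Java 8 and UTF-8 content), the same string could be built with:

    String jsonContent = String.join("\n",
            Files.readAllLines(Paths.get(filePath), StandardCharsets.UTF_8));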

From source file:com.nsdr.json.JsonPathTest.java

@Test
public void hello() throws URISyntaxException, IOException {
    Path path = Paths.get(getClass().getResource("/europeana-oai.json").toURI());
    List<String> lines = Files.readAllLines(path, Charset.defaultCharset());

    List<JsonBranch> paths = new ArrayList<>();
    paths.add(new JsonBranch("@about", "$.['edm:ProvidedCHO'][0]['@about']", JsonBranch.Category.MANDATORY));
    paths.add(new JsonBranch("Proxy/dc:title",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:title']",
            JsonBranch.Category.MANDATORY, JsonBranch.Category.DESCRIPTIVENESS,
            JsonBranch.Category.SEARCHABILITY, JsonBranch.Category.IDENTIFICATION,
            JsonBranch.Category.MULTILINGUALITY));
    paths.add(new JsonBranch("Proxy/dcterms:alternative",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dcterms:alternative']",
            JsonBranch.Category.DESCRIPTIVENESS, JsonBranch.Category.SEARCHABILITY,
            JsonBranch.Category.IDENTIFICATION, JsonBranch.Category.MULTILINGUALITY));
    paths.add(new JsonBranch("Proxy/dc:description",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:description']",
            JsonBranch.Category.MANDATORY, JsonBranch.Category.DESCRIPTIVENESS,
            JsonBranch.Category.SEARCHABILITY, JsonBranch.Category.CONTEXTUALIZATION,
            JsonBranch.Category.IDENTIFICATION, JsonBranch.Category.MULTILINGUALITY));
    paths.add(new JsonBranch("Proxy/dc:creator",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:creator']",
            JsonBranch.Category.DESCRIPTIVENESS, JsonBranch.Category.SEARCHABILITY,
            JsonBranch.Category.CONTEXTUALIZATION, JsonBranch.Category.BROWSING));
    paths.add(new JsonBranch("Proxy/dc:publisher",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:publisher']",
            JsonBranch.Category.SEARCHABILITY, JsonBranch.Category.REUSABILITY));
    paths.add(new JsonBranch("Proxy/dc:contributor",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:contributor']",
            JsonBranch.Category.SEARCHABILITY));
    paths.add(new JsonBranch("Proxy/dc:type",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:type']",
            JsonBranch.Category.MANDATORY, JsonBranch.Category.SEARCHABILITY,
            JsonBranch.Category.CONTEXTUALIZATION, JsonBranch.Category.IDENTIFICATION,
            JsonBranch.Category.BROWSING));
    paths.add(new JsonBranch("Proxy/dc:identifier",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:identifier']",
            JsonBranch.Category.IDENTIFICATION));
    paths.add(new JsonBranch("Proxy/dc:language",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:language']",
            JsonBranch.Category.DESCRIPTIVENESS, JsonBranch.Category.MULTILINGUALITY));
    paths.add(new JsonBranch("Proxy/dc:coverage",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:coverage']",
            JsonBranch.Category.MANDATORY, JsonBranch.Category.SEARCHABILITY,
            JsonBranch.Category.CONTEXTUALIZATION, JsonBranch.Category.BROWSING));
    paths.add(new JsonBranch("Proxy/dcterms:temporal",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dcterms:temporal']",
            JsonBranch.Category.SEARCHABILITY, JsonBranch.Category.CONTEXTUALIZATION,
            JsonBranch.Category.BROWSING));
    paths.add(new JsonBranch("Proxy/dcterms:spatial",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dcterms:spatial']",
            JsonBranch.Category.MANDATORY, JsonBranch.Category.SEARCHABILITY,
            JsonBranch.Category.CONTEXTUALIZATION, JsonBranch.Category.BROWSING));
    paths.add(new JsonBranch("Proxy/dc:subject",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:subject']",
            JsonBranch.Category.MANDATORY, JsonBranch.Category.DESCRIPTIVENESS,
            JsonBranch.Category.SEARCHABILITY, JsonBranch.Category.CONTEXTUALIZATION,
            JsonBranch.Category.MULTILINGUALITY));
    paths.add(new JsonBranch("Proxy/dc:date",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:date']",
            JsonBranch.Category.IDENTIFICATION, JsonBranch.Category.BROWSING, JsonBranch.Category.REUSABILITY));
    paths.add(new JsonBranch("Proxy/dcterms:created",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dcterms:created']",
            JsonBranch.Category.IDENTIFICATION, JsonBranch.Category.REUSABILITY));
    paths.add(new JsonBranch("Proxy/dcterms:issued",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dcterms:issued']",
            JsonBranch.Category.IDENTIFICATION, JsonBranch.Category.REUSABILITY));
    paths.add(new JsonBranch("Proxy/dcterms:extent",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dcterms:extent']",
            JsonBranch.Category.DESCRIPTIVENESS, JsonBranch.Category.REUSABILITY));
    paths.add(new JsonBranch("Proxy/dcterms:medium",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dcterms:medium']",
            JsonBranch.Category.DESCRIPTIVENESS, JsonBranch.Category.REUSABILITY));
    paths.add(new JsonBranch("Proxy/dcterms:provenance",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dcterms:provenance']",
            JsonBranch.Category.DESCRIPTIVENESS));
    paths.add(new JsonBranch("Proxy/dcterms:hasPart",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dcterms:hasPart']",
            JsonBranch.Category.SEARCHABILITY, JsonBranch.Category.CONTEXTUALIZATION,
            JsonBranch.Category.BROWSING));
    paths.add(new JsonBranch("Proxy/dcterms:isPartOf",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dcterms:isPartOf']",
            JsonBranch.Category.SEARCHABILITY, JsonBranch.Category.CONTEXTUALIZATION,
            JsonBranch.Category.BROWSING));
    paths.add(new JsonBranch("Proxy/dc:format",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:format']",
            JsonBranch.Category.DESCRIPTIVENESS, JsonBranch.Category.REUSABILITY));
    paths.add(new JsonBranch("Proxy/dc:source",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:source']",
            JsonBranch.Category.DESCRIPTIVENESS));
    paths.add(new JsonBranch("Proxy/dc:rights",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:rights']",
            JsonBranch.Category.REUSABILITY));
    paths.add(new JsonBranch("Proxy/dc:relation",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['dc:relation']",
            JsonBranch.Category.SEARCHABILITY, JsonBranch.Category.CONTEXTUALIZATION,
            JsonBranch.Category.BROWSING));
    paths.add(new JsonBranch("Proxy/edm:isNextInSequence",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['edm:isNextInSequence']",
            JsonBranch.Category.SEARCHABILITY, JsonBranch.Category.CONTEXTUALIZATION,
            JsonBranch.Category.BROWSING));
    paths.add(new JsonBranch("Proxy/edm:type",
            "$.['ore:Proxy'][?(@['edm:europeanaProxy'][0] == 'false')]['edm:type']",
            JsonBranch.Category.SEARCHABILITY, JsonBranch.Category.BROWSING));

    for (int i = 0; i < lines.size(); i++) {
        Object document = Configuration.defaultConfiguration().jsonProvider().parse(lines.get(i));
        double j = 0.0;
        Map<String, double[]> stat = new HashMap<>();
        stat.put("total", new double[] { 0.0, 0.0 });
        for (JsonBranch.Category category : JsonBranch.Category.values()) {
            stat.put(category.name(), new double[] { 0.0, 0.0 });
        }
        for (JsonBranch jp : paths) {
            Object value = JsonPath.read(document, jp.getJsonPath());
            increaseTotal(stat, jp.getCategories());
            if (value.getClass() == JSONArray.class) {
                if (!((JSONArray) value).isEmpty()) {
                    increaseInstance(stat, jp.getCategories());
                }
            } else if (value.getClass() == String.class) {
                if (StringUtils.isNotBlank((String) value)) {
                    increaseInstance(stat, jp.getCategories());
                }
            } else {
                System.err.println(jp.getLabel() + " value.getClass(): " + value.getClass());
                System.err.println(jp.getLabel() + ": " + value);
            }
        }
        for (String key : stat.keySet()) {
            System.err.println(
                    key + ": " + (stat.get(key)[1] == 0.0 ? 0.0 : (stat.get(key)[1] / stat.get(key)[0])));
        }
    }

}

From source file:com.bombardier.plugin.utils.FilePathUtils.java

/**
 * Reads test cases from a file, line by line.
 *
 * @param file
 *            the {@link Path} to the file
 * @return A {@link List} containing all test cases.
 * @throws IOException
 * @since 1.0
 */
public static List<String> readTextFileByLines(FilePath file) throws Exception {
    List<String> list = Files.readAllLines(Paths.get(file.absolutize().toURI()), StandardCharsets.UTF_8);
    list.removeIf(new Predicate<String>() {
        @Override
        public boolean test(String arg0) {
            // keep only non-blank lines: drop empty test case entries
            return StringUtils.isBlank(arg0);
        }
    });
    return list;
}

From source file:eu.smartfp7.foursquare.utils.Settings.java

protected Settings() {
    // We load the settings file and parse its JSON only once.
    try {
        JsonParser parser = new JsonParser();
        this.settings_json = parser
                .parse(StringUtils
                        .join(Files.readAllLines(Paths.get("etc/settings.json"), StandardCharsets.UTF_8), " "))
                .getAsJsonObject();
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:com.github.wellcomer.query3.core.Autocomplete.java

/**
 * Returns the lines of a file that match a regular expression.
 * Matching is case-insensitive and performed as a substring search.
 *
 * @param fileName the name of the file to read.
 * @param regex the regular expression to match against.
 * @return the matching lines, trimmed, limited to maxItems when a limit is set.
 */
public List<String> get(String fileName, String regex) throws IOException {

    List<String> out = new ArrayList<>();

    if (regex == null || regex.equals(""))
        return out;

    List<String> lines = Files.readAllLines(Paths.get(filePath, fileName), charset);

    regex = regex.toLowerCase(); // case-insensitive matching

    for (String line : lines) {
        if (line.toLowerCase().matches(".*" + regex + ".*")) {
            out.add(line.trim());
            if (maxItems == 0) // maxItems == 0 means no limit
                continue;
            if (out.size() >= maxItems)
                break;
        }
    }
    return out;
}

From source file:io.github.swagger2markup.MarkdownConverterTest.java

/**
 * Given a markdown document to search, this checks to see if the specified tables
 * have all of the expected fields listed.
 * Match is a "search", and not an "equals" match.
 *
 * @param doc           markdown document file to inspect
 * @param fieldsByTable map of table name (header) to field names expected
 *                      to be found in that table.
 * @throws IOException if the markdown document could not be read
 */
private static void verifyMarkdownContainsFieldsInTables(File doc, Map<String, Set<String>> fieldsByTable)
        throws IOException {
    //TODO: This method is too complex, split it up in smaller methods to increase readability
    final List<String> lines = Files.readAllLines(doc.toPath(), Charset.defaultCharset());
    final Map<String, Set<String>> fieldsLeftByTable = Maps.newHashMap();
    for (Map.Entry<String, Set<String>> entry : fieldsByTable.entrySet()) {
        fieldsLeftByTable.put(entry.getKey(), Sets.newHashSet(entry.getValue()));
    }
    String inTable = null;
    for (String line : lines) {
        // If we've found every field we care about, quit early
        if (fieldsLeftByTable.isEmpty()) {
            return;
        }

        // Transition to a new table if we encounter a header
        final String currentHeader = getTableHeader(line);
        if (inTable == null || currentHeader != null) {
            inTable = currentHeader;
        }

        // If we're in a table that we care about, inspect this potential table row
        if (inTable != null && fieldsLeftByTable.containsKey(inTable)) {
            // If we're still in a table, read the row and check for the field name
            //  NOTE: If there was at least one pipe, then there's at least 2 fields
            String[] parts = line.split("\\|");
            if (parts.length > 1) {
                final String fieldName = parts[1];
                final Set<String> fieldsLeft = fieldsLeftByTable.get(inTable);
                // Mark the field as found and if this table has no more fields to find,
                //  remove it from the "fieldsLeftByTable" map to mark the table as done
                Iterator<String> fieldIt = fieldsLeft.iterator();
                while (fieldIt.hasNext()) {
                    String fieldLeft = fieldIt.next();
                    if (fieldName.contains(fieldLeft))
                        fieldIt.remove();
                }
                if (fieldsLeft.isEmpty()) {
                    fieldsLeftByTable.remove(inTable);
                }
            }
        }
    }

    // After reading the file, fail if any expected fields were not found
    if (!fieldsLeftByTable.isEmpty()) {
        fail(String.format("Markdown file '%s' did not contain expected fields (by table): %s", doc,
                fieldsLeftByTable));
    }
}

From source file:glass.Plugins.ServerLogPlugin.java

/**
 * @param path the path of the log file to process
 * @return the output folder, or an empty string if processing failed
 */
@Override
public String processFile(String path) {

    //code to create/cleanup work directory 
    String output_folder = "work/ServerLogPlugin/";
    cleanWorkDir(output_folder);

    try {
        FileUtils.copyFile(new File(GUI.toolPath + "configFiles\\ServerLogConfigFiles\\temperature_config.txt"),
                new File(output_folder + "temperature_config.txt"));//copy config files to dest
        FileUtils.copyFile(new File(GUI.toolPath + "configFiles\\ServerLogConfigFiles\\cpu_config.txt"),
                new File(output_folder + "cpu_config.txt"));//copy config files to dest
        FileUtils.copyFile(new File(GUI.toolPath + "configFiles\\ServerLogConfigFiles\\power_config.txt"),
                new File(output_folder + "power_config.txt"));//copy config files to dest
        FileUtils.copyFile(new File(GUI.toolPath + "configFiles\\ServerLogConfigFiles\\dma_config.txt"),
                new File(output_folder + "dma_config.txt"));//copy config files to dest

        //FileUtils.
        List<String> lines = Files.readAllLines(Paths.get(path), Charset.defaultCharset());
        double xValues[] = new double[lines.size()];

        double yValues1[] = new double[lines.size()];
        double yValues2[] = new double[lines.size()];
        double yValues3[] = new double[lines.size()];
        double yValues4[] = new double[lines.size()];

        ArrayList<String> lines_out1 = new ArrayList<>();
        ArrayList<String> lines_out2 = new ArrayList<>();
        ArrayList<String> lines_out3 = new ArrayList<>();
        ArrayList<String> lines_out4 = new ArrayList<>();

        for (int i = 1; i < lines.size(); i++) {
            String[] lineColumns = lines.get(i).split(",");
            xValues[i] = i;

            yValues1[i] = Double.parseDouble(lineColumns[49]);
            yValues2[i] = Double.parseDouble(lineColumns[1]);
            yValues3[i] = Double.parseDouble(lineColumns[48]);
            yValues4[i] = Double.parseDouble(lineColumns[11]);

            lines_out1.add("" + xValues[i] + "," + yValues1[i]);
            lines_out2.add("" + xValues[i] + "," + yValues2[i]);
            lines_out3.add("" + xValues[i] + "," + yValues3[i]);
            lines_out4.add("" + xValues[i] + "," + yValues4[i]);
        }

        Files.write(Paths.get(output_folder + "temperature" + ".txt"), lines_out1, Charset.defaultCharset());
        Files.write(Paths.get(output_folder + "cpu" + ".txt"), lines_out2, Charset.defaultCharset());
        Files.write(Paths.get(output_folder + "power" + ".txt"), lines_out3, Charset.defaultCharset());
        Files.write(Paths.get(output_folder + "dma" + ".txt"), lines_out4, Charset.defaultCharset());

        return output_folder;
    } catch (Exception ex) {
        Logger.getLogger(ServerLogPlugin.class.getName()).log(Level.SEVERE, null, ex);
        ex.printStackTrace();
        return "";
    }
}

From source file:com.kakao.hbase.manager.command.ExportKeysTest.java

@Test
public void testRun() throws Exception {
    String outputFile = "exportkeys_test.keys";
    try {
        byte[] splitPoint = "splitpoint".getBytes();

        splitTable(splitPoint);

        String[] argsParam = { "zookeeper", tableName, outputFile };
        Args args = new ManagerArgs(argsParam);
        assertEquals("zookeeper", args.getZookeeperQuorum());
        ExportKeys command = new ExportKeys(admin, args);
        waitForSplitting(2);
        command.run();

        int i = 0;
        List<Triple<String, String, String>> results = new ArrayList<>();
        for (String keys : Files.readAllLines(Paths.get(outputFile), Constant.CHARSET)) {
            i++;

            String[] split = keys.split(ExportKeys.DELIMITER);
            results.add(new ImmutableTriple<>(split[0], split[1], split[2]));
        }
        assertEquals(2, i);

        assertEquals(tableName, results.get(0).getLeft().trim());
        assertArrayEquals("".getBytes(), Bytes.toBytesBinary(results.get(0).getMiddle().trim()));
        assertArrayEquals(splitPoint, Bytes.toBytesBinary(results.get(0).getRight().trim()));
        assertEquals(tableName, results.get(1).getLeft().trim());
        assertArrayEquals(splitPoint, Bytes.toBytesBinary(results.get(1).getMiddle().trim()));
        assertArrayEquals("".getBytes(), Bytes.toBytesBinary(results.get(1).getRight().trim()));

        // split once more
        byte[] splitPoint2 = Bytes.toBytes(100L);

        splitTable(splitPoint2);

        command.run();

        i = 0;
        results.clear();
        for (String keys : Files.readAllLines(Paths.get(outputFile), Constant.CHARSET)) {
            i++;

            String[] split = keys.split(ExportKeys.DELIMITER);
            results.add(new ImmutableTriple<>(split[0], split[1], split[2]));
        }
        assertEquals(3, i);

        assertEquals(tableName, results.get(0).getLeft().trim());
        assertArrayEquals("".getBytes(), Bytes.toBytesBinary(results.get(0).getMiddle().trim()));
        assertArrayEquals(splitPoint2, Bytes.toBytesBinary(results.get(0).getRight().trim()));
        assertEquals(tableName, results.get(1).getLeft().trim());
        assertArrayEquals(splitPoint2, Bytes.toBytesBinary(results.get(1).getMiddle().trim()));
        assertArrayEquals(splitPoint, Bytes.toBytesBinary(results.get(1).getRight().trim()));
        assertEquals(tableName, results.get(2).getLeft().trim());
        assertArrayEquals(splitPoint, Bytes.toBytesBinary(results.get(2).getMiddle().trim()));
        assertArrayEquals("".getBytes(), Bytes.toBytesBinary(results.get(2).getRight().trim()));
    } finally {
        Files.delete(Paths.get(outputFile));
    }
}

From source file:pl.nask.hsn2.unicorn.commands.framework.UploadWorkflowCommand.java

private String workflowContent() {
    try {
        List<String> lines = Files.readAllLines(Paths.get(workflowFilePath), StandardCharsets.UTF_8);
        StringBuilder builder = new StringBuilder();
        for (String line : lines) {
            builder.append(line);
            builder.append("\n");
        }
        return builder.toString();
    } catch (IOException e) {
        throw new IllegalStateException("Error reading workflow file: " + e.getMessage());
    }
}

From source file:org.graylog2.plugin.system.NodeId.java

private String read() throws IOException {
    final List<String> lines = Files.readAllLines(Paths.get(filename), StandardCharsets.UTF_8);

    return lines.size() > 0 ? lines.get(0) : "";
}