List of usage examples for java.nio.file Files newBufferedReader
public static BufferedReader newBufferedReader(Path path) throws IOException
From source file:javalibs.CSVExtractor.java
/**
 * Read a CSV file and return a list of records representing each row in the CSV.
 * NOTE: This does not handle anything but plain CSV files with default formatting.
 *
 * @param csvPath The path to the CSV file
 * @return The list of CSVRecord objects, or null if the file could not be read
 */
public static List<CSVRecord> getCSVRecords(String csvPath) {
    // BUG FIX: the original never closed the parser (or its underlying reader),
    // leaking a file handle per call; try-with-resources guarantees cleanup.
    try (CSVParser parser = new CSVParser(Files.newBufferedReader(Paths.get(csvPath)),
            CSVFormat.DEFAULT.withHeader().withIgnoreHeaderCase().withTrim())) {
        return parser.getRecords();
    } catch (IOException e) {
        // Preserve original contract: log through TSL and return null on failure
        TSL.get().exception(e);
        return null;
    }
}
From source file:org.thingsboard.server.service.install.cql.CassandraDbHelper.java
/**
 * Appends {@code toAppend} as an extra trailing column to every record of the
 * given CSV dump file, rewriting the file in place through a temporary file.
 *
 * @param targetDumpFile CSV dump file to rewrite
 * @param toAppend       value appended as the last column of each record
 * @throws Exception if the file cannot be read, written, or replaced
 */
public static void appendToEndOfLine(Path targetDumpFile, String toAppend) throws Exception {
    Path tmp = Files.createTempFile(null, null);
    try {
        try (CSVParser csvParser = new CSVParser(Files.newBufferedReader(targetDumpFile), CSV_DUMP_FORMAT);
                CSVPrinter csvPrinter = new CSVPrinter(Files.newBufferedWriter(tmp), CSV_DUMP_FORMAT)) {
            csvParser.forEach(record -> {
                List<String> newRecord = new ArrayList<>();
                record.forEach(newRecord::add);
                newRecord.add(toAppend);
                try {
                    csvPrinter.printRecord(newRecord);
                } catch (IOException e) {
                    // forEach lambdas cannot throw checked exceptions, so wrap
                    throw new RuntimeException("Error appending to EOL", e);
                }
            });
        }
        // Replace the original with the rewritten copy
        Files.move(tmp, targetDumpFile, StandardCopyOption.REPLACE_EXISTING);
    } finally {
        // BUG FIX: the original leaked the temp file whenever parsing, printing,
        // or the move failed; after a successful move this is a no-op.
        Files.deleteIfExists(tmp);
    }
}
From source file:org.tallison.cc.WReGetter.java
/**
 * Entry logic: parses CLI args (thread count, input list, output root), starts a
 * queue-filler thread plus a pool of WGetter workers, and waits until every
 * worker has completed before exiting the JVM.
 *
 * @param args [0] number of worker threads, [1] path to the digest/URL list,
 *             [2] root output directory
 * @throws IOException if the input list cannot be opened
 */
private void execute(String[] args) throws IOException {
    if (args.length != 3) {
        usage();
        System.exit(1);
    }
    if (args[0].contains("-h")) {
        usage();
        System.exit(0);
    }
    int numThreads = Integer.parseInt(args[0]);
    // NOTE: ownership of this reader passes to the QueueFiller thread, which is
    // why it is not closed here.
    BufferedReader r = Files.newBufferedReader(Paths.get(args[1]));
    ArrayBlockingQueue<DigestURLPair> queue = new ArrayBlockingQueue<DigestURLPair>(1000);
    QueueFiller filler = new QueueFiller(r, queue, numThreads);
    new Thread(filler).start();
    rootDir = Paths.get(args[2]);
    System.out.println("creating thread pool");
    ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
    ExecutorCompletionService<Integer> executorCompletionService = new ExecutorCompletionService<Integer>(
            executorService);
    System.out.println("about to start");
    for (int i = 0; i < numThreads; i++) {
        System.out.println("submitted " + i);
        executorCompletionService.submit(new WGetter(queue));
    }
    // Wait for every worker to report completion
    int completed = 0;
    while (completed < numThreads) {
        try {
            Future<Integer> future = executorCompletionService.poll(1, TimeUnit.SECONDS);
            if (future != null) {
                completed++;
            }
        } catch (InterruptedException e) {
            // BUG FIX: the original swallowed the interrupt; restore the
            // interrupted status so callers/shutdown logic can observe it.
            Thread.currentThread().interrupt();
        }
    }
    // shutdownNow() alone suffices; the original's preceding shutdown() was redundant
    executorService.shutdownNow();
    System.exit(0);
}
From source file:de.sanandrew.mods.turretmod.registry.assembly.TurretAssemblyRecipes.java
/**
 * Parses {@code file} as JSON and hands the resulting object to {@code callback}.
 * Files without a "json" extension, or whose name begins with an underscore, are
 * skipped and reported as successful.
 *
 * @param file     candidate recipe file
 * @param callback consumer applied to the parsed, non-null JSON object
 * @return true when the file was skipped or processed without error; false on
 *         a parse or I/O failure (which is logged)
 */
private static boolean processJson(Path file, Ex2Function<JsonObject, Boolean, JsonParseException, IOException> callback) {
    String pathStr = file.toString();
    boolean skipByExt = !"json".equals(FilenameUtils.getExtension(pathStr));
    boolean skipByName = FilenameUtils.getName(pathStr).startsWith("_");
    if (skipByExt || skipByName) {
        return true;
    }
    try (BufferedReader reader = Files.newBufferedReader(file)) {
        JsonObject json = JsonUtils.fromJson(reader, JsonObject.class);
        if (json == null || json.isJsonNull()) {
            throw new JsonSyntaxException("Json cannot be null");
        }
        callback.apply(json);
        return true;
    } catch (JsonParseException e) {
        TmrConstants.LOG.log(Level.ERROR, String.format("Parsing error loading assembly table recipe from %s", file), e);
    } catch (IOException e) {
        TmrConstants.LOG.log(Level.ERROR, String.format("Couldn't read recipe from %s", file), e);
    }
    return false;
}
From source file:jease.cmf.service.Backup.java
/**
 * Restore node-graph from file dump.
 *
 * @param dumpFile zipped XML dump file; may be null
 * @return the restored root node (with its id derived from the file name), or
 *         null when {@code dumpFile} is null
 * @throws RuntimeException wrapping any IOException raised while reading
 */
public Node restore(File dumpFile) {
    if (dumpFile == null) {
        return null;
    }
    // BUG FIX: the original only closed the reader on the success path, leaking
    // it whenever fromXML or setId threw; try-with-resources always closes it.
    try (Reader reader = Files.newBufferedReader(Zipfiles.unzip(dumpFile).toPath())) {
        Node node = fromXML(reader);
        node.setId(Filenames.asId(dumpFile.getName()).replace(".xml.zip", ""));
        return node;
    } catch (IOException e) {
        throw new RuntimeException(e.getMessage(), e);
    }
}
From source file:org.omegat.gui.scripting.ScriptingTest.java
public void testCompileScripts() throws Exception { File scriptDir = new File(StaticUtils.installDir(), ScriptingWindow.DEFAULT_SCRIPTS_DIR); assertTrue(scriptDir.isDirectory()); for (File f : scriptDir.listFiles()) { if (!f.isFile()) { continue; }//from www .j a v a 2s .co m String ext = FilenameUtils.getExtension(f.getName()); ScriptEngine engine = ScriptRunner.MANAGER.getEngineByExtension(ext); if (engine instanceof Compilable) { Compilable cEngine = (Compilable) engine; try (BufferedReader br = Files.newBufferedReader(f.toPath())) { assertNotNull(cEngine.compile(br)); } } } }
From source file:org.thingsboard.server.service.install.sql.SqlDbHelper.java
/**
 * Streams a CSV dump file into the given table via a prepared INSERT, one
 * record per row. Failing rows are logged and skipped (best-effort load).
 *
 * @param conn        open JDBC connection
 * @param tableName   destination table
 * @param columns     column names, in insert order
 * @param sourceFile  CSV dump to load
 * @param parseHeader when true the first record is consumed as the header;
 *                    otherwise {@code columns} is imposed as the header
 * @throws Exception if the statement cannot be prepared or the file read
 */
public static void loadTable(Connection conn, String tableName, String[] columns, Path sourceFile,
        boolean parseHeader) throws Exception {
    CSVFormat csvFormat = parseHeader
            ? CSV_DUMP_FORMAT.withFirstRecordAsHeader()
            : CSV_DUMP_FORMAT.withHeader(columns);
    // Resources close in reverse order: parser first, then statement —
    // identical to the original nested try blocks.
    try (PreparedStatement prepared = conn.prepareStatement(createInsertStatement(tableName, columns));
            CSVParser csvParser = new CSVParser(Files.newBufferedReader(sourceFile), csvFormat)) {
        csvParser.forEach(record -> {
            try {
                for (int col = 0; col < columns.length; col++) {
                    setColumnValue(col, columns[col], record, prepared);
                }
                prepared.execute();
            } catch (SQLException e) {
                // Deliberate best-effort behavior: log and continue
                log.error("Unable to load table record!", e);
            }
        });
    }
}
From source file:sadl.utils.IoUtils.java
/**
 * Reads a combined train/test file and parses the two halves concurrently.
 * Lines before the separator are piped to a "train" worker and lines after it
 * to a "test" worker; each worker applies {@code f} to its piped reader.
 * Closing a PipedWriter is what signals end-of-input to the matching reader,
 * so the close ordering below is load-bearing.
 *
 * @param trainTestFile file containing train data, a separator line starting
 *                      with SmacDataGenerator.TRAIN_TEST_SEP, then test data
 * @param f             parser applied to each half's Reader
 * @return pair of (train, test) inputs, or null if any exception occurred
 */
public static Pair<TimedInput, TimedInput> readTrainTestFile(Path trainTestFile, Function<Reader, TimedInput> f) {
    try (BufferedReader br = Files.newBufferedReader(trainTestFile);
            PipedWriter trainWriter = new PipedWriter();
            PipedReader trainReader = new PipedReader(trainWriter);
            PipedWriter testWriter = new PipedWriter();
            PipedReader testReader = new PipedReader(testWriter)) {
        String line = "";
        // Two workers: one consumes the train pipe, one the test pipe. They must
        // run concurrently with the writing loop or the pipes would fill and block.
        final ExecutorService ex = Executors.newFixedThreadPool(2);
        final Future<TimedInput> trainWorker = ex.submit(() -> f.apply(trainReader));
        final Future<TimedInput> testWorker = ex.submit(() -> f.apply(testReader));
        ex.shutdown();
        boolean writeTrain = true;
        while ((line = br.readLine()) != null) {
            if (line.startsWith(SmacDataGenerator.TRAIN_TEST_SEP)) {
                // Switch halves; closing trainWriter delivers EOF to trainReader
                writeTrain = false;
                trainWriter.close();
                continue;
            }
            if (writeTrain) {
                trainWriter.write(line);
                trainWriter.write('\n');
            } else {
                testWriter.write(line);
                testWriter.write('\n');
            }
        }
        // EOF for the test worker
        testWriter.close();
        // NOTE(review): redundant — shutdown() was already called above; harmless.
        ex.shutdown();
        if (writeTrain) {
            // Separator never seen: unblock the train worker, cancel both, fail
            trainWriter.close();
            ex.shutdownNow();
            throw new IOException("The provided file " + trainTestFile + " does not contain the separator "
                    + SmacDataGenerator.TRAIN_TEST_SEP);
        }
        final Pair<TimedInput, TimedInput> result = Pair.of(trainWorker.get(), testWorker.get());
        return result;
    } catch (final IOException | InterruptedException | ExecutionException e) {
        logger.error("Unexpected exception!", e);
    }
    // Reached only after a logged exception
    return null;
}
From source file:internal.product.ProductImportResource.java
/**
 * Bulk-loads product lines from a file on the server's filesystem into the
 * graph database, committing in batches of BATCH_SIZE. Lines that fail to
 * parse are retried by concatenating up to 20 following lines (to recover
 * records that were split across physical lines).
 *
 * @param path file to import (server-local path, from the "path" header)
 * @param skip number of leading lines to skip
 * @param max  maximum number of lines to process; 0 means unlimited
 * @return 200 with a JSON-ish summary on success, 500 with an error body on failure
 */
@GET
@Produces(MediaType.TEXT_PLAIN)
@Path("/file")
public Response file(@HeaderParam("path") String path, @HeaderParam("skip") long skip,
        @HeaderParam("max") long max) {
    System.out.println("Product Load! path:" + path + " skip:" + skip + " max:" + max);
    Summary summary = new Summary();
    if (max == 0)
        max = Long.MAX_VALUE;
    try (BufferedReader reader = Files.newBufferedReader(Paths.get(path))) {
        // Discard the first `skip` lines
        for (int i = 0; i < skip; i++) {
            reader.readLine();
        }
        long bufferSize = 0;
        Transaction tx = db.beginTx();
        try {
            for (int i = 0; i < max; i++) {
                // Commit and open a fresh transaction once the batch is full
                if (bufferSize >= BATCH_SIZE) {
                    tx.success();
                    tx.close();
                    tx = db.beginTx();
                    summary.batches++;
                    bufferSize = 0;
                }
                String line = reader.readLine();
                if (line == null) {
                    summary.eof = true;
                    break;
                }
                // NOTE(review): line.toString() on a String is a no-op
                if (!processLine(line.toString(), summary, accountLRU, merchantLRU, brandLRU)) {
                    // Parse failed: try appending subsequent lines in case the
                    // record was split across physical lines
                    StringBuilder extendedLine = new StringBuilder(line);
                    boolean success = false;
                    for (int retry = 0; retry < 20; retry++) {
                        summary.failed++;
                        if (i == max) {
                            break;
                        }
                        line = reader.readLine();
                        i++;
                        if (line == null) {
                            summary.eof = true;
                            break;
                        }
                        // The next line may itself be a complete record
                        if (processLine(line, summary, accountLRU, merchantLRU, brandLRU)) {
                            summary.processed++;
                            bufferSize++;
                            break;
                        }
                        // Otherwise treat it as a continuation of the failed record
                        extendedLine.append(line);
                        if (processLine(extendedLine.toString(), summary, accountLRU, merchantLRU, brandLRU)) {
                            summary.processed++;
                            bufferSize++;
                            // Undo the failure counts accrued during this recovery
                            summary.failed -= retry + 1;
                            summary.multi_lines++;
                            // System.out.println("Combined lines:"+(retry+1));
                            success = true;
                            break;
                        }
                    }
                    if (!success) {
                        //System.out.println("Failed on:"+extendedLine.toString());
                    }
                } else {
                    summary.processed++;
                    bufferSize++;
                }
                // Periodic progress output
                if (i % 10000 == 0) {
                    System.out.println(summary);
                }
            }
        } catch (Exception ex) {
            summary.batches++;
            tx.success();
            tx.close();
            ex.printStackTrace();
            System.out.println("Failed:" + summary);
            return Response.status(Status.INTERNAL_SERVER_ERROR)
                    .entity(UTF8.encode("{\"error\":\"" + ex.getMessage() + "\"}")).build();
        } finally {
            // NOTE(review): on the exception path above, tx was already closed
            // before this finally runs, so success()/close() here operate on a
            // closed transaction — verify against the Neo4j Transaction contract.
            tx.success();
            summary.batches++;
            tx.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
        System.out.println("Failed:" + summary);
        return Response.status(Status.INTERNAL_SERVER_ERROR)
                .entity(UTF8.encode("{\"error\":\"" + e.getMessage() + "\"}")).build();
    }
    System.out.println("Done:" + summary);
    return Response
            .status(Status.OK).entity(UTF8.encode("{\"" + path + "\":\"OK\" , \"processed\":" + summary.processed
                    + ", \"failed\":" + summary.failed + ", \"eof\":" + summary.eof + " }"))
            .build();
}
From source file:org.codice.ddf.security.sts.claims.property.UsersAttributesFileClaimsHandler.java
/**
 * Loads the users attributes file, validates it as JSON, and converts its
 * contents into the per-user attribute map used by this claims handler.
 *
 * @throws IllegalStateException when the users attributes file cannot be read or when the
 *     contents do not meet assumptions. See the documentation section "Updating System Users"
 *     for details about the contents of the users attribute file.
 */
public void init() {
    Path path = Paths.get(usersAttributesFileLocation);
    if (!path.isAbsolute()) {
        // Relative locations are resolved against the DDF installation root
        path = DDF_HOME_PATH.resolve(path);
    }
    final Map<String, Map<String, Object>> usersAttributesFileContents;
    final Type type = new TypeToken<Map<String, Map<String, Object>>>() {
    }.getType();
    try (final Reader reader = Files.newBufferedReader(path)) {
        usersAttributesFileContents = new Gson().fromJson(reader, type);
    } catch (NoSuchFileException e) {
        final String errorMessage = createErrorMessage("Cannot find file");
        LOGGER.error(errorMessage, e);
        throw new IllegalStateException(e);
    } catch (JsonIOException | IOException e) {
        final String errorMessage = createErrorMessage("Error reading file");
        LOGGER.error(errorMessage, e);
        throw new IllegalStateException(e);
    } catch (JsonSyntaxException e) {
        // BUG FIX: original message was garbled
        // ("File does not contain expected the expected json format")
        final String errorMessage = createErrorMessage(
                "File does not contain the expected json format");
        LOGGER.error(errorMessage, e);
        throw new IllegalStateException(e);
    }
    // Normalize Map<String, Object> attribute values into Set<String> values
    final Map<String, Map<String, Set<String>>> newJson = new HashMap<>();
    for (Map.Entry<String, Map<String, Object>> userToAttributesMap : usersAttributesFileContents.entrySet()) {
        final Map<String, Set<String>> attributes = new HashMap<>();
        for (Map.Entry<String, Object> attributesToValuesMap : userToAttributesMap.getValue().entrySet()) {
            attributes.put(attributesToValuesMap.getKey(),
                    convertToSetOfStrings(attributesToValuesMap.getValue()));
        }
        newJson.put(userToAttributesMap.getKey(), attributes);
    }
    json = newJson;
    setSupportedClaimTypes();
    setSystemHighUserAttributes();
}