Usage examples for java.nio.file.Files.readAllLines
Signature: public static List<String> readAllLines(Path path) throws IOException
From source file:org.apdplat.superword.extract.ChineseSynonymAntonymExtractor.java
public static void main(String[] args) throws Exception { //parseSynonymAntonym(""); //parseSynonymAntonym("???"); //parseSynonymAntonym(Arrays.asList("", "???")); //System.out.println(getContent("http://www.iciba.com/%E7%83%AD%E7%88%B1")); parseSynonymAntonym(Files.readAllLines(Paths.get("src/main/resources/dic.txt")).stream() .sorted((a, b) -> new Integer(a.length()).compareTo(b.length())).collect(Collectors.toList())); }
From source file:org.loklak.susi.SusiLog.java
public static ArrayList<SusiInteraction> readLog(final File logdump, int count) throws IOException { List<String> lines = Files.readAllLines(logdump.toPath()); ArrayList<SusiInteraction> conversation = new ArrayList<>(); for (int i = lines.size() - 1; i >= 0; i--) { String line = lines.get(i); if (line.length() == 0) continue; SusiInteraction si = new SusiInteraction(new JSONObject(line)); conversation.add(si);//from w ww.ja v a 2 s. c o m if (conversation.size() >= count) break; } return conversation; }
From source file:com.wx3.galacdecks.Bootstrap.java
private void importPlayValidators(GameDatastore datastore, String path) throws IOException { Files.walk(Paths.get(path)).forEach(filePath -> { if (Files.isRegularFile(filePath)) { try { if (FilenameUtils.getExtension(filePath.getFileName().toString()).toLowerCase().equals("js")) { String id = FilenameUtils.removeExtension(filePath.getFileName().toString()); List<String> lines = Files.readAllLines(filePath); if (lines.size() < 3) { throw new RuntimeException( "Script file should have at least 2 lines: description and code."); }/*from w w w.j av a 2s . c o m*/ String description = lines.get(0).substring(2).trim(); String script = String.join("\n", lines); ValidatorScript validator = ValidatorScript.createValidator(id, script, description); //datastore.createValidator(validator); playValidatorCache.put(id, validator); logger.info("Imported play validator " + id); } } catch (Exception e) { throw new RuntimeException("Failed to parse " + filePath + ": " + e.getMessage()); } } }); }
From source file:sh.isaac.api.util.DownloadUnzipTask.java
/**
 * Downloads the configured URL, verifies its SHA1 checksum against a sidecar
 * {@code <url>.sha1} file (best-effort: verification is skipped if that file
 * cannot be fetched), and optionally unzips the result into the target folder.
 *
 * @return the target folder when unzipping, otherwise the downloaded file
 * @throws Exception on cancellation, bad checksum (when configured to fail),
 *         or when the downloaded file is not a valid zip archive
 * @see javafx.concurrent.Task#call()
 */
@Override
protected File call() throws Exception {
    final File dataFile = download(this.url);
    String calculatedSha1Value = null;
    String expectedSha1Value = null;
    ;
    // Best-effort checksum verification: any failure here (no .sha1 file,
    // checksum task error) is logged and the download is accepted as-is,
    // because calculatedSha1Value stays null and the comparison below is skipped.
    try {
        LOG.debug("Attempting to get .sha1 file");
        final File sha1File = download(new URL(this.url.toString() + ".sha1"));
        // The sidecar file's first line is the expected hex digest.
        expectedSha1Value = Files.readAllLines(sha1File.toPath()).get(0);
        final Task<String> calculateTask = ChecksumGenerator.calculateChecksum("SHA1", dataFile);
        // Forward the checksum task's message/progress to this task's observers.
        calculateTask.messageProperty().addListener(
                (ChangeListener<String>) (observable, oldValue, newValue) -> updateMessage(newValue));
        calculateTask.progressProperty().addListener((ChangeListener<Number>) (observable, oldValue,
                newValue) -> updateProgress(calculateTask.getProgress(), calculateTask.getTotalWork()));
        WorkExecutors.get().getExecutor().execute(calculateTask);
        // Blocks until the checksum is computed.
        calculatedSha1Value = calculateTask.get();
        sha1File.delete();
    } catch (final Exception e1) {
        LOG.debug("Failed to get .sha1 file", e1);
    }
    if ((calculatedSha1Value != null) && !calculatedSha1Value.equals(expectedSha1Value)) {
        // NOTE(review): field name is spelled "Cheksum" (missing 's') at its declaration.
        if (this.failOnBadCheksum) {
            throw new RuntimeException("Checksum of downloaded file '" + this.url.toString()
                    + "' does not match the expected value!");
        } else {
            LOG.warn("Checksum of downloaded file '" + this.url.toString()
                    + "' does not match the expected value!");
        }
    }
    if (this.cancel) {
        LOG.debug("Download cancelled");
        throw new Exception("Cancelled!");
    }
    if (this.unzip) {
        updateTitle("Unzipping");
        try {
            final ZipFile zipFile = new ZipFile(dataFile);
            // Extraction runs on a background thread; we poll its monitor below.
            zipFile.setRunInThread(true);
            zipFile.extractAll(this.targetFolder.getAbsolutePath());
            while (zipFile.getProgressMonitor().getState() == ProgressMonitor.STATE_BUSY) {
                if (this.cancel) {
                    zipFile.getProgressMonitor().cancelAllTasks();
                    LOG.debug("Download cancelled");
                    throw new Exception("Cancelled!");
                }
                updateProgress(zipFile.getProgressMonitor().getPercentDone(), 100);
                updateMessage("Unzipping " + dataFile.getName() + " at "
                        + zipFile.getProgressMonitor().getPercentDone() + "%");
                try {
                    // TODO see if there is an API where I don't have to poll for completion
                    Thread.sleep(25);
                } catch (final InterruptedException e) {
                    // noop
                }
            }
            LOG.debug("Unzip complete");
        } catch (final Exception e) {
            LOG.error("error unzipping", e);
            throw new Exception("The downloaded file doesn't appear to be a zip file");
        } finally {
            // The archive is always removed after extraction, successful or not.
            dataFile.delete();
        }
        return this.targetFolder;
    } else {
        return dataFile;
    }
}
From source file:org.opencb.opencga.storage.app.cli.client.executors.VariantQueryCommandUtils.java
/**
 * Builds a variant {@code Query} from the CLI options: region (inline or from
 * a GFF-like file), id/gene/type filters, study selection, file/sample/genotype
 * filters, annotation filters (consequence type, population frequency,
 * conservation, protein substitution), stats filters, and finally validates
 * that single-study output formats are given exactly one returned study.
 *
 * @param queryVariantsOptions parsed generic variant query CLI options
 * @param studiesFilter comma/semicolon separated studies filter (may be negated with '!')
 * @param allStudyNames all known study names, used for single-study validation
 * @param count whether this is a count query (skips returned-study validation)
 * @param of the requested output format
 * @return the assembled query
 * @throws Exception if a region file cannot be read, a stat filter is malformed,
 *         or a single-study output format is given multiple studies
 */
protected static Query parseGenericVariantQuery(
        StorageVariantCommandOptions.GenericVariantQueryOptions queryVariantsOptions, String studiesFilter,
        Collection<String> allStudyNames, boolean count, VariantWriterFactory.VariantOutputFormat of)
        throws Exception {
    Query query = new Query();
    /*
     * Parse Variant parameters
     */
    if (queryVariantsOptions.region != null && !queryVariantsOptions.region.isEmpty()) {
        query.put(REGION.key(), queryVariantsOptions.region);
    } else if (queryVariantsOptions.regionFile != null && !queryVariantsOptions.regionFile.isEmpty()) {
        Path gffPath = Paths.get(queryVariantsOptions.regionFile);
        FileUtils.checkFile(gffPath);
        // Each line is tab-separated; columns 0 (chromosome, "chr" prefix stripped),
        // 3 (start) and 4 (end) form a "chrom:start-end" region string.
        // NOTE(review): the `new String(...)` wrapper is redundant — the concatenation
        // already yields a String.
        String regionsFromFile = Files.readAllLines(gffPath).stream().map(line -> {
            String[] array = line.split("\t");
            return new String(array[0].replace("chr", "") + ":" + array[3] + "-" + array[4]);
        }).collect(Collectors.joining(","));
        query.put(REGION.key(), regionsFromFile);
    }
    addParam(query, ID, queryVariantsOptions.id);
    addParam(query, GENE, queryVariantsOptions.gene);
    addParam(query, TYPE, queryVariantsOptions.type);
    List<String> studies = new LinkedList<>();
    if (StringUtils.isNotEmpty(studiesFilter)) {
        query.put(STUDIES.key(), studiesFilter);
        // Only non-negated studies count as "queried" studies.
        for (String study : studiesFilter.split(",|;")) {
            if (!study.startsWith("!")) {
                studies.add(study);
            }
        }
    }
    // If the studies to be returned is empty then we return the studies being queried
    if (queryVariantsOptions.returnStudy != null && !queryVariantsOptions.returnStudy.isEmpty()) {
        //            query.put(RETURNED_STUDIES.key(), Arrays.asList(queryVariantsOptions.returnStudy.split(",")));
        List<String> list = new ArrayList<>();
        Collections.addAll(list, queryVariantsOptions.returnStudy.split(","));
        query.put(RETURNED_STUDIES.key(), list);
    } else {
        if (!studies.isEmpty()) {
            query.put(RETURNED_STUDIES.key(), studies);
        }
    }
    addParam(query, FILES, queryVariantsOptions.file);
    addParam(query, RETURNED_FILES, queryVariantsOptions.returnFile);
    addParam(query, FILTER, queryVariantsOptions.filter);
    addParam(query, GENOTYPE, queryVariantsOptions.sampleGenotype);
    addParam(query, SAMPLES, queryVariantsOptions.samples);
    addParam(query, RETURNED_SAMPLES, queryVariantsOptions.returnSample);
    addParam(query, UNKNOWN_GENOTYPE, queryVariantsOptions.unknownGenotype);
    /**
     * Annotation parameters
     */
    addParam(query, ANNOT_CONSEQUENCE_TYPE, queryVariantsOptions.consequenceType);
    addParam(query, ANNOT_BIOTYPE, queryVariantsOptions.geneBiotype);
    addParam(query, ANNOT_POPULATION_ALTERNATE_FREQUENCY, queryVariantsOptions.populationFreqs);
    addParam(query, ANNOT_POPULATION_MINOR_ALLELE_FREQUENCY, queryVariantsOptions.populationMaf);
    addParam(query, ANNOT_CONSERVATION, queryVariantsOptions.conservation);
    if (queryVariantsOptions.proteinSubstitution != null
            && !queryVariantsOptions.proteinSubstitution.isEmpty()) {
        // Each comma-separated field looks like "<tool><op><value>" (e.g. "sift<0.2").
        // The operator is blanked out so split(" ") yields [tool, value]; two-char
        // operators are replaced before their single-char prefixes.
        String[] fields = queryVariantsOptions.proteinSubstitution.split(",");
        for (String field : fields) {
            String[] arr = field.replaceAll("==", " ").replaceAll(">=", " ").replaceAll("<=", " ")
                    .replaceAll("=", " ").replaceAll("<", " ").replaceAll(">", " ").split(" ");
            if (arr != null && arr.length > 1) {
                switch (arr[0]) {
                case "sift":
                    query.put(ANNOT_SIFT.key(), field.replaceAll("sift", ""));
                    break;
                case "polyphen":
                    query.put(ANNOT_POLYPHEN.key(), field.replaceAll("polyphen", ""));
                    break;
                default:
                    query.put(ANNOT_PROTEIN_SUBSTITUTION.key(), field.replaceAll(arr[0], ""));
                    break;
                }
            }
        }
    }
    /*
     * Stats parameters
     */
    if (queryVariantsOptions.stats != null && !queryVariantsOptions.stats.isEmpty()) {
        Set<String> acceptedStatKeys = new HashSet<>(Arrays.asList(STATS_MAF.key(), STATS_MGF.key(),
                MISSING_ALLELES.key(), MISSING_GENOTYPES.key()));
        // Split each "name<op>value" stat filter at the first operator character.
        // NOTE(review): indexOf("<") is checked twice; one of the two was probably
        // meant to be a different operator — confirm against the supported syntax.
        for (String stat : queryVariantsOptions.stats.split(",")) {
            int index = stat.indexOf("<");
            index = index >= 0 ? index : stat.indexOf("!");
            index = index >= 0 ? index : stat.indexOf("~");
            index = index >= 0 ? index : stat.indexOf("<");
            index = index >= 0 ? index : stat.indexOf(">");
            index = index >= 0 ? index : stat.indexOf("=");
            if (index < 0) {
                throw new UnsupportedOperationException("Unknown stat filter operation: " + stat);
            }
            String name = stat.substring(0, index);
            String cond = stat.substring(index);
            if (acceptedStatKeys.contains(name)) {
                query.put(name, cond);
            } else {
                throw new UnsupportedOperationException("Unknown stat filter name: " + name);
            }
            logger.info("Parsed stat filter: {} {}", name, cond);
        }
    }
    addParam(query, STATS_MAF, queryVariantsOptions.maf);
    addParam(query, STATS_MGF, queryVariantsOptions.mgf);
    addParam(query, MISSING_ALLELES, queryVariantsOptions.missingAlleleCount);
    addParam(query, MISSING_GENOTYPES, queryVariantsOptions.missingGenotypeCount);
    // Variants are actually returned (not counted/grouped/ranked): single-study
    // output formats then require exactly one returned study.
    boolean returnVariants = !count && StringUtils.isEmpty(queryVariantsOptions.groupBy)
            && StringUtils.isEmpty(queryVariantsOptions.rank);
    if (returnVariants && !of.isMultiStudyOutput()) {
        int returnedStudiesSize = query.getAsStringList(RETURNED_STUDIES.key()).size();
        if (returnedStudiesSize == 0 && studies.size() == 1) {
            query.put(RETURNED_STUDIES.key(), studies.get(0));
        } else if (returnedStudiesSize == 0 && allStudyNames.size() != 1
                //If there are no returned studies, and there are more than one study
                || returnedStudiesSize > 1) { // Or is required more than one returned study
            String availableStudies = allStudyNames == null || allStudyNames.isEmpty() ? ""
                    : " Available studies: [ " + String.join(", ", allStudyNames) + " ]";
            throw new Exception("Only one study is allowed when returning " + of
                    + ", please use '--return-study' to select the returned " + "study." + availableStudies);
        } else {
            if (returnedStudiesSize == 0) {
                //If there were no returned studies, set the study existing one
                query.put(RETURNED_STUDIES.key(), allStudyNames.iterator().next());
            }
        }
    }
    return query;
}
From source file:org.jboss.jbossset.CommandLineParser.java
/**
 * Resolves the list of usernames from the command line: either the
 * {@code usernames} option (inline values) or the {@code userFile} option
 * (one username per line). Exactly one of the two must be present.
 *
 * @param cmd the parsed command line
 * @return the usernames to operate on
 * @throws ParseException if neither or both options are set, or the file
 *         is missing or unreadable
 */
private List<String> getValidUsernames(CommandLine cmd) throws ParseException {
    final boolean hasFile = cmd.hasOption("userFile");
    final boolean hasNames = cmd.hasOption("usernames");
    if (!hasFile && !hasNames) {
        throw new ParseException("You must specify either the usernames or userFile option");
    }
    if (hasFile && hasNames) {
        throw new ParseException("The usernames and userFile cannot be set at the same time");
    }
    if (hasNames) {
        return Arrays.asList(cmd.getOptionValues("usernames"));
    }
    final String url = cmd.getOptionValue("userFile");
    if (!new File(url).exists()) {
        throw new ParseException("The specified file <" + url + "> does not exist");
    }
    try {
        return Files.readAllLines(Paths.get(url));
    } catch (IOException e) {
        throw new ParseException("The specified file <" + url + "> cannot be opened");
    }
}
From source file:com.spankingrpgs.util.LoadStateUtils.java
/** * Given a Loader, and a path to data files, loads the data from the files into the game using the loader. * * @param loader The loader to use to load data into the game * @param dataPath The path containing the files containing the data to load * @param fileGlob A glob that describes the kinds of files to load * @param newLineMarker The String to use to represent new lines *///w w w . j ava2s . co m public static void loadData(Loader loader, Path dataPath, String fileGlob, String newLineMarker) { LOG.info(String.format("Loading %s", dataPath)); try { PathMatcher jsonMatcher = FileSystems.getDefault().getPathMatcher(fileGlob); Collection<String> data = StreamSupport.stream(Files.newDirectoryStream(dataPath).spliterator(), false) .peek(path -> LOG.fine(String.format("Loading data from %s", path))) .filter(jsonMatcher::matches).map((Path path) -> { try { return String.join(newLineMarker, Files.readAllLines(path)); } catch (IOException e) { String msg = String.format("Problem reading file: %s", path); LOG.log(Level.SEVERE, String.format("%s with exception:%s", msg, e), e); throw new RuntimeException(msg); } }).collect(Collectors.toList()); loader.load(data, GameState.getCleanInstance()); } catch (IOException exception) { String msg = String.format("Problem reading files in: %s", dataPath); LOG.log(Level.SEVERE, String.format("%s with exception: %s", msg, exception), exception); throw new RuntimeException(msg); } }
From source file:de.tudarmstadt.ukp.dkpro.core.io.text.TokenizedTextWriterTest.java
/**
 * Verifies that TokenizedTextWriter, configured with a feature path pointing
 * at Token.lemma.value, writes the lemma values ("lemma1 lemma2") rather than
 * the covered document text ("token1 token2"), as a single output line.
 */
@Test
public void testLemmas() throws IOException, UIMAException {
    // Output file in the test folder; removed when the JVM exits.
    File targetFile = new File(context.getTestOutputFolder(), "lemmas.out");
    targetFile.deleteOnExit();
    String expected = "lemma1 lemma2";
    int expectedLines = 1;
    // Feature path selecting the lemma value instead of the token text.
    String featurePath = Token.class.getName() + "/lemma/value";
    JCas jCas = JCasFactory.createJCas();
    jCas.setDocumentText("token1 token2");
    DocumentMetaData metaData = DocumentMetaData.create(jCas);
    metaData.setDocumentId("lemmasTest");
    metaData.addToIndexes(jCas);
    // Two tokens covering "token1" (0-6) and "token2" (7-13), each with a Lemma.
    Token token1 = new Token(jCas, 0, 6);
    Token token2 = new Token(jCas, 7, 13);
    Lemma lemma1 = new Lemma(jCas, 0, 6);
    lemma1.setValue("lemma1");
    Lemma lemma2 = new Lemma(jCas, 7, 13);
    lemma2.setValue("lemma2");
    token1.setLemma(lemma1);
    token2.setLemma(lemma2);
    token1.addToIndexes(jCas);
    token2.addToIndexes(jCas);
    lemma1.addToIndexes(jCas);
    lemma2.addToIndexes(jCas);
    // One sentence spanning both tokens -> exactly one output line expected.
    Sentence sentence = new Sentence(jCas, 0, 13);
    sentence.addToIndexes(jCas);
    AnalysisEngineDescription writer = createEngineDescription(TokenizedTextWriter.class,
            TokenizedTextWriter.PARAM_TARGET_LOCATION, targetFile, TokenizedTextWriter.PARAM_FEATURE_PATH,
            featurePath, TokenizedTextWriter.PARAM_SINGULAR_TARGET, true, TokenizedTextWriter.PARAM_OVERWRITE,
            true);
    SimplePipeline.runPipeline(jCas, writer);
    List<String> output = Files.readAllLines(targetFile.toPath());
    assertEquals(expectedLines, output.size());
    assertEquals(expected, output.get(0));
}
From source file:org.opennms.upgrade.implementations.MagicUsersMigratorOffline.java
/**
 * Migrates magic-users.properties into users.xml: strips read-only flags from
 * users.xml (remembering which users had them), imports the "magic" users with
 * encrypted passwords, converts role assignments into security roles on the
 * users, persists any custom roles to a separate role configuration file, and
 * finally saves every user back through the UserManager.
 *
 * @throws OnmsUpgradeException if any step of the migration fails
 */
@Override
public void execute() throws OnmsUpgradeException {
    if (!canRun()) {
        log("Error: ignoring the execution of the task because the file magic-users.properties was not found. Maybe the task was already successfully executed before.\n");
        return;
    }
    // Parse read-only attributes
    final List<String> readOnlyUsers = new ArrayList<String>();
    try {
        // Line-oriented scan of users.xml: a read-only="..." attribute applies to
        // the next user-id element seen, after which the flag is reset.
        boolean readOnly = false;
        for (String line : Files.readAllLines(usersFile.toPath())) {
            if (line.contains("read-only")) {
                Matcher m = Pattern.compile("read-only=\"(.+)\"").matcher(line);
                if (m.find()) {
                    readOnly = Boolean.parseBoolean(m.group(1));
                }
            }
            if (line.contains("user-id")) {
                if (readOnly) {
                    Matcher m = Pattern.compile("user-id[>](.+)[<][/]user-id").matcher(line);
                    if (m.find()) {
                        log("Warning: User %s has read-only flag\n", m.group(1));
                        readOnlyUsers.add(m.group(1));
                    }
                }
                readOnly = false;
            }
        }
        if (!readOnlyUsers.isEmpty()) {
            log("Removing the read-only flags from users.xml\n");
            String content = new String(Files.readAllBytes(usersFile.toPath()), StandardCharsets.UTF_8);
            content = content.replaceAll(" read-only=\".+\"", "");
            Files.write(usersFile.toPath(), content.getBytes(StandardCharsets.UTF_8));
        }
    } catch (Exception e) {
        throw new OnmsUpgradeException("Can't fix configuration because " + e.getMessage(), e);
    }
    log("Moving security roles into users.xml...\n");
    try {
        UserFactory.init();
        UserManager userManager = UserFactory.getInstance();
        // Retrieve all the currently configured users.
        final List<OnmsUser> users = new ArrayList<OnmsUser>();
        for (final String userName : userManager.getUserNames()) {
            log("Loading configured user: %s...\n", userName);
            users.add(userManager.getOnmsUser(userName));
        }
        // Parse magic-users.properties
        // NOTE(review): this FileInputStream is never closed — should be
        // wrapped in try-with-resources.
        Properties properties = new Properties();
        properties.load(new FileInputStream(magicUsersFile));
        // Look up for custom users and their passwords
        String[] configuredUsers = BundleLists.parseBundleList(properties.getProperty("users"));
        for (String user : configuredUsers) {
            String username = properties.getProperty("user." + user + ".username");
            String password = properties.getProperty("user." + user + ".password");
            OnmsUser newUser = new OnmsUser();
            newUser.setUsername(username);
            newUser.setFullName(user);
            newUser.setComments("This is a system user, do not delete");
            newUser.setPassword(userManager.encryptedPassword(password, true));
            newUser.setPasswordSalted(true);
            // System users go to the front of the list.
            users.add(0, newUser);
        }
        // Configure security roles
        String[] configuredRoles = BundleLists.parseBundleList(properties.getProperty("roles"));
        for (final String role : configuredRoles) {
            String userList = properties.getProperty("role." + role + ".users");
            if (userList == null) {
                log("Warning: Role configuration for '%s' does not have 'users' parameter. Expecting a 'role.%s.users' property. The role will not be usable.\n",
                        role, role);
                continue;
            }
            String[] authUsers = BundleLists.parseBundleList(userList);
            boolean notInDefaultGroup = "true"
                    .equals(properties.getProperty("role." + role + ".notInDefaultGroup"));
            String securityRole = "ROLE_" + role.toUpperCase();
            List<String> customRoles = new ArrayList<String>();
            for (final String username : authUsers) {
                OnmsUser onmsUser = getUser(users, username);
                if (onmsUser == null) {
                    log("Warning: User %s doesn't exist on users.xml, Ignoring.\n", username);
                } else {
                    addRole(onmsUser, securityRole);
                    // Members also get ROLE_USER unless the role opts out or is admin.
                    if (!notInDefaultGroup && !securityRole.equals(Authentication.ROLE_ADMIN)) {
                        addRole(onmsUser, Authentication.ROLE_USER);
                    }
                    if (!Authentication.isValidRole(securityRole)) {
                        log("Warning: %s is a custom role.\n", securityRole);
                        customRoles.add(role);
                    }
                }
            }
            if (!customRoles.isEmpty()) {
                // Custom (non-built-in) roles are persisted to a dedicated config file.
                String roleList = StringUtils.join(customRoles, ',');
                log("Creating %s with roles: %s\n", Authentication.ROLE_CONFIGURATION_FILE, roleList);
                Properties p = new Properties();
                p.put("roles", roleList);
                File configFile = new File(ConfigFileConstants.getHome(),
                        "etc" + File.separator + Authentication.ROLE_CONFIGURATION_FILE);
                // NOTE(review): this FileWriter is never closed — should be
                // wrapped in try-with-resources.
                p.store(new FileWriter(configFile), "Custom Roles");
            }
        }
        // Update users.xml
        for (final OnmsUser user : users) {
            // Users that carried the read-only flag get ROLE_READONLY (plus
            // ROLE_USER if they did not already have it).
            if (readOnlyUsers.contains(user.getUsername())) {
                addRole(user, Authentication.ROLE_READONLY);
                if (!user.getRoles().contains(Authentication.ROLE_USER)) {
                    addRole(user, Authentication.ROLE_USER);
                }
            }
            userManager.save(user);
        }
    } catch (Throwable e) {
        throw new OnmsUpgradeException("Can't fix configuration because " + e.getMessage(), e);
    }
}
From source file:org.apdplat.superword.tools.ProxyIp.java
private static void save() { try {/*from www .j a v a 2 s . c o m*/ //??IP?? Set<String> ips = new ConcurrentSkipListSet<>(); ips.addAll(Files.readAllLines(PROXY_IPS_FILE)); ips.addAll(IPS); //???IP ips.removeAll(NORMAL_IPS); Files.write(PROXY_IPS_FILE, toVerify(ips)); LOGGER.info("" + ips.size() + "??IP?"); Set<String> excellentIps = new HashSet<>(); excellentIps.addAll(Files.readAllLines(EXCELLENT_PROXY_IPS_FILE)); excellentIps.addAll(EXCELLENT_IPS); Files.write(EXCELLENT_PROXY_IPS_FILE, toVerify(excellentIps)); LOGGER.info("" + excellentIps.size() + "????IP?"); Set<String> excellentUsaIps = new HashSet<>(); excellentUsaIps.addAll(Files.readAllLines(EXCELLENT_USA_PROXY_IPS_FILE)); excellentUsaIps.addAll(EXCELLENT_USA_IPS); Files.write(EXCELLENT_USA_PROXY_IPS_FILE, toVerify(excellentUsaIps)); LOGGER.info("" + excellentUsaIps.size() + "????IP?"); Set<String> normalIps = new HashSet<>(); normalIps.addAll(Files.readAllLines(NORMAL_PROXY_IPS_FILE)); normalIps.addAll(NORMAL_IPS); Files.write(NORMAL_PROXY_IPS_FILE, toVerify(normalIps)); LOGGER.info("" + normalIps.size() + "?????IP?"); } catch (Exception e) { LOGGER.error("?", e); } }