List of usage examples for java.nio.file.Files.isReadable
public static boolean isReadable(Path path)
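Files.isReadable tests whether the file located by the given Path exists and the JVM has permission to open it for reading. It returns false rather than throwing when the file is missing or when read access cannot be determined. A minimal, self-contained sketch of typical usage (the class name and the path /tmp/example.txt are illustrative, not taken from any of the source files below):

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class IsReadableExample {
    public static void main(String[] args) {
        // Illustrative path; substitute the file you want to check.
        Path path = Paths.get("/tmp/example.txt");

        // isReadable does not throw: it returns false when the file
        // does not exist or when readability cannot be determined.
        if (Files.isReadable(path)) {
            System.out.println(path + " exists and is readable");
        } else {
            System.out.println(path + " is missing or not readable");
        }
    }
}

As the usage examples below show, isReadable is typically combined with Files.exists and Files.isRegularFile or Files.isDirectory to validate a path before opening it.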
From source file: nl.salp.warcraft4j.casc.cdn.online.CachingOnlineDataReaderProvider.java
/**
 * Check if a file is cached.
 *
 * @param url The URL of the file.
 *
 * @return {@code true} if the file is cached.
 *
 * @throws CascParsingException When the provided URL is invalid or does not point to a valid file.
 */
private boolean isCached(String url) throws CascParsingException {
    Path file = toCacheFile(url);
    return Files.exists(file) && Files.isRegularFile(file) && Files.isReadable(file);
}
From source file: org.opencb.cellbase.app.transform.VariationParser.java
@Override
public void parse() throws IOException, InterruptedException, SQLException, ClassNotFoundException {
    if (!Files.exists(variationDirectoryPath) || !Files.isDirectory(variationDirectoryPath)
            || !Files.isReadable(variationDirectoryPath)) {
        throw new IOException("Variation directory either does not exist, is not a directory or cannot be read");
    }
    if (!existsZippedOrUnzippedFile(VARIATION_FILENAME)
            || isEmpty(variationDirectoryPath.resolve(VARIATION_FILENAME).toString())) {
        throw new IOException("variation.txt.gz either does not exist, is not a directory or cannot be read");
    }

    Variation variation;

    // To speed up calculation a SQLite database is created with the IDs and file offsets,
    // file must be uncompressed for doing this.
    gunzipVariationInputFiles();

    // add idVariation to transcript_variation file
    preprocessInputFiles();

    // Open variation file, this file never gets uncompressed. It's read from gzip file
    BufferedReader bufferedReaderVariation = getBufferedReader(PREPROCESSED_VARIATION_FILENAME);

    // create buffered readers for all other input files
    createVariationFilesBufferedReaders();

    Map<String, String> seqRegionMap = VariationUtils.parseSeqRegionToMap(variationDirectoryPath);
    Map<String, String> sourceMap = VariationUtils.parseSourceToMap(variationDirectoryPath);

    initializeVariationRelatedArrays();
    Stopwatch globalStartwatch = Stopwatch.createStarted();
    Stopwatch batchWatch = Stopwatch.createStarted();
    logger.info("Parsing variation file " + variationDirectoryPath.resolve(PREPROCESSED_VARIATION_FILENAME) + " ...");
    long countprocess = 0;
    String line;
    while ((line = bufferedReaderVariation.readLine()) != null) {
        String[] variationFields = line.split("\t");
        int variationId = Integer.parseInt(variationFields[0]);

        List<String[]> resultVariationFeature = getVariationRelatedFields(VARIATION_FEATURE_FILE_ID, variationId);
        if (resultVariationFeature != null && resultVariationFeature.size() > 0) {
            String[] variationFeatureFields = resultVariationFeature.get(0);

            List<TranscriptVariation> transcriptVariation = getTranscriptVariations(variationId, variationFeatureFields[0]);
            List<Xref> xrefs = getXrefs(sourceMap, variationId);

            try {
                // Preparing the variation alleles
                String[] allelesArray = getAllelesArray(variationFeatureFields);

                // For code sanity save chromosome, start, end and id
                String chromosome = seqRegionMap.get(variationFeatureFields[1]);

                if (!chromosome.contains("PATCH") && !chromosome.contains("HSCHR") && !chromosome.contains("contig")) {
                    int start = (variationFeatureFields != null) ? Integer.valueOf(variationFeatureFields[2]) : 0;
                    int end = (variationFeatureFields != null) ? Integer.valueOf(variationFeatureFields[3]) : 0;
                    String id = (variationFields[2] != null && !variationFields[2].equals("\\N")) ? variationFields[2] : "";
                    String reference = (allelesArray[0] != null && !allelesArray[0].equals("\\N")) ? allelesArray[0] : "";
                    String alternate = (allelesArray[1] != null && !allelesArray[1].equals("\\N")) ? allelesArray[1] : "";

                    // Preparing frequencies
                    //List<PopulationFrequency> populationFrequencies = getPopulationFrequencies(variationId, allelesArray);
                    List<PopulationFrequency> populationFrequencies = getPopulationFrequencies(chromosome, start,
                            end, id, reference, alternate);

                    // TODO: check that variationFeatureFields is always different to null and intergenic-variant is never used
                    //List<String> consequenceTypes = (variationFeatureFields != null)
                    //        ? Arrays.asList(variationFeatureFields[12].split(",")) : Arrays.asList("intergenic_variant");
                    List<String> consequenceTypes = Arrays.asList(variationFeatureFields[12].split(","));
                    String displayConsequenceType = getDisplayConsequenceType(consequenceTypes);

                    // we have everything necessary to construct the 'variation' object
                    variation = buildVariation(variationFields, variationFeatureFields, chromosome, start, end,
                            id, reference, alternate, transcriptVariation, xrefs, populationFrequencies,
                            allelesArray, consequenceTypes, displayConsequenceType);
                    fileSerializer.serialize(variation, getOutputFileName(chromosome));
                }

                if (++countprocess % 100000 == 0 && countprocess != 0) {
                    logger.info("Processed variations: " + countprocess);
                    logger.debug("Elapsed time processing batch: " + batchWatch);
                    batchWatch.reset();
                    batchWatch.start();
                }
            } catch (Exception e) {
                e.printStackTrace();
                logger.error("Error parsing variation: " + e.getMessage());
                logger.error("Last line processed: " + line);
                break;
            }
        }
        // TODO: just for testing, remove
        //if (countprocess % 100000 == 0) {
        //    break;
        //}
    }
    logger.info("Variation parsing finished");
    logger.info("Variants processed: " + countprocess);
    logger.debug("Elapsed time parsing: " + globalStartwatch);

    gzipVariationFiles(variationDirectoryPath);

    try {
        bufferedReaderVariation.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file: com.ontotext.s4.service.S4ServiceClient.java
/**
 * Classifies the contents of a single file with the specified MIME type. Returns an object which allows
 * for convenient access to the classification information for the document.
 *
 * @param documentContent the file whose contents will be classified
 * @param documentEncoding the encoding of the document file
 * @param documentMimeType the MIME type of the document to be classified
 * @return an object providing access to the classified content as well as the classifications produced
 * @throws IOException
 * @throws S4ServiceClientException
 */
public ClassifiedDocument classifyFileContents(File documentContent, Charset documentEncoding,
        SupportedMimeType documentMimeType) throws IOException, S4ServiceClientException {
    Path documentPath = documentContent.toPath();
    if (!Files.isReadable(documentPath)) {
        throw new IOException("File " + documentPath.toString() + " is not readable.");
    }
    ByteBuffer buff = ByteBuffer.wrap(Files.readAllBytes(documentPath));
    String content = documentEncoding.decode(buff).toString();
    return classifyDocument(content, documentMimeType);
}
From source file: org.nuxeo.github.Analyzer.java
protected void load() {
    if (input == null) {
        input = Paths.get(System.getProperty("java.io.tmpdir"), "contributors.csv");
    }
    if (!Files.isReadable(input)) {
        return;
    }
    try (CSVReader reader = new CSVReader(Files.newBufferedReader(input, Charset.defaultCharset()), '\t')) {
        // Check header
        String[] header = reader.readNext();
        if (!ArrayUtils.isEquals(CSV_HEADER, header)) {
            log.warn("Header mismatch " + Arrays.toString(header));
            return;
        }
        String[] nextLine;
        while ((nextLine = reader.readNext()) != null) {
            Developer dev = parse(nextLine);
            if (dev.isAnonymous()) {
                developersByName.put(dev.getName(), dev);
            } else {
                developersByLogin.put(dev.getLogin(), dev);
            }
        }
        for (Developer dev : developersByLogin.values()) {
            for (String alias : dev.getAliases()) {
                if (developersByLogin.containsKey(alias)) {
                    developersByLogin.get(alias).updateWith(dev);
                }
            }
        }
        for (Developer dev : developersByName.values()) {
            for (String alias : dev.getAliases()) {
                if (developersByLogin.containsKey(alias)) {
                    developersByLogin.get(alias).updateWith(dev);
                }
            }
        }
    } catch (IOException e) {
        log.error(e.getMessage(), e);
        try {
            Files.copy(input, input.resolveSibling(input.getFileName() + ".bak"),
                    StandardCopyOption.REPLACE_EXISTING);
        } catch (IOException e1) {
            log.error(e1.getMessage(), e1);
        }
    }
}
From source file: io.anserini.index.IndexCollection.java
public IndexCollection(IndexCollection.Args args) throws Exception {
    this.args = args;

    LOG.info("Collection path: " + args.input);
    LOG.info("Index path: " + args.index);
    LOG.info("Threads: " + args.threads);
    LOG.info("Keep stopwords? " + args.keepStopwords);
    LOG.info("Store positions? " + args.storePositions);
    LOG.info("Store docvectors? " + args.storeDocvectors);
    LOG.info("Store transformed docs? " + args.storeTransformedDocs);
    LOG.info("Store raw docs? " + args.storeRawDocs);
    LOG.info("Optimize (merge segments)? " + args.optimize);

    this.indexPath = Paths.get(args.index);
    if (!Files.exists(this.indexPath)) {
        Files.createDirectories(this.indexPath);
    }

    collectionPath = Paths.get(args.input);
    if (!Files.exists(collectionPath) || !Files.isReadable(collectionPath) || !Files.isDirectory(collectionPath)) {
        throw new RuntimeException("Document directory " + collectionPath.toString()
                + " does not exist or is not readable, please check the path");
    }

    this.transformerClass = Class.forName("io.anserini.index.generator." + args.generatorClass);
    this.collectionClass = Class.forName("io.anserini.collection." + args.collectionClass);

    collection = (Collection) this.collectionClass.newInstance();
    collection.setCollectionPath(collectionPath);

    this.counters = new Counters();
}
From source file: nl.salp.warcraft4j.config.PropertyWarcraft4jConfigTest.java
@Test
public void shouldNotCreateCacheDirectoryIfExisting() throws Exception {
    Files.createDirectories(cacheDir);
    assertTrue("Cache directory does not exist after creation.", Files.exists(cacheDir));
    Path testFile = Files.createTempFile(cacheDir, "testFile", "tmp");
    long creationTime = Files.getLastModifiedTime(cacheDir).toMillis();

    PropertyWarcraft4jConfig config = new PropertyWarcraft4jConfig(configuration);

    assertTrue("Cache directory does not exist.", Files.exists(config.getCacheDirectory()));
    assertTrue("Cache directory is not a directory.", Files.isDirectory(config.getCacheDirectory()));
    assertTrue("Cache directory is not readable.", Files.isReadable(config.getCacheDirectory()));
    assertEquals("Cache directory modification time changed after initialisation", creationTime,
            Files.getLastModifiedTime(config.getCacheDirectory()).toMillis());
    assertTrue("Test file in cache directory does not exist anymore.", Files.exists(testFile));
}
From source file: ddf.security.samlp.MetadataConfigurationParser.java
private void buildEntityDescriptor(String entityDescription) throws IOException {
    EntityDescriptor entityDescriptor = null;
    entityDescription = entityDescription.trim();
    if (entityDescription.startsWith(HTTPS) || entityDescription.startsWith(HTTP)) {
        if (entityDescription.startsWith(HTTP)) {
            LOGGER.warn("Retrieving metadata via HTTP instead of HTTPS. The metadata configuration is unsafe!!!");
        }
        HttpTransport httpTransport = new ApacheHttpTransport();
        HttpRequest httpRequest = httpTransport.createRequestFactory()
                .buildGetRequest(new GenericUrl(entityDescription));
        httpRequest.setUnsuccessfulResponseHandler(
                new HttpBackOffUnsuccessfulResponseHandler(new ExponentialBackOff())
                        .setBackOffRequired(HttpBackOffUnsuccessfulResponseHandler.BackOffRequired.ALWAYS));
        ListeningExecutorService service = MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
        ListenableFuture<HttpResponse> httpResponseFuture = service.submit(httpRequest::execute);

        Futures.addCallback(httpResponseFuture, new FutureCallback<HttpResponse>() {
            @Override
            public void onSuccess(HttpResponse httpResponse) {
                if (httpResponse != null) {
                    try {
                        String parsedResponse = httpResponse.parseAsString();
                        buildEntityDescriptor(parsedResponse);
                    } catch (IOException e) {
                        LOGGER.error("Unable to parse metadata from: {}",
                                httpResponse.getRequest().getUrl().toString(), e);
                    }
                }
            }

            @Override
            public void onFailure(Throwable throwable) {
                LOGGER.error("Unable to retrieve metadata.", throwable);
            }
        });
        service.shutdown();
    } else if (entityDescription.startsWith(FILE + System.getProperty("ddf.home"))) {
        String pathStr = StringUtils.substringAfter(entityDescription, FILE);
        Path path = Paths.get(pathStr);
        if (Files.isReadable(path)) {
            try (InputStream fileInputStream = Files.newInputStream(path)) {
                entityDescriptor = readEntityDescriptor(new InputStreamReader(fileInputStream, "UTF-8"));
            }
        }
    } else if (entityDescription.startsWith("<") && entityDescription.endsWith(">")) {
        entityDescriptor = readEntityDescriptor(new StringReader(entityDescription));
    } else {
        LOGGER.warn("Skipping unknown metadata configuration value: " + entityDescription);
    }

    if (entityDescriptor != null) {
        entityDescriptorMap.put(entityDescriptor.getEntityID(), entityDescriptor);
        if (updateCallback != null) {
            updateCallback.accept(entityDescriptor);
        }
    }
}
From source file: com.fpuna.preproceso.PreprocesoTS.java
/**
 * Static method that reads the file and loads it into a hash structure.
 *
 * @param Archivo path of the file
 * @return Hash with the sessions read from the training-set file
 */
public static HashMap<String, SessionTS> leerArchivos(String Archivo, String sensor) {
    HashMap<String, SessionTS> SessionsTotal = new HashMap<String, SessionTS>();
    HashMap<String, String> actividades = new HashMap<String, String>();

    String sDirectorio = path;
    File dirList = new File(sDirectorio);
    if (dirList.exists()) { // Directory exists
        File[] ficheros = dirList.listFiles();
        for (int x = 0; x < ficheros.length; x++) {
            Path file = Paths.get(path + ficheros[x].getName());
            if (Files.exists(file) && Files.isReadable(file)) {
                try {
                    BufferedReader reader = Files.newBufferedReader(file, Charset.defaultCharset());
                    String line;
                    int cabecera = 0;
                    while ((line = reader.readLine()) != null) {
                        if (line.contentEquals("statusId | label")) {
                            // Read all the activities
                            while ((line = reader.readLine()) != null
                                    && !line.contentEquals("statusId|sensorName|value|timestamp")) {
                                String part[] = line.split("\\|");
                                actividades.put(part[0], part[1]);
                                SessionTS s = new SessionTS();
                                s.setActividad(part[1]);
                                SessionsTotal.put(part[0], s);
                            }
                            line = reader.readLine();
                        }
                        String lecturas[] = line.split("\\|");
                        if (lecturas[1].contentEquals(sensor)) {
                            Registro reg = new Registro();
                            reg.setSensor(lecturas[1]);
                            String[] values = lecturas[2].split("\\,");
                            if (values.length == 3) {
                                reg.setValor_x(Double.parseDouble(values[0].substring(1)));
                                reg.setValor_y(Double.parseDouble(values[1]));
                                reg.setValor_z(Double.parseDouble(values[2].substring(0, values[2].length() - 1)));
                            } else if (values.length == 5) {
                                reg.setValor_x(Double.parseDouble(values[0].substring(1)));
                                reg.setValor_y(Double.parseDouble(values[1]));
                                reg.setValor_z(Double.parseDouble(values[2]));
                                reg.setM_1(Double.parseDouble(values[3]));
                                reg.setM_2(Double.parseDouble(values[4].substring(0, values[4].length() - 1)));
                            }
                            reg.setTiempo(new Timestamp(Long.parseLong(lecturas[3])));
                            SessionTS s = SessionsTotal.get(lecturas[0]);
                            s.addRegistro(reg);
                            SessionsTotal.replace(lecturas[0], s);
                        }
                    }
                } catch (IOException ex) {
                    System.err.println("Okapu");
                    Logger.getLogger(PreprocesoTS.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
    } else {
        // Directory does not exist
    }
    return SessionsTotal;
}
From source file: org.savantbuild.io.tar.TarBuilderTest.java
@Test
public void buildCompress() throws Exception {
    FileTools.prune(projectDir.resolve("build/test/tars"));

    Path file = projectDir.resolve("build/test/tars/test.tar.gz");
    TarBuilder builder = new TarBuilder(file);
    builder.storeGroupName = true;
    builder.storeUserName = true;
    builder.compress = true;
    int count = builder.fileSet(new FileSet(projectDir.resolve("src/main/java")))
            .fileSet(new FileSet(projectDir.resolve("src/test/java")))
            .optionalFileSet(new FileSet(projectDir.resolve("doesNotExist")))
            .build();

    assertTrue(Files.isReadable(file));
    assertTarFileEquals(file, "org/savantbuild/io/Copier.java",
            projectDir.resolve("src/main/java/org/savantbuild/io/Copier.java"));
    assertTarFileEquals(file, "org/savantbuild/io/FileSet.java",
            projectDir.resolve("src/main/java/org/savantbuild/io/FileSet.java"));
    assertEquals(count, 32);
}