List of usage examples for java.nio.file Path getFileName
Path getFileName();
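Before the project examples below, a minimal sketch of the basic behavior (the sample paths and class name are illustrative only): getFileName() returns the element farthest from the root as a relative Path, or null when the path has no name elements, such as a root path.

import java.nio.file.Path;
import java.nio.file.Paths;

public class GetFileNameSketch {
    public static void main(String[] args) {
        Path path = Paths.get("/tmp/reports/2020/summary.txt");
        // getFileName() returns only the last name element, as a Path
        Path name = path.getFileName();                     // summary.txt
        System.out.println(name);
        // A root path has no name element, so getFileName() returns null
        System.out.println(Paths.get("/").getFileName());   // null
        // The result is a Path; call toString() when a String is needed
        System.out.println(name.toString().endsWith(".txt")); // true
    }
}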
From source file:edu.cornell.mannlib.vitro.webapp.servlet.setup.FileGraphSetup.java
public boolean updateGraphInDB(RDFService rdfService, Model fileModel, String type, Path path)
        throws RDFServiceException {
    String graphURI = pathToURI(path, type);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    fileModel.write(buffer, "N-TRIPLE");
    InputStream inStream = new ByteArrayInputStream(buffer.toByteArray());
    if (rdfService.isEquivalentGraph(graphURI, inStream, ModelSerializationFormat.NTRIPLE)) {
        return false;
    }
    Model dbModel = new RDFServiceDataset(rdfService).getNamedModel(graphURI);
    if (log.isDebugEnabled()) {
        log.debug(String.format("%s %s dbModel size is %d, fileModel size is %d", type, path.getFileName(),
                dbModel.size(), fileModel.size()));
    }
    log.info("Updating " + path + " because graphs are not isomorphic");
    log.info("dbModel: " + dbModel.size() + " ; fileModel: " + fileModel.size());
    dbModel.removeAll();
    dbModel.add(fileModel);
    return true;
}
From source file:org.elasticsearch.plugins.PluginManagerIT.java
private void writeMd5(Path file, boolean corrupt) throws IOException {
    String md5Hex = MessageDigests.toHexString(MessageDigests.md5().digest(Files.readAllBytes(file)));
    try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".md5"),
            StandardCharsets.UTF_8)) {
        out.write(md5Hex);
        if (corrupt) {
            out.write("bad");
        }
    }
}
From source file:org.elasticsearch.plugins.PluginManagerIT.java
private void writeSha1(Path file, boolean corrupt) throws IOException {
    String sha1Hex = MessageDigests.toHexString(MessageDigests.sha1().digest(Files.readAllBytes(file)));
    try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".sha1"),
            StandardCharsets.UTF_8)) {
        out.write(sha1Hex);
        if (corrupt) {
            out.write("bad");
        }
    }
}
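Both Elasticsearch helpers above rely on the same idiom: derive a checksum file name by appending a suffix to getFileName(), then place it next to the original with resolveSibling(). A stripped-down sketch of that pattern (the helper name, suffix, and content here are only illustrative):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

public class SiblingChecksumSketch {
    // Writes "content" next to "file", using the original name plus a suffix,
    // e.g. plugin.zip -> plugin.zip.sha1 in the same directory.
    static Path writeSibling(Path file, String suffix, String content) throws IOException {
        // getFileName() keeps only the last path element; resolveSibling() puts the
        // new name back into the parent directory of the original file.
        Path sibling = file.resolveSibling(file.getFileName() + suffix);
        return Files.write(sibling, content.getBytes(StandardCharsets.UTF_8));
    }

    public static void main(String[] args) throws IOException {
        Path file = Files.createTempFile("plugin", ".zip");
        System.out.println(writeSibling(file, ".sha1", "deadbeef"));
    }
}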
From source file:com.google.cloud.runtimes.builder.buildsteps.docker.StageDockerArtifactBuildStep.java
@Override
protected void doBuild(Path directory, Map<String, String> metadata) throws BuildStepException {
    try {
        // TODO wrap this in a try block and log a more friendly message if not found
        Path artifact = getArtifact(directory, metadata);
        logger.info("Found artifact {}", artifact);

        // make staging dir
        Path stagingDir = directory.resolve(DOCKER_STAGING_DIR);
        if (Files.exists(stagingDir)) {
            logger.info("Found a docker staging directory in provided sources. Cleaning {}",
                    stagingDir.toString());
            FileUtils.deleteDirectory(stagingDir.toFile());
        }
        Files.createDirectory(stagingDir);
        metadata.put(BuildStepMetadataConstants.DOCKER_STAGING_PATH, stagingDir.toString());
        logger.info("Preparing docker files in {}", stagingDir);

        // copy the artifact into the staging dir
        Files.copy(artifact, stagingDir.resolve(artifact.getFileName()));

        // copy the .dockerignore file into staging dir, if it exists
        Path dockerIgnoreFile = directory.resolve(DOCKER_IGNORE_FILE);
        if (Files.isRegularFile(dockerIgnoreFile)) {
            Files.copy(dockerIgnoreFile, stagingDir.resolve(DOCKER_IGNORE_FILE));
        }

        // Generate dockerfile
        String dockerfile = dockerfileGenerator.generateDockerfile(artifact.getFileName());
        Path dockerFileDest = stagingDir.resolve("Dockerfile");
        try (BufferedWriter writer = Files.newBufferedWriter(dockerFileDest, StandardCharsets.US_ASCII)) {
            writer.write(dockerfile);
        }
    } catch (IOException | ArtifactNotFoundException | TooManyArtifactsException e) {
        throw new BuildStepException(e);
    }
}
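The key getFileName() use in the build step above is target.resolve(source.getFileName()), which copies a file into another directory while keeping its original name. A minimal sketch of just that step (the class name and temp paths are placeholders):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class StageCopySketch {
    // Copies "artifact" into "stagingDir", preserving its original file name.
    static Path stage(Path artifact, Path stagingDir) throws IOException {
        // resolve(getFileName()) appends only the last element of the source path,
        // so /build/libs/app.jar staged into /build/.staging becomes /build/.staging/app.jar
        return Files.copy(artifact, stagingDir.resolve(artifact.getFileName()));
    }

    public static void main(String[] args) throws IOException {
        Path artifact = Files.createTempFile("app", ".jar");
        Path stagingDir = Files.createTempDirectory("staging");
        System.out.println(stage(artifact, stagingDir));
    }
}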
From source file:org.fao.geonet.api.records.formatters.FormatterApi.java
public synchronized Element getPluginLocResources(final ServiceContext context, Path formatDir)
        throws Exception {
    final String formatDirPath = formatDir.toString();
    Element allLangResources = this.pluginLocs.get(formatDirPath);
    if (isDevMode(context) || allLangResources == null) {
        allLangResources = new Element("loc");
        Path baseLoc = formatDir.resolve("loc");
        if (Files.exists(baseLoc)) {
            final Element finalAllLangResources = allLangResources;
            Files.walkFileTree(baseLoc, new SimpleFileVisitor<Path>() {
                private void addTranslations(String locDirName, Element fileElements) {
                    if (locDirName != null && !locDirName.isEmpty()) {
                        Element resources = finalAllLangResources.getChild(locDirName);
                        if (resources == null) {
                            resources = new Element(locDirName);
                            finalAllLangResources.addContent(resources);
                        }
                        resources.addContent(fileElements);
                    }
                }

                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    if (file.getFileName().toString().toLowerCase().endsWith(".xml")) {
                        try {
                            final Element fileElements = Xml.loadFile(file);
                            final String fileName = getNameWithoutExtension(file.getFileName().toString());
                            fileElements.setName(fileName);
                            final String locDirName = getNameWithoutExtension(
                                    file.getParent().getFileName().toString());
                            addTranslations(locDirName, fileElements);
                        } catch (JDOMException e) {
                            throw new RuntimeException(e);
                        }
                    } else if (file.getFileName().toString().toLowerCase().endsWith(".json")) {
                        try {
                            final String fileName = getNameWithoutExtension(file.getFileName().toString());
                            final String[] nameParts = fileName.split("-", 2);
                            IsoLanguagesMapper isoLanguagesMapper = context.getBean(IsoLanguagesMapper.class);
                            String lang = isoLanguagesMapper.iso639_1_to_iso639_2(nameParts[0].toLowerCase(),
                                    nameParts[0]);
                            final JSONObject json = new JSONObject(
                                    new String(Files.readAllBytes(file), Constants.CHARSET));
                            Element fileElements = new Element(nameParts[1]);
                            final Iterator keys = json.keys();
                            while (keys.hasNext()) {
                                String key = (String) keys.next();
                                fileElements.addContent(new Element(key).setText(json.getString(key)));
                            }
                            addTranslations(lang, fileElements);
                        } catch (JSONException e) {
                            throw new RuntimeException(e);
                        }
                    }
                    return super.visitFile(file, attrs);
                }
            });
        }
        this.pluginLocs.put(formatDirPath, allLangResources);
    }
    return allLangResources;
}
From source file:com.streamsets.pipeline.lib.io.TestSingleLineLiveFileReader.java
@Test
public void testCRLFLines() throws Exception {
    Path file = createFile(Arrays.asList("Hello1\r\n", "Hello\r\n"));
    LiveFile lf = new LiveFile(file);

    //multiple lines in one chunk
    LiveFileReader lfr = new SingleLineLiveFileReader(
            LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""), null, lf,
            Charset.defaultCharset(), 0, 20);
    Assert.assertTrue(lfr.hasNext());
    LiveFileChunk chunk = lfr.next(0);
    Assert.assertNotNull(chunk);
    Assert.assertFalse(chunk.isTruncated());
    Assert.assertEquals(0, chunk.getOffset());
    Assert.assertEquals(15, chunk.getLength());
    Assert.assertEquals("Hello1\r\nHello\r\n", readChunk(chunk));
    Assert.assertEquals(15, lfr.getOffset());

    //1.5 lines in one chunk
    lfr = new SingleLineLiveFileReader(
            LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""), null, lf,
            Charset.defaultCharset(), 0, 10);
    Assert.assertTrue(lfr.hasNext());
    chunk = lfr.next(0);
    Assert.assertNotNull(chunk);
    Assert.assertFalse(chunk.isTruncated());
    Assert.assertEquals(0, chunk.getOffset());
    Assert.assertEquals(8, chunk.getLength());
    Assert.assertEquals("Hello1\r\n", readChunk(chunk));
    Assert.assertEquals(8, lfr.getOffset());
    chunk = lfr.next(0);
    Assert.assertNotNull(chunk);
    Assert.assertTrue(lfr.hasNext());
    Assert.assertNotNull(chunk);
    Assert.assertFalse(chunk.isTruncated());
    Assert.assertEquals(8, chunk.getOffset());
    Assert.assertEquals(7, chunk.getLength());
    Assert.assertEquals("Hello\r\n", readChunk(chunk));
    Assert.assertEquals(15, lfr.getOffset());

    //first line truncated after \r\n
    lfr = new SingleLineLiveFileReader(
            LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""), null, lf,
            Charset.defaultCharset(), 0, 8);
    Assert.assertTrue(lfr.hasNext());
    chunk = lfr.next(0);
    Assert.assertNotNull(chunk);
    Assert.assertFalse(chunk.isTruncated());
    Assert.assertEquals(0, chunk.getOffset());
    Assert.assertEquals(8, chunk.getLength());
    Assert.assertEquals("Hello1\r\n", readChunk(chunk));
    Assert.assertEquals(8, lfr.getOffset());
    chunk = lfr.next(0);
    Assert.assertNotNull(chunk);
    Assert.assertTrue(lfr.hasNext());
    Assert.assertNotNull(chunk);
    Assert.assertFalse(chunk.isTruncated());
    Assert.assertEquals(8, chunk.getOffset());
    Assert.assertEquals(7, chunk.getLength());
    Assert.assertEquals("Hello\r\n", readChunk(chunk));
    Assert.assertEquals(15, lfr.getOffset());

    //first line truncated after \r
    lfr = new SingleLineLiveFileReader(
            LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""), null, lf,
            Charset.defaultCharset(), 0, 7);
    Assert.assertTrue(lfr.hasNext());
    chunk = lfr.next(0);
    Assert.assertNotNull(chunk);
    Assert.assertTrue(chunk.isTruncated());
    Assert.assertEquals(0, chunk.getOffset());
    Assert.assertEquals(7, chunk.getLength());
    Assert.assertEquals("Hello1\r", readChunk(chunk));
    Assert.assertEquals(-7, lfr.getOffset());
    chunk = lfr.next(0);
    Assert.assertNotNull(chunk);
    Assert.assertTrue(lfr.hasNext());
    Assert.assertNotNull(chunk);
    Assert.assertFalse(chunk.isTruncated());
    Assert.assertEquals(8, chunk.getOffset());
    Assert.assertEquals(7, chunk.getLength());
    Assert.assertEquals("Hello\r\n", readChunk(chunk));
    Assert.assertEquals(15, lfr.getOffset());
    lfr.close();
}
From source file:org.fao.geonet.api.records.formatters.FormatterApi.java
public void copyNewerFilesToDataDir(final Path fromDir, final Path toDir) throws IOException {
    if (Files.exists(fromDir)) {
        Files.walkFileTree(fromDir, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                final Path path = IO.relativeFile(fromDir, file, toDir);
                if (!file.getFileName().toString().toLowerCase().endsWith(".iml")
                        && (!Files.exists(path)
                                || Files.getLastModifiedTime(path).compareTo(Files.getLastModifiedTime(file)) < 0)) {
                    Files.deleteIfExists(path);
                    IO.copyDirectoryOrFile(file, path, false);
                }
                return super.visitFile(file, attrs);
            }
        });
    }
}
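Both GeoNetwork examples above test file.getFileName().toString().toLowerCase().endsWith(...) to select or skip files by extension while walking a tree. A stripped-down sketch of that check (the directory and extensions below are placeholders):

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;

public class ExtensionFilterSketch {
    public static void main(String[] args) throws IOException {
        Path root = Paths.get("loc");  // placeholder directory
        Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                // getFileName() isolates the name element, so the extension check
                // cannot accidentally match a directory name earlier in the path.
                String name = file.getFileName().toString().toLowerCase();
                if (name.endsWith(".xml") || name.endsWith(".json")) {
                    System.out.println("translation file: " + file);
                }
                return FileVisitResult.CONTINUE;
            }
        });
    }
}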
From source file:codes.thischwa.c5c.impl.LocalConnector.java
@Override
public StreamContent download(String backendPath) throws C5CException {
    Path file = buildRealPath(backendPath);
    try {
        InputStream in = new BufferedInputStream(Files.newInputStream(file, StandardOpenOption.READ));
        return buildStreamContent(in, Files.size(file));
    } catch (FileNotFoundException e) {
        logger.error("Requested file does not exist: {}", file.toAbsolutePath());
        throw new FilemanagerException(FilemanagerAction.DOWNLOAD, FilemanagerException.Key.FileNotExists,
                backendPath);
    } catch (IOException | SecurityException e) {
        // use %s placeholders: this message is built with String.format, not the SLF4J logger
        String msg = String.format("Error while downloading %s: %s", file.getFileName(), e.getMessage());
        logger.error(msg, e);
        throw new C5CException(FilemanagerAction.DOWNLOAD, msg);
    }
}
From source file:au.org.ands.vocabs.toolkit.provider.transform.PropertyRewriterTransformProvider.java
@Override
public final boolean transform(final TaskInfo taskInfo, final JsonNode subtask,
        final HashMap<String, String> results) {
    // Prepare for rewriting.
    if (!loadRewriteMap()) {
        results.put(TaskStatus.ERROR, "PropertyRewriter unable to load rewrite map");
        return false;
    }
    Path originalHarvestdir = Paths.get(ToolkitFileUtils.getTaskHarvestOutputPath(taskInfo));
    // Use this transform name and the task ID to construct
    // the path names.
    String transformName = "PropertyRewriter_" + taskInfo.getTask().getId();
    String transformOutputDir = ToolkitFileUtils.getTaskTransformTemporaryOutputPath(taskInfo, transformName);
    Path transformOutputDirPath = Paths.get(transformOutputDir);
    try {
        ToolkitFileUtils.requireEmptyDirectory(transformOutputDir);
    } catch (IOException ex) {
        results.put(TaskStatus.EXCEPTION,
                "Exception in PropertyRewriter while cleaning old transform output directory");
        logger.error("Exception in PropertyRewriter while cleaning old transform output directory: ", ex);
        return false;
    }
    // Open the harvest directory ...
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(originalHarvestdir)) {
        // ... and iterate over every file in the harvest directory.
        for (Path entry : stream) {
            // First, parse the file into a model and do rewriting.
            Model model = new LinkedHashModel();
            RDFFormat format = Rio.getParserFormatForFileName(entry.toString());
            RDFParser rdfParser = Rio.createParser(format);
            ConceptHandler conceptHandler = new ConceptHandler(metadataRewriteConf, model);
            rdfParser.setRDFHandler(conceptHandler);
            FileInputStream is = new FileInputStream(entry.toString());
            logger.debug("Reading RDF:" + entry.toString());
            rdfParser.parse(is, entry.toString());
            // And now serialize the result.
            String resultFileName = transformOutputDirPath.resolve(entry.getFileName()).toString();
            FileOutputStream out = new FileOutputStream(resultFileName);
            // Write in the same format we read.
            Rio.write(model, out, format);
            out.close();
        }
    } catch (DirectoryIteratorException | IOException | RDFParseException | RDFHandlerException
            | UnsupportedRDFormatException ex) {
        results.put(TaskStatus.EXCEPTION, "Exception in PropertyRewriter while Parsing RDF");
        logger.error("Exception in PropertyRewriter while Parsing RDF:", ex);
        return false;
    }
    // Done rewriting, and was successful. Replace the old
    // harvest with the transformed files.
    if (!ToolkitFileUtils.renameTransformTemporaryOutputPath(taskInfo, transformName)) {
        results.put(TaskStatus.ERROR, "Error in PropertyRewriter when renaming output directory");
        logger.error("Error in PropertyRewriter when renaming output directory");
        return false;
    }
    return true;
}