List of usage examples for `java.nio.file.Files.delete`
public static void delete(Path path) throws IOException
From source file:org.apache.openaz.xacml.rest.XACMLPdpLoader.java
/**
 * Loads a single policy identified by {@code id} from either a local file
 * ({@code <id>.file} property) or a remote URL ({@code <id>.url} property),
 * records the outcome on {@code status}, and rethrows failures as PAPException.
 *
 * @param properties configuration holding the {@code <id>.file} / {@code <id>.url} entries
 * @param status     accumulator for loaded/failed policies and load errors
 * @param id         the policy id used as property prefix and local file name
 * @param isRoot     whether this policy is a root policy
 * @throws PAPException if the policy cannot be loaded from its location
 */
public static synchronized void loadPolicy(Properties properties, StdPDPStatus status, String id, boolean isRoot)
        throws PAPException {
    PolicyDef policy = null;
    String location = null;
    URI locationURI = null;
    boolean isFile = false;
    try {
        location = properties.getProperty(id + ".file");
        if (location == null) {
            // No local file configured; fall back to fetching from a URL.
            location = properties.getProperty(id + ".url");
            if (location != null) {
                //
                // Construct the URL
                //
                locationURI = URI.create(location);
                URL url = locationURI.toURL();
                URLConnection urlConnection = url.openConnection();
                // Identify this PDP to the PAP via a custom HTTP header.
                urlConnection.setRequestProperty(XACMLRestProperties.PROP_PDP_HTTP_HEADER_ID,
                        XACMLProperties.getProperty(XACMLRestProperties.PROP_PDP_ID));
                //
                // Now construct the output file name
                //
                Path outFile = Paths.get(getPDPConfig().toAbsolutePath().toString(), id);
                //
                // Copy it to disk
                //
                try (FileOutputStream fos = new FileOutputStream(outFile.toFile())) {
                    IOUtils.copy(urlConnection.getInputStream(), fos);
                }
                //
                // Now try to load
                //
                isFile = true;
                try (InputStream fis = Files.newInputStream(outFile)) {
                    policy = DOMPolicyDef.load(fis);
                }
                //
                // Save it so subsequent loads use the cached local copy.
                //
                properties.setProperty(id + ".file", outFile.toAbsolutePath().toString());
            }
        } else {
            // Local file path configured; load directly from disk.
            isFile = true;
            locationURI = Paths.get(location).toUri();
            try (InputStream is = Files.newInputStream(Paths.get(location))) {
                policy = DOMPolicyDef.load(is);
            }
        }
        if (policy != null) {
            status.addLoadedPolicy(new StdPDPPolicy(id, isRoot, locationURI, properties));
            logger.info("Loaded policy: " + policy.getIdentifier() + " version: "
                    + policy.getVersion().stringValue());
        } else {
            // DOMPolicyDef.load returned null: record the failure but do not throw here.
            String error = "Failed to load policy " + location;
            logger.error(error);
            status.setStatus(PDPStatus.Status.LOAD_ERRORS);
            status.addLoadError(error);
            status.addFailedPolicy(new StdPDPPolicy(id, isRoot));
        }
    } catch (Exception e) {
        logger.error("Failed to load policy '" + id + "' from location '" + location + "'", e);
        status.setStatus(PDPStatus.Status.LOAD_ERRORS);
        status.addFailedPolicy(new StdPDPPolicy(id, isRoot));
        //
        // Is it a file?
        //
        if (isFile) {
            //
            // Let's remove it
            //
            // NOTE(review): in the .url branch `location` still holds the URL string,
            // so Paths.get(location) does not point at the outFile that was written to
            // disk — the cleanup likely fails there. Verify whether outFile was intended.
            try {
                logger.error("Corrupted policy file, deleting: " + location);
                Files.delete(Paths.get(location));
            } catch (IOException e1) {
                logger.error(e1);
            }
        }
        throw new PAPException("Failed to load policy '" + id + "' from location '" + location + "'");
    }
}
From source file:company.gonapps.loghut.dao.PostDao.java
/**
 * Deletes the file backing {@code post}, then asks FileUtils.rmdir to prune the
 * post's parent directory and grandparent directory using the same filters as
 * before (presumably removing month/year folders once empty — verify against
 * FileUtils.rmdir's contract). Runs under the write lock for exclusive access.
 *
 * @param post the post whose backing file is removed
 * @throws IOException if the file cannot be deleted or the directory scans fail
 */
public void delete(PostDto post) throws IOException {
    final Path target = Paths.get(getPostPathString(post));
    rrwl.writeLock().lock();
    try {
        Files.delete(target);
        // Same filter as before, expressed as a lambda: entries whose path does
        // not match postPathStringPattern.
        FileUtils.rmdir(target.getParent(),
                path -> !postPathStringPattern.matcher(path.toString()).find());
        // Entries that don't match postMonthPattern, or are not directories.
        FileUtils.rmdir(target.getParent().getParent(),
                path -> (!postMonthPattern.matcher(path.toString()).find()) || (!Files.isDirectory(path)));
    } finally {
        rrwl.writeLock().unlock();
    }
}
From source file:com.streamsets.datacollector.io.DataStore.java
/**
 * Inspects the on-disk state left by a previous (possibly interrupted) write and
 * either commits or rolls back so that {@code file} is consistent afterwards.
 * The write protocol uses three companion files: fileOld (backup), fileTmp
 * (in-progress write), fileNew (completed but uncommitted write). The order of
 * the existence checks below encodes which phase the interrupted write reached.
 *
 * @throws IOException if the companion files are in a combination that the
 *                     protocol cannot produce, or a move/delete fails
 */
private void verifyAndRecover() throws IOException {
    if (Files.exists(fileOld) || Files.exists(fileTmp) || Files.exists(fileNew)) {
        if (Files.exists(fileNew)) {
            // Phase: write finished (fileNew complete) but commit not done — roll forward.
            LOG.debug("File '{}', write completed but not committed, committing", file);
            // fileNew may only coexist with fileOld; anything else is corruption.
            if (Files.exists(fileTmp)) {
                throw new IOException(
                        Utils.format("File '{}' exists, '{}' should not exist", fileNew, fileTmp));
            }
            if (Files.exists(file)) {
                throw new IOException(Utils.format("File '{}' exists, '{}' should not exist", fileNew, file));
            }
            Files.move(fileNew, file);
            if (Files.exists(fileOld)) {
                Files.delete(fileOld);
                LOG.trace("File '{}', deleted during verification", fileOld);
            }
            LOG.trace("File '{}', committed during verification", file);
        } else if (Files.exists(fileTmp)) {
            // Phase: interrupted mid-write — restore the backup.
            LOG.debug("File '{}', write incomplete while writing, rolling back", file);
            // A tmp file implies the backup must already exist.
            if (!Files.exists(fileOld)) {
                throw new IOException(Utils.format("File '{}' exists, '{}' should exists", fileTmp, fileOld));
            }
            if (Files.exists(file)) {
                throw new IOException(Utils.format("File '{}' exists, '{}' should not exist", fileTmp, file));
            }
            Files.delete(fileTmp);
            Files.move(fileOld, file);
            LOG.trace("File '{}', rolled back during verification", file);
        } else if (Files.exists(fileOld)) {
            // Phase: interrupted right after the original was renamed to the backup.
            LOG.debug("File '{}', write incomplete while starting write, rolling back", file);
            if (Files.exists(file)) {
                throw new IOException(Utils.format("File '{}' exists, '{}' should not exist", fileOld, file));
            }
            Files.move(fileOld, file);
            LOG.trace("File '{}', rolled back during verification", file);
        }
    } else {
        // Only the main file (or nothing) exists: clean state.
        LOG.trace("File '{}' no recovery needed", file);
    }
}
From source file:nl.knaw.huygens.alexandria.dropwizard.cli.CommandIntegrationTest.java
/**
 * Deletes the work file registered under {@code filename}.
 *
 * @param filename name resolved against the work directory via workFilePath
 * @throws IOException if the file does not exist or cannot be deleted
 */
void deleteFile(String filename) throws IOException {
    Files.delete(workFilePath(filename));
}
From source file:org.apache.solr.schema.SpatialRPTFieldTypeTest.java
/**
 * Rebuilds the Solr core with a managed schema and registers a
 * SpatialRecursivePrefixTree field named "geo" of type "location_rpt",
 * configured from the given (nullable) options.
 *
 * @param distanceUnits optional "distanceUnits" init parameter; skipped if null
 * @param geo           optional "geo" init parameter; skipped if null
 * @param format        optional "format" init parameter; skipped if null
 * @param fieldType     field type to initialize; a new
 *                      SpatialRecursivePrefixTreeFieldType is created if null
 * @throws Exception if core setup or schema mutation fails
 */
private void setupRPTField(String distanceUnits, String geo, String format, FieldType fieldType)
        throws Exception {
    deleteCore();
    File managedSchemaFile = new File(tmpConfDir, "managed-schema");
    // Delete managed-schema so it won't block parsing a new schema
    Files.delete(managedSchemaFile.toPath());
    System.setProperty("managed.schema.mutable", "true");
    initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath());
    // Sanity check: the starting schema must not already contain the field.
    String fieldName = "new_text_field";
    assertNull("Field '" + fieldName + "' is present in the schema",
            h.getCore().getLatestSchema().getFieldOrNull(fieldName));
    IndexSchema oldSchema = h.getCore().getLatestSchema();
    if (fieldType == null) {
        fieldType = new SpatialRecursivePrefixTreeFieldType();
    }
    // Only pass the init parameters the caller actually supplied.
    Map<String, String> rptMap = new HashMap<String, String>();
    if (distanceUnits != null)
        rptMap.put("distanceUnits", distanceUnits);
    if (geo != null)
        rptMap.put("geo", geo);
    if (format != null) {
        rptMap.put("format", format);
    }
    fieldType.init(oldSchema, rptMap);
    fieldType.setTypeName("location_rpt");
    // Register the new stored+indexed "geo" field and install the new schema.
    SchemaField newField = new SchemaField("geo", fieldType, SchemaField.STORED | SchemaField.INDEXED, null);
    IndexSchema newSchema = oldSchema.addField(newField);
    h.getCore().setLatestSchema(newSchema);
    // Start each test from an empty index.
    assertU(delQ("*:*"));
}
From source file:org.apache.tika.eval.TikaEvalCLI.java
private void handleCompare(String[] subsetArgs) throws Exception { List<String> argList = new ArrayList(Arrays.asList(subsetArgs)); boolean containsBC = false; String inputDir = null;/* w ww .j a va 2 s .c o m*/ String extractsA = null; String alterExtract = null; //confirm there's a batch-config file for (int i = 0; i < argList.size(); i++) { String arg = argList.get(i); if (arg.equals("-bc")) { containsBC = true; } else if (arg.equals("-inputDir")) { if (i + 1 >= argList.size()) { System.err.println("Must specify directory after -inputDir"); ExtractComparer.USAGE(); return; } inputDir = argList.get(i + 1); i++; } else if (arg.equals("-extractsA")) { if (i + 1 >= argList.size()) { System.err.println("Must specify directory after -extractsA"); ExtractComparer.USAGE(); return; } extractsA = argList.get(i + 1); i++; } else if (arg.equals("-alterExtract")) { if (i + 1 >= argList.size()) { System.err.println("Must specify type 'as_is', 'first_only' or " + "'concatenate_content' after -alterExtract"); ExtractComparer.USAGE(); return; } alterExtract = argList.get(i + 1); i++; } } if (alterExtract != null && !alterExtract.equals("as_is") && !alterExtract.equals("concatenate_content") && !alterExtract.equals("first_only")) { System.out.println("Sorry, I don't understand:" + alterExtract + ". The values must be one of: as_is, first_only, concatenate_content"); ExtractComparer.USAGE(); return; } //need to specify each in the commandline that goes into tika-batch //if only extracts is passed to tika-batch, //the crawler will see no inputDir and start crawling "input". 
//if the user doesn't specify inputDir, crawl extractsA if (inputDir == null && extractsA != null) { argList.add("-inputDir"); argList.add(extractsA); } Path tmpBCConfig = null; try { tmpBCConfig = Files.createTempFile("tika-eval", ".xml"); if (!containsBC) { Files.copy(this.getClass().getResourceAsStream("/tika-eval-comparison-config.xml"), tmpBCConfig, StandardCopyOption.REPLACE_EXISTING); argList.add("-bc"); argList.add(tmpBCConfig.toAbsolutePath().toString()); } String[] updatedArgs = argList.toArray(new String[argList.size()]); DefaultParser defaultCLIParser = new DefaultParser(); try { CommandLine commandLine = defaultCLIParser.parse(ExtractComparer.OPTIONS, updatedArgs); if (commandLine.hasOption("db") && commandLine.hasOption("jdbc")) { System.out.println("Please specify either the default -db or the full -jdbc, not both"); ExtractComparer.USAGE(); return; } } catch (ParseException e) { System.out.println(e.getMessage() + "\n"); ExtractComparer.USAGE(); return; } FSBatchProcessCLI.main(updatedArgs); } finally { if (tmpBCConfig != null && Files.isRegularFile(tmpBCConfig)) { Files.delete(tmpBCConfig); } } }
From source file:org.nuxeo.github.Analyzer.java
/** * @return true if there are unsigned contributors *///from w w w .ja v a2 s .co m protected boolean saveAndPrint() { Set<Developer> allContributors = new TreeSet<>(); allContributors.addAll(developersByLogin.values()); allContributors.addAll(developersByName.values()); log.info(String.format("Found %s contributors", allContributors.size())); if (output == null) { output = Paths.get(System.getProperty("java.io.tmpdir"), "contributors.csv"); } boolean unsigned = false; Path tmpFile; try { tmpFile = Files.createTempFile("contributors", ".csv"); } catch (IOException e) { log.error(e.getMessage(), e); return false; } try (CSVWriter writer = new CSVWriter(Files.newBufferedWriter(tmpFile, Charset.defaultCharset()), '\t')) { writer.writeNext(CSV_HEADER); for (Developer dev : allContributors) { if (!unsigned && dev.getAliases().isEmpty() && !"Nuxeo".equalsIgnoreCase(dev.getCompany()) && !dev.isSigned()) { unsigned = true; } log.debug(dev); writer.writeNext(new String[] { dev.getLogin(), dev.getName(), Boolean.toString(dev.isSigned()), setToString(dev.getEmails()), dev.getCompany(), dev.getUrl(), setToString(dev.getAliases()), dev.isSigned() || "Nuxeo".equalsIgnoreCase(dev.getCompany()) || "ex-Nuxeo".equalsIgnoreCase(dev.getCompany()) ? "" : commitsToString(dev.getCommits()) }); } Files.copy(tmpFile, output, StandardCopyOption.REPLACE_EXISTING); Files.delete(tmpFile); log.info("Saved to file: " + output); } catch (IOException e) { log.error("See " + tmpFile + System.lineSeparator() + e.getMessage(), e); } return unsigned; }
From source file:org.syncany.plugins.local.LocalTransferManager.java
/**
 * Probes whether the repository directory is writable by creating and
 * immediately deleting a temp file inside it.
 *
 * @return true if the probe file could be created and deleted; false if the
 *         target is not a directory or any error occurs (errors are logged,
 *         never propagated)
 */
@Override
public boolean testTargetCanWrite() {
    try {
        // Guard clause: a missing/non-directory target can never be writable.
        if (!Files.isDirectory(repoPath)) {
            logger.log(Level.INFO, "testTargetCanWrite: Can NOT write, target does not exist.");
            return false;
        }
        Path probe = Files.createTempFile(repoPath, "syncany-write-test", "tmp");
        Files.delete(probe);
        logger.log(Level.INFO, "testTargetCanWrite: Can write, test file created/deleted successfully.");
        return true;
    } catch (Exception e) {
        // Broad catch is deliberate: any failure simply means "cannot write".
        logger.log(Level.INFO, "testTargetCanWrite: Can NOT write to target.", e);
        return false;
    }
}
From source file:com.clust4j.algo.MeanShiftTests.java
/**
 * Round-trips a fitted MeanShift model through Java serialization and checks
 * the restored model is equal to the original (including its noise-point count).
 */
@Test
@Override
public void testSerialization() throws IOException, ClassNotFoundException {
    MeanShift ms = new MeanShift(data_, new MeanShiftParameters(0.5).setVerbose(true)).fit();
    System.out.println();
    // Capture a scalar property before serialization to compare after reload.
    final double n = ms.getNumberOfNoisePoints();
    ms.saveObject(new FileOutputStream(TestSuite.tmpSerPath));
    assertTrue(TestSuite.file.exists());
    MeanShift ms2 = (MeanShift) MeanShift.loadObject(new FileInputStream(TestSuite.tmpSerPath));
    assertTrue(ms2.getNumberOfNoisePoints() == n);
    assertTrue(ms.equals(ms2));
    // Clean up the serialized artifact so other tests start fresh.
    Files.delete(TestSuite.path);
}
From source file:com.ejisto.util.IOUtils.java
public static void zipDirectory(File src, String outputFilePath) throws IOException { Path out = Paths.get(outputFilePath); if (Files.exists(out)) { Files.delete(out); }// www. j a v a2 s .c o m String filePath = out.toUri().getPath(); Map<String, String> env = new HashMap<>(); env.put("create", "true"); try (FileSystem targetFs = FileSystems.newFileSystem(URI.create("jar:file:" + filePath), env)) { Files.walkFileTree(src.toPath(), new CopyFileVisitor(src.toPath(), targetFs.getPath("/"))); } }