List of usage examples for java.nio.file.StandardCopyOption.REPLACE_EXISTING
StandardCopyOption.REPLACE_EXISTING
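Before the examples, here is a minimal sketch of the pattern they all share: Files.copy only overwrites an existing target when StandardCopyOption.REPLACE_EXISTING is passed; without it, an existing target causes a FileAlreadyExistsException. The file names below are placeholders, not taken from any of the examples.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class ReplaceExistingDemo {
    public static void main(String[] args) throws IOException {
        // hypothetical source and target paths, for illustration only
        Path source = Paths.get("source.txt");
        Path target = Paths.get("target.txt");

        // without REPLACE_EXISTING this call would throw FileAlreadyExistsException
        // whenever target.txt already exists
        Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING);
    }
}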
From source file:edu.utah.bmi.ibiomes.lite.IBIOMESLiteManager.java
/**
 * Copy file that will be displayed in Jmol
 * @param doc XML document
 * @param rootElt Root element
 * @param xreader XPath reader for the document
 * @param dataDirPath Path to directory that contains analysis data
 * @param dirPath Path to experiment directory
 * @return XML element for Jmol data
 * @throws IOException
 */
private Element pullJmolFile(Document doc, Node rootElt, XPathReader xreader, String dataDirPath,
        String dirPath) throws IOException {
    Element jmolElt = doc.createElement("jmol");
    String mainStructureRelPath = (String) xreader
            .read("ibiomes/directory/AVUs/AVU[@id='MAIN_3D_STRUCTURE_FILE']", XPathConstants.STRING);
    if (mainStructureRelPath != null && mainStructureRelPath.length() > 0) {
        String dataFileNewName = mainStructureRelPath.replaceAll(PATH_FOLDER_SEPARATOR_REGEX, "_");
        String dataFileDestPath = dataDirPath + PATH_FOLDER_SEPARATOR + dataFileNewName;
        Files.copy(Paths.get(dirPath + PATH_FOLDER_SEPARATOR + mainStructureRelPath),
                Paths.get(dataFileDestPath), StandardCopyOption.REPLACE_EXISTING);
        // set read permissions
        if (!Utils.isWindows()) {
            Set<PosixFilePermission> permissions = new HashSet<PosixFilePermission>();
            permissions.add(PosixFilePermission.OWNER_READ);
            permissions.add(PosixFilePermission.OWNER_WRITE);
            permissions.add(PosixFilePermission.OWNER_EXECUTE);
            permissions.add(PosixFilePermission.GROUP_READ);
            permissions.add(PosixFilePermission.OTHERS_READ);
            Files.setPosixFilePermissions(Paths.get(dataFileDestPath), permissions);
        }
        jmolElt.setAttribute("path", dataFileNewName);
        jmolElt.setAttribute("name", mainStructureRelPath);
        NodeList avuNodes = (NodeList) xreader.read("//file[@absolutePath='" + dirPath
                + PATH_FOLDER_SEPARATOR + mainStructureRelPath + "']/AVUs/AVU", XPathConstants.NODESET);
        MetadataAVUList avuList = parseMetadata(avuNodes);
        String description = avuList.getValue(FileMetadata.FILE_DESCRIPTION);
        if (description != null && description.length() > 0)
            jmolElt.setAttribute("description", description);
        rootElt.appendChild(jmolElt);
        return jmolElt;
    } else
        return null;
}
From source file:ddf.catalog.test.TestCatalog.java
@Test
public void testContentDirectoryMonitor() throws Exception {
    startFeature(true, "content-core-directorymonitor");
    final String TMP_PREFIX = "tcdm_";
    Path tmpDir = Files.createTempDirectory(TMP_PREFIX);
    tmpDir.toFile().deleteOnExit();
    Path tmpFile = Files.createTempFile(tmpDir, TMP_PREFIX, "_tmp.xml");
    tmpFile.toFile().deleteOnExit();
    Files.copy(this.getClass().getClassLoader().getResourceAsStream("metacard5.xml"), tmpFile,
            StandardCopyOption.REPLACE_EXISTING);

    Map<String, Object> cdmProperties = new HashMap<>();
    cdmProperties.putAll(getMetatypeDefaults("content-core-directorymonitor",
            "ddf.content.core.directorymonitor.ContentDirectoryMonitor"));
    cdmProperties.put("monitoredDirectoryPath", tmpDir.toString() + "/"); // Must end with /
    cdmProperties.put("directive", "STORE_AND_PROCESS");
    createManagedService("ddf.content.core.directorymonitor.ContentDirectoryMonitor", cdmProperties);

    long startTime = System.nanoTime();
    ValidatableResponse response = null;
    do {
        response = executeOpenSearch("xml", "q=*SysAdmin*");
        if (response.extract().xmlPath().getList("metacards.metacard").size() == 1) {
            break;
        }
        try {
            TimeUnit.MILLISECONDS.sleep(50);
        } catch (InterruptedException e) {
        }
    } while (TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime) < TimeUnit.MINUTES.toMillis(1));
    response.body("metacards.metacard.size()", equalTo(1));
}
From source file:org.eclipse.che.plugin.docker.client.DockerConnector.java
/**
 * Puts files into specified container.
 *
 * @param container
 *         container id
 * @param targetPath
 *         path to file or directory inside specified container
 * @param sourceStream
 *         stream of files from source container
 * @param noOverwriteDirNonDir
 *         If "false" then it will be an error if unpacking the given content would cause
 *         an existing directory to be replaced with a non-directory or other resource and vice versa.
 * @throws IOException
 *         when problems occur with docker api calls, or during file system operations
 * @apiNote this method implements 1.20 docker API and requires docker not less than 1.8 version
 */
public void putResource(String container, String targetPath, InputStream sourceStream,
        boolean noOverwriteDirNonDir) throws IOException {
    File tarFile;
    long length;
    try (InputStream sourceData = sourceStream) {
        Path tarFilePath = Files.createTempFile("compressed-resources", ".tar");
        tarFile = tarFilePath.toFile();
        length = Files.copy(sourceData, tarFilePath, StandardCopyOption.REPLACE_EXISTING);
    }
    List<Pair<String, ?>> headers = Arrays.asList(Pair.of("Content-Type", ExtMediaType.APPLICATION_X_TAR),
            Pair.of("Content-Length", length));
    try (InputStream tarStream = new BufferedInputStream(new FileInputStream(tarFile));
            DockerConnection connection = connectionFactory.openConnection(dockerDaemonUri).method("PUT")
                    .path("/containers/" + container + "/archive").query("path", targetPath)
                    .query("noOverwriteDirNonDir", noOverwriteDirNonDir ? 0 : 1).headers(headers)
                    .entity(tarStream)) {
        final DockerResponse response = connection.request();
        final int status = response.getStatus();
        if (status != OK.getStatusCode()) {
            throw getDockerException(response);
        }
    } finally {
        FileCleaner.addFile(tarFile);
    }
}
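One detail worth noting in the example above: the InputStream overload of Files.copy returns the number of bytes written to the target, which is what the code reuses as the Content-Length header. A minimal sketch of that pattern, with a hypothetical helper name and temp-file prefix chosen only for illustration:

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

public class StreamToTempFile {
    // buffers an arbitrary stream into a temp file and reports how many bytes were written
    static long bufferToTempFile(InputStream in) throws IOException {
        Path tmp = Files.createTempFile("upload", ".tmp");
        try (InputStream source = in) {
            // Files.copy(InputStream, Path, ...) returns the byte count,
            // which can serve directly as a Content-Length value
            return Files.copy(source, tmp, StandardCopyOption.REPLACE_EXISTING);
        }
    }
}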
From source file:datavis.Gui.java
/**
 * Returns a new PSAWWB data list containing data pulled from TCOON's website
 * @param station The station number, e.g., 33 for Port Lavaca
 * @param interval The number of seconds between data samples, e.g., 86400
 * @param start Date range start date
 * @param end Date range end date. Defaults to 1 year if null
 * @return A fresh DataList
 */
public static DataList_PWL_SURGE_ATP_WTP_WSD_BPR getDataFromServer(int station, int interval, Date start,
        Date end) throws UnsupportedEncodingException {
    String dateString = new String(), pattern = new String("%25Y%25j%2B%25H%25M");
    DateFormat format = new SimpleDateFormat("MM.dd.yyyy");
    File temp = null;
    URL url;
    if (interval % 360 != 0 || interval < 360)
        interval = 0; // default to monthly
    if (interval >= 86400)
        pattern = "%25m-%25d-%25Y";
    if (start == null || end == null)
        dateString = "now,-1y";
    else
        dateString = format.format(start) + "-" + format.format(end);
    try {
        temp = File.createTempFile("TCOON", null);
        temp.deleteOnExit();
        url = new URL(String.format(
                "http://lighthouse.tamucc.edu/pd?stnlist=%03d&serlist=pwl,surge,atp,wtp,wsd,bpr&when=%s&whentz=UTC0&-action=csv&unit=metric&elev=stnd&interval=%s&datefmt=%s&rm=0&na=0",
                station, dateString, interval > 0 ? String.valueOf(interval) : "monthly", pattern));
        Files.copy(url.openStream(), temp.toPath(), StandardCopyOption.REPLACE_EXISTING);
    } catch (IOException e) {
    }
    return new DataList_PWL_SURGE_ATP_WTP_WSD_BPR(temp.getAbsolutePath());
}
From source file:org.tinymediamanager.core.Utils.java
/**
 * copy a file, preserving the attributes
 *
 * @param srcFile
 *          the file to be copied
 * @param destFile
 *          the target
 * @param overwrite
 *          overwrite the target?
 * @return true/false
 * @throws NullPointerException
 *          if source or destination is {@code null}
 * @throws FileExistsException
 *          if the destination file exists
 * @throws IOException
 *          if source or destination is invalid
 * @throws IOException
 *          if an IO error occurs moving the file
 */
public static boolean copyFileSafe(final Path srcFile, final Path destFile, boolean overwrite)
        throws IOException {
    if (srcFile == null) {
        throw new NullPointerException("Source must not be null");
    }
    if (destFile == null) {
        throw new NullPointerException("Destination must not be null");
    }
    // if (!srcFile.equals(destFile)) {
    if (!srcFile.toAbsolutePath().toString().equals(destFile.toAbsolutePath().toString())) {
        LOGGER.debug("try to copy file " + srcFile + " to " + destFile);
        if (!Files.exists(srcFile)) {
            throw new FileNotFoundException("Source '" + srcFile + "' does not exist");
        }
        if (Files.isDirectory(srcFile)) {
            throw new IOException("Source '" + srcFile + "' is a directory");
        }
        if (!overwrite) {
            if (Files.exists(destFile) && !Files.isSameFile(destFile, srcFile)) {
                // extra check for windows, where the File.equals is case insensitive
                // so we know now, that the File is the same, but the absolute name does not match
                throw new FileExistsException("Destination '" + destFile + "' already exists");
            }
        }
        if (Files.isDirectory(destFile)) {
            throw new IOException("Destination '" + destFile + "' is a directory");
        }

        // rename folder; try 5 times and wait a sec
        boolean rename = false;
        for (int i = 0; i < 5; i++) {
            try {
                // replace existing for changing cASE
                Files.copy(srcFile, destFile, StandardCopyOption.REPLACE_EXISTING,
                        StandardCopyOption.COPY_ATTRIBUTES);
                rename = true; // no exception
            } catch (IOException e) {
            }

            if (rename) {
                break; // ok it worked, step out
            }

            try {
                LOGGER.debug("rename did not work - sleep a while and try again...");
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                LOGGER.warn("I'm so excited - could not sleep");
            }
        }

        if (!rename) {
            LOGGER.error("Failed to rename file '" + srcFile + " to " + destFile);
            MessageManager.instance
                    .pushMessage(new Message(MessageLevel.ERROR, srcFile, "message.renamer.failedrename"));
            return false;
        } else {
            LOGGER.info("Successfully moved file from " + srcFile + " to " + destFile);
            return true;
        }
    }
    return true; // files are equal
}
From source file:org.deeplearning4j.models.embeddings.loader.WordVectorSerializer.java
/**
 * This method restores ParagraphVectors model previously saved with writeParagraphVectors()
 *
 * @return
 */
public static ParagraphVectors readParagraphVectors(File file) throws IOException {
    File tmpFileL = File.createTempFile("paravec", "l");
    tmpFileL.deleteOnExit();

    Word2Vec w2v = readWord2Vec(file);

    // and "convert" it to ParaVec model + optionally trying to restore labels information
    ParagraphVectors vectors = new ParagraphVectors.Builder(w2v.getConfiguration()).vocabCache(w2v.getVocab())
            .lookupTable(w2v.getLookupTable()).resetModel(false).build();

    ZipFile zipFile = new ZipFile(file);

    // now we try to restore labels information
    ZipEntry labels = zipFile.getEntry("labels.txt");
    if (labels != null) {
        InputStream stream = zipFile.getInputStream(labels);

        Files.copy(stream, Paths.get(tmpFileL.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);
        try (BufferedReader reader = new BufferedReader(new FileReader(tmpFileL))) {
            String line;
            while ((line = reader.readLine()) != null) {
                VocabWord word = vectors.getVocab().tokenFor(decodeB64(line.trim()));
                if (word != null) {
                    word.markAsLabel(true);
                }
            }
        }
    }

    vectors.extractLabels();

    return vectors;
}
From source file:com.stimulus.archiva.store.MessageStore.java
public byte[] writeEmail(Email message, File file, boolean compress, boolean encrypt)
        throws MessageStoreException {
    logger.debug("writeEmail");
    OutputStream fos = null;
    try {
        MessageDigest sha = MessageDigest.getInstance("SHA-1");
        fos = getRawMessageOutputStream(file, compress, encrypt);
        DigestOutputStream dos = new DigestOutputStream(fos, sha);
        message.writeTo(dos);
        byte[] digest = sha.digest();
        if (digest == null) {
            throw new MessageStoreException("failed to generate email digest. digest is null.", logger,
                    ChainedException.Level.DEBUG);
        }
        return digest;
    } catch (Exception e) {
        if (file.exists()) {
            boolean deleted = file.delete();
            if (!deleted) {
                try {
                    // Mod Start Seolhwa.kim 2017-04-13
                    //file.renameTo(File.createTempFile("ma", "tmp"));
                    File tmpfile = File.createTempFile("ma", "tmp");
                    Files.move(Paths.get(file.getAbsolutePath()), Paths.get(tmpfile.getAbsolutePath()),
                            StandardCopyOption.REPLACE_EXISTING);
                    // Mod End Seolhwa.kim 2017-04-13
                    Config.getFileSystem().getTempFiles().markForDeletion(file);
                } catch (Exception e3) {
                }
            }
        }
        throw new MessageStoreException("failed to write email {filename='" + file.getAbsolutePath() + "'",
                e, logger);
    } finally {
        try {
            if (fos != null)
                fos.close();
        } catch (Exception e) {
            logger.error("failed to close email file:" + e.getMessage());
        }
    }
    /*
    try {
        //System.out.println("WRITEMAIL:"+message.getContent()+"XXXXXXXXXXXXXXXXXXXXXX");
        FileOutputStream fos2 = new FileOutputStream("c:\\test.eml");
        message.writeTo(fos2);
        fos2.close();
    } catch (Exception e) {
        e.printStackTrace();
        logger.error(e);
    }
    */
}
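The example above is a reminder that REPLACE_EXISTING is not limited to Files.copy: Files.move accepts the same option and overwrites the destination if it already exists. A minimal sketch of that usage, with placeholder paths that are not taken from the example:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class MoveWithReplace {
    public static void main(String[] args) throws IOException {
        Path from = Paths.get("mail.tmp"); // hypothetical temp file
        Path to = Paths.get("mail.eml");   // hypothetical destination

        // overwrites mail.eml if it already exists; without REPLACE_EXISTING
        // an existing destination causes FileAlreadyExistsException
        Files.move(from, to, StandardCopyOption.REPLACE_EXISTING);
    }
}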
From source file:org.structr.web.maintenance.DeployCommand.java
private void exportFile(final Path target, final File file, final Map<String, Object> config)
        throws IOException {
    if (!DeployCommand.okToExport(file)) {
        return;
    }

    final Map<String, Object> properties = new TreeMap<>();
    final String name = file.getName();
    final Path src = file.getFileOnDisk().toPath();
    Path targetPath = target.resolve(name);
    boolean doExport = true;

    if (Files.exists(targetPath)) {
        // compare checksum
        final Long checksumOfExistingFile = FileHelper.getChecksum(targetPath.toFile());
        final Long checksumOfExportFile = file.getChecksum();

        doExport = !checksumOfExistingFile.equals(checksumOfExportFile);
    }

    if (doExport) {
        try {
            Files.copy(src, targetPath, StandardCopyOption.REPLACE_EXISTING);
        } catch (IOException ioex) {
            logger.warn("Unable to write file {}: {}", targetPath.toString(), ioex.getMessage());
        }
    }

    exportFileConfiguration(file, properties);

    if (!properties.isEmpty()) {
        config.put(file.getPath(), properties);
    }
}
From source file:org.owasp.benchmark.score.BenchmarkScore.java
private static TestResults readActualResults(File fileToParse) throws Exception {
    String filename = fileToParse.getName();
    TestResults tr = null;

    if (filename.endsWith(".ozasmt")) {
        tr = new AppScanSourceReader().parse(fileToParse);
    } else if (filename.endsWith(".json")) {
        String line1 = getLine(fileToParse, 0);
        String line2 = getLine(fileToParse, 1);
        if (line2.contains("Coverity") || line2.contains("formatVersion")) {
            tr = new CoverityReader().parse(fileToParse);
        }
    } else if (filename.endsWith(".txt")) {
        String line1 = getLine(fileToParse, 0);
        if (line1.startsWith("Possible ")) {
            tr = new SourceMeterReader().parse(fileToParse);
        }
    } else if (filename.endsWith(".xml")) {
        // Handle XML results file where the 2nd line indicates the tool type
        String line1 = getLine(fileToParse, 0);
        String line2 = getLine(fileToParse, 1);
        if (line2.startsWith("<pmd")) {
            tr = new PMDReader().parse(fileToParse);
        } else if (line2.startsWith("<FusionLiteInsight")) {
            tr = new FusionLiteInsightReader().parse(fileToParse);
        } else if (line2.startsWith("<XanitizerFindingsList")) {
            tr = new XanitizerReader().parse(fileToParse);
        } else if (line2.startsWith("<BugCollection")) {
            tr = new FindbugsReader().parse(fileToParse);
            // change the name of the tool if the filename contains findsecbugs
            if (fileToParse.getName().contains("findsecbugs")) {
                tr.setTool("FBwFindSecBugs");
            }
        } else if (line2.startsWith("<ResultsSession")) {
            tr = new ParasoftReader().parse(fileToParse);
        } else if (line2.startsWith("<detailedreport")) {
            tr = new VeracodeReader().parse(fileToParse);
        } else if (line1.startsWith("<total")) {
            tr = new SonarQubeReader().parse(fileToParse);
        } else if (line1.contains("<OWASPZAPReport") || line2.contains("<OWASPZAPReport")) {
            tr = new ZapReader().parse(fileToParse);
        } else if (line2.startsWith("<CxXMLResults")) {
            tr = new CheckmarxReader().parse(fileToParse);
        } else if (line2.startsWith("<report")) {
            tr = new ArachniReader().parse(fileToParse);
        } else if (line2.startsWith("<analysisReportResult")) {
            tr = new JuliaReader().parse(fileToParse);
        } else {
            // Handle XML where we have to look for a specific node to identify the tool type
            Document doc = getXMLDocument(fileToParse);
            Node root = doc.getDocumentElement();
            if (root.getNodeName().equals("issues")) {
                tr = new BurpReader().parse(root);
            } else if (root.getNodeName().equals("XmlReport")) {
                tr = new AppScanDynamicReader().parse(root);
            } else if (root.getNodeName().equals("noisycricket")) {
                tr = new NoisyCricketReader().parse(root);
            } else if (root.getNodeName().equals("Scan")) {
                tr = new WebInspectReader().parse(root);
            } else if (root.getNodeName().equals("ScanGroup")) {
                tr = new AcunetixReader().parse(root);
            } else if (root.getNodeName().equals("VulnSummary")) {
                tr = new Rapid7Reader().parse(root);
            } else if (root.getNodeName().equals("netsparker")) {
                tr = new NetsparkerReader().parse(root);
            }
        } // end else
    } // end if endsWith ".xml"
    else if (filename.endsWith(".fpr")) {
        // .fpr files are really .zip files. So we have to extract the .fvdl file out of it to process it
        Path path = Paths.get(fileToParse.getPath());
        FileSystem fileSystem = FileSystems.newFileSystem(path, null);
        File outputFile = File.createTempFile(filename, ".fvdl");
        Path source = fileSystem.getPath("audit.fvdl");
        Files.copy(source, outputFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
        tr = new FortifyReader().parse(outputFile);
        outputFile.delete();

        // Check to see if the results are regular Fortify or Fortify OnDemand results
        // To check, you have to look at the filtertemplate.xml file inside the .fpr archive
        // and see if that file contains: "Fortify-FOD-Template"
        outputFile = File.createTempFile(filename + "-filtertemplate", ".xml");
        source = fileSystem.getPath("filtertemplate.xml");

        // In older versions of Fortify, like 4.1, the filtertemplate.xml file doesn't exist
        // So only check it if it exists
        try {
            Files.copy(source, outputFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
            BufferedReader br = new BufferedReader(new FileReader(outputFile));
            try {
                StringBuilder sb = new StringBuilder();
                String line = br.readLine();

                // Only read the first 3 lines and the answer is near the top of the file.
                int i = 1;
                while (line != null && i++ <= 3) {
                    sb.append(line);
                    line = br.readLine();
                }
                if (sb.indexOf("Fortify-FOD-") > -1) {
                    tr.setTool(tr.getTool() + "-OnDemand");
                }
            } finally {
                br.close();
            }
        } catch (NoSuchFileException e) {
            // Do nothing if the filtertemplate.xml file doesn't exist in the .fpr archive
        } finally {
            outputFile.delete();
        }
    } else if (filename.endsWith(".log")) {
        tr = new ContrastReader().parse(fileToParse);
    }

    // If the version # of the tool is specified in the results file name, extract it, and set it.
    // For example: Benchmark-1.1-Coverity-results-v1.3.2661-6720.json (the version # is 1.3.2661 in this example).
    // This code should also handle: Benchmark-1.1-Coverity-results-v1.3.2661.xml (where the compute time '-6720' isn't specified)
    int indexOfVersionMarker = filename.lastIndexOf("-v");
    if (indexOfVersionMarker != -1) {
        String restOfFileName = filename.substring(indexOfVersionMarker + 2);
        int endIndex = restOfFileName.lastIndexOf('-');
        if (endIndex == -1)
            endIndex = restOfFileName.lastIndexOf('.');
        String version = restOfFileName.substring(0, endIndex);
        tr.setToolVersion(version);
    }

    return tr;
}
From source file:org.deeplearning4j.models.embeddings.loader.WordVectorSerializer.java
/**
 * This method restores Word2Vec model previously saved with writeWord2VecModel
 *
 * PLEASE NOTE: This method loads FULL model, so don't use it if you're only going to use weights.
 *
 * @param file
 * @return
 * @throws IOException
 */
@Deprecated
public static Word2Vec readWord2Vec(File file) throws IOException {
    File tmpFileSyn0 = File.createTempFile("word2vec", "0");
    File tmpFileSyn1 = File.createTempFile("word2vec", "1");
    File tmpFileC = File.createTempFile("word2vec", "c");
    File tmpFileH = File.createTempFile("word2vec", "h");
    File tmpFileF = File.createTempFile("word2vec", "f");

    tmpFileSyn0.deleteOnExit();
    tmpFileSyn1.deleteOnExit();
    tmpFileH.deleteOnExit();
    tmpFileC.deleteOnExit();
    tmpFileF.deleteOnExit();

    int originalFreq = Nd4j.getMemoryManager().getOccasionalGcFrequency();
    boolean originalPeriodic = Nd4j.getMemoryManager().isPeriodicGcActive();

    if (originalPeriodic)
        Nd4j.getMemoryManager().togglePeriodicGc(false);

    Nd4j.getMemoryManager().setOccasionalGcFrequency(50000);

    try {
        ZipFile zipFile = new ZipFile(file);
        ZipEntry syn0 = zipFile.getEntry("syn0.txt");
        InputStream stream = zipFile.getInputStream(syn0);

        Files.copy(stream, Paths.get(tmpFileSyn0.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);

        ZipEntry syn1 = zipFile.getEntry("syn1.txt");
        stream = zipFile.getInputStream(syn1);

        Files.copy(stream, Paths.get(tmpFileSyn1.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);

        ZipEntry codes = zipFile.getEntry("codes.txt");
        stream = zipFile.getInputStream(codes);

        Files.copy(stream, Paths.get(tmpFileC.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);

        ZipEntry huffman = zipFile.getEntry("huffman.txt");
        stream = zipFile.getInputStream(huffman);

        Files.copy(stream, Paths.get(tmpFileH.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);

        ZipEntry config = zipFile.getEntry("config.json");
        stream = zipFile.getInputStream(config);
        StringBuilder builder = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
            String line;
            while ((line = reader.readLine()) != null) {
                builder.append(line);
            }
        }

        VectorsConfiguration configuration = VectorsConfiguration.fromJson(builder.toString().trim());

        // we read first 4 files as w2v model
        Word2Vec w2v = readWord2VecFromText(tmpFileSyn0, tmpFileSyn1, tmpFileC, tmpFileH, configuration);

        // we read frequencies from frequencies.txt, however it's possible that we might not have this file
        ZipEntry frequencies = zipFile.getEntry("frequencies.txt");
        if (frequencies != null) {
            stream = zipFile.getInputStream(frequencies);
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    String[] split = line.split(" ");
                    VocabWord word = w2v.getVocab().tokenFor(decodeB64(split[0]));
                    word.setElementFrequency((long) Double.parseDouble(split[1]));
                    word.setSequencesCount((long) Double.parseDouble(split[2]));
                }
            }
        }

        ZipEntry zsyn1Neg = zipFile.getEntry("syn1Neg.txt");
        if (zsyn1Neg != null) {
            stream = zipFile.getInputStream(zsyn1Neg);
            try (InputStreamReader isr = new InputStreamReader(stream);
                    BufferedReader reader = new BufferedReader(isr)) {
                String line = null;
                List<INDArray> rows = new ArrayList<>();
                while ((line = reader.readLine()) != null) {
                    String[] split = line.split(" ");
                    double array[] = new double[split.length];
                    for (int i = 0; i < split.length; i++) {
                        array[i] = Double.parseDouble(split[i]);
                    }
                    rows.add(Nd4j.create(array));
                }

                // it's possible to have full model without syn1Neg
                if (rows.size() > 0) {
                    INDArray syn1Neg = Nd4j.vstack(rows);
                    ((InMemoryLookupTable) w2v.getLookupTable()).setSyn1Neg(syn1Neg);
                }
            }
        }

        return w2v;
    } finally {
        if (originalPeriodic)
            Nd4j.getMemoryManager().togglePeriodicGc(true);

        Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
    }
}