Example usage for java.nio.file StandardCopyOption REPLACE_EXISTING

List of usage examples for java.nio.file StandardCopyOption REPLACE_EXISTING

Introduction

In this page you can find the example usage for java.nio.file StandardCopyOption REPLACE_EXISTING.

Prototype

StandardCopyOption REPLACE_EXISTING

To view the source code for java.nio.file StandardCopyOption REPLACE_EXISTING, click the Source Link below.

Click Source Link

Document

Replace an existing file if it exists.

Usage

From source file:com.heliosdecompiler.bootstrapper.Bootstrapper.java

/**
 * Backs up the current Helios implementation jar, then walks the CI build history
 * forward, applying one vcdiff delta patch per build until the local jar reaches the
 * latest stable build. On success the bootstrapper relaunches itself; on abort the
 * backup is restored. Always terminates the JVM via System.exit(0) unless an
 * exception is thrown first.
 *
 * @throws IOException           if the backup cannot be written, the CI server cannot
 *                               be reached, or the backup cannot be restored after an abort
 * @throws VcdiffDecodeException if a downloaded delta patch fails to apply
 */
private static void forceUpdate() throws IOException, VcdiffDecodeException {
    File backupFile = new File(DATA_DIR, "helios.jar.bak");
    try {
        Files.copy(IMPL_FILE.toPath(), backupFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
    } catch (IOException exception) {
        // We're going to wrap it so end users know what went wrong
        throw new IOException(String.format("Could not back up Helios implementation (%s %s, %s %s)",
                IMPL_FILE.canRead(), IMPL_FILE.canWrite(), backupFile.canRead(), backupFile.canWrite()),
                exception);
    }
    URL latestVersion = new URL("https://ci.samczsun.com/job/Helios/lastStableBuild/buildNumber");
    HttpURLConnection connection = (HttpURLConnection) latestVersion.openConnection();
    if (connection.getResponseCode() == 200) {
        boolean aborted = false;

        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        copy(connection.getInputStream(), outputStream);
        String version = new String(outputStream.toByteArray(), "UTF-8");
        System.out.println("Latest version: " + version);
        int intVersion = Integer.parseInt(version);

        loop: while (true) {
            int buildNumber = loadHelios().buildNumber;
            int oldBuildNumber = buildNumber;
            System.out.println("Current Helios version is " + buildNumber);

            if (buildNumber < intVersion) {
                // NOTE(review): `<=` probes one build PAST the latest stable build when no
                // patch applies; the resulting non-200 response is what aborts the loop.
                // Tightening it to `<` would risk an infinite outer loop — confirm before changing.
                while (buildNumber <= intVersion) {
                    buildNumber++;
                    URL status = new URL("https://ci.samczsun.com/job/Helios/" + buildNumber + "/api/json");
                    HttpURLConnection con = (HttpURLConnection) status.openConnection();
                    if (con.getResponseCode() == 200) {
                        JsonObject object = Json.parse(new InputStreamReader(con.getInputStream())).asObject();
                        if (object.get("result").asString().equals("SUCCESS")) {
                            JsonArray artifacts = object.get("artifacts").asArray();
                            for (JsonValue value : artifacts.values()) {
                                JsonObject artifact = value.asObject();
                                String name = artifact.get("fileName").asString();
                                // A non-matching implementation version means this bootstrapper
                                // cannot patch any further — bail out before touching the jar.
                                if (name.contains("helios-") && !name.contains(IMPLEMENTATION_VERSION)) {
                                    JOptionPane.showMessageDialog(null,
                                            "Bootstrapper is out of date. Patching cannot continue");
                                    aborted = true;
                                    break loop;
                                }
                            }
                            URL url = new URL("https://ci.samczsun.com/job/Helios/" + buildNumber
                                    + "/artifact/target/delta.patch");
                            con = (HttpURLConnection) url.openConnection();
                            if (con.getResponseCode() == 200) {
                                File dest = new File(DATA_DIR, "delta.patch");
                                ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                                copy(con.getInputStream(), byteArrayOutputStream);
                                // try-with-resources so the stream is closed even if write() throws
                                // (the original leaked the FileOutputStream on that path)
                                try (FileOutputStream fileOutputStream = new FileOutputStream(dest)) {
                                    fileOutputStream.write(byteArrayOutputStream.toByteArray());
                                }
                                File cur = IMPL_FILE;
                                File old = new File(IMPL_FILE.getAbsolutePath() + "." + oldBuildNumber);
                                if (cur.renameTo(old)) {
                                    VcdiffDecoder.decode(old, dest, cur);
                                    old.delete();
                                    dest.delete();
                                    // Re-read the now-patched build number and keep patching forward
                                    continue loop;
                                } else {
                                    throw new IllegalArgumentException("Could not rename");
                                }
                            }
                        }
                    } else {
                        JOptionPane.showMessageDialog(null,
                                "Server returned response code " + con.getResponseCode() + " "
                                        + con.getResponseMessage() + "\nAborting patch process",
                                null, JOptionPane.INFORMATION_MESSAGE);
                        aborted = true;
                        break loop;
                    }
                }
            } else {
                break;
            }
        }

        if (!aborted) {
            int buildNumber = loadHelios().buildNumber;
            System.out.println("Running Helios version " + buildNumber);
            JOptionPane.showMessageDialog(null, "Updated Helios to version " + buildNumber + "!");
            Runtime.getRuntime().exec(new String[] { "java", "-jar", BOOTSTRAPPER_FILE.getAbsolutePath() });
        } else {
            try {
                Files.copy(backupFile.toPath(), IMPL_FILE.toPath(), StandardCopyOption.REPLACE_EXISTING);
            } catch (IOException exception) {
                // We're going to wrap it so end users know what went wrong
                throw new IOException("Critical Error! Could not restore Helios implementation to original copy. "
                        + "Try relaunching the Bootstrapper. If that doesn't work open a GitHub issue with details",
                        exception);
            }
        }
        System.exit(0);
    } else {
        throw new IOException(connection.getResponseCode() + ": " + connection.getResponseMessage());
    }
}

From source file:org.eclipse.winery.repository.importing.CSARImporter.java

/**
 * Reads the CSAR from the given inputstream.
 *
 * The archive is first extracted to a temporary directory because the .definitions file
 * does not necessarily have to be the first entry in the archive. The temporary directory
 * is always deleted afterwards.
 *
 * @param in the inputstream to read from
 * @param errors the list of errors during the import. Has to be non-null
 * @param overwrite if true: contents of the repo are overwritten
 * @param asyncWPDParsing if true: WPD parsing happens asynchronously
 *
 * @throws InvalidCSARException if the CSAR is invalid
 * @throws IOException if an entry tries to escape the extraction directory ("zip slip")
 *                     or extraction itself fails
 */
public void readCSAR(InputStream in, List<String> errors, boolean overwrite, final boolean asyncWPDParsing)
        throws IOException {
    // we have to extract the file to a temporary directory as
    // the .definitions file does not necessarily have to be the first entry in the archive
    Path csarDir = Files.createTempDirectory("winery");

    try (ZipInputStream zis = new ZipInputStream(in)) {
        ZipEntry entry;
        while ((entry = zis.getNextEntry()) != null) {
            if (!entry.isDirectory()) {
                // Guard against "zip slip": a crafted entry name such as ../../evil
                // must not be able to write outside the temporary extraction directory.
                Path targetPath = csarDir.resolve(entry.getName()).normalize();
                if (!targetPath.startsWith(csarDir)) {
                    throw new IOException("Zip entry escapes extraction directory: " + entry.getName());
                }
                Files.createDirectories(targetPath.getParent());
                Files.copy(zis, targetPath, StandardCopyOption.REPLACE_EXISTING);
            }
        }

        this.importFromDir(csarDir, errors, overwrite, asyncWPDParsing);
        this.importCustomFile(csarDir);
    } catch (Exception e) {
        CSARImporter.logger.debug("Could not import CSAR", e);
        throw e;
    } finally {
        // cleanup: delete all contents of the temporary directory
        FileUtils.forceDelete(csarDir);
    }
}

From source file:com.github.zhanhb.ckfinder.connector.utils.ImageUtils.java

/**
 * Writes the unchanged image file to disk by copying the source file to the
 * destination byte-for-byte, overwriting any existing file at the destination.
 *
 * @param sourceFile - file to read from
 * @param destFile - file to write to; replaced if it already exists
 * @throws IOException when IO Exception occurs.
 */
private static void writeUntouchedImage(Path sourceFile, Path destFile) throws IOException {
    Files.copy(sourceFile, destFile, StandardCopyOption.REPLACE_EXISTING);
}

From source file:org.ballerinalang.containers.docker.impl.DefaultBallerinaDockerClient.java

/**
 * Creates a docker image containing the given Ballerina package archives.
 *
 * Validates that each archive exists and has the extension matching its kind
 * ("bsz" for services, "bmz" for main packages), copies them into a temporary
 * docker build context, builds the image, and cleans the context up again.
 *
 * @param packageName the Ballerina package name, used to derive the image name
 * @param dockerEnv the docker host environment to build against
 * @param bPackagePaths paths of the Ballerina package archives to include
 * @param isService true for service ("bsz") packages, false for main ("bmz") packages
 * @param imageName explicit image name, or null to derive one from packageName
 * @param imageVersion the image version tag
 * @return the created image identifier as reported by the docker host
 * @throws BallerinaDockerClientException if packages are missing or of the wrong type
 * @throws IOException if copying the packages into the docker context fails
 * @throws InterruptedException if the docker build is interrupted
 */
private String createImageFromPackage(String packageName, String dockerEnv, List<Path> bPackagePaths,
        boolean isService, String imageName, String imageVersion)
        throws BallerinaDockerClientException, IOException, InterruptedException {

    if (bPackagePaths == null || bPackagePaths.size() == 0) {
        throw new BallerinaDockerClientException("Invalid Ballerina package(s)");
    }

    for (Path bPackage : bPackagePaths) {
        if (!Files.exists(bPackage)) {
            throw new BallerinaDockerClientException(
                    "Cannot find Ballerina Package file: " + bPackage.toString());
        }

        if (isService && !FilenameUtils.getExtension(bPackage.toString()).equalsIgnoreCase("bsz")) {
            throw new BallerinaDockerClientException(
                    "Invalid Ballerina package archive. " + "Service packages should be of \"bsz\" type.");
        }

        if (!isService && !FilenameUtils.getExtension(bPackage.toString()).equalsIgnoreCase("bmz")) {
            throw new BallerinaDockerClientException(
                    "Invalid Ballerina package archive. " + "Main packages should be of \"bmz\" type.");
        }
    }

    imageName = getImageName(packageName, imageName, imageVersion);

    // 1. Create a tmp docker context
    Path tmpDir = prepTempDockerfileContext();

    // 2. Copy Ballerina packages into the context
    for (Path bPackage : bPackagePaths) {
        Files.copy(bPackage, Paths.get(
                tmpDir.toString() + File.separator + PATH_FILES + File.separator + bPackage.toFile().getName()),
                StandardCopyOption.REPLACE_EXISTING);
    }

    // 3. Create a docker image from the temp context.
    // HH:mm:ss (24-hour, zero-padded) — the previous pattern "h:m:ss" produced
    // ambiguous unpadded 12-hour timestamps with no am/pm marker.
    String timestamp = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXX").format(new Date());
    String buildArgs = "{\"" + ENV_SVC_MODE + "\":\"" + String.valueOf(isService) + "\", " + "\"BUILD_DATE\":\""
            + timestamp + "\"}";
    buildImage(dockerEnv, imageName, tmpDir, buildArgs);

    // 4. Cleanup
    cleanupTempDockerfileContext(tmpDir);

    return getImage(imageName, dockerEnv);
}

From source file:ubic.basecode.ontology.OntologyLoader.java

/**
 * Load an ontology into memory. Use this type of model when fast access is critical and memory is available.
 * If load from URL fails, attempt to load from disk cache under @cacheName.
 *
 * @param  url       location of the ontology document to fetch
 * @param  spec      e.g. OWL_MEM_TRANS_INF
 * @param  cacheName unique name of this ontology, will be used to load from disk in case of failed url connection;
 *                   pass null to skip all disk caching
 * @return the loaded model; asserted non-empty before returning
 */
public static OntModel loadMemoryModel(String url, OntModelSpec spec, String cacheName) {
    StopWatch timer = new StopWatch();
    timer.start();
    OntModel model = getMemoryModel(url, spec);

    URLConnection urlc = null;
    int tries = 0;
    // Retry the connection up to MAX_CONNECTION_TRIES times before giving up and
    // falling back to the disk cache below.
    // NOTE(review): connections from failed attempts are never closed/disconnected here.
    while (tries < MAX_CONNECTION_TRIES) {
        try {
            urlc = new URL(url).openConnection();
            // help ensure mis-configured web servers aren't causing trouble.
            urlc.setRequestProperty("Accept", "application/rdf+xml");

            try {
                HttpURLConnection c = (HttpURLConnection) urlc;
                c.setInstanceFollowRedirects(true);
            } catch (ClassCastException e) {
                // not via http, using a FileURLConnection.
            }

            if (tries > 0) {
                log.info("Retrying connecting to " + url + " [" + tries + "/" + MAX_CONNECTION_TRIES
                        + " of max tries");
            } else {
                log.info("Connecting to " + url);
            }

            urlc.connect(); // Will error here on bad URL

            // Follow one level of redirect manually via the Location header.
            if (urlc instanceof HttpURLConnection) {
                String newUrl = urlc.getHeaderField("Location");

                if (StringUtils.isNotBlank(newUrl)) {
                    log.info("Redirect to " + newUrl);
                    urlc = new URL(newUrl).openConnection();
                    // help ensure mis-configured web servers aren't causing trouble.
                    urlc.setRequestProperty("Accept", "application/rdf+xml");
                    urlc.connect();
                }
            }

            break;
        } catch (IOException e) {
            // try to recover.
            log.error(e + " retrying?");
            tries++;
        }
    }

    if (urlc != null) {
        try (InputStream in = urlc.getInputStream();) {
            Reader reader;
            if (cacheName != null) {
                // write tmp to disk, so the stream is both cached and read into the model
                File tempFile = getTmpDiskCachePath(cacheName);
                if (tempFile == null) {
                    reader = new InputStreamReader(in);
                } else {
                    tempFile.getParentFile().mkdirs();
                    Files.copy(in, tempFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
                    // NOTE(review): FileReader uses the platform default charset — confirm
                    // the ontology documents are always in that encoding.
                    reader = new FileReader(tempFile);
                }

            } else {
                // Skip the cache
                reader = new InputStreamReader(in);
            }

            assert reader != null;
            try (BufferedReader buf = new BufferedReader(reader);) {
                model.read(buf, url);
            }

            log.info("Load model: " + timer.getTime() + "ms");
        } catch (IOException e) {
            // Swallowed deliberately: an empty model triggers the disk-cache fallback below.
            log.error(e.getMessage(), e);
        }
    }

    if (cacheName != null) {

        File f = getDiskCachePath(cacheName);
        File tempFile = getTmpDiskCachePath(cacheName);
        File oldFile = getOldDiskCachePath(cacheName);

        if (model.isEmpty()) {
            // Attempt to load from disk cache

            if (f == null) {
                throw new RuntimeException(
                        "Ontology cache directory required to load from disk: ontology.cache.dir");
            }

            if (f.exists() && !f.isDirectory()) {
                try (BufferedReader buf = new BufferedReader(new FileReader(f));) {
                    model.read(buf, url);
                    // We successfully loaded the cached ontology. Copy the loaded ontology to oldFile
                    // so that we don't recreate indices during initialization based on a false change in
                    // the ontology.
                    Files.copy(f.toPath(), oldFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
                    log.info("Load model from disk: " + timer.getTime() + "ms");
                } catch (IOException e) {
                    log.error(e.getMessage(), e);
                    throw new RuntimeException(
                            "Ontology failed load from URL (" + url + ") and disk cache: " + cacheName);
                }
            } else {
                throw new RuntimeException("Ontology failed load from URL (" + url
                        + ") and disk cache does not exist: " + cacheName);
            }

        } else {
            // Model was successfully loaded into memory from URL with given cacheName
            // Save cache to disk (rename temp file)
            log.info("Caching ontology to disk: " + cacheName);
            if (f != null) {
                try {
                    // Need to compare previous to current so instead of overwriting we'll move the old file
                    f.createNewFile();
                    Files.move(f.toPath(), oldFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
                    // NOTE(review): tempFile can be null here when getTmpDiskCachePath returns
                    // null (see the null check in the download branch above) — this would NPE.
                    // Confirm getTmpDiskCachePath and getDiskCachePath can't disagree on nullness.
                    Files.move(tempFile.toPath(), f.toPath(), StandardCopyOption.REPLACE_EXISTING);
                } catch (IOException e) {
                    log.error(e.getMessage(), e);
                }
            } else {
                log.warn("Ontology cache directory required to save to disk: ontology.cache.dir");
            }
        }

    }

    assert !model.isEmpty();

    return model;
}

From source file:com.kegare.caveworld.util.CaveUtils.java

/**
 * Archives the contents of {@code dir} into a newly created zip file at {@code dest},
 * placing everything under a single top-level folder named after the directory.
 *
 * @param dir the directory whose contents are archived (recursively)
 * @param dest the zip file to create
 * @return true on success; false if {@code dir} is unreadable/not a directory,
 *         the destination URI is invalid, or writing the archive fails
 */
public static boolean archiveDirZip(final File dir, final File dest) {
    final Path dirPath = dir.toPath();
    final String parent = dir.getName();
    // stdlib map instead of Guava's Maps.newHashMap — no behavior change
    Map<String, String> env = new java.util.HashMap<String, String>();
    env.put("create", "true");
    URI uri = dest.toURI();

    try {
        // jar:file:/... URI addresses the zip through the JDK zip filesystem provider
        uri = new URI("jar:" + uri.getScheme(), uri.getPath(), null);
    } catch (Exception e) {
        return false;
    }

    // listFiles() returns null when dir is not a directory or cannot be read;
    // the original would have thrown a NullPointerException in that case
    final File[] children = dir.listFiles();
    if (children == null) {
        return false;
    }

    try (FileSystem zipfs = FileSystems.newFileSystem(uri, env)) {
        Files.createDirectory(zipfs.getPath(parent));

        for (File file : children) {
            if (file.isDirectory()) {
                // Recursively mirror the subtree into the zip, keeping paths relative to dir
                Files.walkFileTree(file.toPath(), new SimpleFileVisitor<Path>() {
                    @Override
                    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                        Files.copy(file, zipfs.getPath(parent, dirPath.relativize(file).toString()),
                                StandardCopyOption.REPLACE_EXISTING);

                        return FileVisitResult.CONTINUE;
                    }

                    @Override
                    public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
                            throws IOException {
                        Files.createDirectory(zipfs.getPath(parent, dirPath.relativize(dir).toString()));

                        return FileVisitResult.CONTINUE;
                    }
                });
            } else {
                Files.copy(file.toPath(), zipfs.getPath(parent, file.getName()),
                        StandardCopyOption.REPLACE_EXISTING);
            }
        }

        return true;
    } catch (Exception e) {
        e.printStackTrace();

        return false;
    }
}

From source file:com.htmlhifive.visualeditor.persister.LocalFileContentsPersister.java

/**
 * Moves the file identified by the metadata's absolute path into the destination
 * directory, keeping its file name.
 *
 * @param metadata source entry; its absolute path locates the file to move
 * @param dstDir destination directory (repository path form)
 * @param ctx the url-tree context (unused here)
 * @throws BadContentException if either path cannot be resolved to a file object
 */
@Override
public void move(UrlTreeMetaData<InputStream> metadata, String dstDir, UrlTreeContext ctx)
        throws BadContentException {

    String srcPathName = metadata.getAbsolutePath();
    Path srcPath = this.generateFileObj(srcPathName);
    Path dstPath = this.generateFileObj(dstDir);

    logger.debug("move: " + srcPath.toAbsolutePath() + " to " + dstPath.toAbsolutePath());

    try {
        // NOTE(review): per Files.move, when ATOMIC_MOVE is present other options such as
        // REPLACE_EXISTING are ignored, and AtomicMoveNotSupportedException is thrown for
        // cross-store moves — confirm both are acceptable for this persister.
        Files.move(srcPath, dstPath.resolve(srcPath.getFileName()), StandardCopyOption.ATOMIC_MOVE,
                StandardCopyOption.REPLACE_EXISTING);
    } catch (IOException e) {
        // was "cannot copy file" — this operation is a move, not a copy
        throw new GenericResourceException("cannot move file", e);
    }
}

From source file:codes.thischwa.c5c.DispatcherPUT.java

/**
 * Validates an uploaded file: enforces the configured size limit and, for image
 * extensions, strips EXIF data, verifies the content really is an image, and
 * resizes it in place if it exceeds the configured maximum dimensions.
 *
 * @param tempPath temporary file holding the uploaded content; may be rewritten in place
 * @param sanitizedName sanitized file name of the upload (used for extension checks)
 * @param fileSize size of the upload in bytes
 * @param conf the filemanager configuration (size limit, image settings)
 * @throws C5CException if the upload violates the size, type, or images-only constraints
 * @throws IOException if reading or rewriting the temporary file fails
 */
private void imageProcessingAndSizeCheck(Path tempPath, String sanitizedName, long fileSize,
        FilemanagerConfig conf) throws C5CException, IOException {
    Integer maxSize = (conf.getUpload().isFileSizeLimitAuto()) ? PropertiesLoader.getMaxUploadSize()
            : conf.getUpload().getFileSizeLimit();
    if (fileSize > maxSize.longValue() * 1024 * 1024)
        throw new FilemanagerException(FilemanagerAction.UPLOAD,
                FilemanagerException.Key.UploadFilesSmallerThan, String.valueOf(maxSize));
    String extension = FilenameUtils.getExtension(sanitizedName);

    // non-image extensions need no further processing
    boolean isImageExt = checkImageExtension(sanitizedName, conf.getUpload().isImagesOnly(),
            conf.getImages().getExtensions());
    if (!isImageExt)
        return;

    // remove exif data
    Path woExifPath = UserObjectProxy.removeExif(tempPath);
    if (!tempPath.equals(woExifPath)) {
        Files.move(woExifPath, tempPath, StandardCopyOption.REPLACE_EXISTING);
    }

    // check if the file is really an image; try-with-resources closes the stream even
    // when the exception below is thrown (the original leaked it on that path)
    Dimension dim;
    try (InputStream in = new BufferedInputStream(Files.newInputStream(tempPath, StandardOpenOption.READ))) {
        dim = getDimension(in);
    }
    // isImageExt is always true past the early return above, so only dim matters here
    if (dim == null)
        throw new FilemanagerException(FilemanagerAction.UPLOAD, FilemanagerException.Key.UploadImagesOnly);

    // check if resize is enabled and fix it, if necessary
    Resize resize = conf.getImages().getResize();
    if (resize.isEnabled()
            && (dim.getHeight() > resize.getMaxHeight() || dim.getWidth() > resize.getMaxWidth())) {
        logger.debug("process resize");
        StreamContent sc = connector.resize(new BufferedInputStream(Files.newInputStream(tempPath)), extension,
                new Dimension(resize.getMaxWidth(), resize.getMaxHeight()));
        try {
            Files.copy(sc.getInputStream(), tempPath, StandardCopyOption.REPLACE_EXISTING);
        } finally {
            // close even if the copy fails
            IOUtils.closeQuietly(sc.getInputStream());
        }
    }
}

From source file:com.ibm.ecm.extension.aspera.AsperaPlugin.java

/**
 * Copies a bundled plugin resource from the classpath to the resources root on disk,
 * replacing any existing copy.
 *
 * @param subFolder classpath sub-folder under /aspera to read from ("" for the root)
 * @param resourceName name of the resource file
 * @param toSubFolder sub-folder under the resources root to copy into ("" for the root)
 * @return the path the resource was copied to
 * @throws AsperaPluginException if the resource is missing from the classpath or cannot be copied
 */
private Path copyResource(final String subFolder, final String resourceName, final String toSubFolder)
        throws AsperaPluginException {
    final Path path = toSubFolder.isEmpty() ? Paths.get(resourcesRoot, resourceName)
            : Paths.get(resourcesRoot, toSubFolder, resourceName);
    final String resourcePath = "/aspera/" + subFolder + (subFolder.isEmpty() ? "" : "/") + resourceName;
    // try-with-resources closes the stream (the original leaked it); a missing resource
    // now fails with a descriptive exception instead of an NPE inside Files.copy
    try (InputStream resource = this.getClass().getResourceAsStream(resourcePath)) {
        if (resource == null) {
            throw new AsperaPluginException("Plugin resource not found on classpath: " + resourcePath);
        }
        Files.copy(resource, path, StandardCopyOption.REPLACE_EXISTING);
    } catch (final IOException e) {
        throw new AsperaPluginException("Failed to copy the plugin resource file: " + path, e);
    }

    return path;
}

From source file:org.apache.tika.eval.TikaEvalCLI.java

/**
 * Handles the "compare" sub-command: validates and normalizes the command-line
 * arguments, supplies a default batch-config file when none was given, and hands
 * the final argument list off to tika-batch.
 *
 * @param subsetArgs the command-line arguments following the sub-command name
 * @throws Exception if argument parsing or the batch process fails
 */
private void handleCompare(String[] subsetArgs) throws Exception {
    // typed, mutable copy (the original used a raw ArrayList, causing an unchecked warning)
    List<String> argList = new ArrayList<>(Arrays.asList(subsetArgs));

    boolean containsBC = false;
    String inputDir = null;
    String extractsA = null;
    String alterExtract = null;
    //confirm there's a batch-config file
    for (int i = 0; i < argList.size(); i++) {
        String arg = argList.get(i);
        if (arg.equals("-bc")) {
            containsBC = true;
        } else if (arg.equals("-inputDir")) {
            if (i + 1 >= argList.size()) {
                System.err.println("Must specify directory after -inputDir");
                ExtractComparer.USAGE();
                return;
            }
            inputDir = argList.get(i + 1);
            i++;
        } else if (arg.equals("-extractsA")) {
            if (i + 1 >= argList.size()) {
                System.err.println("Must specify directory after -extractsA");
                ExtractComparer.USAGE();
                return;
            }
            extractsA = argList.get(i + 1);
            i++;
        } else if (arg.equals("-alterExtract")) {
            if (i + 1 >= argList.size()) {
                System.err.println("Must specify type 'as_is', 'first_only' or "
                        + "'concatenate_content' after -alterExtract");
                ExtractComparer.USAGE();
                return;
            }
            alterExtract = argList.get(i + 1);
            i++;
        }
    }
    if (alterExtract != null && !alterExtract.equals("as_is") && !alterExtract.equals("concatenate_content")
            && !alterExtract.equals("first_only")) {
        System.out.println("Sorry, I don't understand:" + alterExtract
                + ". The values must be one of: as_is, first_only, concatenate_content");
        ExtractComparer.USAGE();
        return;
    }

    //need to specify each in the commandline that goes into tika-batch
    //if only extracts is passed to tika-batch,
    //the crawler will see no inputDir and start crawling "input".
    //if the user doesn't specify inputDir, crawl extractsA
    if (inputDir == null && extractsA != null) {
        argList.add("-inputDir");
        argList.add(extractsA);
    }

    Path tmpBCConfig = null;
    try {
        // Extract the bundled default batch config to a temp file when the user gave no -bc
        tmpBCConfig = Files.createTempFile("tika-eval", ".xml");
        if (!containsBC) {
            Files.copy(this.getClass().getResourceAsStream("/tika-eval-comparison-config.xml"), tmpBCConfig,
                    StandardCopyOption.REPLACE_EXISTING);
            argList.add("-bc");
            argList.add(tmpBCConfig.toAbsolutePath().toString());

        }
        String[] updatedArgs = argList.toArray(new String[argList.size()]);
        DefaultParser defaultCLIParser = new DefaultParser();
        try {
            // Pre-validate options so conflicts fail fast with a usage message
            CommandLine commandLine = defaultCLIParser.parse(ExtractComparer.OPTIONS, updatedArgs);
            if (commandLine.hasOption("db") && commandLine.hasOption("jdbc")) {
                System.out.println("Please specify either the default -db or the full -jdbc, not both");
                ExtractComparer.USAGE();
                return;
            }
        } catch (ParseException e) {
            System.out.println(e.getMessage() + "\n");
            ExtractComparer.USAGE();
            return;
        }

        FSBatchProcessCLI.main(updatedArgs);
    } finally {
        // always remove the temp config, even when tika-batch throws
        if (tmpBCConfig != null && Files.isRegularFile(tmpBCConfig)) {
            Files.delete(tmpBCConfig);
        }
    }
}