Example usage for java.nio.file Files deleteIfExists

Introduction

On this page you can find example usage of java.nio.file Files.deleteIfExists, collected from open-source projects.

Prototype

public static boolean deleteIfExists(Path path) throws IOException 

Document

Deletes a file if it exists.
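
Unlike Files.delete(Path), this method does not throw NoSuchFileException when the target is missing: it simply returns false, and returns true when a file was actually removed. It can still throw IOException for other failures, for example DirectoryNotEmptyException when the path is a non-empty directory. A minimal, self-contained sketch of that contract:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class DeleteIfExistsDemo {
    public static void main(String[] args) throws IOException {
        Path tempFile = Files.createTempFile("demo", ".tmp");

        // First call: the file exists, so it is removed and true is returned.
        System.out.println(Files.deleteIfExists(tempFile)); // true

        // Second call: the file is already gone; no exception, just false.
        System.out.println(Files.deleteIfExists(tempFile)); // false
    }
}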

Usage

From source file:org.mitre.mpf.wfm.camel.JobCompleteProcessorImpl.java

private void destroy(long jobId) throws WfmProcessingException {
    TransientJob transientJob = redis.getJob(jobId);
    for (TransientMedia transientMedia : transientJob.getMedia()) {
        if (transientMedia.getUriScheme().isRemote() && transientMedia.getLocalPath() != null) {
            try {
                Files.deleteIfExists(Paths.get(transientMedia.getLocalPath()));
            } catch (Exception exception) {
                log.warn(
                        "[{}|*|*] Failed to delete locally cached file '{}' due to an exception. This file must be manually deleted.",
                        transientJob.getId(), transientMedia.getLocalPath(), exception);
            }
        }
    }
    redis.clearJob(jobId);
}

From source file:com.webtide.jetty.load.generator.jenkins.LoadGeneratorBuilder.java

protected void parseTimeValues(FilePath workspace, Path responseTimeResultFilePath,
        List<Resource.NodeListener> nodeListeners) throws Exception {
    Path responseTimeResultFile = Files.createTempFile("loadgenerator_result_responsetime", ".csv");

    workspace.child(responseTimeResultFilePath.toString())
            .copyTo(Files.newOutputStream(responseTimeResultFile));

    CSVParser csvParser = new CSVParser(Files.newBufferedReader(responseTimeResultFile),
            CSVFormat.newFormat('|'));

    csvParser.forEach(strings -> {
        Values values = new Values() //
                .eventTimestamp(Long.parseLong(strings.get(0))) //
                .method(strings.get(1)) //
                .path(strings.get(2)) //
                .status(Integer.parseInt(strings.get(3))) //
                .size(Long.parseLong(strings.get(4))) //
                .responseTime(Long.parseLong(strings.get(5))) //
                .latencyTime(Long.parseLong(strings.get(6)));

        for (Resource.NodeListener listener : nodeListeners) {
            listener.onResourceNode(values.getInfo());
        }
    });

    Files.deleteIfExists(responseTimeResultFile);
}
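
If parsing throws in the example above, the deleteIfExists call at the end is never reached and the temporary file leaks. A common variant, sketched here under the same assumptions, performs the cleanup in a finally block so it runs on both paths:

Path tmp = Files.createTempFile("loadgenerator_result_responsetime", ".csv");
try {
    // ... copy the workspace file into tmp and parse it, as above ...
} finally {
    // Runs on success and failure alike; a missing file is not an error here.
    Files.deleteIfExists(tmp);
}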

From source file:org.digidoc4j.main.DigiDoc4JTest.java

@Test
public void removeFileFromContainer() throws Exception {
    exit.expectSystemExitWithStatus(0);

    Container container = ContainerBuilder.aContainer(DDOC_CONTAINER_TYPE).build();
    container.addDataFile("testFiles/test.txt", "text/plain");
    Files.deleteIfExists(Paths.get("test1.ddoc"));
    container.saveAsFile("test1.ddoc");

    String[] params = new String[] { "-in", "test1.ddoc", "-remove", "test.txt" };
    DigiDoc4J.main(params);
}

From source file:amulet.appbuilder.AppBuilder.java

private void cleanUp(String filename) {
    try {
        FileSystem fs = FileSystems.getDefault();
        Path interRep = fs.getPath(filename + ".ir");
        Path resRep = fs.getPath(filename + ".res");
        Path staticRep = fs.getPath(filename + ".stat");
        Path translateRep = fs.getPath(filename + ".trans");
        Files.deleteIfExists(interRep);
        Files.deleteIfExists(resRep);
        Files.deleteIfExists(staticRep);
        Files.deleteIfExists(translateRep);
    } catch (Exception exp) {
        System.err.println("Unexpected exception:" + exp.getMessage());
        exp.printStackTrace();
    }
}

From source file:org.silverpeas.components.blog.control.BlogSessionController.java

/**
 * Remove the current wallpaper file.
 */
public void removeWallPaperFile() {
    String path = FileRepositoryManager.getAbsolutePath(this.getComponentId());
    File banner = new File(path, BANNER_GIF);
    try {
        Files.deleteIfExists(banner.toPath());
    } catch (IOException e) {
        SilverLogger.getLogger(this).warn(THE_WALLPAPER_DELETION_FAILED, BANNER_GIF);
    }

    banner = new File(path, BANNER_JPG);
    try {
        Files.deleteIfExists(banner.toPath());
    } catch (IOException e) {
        SilverLogger.getLogger(this).warn(THE_WALLPAPER_DELETION_FAILED, BANNER_JPG);
    }

    banner = new File(path, BANNER_PNG);
    try {
        Files.deleteIfExists(banner.toPath());
    } catch (IOException e) {
        SilverLogger.getLogger(this).warn(THE_WALLPAPER_DELETION_FAILED, BANNER_PNG);
    }

    this.wallPaper = null;
}
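
The three near-identical try/catch blocks above differ only in the banner file name. A hypothetical helper (deleteBannerQuietly is not part of the original class) would let the method shrink to a loop over the same BANNER_* constants:

private void deleteBannerQuietly(String path, String bannerName) {
    try {
        Files.deleteIfExists(new File(path, bannerName).toPath());
    } catch (IOException e) {
        SilverLogger.getLogger(this).warn(THE_WALLPAPER_DELETION_FAILED, bannerName);
    }
}

// removeWallPaperFile() then becomes:
String path = FileRepositoryManager.getAbsolutePath(this.getComponentId());
for (String banner : new String[] { BANNER_GIF, BANNER_JPG, BANNER_PNG }) {
    deleteBannerQuietly(path, banner);
}
this.wallPaper = null;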

From source file:com.vmware.photon.controller.deployer.dcp.task.CreateIsoTaskService.java

private void createIsoFile(final State currentState, String configDirectoryName) throws Throwable {

    final File isoFile = new File("/tmp", CONFIG_FILENAME + "-" + currentState.vmId + ".iso");
    DeployerContext deployerContext = HostUtils.getDeployerContext(this);
    final File userDataConfigFile = new File(deployerContext.getScriptDirectory(),
            CONFIG_FILENAME + "-user-data-" + currentState.vmId + ".yml");
    final File metaDataConfigFile = new File(deployerContext.getScriptDirectory(),
            CONFIG_FILENAME + "-meta-data-" + currentState.vmId + ".yml");
    File scriptLogFile = new File(deployerContext.getScriptDirectory(),
            SCRIPT_NAME + "-" + currentState.vmId + ".log");

    String userDataConfigFilename = createFile(currentState.userDataTemplate, userDataConfigFile);
    String metaDataConfigFilename = createFile(currentState.metaDataTemplate, metaDataConfigFile);

    List<String> command = new ArrayList<>();
    command.add("./" + SCRIPT_NAME);
    command.add(isoFile.getAbsolutePath());
    command.add(userDataConfigFilename);
    command.add(metaDataConfigFilename);
    if (configDirectoryName != null) {
        command.add(configDirectoryName);
    }

    ScriptRunner scriptRunner = new ScriptRunner.Builder(command, deployerContext.getScriptTimeoutSec())
            .directory(deployerContext.getScriptDirectory())
            .redirectOutput(ProcessBuilder.Redirect.to(scriptLogFile)).build();

    ListenableFutureTask<Integer> futureTask = ListenableFutureTask.create(scriptRunner);
    HostUtils.getListeningExecutorService(this).submit(futureTask);

    FutureCallback<Integer> futureCallback = new FutureCallback<Integer>() {
        @Override
        public void onSuccess(@Nullable Integer result) {
            if (null == result) {
                failTask(new NullPointerException(SCRIPT_NAME + " returned null"));
            } else if (0 != result) {
                failTask(new Exception(SCRIPT_NAME + " returned " + result.toString()));
            } else {
                try {
                    Files.deleteIfExists(userDataConfigFile.toPath());
                    Files.deleteIfExists(metaDataConfigFile.toPath());
                    if (configDirectoryName != null) {
                        FileUtils.deleteDirectory(new File(configDirectoryName));
                    }
                    attachAndUploadIso(currentState, isoFile.getAbsolutePath());
                } catch (IOException e) {
                    failTask(e);
                }
            }
        }

        @Override
        public void onFailure(Throwable t) {
            failTask(t);
        }
    };

    Futures.addCallback(futureTask, futureCallback);
}

From source file:org.schedulesdirect.grabber.Grabber.java

private void updateZip(NetworkEpgClient clnt) throws IOException, JSONException, JsonParseException {
    Set<String> completedListings = new HashSet<String>();
    LOG.debug(String.format("Using %d worker threads", globalOpts.getMaxThreads()));
    pool = createThreadPoolExecutor();
    start = System.currentTimeMillis();
    File dest = grabOpts.getTarget();
    cachedSeriesIds = new HashSet<String>();
    boolean rmDest = false;
    if (dest.exists()) {
        ZipEpgClient zipClnt = null;
        try {
            zipClnt = new ZipEpgClient(dest);
            if (!zipClnt.getUserStatus().getLastServerRefresh()
                    .before(clnt.getUserStatus().getLastServerRefresh())) {
                LOG.info(
                        "Current cache file contains latest data from Schedules Direct server; use --force-download to force a new download from server.");
                boolean force = grabOpts.isForce();
                if (!force)
                    return;
                else
                    LOG.warn("Forcing an update of data with the server due to user request!");
            }
        } catch (Exception e) {
            if (grabOpts.isKeep()) {
                LOG.error("Existing cache is invalid, keeping by user request!", e);
                return;
            } else {
                LOG.warn("Existing cache is invalid, deleting it; use --keep-bad-cache to keep existing cache!",
                        e);
                rmDest = true;
            }
        } finally {
            if (zipClnt != null)
                try {
                    zipClnt.close();
                } catch (IOException e) {
                }
            if (rmDest && !dest.delete())
                throw new IOException("Unable to delete " + dest);
        }
    }

    freshZip = !dest.exists();
    try (FileSystem vfs = FileSystems.newFileSystem(new URI(String.format("jar:%s", dest.toURI())),
            Collections.singletonMap("create", "true"))) {
        if (freshZip) {
            Path target = vfs.getPath(ZipEpgClient.ZIP_VER_FILE);
            Files.write(target, Integer.toString(ZipEpgClient.ZIP_VER).getBytes(ZipEpgClient.ZIP_CHARSET));
        }
        ProgramCache progCache = ProgramCache.get(vfs);
        Path lineups = vfs.getPath("lineups.txt");
        Files.deleteIfExists(lineups);
        Path scheds = vfs.getPath("/schedules/");
        if (!Files.isDirectory(scheds))
            Files.createDirectory(scheds);
        Path maps = vfs.getPath("/maps/");
        PathUtils.removeDirectory(maps);
        Files.createDirectory(maps);
        Path progs = vfs.getPath("/programs/");
        if (!Files.isDirectory(progs))
            Files.createDirectory(progs);
        Path logos = vfs.getPath("/logos/");
        if (!Files.isDirectory(logos))
            Files.createDirectory(logos);
        Path md5s = vfs.getPath("/md5s/");
        if (!Files.isDirectory(md5s))
            Files.createDirectory(md5s);
        Path cache = vfs.getPath(LOGO_CACHE);
        if (Files.exists(cache)) {
            String cacheData = new String(Files.readAllBytes(cache), ZipEpgClient.ZIP_CHARSET);
            logoCache = Config.get().getObjectMapper().readValue(cacheData, JSONObject.class);
        } else
            logoCache = new JSONObject();
        Path seriesInfo = vfs.getPath("/seriesInfo/");
        if (!Files.isDirectory(seriesInfo))
            Files.createDirectories(seriesInfo);
        loadSeriesInfoIds(seriesInfo);
        missingSeriesIds = Collections.synchronizedSet(new HashSet<String>());
        loadRetryIds(vfs.getPath(SERIES_INFO_DATA));

        JSONObject resp = Config.get().getObjectMapper().readValue(
                factory.get(DefaultJsonRequest.Action.GET, RestNouns.LINEUPS, clnt.getHash(),
                        clnt.getUserAgent(), globalOpts.getUrl().toString()).submitForJson(null),
                JSONObject.class);
        if (!JsonResponseUtils.isErrorResponse(resp))
            Files.write(lineups, resp.toString(3).getBytes(ZipEpgClient.ZIP_CHARSET));
        else
            LOG.error("Received error response when requesting lineup data!");

        for (Lineup l : clnt.getLineups()) {
            buildStationList();
            JSONObject o = Config.get().getObjectMapper()
                    .readValue(
                            factory.get(DefaultJsonRequest.Action.GET, l.getUri(), clnt.getHash(),
                                    clnt.getUserAgent(), globalOpts.getUrl().toString()).submitForJson(null),
                            JSONObject.class);
            Files.write(vfs.getPath("/maps", ZipEpgClient.scrubFileName(String.format("%s.txt", l.getId()))),
                    o.toString(3).getBytes(ZipEpgClient.ZIP_CHARSET));
            JSONArray stations = o.getJSONArray("stations");
            JSONArray ids = new JSONArray();
            for (int i = 0; i < stations.length(); ++i) {
                JSONObject obj = stations.getJSONObject(i);
                String sid = obj.getString("stationID");
                if (stationList != null && !stationList.contains(sid))
                    LOG.debug(String.format("Skipped %s; not listed in station file", sid));
                else if (completedListings.add(sid)) {
                    ids.put(sid);
                    if (!grabOpts.isNoLogos()) {
                        if (logoCacheInvalid(obj))
                            pool.execute(new LogoTask(obj, vfs, logoCache));
                        else if (LOG.isDebugEnabled())
                            LOG.debug(String.format("Skipped logo for %s; already cached!",
                                    obj.optString("callsign", null)));
                    } else if (!logosWarned) {
                        logosWarned = true;
                        LOG.warn("Logo downloads disabled by user request!");
                    }
                } else
                    LOG.debug(String.format("Skipped %s; already downloaded.", sid));
                //pool.setMaximumPoolSize(5); // Processing these new schedules takes all kinds of memory!
                if (ids.length() == grabOpts.getMaxSchedChunk()) {
                    pool.execute(new ScheduleTask(ids, vfs, clnt, progCache, factory));
                    ids = new JSONArray();
                }
            }
            if (ids.length() > 0)
                pool.execute(new ScheduleTask(ids, vfs, clnt, progCache, factory));
        }
        pool.shutdown();
        try {
            LOG.debug("Waiting for SchedLogoExecutor to terminate...");
            if (pool.awaitTermination(15, TimeUnit.MINUTES))
                LOG.debug("SchedLogoExecutor: Terminated successfully.");
            else {
                failedTask = true;
                LOG.warn(
                        "SchedLogoExecutor: Termination timed out; some tasks probably didn't finish properly!");
            }
        } catch (InterruptedException e) {
            failedTask = true;
            LOG.warn(
                    "SchedLogoExecutor: Termination interrupted); some tasks probably didn't finish properly!");
        }
        Files.write(cache, logoCache.toString(3).getBytes(ZipEpgClient.ZIP_CHARSET),
                StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
        ScheduleTask.commit(vfs);

        pool = createThreadPoolExecutor();
        //pool.setMaximumPoolSize(5); // Again, we've got memory problems
        String[] dirtyPrograms = progCache.getDirtyIds();
        progCache.markAllClean();
        progCache = null;
        LOG.info(String.format("Identified %d program ids requiring an update!", dirtyPrograms.length));
        Collection<String> progIds = new ArrayList<String>();
        for (String progId : dirtyPrograms) {
            progIds.add(progId);
            if (progIds.size() == grabOpts.getMaxProgChunk()) {
                pool.execute(new ProgramTask(progIds, vfs, clnt, factory, missingSeriesIds, "programs", null,
                        false));
                progIds.clear();
            }
        }
        if (progIds.size() > 0)
            pool.execute(
                    new ProgramTask(progIds, vfs, clnt, factory, missingSeriesIds, "programs", null, false));
        pool.shutdown();
        try {
            LOG.debug("Waiting for ProgramExecutor to terminate...");
            if (pool.awaitTermination(15, TimeUnit.MINUTES)) {
                LOG.debug("ProgramExecutor: Terminated successfully.");
                Iterator<String> itr = missingSeriesIds.iterator();
                while (itr.hasNext()) {
                    String id = itr.next();
                    if (cachedSeriesIds.contains(id))
                        itr.remove();
                }
                if (missingSeriesIds.size() > 0) {
                    LOG.info(String.format("Grabbing %d series info programs!", missingSeriesIds.size()));
                    Set<String> retrySet = new HashSet<>();
                    try {
                        new ProgramTask(missingSeriesIds, vfs, clnt, factory, missingSeriesIds, "seriesInfo",
                                retrySet, true).run();
                    } catch (RuntimeException e) {
                        LOG.error("SeriesInfo task failed!", e);
                        Grabber.failedTask = true;
                    }
                    Path seriesInfoData = vfs.getPath(SERIES_INFO_DATA);
                    if (retrySet.size() > 0) {
                        StringBuilder sb = new StringBuilder();
                        for (String id : retrySet)
                            sb.append(id + "\n");
                        Files.write(seriesInfoData, sb.toString().getBytes(ZipEpgClient.ZIP_CHARSET),
                                StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING,
                                StandardOpenOption.CREATE);
                    } else if (Files.exists(seriesInfoData))
                        Files.delete(seriesInfoData);
                }
            } else {
                failedTask = true;
                LOG.warn("ProgramExecutor: Termination timed out; some tasks probably didn't finish properly!");
            }
        } catch (InterruptedException e) {
            failedTask = true;
            LOG.warn("ProgramExecutor: Termination interrupted); some tasks probably didn't finish properly!");
        }

        String userData = clnt.getUserStatus().toJson();
        if (failedTask) {
            LOG.error("One or more tasks failed!  Resetting last data refresh timestamp to zero.");
            SimpleDateFormat fmt = Config.get().getDateTimeFormat();
            String exp = fmt.format(new Date(0L));
            JSONObject o = Config.get().getObjectMapper().readValue(userData, JSONObject.class);
            o.put("lastDataUpdate", exp);
            userData = o.toString(2);
        }
        Path p = vfs.getPath(USER_DATA);
        Files.write(p, userData.getBytes(ZipEpgClient.ZIP_CHARSET), StandardOpenOption.WRITE,
                StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE);
        removeIgnoredStations(vfs);
    } catch (URISyntaxException e1) {
        throw new RuntimeException(e1);
    } finally {
        Runtime rt = Runtime.getRuntime();
        LOG.info(String.format("MemStats:%n\tFREE: %s%n\tUSED: %s%n\t MAX: %s",
                FileUtils.byteCountToDisplaySize(rt.freeMemory()),
                FileUtils.byteCountToDisplaySize(rt.totalMemory()),
                FileUtils.byteCountToDisplaySize(rt.maxMemory())));
    }
}
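
Worth noting in the example above: deleteIfExists is not limited to the default file system. The lineups path lives inside a ZIP archive opened through the jar: file-system provider, and the call removes the entry from the archive itself. A minimal sketch of that pattern (archive.zip is a placeholder name):

URI zipUri = URI.create("jar:" + Paths.get("archive.zip").toUri());
try (FileSystem zipFs = FileSystems.newFileSystem(zipUri, Collections.singletonMap("create", "true"))) {
    Files.deleteIfExists(zipFs.getPath("lineups.txt")); // removes the entry if present
}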

From source file:com.netflix.genie.agent.execution.statemachine.actions.SetUpJobAction.java

private void cleanupJobDirectory(final Path jobDirectoryPath, final CleanupStrategy cleanupStrategy)
        throws IOException {

    switch (cleanupStrategy) {
    case NO_CLEANUP:
        log.info("Skipping cleanup of job directory: {}", jobDirectoryPath);
        break;

    case FULL_CLEANUP:
        log.info("Wiping job directory: {}", jobDirectoryPath);
        FileSystemUtils.deleteRecursively(jobDirectoryPath);
        break;

    case DEPENDENCIES_CLEANUP:
        final RegexRuleSet cleanupWhitelist = RegexRuleSet.buildWhitelist((Pattern[]) Lists
                .newArrayList(PathUtils.jobClusterDirectoryPath(jobDirectoryPath.toFile(), ".*"),
                        PathUtils.jobCommandDirectoryPath(jobDirectoryPath.toFile(), ".*"),
                        PathUtils.jobApplicationDirectoryPath(jobDirectoryPath.toFile(), ".*"))
                .stream().map(PathUtils::jobEntityDependenciesPath).map(Path::toString)
                .map(pathString -> pathString + "/.*").map(Pattern::compile).toArray(Pattern[]::new));
        Files.walk(jobDirectoryPath).filter(path -> cleanupWhitelist.accept(path.toAbsolutePath().toString()))
                .forEach(path -> {
                    try {
                        log.debug("Deleting {}", path);
                        Files.deleteIfExists(path);
                    } catch (final IOException e) {
                        log.warn("Failed to delete: {}", path.toAbsolutePath().toString(), e);
                    }
                });
        break;

    default:
        throw new RuntimeException("Unknown cleanup strategy: " + cleanupStrategy.name());
    }
}
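
deleteIfExists removes a directory only when it is empty; for a non-empty one it throws DirectoryNotEmptyException, which the catch block above absorbs along with other I/O errors. When the goal is to wipe a whole tree with this method, the usual idiom is to walk depth-first and delete the deepest paths first. A sketch, independent of the Genie code (root is a placeholder; requires java.util.Comparator and java.util.stream.Stream):

try (Stream<Path> walk = Files.walk(root)) {
    // Reverse lexicographic order visits children before their parent directories.
    walk.sorted(Comparator.reverseOrder()).forEach(path -> {
        try {
            Files.deleteIfExists(path);
        } catch (IOException e) {
            log.warn("Failed to delete: {}", path, e);
        }
    });
}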

From source file:br.ufc.deti.ecgweb.domain.exam.EcgService.java

@Transactional(timeout = 0)
public void importEcg(Long patientId, File file, String ecgFileKey) throws IOException {

    System.out.println("Map Size=" + uploadFileMap.size());

    EcgFileType type = EcgFileType.HL7;

    lock.lock();
    try {
        if (getUploadFile(patientId) != null) {
            // An upload for this patient is already in progress
            throw new ServiceUploadDuplicatedActionException();
        } else {
            addUploadFile(patientId, file);
        }
    } finally {
        lock.unlock();
    }

    System.out.println("Map Size=" + uploadFileMap.size());

    AbstractEcgFormat hl7 = new HL7FormatImpl();
    hl7.loadInformationFromFile(file);

    String newfileName = ecgFileKey;

    Patient patient = patientRepository.findOne(patientId);

    EcgFile ecgFile = new EcgFile();
    ecgFile.setDate(LocalDateTime.now());
    ecgFile.setFileName(newfileName);
    ecgFile.setType(type);
    ecgFileRepository.save(ecgFile);

    Ecg ecg = new Ecg();
    ecg.setBaseLine(hl7.getBaseLine());
    ecg.setGain(hl7.getGain());
    ecg.setSampleRate(hl7.getSampleRate());
    ecg.setFinished(Boolean.FALSE);
    ecg.setFile(ecgFile);
    ecgRepository.save(ecg);

    for (int i = 0; i < hl7.getNumberOfChannels(); i++) {

        EcgChannel channel = new EcgChannel();
        channel.setLeadType(hl7.getChannelType(i));

        long count = 0;
        List<EcgSignal> signals = hl7.getChannelSignals(i);
        for (EcgSignal signal : signals) {
            count++;
            /*if (count == 30000) {
            break;
            }*/

            EcgSignal signal_ = new EcgSignal();
            signal_.setSampleIdx(signal.getSampleIdx());
            signal_.setyIntensity(signal.getyIntensity());

            ecgSignalRepository.save(signal_);
            channel.addSignal(signal_);

            if (count % 10000 == 0) {
                System.out.println(patientId + " : signal:" + count);
            }
        }
        channel.setEcg(ecg);
        ecgChannelRepository.save(channel);

        ecg.addChannel(channel);
        ecgRepository.save(ecg);
    }

    patient.addEcgExam(ecg);
    patientRepository.save(patient);

    ecg.setFinished(Boolean.TRUE);
    ecgRepository.save(ecg);

    Path pathIn = FileSystems.getDefault().getPath(file.getAbsolutePath());
    Path pathOut = FileSystems.getDefault().getPath("/home/ecgs/" + newfileName);

    Files.copy(pathIn, pathOut);
    Files.deleteIfExists(pathIn);

    lock.lock();
    try {
        removeUploadFile(patientId);
    } finally {
        lock.unlock();
    }

    System.out.println("Map Size=" + uploadFileMap.size());
}
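
The Files.copy followed by Files.deleteIfExists at the end is a manual move. java.nio.file also offers Files.move for the same effect in one call, which can be atomic when source and target share a file store (REPLACE_EXISTING is an added choice here; the original copy would fail if the target already exists):

// One-call alternative to the copy + deleteIfExists pair above:
Files.move(pathIn, pathOut, StandardCopyOption.REPLACE_EXISTING);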

From source file:org.fao.geonet.kernel.harvest.harvester.geonet.Aligner.java

private void addMetadata(final RecordInfo ri, final boolean localRating, String uuid) throws Exception {
    final String id[] = { null };
    final Element md[] = { null };

    //--- import metadata from MEF file

    Path mefFile = retrieveMEF(ri.uuid);

    try {
        String fileType = "mef";
        MEFLib.Version version = MEFLib.getMEFVersion(mefFile);
        if (version != null && version.equals(MEFLib.Version.V2)) {
            fileType = "mef2";
        }

        IVisitor visitor = fileType.equals("mef2") ? new MEF2Visitor() : new MEFVisitor();

        MEFLib.visit(mefFile, visitor, new IMEFVisitor() {
            public void handleMetadata(Element mdata, int index) throws Exception {
                md[index] = mdata;
            }

            //--------------------------------------------------------------------

            public void handleMetadataFiles(DirectoryStream<Path> files, Element info, int index)
                    throws Exception {
                // Import valid metadata
                Element metadataValidForImport = extractValidMetadataForImport(files, info);

                if (metadataValidForImport != null) {
                    handleMetadata(metadataValidForImport, index);
                }
            }

            //--------------------------------------------------------------------

            public void handleInfo(Element info, int index) throws Exception {

                final Element metadata = md[index];
                String schema = dataMan.autodetectSchema(metadata, null);
                if (info != null && info.getContentSize() != 0) {
                    Element general = info.getChild("general");
                    if (general != null && general.getContentSize() != 0) {
                        Element schemaInfo = general.getChild("schema");
                        if (schemaInfo != null) {
                            schemaInfo.setText(schema);
                        }
                    }
                }
                if (info != null) {
                    id[index] = addMetadata(ri, md[index], info, localRating, uuid);
                }
            }

            //--------------------------------------------------------------------

            public void handlePublicFile(String file, String changeDate, InputStream is, int index)
                    throws IOException {
                if (id[index] == null)
                    return;

                if (log.isDebugEnabled())
                    log.debug("    - Adding remote public file with name:" + file);
                Path pubDir = Lib.resource.getDir(context, "public", id[index]);

                Path outFile = pubDir.resolve(file);
                try (OutputStream os = Files.newOutputStream(outFile)) {
                    BinaryFile.copy(is, os);
                    IO.touch(outFile,
                            FileTime.from(new ISODate(changeDate).getTimeInSeconds(), TimeUnit.SECONDS));
                }
            }

            public void handleFeatureCat(Element md, int index) throws Exception {
                // Feature Catalog not managed for harvesting
            }

            public void handlePrivateFile(String file, String changeDate, InputStream is, int index)
                    throws IOException {
                if (params.mefFormatFull) {
                    if (log.isDebugEnabled())
                        log.debug("    - Adding remote private file with name:" + file
                                + " available for download for user used for harvester.");
                    Path dir = Lib.resource.getDir(context, "private", id[index]);
                    Path outFile = dir.resolve(file);
                    try (OutputStream os = Files.newOutputStream(outFile)) {
                        BinaryFile.copy(is, os);
                        IO.touch(outFile,
                                FileTime.from(new ISODate(changeDate).getTimeInSeconds(), TimeUnit.SECONDS));
                    }
                }
            }
        });
    } catch (Exception e) {
        //--- we ignore the exception here. Maybe the metadata has been removed just now
        if (log.isDebugEnabled())
            log.debug("  - Skipped unretrievable metadata (maybe has been removed) with uuid:" + ri.uuid);
        result.unretrievable++;
        log.error(e);
    } finally {
        try {
            Files.deleteIfExists(mefFile);
        } catch (IOException e) {
            log.warning("Unable to delete mefFile: " + mefFile);
        }
    }
}