Example usage for java.nio.file Files deleteIfExists

Introduction

On this page you can find example usages of java.nio.file.Files.deleteIfExists, drawn from real-world source files.

Prototype

public static boolean deleteIfExists(Path path) throws IOException 

Document

Deletes a file if it exists. Returns true if the file was deleted by this method, or false if it could not be deleted because it did not exist. Unlike Files.delete(Path), it does not fail when the file is missing; it can still throw DirectoryNotEmptyException if the path is a non-empty directory, and IOException if another I/O error occurs.
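
For orientation before the project examples below, here is a minimal, self-contained sketch of the method's semantics (the class name, temp-file prefix, and printed labels are illustrative):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class DeleteIfExistsDemo {
    public static void main(String[] args) throws IOException {
        // Create a temporary file to work with (prefix and suffix are arbitrary).
        Path path = Files.createTempFile("deleteIfExistsDemo", ".txt");

        // First call: the file exists, so it is deleted and true is returned.
        System.out.println("first call  : " + Files.deleteIfExists(path));

        // Second call: the file is already gone, so false is returned instead
        // of the NoSuchFileException that Files.delete(path) would throw.
        System.out.println("second call : " + Files.deleteIfExists(path));
    }
}

This no-throw-on-missing behavior is why the examples below use it for test cleanup and for best-effort deletion in finally blocks.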

Usage

From source file:com.sumzerotrading.reporting.csv.ReportGeneratorTest.java

@Test
public void testWriteRoundTrip() throws Exception {
    Path path = Files.createTempFile("ReportGeneratorUnitTest", ".txt");
    reportGenerator.outputFile = path.toString();
    String expected = "2016-03-19T07:01:10,Long,ABC,100,100.23,0,2016-03-20T06:01:10,101.23,0,Short,XYZ,50,250.34,0,251.34,0";

    Ticker longTicker = new StockTicker("ABC");
    Ticker shortTicker = new StockTicker("XYZ");
    int longSize = 100;
    int shortSize = 50;
    double longEntryFillPrice = 100.23;
    double longExitFillPrice = 101.23;
    double shortEntryFillPrice = 250.34;
    double shortExitFillPrice = 251.34;
    ZonedDateTime entryTime = ZonedDateTime.of(2016, 3, 19, 7, 1, 10, 0, ZoneId.systemDefault());
    ZonedDateTime exitTime = ZonedDateTime.of(2016, 3, 20, 6, 1, 10, 0, ZoneId.systemDefault());

    TradeOrder longEntry = new TradeOrder("123", longTicker, longSize, TradeDirection.BUY);
    longEntry.setFilledPrice(longEntryFillPrice);
    longEntry.setOrderFilledTime(entryTime);

    TradeOrder longExit = new TradeOrder("123", longTicker, longSize, TradeDirection.SELL);
    longExit.setFilledPrice(longExitFillPrice);
    longExit.setOrderFilledTime(exitTime);

    TradeOrder shortEntry = new TradeOrder("123", shortTicker, shortSize, TradeDirection.SELL);
    shortEntry.setFilledPrice(shortEntryFillPrice);
    shortEntry.setOrderFilledTime(entryTime);

    TradeOrder shortExit = new TradeOrder("123", shortTicker, shortSize, TradeDirection.BUY);
    shortExit.setFilledPrice(shortExitFillPrice);
    shortExit.setOrderFilledTime(exitTime);

    PairTradeRoundTrip roundTrip = new PairTradeRoundTrip();
    roundTrip.longEntry = longEntry;
    roundTrip.longExit = longExit;
    roundTrip.shortEntry = shortEntry;
    roundTrip.shortExit = shortExit;

    System.out.println("Writing out to file: " + path);

    reportGenerator.writeRoundTripToFile(roundTrip);

    List<String> lines = Files.readAllLines(path);
    assertEquals(1, lines.size());
    assertEquals(expected, lines.get(0));

    // Clean up the temp report file; no error if it is already gone.
    Files.deleteIfExists(path);

}

From source file:org.n52.wps.server.database.PostgresDatabase.java

@Override
protected synchronized String insertResultEntity(InputStream stream, String id, String type, String mimeType) {
    Timestamp timestamp = new Timestamp(Calendar.getInstance().getTimeInMillis());
    FileInputStream fis = null;
    Boolean storingOutput = null != id && id.toLowerCase().contains("output");
    Boolean saveResultsToDB = Boolean.parseBoolean(getDatabaseProperties("saveResultsToDB"));
    String filename = storingOutput ? id : UUID.randomUUID().toString();
    Path filePath = new File(BASE_DIRECTORY, filename).toPath();

    try {
        filePath = Files.createFile(filePath);
        Files.copy(stream, filePath, StandardCopyOption.REPLACE_EXISTING);
        fis = new FileInputStream(filePath.toFile());

        AbstractDatabase.insertSQL.setString(INSERT_COLUMN_REQUEST_ID, id);
        AbstractDatabase.insertSQL.setTimestamp(INSERT_COLUMN_REQUEST_DATE, timestamp);
        AbstractDatabase.insertSQL.setString(INSERT_COLUMN_RESPONSE_TYPE, type);
        AbstractDatabase.insertSQL.setString(INSERT_COLUMN_MIME_TYPE, mimeType);

        if (storingOutput) {
            if (!saveResultsToDB) {
                byte[] filePathByteArray = filePath.toUri().toString().getBytes();
                AbstractDatabase.insertSQL.setAsciiStream(INSERT_COLUMN_RESPONSE,
                        new ByteArrayInputStream(filePathByteArray), filePathByteArray.length);
            } else {
                AbstractDatabase.insertSQL.setAsciiStream(INSERT_COLUMN_RESPONSE, fis,
                        (int) filePath.toFile().length());
            }
        } else {
            AbstractDatabase.insertSQL.setAsciiStream(INSERT_COLUMN_RESPONSE, fis,
                    (int) filePath.toFile().length());
        }

        AbstractDatabase.insertSQL.executeUpdate();
        getConnection().commit();
    } catch (SQLException e) {
        LOGGER.error("Could not insert Response into database.", e);
    } catch (IOException e) {
        LOGGER.error("Could not insert Response into database.", e);
    } finally {
        if (fis != null) {
            try {
                fis.close();
            } catch (IOException e) {
                LOGGER.error("Could not close file input stream", e);
            }
        }

        // If we are storing output, we want to only delete the file if we're
        // storing the results to the database. Otherwise, don't delete the
        // file since that will be served on request
        if (filePath != null) {
            try {
                if (storingOutput) {
                    if (saveResultsToDB) {
                        Files.deleteIfExists(filePath);
                    }
                } else {
                    Files.deleteIfExists(filePath);
                }
            } catch (IOException e) {
                LOGGER.error("Could not delete file: " + filePath.toString(), e);
            }
        }
    }
    return generateRetrieveResultURL(id);
}

From source file:faescapeplan.FAEscapePlanUI.java

@SuppressWarnings("unchecked")
private void downloadJournals(ArrayList<String> journalList) {
    JSONArray jsonList = new JSONArray();
    String downloadLoc = this.saveLocText.getText();
    Path jsonPath = Paths.get(downloadLoc + "\\" + userData.getName() + "\\journals\\journals.json");

    try {
        Files.deleteIfExists(jsonPath);
        Files.createFile(jsonPath);
    } catch (IOException ex) {
        Logger.getLogger(FAEscapePlanUI.class.getName()).log(Level.SEVERE, null, ex);
        JOptionPane.showMessageDialog(this, "A critical IO exception occurred in method: downloadJournals");
    }

    for (String item : journalList) {
        try {
            Map<String, String> jsonMap = new LinkedHashMap<>();
            Document doc = Jsoup.connect("http://www.furaffinity.net/journal/" + item + "/")
                    .cookies(userData.getCookies()).userAgent(USER_AGENT).get();
            String title = doc.title().split(" -- ")[0];
            String date = doc.getElementsByClass("popup_date").get(0).attr("title");
            String body = doc.getElementsByClass("journal-body").get(0).html();
            jsonMap.put("title", title);
            jsonMap.put("date", date);
            jsonMap.put("body", body);
            jsonList.add(jsonMap);
            Path journalPath = Paths.get(downloadLoc,
                    "\\" + userData.getName() + "\\journals\\" + item + "_" + title + ".txt");
            String bodyParsed = removeHtmlTags(body);

            try (FileWriter journalWriter = new FileWriter(new File(journalPath.toString()))) {
                journalWriter.append(title + System.getProperty("line.separator"));
                journalWriter.append(date + System.getProperty("line.separator"));
                journalWriter.append(bodyParsed + System.getProperty("line.separator"));
            }
        } catch (FileAlreadyExistsException ex) {
            Logger.getLogger(FAEscapePlanUI.class.getName()).log(Level.SEVERE, null, ex);
            updateTextLog("File already exists");
        } catch (IOException ex) {
            Logger.getLogger(FAEscapePlanUI.class.getName()).log(Level.SEVERE, null, ex);
            updateTextLog("An IO Exception occurred while downloading journal: " + item);
        }
    }

    String jsonString = JSONValue.toJSONString(jsonList);

    try {
        Files.write(jsonPath, Arrays.asList(jsonString), StandardOpenOption.WRITE);
    } catch (IOException ex) {
        Logger.getLogger(FAEscapePlanUI.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:de.digiway.rapidbreeze.server.infrastructure.objectstorage.ObjectStorage.java

/**
 * Removes all entities of the given class. If there is no entity of the
 * given class existing in the storage, the method returns without any
 * action.
 *
 * @param clazz
 */
public void removeAll(Class<?> clazz) {
    if (!ObjectStorageHelper.isEntity(clazz)) {
        throw new IllegalArgumentException("The given class: " + clazz + " is not a valid entity class.");
    }

    String tablename = ObjectStorageHelper.getEntityTable(clazz);

    if (classMap.containsKey(tablename)) {
        String classIndexFilename = classMap.get(tablename);
        PlainStorageMap classIndexMap = getStorageMap(classIndexFilename);
        Iterator<Map.Entry<String, String>> it = classIndexMap.entrySet().iterator();
        try {
            while (it.hasNext()) {
                String filename = it.next().getValue();
                it.remove();
                Path path = fileSystem.getPath(root.toString(), filename);
                Files.deleteIfExists(path);
            }
            classIndexMap.commit();
        } catch (IOException ex) {
            LOG.log(Level.WARNING, "Cannot remove object property file. Leaving it there.", ex);
        } catch (Exception ex) {
            classIndexMap.rollback();
            throw ex;
        }
    }
}

From source file:org.apache.zeppelin.interpreter.launcher.SparkInterpreterLauncherTest.java

@Test
public void testYarnClusterMode_2() throws IOException {
    ZeppelinConfiguration zConf = new ZeppelinConfiguration();
    SparkInterpreterLauncher launcher = new SparkInterpreterLauncher(zConf, null);
    Properties properties = new Properties();
    properties.setProperty("SPARK_HOME", "/user/spark");
    properties.setProperty("property_1", "value_1");
    properties.setProperty("master", "yarn");
    properties.setProperty("spark.submit.deployMode", "cluster");
    properties.setProperty("spark.files", "file_1");
    properties.setProperty("spark.jars", "jar_1");

    InterpreterOption option = new InterpreterOption();
    option.setUserImpersonate(true);
    InterpreterLaunchContext context = new InterpreterLaunchContext(properties, option, null, "user1",
            "intpGroupId", "groupId", "spark", "spark", 0, "host");
    Path localRepoPath = Paths.get(zConf.getInterpreterLocalRepoPath(), context.getInterpreterSettingId());
    FileUtils.deleteDirectory(localRepoPath.toFile());
    Files.createDirectories(localRepoPath);
    Files.createFile(Paths.get(localRepoPath.toAbsolutePath().toString(), "test.jar"));

    InterpreterClient client = launcher.launch(context);
    assertTrue(client instanceof RemoteInterpreterManagedProcess);
    RemoteInterpreterManagedProcess interpreterProcess = (RemoteInterpreterManagedProcess) client;
    assertEquals("spark", interpreterProcess.getInterpreterSettingName());
    assertTrue(interpreterProcess.getInterpreterDir().endsWith("/interpreter/spark"));
    assertTrue(interpreterProcess.getLocalRepoDir().endsWith("/local-repo/groupId"));
    assertEquals(zConf.getInterpreterRemoteRunnerPath(), interpreterProcess.getInterpreterRunner());
    assertTrue(interpreterProcess.getEnv().size() >= 3);
    assertEquals("/user/spark", interpreterProcess.getEnv().get("SPARK_HOME"));
    assertEquals("true", interpreterProcess.getEnv().get("ZEPPELIN_SPARK_YARN_CLUSTER"));
    assertEquals(
            " --master yarn --conf spark.files='file_1',.//conf/log4j_yarn_cluster.properties --conf spark.jars='jar_1' --conf spark.submit.deployMode='cluster' --conf spark.yarn.isPython=true --conf spark.yarn.submit.waitAppCompletion=false --proxy-user user1 --jars "
                    + Paths.get(localRepoPath.toAbsolutePath().toString(), "test.jar").toString(),
            interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
    Files.deleteIfExists(Paths.get(localRepoPath.toAbsolutePath().toString(), "test.jar"));
    FileUtils.deleteDirectory(localRepoPath.toFile());
}

From source file:org.digidoc4j.main.DigiDoc4JTest.java

@Test
public void createsContainerAndSignsIt() throws Exception {
    exit.expectSystemExitWithStatus(0);
    // Start from a clean slate: remove any container left over from a previous run.
    Files.deleteIfExists(Paths.get("test1.ddoc"));
    String[] params = new String[] { "-in", "test1.ddoc", "-add", "testFiles/test.txt", "text/plain", "-pkcs12",
            "testFiles/signout.p12", "test" };
    DigiDoc4J.main(params);
}

From source file:org.linagora.linshare.webservice.userv2.impl.FlowDocumentUploaderRestServiceImpl.java

@Path("/")
@POST
@Consumes("multipart/form-data")
@Override
public FlowDto uploadChunk(@Multipart(CHUNK_NUMBER) long chunkNumber, @Multipart(TOTAL_CHUNKS) long totalChunks,
        @Multipart(CHUNK_SIZE) long chunkSize, @Multipart(CURRENT_CHUNK_SIZE) long currentChunkSize,
        @Multipart(TOTAL_SIZE) long totalSize, @Multipart(IDENTIFIER) String identifier,
        @Multipart(FILENAME) String filename, @Multipart(RELATIVE_PATH) String relativePath,
        @Multipart(FILE) InputStream file, MultipartBody body,
        @Multipart(value = WORK_GROUP_UUID, required = false) String workGroupUuid,
        @Multipart(value = WORK_GROUP_FOLDER_UUID, required = false) String workGroupFolderUuid,
        @Multipart(value = ASYNC_TASK, required = false) boolean async) throws BusinessException {
    logger.debug("upload chunk number : " + chunkNumber);
    identifier = cleanIdentifier(identifier);
    boolean isValid = FlowUploaderUtils.isValid(chunkNumber, chunkSize, totalSize, identifier, filename);
    Validate.isTrue(isValid);
    checkIfMaintenanceIsEnabled();
    FlowDto flow = new FlowDto(chunkNumber);
    try {
        logger.debug("writing chunk number : " + chunkNumber);
        java.nio.file.Path tempFile = FlowUploaderUtils.getTempFile(identifier, chunkedFiles);
        ChunkedFile currentChunkedFile = chunkedFiles.get(identifier);
        if (!currentChunkedFile.hasChunk(chunkNumber)) {
            FileChannel fc = FileChannel.open(tempFile, StandardOpenOption.CREATE, StandardOpenOption.APPEND);
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            IOUtils.copy(file, output);
            fc.write(ByteBuffer.wrap(output.toByteArray()), (chunkNumber - 1) * chunkSize);
            fc.close();
            if (sizeValidation) {
                if (output.size() != currentChunkSize) {
                    String msg = String.format("File size does not match, found : %1$d, announced : %2$d",
                            output.size(), currentChunkSize);
                    logger.error(msg);
                    flow.setChunkUploadSuccess(false);
                    flow.setErrorMessage(msg);
                    return flow;
                }
            }
            currentChunkedFile.addChunk(chunkNumber);
        } else {
            logger.error("currentChunkedFile.hasChunk(chunkNumber) !!! " + currentChunkedFile);
            logger.error("chunkedNumber skipped : " + chunkNumber);
        }

        logger.debug("nb uploading files : " + chunkedFiles.size());
        logger.debug("current chuckedfile uuid : " + identifier);
        logger.debug("current chuckedfiles" + chunkedFiles.toString());
        if (FlowUploaderUtils.isUploadFinished(identifier, chunkSize, totalSize, chunkedFiles)) {
            flow.setLastChunk(true);
            logger.debug("upload finished : " + chunkNumber + " : " + identifier);
            InputStream inputStream = Files.newInputStream(tempFile, StandardOpenOption.READ);
            File tempFile2 = getTempFile(inputStream, "rest-flowuploader", filename);
            if (sizeValidation) {
                long currSize = tempFile2.length();
                if (currSize != totalSize) {
                    String msg = String.format("File size does not match, found : %1$d, announced : %2$d",
                            currSize, totalSize);
                    logger.error(msg);
                    flow.setChunkUploadSuccess(false);
                    flow.setErrorMessage(msg);
                    return flow;
                }
            }
            EntryDto uploadedDocument = new EntryDto();
            flow.setIsAsync(async);
            boolean isWorkGroup = !Strings.isNullOrEmpty(workGroupUuid);
            if (async) {
                logger.debug("Async mode is used");
                // Asynchronous mode
                AccountDto actorDto = documentFacade.getAuthenticatedAccountDto();
                AsyncTaskDto asyncTask = null;
                try {
                    if (isWorkGroup) {
                        ThreadEntryTaskContext threadEntryTaskContext = new ThreadEntryTaskContext(actorDto,
                                actorDto.getUuid(), workGroupUuid, tempFile2, filename, workGroupFolderUuid);
                        asyncTask = asyncTaskFacade.create(totalSize, getTransfertDuration(identifier),
                                filename, null, AsyncTaskType.THREAD_ENTRY_UPLOAD);
                        ThreadEntryUploadAsyncTask task = new ThreadEntryUploadAsyncTask(threadEntryAsyncFacade,
                                threadEntryTaskContext, asyncTask);
                        taskExecutor.execute(task);
                        flow.completeAsyncTransfert(asyncTask);
                    } else {
                        DocumentTaskContext documentTaskContext = new DocumentTaskContext(actorDto,
                                actorDto.getUuid(), tempFile2, filename, null, null);
                        asyncTask = asyncTaskFacade.create(totalSize, getTransfertDuration(identifier),
                                filename, null, AsyncTaskType.DOCUMENT_UPLOAD);
                        DocumentUploadAsyncTask task = new DocumentUploadAsyncTask(documentAsyncFacade,
                                documentTaskContext, asyncTask);
                        taskExecutor.execute(task);
                        flow.completeAsyncTransfert(asyncTask);
                    }
                } catch (Exception e) {
                    logAsyncFailure(asyncTask, e);
                    deleteTempFile(tempFile2);
                    ChunkedFile remove = chunkedFiles.remove(identifier);
                    Files.deleteIfExists(remove.getPath());
                    throw e;
                }
            } else {
                try {
                    if (isWorkGroup) {
                        uploadedDocument = threadEntryFacade.create(null, workGroupUuid, workGroupFolderUuid,
                                tempFile2, filename);
                    } else {
                        uploadedDocument = documentFacade.create(tempFile2, filename, "", null);
                    }
                    flow.completeTransfert(uploadedDocument);
                } finally {
                    deleteTempFile(tempFile2);
                    ChunkedFile remove = chunkedFiles.remove(identifier);
                    if (remove != null) {
                        Files.deleteIfExists(remove.getPath());
                    } else {
                        logger.error("Should not happen !!!");
                        logger.error("chunk number: " + chunkNumber);
                        logger.error("chunk identifier: " + identifier);
                        logger.error("chunk filename: " + filename);
                        logger.error("chunks : " + chunkedFiles.toString());
                    }
                }
            }
            return flow;
        } else {
            logger.debug("upload pending ");
            flow.setChunkUploadSuccess(true);
        }
    } catch (BusinessException e) {
        logger.error(e.getMessage());
        logger.debug("Exception : ", e);
        flow.setChunkUploadSuccess(false);
        flow.setErrorMessage(e.getMessage());
        flow.setErrCode(e.getErrorCode().getCode());
    } catch (Exception e) {
        logger.error(e.getMessage());
        logger.debug("Exception : ", e);
        flow.setChunkUploadSuccess(false);
        flow.setErrorMessage(e.getMessage());
    }
    return flow;
}

From source file:org.corehunter.services.simple.FileBasedDatasetServices.java

@Override
public void loadData(Dataset dataset, Path path, FileType fileType, CoreHunterDataType dataType,
        Object... options) throws IOException, DatasetException {

    if (dataset == null) {
        throw new DatasetException("Dataset not defined!");
    }

    if (fileType == null) {
        throw new DatasetException("File type not defined!");
    }

    if (dataType == null) {
        throw new DatasetException("Data type not defined!");
    }

    DatasetPojo internalDataset = datasetMap.get(dataset.getUniqueIdentifier());

    if (internalDataset == null) {
        throw new DatasetException("Unknown dataset with datasetId : " + dataset.getUniqueIdentifier());
    }

    if (!Files.exists(path)) {
        throw new DatasetException("Unknown path : " + path);
    }

    String datasetId = internalDataset.getUniqueIdentifier();

    String dataId = dataset.getUniqueIdentifier();
    String dataName = dataset.getName();

    Path copyPath;
    Path internalPath;

    Path originalFormatPath;
    Path dataPath;

    CoreHunterData coreHunterData = getCoreHunterData(internalDataset.getUniqueIdentifier());

    switch (dataType) {
    case GENOTYPIC:

        copyPath = Paths.get(getPath().toString(), GENOTYPIC_PATH, datasetId + getSuffix(fileType));

        internalPath = Paths.get(getPath().toString(), GENOTYPIC_PATH, datasetId + SUFFIX);

        originalFormatPath = Paths.get(getPath().toString(), GENOTYPIC_PATH,
                datasetId + ORIGINAL_FORMAT_SUFFIX);

        dataPath = Paths.get(getPath().toString(), GENOTYPIC_PATH, datasetId + DATA_SUFFIX);

        if (coreHunterData != null && (coreHunterData.getGenotypicData() != null || Files.exists(copyPath))) {
            throw new DatasetException(
                    "Genotypic Data is already associated for this dataset : " + dataset.getName());
        }

        try {
            copyOrMoveFile(path, copyPath);
        } catch (Exception e) {
            Files.deleteIfExists(copyPath);
            throw e;
        }

        GenotypeDataFormat genotypeDataFormat = getGenotypeDataFormat(options);
        FrequencyGenotypeData genotypeData;

        try {
            genotypeData = genotypeDataFormat.readData(copyPath, fileType);
        } catch (IOException e) {
            Files.deleteIfExists(copyPath);
            throw e;
        }

        try {
            genotypeData.writeData(internalPath, FileType.TXT);
        } catch (IOException e) {
            Files.deleteIfExists(copyPath);
            throw e;
        }

        try {
            writeToFile(originalFormatPath, genotypeDataFormat);
        } catch (IOException e) {
            Files.deleteIfExists(originalFormatPath);
            Files.deleteIfExists(copyPath);
            throw e;
        }

        if (coreHunterData != null) {
            coreHunterData = new CoreHunterData(genotypeData, coreHunterData.getPhenotypicData(),
                    coreHunterData.getDistancesData());
        } else {
            coreHunterData = new CoreHunterData(genotypeData, null, null);
        }

        dataCache.put(datasetId, coreHunterData);

        break;
    case PHENOTYPIC:

        copyPath = Paths.get(getPath().toString(), PHENOTYPIC_PATH, datasetId + getSuffix(fileType));

        internalPath = Paths.get(getPath().toString(), PHENOTYPIC_PATH, datasetId + SUFFIX);

        dataPath = Paths.get(getPath().toString(), PHENOTYPIC_PATH, datasetId + DATA_SUFFIX);

        if (coreHunterData != null && (coreHunterData.getPhenotypicData() != null || Files.exists(copyPath))) {
            throw new DatasetException(
                    "Phenotypic Data is already associated for this dataset : " + dataset.getName());
        }

        try {
            copyOrMoveFile(path, copyPath);
        } catch (Exception e) {
            Files.deleteIfExists(copyPath);
            throw e;
        }

        SimplePhenotypeData phenotypeData;

        try {
            phenotypeData = SimplePhenotypeData.readPhenotypeData(copyPath, fileType);
        } catch (IOException e) {
            Files.deleteIfExists(copyPath);
            throw e;
        }

        // TODO write data method should be on interface?
        try {
            phenotypeData.writeData(internalPath, FileType.TXT);
        } catch (IOException e) {
            Files.deleteIfExists(copyPath);
            throw e;
        }

        if (coreHunterData != null) {
            coreHunterData = new CoreHunterData(coreHunterData.getGenotypicData(), phenotypeData,
                    coreHunterData.getDistancesData());
        } else {
            coreHunterData = new CoreHunterData(null, phenotypeData, null);
        }

        dataCache.put(datasetId, coreHunterData);

        break;
    case DISTANCES:

        copyPath = Paths.get(getPath().toString(), DISTANCES_PATH, datasetId + getSuffix(fileType));

        internalPath = Paths.get(getPath().toString(), DISTANCES_PATH, datasetId + SUFFIX);

        dataPath = Paths.get(getPath().toString(), DISTANCES_PATH, datasetId + DATA_SUFFIX);

        if (coreHunterData != null && (coreHunterData.getDistancesData() != null || Files.exists(copyPath))) {
            throw new DatasetException(
                    "Distances Data is already associated for this dataset : " + dataset.getName());
        }

        try {
            copyOrMoveFile(path, copyPath);
        } catch (Exception e) {
            Files.deleteIfExists(copyPath);
            throw e;
        }

        SimpleDistanceMatrixData distanceData;

        try {
            distanceData = SimpleDistanceMatrixData.readData(copyPath, fileType);
        } catch (IOException e) {
            Files.deleteIfExists(copyPath);
            throw e;
        }

        try {
            distanceData.writeData(internalPath, FileType.TXT);
        } catch (IOException e) {
            Files.deleteIfExists(copyPath);
            throw e;
        }

        if (coreHunterData != null) {
            coreHunterData = new CoreHunterData(coreHunterData.getGenotypicData(),
                    coreHunterData.getPhenotypicData(), distanceData);
        } else {
            coreHunterData = new CoreHunterData(null, null, distanceData);
        }

        dataCache.put(datasetId, coreHunterData);
        break;
    default:
        throw new IllegalArgumentException("Unknown data type : " + dataType);
    }

    try {
        writeToFile(dataPath, new SimpleEntityPojo(dataId, dataName));
    } catch (IOException e) {
        throw new DatasetException(e);
    }

    internalDataset.setSize(coreHunterData.getSize());

    writeDatasets();
}

From source file:com.sumzerotrading.reporting.csv.ReportGeneratorTest.java

@Test
public void testReportGeneratorEndToEnd() throws Exception {
    StockTicker longTicker = new StockTicker("ABC");
    StockTicker shortTicker = new StockTicker("XYZ");

    ZonedDateTime entryOrderTime = ZonedDateTime.of(2016, 3, 25, 6, 18, 35, 0, ZoneId.systemDefault());
    ZonedDateTime exitOrderTime = ZonedDateTime.of(2016, 3, 25, 6, 19, 35, 0, ZoneId.systemDefault());

    String directory = System.getProperty("java.io.tmpdir");
    if (!directory.endsWith("/")) {
        directory += "/";
    }
    Path reportPath = Paths.get(directory + "report.csv");
    Files.deleteIfExists(reportPath);
    System.out.println("Creating directory at: " + directory);
    ReportGenerator generator = new ReportGenerator("EOD-Pair-Strategy", directory, pairRoundtripBuilder);

    TradeOrder longEntryOrder = new TradeOrder("123", longTicker, 100, TradeDirection.BUY);
    longEntryOrder.setFilledPrice(100.00);
    longEntryOrder.setReference("EOD-Pair-Strategy:guid-123:Entry:Long*");
    longEntryOrder.setCurrentStatus(OrderStatus.Status.FILLED);
    longEntryOrder.setOrderFilledTime(entryOrderTime);

    TradeOrder shortEntryOrder = new TradeOrder("234", shortTicker, 50, TradeDirection.SELL);
    shortEntryOrder.setFilledPrice(50.00);
    shortEntryOrder.setReference("EOD-Pair-Strategy:guid-123:Entry:Short*");
    shortEntryOrder.setCurrentStatus(OrderStatus.Status.FILLED);
    shortEntryOrder.setOrderFilledTime(entryOrderTime);

    generator.orderEvent(new OrderEvent(longEntryOrder, null));
    assertFalse(Files.exists(reportPath));

    generator.orderEvent(new OrderEvent(shortEntryOrder, null));
    assertFalse(Files.exists(reportPath));

    TradeOrder longExitOrder = new TradeOrder("1234", longTicker, 100, TradeDirection.SELL);
    longExitOrder.setFilledPrice(105.00);
    longExitOrder.setReference("EOD-Pair-Strategy:guid-123:Exit:Long*");
    longExitOrder.setCurrentStatus(OrderStatus.Status.FILLED);
    longExitOrder.setOrderFilledTime(exitOrderTime);

    TradeOrder shortExitOrder = new TradeOrder("2345", shortTicker, 50, TradeDirection.BUY);
    shortExitOrder.setFilledPrice(40.00);
    shortExitOrder.setReference("EOD-Pair-Strategy:guid-123:Exit:Short*");
    shortExitOrder.setCurrentStatus(OrderStatus.Status.FILLED);
    shortExitOrder.setOrderFilledTime(exitOrderTime);

    generator.orderEvent(new OrderEvent(longExitOrder, null));
    assertFalse(Files.exists(reportPath));

    generator.orderEvent(new OrderEvent(shortExitOrder, null));
    assertTrue(Files.exists(reportPath));

    List<String> lines = Files.readAllLines(reportPath);
    assertEquals(1, lines.size());

    String line = lines.get(0);
    String expected = "2016-03-25T06:18:35,Long,ABC,100,100.0,0,2016-03-25T06:19:35,105.0,0,Short,XYZ,50,50.0,0,40.0,0";
    assertEquals(expected, line);

    generator.orderEvent(new OrderEvent(longEntryOrder, null));
    generator.orderEvent(new OrderEvent(longExitOrder, null));
    generator.orderEvent(new OrderEvent(shortEntryOrder, null));
    generator.orderEvent(new OrderEvent(shortExitOrder, null));

    lines = Files.readAllLines(reportPath);
    assertEquals(2, lines.size());
    assertEquals(expected, lines.get(0));
    assertEquals(expected, lines.get(1));

}

From source file:com.facebook.presto.hive.s3.TestPrestoS3FileSystem.java

@Test(expectedExceptions = IOException.class, expectedExceptionsMessageRegExp = "Configured staging path is not a directory: .*")
public void testCreateWithStagingDirectoryFile() throws Exception {
    java.nio.file.Path staging = createTempFile("staging", null);
    // staging = /tmp/stagingXXX.tmp

    try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) {
        MockAmazonS3 s3 = new MockAmazonS3();
        Configuration conf = new Configuration();
        conf.set(S3_STAGING_DIRECTORY, staging.toString());
        fs.initialize(new URI("s3n://test-bucket/"), conf);
        fs.setS3Client(s3);
        fs.create(new Path("s3n://test-bucket/test"));
    } finally {
        Files.deleteIfExists(staging);
    }
}