List of usage examples for java.nio.file Files delete
public static void delete(Path path) throws IOException
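Files.delete reports failure by exception rather than by return value: it throws NoSuchFileException if the path does not exist, DirectoryNotEmptyException if the path names a non-empty directory, and IOException for other problems such as insufficient permissions. A minimal usage sketch of this handling is shown below; the path is only a placeholder.

import java.io.IOException;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;

public class DeleteExample {
    public static void main(String[] args) {
        Path path = Paths.get("/tmp/example.txt"); // placeholder path for illustration
        try {
            Files.delete(path);
        } catch (NoSuchFileException e) {
            System.err.println("No such file or directory: " + path);
        } catch (DirectoryNotEmptyException e) {
            System.err.println("Directory is not empty: " + path);
        } catch (IOException e) {
            // permission problems or other I/O errors
            System.err.println("Unable to delete " + path + ": " + e);
        }
        // Note: Files.deleteIfExists(path) returns false instead of throwing when the file is absent.
    }
}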
From source file:org.cryptomator.cryptofs.CryptoFileSystemImpl.java
void delete(CryptoPath cleartextPath) throws IOException {
    Path ciphertextFile = cryptoPathMapper.getCiphertextFilePath(cleartextPath, CiphertextFileType.FILE);
    // try to delete ciphertext file:
    if (!Files.deleteIfExists(ciphertextFile)) {
        // filePath doesn't exist, maybe it's a directory:
        Path ciphertextDir = cryptoPathMapper.getCiphertextDirPath(cleartextPath);
        Path ciphertextDirFile = cryptoPathMapper.getCiphertextFilePath(cleartextPath, CiphertextFileType.DIRECTORY);
        try {
            Files.delete(ciphertextDir);
            if (!Files.deleteIfExists(ciphertextDirFile)) {
                // should not happen. Nevertheless this is a valid state, so no big deal...
                LOG.warn("Successfully deleted dir {}, but didn't find corresponding dir file {}", ciphertextDir, ciphertextDirFile);
            }
            dirIdProvider.delete(ciphertextDirFile);
        } catch (NoSuchFileException e) {
            // translate ciphertext path to cleartext path
            throw new NoSuchFileException(cleartextPath.toString());
        } catch (DirectoryNotEmptyException e) {
            // translate ciphertext path to cleartext path
            throw new DirectoryNotEmptyException(cleartextPath.toString());
        }
    }
}
From source file:org.tinymediamanager.core.tvshow.TvShowRenamer.java
private static void cleanEmptyDir(Path dir) {
    try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(dir)) {
        if (!directoryStream.iterator().hasNext()) {
            // no iterator = empty
            LOGGER.debug("Deleting empty Directory " + dir);
            Files.delete(dir); // do not use recursive here
            return;
        }
    } catch (IOException ex) {
    }

    // FIXME: recursive backward delete?! why?!
    // if (Files.isDirectory(dir)) {
    //     cleanEmptyDir(dir.getParent());
    // }
}
From source file:org.codice.ddf.security.migratable.impl.SecurityMigratableTest.java
/** Verifies that when no PDP file exists, the export and import both succeed. */
@Test
public void testDoExportAndDoImportWhenNoPdpFileExists() throws IOException {
    // Setup export
    Path exportDir = tempDir.getRoot().toPath().toRealPath();

    // Remove PDP file
    Path xacmlPolicy = ddfHome.resolve(PDP_POLICIES_DIR).resolve(XACML_POLICY);
    Files.delete(xacmlPolicy);

    // Perform export
    MigrationReport exportReport = doExport(exportDir);

    // Verify export
    assertThat("The export report has errors.", exportReport.hasErrors(), is(false));
    assertThat("The export report has warnings.", exportReport.hasWarnings(), is(false));
    assertThat("Export was not successful.", exportReport.wasSuccessful(), is(true));

    String exportedZipBaseName = String.format("%s-%s.dar", SUPPORTED_BRANDING, SUPPORTED_PRODUCT_VERSION);
    Path exportedZip = exportDir.resolve(exportedZipBaseName).toRealPath();
    assertThat("Export zip does not exist.", exportedZip.toFile().exists(), is(true));
    assertThat("Exported zip is empty.", exportedZip.toFile().length(), greaterThan(0L));

    // Setup import
    setup(DDF_IMPORTED_HOME, DDF_IMPORTED_TAG, SUPPORTED_PRODUCT_VERSION);
    SecurityMigratable iSecurityMigratable = new SecurityMigratable();
    List<Migratable> iMigratables = Arrays.asList(iSecurityMigratable);
    ConfigurationMigrationManager iConfigurationMigrationManager = new ConfigurationMigrationManager(iMigratables, systemService);
    MigrationReport importReport = iConfigurationMigrationManager.doImport(exportDir, this::print);

    // Verify import
    assertThat("The import report has errors.", importReport.hasErrors(), is(false));
    assertThat("The import report has warnings.", importReport.hasWarnings(), is(false));
    assertThat("Import was not successful.", importReport.wasSuccessful(), is(true));
    verifyPolicyFilesImported();
    verifyCrlImported();
}
From source file:org.apache.openaz.xacml.std.pap.StdPDPGroup.java
public PDPPolicy publishPolicy(String id, String name, boolean isRoot, InputStream policy) throws PAPException {
    //
    // Does it exist already?
    //
    if (this.getPolicy(id) != null) {
        throw new PAPException("Policy with id " + id + " already exists - unpublish it first.");
    }
    Path tempFile = null;
    try {
        //
        // Copy the policy over
        //
        tempFile = Files.createFile(Paths.get(this.directory.toAbsolutePath().toString(), id));
        long num;
        try (OutputStream os = Files.newOutputStream(tempFile)) {
            num = ByteStreams.copy(policy, os);
        }
        logger.info("Copied " + num + " bytes for policy " + name);
        StdPDPPolicy tempRootPolicy = new StdPDPPolicy(id, isRoot, name, tempFile.toUri());
        if (!tempRootPolicy.isValid()) {
            try {
                Files.delete(tempFile);
            } catch (Exception ee) {
                logger.error("Policy was invalid, could NOT delete it.", ee);
            }
            throw new PAPException("Policy is invalid");
        }
        //
        // Add it in
        //
        this.policies.add(tempRootPolicy);
        //
        // We are changed
        //
        this.firePDPGroupChanged(this);
        //
        // Return our new object.
        //
        return tempRootPolicy;
    } catch (IOException e) {
        logger.error("Failed to publishPolicy: ", e);
    }
    return null;
}
From source file:cc.arduino.contributions.packages.ContributionInstaller.java
public synchronized void deleteUnknownFiles(List<String> downloadedPackageIndexFiles) throws IOException {
    File preferencesFolder = BaseNoGui.indexer.getIndexFile(".").getParentFile();
    File[] additionalPackageIndexFiles = preferencesFolder.listFiles(new PackageIndexFilenameFilter(Constants.DEFAULT_INDEX_FILE_NAME));
    if (additionalPackageIndexFiles == null) {
        return;
    }
    for (File additionalPackageIndexFile : additionalPackageIndexFiles) {
        if (!downloadedPackageIndexFiles.contains(additionalPackageIndexFile.getName())) {
            Files.delete(additionalPackageIndexFile.toPath());
        }
    }
}
From source file:fr.pilato.elasticsearch.crawler.fs.test.integration.FsCrawlerImplAllParametersIT.java
@Test
public void test_remove_deleted_enabled() throws Exception {
    Fs fs = startCrawlerDefinition().setRemoveDeleted(true).build();
    startCrawler(getCrawlerName(), fs, endCrawlerDefinition(getCrawlerName()), null);

    // We should have two docs first
    countTestHelper(getCrawlerName(), null, 2, currentTestResourceDir);

    // We remove a file
    logger.info(" ---> Removing file deleted_roottxtfile.txt");
    Files.delete(currentTestResourceDir.resolve("deleted_roottxtfile.txt"));

    // We expect to have one file left
    countTestHelper(getCrawlerName(), null, 1, currentTestResourceDir);
}
From source file:dk.dma.ais.downloader.QueryService.java
/**
 * called every hour to clean up the repo
 */
@Scheduled(cron = "12 27 */1 * * *")
public void cleanUpRepoFolder() {
    long now = System.currentTimeMillis();
    long expiredTime = now - FILE_EXPIRY_MS;

    try {
        Files.walkFileTree(getRepoRoot(), new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                if (!dir.equals(getRepoRoot()) && isDirEmpty(dir)) {
                    log.info("Deleting repo directory :" + dir);
                    Files.delete(dir);
                }
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                if (Files.getLastModifiedTime(file).toMillis() < expiredTime) {
                    log.info("Deleting repo file :" + file);
                    Files.delete(file);
                }
                return FileVisitResult.CONTINUE;
            }
        });
    } catch (IOException e) {
        log.log(Level.SEVERE, "Failed cleaning up repo: " + e.getMessage());
    }

    log.info(String.format("Cleaned up repo in %d ms", System.currentTimeMillis() - now));
}
From source file:org.apache.nifi.processors.standard.FetchFile.java
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final StopWatch stopWatch = new StopWatch(true);
    final String filename = context.getProperty(FILENAME).evaluateAttributeExpressions(flowFile).getValue();
    final LogLevel levelFileNotFound = LogLevel.valueOf(context.getProperty(FILE_NOT_FOUND_LOG_LEVEL).getValue());
    final LogLevel levelPermDenied = LogLevel.valueOf(context.getProperty(PERM_DENIED_LOG_LEVEL).getValue());
    final File file = new File(filename);

    // Verify that file system is reachable and file exists
    Path filePath = file.toPath();
    if (!Files.exists(filePath) && !Files.notExists(filePath)) { // see https://docs.oracle.com/javase/tutorial/essential/io/check.html for more details
        getLogger().log(levelFileNotFound,
                "Could not fetch file {} from file system for {} because the existence of the file cannot be verified; routing to failure",
                new Object[] { file, flowFile });
        session.transfer(session.penalize(flowFile), REL_FAILURE);
        return;
    } else if (!Files.exists(filePath)) {
        getLogger().log(levelFileNotFound,
                "Could not fetch file {} from file system for {} because the file does not exist; routing to not.found",
                new Object[] { file, flowFile });
        session.getProvenanceReporter().route(flowFile, REL_NOT_FOUND);
        session.transfer(session.penalize(flowFile), REL_NOT_FOUND);
        return;
    }

    // Verify read permission on file
    final String user = System.getProperty("user.name");
    if (!isReadable(file)) {
        getLogger().log(levelPermDenied,
                "Could not fetch file {} from file system for {} due to user {} not having sufficient permissions to read the file; routing to permission.denied",
                new Object[] { file, flowFile, user });
        session.getProvenanceReporter().route(flowFile, REL_PERMISSION_DENIED);
        session.transfer(session.penalize(flowFile), REL_PERMISSION_DENIED);
        return;
    }

    // If configured to move the file and fail if unable to do so, check that the existing file does not exist and that we have write permissions
    // for the parent file.
    final String completionStrategy = context.getProperty(COMPLETION_STRATEGY).getValue();
    final String targetDirectoryName = context.getProperty(MOVE_DESTINATION_DIR).evaluateAttributeExpressions(flowFile).getValue();
    if (targetDirectoryName != null) {
        final File targetDir = new File(targetDirectoryName);
        if (COMPLETION_MOVE.getValue().equalsIgnoreCase(completionStrategy)) {
            if (targetDir.exists() && (!isWritable(targetDir) || !isDirectory(targetDir))) {
                getLogger().error(
                        "Could not fetch file {} from file system for {} because Completion Strategy is configured to move the original file to {}, "
                                + "but that is not a directory or user {} does not have permissions to write to that directory",
                        new Object[] { file, flowFile, targetDir, user });
                session.transfer(flowFile, REL_FAILURE);
                return;
            }

            final String conflictStrategy = context.getProperty(CONFLICT_STRATEGY).getValue();
            if (CONFLICT_FAIL.getValue().equalsIgnoreCase(conflictStrategy)) {
                final File targetFile = new File(targetDir, file.getName());
                if (targetFile.exists()) {
                    getLogger().error(
                            "Could not fetch file {} from file system for {} because Completion Strategy is configured to move the original file to {}, "
                                    + "but a file with name {} already exists in that directory and the Move Conflict Strategy is configured for failure",
                            new Object[] { file, flowFile, targetDir, file.getName() });
                    session.transfer(flowFile, REL_FAILURE);
                    return;
                }
            }
        }
    }

    // import content from file system
    try (final FileInputStream fis = new FileInputStream(file)) {
        flowFile = session.importFrom(fis, flowFile);
    } catch (final IOException ioe) {
        getLogger().error("Could not fetch file {} from file system for {} due to {}; routing to failure",
                new Object[] { file, flowFile, ioe.toString() }, ioe);
        session.transfer(session.penalize(flowFile), REL_FAILURE);
        return;
    }

    session.getProvenanceReporter().modifyContent(flowFile,
            "Replaced content of FlowFile with contents of " + file.toURI(),
            stopWatch.getElapsed(TimeUnit.MILLISECONDS));
    session.transfer(flowFile, REL_SUCCESS);

    // It is critical that we commit the session before we perform the Completion Strategy. Otherwise, we could have a case where we
    // ingest the file, delete/move the file, and then NiFi is restarted before the session is committed. That would result in data loss.
    // As long as we commit the session right here, before we perform the Completion Strategy, we are safe.
    session.commit();

    // Attempt to perform the Completion Strategy action
    Exception completionFailureException = null;
    if (COMPLETION_DELETE.getValue().equalsIgnoreCase(completionStrategy)) {
        // convert to path and use Files.delete instead of file.delete so that if we fail, we know why
        try {
            delete(file);
        } catch (final IOException ioe) {
            completionFailureException = ioe;
        }
    } else if (COMPLETION_MOVE.getValue().equalsIgnoreCase(completionStrategy)) {
        final File targetDirectory = new File(targetDirectoryName);
        final File targetFile = new File(targetDirectory, file.getName());
        try {
            if (targetFile.exists()) {
                final String conflictStrategy = context.getProperty(CONFLICT_STRATEGY).getValue();
                if (CONFLICT_KEEP_INTACT.getValue().equalsIgnoreCase(conflictStrategy)) {
                    // don't move, just delete the original
                    Files.delete(file.toPath());
                } else if (CONFLICT_RENAME.getValue().equalsIgnoreCase(conflictStrategy)) {
                    // rename to add a random UUID but keep the file extension if it has one.
                    final String simpleFilename = targetFile.getName();
                    final String newName;
                    if (simpleFilename.contains(".")) {
                        newName = StringUtils.substringBeforeLast(simpleFilename, ".") + "-"
                                + UUID.randomUUID().toString() + "." + StringUtils.substringAfterLast(simpleFilename, ".");
                    } else {
                        newName = simpleFilename + "-" + UUID.randomUUID().toString();
                    }

                    move(file, new File(targetDirectory, newName), false);
                } else if (CONFLICT_REPLACE.getValue().equalsIgnoreCase(conflictStrategy)) {
                    move(file, targetFile, true);
                }
            } else {
                move(file, targetFile, false);
            }
        } catch (final IOException ioe) {
            completionFailureException = ioe;
        }
    }

    // Handle completion failures
    if (completionFailureException != null) {
        getLogger().warn(
                "Successfully fetched the content from {} for {} but failed to perform Completion Action due to {}; routing to success",
                new Object[] { file, flowFile, completionFailureException }, completionFailureException);
    }
}
From source file:org.apache.solr.schema.TestManagedSchema.java
public void testAddDynamicField() throws Exception {
    deleteCore();
    File managedSchemaFile = new File(tmpConfDir, "managed-schema");
    Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema
    System.setProperty("managed.schema.mutable", "true");
    initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath());

    assertNull("Field '*_s' is present in the schema", h.getCore().getLatestSchema().getFieldOrNull("*_s"));

    String errString = "Can't add dynamic field '*_s'.";
    ignoreException(Pattern.quote(errString));
    try {
        Map<String, Object> options = new HashMap<>();
        IndexSchema oldSchema = h.getCore().getLatestSchema();
        String fieldName = "*_s";
        String fieldType = "string";
        SchemaField newField = oldSchema.newField(fieldName, fieldType, options);
        IndexSchema newSchema = oldSchema.addField(newField);
        h.getCore().setLatestSchema(newSchema);
        fail("Should fail when adding a dynamic field");
    } catch (Exception e) {
        for (Throwable t = e; t != null; t = t.getCause()) {
            // short circuit out if we found what we expected
            if (t.getMessage() != null && -1 != t.getMessage().indexOf(errString))
                return;
        }
        // otherwise, rethrow it, possibly completely unrelated
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                "Unexpected error, expected error matching: " + errString, e);
    } finally {
        resetExceptionIgnores();
    }
}
From source file:de.topobyte.osm4j.extra.extracts.ExtractionFilesBuilder.java
public void execute() throws IOException, OsmInputException {
    System.out.println("Output directory: " + pathOutput);
    Files.createDirectories(pathOutput);
    if (!Files.isDirectory(pathOutput)) {
        System.out.println("Unable to create output directory");
        System.exit(1);
    }
    if (pathOutput.toFile().listFiles().length != 0) {
        System.out.println("Output directory is not empty");
        System.exit(1);
    }

    String extension = OsmIoUtils.extension(outputFormat);

    pathNodes = pathOutput.resolve(fileNames.getSplitNodes());
    pathWays = pathOutput.resolve(fileNames.getSplitWays());
    pathRelations = pathOutput.resolve(fileNames.getSplitRelations());
    pathTree = pathOutput.resolve(fileNames.getTree());
    pathWaysByNodes = pathOutput.resolve(fileNames.getWaysByNodes());

    pathSimpleRelations = pathOutput.resolve("relations.simple" + extension);
    pathComplexRelations = pathOutput.resolve("relations.complex" + extension);
    pathSimpleRelationsDir = pathOutput.resolve("relations.simple");
    pathComplexRelationsDir = pathOutput.resolve("relations.complex");
    pathSimpleRelationsNonTree = pathOutput.resolve("relations.simple.nontree" + extension);
    pathComplexRelationsNonTree = pathOutput.resolve("relations.complex.nontree" + extension);
    pathSimpleRelationsNonTreeBboxes = pathOutput.resolve("relations.simple.nontree.bboxlist");
    pathComplexRelationsNonTreeBboxes = pathOutput.resolve("relations.complex.nontree.bboxlist");
    pathSimpleRelationsEmpty = pathOutput.resolve(fileNames.getSimpleRelationsEmpty());
    pathComplexRelationsEmpty = pathOutput.resolve(fileNames.getComplexRelationsEmpty());
    pathSimpleRelationsSorted = pathOutput.resolve(fileNames.getSimpleRelations());
    pathComplexRelationsSorted = pathOutput.resolve(fileNames.getComplexRelations());
    pathSimpleRelationsSortedBboxes = pathOutput.resolve(fileNames.getSimpleRelationsBboxes());
    pathComplexRelationsSortedBboxes = pathOutput.resolve(fileNames.getComplexRelationsBboxes());

    pathTreeGeometry = pathOutput.resolve("tree.wkt");
    pathSimpleRelationsSortedGeometry = pathOutput.resolve("simple.wkt");
    pathComplexRelationsSortedGeometry = pathOutput.resolve("complex.wkt");

    OsmFileInput fileInput = new OsmFileInput(pathInput, inputFormat);
    OsmFileInput fileInputNodes = new OsmFileInput(pathNodes, outputFormat);
    OsmFileInput fileInputWays = new OsmFileInput(pathWays, outputFormat);
    OsmFileInput fileInputRelations = new OsmFileInput(pathRelations, outputFormat);

    TreeFileNames treeNames = fileNames.getTreeNames();
    String fileNamesFinalNodes = treeNames.getNodes();
    String fileNamesFinalWays = treeNames.getWays();
    String fileNamesFinalRelationsSimple = treeNames.getSimpleRelations();
    String fileNamesFinalRelationsComplex = treeNames.getComplexRelations();

    String fileNamesInitialNodes = "initial-nodes" + extension;
    String fileNamesInitialWays = "dist-ways" + extension;
    String fileNamesMissingWayNodeIds = "dist-ways-missing.ids";
    String fileNamesMissingNodes = "missing-nodes" + extension;
    String fileNamesDistributedWays = "ways-unsorted" + extension;
    String fileNamesDistributedNodes = "nodes-unsorted" + extension;
    String fileNamesRelationsComplexUnsorted = "relations-complex-unsorted" + extension;

    BatchFileNames relationNames = fileNames.getRelationNames();
    String fileNamesRelations = relationNames.getRelations();

    OsmOutputConfig outputConfigSplit = new OsmOutputConfig(outputFormat, includeMetadata);
    OsmOutputConfig outputConfigTree = new OsmOutputConfig(outputFormat, includeMetadata);
    OsmOutputConfig outputConfigWays = new OsmOutputConfig(outputFormat, includeMetadata);
    OsmOutputConfig outputConfigRelations = new OsmOutputConfig(outputFormat, includeMetadata);

    outputConfigTree.getTboConfig().setLimitNodes(new ElementCountLimit(1024));
    outputConfigTree.getTboConfig().setLimitWays(new WayNodeLimit(2048));
    outputConfigTree.getTboConfig().setLimitRelations(new RelationMemberLimit(2048));

    outputConfigRelations.getTboConfig().setLimitRelations(new RelationMemberLimit(1024));

    OsmOutputConfig outputConfigTreeFinal = new OsmOutputConfig(outputFormat, includeMetadata);

    // Determine bounds
    BBox bbox = null;
    OsmIteratorInput inputBounds = fileInput.createIterator(false, false);
    if (!inputBounds.getIterator().hasBounds() && !computeBbox) {
        System.out.println("Input does not provide bounds" + " and no flag has been set to compute the bounding box");
        System.exit(1);
    }
    if (inputBounds.getIterator().hasBounds()) {
        OsmBounds bounds = inputBounds.getIterator().getBounds();
        bbox = new BBox(bounds.getLeft(), bounds.getBottom(), bounds.getRight(), bounds.getTop());
        System.out.println("bounds from file: " + BBoxString.create(bbox));
    }
    inputBounds.close();

    // Split entities
    t.start(KEY_TOTAL);
    t.start(KEY_SPLIT);
    OsmIteratorInput input = fileInput.createIterator(true, includeMetadata);
    ThreadedEntitySplitter splitter = new ThreadedEntitySplitter(input.getIterator(), pathNodes, pathWays,
            pathRelations, outputConfigSplit, 10000, 200);
    splitter.execute();
    input.close();
    t.stop(KEY_SPLIT);
    printInfo();

    // Calculate bounding box
    t.start(KEY_COMPUTE_BBOX);
    if (computeBbox) {
        bbox = OsmUtils.computeBBox(fileInputNodes);
        System.out.println("computed bounds: " + BBoxString.create(bbox));
    }
    t.stop(KEY_COMPUTE_BBOX);

    // Create node tree
    t.start(KEY_NODE_TREE);
    DataTree tree = DataTreeUtil.initNewTree(pathTree, bbox);
    DataTreeFiles treeFiles = new DataTreeFiles(pathTree, fileNamesInitialNodes);
    DataTreeOutputFactory dataTreeOutputFactory = new ClosingDataTreeOutputFactory(treeFiles, outputConfigTree);
    NodeTreeLeafCounterFactory counterFactory = new ThreadedNodeTreeLeafCounterFactory();
    NodeTreeDistributorFactory distributorFactory = new ThreadedNodeTreeDistributorFactory();
    NodeTreeCreatorMaxNodes creator = new NodeTreeCreatorMaxNodes(tree, fileInputNodes, dataTreeOutputFactory,
            maxNodes, SPLIT_INITIAL, SPLIT_ITERATION, pathTree, fileNamesInitialNodes, outputConfigTree,
            counterFactory, distributorFactory);
    creator.buildTree();
    t.stop(KEY_NODE_TREE);
    printInfo();

    // Sort ways by first node id
    t.start(KEY_SORT_WAYS);
    OsmIteratorInput inputWays = fileInputWays.createIterator(true, includeMetadata);
    WaysSorterByFirstNodeId waysSorter = new ThreadedWaysSorterByFirstNodeId(inputWays.getIterator(),
            pathWaysByNodes, outputConfigWays);
    waysSorter.execute();
    inputWays.close();
    t.stop(KEY_SORT_WAYS);

    // Map ways to tree
    t.start(KEY_MAP_WAYS);
    OsmIteratorInput inputNodes = fileInputNodes.createIterator(true, includeMetadata);
    WaysToTreeMapper waysMapper = new ThreadedWaysToTreeMapper(inputNodes.getIterator(), pathTree,
            pathWaysByNodes, outputFormat, fileNamesInitialWays, outputConfigTree);
    waysMapper.execute();
    inputNodes.close();
    if (!keepWaysByNodes) {
        FileUtils.deleteDirectory(pathWaysByNodes.toFile());
    }
    t.stop(KEY_MAP_WAYS);
    printInfo();

    // Find missing way nodes
    t.start(KEY_FIND_MISSING_WAY_NODES);
    MissingWayNodesFinder wayNodesFinder = new ThreadedMissingWayNodesFinder(pathTree, pathTree, pathTree,
            fileNamesInitialNodes, fileNamesInitialWays, fileNamesMissingWayNodeIds, outputFormat, outputFormat);
    wayNodesFinder.execute();
    t.stop(KEY_FIND_MISSING_WAY_NODES);
    printInfo();

    // Extract missing way nodes
    t.start(KEY_EXTRACT_MISSING_WAY_NODES);
    inputNodes = fileInputNodes.createIterator(true, includeMetadata);
    boolean threaded = true;
    MissingWayNodesExtractor wayNodesExtractor = new MissingWayNodesExtractor(inputNodes.getIterator(), pathTree,
            fileNamesMissingWayNodeIds, pathTree, fileNamesMissingNodes, outputConfigTree, threaded);
    wayNodesExtractor.execute();
    inputNodes.close();
    for (Path path : BatchFilesUtil.getPaths(pathTree, fileNamesMissingWayNodeIds)) {
        Files.delete(path);
    }
    t.stop(KEY_EXTRACT_MISSING_WAY_NODES);
    printInfo();

    // Distribute ways
    t.start(KEY_DISTRIBUTE_WAYS);
    WaysDistributor waysDistributor = new ThreadedWaysDistributor(pathTree, fileNamesInitialNodes,
            fileNamesMissingNodes, fileNamesInitialWays, fileNamesDistributedWays, fileNamesDistributedNodes,
            outputFormat, outputFormat, outputConfigTree);
    waysDistributor.execute();
    t.stop(KEY_DISTRIBUTE_WAYS);
    printInfo();

    // Merge nodes
    t.start(KEY_MERGE_NODES);
    List<String> fileNamesSortedNodes = new ArrayList<>();
    List<String> fileNamesUnsortedNodes = new ArrayList<>();
    fileNamesSortedNodes.add(fileNamesInitialNodes);
    fileNamesSortedNodes.add(fileNamesMissingNodes);
    fileNamesUnsortedNodes.add(fileNamesDistributedNodes);
    TreeFilesMerger nodesMerger = new ThreadedTreeFilesMerger(pathTree, fileNamesSortedNodes,
            fileNamesUnsortedNodes, fileNamesFinalNodes, outputFormat, outputConfigTreeFinal, true);
    nodesMerger.execute();
    t.stop(KEY_MERGE_NODES);
    printInfo();

    // Merge ways
    t.start(KEY_MERGE_WAYS);
    List<String> fileNamesSortedWays = new ArrayList<>();
    List<String> fileNamesUnsortedWays = new ArrayList<>();
    fileNamesUnsortedWays.add(fileNamesInitialWays);
    fileNamesUnsortedWays.add(fileNamesDistributedWays);
    TreeFilesMerger waysMerger = new ThreadedTreeFilesMerger(pathTree, fileNamesSortedWays,
            fileNamesUnsortedWays, fileNamesFinalWays, outputFormat, outputConfigTreeFinal, true);
    waysMerger.execute();
    t.stop(KEY_MERGE_WAYS);
    printInfo();

    // Separate relations
    t.start(KEY_SEPARATE_RELATIONS);
    RelationsSeparator separator = new RelationsSeparator(fileInputRelations, pathSimpleRelations,
            pathComplexRelations, outputConfigRelations);
    separator.execute();
    t.stop(KEY_SEPARATE_RELATIONS);
    printInfo();

    // Split relations and collect members
    t.start(KEY_SPLIT_RELATIONS);
    OsmFileInput inputSimpleRelations = new OsmFileInput(pathSimpleRelations, outputFormat);
    OsmFileInput inputComplexRelations = new OsmFileInput(pathComplexRelations, outputFormat);
    RelationsSplitterAndMemberCollector relationSplitter = new RelationsSplitterAndMemberCollector(
            inputSimpleRelations, inputComplexRelations, pathSimpleRelationsDir, pathComplexRelationsDir,
            fileNamesRelations, fileInputWays, fileInputNodes, outputConfigRelations);
    relationSplitter.execute();
    if (!keepRelations) {
        Files.delete(pathSimpleRelations);
        Files.delete(pathComplexRelations);
    }
    t.stop(KEY_SPLIT_RELATIONS);
    printInfo();

    // Distribute relations
    t.start(KEY_DISTRIBUTE_RELATIONS);
    String fileNamesNodes = RelationsMemberCollector.FILE_NAMES_NODE_BASENAME + extension;
    String fileNamesWays = RelationsMemberCollector.FILE_NAMES_WAY_BASENAME + extension;
    SimpleRelationsDistributor simpleRelationsDistributor = new SimpleRelationsDistributor(pathTree,
            pathSimpleRelationsDir, pathSimpleRelationsEmpty, pathSimpleRelationsNonTree,
            pathSimpleRelationsNonTreeBboxes, fileNamesRelations, fileNamesWays, fileNamesNodes,
            fileNamesFinalRelationsSimple, outputFormat, outputConfigTree);
    simpleRelationsDistributor.execute();
    ComplexRelationsDistributor complexRelationsDistributor = new ComplexRelationsDistributor(pathTree,
            pathComplexRelationsDir, pathComplexRelationsEmpty, pathComplexRelationsNonTree,
            pathComplexRelationsNonTreeBboxes, fileNamesRelations, fileNamesWays, fileNamesNodes,
            fileNamesRelationsComplexUnsorted, outputFormat, outputConfigTree);
    complexRelationsDistributor.execute();
    t.stop(KEY_DISTRIBUTE_RELATIONS);
    printInfo();

    // Sort complex tree relations
    t.start(KEY_SORT_COMPLEX_RELATIONS);
    TreeFileSorter sorter = new TreeFileSorter(pathTree, fileNamesRelationsComplexUnsorted,
            fileNamesFinalRelationsComplex, outputFormat, outputConfigRelations, keepUnsortedRelations);
    sorter.execute();
    t.stop(KEY_SORT_COMPLEX_RELATIONS);

    // Sort non-tree relations
    t.start(KEY_SORT_RELATIONS);
    NonTreeRelationsSplitter nonTreeSplitter = new NonTreeRelationsSplitter(pathSimpleRelationsNonTree,
            pathComplexRelationsNonTree, pathSimpleRelationsNonTreeBboxes, pathComplexRelationsNonTreeBboxes,
            pathSimpleRelationsDir, pathComplexRelationsDir, pathSimpleRelationsSorted,
            pathComplexRelationsSorted, outputFormat, outputConfigRelations, pathSimpleRelationsSortedBboxes,
            pathComplexRelationsSortedBboxes, maxMembersSimple, maxMembersComplex, keepUnsortedRelations);
    nonTreeSplitter.execute();
    if (!keepRelationBatches) {
        FileUtils.deleteDirectory(pathSimpleRelationsDir.toFile());
        FileUtils.deleteDirectory(pathComplexRelationsDir.toFile());
    }
    t.stop(KEY_SORT_RELATIONS);

    // Clean up
    t.start(KEY_CLEAN_UP);
    if (!keepNonTreeRelations) {
        Files.delete(pathSimpleRelationsNonTree);
        Files.delete(pathComplexRelationsNonTree);
        Files.delete(pathSimpleRelationsNonTreeBboxes);
        Files.delete(pathComplexRelationsNonTreeBboxes);
    }
    if (!keepSplittedNodes) {
        Files.delete(pathNodes);
    }
    if (!keepSplittedWays) {
        Files.delete(pathWays);
    }
    if (!keepSplittedRelations) {
        Files.delete(pathRelations);
    }
    t.stop(KEY_CLEAN_UP);

    t.start(KEY_CREATE_GEOMETRIES);
    DataTreeBoxGeometryCreator dataTreeBoxGeometryCreator = new DataTreeBoxGeometryCreator(pathTree.toFile(),
            pathTreeGeometry.toFile());
    dataTreeBoxGeometryCreator.execute();
    IdBboxListGeometryCreator idBboxListGeometryCreatorSimple = new IdBboxListGeometryCreator(
            pathSimpleRelationsSortedBboxes.toFile(), pathSimpleRelationsSortedGeometry.toFile());
    idBboxListGeometryCreatorSimple.execute();
    IdBboxListGeometryCreator idBboxListGeometryCreatorComplex = new IdBboxListGeometryCreator(
            pathComplexRelationsSortedBboxes.toFile(), pathComplexRelationsSortedGeometry.toFile());
    idBboxListGeometryCreatorComplex.execute();
    t.stop(KEY_CREATE_GEOMETRIES);

    t.stop(KEY_TOTAL);
    printInfo();
}