List of usage examples for java.nio.file StandardOpenOption CREATE_NEW
StandardOpenOption CREATE_NEW
To view the source code for java.nio.file StandardOpenOption CREATE_NEW, click the source link accompanying each example.
From source file:nl.jk_5.nailed.plugin.nmm.NmmMappack.java
/**
 * Downloads this mappack from nmm.jk-5.nl, unpacks it into {@code destination}
 * and rearranges the extracted layout (game.xml and scripts are moved into a
 * hidden .data directory, world folders are hoisted to the destination root).
 * Completes {@code future} with {@code null} on success or with a
 * RuntimeException on any failure — this method never throws itself.
 */
@Override
public void prepareWorld(File destination, SettableFuture<Void> future) {
    HttpClient httpClient = HttpClientBuilder.create().build();
    try {
        // path has the form "<prefix>/<mappack-name>"; keep only the name part
        String mappack = this.path.split("/", 2)[1];
        // First request: fetch the version manifest to learn the latest version
        HttpGet request = new HttpGet("http://nmm.jk-5.nl/" + this.path + "/versions.json");
        HttpResponse response = httpClient.execute(request);
        MappackInfo list = NmmPlugin.gson.fromJson(EntityUtils.toString(response.getEntity(), "UTF-8"),
                MappackInfo.class);
        // Second request: download the zip for that latest version
        HttpGet request2 = new HttpGet(
                "http://nmm.jk-5.nl/" + this.path + "/" + mappack + "-" + list.latest + ".zip");
        HttpEntity response2 = httpClient.execute(request2).getEntity();
        if (response2 != null) {
            File mappackZip = new File(destination, "mappack.zip");
            // Stream the HTTP body straight into mappack.zip. CREATE_NEW fails
            // if the file already exists; that failure is caught below and
            // reported through the future.
            try (ReadableByteChannel source = Channels.newChannel(response2.getContent());
                    FileChannel out = FileChannel.open(mappackZip.toPath(), StandardOpenOption.CREATE_NEW,
                            StandardOpenOption.WRITE)) {
                out.transferFrom(source, 0, Long.MAX_VALUE);
            }
            ZipUtils.extract(mappackZip, destination);
            mappackZip.delete(); // zip no longer needed after extraction; result ignored (best effort)
            this.dir = destination;
            // Move metadata and scripts into the hidden .data directory
            File dataDir = new File(destination, ".data");
            dataDir.mkdir();
            File metadataLocation = new File(dataDir, "game.xml");
            new File(destination, "game.xml").renameTo(metadataLocation);
            new File(destination, "scripts").renameTo(new File(dataDir, "scripts"));
            // Hoist each world folder out of worlds/ into the destination root.
            // NOTE(review): listFiles() returns null if worlds/ is absent — the
            // resulting NPE would be caught below and reported as a download
            // failure; confirm every mappack zip contains a worlds/ directory.
            File worldsDir = new File(destination, "worlds");
            for (File f : worldsDir.listFiles()) {
                f.renameTo(new File(destination, f.getName()));
            }
            worldsDir.delete();
            //metadata = XmlMappackMetadata.fromFile(metadataLocation);
            future.set(null);
        } else {
            future.setException(new RuntimeException(
                    "Got an empty response while downloading mappack " + this.path + " from nmm.jk-5.nl"));
        }
    } catch (Exception e) {
        future.setException(
                new RuntimeException("Was not able to download mappack " + this.path + " from nmm.jk-5.nl", e));
    }
}
From source file:de.siegmar.securetransfer.component.Cryptor.java
private byte[] initSalt(final Path baseDir) { final Path saltFile = baseDir.resolve("salt"); try {// w w w . ja v a 2 s.c o m if (Files.exists(saltFile)) { return Files.readAllBytes(saltFile); } final byte[] newSalt = newRandom(SALT_SIZE); Files.write(saltFile, newSalt, StandardOpenOption.CREATE_NEW); LOG.info("Initialized instance salt at {}", saltFile); return newSalt; } catch (final IOException e) { throw new UncheckedIOException(e); } }
From source file:io.github.dsheirer.record.wave.WaveWriter.java
/** * Opens the file and writes a wave header. *///from w w w .j av a 2s .c o m private void open() throws IOException { int version = 2; while (Files.exists(mFile)) { mFile = Paths.get(mFile.toFile().getAbsolutePath().replace(".wav", "_" + version + ".wav")); version++; } mFileChannel = (FileChannel.open(mFile, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW)); ByteBuffer header = getWaveHeader(mAudioFormat); while (header.hasRemaining()) { mFileChannel.write(header); } }
From source file:com.netflix.genie.agent.cli.ResolveJobSpecCommand.java
/**
 * Resolves a job specification — either by id lookup or by a dry-run
 * resolution built from the command-line job request arguments — prints it
 * as pretty JSON, and optionally writes it to an output file.
 *
 * @return {@code ExitCode.SUCCESS} on success
 * @throws RuntimeException wrapping any lookup, conversion, serialization or
 *         file-write failure
 */
@Override
public ExitCode run() {
    log.info("Resolving job specification");
    final ObjectMapper prettyJsonMapper = GenieObjectMapper.getMapper().copy() // Don't reconfigure the shared mapper
            .enable(SerializationFeature.INDENT_OUTPUT);
    final JobSpecification spec;
    final String jobId = resolveJobSpecCommandArguments.getSpecificationId();
    if (!StringUtils.isBlank(jobId)) {
        // Do a specification lookup if an id is given
        log.info("Looking up specification of job {}", jobId);
        try {
            spec = agentJobService.getJobSpecification(jobId);
        } catch (final JobSpecificationResolutionException e) {
            throw new RuntimeException("Failed to get spec: " + jobId, e);
        }
    } else {
        // Compose a job request from argument
        final AgentJobRequest agentJobRequest;
        try {
            final ArgumentDelegates.JobRequestArguments jobArgs = resolveJobSpecCommandArguments
                    .getJobRequestArguments();
            agentJobRequest = jobRequestConverter.agentJobRequestArgsToDTO(jobArgs);
        } catch (final JobRequestConverter.ConversionException e) {
            throw new RuntimeException("Failed to construct job request from arguments", e);
        }
        // Print request (unless suppressed via command-line flag)
        if (!resolveJobSpecCommandArguments.isPrintRequestDisabled()) {
            try {
                System.out.println(prettyJsonMapper.writeValueAsString(agentJobRequest));
            } catch (final JsonProcessingException e) {
                throw new RuntimeException("Failed to map request to JSON", e);
            }
        }
        // Resolve via service (dry run: nothing is persisted server-side)
        try {
            spec = agentJobService.resolveJobSpecificationDryRun(agentJobRequest);
        } catch (final JobSpecificationResolutionException e) {
            throw new RuntimeException("Failed to resolve job specification", e);
        }
    }
    // Translate response to JSON
    final String specJsonString;
    try {
        specJsonString = prettyJsonMapper.writeValueAsString(spec);
    } catch (final JsonProcessingException e) {
        throw new RuntimeException("Failed to map specification to JSON", e);
    }
    // Print specification
    System.out.println(specJsonString);
    // Write specification to file, if an output file was requested.
    // CREATE_NEW: refuses to overwrite an existing file.
    final File outputFile = resolveJobSpecCommandArguments.getOutputFile();
    if (outputFile != null) {
        try {
            Files.write(outputFile.toPath(), specJsonString.getBytes(StandardCharsets.UTF_8),
                    StandardOpenOption.CREATE_NEW);
        } catch (final IOException e) {
            throw new RuntimeException("Failed to write request to: " + outputFile.getAbsolutePath(), e);
        }
    }
    return ExitCode.SUCCESS;
}
From source file:net.fatlenny.datacitation.service.GitCitationDBService.java
@Override public Status saveQuery(Query query) throws CitationDBException { checkoutBranch(REF_QUERIES);// w w w .j av a2s . co m String pidIdentifier = query.getPid().getIdentifier(); String fileName = DigestUtils.sha1Hex(pidIdentifier) + "." + QUERY_ENDING; try (Git git = new Git(repository)) { Path filePath = Paths.get(getWorkingTreeDir(), fileName); Properties properties = writeQueryToProperties(query); properties.store( Files.newBufferedWriter(filePath, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW), ""); git.add().addFilepattern(fileName).call(); PersonIdent personIdent = new PersonIdent("ReCitable", "recitable@fatlenny.net"); String message = String.format("Created query file for PID=%s", pidIdentifier); git.commit().setMessage(message).setAuthor(personIdent).setCommitter(personIdent).call(); } catch (IOException | GitAPIException e) { throw new CitationDBException(e); } return Status.SUCCESS; }
From source file:cloudeventbus.pki.CertificateUtils.java
public static void savePrivateKey(PrivateKey privateKey, String fileName) throws IOException { try (final OutputStream outputStream = Files.newOutputStream(Paths.get(fileName), StandardOpenOption.CREATE_NEW)) { outputStream.write(privateKey.getEncoded()); }// w ww. j av a 2s . c o m }
From source file:com.github.jinahya.simple.file.back.LocalFileBackTest.java
/**
 * Exercises the WRITE file operation of the local file back-end.
 *
 * <p>On a coin flip (or when the leaf file already exists) the random payload
 * is pre-written to the expected leaf path; after the back-end operates, that
 * same case verifies the on-disk leaf contents equal the payload.
 */
@Test(enabled = true, invocationCount = 1)
public void write() throws IOException, FileBackException {
    fileContext.fileOperationSupplier(() -> FileOperation.WRITE);
    final ByteBuffer fileKey = randomFileKey();
    fileContext.targetKeySupplier(() -> fileKey);
    // Resolve the on-disk leaf path the back-end will use for this key.
    final Path leafPath = LocalFileBack.leafPath(rootPath, fileKey, true);
    final byte[] fileBytes = randomFileBytes();
    // Decide whether to pre-write the payload; only then do we assert on
    // the final file contents below.
    final boolean fileWritten = Files.isRegularFile(leafPath) || current().nextBoolean();
    if (fileWritten) {
        // NOTE(review): if leafPath already exists, CREATE_NEW throws
        // FileAlreadyExistsException here — this relies on random keys not
        // colliding with files left by earlier runs; confirm intended.
        Files.write(leafPath, fileBytes, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE);
        logger.trace("file written");
    }
    // Source channel feeds the payload; target consumer copies it through
    // whatever channel the file back hands us.
    fileContext.sourceChannelSupplier(() -> Channels.newChannel(new ByteArrayInputStream(fileBytes)));
    fileContext.targetChannelConsumer(v -> {
        try {
            final long copied = IOUtils.copyLarge(new ByteArrayInputStream(fileBytes),
                    Channels.newOutputStream(v));
        } catch (final IOException ioe) {
            logger.error("failed to copy", ioe);
        }
    });
    fileBack.operate(fileContext);
    if (fileWritten) {
        // The operation must leave the leaf file equal to the payload.
        final byte[] actual = Files.readAllBytes(leafPath);
        assertEquals(actual, fileBytes);
    }
}
From source file:org.eclipse.jgit.lfs.server.fs.LfsServerTest.java
protected long getContent(String hexId, Path f) throws IOException { try (CloseableHttpClient client = HttpClientBuilder.create().build()) { HttpGet request = new HttpGet(server.getURI() + "/lfs/objects/" + hexId); HttpResponse response = client.execute(request); checkResponseStatus(response);//w ww. jav a 2s . c o m HttpEntity entity = response.getEntity(); long pos = 0; try (InputStream in = entity.getContent(); ReadableByteChannel inChannel = Channels.newChannel(in); FileChannel outChannel = FileChannel.open(f, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)) { long transferred; do { transferred = outChannel.transferFrom(inChannel, pos, MiB); pos += transferred; } while (transferred > 0); } return pos; } }
From source file:de.fatalix.book.importer.BookMigrator.java
/**
 * Exports all books from the Solr index to disk, batch by batch, recursing
 * until the full result set has been processed.
 *
 * <p>For each book a folder "&lt;author&gt;-&lt;title&gt;" is created containing the
 * epub/mobi payloads, the cover image and a JSON metadata file. All files
 * are written with {@code CREATE_NEW}, so re-running against a non-empty
 * export folder fails instead of overwriting.
 *
 * <p>Fix: the original wrote {@code bookEntry.getMobi()} into the .epub
 * file (copy-paste bug); the epub payload is now written from getEpub().
 * Also removed a {@code gson.toJson(metaData)} call whose result was
 * discarded.
 *
 * <p>NOTE(review): payloads and metadata are only exported when a cover is
 * present — books without a cover are skipped entirely; confirm intended.
 */
private static void exportBatchWise(SolrServer server, File exportFolder, int batchSize, int offset, Gson gson)
        throws SolrServerException, IOException {
    QueryResponse response = SolrHandler.searchSolrIndex(server, "*:*", batchSize, offset);
    List<BookEntry> bookEntries = response.getBeans(BookEntry.class);
    System.out.println(
            "Retrieved " + (bookEntries.size() + offset) + " of " + response.getResults().getNumFound());
    for (BookEntry bookEntry : bookEntries) {
        String bookTitle = bookEntry.getTitle();
        // ':' is illegal in file names on several platforms
        bookTitle = bookTitle.replaceAll(":", " ");
        File bookFolder = new File(exportFolder, bookEntry.getAuthor() + "-" + bookTitle);
        bookFolder.mkdirs();
        if (bookEntry.getCover() != null) {
            if (bookEntry.getEpub() != null) {
                File bookData = new File(bookFolder, bookEntry.getAuthor() + "-" + bookTitle + ".epub");
                // was: bookEntry.getMobi() — the mobi payload ended up in the .epub file
                Files.write(bookData.toPath(), bookEntry.getEpub(), StandardOpenOption.CREATE_NEW);
            }
            if (bookEntry.getMobi() != null) {
                File bookData = new File(bookFolder, bookEntry.getAuthor() + "-" + bookTitle + ".mobi");
                Files.write(bookData.toPath(), bookEntry.getMobi(), StandardOpenOption.CREATE_NEW);
            }
            File coverData = new File(bookFolder, bookEntry.getAuthor() + "-" + bookTitle + ".jpg");
            Files.write(coverData.toPath(), bookEntry.getCover(), StandardOpenOption.CREATE_NEW);
            File metaDataFile = new File(bookFolder, bookEntry.getAuthor() + "-" + bookTitle + ".json");
            BookMetaData metaData = new BookMetaData(bookEntry.getAuthor(), bookEntry.getTitle(),
                    bookEntry.getIsbn(), bookEntry.getPublisher(), bookEntry.getDescription(),
                    bookEntry.getLanguage(), bookEntry.getReleaseDate(), bookEntry.getMimeType(),
                    bookEntry.getUploadDate(), bookEntry.getViewed(), bookEntry.getShared());
            Files.write(metaDataFile.toPath(), gson.toJson(metaData).getBytes(), StandardOpenOption.CREATE_NEW);
        }
    }
    // Recurse for the next batch until all documents have been exported.
    if (response.getResults().getNumFound() > offset) {
        exportBatchWise(server, exportFolder, batchSize, offset + batchSize, gson);
    }
}
From source file:com.rvantwisk.cnctools.controllers.CNCToolsController.java
private void saveGCode(String gCode, final File filename) { if (gCode == null) { throw new IllegalArgumentException("gCode most not be null"); }//from ww w . j a v a 2 s. co m if (filename == null) { throw new IllegalArgumentException("filename most not be null"); } try (BufferedWriter br = Files.newBufferedWriter(filename.toPath(), Charset.forName("UTF-8"), new OpenOption[] { StandardOpenOption.CREATE_NEW })) { // Cleanup empty lines // We do this currently in memory because we don't expect large files anyways while (gCode.lastIndexOf(SEPERATOR + SEPERATOR) != -1) { gCode = gCode.replaceAll(SEPERATOR + SEPERATOR, SEPERATOR); } br.write(gCode.trim()); br.flush(); br.close(); } catch (IOException ex) { System.out.println(ex.getMessage()); } }