List of usage examples for java.nio.file StandardOpenOption CREATE
StandardOpenOption CREATE
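StandardOpenOption.CREATE tells Files.newByteChannel, Files.newOutputStream, Files.write and related methods to open the file if it already exists and to create it otherwise; it only has an effect when the file is opened for writing, and it is typically combined with WRITE plus either APPEND or TRUNCATE_EXISTING. Before the collected real-world examples, here is a minimal, self-contained sketch of that pattern (the file name example.log is only illustrative):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class CreateOptionExample {

    public static void main(String[] args) throws IOException {
        // Illustrative target path; adjust to your environment.
        Path target = Paths.get("example.log");

        // CREATE opens the file if it exists, or creates it otherwise.
        // Combined with APPEND, repeated runs keep adding lines instead of truncating.
        Files.write(target, "hello\n".getBytes(StandardCharsets.UTF_8),
                StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND);
    }
}

If the open should fail when the file already exists, use StandardOpenOption.CREATE_NEW instead of CREATE.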
From source file:org.cryptomator.webdav.jackrabbit.resources.EncryptedDir.java
private void addMemberFile(DavResource resource, InputContext inputContext) throws DavException {
    final Path childPath = ResourcePathUtils.getPhysicalPath(resource);
    SeekableByteChannel channel = null;
    try {
        channel = Files.newByteChannel(childPath, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
        cryptor.encryptFile(inputContext.getInputStream(), channel);
    } catch (SecurityException e) {
        throw new DavException(DavServletResponse.SC_FORBIDDEN, e);
    } catch (IOException e) {
        LOG.error("Failed to create file.", e);
        throw new IORuntimeException(e);
    } finally {
        IOUtils.closeQuietly(channel);
        IOUtils.closeQuietly(inputContext.getInputStream());
    }
}
From source file:divconq.util.IOUtil.java
public static boolean saveEntireFile2(Path dest, String content) {
    try {
        Files.createDirectories(dest.getParent());
        Files.write(dest, Utf8Encoder.encode(content), StandardOpenOption.CREATE,
                StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE, StandardOpenOption.SYNC);
    } catch (Exception x) {
        return false;
    }
    return true;
}
From source file:eu.freme.bpt.service.FailurePolicyTest.java
private File createDir() throws IOException {
    File dir = Files.createTempDirectory("testDir").toFile();
    try {
        FileUtils.forceDeleteOnExit(dir);
    } catch (IOException e) {
        e.printStackTrace();
    }
    // now create some files in it
    File file1 = new File(dir, "file1");
    File file2 = new File(dir, "file2");
    Files.write(file1.toPath(), "Hello".getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE,
            StandardOpenOption.WRITE);
    Files.write(file2.toPath(), "World".getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE,
            StandardOpenOption.WRITE);
    return dir;
}
From source file:com.splicemachine.derby.stream.control.output.ControlExportDataSetWriter.java
@Override
public DataSet<LocatedRow> write() throws StandardException {
    Integer count;
    String extension = ".csv";
    SpliceOperation operation = exportFunction.getOperation();
    boolean isCompressed = path.endsWith(".gz");
    if (!isCompressed && operation instanceof ExportOperation) {
        ExportOperation op = (ExportOperation) exportFunction.getOperation();
        isCompressed = op.getExportParams().isCompression();
        if (isCompressed) {
            extension += ".gz";
        }
    }
    try {
        final DistributedFileSystem dfs = SIDriver.driver().fileSystem();
        dfs.createDirectory(path, false);
        // The 'part-r-00000' naming convention is what spark uses so we are consistent on control side
        try (OutputStream fileOut = dfs.newOutputStream(path /*directory*/, "part-r-00000" + extension /*file*/,
                StandardOpenOption.CREATE)) {
            OutputStream toWrite = fileOut;
            if (isCompressed) {
                toWrite = new GZIPOutputStream(fileOut);
            }
            count = exportFunction.call(toWrite, dataSet.toLocalIterator());
        }
        dfs.touchFile(path, ExportFile.SUCCESS_FILE);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    ValueRow valueRow = new ValueRow(2);
    valueRow.setColumn(1, new SQLLongint(count));
    valueRow.setColumn(2, new SQLInteger(0));
    return new ControlDataSet<>(new SingletonIterator(new LocatedRow(valueRow)));
}
From source file:com.vaushell.superpipes.transforms.done.T_Done.java
@Override
public Message transform(final Message message) throws Exception {
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("[" + getNode().getNodeID() + "/" + getClass().getSimpleName()
                + "] transform message : " + Message.formatSimple(message));
    }
    final String ID = buildID(message, fields);
    if (ids.contains(ID)) {
        return null;
    }
    // Save message ID. Won't be replayed.
    ids.add(ID);
    try (final BufferedWriter bfw = Files.newBufferedWriter(path, Charset.forName("utf-8"),
            StandardOpenOption.APPEND, StandardOpenOption.CREATE)) {
        bfw.write(ID);
        bfw.write(' ');
        bfw.write(Message.formatSimple(message));
        bfw.newLine();
    }
    return message;
}
From source file:org.wso2.carbon.user.mgt.recorder.DefaultUserDeletionEventRecorder.java
private void writeToCustomFile(String path, String content) throws RecorderException {
    // Create the file if it does not exist. Open with write permission and append to the end.
    try (OutputStream outputStream = Files.newOutputStream(Paths.get(path), StandardOpenOption.CREATE,
            StandardOpenOption.WRITE, StandardOpenOption.APPEND)) {
        BufferedWriter bufferedWriter = new BufferedWriter(new OutputStreamWriter(outputStream));
        bufferedWriter.write(content);
        bufferedWriter.newLine();
        bufferedWriter.flush();
    } catch (IOException e) {
        throw new RecorderException("Error while writing content to the file.", e);
    }
}
From source file:ai.susi.mind.SusiIdentity.java
/**
 * Add a cognition to the identity. This causes cognitions to be forgotten
 * once the awareness threshold has passed.
 * @param cognition
 * @return self
 */
public SusiIdentity add(SusiCognition cognition) {
    this.short_term_memory.learn(cognition);
    List<SusiCognition> forgottenCognitions = this.short_term_memory.limitAwareness(this.attention);
    forgottenCognitions.forEach(c -> this.long_term_memory.learn(c));
    // TODO add a rule to memorize only the most important ones
    try {
        Files.write(this.memorydump.toPath(), UTF8.getBytes(cognition.getJSON().toString(0) + "\n"),
                StandardOpenOption.APPEND, StandardOpenOption.CREATE);
    } catch (JSONException | IOException e) {
        e.printStackTrace();
    }
    return this;
}
From source file:org.cryptomator.webdav.jackrabbit.EncryptedDir.java
private void addMemberFile(DavResource resource, InputContext inputContext) throws DavException {
    final Path childPath = ResourcePathUtils.getPhysicalPath(resource);
    try (final SeekableByteChannel channel = Files.newByteChannel(childPath, StandardOpenOption.WRITE,
            StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)) {
        cryptor.encryptFile(inputContext.getInputStream(), channel);
    } catch (SecurityException e) {
        throw new DavException(DavServletResponse.SC_FORBIDDEN, e);
    } catch (IOException e) {
        LOG.error("Failed to create file.", e);
        throw new IORuntimeException(e);
    } catch (CounterOverflowException e) {
        // let's indicate this to the client as a "file too big" error
        throw new DavException(DavServletResponse.SC_INSUFFICIENT_SPACE_ON_RESOURCE, e);
    } catch (EncryptFailedException e) {
        LOG.error("Encryption failed for unknown reasons.", e);
        throw new IllegalStateException("Encryption failed for unknown reasons.", e);
    } finally {
        IOUtils.closeQuietly(inputContext.getInputStream());
    }
}
From source file:net.e2.bw.idreg.db2ldif.Db2Ldif.java
/**
 * Executes the DB-to-LDIF conversion
 */
private void execute() {
    // You may just specify the DB name
    if (!dbDatabase.startsWith("jdbc:mysql")) {
        dbDatabase = "jdbc:mysql://localhost:3306/" + dbDatabase;
    }
    // Instantiate a DB
    TeamworkDB db = new TeamworkDB(dbDatabase, dbUser, dbPassword);
    StringBuilder ldif = new StringBuilder();
    Map<String, String> userNames = new HashMap<>();
    importUsers(db, ldif, userNames);
    importGroups(db, ldif, userNames);
    if (out == null) {
        System.out.println(ldif);
    } else {
        try {
            Files.write(Paths.get(out), ldif.toString().getBytes("UTF-8"), StandardOpenOption.CREATE);
            System.out.println("Wrote LDIF to " + out);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
From source file:io.redlink.solrlib.embedded.EmbeddedCoreContainer.java
@Override
@SuppressWarnings({ "squid:S3725", "squid:S3776" })
protected synchronized void init(ExecutorService executorService) throws IOException {
    Preconditions.checkState(Objects.isNull(coreContainer), "Already initialized!");
    if (solrHome == null) {
        solrHome = Files.createTempDirectory("solr-home");
        log.debug("No solr-home set, using temp directory {}", solrHome);
        deleteOnShutdown = true;
    }
    final Path absoluteSolrHome = this.solrHome.toAbsolutePath();
    if (Files.isDirectory(absoluteSolrHome)) {
        log.trace("solr-home exists: {}", absoluteSolrHome);
    } else {
        Files.createDirectories(absoluteSolrHome);
        log.debug("Created solr-home: {}", absoluteSolrHome);
    }
    final Path lib = absoluteSolrHome.resolve("lib");
    if (Files.isDirectory(lib)) {
        log.trace("lib-directory exists: {}", lib);
    } else {
        Files.createDirectories(lib);
        log.debug("Created solr-lib directory: {}", lib);
    }
    final Path solrXml = absoluteSolrHome.resolve("solr.xml");
    if (!Files.exists(solrXml)) {
        log.info("no solr.xml found, creating new at {}", solrXml);
        try (PrintStream writer = new PrintStream(Files.newOutputStream(solrXml, StandardOpenOption.CREATE))) {
            writer.printf("<!-- Generated by %s on %tF %<tT -->%n", getClass().getSimpleName(), new Date());
            writer.println("<solr>");
            writer.printf(" <str name=\"%s\">%s</str>%n", "sharedLib", absoluteSolrHome.relativize(lib));
            writer.println("</solr>");
        }
    } else {
        log.trace("found solr.xml: {}", solrXml);
    }
    for (SolrCoreDescriptor coreDescriptor : coreDescriptors) {
        final String coreName = coreDescriptor.getCoreName();
        if (availableCores.containsKey(coreName)) {
            log.warn("CoreName-Clash: {} already initialized. Skipping {}", coreName, coreDescriptor.getClass());
            continue;
        }
        final Path coreDir = absoluteSolrHome.resolve(coreName);
        Files.createDirectories(coreDir);
        coreDescriptor.initCoreDirectory(coreDir, lib);
        final Properties coreProperties = new Properties();
        final Path corePropertiesFile = coreDir.resolve("core.properties");
        if (Files.exists(corePropertiesFile)) {
            try (InputStream inStream = Files.newInputStream(corePropertiesFile, StandardOpenOption.CREATE)) {
                coreProperties.load(inStream);
            }
            log.debug("core.properties for {} found, updating", coreName);
        } else {
            log.debug("Creating new core {} in {}", coreName, coreDir);
        }
        coreProperties.setProperty("name", coreName);
        try (OutputStream outputStream = Files.newOutputStream(corePropertiesFile)) {
            coreProperties.store(outputStream, null);
        }
        if (coreDescriptor.getNumShards() > 1 || coreDescriptor.getReplicationFactor() > 1) {
            log.warn("Deploying {} to EmbeddedCoreContainer, ignoring config of shards={},replication={}",
                    coreName, coreDescriptor.getNumShards(), coreDescriptor.getReplicationFactor());
        }
        availableCores.put(coreName, coreDescriptor);
    }
    log.info("Starting {} in solr-home '{}'", getClass().getSimpleName(), absoluteSolrHome);
    coreContainer = CoreContainer.createAndLoad(absoluteSolrHome, solrXml);
    availableCores.values().forEach(coreDescriptor -> {
        final String coreName = coreDescriptor.getCoreName();
        try (SolrClient solrClient = createSolrClient(coreName)) {
            final NamedList<Object> coreStatus = CoreAdminRequest.getStatus(coreName, solrClient)
                    .getCoreStatus(coreName);
            final NamedList<Object> indexStatus = coreStatus == null ? null
                    : (NamedList<Object>) coreStatus.get("index");
            final Object lastModified = indexStatus == null ? null : indexStatus.get("lastModified");
            // lastModified is null if there was never an update
            scheduleCoreInit(executorService, coreDescriptor, lastModified == null);
        } catch (SolrServerException | IOException e) {
            if (log.isDebugEnabled()) {
                log.error("Error initializing core {}", coreName, e);
            }
            //noinspection ThrowableResultOfMethodCallIgnored
            coreInitExceptions.put(coreName, e);
        }
    });
}