Usage examples for java.nio.file.StandardOpenOption.WRITE
StandardOpenOption.WRITE
To view the full source code for each java.nio.file StandardOpenOption.WRITE example, click its Source Link.
From source file:org.jboss.pull.player.LabelProcessor.java
/**
 * Processes the pull requests {@link #add(org.jboss.dmr.ModelNode) added}.
 * <p/>
 * This should normally only be invoked once as it makes API calls to GitHub.
 * After processing, the (possibly pruned) issues model is persisted as JSON to
 * {@code path}; any {@link IOException} is reported to {@code err} rather than
 * propagated.
 */
void process() {
    try {
        // Fetch all currently-open issues up front to lessen the hits to the GitHub API.
        final ModelNode openIssues = getIssues();
        // Walk every locally-tracked issue and decide whether it still needs tracking.
        for (Property property : issuesModel.asPropertyList()) {
            final ModelNode value = property.getValue();
            // The PR URL is the key used to correlate the local entry with an open issue.
            final String prUrl = value.get("pull_request_url").asString();
            if (openIssues.hasDefined(prUrl)) {
                final ModelNode openIssue = openIssues.get(prUrl);
                // Labels currently set on the open issue.
                final List<String> currentLabels = getLabels(openIssue);
                if (currentLabels.isEmpty()) {
                    // Nothing to clean up; stop tracking this entry.
                    issuesModel.remove(property.getName());
                } else {
                    boolean changeRequired = false;
                    // Rebuild the label set, replacing/dropping labels the configuration
                    // marks as removable; only such labels force an API update.
                    final List<String> newLabels = new ArrayList<>();
                    for (String label : currentLabels) {
                        if (labels.isRemovable(label)) {
                            final String newLabel = labels.getReplacement(label);
                            if (newLabel != null) {
                                newLabels.add(newLabel);
                            }
                            changeRequired = true;
                        } else {
                            newLabels.add(label);
                        }
                    }
                    // Only push new labels once the PR itself changed (a "new-sha" was recorded).
                    if (changeRequired && value.hasDefined("new-sha")) {
                        final String issueUrl = value.get("issue_url").asString();
                        // Set the new labels via the GitHub API.
                        setLabels(issueUrl, newLabels);
                        // Entry handled; stop tracking it.
                        issuesModel.remove(property.getName());
                    } else if (!changeRequired) {
                        // No label change required at all; stop tracking the entry.
                        issuesModel.remove(property.getName());
                    }
                    // NOTE(review): when changeRequired is true but no "new-sha" is defined the
                    // entry is deliberately kept for a later run — confirm this is intended.
                }
            } else {
                // The issue/PR is no longer open; we can just delete the entry.
                issuesModel.remove(property.getName());
            }
        }
        // Persist the pruned model as JSON (SYNC forces the data to disk on close).
        try (final PrintWriter writer = new PrintWriter(Files.newBufferedWriter(path, StandardCharsets.UTF_8,
                StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING,
                StandardOpenOption.SYNC, StandardOpenOption.CREATE))) {
            issuesModel.writeJSONString(writer, false);
        }
    } catch (IOException e) {
        // Best-effort reporting to the configured error stream; processing is not retried.
        e.printStackTrace(err);
    }
}
From source file:io.spotnext.maven.mojo.TransformTypesMojo.java
/** {@inheritDoc} */ @Override/* www. ja v a 2 s .c o m*/ public void execute() throws MojoExecutionException { if (skip) { getLog().info("Skipping type transformation!"); return; } trackExecution("start"); final ClassLoader classLoader = getClassloader(); final List<ClassFileTransformer> transformers = getClassFileTransformers(classLoader); List<File> classFiles = FileUtils.getFiles(project.getBuild().getOutputDirectory(), f -> f.getAbsolutePath().endsWith(".class")); getLog().debug("Found class files for processing: " + classFiles.stream().map(f -> f.getName()).collect(Collectors.joining(", "))); if (CollectionUtils.isNotEmpty(transformers)) { if (CollectionUtils.isNotEmpty(classFiles)) { getLog().info(String.format("Transforming %s classes", classFiles.size())); for (final File f : classFiles) { if (f.getName().endsWith(Constants.CLASS_EXTENSION)) { String relativeClassFilePath = StringUtils.remove(f.getPath(), project.getBuild().getOutputDirectory()); relativeClassFilePath = StringUtils.removeStart(relativeClassFilePath, "/"); final String className = relativeClassFilePath.substring(0, relativeClassFilePath.length() - Constants.CLASS_EXTENSION.length()); trackExecution("Loading class: " + f.getAbsolutePath()); byte[] byteCode; try { byteCode = Files.readAllBytes(f.toPath()); } catch (final IOException e) { String message = String.format("Can't read bytecode for class %s", className); buildContext.addMessage(f, 0, 0, message, BuildContext.SEVERITY_ERROR, e); throw new IllegalStateException(message, e); } trackExecution("Loaded class: " + f.getAbsolutePath()); for (final ClassFileTransformer t : transformers) { try { // log exceptions into separate folder, to be able to inspect them even if Eclipse swallows them ... 
if (t instanceof AbstractBaseClassTransformer) { ((AbstractBaseClassTransformer) t).setErrorLogger(this::logError); } // returns null if nothing has been transformed byteCode = t.transform(classLoader, className, null, null, byteCode); } catch (final Exception e) { String exception = "Exception during transformation of class: " + f.getAbsolutePath() + "\n" + e.getMessage(); trackExecution(exception); String message = String.format("Can't transform class %s, transformer %s: %s", className, t.getClass().getSimpleName(), ExceptionUtils.getStackTrace(e)); buildContext.addMessage(f, 0, 0, message, BuildContext.SEVERITY_ERROR, e); throw new MojoExecutionException(exception, e); } } if (byteCode != null && byteCode.length > 0) { try { Files.write(f.toPath(), byteCode, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING); trackExecution("Saved transformed class: " + f.getAbsolutePath()); } catch (final IOException e) { String message = "Could not write modified class: " + relativeClassFilePath; buildContext.addMessage(f, 0, 0, message, BuildContext.SEVERITY_ERROR, e); throw new IllegalStateException(message); } finally { buildContext.refresh(f); getLog().info("Applied transformation to type: " + f.getAbsolutePath()); } } else { trackExecution("No changes made for class: " + f.getAbsolutePath()); getLog().debug("No transformation was applied to type: " + f.getAbsolutePath()); } } } } else { getLog().info("No class files found"); } trackExecution("All classes in build output folder transformed"); if (includeJars) { final String packaging = project.getPackaging(); final Artifact artifact = project.getArtifact(); if ("jar".equals(packaging) && artifact != null) { try { final File source = artifact.getFile(); if (source.isFile()) { final File destination = new File(source.getParent(), "instrument.jar"); final JarTransformer transformer = new JarTransformer(getLog(), classLoader, Arrays.asList(source), transformers); 
transformer.transform(destination); final File sourceRename = new File(source.getParent(), "notransform-" + source.getName()); if (source.renameTo(sourceRename)) { throw new MojoExecutionException(String.format("Could not move %s to %s", source.toString(), sourceRename.toString())); } if (destination.renameTo(sourceRename)) { throw new MojoExecutionException(String.format("Could not move %s to %s", destination.toString(), sourceRename.toString())); } buildContext.refresh(destination); } } catch (final Exception e) { buildContext.addMessage(artifact.getFile(), 0, 0, e.getMessage(), BuildContext.SEVERITY_ERROR, e); throw new MojoExecutionException(e.getMessage(), e); } } else { getLog().debug(String.format("Artifact %s not a jar file", artifact != null ? (artifact.getGroupId() + ":" + artifact.getArtifactId()) : "<null>")); } } } else { getLog().info("No class transformers configured"); } }
From source file:com.evolveum.midpoint.model.intest.manual.CsvBackingStore.java
protected void deleteInCsv(String username) throws IOException { List<String> lines = Files.readAllLines(Paths.get(CSV_TARGET_FILE.getPath())); Iterator<String> iterator = lines.iterator(); while (iterator.hasNext()) { String line = iterator.next(); String[] cols = line.split(","); if (cols[0].matches("\"" + username + "\"")) { iterator.remove();/*from w w w. j a va 2 s .co m*/ } } Files.write(Paths.get(CSV_TARGET_FILE.getPath()), lines, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING); }
From source file:com.github.jinahya.simple.file.back.LocalFileBackTest.java
@Test(enabled = true, invocationCount = 1) public void read() throws IOException, FileBackException { fileContext.fileOperationSupplier(() -> FileOperation.READ); final ByteBuffer fileKey = randomFileKey(); fileContext.sourceKeySupplier(() -> fileKey); final Path leafPath = LocalFileBack.leafPath(rootPath, fileKey, true); final byte[] fileBytes = randomFileBytes(); final boolean fileWritten = Files.isRegularFile(leafPath) || current().nextBoolean(); if (fileWritten) { Files.write(leafPath, fileBytes, StandardOpenOption.CREATE, StandardOpenOption.WRITE); logger.trace("file written"); }//from w w w . j a v a2s . c o m fileContext.sourceChannelConsumer(v -> { final byte[] actual = new byte[fileBytes.length]; try { IOUtils.readFully(Channels.newInputStream(v), actual); if (fileWritten) { assertEquals(actual, fileBytes); } } catch (final IOException ioe) { fail("failed to read from source channel", ioe); } }); final ByteArrayOutputStream targetStream = new ByteArrayOutputStream(); fileContext.targetChannelSupplier(() -> { return Channels.newChannel(targetStream); }); fileBack.operate(fileContext); if (fileWritten) { assertEquals(targetStream.toByteArray(), fileBytes); } }
From source file:com.facebook.buck.cxx.CxxLibraryIntegrationTest.java
@Test public void thinArchivesDoNotContainAbsolutePaths() throws IOException { CxxPlatform cxxPlatform = CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build())); assumeTrue(cxxPlatform.getAr().supportsThinArchives()); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "cxx_library", tmp); workspace.setUp();/*w w w . j av a2 s . co m*/ Path archive = workspace.buildAndReturnOutput("-c", "cxx.archive_contents=thin", "//:foo#default,static"); // NOTE: Replace the thin header with a normal header just so the commons compress parser // can parse the archive contents. try (OutputStream outputStream = Files.newOutputStream(workspace.getPath(archive), StandardOpenOption.WRITE)) { outputStream.write(ObjectFileScrubbers.GLOBAL_HEADER); } // Now iterate the archive and verify it contains no absolute paths. try (ArArchiveInputStream stream = new ArArchiveInputStream( new FileInputStream(workspace.getPath(archive).toFile()))) { ArArchiveEntry entry; while ((entry = stream.getNextArEntry()) != null) { if (!entry.getName().isEmpty()) { assertFalse("found absolute path: " + entry.getName(), workspace.getDestPath().getFileSystem().getPath(entry.getName()).isAbsolute()); } } } }
From source file:com.github.jinahya.verbose.codec.BinaryCodecTest.java
@Test(enabled = true, invocationCount = 128) public void encodeDecodeRandomFiles() throws IOException { final Path expectedPath = Files.createTempFile("test", null); getRuntime().addShutdownHook(new Thread(() -> { try {//www. j a v a 2 s . c om Files.delete(expectedPath); } catch (final IOException ioe) { ioe.printStackTrace(System.err); } })); final byte[] array = new byte[1024]; final ByteBuffer buffer = ByteBuffer.wrap(array); try (final FileChannel decodedChannel = FileChannel.open(expectedPath, StandardOpenOption.WRITE)) { final int count = current().nextInt(128); for (int i = 0; i < count; i++) { current().nextBytes(array); decodedChannel.write(buffer); } decodedChannel.force(false); } encodeDecode(expectedPath); }
From source file:de.digiway.rapidbreeze.server.model.download.Download.java
/** * Starts this {@linkplain Download}./*w w w . j a v a 2s .c om*/ * */ void start() { switch (statusHandler.getCurrentStatus()) { case RUNNING: return; case PAUSE: statusHandler.newStatus(DownloadStatus.RUNNING); return; } try { long startAt = 0; if (Files.exists(tempFile)) { try { startAt = Files.size(tempFile); } catch (IOException ex) { // File might be removed in the meantime startAt = 0; } } StorageProviderDownloadClient storageDownload = getDownloadClient(); throttledInputStream = new ThrottledInputStream(storageDownload.start(url, startAt)); throttledInputStream.setThrottle(throttleMaxBytesPerSecond); sourceChannel = Channels.newChannel(throttledInputStream); targetChannel = FileChannel.open(tempFile, StandardOpenOption.WRITE, StandardOpenOption.APPEND, StandardOpenOption.CREATE); targetChannel.position(startAt); } catch (IOException | RuntimeException ex) { LOG.log(Level.SEVERE, "An exception occured during data transfer setup for " + Download.class.getSimpleName() + ":" + this, ex); closeChannels(); cachedUrlStatus = null; statusHandler.newException(ex); return; } done = false; statusHandler.newStatus(DownloadStatus.RUNNING); }
From source file:at.tfr.securefs.xnio.MessageHandlerImpl.java
/**
 * Dispatches one JSON-encoded client {@code Message} (OPEN/READ, OPEN/WRITE, DATA,
 * CLOSE, ERROR) against the secure file store. Each open stream is tracked in
 * {@code activeStreams} under the message's unique key and bridged to the file system
 * through an XNIO half-duplex pipe with cipher-wrapped streams.
 *
 * @param json          raw JSON of the incoming message
 * @param messageSender callback used to send responses/errors back to the client
 * @throws IOException on any I/O failure (other exceptions are wrapped)
 */
@Override
public void handleMessage(String json, MessageSender messageSender) throws IOException {
    log.debug("handleMessage: " + json);
    final Message message = objectMapper.readValue(json, Message.class);
    Path path = configuration.getBasePath().resolve(message.getPath());
    // NOTE(review): this guard accepts only paths whose relativization to the base is
    // exactly ".." (i.e. direct children of basePath); deeper nested paths are rejected
    // too. Confirm that is the intended traversal protection and not an inverted check.
    if (!path.relativize(configuration.getBasePath()).toString().equals("..")) {
        throw new SecurityException("invalid path spec: " + message.getPath());
    }
    try {
        final String uniqueKey = message.getUniqueKey();
        // Find the channel pipe (if any) already registered for this data stream.
        StreamInfo<ChannelPipe<StreamSourceChannel, StreamSinkChannel>> info = activeStreams.getStreams()
                .get(uniqueKey);
        // An OPEN for a key that already has a stream is a protocol violation: close it.
        if (message.getType() == MessageType.OPEN && info != null) {
            log.warn("illegal state on Open stream: " + message);
            IoUtils.safeClose(info.getStream().getRightSide());
            messageSender.send(new Message(MessageType.ERROR, message.getPath()).key(uniqueKey));
        }
        switch (message.getType()) {
        case ERROR:
            log.info("error from Client: " + json);
            // NOTE(review): no break — an ERROR falls through and closes the stream like
            // CLOSE. Looks intentional; confirm.
        case CLOSE: {
            if (info != null) {
                IoUtils.safeClose(info.getStream().getRightSide());
            }
        }
            break;
        case OPEN: {
            switch (message.getSubType()) {
            case READ: {
                // Decrypting stream over the stored file; data flows file -> client.
                final InputStream is = Files.newInputStream(path, StandardOpenOption.READ);
                final InputStream cis = new CipherInputStream(is, getCipher(message, Cipher.DECRYPT_MODE));
                final ChannelPipe<StreamSourceChannel, StreamSinkChannel> pipe = xnioWorker
                        .createHalfDuplexPipe();
                // Left side read listener: forward decrypted chunks to the client.
                pipe.getLeftSide().getReadSetter().set(new SecureChannelWriterBase(message) {
                    @Override
                    protected void write(Message message) {
                        try {
                            messageSender.send(message);
                        } catch (Exception e) {
                            log.warn("cannot write message=" + message + " : " + e, e);
                        }
                    }
                });
                // On close: deregister the stream and notify the client.
                pipe.getLeftSide().getCloseSetter().set(new ChannelListener<StreamSourceChannel>() {
                    @Override
                    public void handleEvent(StreamSourceChannel channel) {
                        activeStreams.getStreams().remove(uniqueKey);
                        messageSender.send(new Message(MessageType.CLOSE, message.getPath()).key(uniqueKey));
                    }
                });
                // Right side write listener: pump the decrypted file into the pipe.
                pipe.getRightSide().getWriteSetter().set(new ChannelListener<StreamSinkChannel>() {
                    private byte[] bytes = new byte[Constants.BUFFER_SIZE];

                    @Override
                    public void handleEvent(StreamSinkChannel channel) {
                        try {
                            int count = 0;
                            while ((count = cis.read(bytes, 0, bytes.length)) > 0) {
                                if (count > 0) {
                                    Channels.writeBlocking(pipe.getRightSide(),
                                            ByteBuffer.wrap(bytes, 0, count));
                                }
                                // NOTE(review): count < 0 can never hold inside this
                                // loop (condition requires count > 0); the close branch
                                // is dead code and EOF never closes the right side here.
                                if (count < 0) {
                                    pipe.getRightSide().close();
                                } else {
                                    channel.resumeWrites();
                                }
                            }
                        } catch (Exception e) {
                            log.warn("cannot read from cypher: " + e, e);
                            IoUtils.safeClose(channel);
                        }
                    }
                });
                activeStreams.getStreams().put(uniqueKey,
                        new StreamInfo<ChannelPipe<StreamSourceChannel, StreamSinkChannel>>(pipe,
                                message.getPath()));
                // Start sending data.
                pipe.getLeftSide().resumeReads();
                pipe.getRightSide().resumeWrites();
            }
                break;
            case WRITE: {
                // Encrypting stream to the stored file; data flows client -> file.
                Files.createDirectories(path.getParent());
                OutputStream os = Files.newOutputStream(path, StandardOpenOption.CREATE,
                        StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE);
                OutputStream cos = new CipherOutputStream(os, getCipher(message, Cipher.ENCRYPT_MODE));
                ChannelPipe<StreamSourceChannel, StreamSinkChannel> pipe = xnioWorker.createHalfDuplexPipe();
                // Left side read listener: drain incoming DATA into the cipher stream.
                pipe.getLeftSide().getReadSetter().set(new SecureChannelReaderBase() {
                    @Override
                    public void handleEvent(StreamSourceChannel channel) {
                        readChannel(message, cos, pipe, channel);
                    }
                });
                // On close: flush/close the cipher stream, deregister, notify the client.
                pipe.getLeftSide().getCloseSetter().set(new SecureChannelReaderBase() {
                    @Override
                    public void handleEvent(StreamSourceChannel channel) {
                        try {
                            cos.close();
                            // NOTE(review): removal uses pipe.toString() while registration
                            // below uses uniqueKey — the entry may never be removed; confirm.
                            activeStreams.getStreams().remove(pipe.toString());
                            messageSender
                                    .send(new Message(MessageType.CLOSE, message.getPath()).key(uniqueKey));
                            log.info("closed channel: " + pipe.toString());
                        } catch (IOException e) {
                            log.warn("cannot close stream: message=" + message + " : " + e, e);
                        }
                    }
                });
                activeStreams.getStreams().put(uniqueKey,
                        new StreamInfo<ChannelPipe<StreamSourceChannel, StreamSinkChannel>>(pipe,
                                message.getPath()));
                // Start receiving data.
                pipe.getLeftSide().resumeReads();
            }
                break;
            default:
                messageSender.send(new Message(MessageType.ERROR, message.getPath()).key(uniqueKey));
                break;
            }
        }
            break;
        case DATA: {
            // Payload chunk for an already-open stream; reject if none is registered.
            if (info != null) {
                Channels.writeBlocking(info.getStream().getRightSide(), ByteBuffer.wrap(message.getBytes()));
            } else {
                messageSender.send(new Message(MessageType.ERROR, message.getPath()).key(uniqueKey));
            }
        }
            break;
        }
    } catch (IOException e) {
        log.warn("cannot handle message: " + message + " : " + e, e);
        throw e;
    } catch (Exception e) {
        log.warn("cannot handle message: " + message + " : " + e, e);
        throw new IOException("cannot handle message: " + message + " : " + e, e);
    }
}
From source file:org.wrml.runtime.service.file.FileSystemService.java
public static void writeModelFile(final Model model, final Path modelFilePath, final URI fileFormatUri, final ModelWriteOptions writeOptions) throws IOException, ModelWriterException { final Context context = model.getContext(); OutputStream out = null;/* w ww . j a va 2 s. co m*/ try { Files.createDirectories(modelFilePath.getParent()); Files.deleteIfExists(modelFilePath); Files.createFile(modelFilePath); out = Files.newOutputStream(modelFilePath, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING); } catch (final IOException e) { IOUtils.closeQuietly(out); throw e; } try { context.writeModel(out, model, writeOptions, fileFormatUri); } catch (final ModelWriterException e) { IOUtils.closeQuietly(out); throw e; } IOUtils.closeQuietly(out); }
From source file:com.github.jinahya.simple.file.back.LocalFileBackTest.java
@Test(enabled = true, invocationCount = 1) public void write() throws IOException, FileBackException { fileContext.fileOperationSupplier(() -> FileOperation.WRITE); final ByteBuffer fileKey = randomFileKey(); fileContext.targetKeySupplier(() -> fileKey); final Path leafPath = LocalFileBack.leafPath(rootPath, fileKey, true); final byte[] fileBytes = randomFileBytes(); final boolean fileWritten = Files.isRegularFile(leafPath) || current().nextBoolean(); if (fileWritten) { Files.write(leafPath, fileBytes, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE); logger.trace("file written"); }//from w w w . ja va 2 s . c o m fileContext.sourceChannelSupplier(() -> Channels.newChannel(new ByteArrayInputStream(fileBytes))); fileContext.targetChannelConsumer(v -> { try { final long copied = IOUtils.copyLarge(new ByteArrayInputStream(fileBytes), Channels.newOutputStream(v)); } catch (final IOException ioe) { logger.error("failed to copy", ioe); } }); fileBack.operate(fileContext); if (fileWritten) { final byte[] actual = Files.readAllBytes(leafPath); assertEquals(actual, fileBytes); } }