Example usage for java.nio.file StandardOpenOption READ

List of usage examples for java.nio.file StandardOpenOption READ

Introduction

This page lists usage examples for java.nio.file.StandardOpenOption.READ.

Prototype

StandardOpenOption READ

Document

Open for read access.
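
A minimal sketch of the option in use, assuming a placeholder file example.txt that already exists in the working directory: READ requests read-only access, so the resulting stream or channel rejects writes (and READ cannot be combined with APPEND).

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class ReadOptionExample {
    public static void main(String[] args) throws IOException {
        Path path = Paths.get("example.txt"); // placeholder: any existing readable file

        // Open an InputStream for read access only.
        try (InputStream in = Files.newInputStream(path, StandardOpenOption.READ)) {
            System.out.println("first byte: " + in.read());
        }

        // Open a FileChannel for read access only; calling write() on this
        // channel would throw NonWritableChannelException.
        try (FileChannel channel = FileChannel.open(path, StandardOpenOption.READ)) {
            ByteBuffer buffer = ByteBuffer.allocate(16);
            int bytesRead = channel.read(buffer);
            System.out.println("read " + bytesRead + " bytes");
        }
    }
}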

Usage

From source file:org.cyclop.service.common.FileStorage.java

private FileChannel openForRead(Path histPath) throws IOException {
    File file = histPath.toFile();
    if (!file.exists() || !file.canRead()) {
        LOG.debug("History file not found: " + histPath);
        return null;
    }
    FileChannel byteChannel = FileChannel.open(histPath, StandardOpenOption.READ, StandardOpenOption.WRITE);
    FileChannel lockChannel = lock(histPath, byteChannel);
    return lockChannel;
}

From source file:org.linagora.linshare.webservice.uploadrequest.impl.FlowUploaderRestServiceImpl.java

@Path("/")
@POST
@Consumes("multipart/form-data")
@Override
public Response uploadChunk(@Multipart(CHUNK_NUMBER) long chunkNumber,
        @Multipart(TOTAL_CHUNKS) long totalChunks, @Multipart(CHUNK_SIZE) long chunkSize,
        @Multipart(TOTAL_SIZE) long totalSize, @Multipart(IDENTIFIER) String identifier,
        @Multipart(FILENAME) String filename, @Multipart(RELATIVE_PATH) String relativePath,
        @Multipart(FILE) InputStream file, MultipartBody body,
        @Multipart(REQUEST_URL_UUID) String uploadRequestUrlUuid, @Multipart(PASSWORD) String password)
        throws BusinessException {

    logger.debug("upload chunk number : " + chunkNumber);
    identifier = cleanIdentifier(identifier);
    Validate.isTrue(isValid(chunkNumber, chunkSize, totalSize, identifier, filename));
    try {
        logger.debug("writing chunk number : " + chunkNumber);
        java.nio.file.Path tempFile = getTempFile(identifier);
        FileChannel fc = FileChannel.open(tempFile, StandardOpenOption.CREATE, StandardOpenOption.APPEND);
        byte[] byteArray = IOUtils.toByteArray(file);
        fc.write(ByteBuffer.wrap(byteArray), (chunkNumber - 1) * chunkSize);
        fc.close();
        chunkedFiles.get(identifier).addChunk(chunkNumber);
        if (isUploadFinished(identifier, chunkSize, totalSize)) {
            logger.debug("upload finished ");
            InputStream inputStream = Files.newInputStream(tempFile, StandardOpenOption.READ);
            File tempFile2 = getTempFile(inputStream, "rest-flowuploader", filename);
            try {
                uploadRequestUrlFacade.addUploadRequestEntry(uploadRequestUrlUuid, password, tempFile2,
                        filename);
            } finally {
                deleteTempFile(tempFile2);
            }
            ChunkedFile remove = chunkedFiles.remove(identifier);
            Files.deleteIfExists(remove.getPath());
            return Response.ok("upload success").build();
        } else {
            logger.debug("upload pending ");
        }
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return Response.ok("upload success").build();
}

From source file:org.apache.nifi.util.MockFlowFile.java

/**
 * Asserts that the content of this FlowFile is the same as the content of
 * the given path
 *
 * @param path where to find content to compare to
 * @throws IOException if io error occurs while comparing content
 */
public void assertContentEquals(final Path path) throws IOException {
    try (final InputStream in = Files.newInputStream(path, StandardOpenOption.READ)) {
        assertContentEquals(in);
    }
}

From source file:ca.polymtl.dorsal.libdelorean.statedump.Statedump.java

/**
 * Retrieve a previously-saved statedump.
 *
 * @param parentPath
 *            The expected location of the statedump file. Like the
 *            corresponding parameter in {@link #dumpState}, this is the
 *            parent path of the TC-specific subdirectory.
 * @param ssid
 *            The ID of the state system to retrieve
 * @return The corresponding de-serialized statedump. Returns null if there
 *         is no statedump for this state system ID (or no statedump
 *         directory at all).
 */
public static @Nullable Statedump loadState(Path parentPath, String ssid) {
    /* Find the state dump directory */
    Path sdPath = parentPath.resolve(STATEDUMP_DIRECTORY);
    if (!Files.isDirectory(sdPath)) {
        return null;
    }

    /* Find the state dump file */
    String fileName = ssid + FILE_SUFFIX;
    Path filePath = sdPath.resolve(fileName);
    if (!Files.exists(filePath)) {
        return null;
    }

    try (InputStreamReader in = new InputStreamReader(
            Files.newInputStream(filePath, StandardOpenOption.READ))) {
        BufferedReader bufReader = new BufferedReader(in);
        String json = bufReader.lines().collect(Collectors.joining("\n")); //$NON-NLS-1$
        JSONObject root = new JSONObject(json);

        return Serialization.stateDumpFromJsonObject(root, ssid);
    } catch (IOException | JSONException e) {
        return null;
    }
}

From source file:de.decoit.visa.rdf.RDFManager.java

/**
 * Construct a new RDFManager object. It will open and clear the TDB
 * database at the specified location if it exists. Otherwise a new database
 * will be created. The program must have read and write access to the
 * database location.
 *
 * @param pLocation The TDB database will be opened at this location
 * @throws IOException if the VSA template directory is not accessible
 * @throws ParserConfigurationException
 * @throws SAXException
 */
public RDFManager(String pLocation) throws IOException, ParserConfigurationException, SAXException {
    source = new ArrayList<>();
    vsaTemplates = new ArrayList<>();
    activeNamedModel = null;

    // Load a list of available VSA templates
    DirectoryStream<Path> dirStream = Files.newDirectoryStream(Paths.get("res/vsa"), "*.xml");
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    for (Path p : dirStream) {
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(Files.newInputStream(p, StandardOpenOption.READ));

        doc.getDocumentElement().normalize();

        vsaTemplates.add(doc);
    }

    // Create or load the TDB database at pLocation
    ds = TDBFactory.createDataset(pLocation);

    ds.begin(ReadWrite.WRITE);

    try {
        // Do some cleanup if last run didn't clear the database
        // Remove all named models from the database
        Iterator<String> itNames = ds.listNames();
        ArrayList<String> names = new ArrayList<>();

        while (itNames.hasNext()) {
            names.add(itNames.next());
        }

        for (String n : names) {
            ds.removeNamedModel(n);
        }

        // Clear the default model
        ds.getDefaultModel().removeAll();

        ds.commit();
    } catch (Throwable ex) {
        ds.abort();
    } finally {
        ds.end();

        // Sync changes to disk
        TDB.sync(ds);
    }
}

From source file:it.greenvulcano.configuration.BaseConfigurationManager.java

@Override
public void deploy(String name) throws XMLConfigException, FileNotFoundException {

    Path configurationArchivePath = getConfigurationPath(name);

    Path current = Paths.get(XMLConfig.getBaseConfigPath());
    Path staging = current.getParent().resolve("deploy");
    Path destination = current.getParent().resolve(name);

    if (LOCK.tryLock()) {

        if (Files.exists(configurationArchivePath) && !Files.isDirectory(configurationArchivePath)) {

            try {

                ZipInputStream configurationArchive = new ZipInputStream(
                        Files.newInputStream(configurationArchivePath, StandardOpenOption.READ));

                LOG.debug("Starting deploy of configuration " + name);
                ZipEntry zipEntry = null;

                for (Path cfgFile : Files.walk(current).collect(Collectors.toSet())) {

                    if (!Files.isDirectory(cfgFile)) {

                        Path target = staging.resolve(current.relativize(cfgFile));
                        Files.createDirectories(target);

                        Files.copy(cfgFile, target, StandardCopyOption.REPLACE_EXISTING);
                    }

                }

                LOG.debug("Staging new config " + name);

                while ((zipEntry = configurationArchive.getNextEntry()) != null) {

                    Path entryPath = staging.resolve(zipEntry.getName());

                    LOG.debug("Adding resource: " + entryPath);
                    if (zipEntry.isDirectory()) {
                        entryPath.toFile().mkdirs();
                    } else {

                        Path parent = entryPath.getParent();
                        if (!Files.exists(parent)) {
                            Files.createDirectories(parent);
                        }

                        Files.copy(configurationArchive, entryPath, StandardCopyOption.REPLACE_EXISTING);
                    }

                }

                //**** Deleting old config dir
                LOG.debug("Removing old config: " + current);
                Files.walk(current, FileVisitOption.FOLLOW_LINKS).sorted(Comparator.reverseOrder())
                        .map(java.nio.file.Path::toFile).forEach(File::delete);

                LOG.debug("Deploy new config " + name + " in path " + destination);
                Files.move(staging, destination, StandardCopyOption.ATOMIC_MOVE);

                setXMLConfigBasePath(destination.toString());
                LOG.debug("Deploy complete");
                deployListeners.forEach(l -> l.onDeploy(destination));

            } catch (Exception e) {

                if (Objects.nonNull(staging) && Files.exists(staging)) {
                    LOG.error("Deploy failed, rollback to previous configuration", e);
                    try {
                        Files.walk(staging, FileVisitOption.FOLLOW_LINKS).sorted(Comparator.reverseOrder())
                                .map(java.nio.file.Path::toFile).forEach(File::delete);

                        setXMLConfigBasePath(current.toString());
                    } catch (IOException | InvalidSyntaxException rollbackException) {
                        LOG.error("Failed to delete old configuration", e);
                    }
                } else {
                    LOG.error("Deploy failed", e);
                }

                throw new XMLConfigException("Deploy failed", e);
            } finally {
                LOCK.unlock();
            }
        } else {
            throw new FileNotFoundException(configurationArchivePath.toString());
        }
    } else {
        throw new IllegalStateException("A deploy is already in progress");
    }

}

From source file:org.eclipse.packagedrone.utils.rpm.build.PayloadRecorder.java

@Override
public FileChannel openChannel() throws IOException {
    checkFinished(true);

    return FileChannel.open(this.tempFile, StandardOpenOption.READ);
}

From source file:org.olat.repository.RepositoryEntryImportExport.java

/**
 * Read previously exported properties.
 */
private void loadConfiguration() {
    try {
        if (baseDirectory.exists()) {
            if (baseDirectory.getName().endsWith(".zip")) {
                Path fPath = FileSystems.newFileSystem(baseDirectory.toPath(), null).getPath("/");
                Path manifestPath = fPath.resolve("export").resolve(PROPERTIES_FILE);
                try (InputStream inputFile = Files.newInputStream(manifestPath, StandardOpenOption.READ)) {
                    XStream xstream = getXStream();
                    repositoryProperties = (RepositoryEntryImport) xstream.fromXML(inputFile);
                } catch (Exception e) {
                    log.error("Cannot read repo.xml im zip", e);
                }
            } else {
                File inputFile = new File(baseDirectory, PROPERTIES_FILE);
                if (inputFile.exists()) {
                    XStream xstream = getXStream();
                    repositoryProperties = (RepositoryEntryImport) xstream.fromXML(inputFile);
                } else {
                    repositoryProperties = new RepositoryEntryImport();
                }
            }
        } else {
            repositoryProperties = new RepositoryEntryImport();
        }
        propertiesLoaded = true;
    } catch (Exception ce) {
        throw new OLATRuntimeException("Error importing repository entry properties.", ce);
    }
}

From source file:codes.thischwa.c5c.DispatcherPUT.java

private void imageProcessingAndSizeCheck(Path tempPath, String sanitizedName, long fileSize,
        FilemanagerConfig conf) throws C5CException, IOException {
    Integer maxSize = (conf.getUpload().isFileSizeLimitAuto()) ? PropertiesLoader.getMaxUploadSize()
            : conf.getUpload().getFileSizeLimit();
    if (fileSize > maxSize.longValue() * 1024 * 1024)
        throw new FilemanagerException(FilemanagerAction.UPLOAD,
                FilemanagerException.Key.UploadFilesSmallerThan, String.valueOf(maxSize));
    String extension = FilenameUtils.getExtension(sanitizedName);

    // check image only
    boolean isImageExt = checkImageExtension(sanitizedName, conf.getUpload().isImagesOnly(),
            conf.getImages().getExtensions());
    if (!isImageExt)
        return;

    // remove exif data
    Path woExifPath = UserObjectProxy.removeExif(tempPath);
    if (!tempPath.equals(woExifPath)) {
        Files.move(woExifPath, tempPath, StandardCopyOption.REPLACE_EXISTING);
    }

    // check if the file is really an image
    InputStream in = new BufferedInputStream(Files.newInputStream(tempPath, StandardOpenOption.READ));
    Dimension dim = getDimension(in);
    if (isImageExt && dim == null)
        throw new FilemanagerException(FilemanagerAction.UPLOAD, FilemanagerException.Key.UploadImagesOnly);
    IOUtils.closeQuietly(in);

    // check if resize is enabled and fix it, if necessary 
    Resize resize = conf.getImages().getResize();
    if (resize.isEnabled()
            && (dim.getHeight() > resize.getMaxHeight() || dim.getWidth() > resize.getMaxWidth())) {
        logger.debug("process resize");
        StreamContent sc = connector.resize(new BufferedInputStream(Files.newInputStream(tempPath)), extension,
                new Dimension(resize.getMaxWidth(), resize.getMaxHeight()));
        Files.copy(sc.getInputStream(), tempPath, StandardCopyOption.REPLACE_EXISTING);
        IOUtils.closeQuietly(sc.getInputStream());
    }
}

From source file:io.pravega.segmentstore.storage.impl.extendeds3.S3FileSystemImpl.java

@Synchronized
@Override
public CompleteMultipartUploadResult completeMultipartUpload(CompleteMultipartUploadRequest request) {
    Map<Integer, CopyPartRequest> partMap = multipartUploads.get(request.getKey());
    if (partMap == null) {
        throw new S3Exception("NoSuchKey", HttpStatus.SC_NOT_FOUND, "NoSuchKey", "");
    }
    try {
        partMap.forEach((index, copyPart) -> {
            if (copyPart.getKey() != copyPart.getSourceKey()) {
                Path sourcePath = Paths.get(this.baseDir, copyPart.getBucketName(), copyPart.getSourceKey());
                Path targetPath = Paths.get(this.baseDir, copyPart.getBucketName(), copyPart.getKey());
                try (FileChannel sourceChannel = FileChannel.open(sourcePath, StandardOpenOption.READ);
                        FileChannel targetChannel = FileChannel.open(targetPath, StandardOpenOption.WRITE)) {
                    targetChannel.transferFrom(sourceChannel, Files.size(targetPath),
                            copyPart.getSourceRange().getLast() + 1 - copyPart.getSourceRange().getFirst());
                    targetChannel.close();
                    AclSize aclMap = this.aclMap.get(copyPart.getKey());
                    this.aclMap.put(copyPart.getKey(), aclMap.withSize(Files.size(targetPath)));
                } catch (IOException e) {
                    throw new S3Exception("NoSuchKey", 404, "NoSuchKey", "");
                }
            }
        });
    } finally {
        multipartUploads.remove(request.getKey());
    }

    return new CompleteMultipartUploadResult();
}