Example usage for java.nio.file Files newInputStream

List of usage examples for java.nio.file Files newInputStream

Introduction

On this page you can find usage examples for java.nio.file.Files.newInputStream.

Prototype

public static InputStream newInputStream(Path path, OpenOption... options) throws IOException 

Document

Opens a file, returning an input stream to read from the file.
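
Before the project excerpts below, here is a minimal, self-contained sketch of the typical pattern: open the stream with try-with-resources so it is closed automatically. The path and class name are hypothetical and used only for illustration; reading all bytes with InputStream.readAllBytes() assumes Java 9 or later.

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class NewInputStreamSketch {

    public static void main(String[] args) throws IOException {
        // Hypothetical path, for illustration only
        Path path = Paths.get("data/example.txt");

        // try-with-resources closes the stream even if reading fails
        try (InputStream in = Files.newInputStream(path, StandardOpenOption.READ)) {
            byte[] content = in.readAllBytes(); // Java 9+
            System.out.println(new String(content, StandardCharsets.UTF_8));
        }
    }
}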

Usage

From source file:it.greenvulcano.configuration.BaseConfigurationManager.java

@Override
public Properties getXMLConfigProperties() throws FileNotFoundException, IOException {

    Path xmlConfigPath = Paths.get(XMLConfig.getBaseConfigPath(), "XMLConfig.properties");

    if (Files.exists(xmlConfigPath)) {
        Properties properties = new Properties();
        properties.load(Files.newInputStream(xmlConfigPath, StandardOpenOption.READ));
        return properties;
    } else {
        throw new FileNotFoundException("XMLConfig.properties");
    }

}

From source file:de.elomagic.mag.AbstractTest.java

protected Future<byte[]> createFileExistsFuture(Path file) {

    ExecutorService executor = Executors.newFixedThreadPool(2);

    FutureTask<byte[]> futureTask = new FutureTask<>(() -> {
        byte[] result = null;
        do {
            if (Files.exists(file)) {
                InputStream in = Files.newInputStream(file, StandardOpenOption.READ);
                result = IOUtils.readFully(in, in.available());
            }

            Thread.sleep(100);
        } while (result == null);

        return result;
    });

    executor.execute(futureTask);

    return futureTask;
}

From source file:org.apache.nifi.processors.kite.TestInferAvroSchema.java

@Test
public void inferAvroSchemaFromCSVFile() throws Exception {

    runner.assertValid();

    // Read in the header
    StringWriter writer = new StringWriter();
    IOUtils.copy(
            (Files.newInputStream(Paths.get("src/test/resources/ShapesHeader.csv"), StandardOpenOption.READ)),
            writer, "UTF-8");
    runner.setProperty(InferAvroSchema.CSV_HEADER_DEFINITION, writer.toString());
    runner.setProperty(InferAvroSchema.GET_CSV_HEADER_DEFINITION_FROM_INPUT, "false");

    Map<String, String> attributes = new HashMap<>();
    attributes.put(CoreAttributes.MIME_TYPE.key(), "text/csv");
    runner.enqueue(new File("src/test/resources/Shapes_NoHeader.csv").toPath(), attributes);

    runner.run();
    runner.assertTransferCount(InferAvroSchema.REL_UNSUPPORTED_CONTENT, 0);
    runner.assertTransferCount(InferAvroSchema.REL_FAILURE, 0);
    runner.assertTransferCount(InferAvroSchema.REL_ORIGINAL, 1);
    runner.assertTransferCount(InferAvroSchema.REL_SUCCESS, 1);

    MockFlowFile data = runner.getFlowFilesForRelationship(InferAvroSchema.REL_SUCCESS).get(0);
    data.assertContentEquals(
            unix2PlatformSpecificLineEndings(new File("src/test/resources/Shapes_header.csv.avro")));
    data.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/avro-binary");
}

From source file:org.linagora.linshare.webservice.uploadrequest.impl.FlowUploaderRestServiceImpl.java

@Path("/")
@POST
@Consumes("multipart/form-data")
@Override
public Response uploadChunk(@Multipart(CHUNK_NUMBER) long chunkNumber,
        @Multipart(TOTAL_CHUNKS) long totalChunks, @Multipart(CHUNK_SIZE) long chunkSize,
        @Multipart(TOTAL_SIZE) long totalSize, @Multipart(IDENTIFIER) String identifier,
        @Multipart(FILENAME) String filename, @Multipart(RELATIVE_PATH) String relativePath,
        @Multipart(FILE) InputStream file, MultipartBody body,
        @Multipart(REQUEST_URL_UUID) String uploadRequestUrlUuid, @Multipart(PASSWORD) String password)
        throws BusinessException {

    logger.debug("upload chunk number : " + chunkNumber);
    identifier = cleanIdentifier(identifier);
    Validate.isTrue(isValid(chunkNumber, chunkSize, totalSize, identifier, filename));
    try {
        logger.debug("writing chunk number : " + chunkNumber);
        java.nio.file.Path tempFile = getTempFile(identifier);
        FileChannel fc = FileChannel.open(tempFile, StandardOpenOption.CREATE, StandardOpenOption.APPEND);
        byte[] byteArray = IOUtils.toByteArray(file);
        fc.write(ByteBuffer.wrap(byteArray), (chunkNumber - 1) * chunkSize);
        fc.close();
        chunkedFiles.get(identifier).addChunk(chunkNumber);
        if (isUploadFinished(identifier, chunkSize, totalSize)) {
            logger.debug("upload finished ");
            InputStream inputStream = Files.newInputStream(tempFile, StandardOpenOption.READ);
            File tempFile2 = getTempFile(inputStream, "rest-flowuploader", filename);
            try {
                uploadRequestUrlFacade.addUploadRequestEntry(uploadRequestUrlUuid, password, tempFile2,
                        filename);
            } finally {
                deleteTempFile(tempFile2);
            }
            ChunkedFile remove = chunkedFiles.remove(identifier);
            Files.deleteIfExists(remove.getPath());
            return Response.ok("upload success").build();
        } else {
            logger.debug("upload pending ");
        }
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return Response.ok("upload success").build();
}

From source file:org.apache.nifi.util.MockFlowFile.java

/**
 * Asserts that the content of this FlowFile is the same as the content of
 * the given path
 *
 * @param path where to find content to compare to
 * @throws IOException if io error occurs while comparing content
 */
public void assertContentEquals(final Path path) throws IOException {
    try (final InputStream in = Files.newInputStream(path, StandardOpenOption.READ)) {
        assertContentEquals(in);
    }
}

From source file:ca.polymtl.dorsal.libdelorean.statedump.Statedump.java

/**
 * Retrieve a previously-saved statedump.
 *
 * @param parentPath
 *            The expected location of the statedump file. Like the
 *            corresponding parameter in {@link #dumpState}, this is the
 *            parent path of the TC-specific subdirectory.
 * @param ssid
 *            The ID of the state system to retrieve
 * @return The corresponding de-serialized statedump. Returns null if there
 *         is no statedump for this state system ID (or no statedump
 *         directory at all).
 */
public static @Nullable Statedump loadState(Path parentPath, String ssid) {
    /* Find the state dump directory */
    Path sdPath = parentPath.resolve(STATEDUMP_DIRECTORY);
    if (!Files.isDirectory(sdPath)) {
        return null;
    }

    /* Find the state dump file */
    String fileName = ssid + FILE_SUFFIX;
    Path filePath = sdPath.resolve(fileName);
    if (!Files.exists(filePath)) {
        return null;
    }

    try (InputStreamReader in = new InputStreamReader(
            Files.newInputStream(filePath, StandardOpenOption.READ))) {
        BufferedReader bufReader = new BufferedReader(in);
        String json = bufReader.lines().collect(Collectors.joining("\n")); //$NON-NLS-1$
        JSONObject root = new JSONObject(json);

        return Serialization.stateDumpFromJsonObject(root, ssid);
    } catch (IOException | JSONException e) {
        return null;
    }
}

From source file:de.decoit.visa.rdf.RDFManager.java

/**
 * Construct a new RDFManager object. It will open and clear the TDB
 * database at the specified location if it exists. Otherwise a new database
 * will be created. The program must have read and write access to the
 * database location.
 *
 * @param pLocation The TDB database will be opened at this location
 * @throws IOException if the VSA template directory is not accessible
 * @throws ParserConfigurationException
 * @throws SAXException
 */
public RDFManager(String pLocation) throws IOException, ParserConfigurationException, SAXException {
    source = new ArrayList<>();
    vsaTemplates = new ArrayList<>();
    activeNamedModel = null;

    // Load a list of available VSA templates
    DirectoryStream<Path> dirStream = Files.newDirectoryStream(Paths.get("res/vsa"), "*.xml");
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    for (Path p : dirStream) {
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(Files.newInputStream(p, StandardOpenOption.READ));

        doc.getDocumentElement().normalize();

        vsaTemplates.add(doc);
    }

    // Create or load the TDB database at pLocation
    ds = TDBFactory.createDataset(pLocation);

    ds.begin(ReadWrite.WRITE);

    try {
        // Do some cleanup if last run didn't clear the database
        // Remove all named models from the database
        Iterator<String> itNames = ds.listNames();
        ArrayList<String> names = new ArrayList<>();

        while (itNames.hasNext()) {
            names.add(itNames.next());
        }

        for (String n : names) {
            ds.removeNamedModel(n);
        }

        // Clear the default model
        ds.getDefaultModel().removeAll();

        ds.commit();
    } catch (Throwable ex) {
        ds.abort();
    } finally {
        ds.end();

        // Sync changes to disk
        TDB.sync(ds);
    }
}

From source file:it.greenvulcano.configuration.BaseConfigurationManager.java

@Override
public void deploy(String name) throws XMLConfigException, FileNotFoundException {

    Path configurationArchivePath = getConfigurationPath(name);

    Path current = Paths.get(XMLConfig.getBaseConfigPath());
    Path staging = current.getParent().resolve("deploy");
    Path destination = current.getParent().resolve(name);

    if (LOCK.tryLock()) {

        if (Files.exists(configurationArchivePath) && !Files.isDirectory(configurationArchivePath)) {

            try {

                ZipInputStream configurationArchive = new ZipInputStream(
                        Files.newInputStream(configurationArchivePath, StandardOpenOption.READ));

                LOG.debug("Starting deploy of configuration " + name);
                ZipEntry zipEntry = null;

                for (Path cfgFile : Files.walk(current).collect(Collectors.toSet())) {

                    if (!Files.isDirectory(cfgFile)) {

                        Path target = staging.resolve(current.relativize(cfgFile));
                        Files.createDirectories(target);

                        Files.copy(cfgFile, target, StandardCopyOption.REPLACE_EXISTING);
                    }

                }

                LOG.debug("Staging new config " + name);

                while ((zipEntry = configurationArchive.getNextEntry()) != null) {

                    Path entryPath = staging.resolve(zipEntry.getName());

                    LOG.debug("Adding resource: " + entryPath);
                    if (zipEntry.isDirectory()) {
                        entryPath.toFile().mkdirs();
                    } else {

                        Path parent = entryPath.getParent();
                        if (!Files.exists(parent)) {
                            Files.createDirectories(parent);
                        }

                        Files.copy(configurationArchive, entryPath, StandardCopyOption.REPLACE_EXISTING);
                    }

                }

                //**** Deleting old config dir
                LOG.debug("Removing old config: " + current);
                Files.walk(current, FileVisitOption.FOLLOW_LINKS).sorted(Comparator.reverseOrder())
                        .map(java.nio.file.Path::toFile).forEach(File::delete);

                LOG.debug("Deploy new config " + name + " in path " + destination);
                Files.move(staging, destination, StandardCopyOption.ATOMIC_MOVE);

                setXMLConfigBasePath(destination.toString());
                LOG.debug("Deploy complete");
                deployListeners.forEach(l -> l.onDeploy(destination));

            } catch (Exception e) {

                if (Objects.nonNull(staging) && Files.exists(staging)) {
                    LOG.error("Deploy failed, rollback to previous configuration", e);
                    try {
                        Files.walk(staging, FileVisitOption.FOLLOW_LINKS).sorted(Comparator.reverseOrder())
                                .map(java.nio.file.Path::toFile).forEach(File::delete);

                        setXMLConfigBasePath(current.toString());
                    } catch (IOException | InvalidSyntaxException rollbackException) {
                        LOG.error("Failed to delete old configuration", e);
                    }
                } else {
                    LOG.error("Deploy failed", e);
                }

                throw new XMLConfigException("Deploy failed", e);
            } finally {
                LOCK.unlock();
            }
        } else {
            throw new FileNotFoundException(configurationArchivePath.toString());
        }
    } else {
        throw new IllegalStateException("A deploy is already in progress");
    }

}

From source file:org.olat.repository.RepositoryEntryImportExport.java

/**
 * Reads previously exported properties.
 */
private void loadConfiguration() {
    try {
        if (baseDirectory.exists()) {
            if (baseDirectory.getName().endsWith(".zip")) {
                Path fPath = FileSystems.newFileSystem(baseDirectory.toPath(), null).getPath("/");
                Path manifestPath = fPath.resolve("export").resolve(PROPERTIES_FILE);
                try (InputStream inputFile = Files.newInputStream(manifestPath, StandardOpenOption.READ)) {
                    XStream xstream = getXStream();
                    repositoryProperties = (RepositoryEntryImport) xstream.fromXML(inputFile);
                } catch (Exception e) {
                    log.error("Cannot read repo.xml im zip", e);
                }
            } else {
                File inputFile = new File(baseDirectory, PROPERTIES_FILE);
                if (inputFile.exists()) {
                    XStream xstream = getXStream();
                    repositoryProperties = (RepositoryEntryImport) xstream.fromXML(inputFile);
                } else {
                    repositoryProperties = new RepositoryEntryImport();
                }
            }
        } else {
            repositoryProperties = new RepositoryEntryImport();
        }
        propertiesLoaded = true;
    } catch (Exception ce) {
        throw new OLATRuntimeException("Error importing repository entry properties.", ce);
    }
}

From source file:codes.thischwa.c5c.DispatcherPUT.java

private void imageProcessingAndSizeCheck(Path tempPath, String sanitizedName, long fileSize,
        FilemanagerConfig conf) throws C5CException, IOException {
    Integer maxSize = (conf.getUpload().isFileSizeLimitAuto()) ? PropertiesLoader.getMaxUploadSize()
            : conf.getUpload().getFileSizeLimit();
    if (fileSize > maxSize.longValue() * 1024 * 1024)
        throw new FilemanagerException(FilemanagerAction.UPLOAD,
                FilemanagerException.Key.UploadFilesSmallerThan, String.valueOf(maxSize));
    String extension = FilenameUtils.getExtension(sanitizedName);

    // check image only
    boolean isImageExt = checkImageExtension(sanitizedName, conf.getUpload().isImagesOnly(),
            conf.getImages().getExtensions());
    if (!isImageExt)
        return;

    // remove exif data
    Path woExifPath = UserObjectProxy.removeExif(tempPath);
    if (!tempPath.equals(woExifPath)) {
        Files.move(woExifPath, tempPath, StandardCopyOption.REPLACE_EXISTING);
    }

    // check if the file is really an image
    InputStream in = new BufferedInputStream(Files.newInputStream(tempPath, StandardOpenOption.READ));
    Dimension dim = getDimension(in);
    if (isImageExt && dim == null)
        throw new FilemanagerException(FilemanagerAction.UPLOAD, FilemanagerException.Key.UploadImagesOnly);
    IOUtils.closeQuietly(in);

    // check if resize is enabled and fix it, if necessary 
    Resize resize = conf.getImages().getResize();
    if (resize.isEnabled()
            && (dim.getHeight() > resize.getMaxHeight() || dim.getWidth() > resize.getMaxWidth())) {
        logger.debug("process resize");
        StreamContent sc = connector.resize(new BufferedInputStream(Files.newInputStream(tempPath)), extension,
                new Dimension(resize.getMaxWidth(), resize.getMaxHeight()));
        Files.copy(sc.getInputStream(), tempPath, StandardCopyOption.REPLACE_EXISTING);
        IOUtils.closeQuietly(sc.getInputStream());
    }
}