Example usage for org.apache.commons.io FileUtils openInputStream

Introduction

This page collects example usages of the org.apache.commons.io.FileUtils.openInputStream method, taken from open-source projects.

Prototype

public static FileInputStream openInputStream(File file) throws IOException 

Document

Opens a FileInputStream for the specified file, providing better error messages than simply calling new FileInputStream(file).
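
A minimal, self-contained sketch of that behaviour (the file path below is a hypothetical example):

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class OpenInputStreamExample {
    public static void main(String[] args) {
        File config = new File("conf/app.properties"); // hypothetical path
        try (FileInputStream in = FileUtils.openInputStream(config)) {
            System.out.println("first byte: " + in.read());
        } catch (IOException e) {
            // Unlike new FileInputStream(file), the message states whether the file
            // does not exist, is a directory, or cannot be read.
            System.err.println(e.getMessage());
        }
    }
}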

Usage

From source file:org.apache.jackrabbit.oak.plugins.tika.TextExtractorMain.java

private static Properties loadProperties(File s3Config) throws IOException {
    Properties props = new Properties();
    InputStream is = FileUtils.openInputStream(s3Config);
    try {
        props.load(is);
    } finally {
        IOUtils.closeQuietly(is);
    }
    return props;
}
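
On Java 7 and later the same pattern can be written with try-with-resources instead of IOUtils.closeQuietly. A hedged sketch of an equivalent method (not part of the original source; assumes java.io.File, java.io.IOException, java.io.InputStream, java.util.Properties and org.apache.commons.io.FileUtils are imported):

private static Properties loadProperties(File s3Config) throws IOException {
    Properties props = new Properties();
    // the stream is closed automatically, even if props.load fails
    try (InputStream is = FileUtils.openInputStream(s3Config)) {
        props.load(is);
    }
    return props;
}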

From source file:org.apache.jackrabbit.performance.AbstractPerformanceTest.java

protected void testPerformance(String name) throws Exception {
    repoPattern = Pattern.compile(System.getProperty("repo", "\\d\\.\\d"));
    testPattern = Pattern.compile(System.getProperty("only", ".*"));

    // Create a repository using the Jackrabbit default configuration
    testPerformance(name, getDefaultConfig());

    // Create repositories for any special configurations included
    File directory = new File(new File("src", "test"), "resources");
    File[] files = directory.listFiles();
    if (files != null) {
        Arrays.sort(files);
        for (File file : files) {
            String xml = file.getName();
            if (file.isFile() && xml.endsWith(".xml")) {
                String repositoryName = name + "-" + xml.substring(0, xml.length() - 4);
                testPerformance(repositoryName, FileUtils.openInputStream(file));
            }
        }
    }
}

From source file:org.apache.synapse.config.xml.MultiXMLConfigurationBuilder.java

private static SynapseConfiguration createConfigurationFromSynapseXML(String rootDirPath,
        Properties properties) {

    File synapseXML = new File(rootDirPath, SynapseConstants.SYNAPSE_XML);
    if (!synapseXML.exists() || !synapseXML.isFile()) {
        return null;
    }

    FileInputStream is;
    SynapseConfiguration config = null;
    try {
        is = FileUtils.openInputStream(synapseXML);
    } catch (IOException e) {
        handleException("Error while opening the file: " + synapseXML.getName(), e);
        return null;
    }

    try {
        config = XMLConfigurationBuilder.getConfiguration(is, properties);
        is.close();
    } catch (XMLStreamException e) {
        handleException(
                "Error while loading the Synapse configuration from the " + synapseXML.getName() + " file", e);
    } catch (IOException e) {
        log.warn("Error while closing the input stream from file: " + synapseXML.getName(), e);
    }

    return config;
}

From source file:org.apache.synapse.config.xml.MultiXMLConfigurationBuilder.java

private static OMElement getOMElement(File file) {
    FileInputStream is;
    OMElement document = null;

    try {
        is = FileUtils.openInputStream(file);
    } catch (IOException e) {
        handleException("Error while opening the file: " + file.getName() + " for reading", e);
        return null;
    }

    try {
        document = new StAXOMBuilder(is).getDocumentElement();
        document.build();
        is.close();
    } catch (XMLStreamException e) {
        handleException("Error while parsing the content of the file: " + file.getName(), e);
    } catch (IOException e) {
        log.warn("Error while closing the input stream from the file: " + file.getName(), e);
    }

    return document;
}

From source file:org.apache.synapse.deployers.AbstractSynapseArtifactDeployer.java

/**
 * This method is called by the Axis2 deployment framework. It performs a set of tasks that is
 * specific to Synapse artifacts yet common across all artifact types, and delegates the actual
 * deployment to the respective artifact deployers.
 *
 * @param deploymentFileData file to be used for the deployment
 * @throws org.apache.axis2.deployment.DeploymentException in case of an error in deploying the file
 *
 * @see org.apache.synapse.deployers.AbstractSynapseArtifactDeployer#deploySynapseArtifact(org.apache.axiom.om.OMElement,
 * String,java.util.Properties)
 */
public void deploy(DeploymentFileData deploymentFileData) throws DeploymentException {

    //        CustomLogSetter.getInstance().setLogAppender(customLogContent);
    if (!isHotDeploymentEnabled()) {
        if (log.isDebugEnabled()) {
            log.debug("Hot deployment has been suspended - Ignoring");
        }
        return;
    }

    String filename = SynapseArtifactDeploymentStore
            .getNormalizedAbsolutePath(deploymentFileData.getAbsolutePath());
    if (log.isDebugEnabled()) {
        log.debug("Deployment of the synapse artifact from file : " + filename + " : STARTED");
    }

    if (getServerContextInformation().getServerState() != ServerState.STARTED) {
        // synapse server has not yet been started
        if (log.isDebugEnabled()) {
            log.debug("Skipped the artifact deployment (since the Synapse "
                    + "server doesn't seem to be started yet), from file : "
                    + deploymentFileData.getAbsolutePath());
        }
        return;
    }

    SynapseArtifactDeploymentStore deploymentStore = getSynapseConfiguration().getArtifactDeploymentStore();

    // check whether this is triggered by a restore, if it is a restore we do not want to
    // deploy it again
    if (deploymentStore.isRestoredFile(filename)) {
        if (log.isDebugEnabled()) {
            log.debug("Restored artifact detected with filename : " + filename);
        }
        // only one deployment trigger can happen after a restore and hence remove it from
        // restoredFiles at the first hit, allowing the further deployments/updates to take
        // place as usual
        deploymentStore.removeRestoredFile(filename);
        return;
    }

    try {
        InputStream in = FileUtils.openInputStream(new File(filename));
        try {
            // construct the xml element from the file, it has to be XML,
            // since all synapse artifacts are XML based
            OMElement element = new StAXOMBuilder(StAXUtils.createXMLStreamReader(in)).getDocumentElement();
            Properties properties = new Properties();
            properties.put(SynapseConstants.CLASS_MEDIATOR_LOADERS,
                    deploymentStore.getClassMediatorClassLoaders());
            properties.put(SynapseConstants.RESOLVE_ROOT, getSynapseEnvironment().getServerContextInformation()
                    .getServerConfigurationInformation().getResolveRoot());
            String artifactName = null;
            if (deploymentStore.isUpdatingArtifact(filename)) {

                if (log.isDebugEnabled()) {
                    log.debug("Updating artifact detected with filename : " + filename);
                }
                // this is a hot-update case
                String existingArtifactName = deploymentStore.getUpdatingArtifactWithFileName(filename);
                deploymentStore.removeUpdatingArtifact(filename);
                try {
                    artifactName = updateSynapseArtifact(element, filename, existingArtifactName, properties);
                } catch (SynapseArtifactDeploymentException sade) {
                    log.error("Update of the Synapse Artifact from file : " + filename + " : Failed!", sade);
                    log.info("The updated file has been backed up into : "
                            + backupFile(deploymentFileData.getFile()));
                    log.info("Restoring the existing artifact into the file : " + filename);
                    restoreSynapseArtifact(existingArtifactName);
                    artifactName = existingArtifactName;
                    throw new DeploymentException(sade);
                }
            } else {
                // new artifact hot-deployment case
                try {
                    // When someone deploys either main or fault sequence what actually happens is they simply
                    // update the existing sequences.
                    if (filename.matches(".*/main-\\d+\\.\\d+\\.\\d+\\.xml")) {
                        artifactName = updateDefaultSequence(filename, element, properties,
                                deploymentStore.getMainSeqLstUpdatedFile(), deploymentStore);

                        String mainSeqFileName = filename.substring(filename.lastIndexOf(File.separator) + 1);
                        deploymentStore.setMainSeqLstUpdatedFile(mainSeqFileName);

                    } else if (filename.matches(".*/fault-\\d+\\.\\d+\\.\\d+\\.xml")) {
                        artifactName = updateDefaultSequence(filename, element, properties,
                                deploymentStore.getFaultSeqLstUpdatedFile(), deploymentStore);

                        String faultSeqFileName = filename.substring(filename.lastIndexOf(File.separator) + 1);
                        deploymentStore.setFaultSeqLstUpdatedFile(faultSeqFileName);

                    } else {
                        artifactName = deploySynapseArtifact(element, filename, properties);
                    }
                } catch (SynapseArtifactDeploymentException sade) {
                    log.error("Deployment of the Synapse Artifact from file : " + filename + " : Failed!",
                            sade);
                    log.info("The file has been backed up into : " + backupFile(deploymentFileData.getFile()));
                    throw new DeploymentException(sade);
                }
            }
            if (artifactName != null) {
                deploymentStore.addArtifact(filename, artifactName);
            }
        } finally {
            in.close();
        }
    } catch (IOException ex) {
        handleDeploymentError(
                "Deployment of synapse artifact failed. Error reading " + filename + " : " + ex.getMessage(),
                ex, filename);
        throw new DeploymentException(ex);
    } catch (XMLStreamException ex) {
        handleDeploymentError(
                "Deployment of synapse artifact failed. Error parsing " + filename + " : " + ex.getMessage(),
                ex, filename);
        throw new DeploymentException(ex);
    } catch (OMException ex) {
        handleDeploymentError(
                "Deployment of synapse artifact failed. Error parsing " + filename + " : " + ex.getMessage(),
                ex, filename);
        throw new DeploymentException(ex);
    }

    if (log.isDebugEnabled()) {
        log.debug("Deployment of the synapse artifact from file : " + filename + " : COMPLETED");
    }
}

From source file:org.apache.synapse.deployers.AbstractSynapseArtifactDeployer.java

private void resetDefaultSequence(String sequenceFileName, String fileName,
        SynapseArtifactDeploymentStore deploymentStore) throws IOException, XMLStreamException {

    String dirpath = fileName.substring(0, fileName.lastIndexOf(File.separator));
    String seqOrgi = dirpath + File.separator + sequenceFileName;

    InputStream in = FileUtils.openInputStream(new File(seqOrgi));

    // construct the xml element from the file, it has to be XML,
    // since all synapse artifacts are XML based
    OMElement element = new StAXOMBuilder(StAXUtils.createXMLStreamReader(in)).getDocumentElement();
    Properties properties = new Properties();
    properties.put(SynapseConstants.RESOLVE_ROOT, getSynapseEnvironment().getServerContextInformation()
            .getServerConfigurationInformation().getResolveRoot());

    String existingArtifactName = deploymentStore.getArtifactNameForFile(fileName);

    deploymentStore.removeArtifactWithFileName(fileName);

    String artifactName = updateSynapseArtifact(element, seqOrgi, existingArtifactName, properties);

    if (artifactName != null) {
        deploymentStore.addArtifact(seqOrgi, artifactName);
    }
}

From source file:org.apache.syncope.client.enduser.SyncopeEnduserApplication.java

@Override
protected void init() {
    super.init();

    // read enduser.properties
    Properties props = PropertyUtils.read(getClass(), ENDUSER_PROPERTIES, "enduser.directory").getLeft();

    domain = props.getProperty("domain", SyncopeConstants.MASTER_DOMAIN);
    adminUser = props.getProperty("adminUser");
    Args.notNull(adminUser, "<adminUser>");
    anonymousUser = props.getProperty("anonymousUser");
    Args.notNull(anonymousUser, "<anonymousUser>");
    anonymousKey = props.getProperty("anonymousKey");
    Args.notNull(anonymousKey, "<anonymousKey>");

    captchaEnabled = Boolean.parseBoolean(props.getProperty("captcha"));
    Args.notNull(captchaEnabled, "<captcha>");

    xsrfEnabled = Boolean.parseBoolean(props.getProperty("xsrf"));
    Args.notNull(xsrfEnabled, "<xsrf>");

    String scheme = props.getProperty("scheme");
    Args.notNull(scheme, "<scheme>");
    String host = props.getProperty("host");
    Args.notNull(host, "<host>");
    String port = props.getProperty("port");
    Args.notNull(port, "<port>");
    String rootPath = props.getProperty("rootPath");
    Args.notNull(rootPath, "<rootPath>");
    String useGZIPCompression = props.getProperty("useGZIPCompression");
    Args.notNull(useGZIPCompression, "<useGZIPCompression>");
    maxUploadFileSizeMB = props.getProperty("maxUploadFileSizeMB") == null ? null
            : Integer.valueOf(props.getProperty("maxUploadFileSizeMB"));

    clientFactory = new SyncopeClientFactoryBean()
            .setAddress(scheme + "://" + host + ":" + port + "/" + rootPath)
            .setContentType(SyncopeClientFactoryBean.ContentType.JSON)
            .setUseCompression(BooleanUtils.toBoolean(useGZIPCompression));

    // read customForm.json
    try (InputStream is = getClass().getResourceAsStream("/" + CUSTOM_FORM_FILE)) {
        customForm = MAPPER.readValue(is, new TypeReference<HashMap<String, CustomAttributesInfo>>() {
        });
        File enduserDir = new File(props.getProperty("enduser.directory"));
        boolean existsEnduserDir = enduserDir.exists() && enduserDir.canRead() && enduserDir.isDirectory();
        if (existsEnduserDir) {
            File customFormFile = FileUtils.getFile(enduserDir, CUSTOM_FORM_FILE);
            if (customFormFile.exists() && customFormFile.canRead() && customFormFile.isFile()) {
                customForm = MAPPER.readValue(FileUtils.openInputStream(customFormFile),
                        new TypeReference<HashMap<String, CustomAttributesInfo>>() {
                        });
            }
        }
        FileAlterationObserver observer = existsEnduserDir
                ? new FileAlterationObserver(enduserDir,
                        pathname -> StringUtils.contains(pathname.getPath(), CUSTOM_FORM_FILE))
                : new FileAlterationObserver(getClass().getResource("/" + CUSTOM_FORM_FILE).getFile(),
                        pathname -> StringUtils.contains(pathname.getPath(), CUSTOM_FORM_FILE));

        FileAlterationMonitor monitor = new FileAlterationMonitor(5000);

        FileAlterationListener listener = new FileAlterationListenerAdaptor() {

            @Override
            public void onFileChange(final File file) {
                try {
                    LOG.trace("{} has changed. Reloading form customization configuration.", CUSTOM_FORM_FILE);
                    customForm = MAPPER.readValue(FileUtils.openInputStream(file),
                            new TypeReference<HashMap<String, CustomAttributesInfo>>() {
                            });
                } catch (IOException e) {
                    e.printStackTrace(System.err);
                }
            }

            @Override
            public void onFileCreate(final File file) {
                try {
                    LOG.trace("{} has been created. Loading form customization configuration.",
                            CUSTOM_FORM_FILE);
                    customForm = MAPPER.readValue(FileUtils.openInputStream(file),
                            new TypeReference<HashMap<String, CustomAttributesInfo>>() {
                            });
                } catch (IOException e) {
                    e.printStackTrace(System.err);
                }
            }

            @Override
            public void onFileDelete(final File file) {
                LOG.trace("{} has been deleted. Resetting form customization configuration.", CUSTOM_FORM_FILE);
                customForm = null;
            }
        };

        observer.addListener(listener);
        monitor.addObserver(observer);
        monitor.start();
    } catch (Exception e) {
        throw new WicketRuntimeException("Could not read " + CUSTOM_FORM_FILE, e);
    }

    // mount resources
    ClassPathScanImplementationLookup classPathScanImplementationLookup = (ClassPathScanImplementationLookup) getServletContext()
            .getAttribute(EnduserInitializer.CLASSPATH_LOOKUP);
    for (final Class<? extends AbstractResource> resource : classPathScanImplementationLookup.getResources()) {
        Resource annotation = resource.getAnnotation(Resource.class);
        if (annotation == null) {
            LOG.debug("No @Resource annotation found on {}, ignoring", resource.getName());
        } else {
            try {
                final AbstractResource instance = resource.newInstance();

                mountResource(annotation.path(), new ResourceReference(annotation.key()) {

                    private static final long serialVersionUID = -128426276529456602L;

                    @Override
                    public IResource getResource() {
                        return instance;
                    }
                });
            } catch (Exception e) {
                LOG.error("Could not instantiate {}", resource.getName(), e);
            }
        }
    }
    //mount captcha resource only if captcha is enabled
    if (captchaEnabled) {
        mountResource("/api/captcha", new ResourceReference("captcha") {

            private static final long serialVersionUID = -128426276529456602L;

            @Override
            public IResource getResource() {
                return new CaptchaResource();
            }
        });
    }
}

From source file:org.apache.usergrid.services.assets.data.LocalFileBinaryStore.java

@Override
public InputStream read(UUID appId, Entity entity, long offset, long length) throws IOException {
    return new BufferedInputStream(FileUtils.openInputStream(path(appId, entity)));
}
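
Note that this implementation ignores the offset and length arguments: the whole file is returned. A caller that needs only that byte range would have to constrain the stream itself, for example (a hypothetical caller-side sketch, not part of LocalFileBinaryStore; store, appId and entity are assumed to be in scope):

InputStream in = store.read(appId, entity, offset, length);
// skip may return fewer bytes than requested; a robust caller would loop until offset is reached
long skipped = in.skip(offset);
InputStream ranged = new BoundedInputStream(in, length); // org.apache.commons.io.input.BoundedInputStream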

From source file:org.apereo.lap.services.input.csv.BaseCSVInputHandler.java

/**
 * Reads a CSV file and verifies basic information about it
 * @param minColumns min number of columns
 * @param headerStartsWith expected header value
 * @param reRead force reading the file again (otherwise it will use the existing copy)
 * @param file the CSV file to read
 * @return the CSVReader
 * @throws IllegalStateException if we fail to produce the reader
 */
CSVReader readCSV(int minColumns, String headerStartsWith, boolean reRead, File file) {
    if (this.reader == null || reRead) {
        assert StringUtils.isNotBlank(file.getAbsolutePath()) : "filePath must not be blank: "
                + file.getAbsolutePath();
        assert minColumns > 0 : "minColumns must be > 0: " + minColumns;
        assert StringUtils.isNotBlank(headerStartsWith) : "headerStartsWith must not be blank: "
                + file.getAbsolutePath();
        CSVReader fileCSV;
        try {
            InputStream fileCSV_IS = FileUtils.openInputStream(file);
            fileCSV = new CSVReader(new InputStreamReader(fileCSV_IS));
            String[] check = fileCSV.readNext();
            if (check != null && check.length >= minColumns
                    && StringUtils.startsWithIgnoreCase(headerStartsWith, StringUtils.trimToEmpty(check[0]))) {
                //logger.debug(fileName+" file and header appear valid");
                this.reader = fileCSV;
            } else {
                throw new IllegalStateException(
                        file.getAbsolutePath() + " file and header do not appear valid (no " + headerStartsWith
                                + " header or less than " + minColumns + " required columns");
            }
        } catch (Exception e) {
            throw new IllegalStateException(file.getAbsolutePath() + " CSV is invalid: " + e);
        }
    }
    return this.reader;
}
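
A hypothetical call from a concrete handler could look like the following; the file name, header prefix and column count are illustrative assumptions, not taken from the LAP sources, and the enclosing method is assumed to declare IOException for CSVReader.readNext():

    CSVReader csv = readCSV(4, "STUDENT_ID", false, new File("inputs/students.csv"));
    String[] row;
    while ((row = csv.readNext()) != null) {
        // process one data row
    }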

From source file:org.artificer.atom.archive.ArtificerArchive.java

/**
 * Gets the content {@link InputStream} for the given S-RAMP archive entry.
 * @param entry the s-ramp archive entry
 * @return an {@link InputStream} over the artifact content or null if no content found (meta-data only)
 * @throws IOException
 */
public InputStream getInputStream(ArtificerArchiveEntry entry) throws IOException {
    File artifactPath = new File(this.workDir, entry.getPath());
    if (artifactPath.exists())
        return FileUtils.openInputStream(artifactPath);
    else
        return null;
}
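
Because a null return means a meta-data-only entry, callers should check for it before consuming the stream. A hypothetical caller sketch, inside a method that declares IOException (archive is an ArtificerArchive and entry one of its entries; both are assumed to be in scope):

    // A null resource is permitted in try-with-resources; close() is simply skipped.
    try (InputStream content = archive.getInputStream(entry)) {
        if (content != null) {
            byte[] data = IOUtils.toByteArray(content); // org.apache.commons.io.IOUtils
            // process the artifact content
        }
    }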