Example usage for java.io IOException getClass

List of usage examples for java.io IOException getClass

Introduction

On this page you can find example usages of java.io.IOException.getClass().

Prototype

@HotSpotIntrinsicCandidate
public final native Class<?> getClass();

Document

Returns the runtime class of this Object.
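
Before the project examples below, here is a minimal, self-contained sketch of the pattern they share: calling getClass() on a caught IOException to include the concrete exception type in a log message. The class, method, and file names in this sketch are illustrative assumptions only and do not come from any of the projects listed under Usage.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import java.util.logging.Logger;

public class IOExceptionGetClassExample {
    private static final Logger LOG = Logger.getLogger(IOExceptionGetClassExample.class.getName());

    // Hypothetical helper: read a text file and report any IOException with its concrete class name.
    static List<String> readLinesOrEmpty(String fileName) {
        Path path = Paths.get(fileName);
        try {
            return Files.readAllLines(path);
        } catch (IOException ex) {
            // getClass() returns the runtime class of the exception actually thrown,
            // e.g. NoSuchFileException or AccessDeniedException rather than plain IOException.
            LOG.severe("Caught [" + ex.getClass().getSimpleName() + "] reading "
                    + fileName + ": " + ex.getMessage());
            return Collections.emptyList();
        }
    }

    public static void main(String[] args) {
        readLinesOrEmpty("does-not-exist.txt");
    }
}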

Usage

From source file:de.decoit.visa.rdf.RDFManager.java

/**
 * Read a SPARQL query from a file into a String and create a Query object
 * from that. If a resource was specified all occurrences of $URI$
 * placeholder in the read query will be replaced with the URI of the
 * resource. If a model URI is specified, GRAPH lines will be added to the
 * query using the placeholders $S_MOD$ and $E_MOD$.
 *
 * @param pFileName File name of the SPARQL file. The file must exist and be
 *            located in 'res/sparql'
 * @param pRes Optional resource object, will be used to replace the $URI$
 *            placeholder. Can be set to null if not required.
 * @param pMod Optional model URI, will be used to add GRAPH lines to the
 *            query. If set to null the query will be executed on the
 *            default model of the dataset.
 * @return A Query object containing the read SPARQL query, null if the
 *         input file cannot be read
 */
private Query readSPARQL(String pFileName, Resource pRes, String pMod) {
    try {
        // Open the SPARQL file for reading
        Path inFile = Paths.get("res/sparql", pFileName);
        BufferedReader br = Files.newBufferedReader(inFile, StandardCharsets.UTF_8);

        // Read all lines and concatenate them using a StringBuilder
        StringBuilder rv = new StringBuilder();
        String line = br.readLine();
        while (line != null) {
            rv.append(line);
            rv.append(System.lineSeparator());

            line = br.readLine();
        }
        br.close();

        // Get the String from the StringBuilder and, if required, replace
        // the $URI$ placeholder
        String rvStr = rv.toString();
        if (pRes != null) {
            rvStr = rvStr.replaceAll("\\$URI\\$", pRes.getURI());
        }

        if (pMod != null && !pMod.isEmpty()) {
            StringBuilder graphLine = new StringBuilder("GRAPH <");
            graphLine.append(pMod);
            graphLine.append("> {");

            rvStr = rvStr.replaceAll("\\$S_MOD\\$", graphLine.toString()).replaceAll("\\$E_MOD\\$", "}");
        } else {
            rvStr = rvStr.replaceAll("\\$S_MOD\\$", "").replaceAll("\\$E_MOD\\$", "");
        }

        // Build a Query object and return it
        return QueryFactory.create(rvStr);
    } catch (IOException ex) {
        StringBuilder sb = new StringBuilder("Caught: [");
        sb.append(ex.getClass().getSimpleName());
        sb.append("] ");
        sb.append(ex.getMessage());
        log.error(sb.toString());

        if (log.isDebugEnabled()) {
            for (StackTraceElement ste : ex.getStackTrace()) {
                log.debug(ste.toString());
            }
        }

        return null;
    }
}

From source file:org.apache.pig.test.TestLogicalPlanBuilder.java

public LogicalPlan buildPlan(String query, ClassLoader cldr) {
    LogicalPlanBuilder.classloader = cldr;

    try {
        pigContext.connect();
        LogicalPlanBuilder builder = new LogicalPlanBuilder(pigContext);

        LogicalPlan lp = builder.parse("Test-Plan-Builder", query, aliases, logicalOpTable, aliasOp,
                fileNameMap);
        List<LogicalOperator> roots = lp.getRoots();

        if (roots.size() > 0) {
            for (LogicalOperator op : roots) {
                if (!(op instanceof LOLoad) && !(op instanceof LODefine)) {
                    throw new Exception("Cannot have a root that is not the load or define operator. Found "
                            + op.getClass().getName());
                }
            }
        }

        //System.err.println("Query: " + query);

        assertNotNull(lp);
        return lp;
    } catch (IOException e) {
        // log.error(e);
        //System.err.println("IOException Stack trace for query: " + query);
        //e.printStackTrace();
        PigException pe = LogUtils.getPigException(e);
        fail("IOException: " + (pe == null ? e.getMessage() : pe.getMessage()));
    } catch (Exception e) {
        log.error(e);
        //System.err.println("Exception Stack trace for query: " + query);
        //e.printStackTrace();
        PigException pe = LogUtils.getPigException(e);
        fail(e.getClass().getName() + ": " + (pe == null ? e.getMessage() : pe.getMessage()) + " -- " + query);
    }
    return null;
}

From source file:com.cloudera.cli.validator.components.ParcelFileRunner.java

@Override
public boolean run(String target, Writer writer) throws IOException {
    File parcelFile = new File(target);
    writer.write(String.format("Validating: %s\n", parcelFile.getPath()));

    if (!checkExistence(parcelFile, false, writer)) {
        return false;
    }

    String expectedDir;
    String distro;
    Matcher parcelMatcher = PARCEL_PATTERN.matcher(parcelFile.getName());
    if (parcelMatcher.find()) {
        expectedDir = parcelMatcher.group(1) + '-' + parcelMatcher.group(2);
        distro = parcelMatcher.group(3);
    } else {
        writer.write(String.format("==> %s is not a valid parcel filename\n", parcelFile.getName()));
        return false;
    }

    if (!KNOWN_DISTROS.contains(distro)) {
        writer.write(String.format("==> %s does not appear to be a distro supported by CM\n", distro));
    }

    FileInputStream fin = null;
    BufferedInputStream bin = null;
    GzipCompressorInputStream gin = null;
    TarArchiveInputStream tin = null;
    try {
        InputStream in = null;

        fin = new FileInputStream(parcelFile);
        bin = new BufferedInputStream(fin);
        try {
            gin = new GzipCompressorInputStream(bin);
            in = gin;
        } catch (IOException e) {
            // It's not compressed. Proceed as if uncompressed tar.
            writer.write(String.format("==> Warning: Parcel is not compressed with gzip\n"));
            in = bin;
        }
        tin = new TarArchiveInputStream(in);

        byte[] parcelJson = null;
        byte[] alternativesJson = null;
        byte[] permissionsJson = null;

        Map<String, Boolean> tarEntries = Maps.newHashMap();
        Set<String> unexpectedDirs = Sets.newHashSet();
        for (TarArchiveEntry e = tin.getNextTarEntry(); e != null; e = tin.getNextTarEntry()) {
            String name = e.getName();

            // Remove trailing '/'
            tarEntries.put(name.replaceAll("/$", ""), e.isDirectory());

            if (!StringUtils.startsWith(name, expectedDir)) {
                unexpectedDirs.add(name.split("/")[0]);
            }

            if (e.getName().equals(expectedDir + PARCEL_JSON_PATH)) {
                parcelJson = new byte[(int) e.getSize()];
                tin.read(parcelJson);
            } else if (e.getName().equals(expectedDir + ALTERNATIVES_JSON_PATH)) {
                alternativesJson = new byte[(int) e.getSize()];
                tin.read(alternativesJson);
            } else if (e.getName().equals(expectedDir + PERMISSIONS_JSON_PATH)) {
                permissionsJson = new byte[(int) e.getSize()];
                tin.read(permissionsJson);
            }
        }

        boolean ret = true;

        if (!unexpectedDirs.isEmpty()) {
            writer.write(String.format("==> The following unexpected top level directories were observed: %s\n",
                    unexpectedDirs.toString()));
            writer.write(
                    String.format("===> The only valid top level directory, based on parcel filename, is: %s\n",
                            expectedDir));
            ret = false;
        }

        ret &= checkParcelJson(expectedDir, parcelJson, tarEntries, writer);
        ret &= checkAlternatives(expectedDir, alternativesJson, tarEntries, writer);
        ret &= checkPermissions(expectedDir, permissionsJson, tarEntries, writer);

        return ret;
    } catch (IOException e) {
        writer.write(String.format("==> %s: %s\n", e.getClass().getName(), e.getMessage()));
        return false;
    } finally {
        IOUtils.closeQuietly(tin);
        IOUtils.closeQuietly(gin);
        IOUtils.closeQuietly(bin);
        IOUtils.closeQuietly(fin);
    }
}

From source file:org.apache.manifoldcf.crawler.connectors.alfresco.AlfrescoRepositoryConnector.java

/** Process a set of documents.
* This is the method that should cause each document to be fetched, processed, and the results either added
* to the queue of documents for the current job, and/or entered into the incremental ingestion manager.
* The document specification allows this class to filter what is done based on the job.
* The connector will be connected before this method can be called.
*@param documentIdentifiers is the set of document identifiers to process.
*@param statuses are the currently-stored document versions for each document in the set of document identifiers
* passed in above.
*@param activities is the interface this method should use to queue up new document references
* and ingest documents.
*@param jobMode is an integer describing how the job is being run, whether continuous or once-only.
*@param usesDefaultAuthority will be true only if the authority in use for these documents is the default one.
*/
@Override
public void processDocuments(String[] documentIdentifiers, IExistingVersions statuses, Specification spec,
        IProcessActivity activities, int jobMode, boolean usesDefaultAuthority)
        throws ManifoldCFException, ServiceInterruption {

    for (String documentIdentifier : documentIdentifiers) {
        // Prepare to access the document
        String nodeReference = documentIdentifier;
        String uuid = NodeUtils.getUuidFromNodeReference(nodeReference);

        if (Logging.connectors.isDebugEnabled())
            Logging.connectors.debug("Alfresco: Processing document identifier '" + nodeReference + "'");

        Reference reference = new Reference();
        reference.setStore(SearchUtils.STORE);
        reference.setUuid(uuid);

        Predicate predicate = new Predicate();
        predicate.setStore(SearchUtils.STORE);
        predicate.setNodes(new Reference[] { reference });

        Node resultNode = null;
        try {
            resultNode = NodeUtils.get(endpoint, username, password, socketTimeout, session, predicate);
        } catch (IOException e) {
            Logging.connectors.warn("Alfresco: IOException getting node: " + e.getMessage(), e);
            handleIOException(e);
        }

        NamedValue[] properties = resultNode.getProperties();
        boolean isDocument;
        String versionString = "";
        if (properties != null)
            isDocument = ContentModelUtils.isDocument(properties);
        else
            isDocument = false;
        if (isDocument) {
            boolean isVersioned = NodeUtils.isVersioned(resultNode.getAspects());
            if (isVersioned) {
                versionString = NodeUtils.getVersionLabel(properties);
            }
        }

        if (versionString.length() == 0
                || activities.checkDocumentNeedsReindexing(documentIdentifier, versionString)) {
            // Need to (re)index

            String errorCode = "OK";
            String errorDesc = StringUtils.EMPTY;
            Long fileLengthLong = null;

            long startTime = System.currentTimeMillis();

            try {

                try {
                    boolean isFolder = ContentModelUtils.isFolder(endpoint, username, password, socketTimeout,
                            session, reference);

                    //a generic node in Alfresco could have child-associations
                    if (isFolder) {
                        // queue all the children of the folder
                        QueryResult queryResult = SearchUtils.getChildren(endpoint, username, password,
                                socketTimeout, session, reference);
                        ResultSet resultSet = queryResult.getResultSet();
                        ResultSetRow[] resultSetRows = resultSet.getRows();
                        for (ResultSetRow resultSetRow : resultSetRows) {
                            NamedValue[] childProperties = resultSetRow.getColumns();
                            String childNodeReference = PropertiesUtils.getNodeReference(childProperties);
                            activities.addDocumentReference(childNodeReference, nodeReference,
                                    RELATIONSHIP_CHILD);
                        }
                    }

                } catch (IOException e) {
                    errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
                    errorDesc = e.getMessage();
                    Logging.connectors.warn("Alfresco: IOException finding children: " + e.getMessage(), e);
                    handleIOException(e);
                }

                //a generic node in Alfresco could also have binaries content
                if (isDocument) {
                    // this is a content to ingest
                    InputStream is = null;
                    long fileLength = 0;
                    try {
                        //properties ingestion
                        RepositoryDocument rd = new RepositoryDocument();
                        List<NamedValue> contentProperties = PropertiesUtils.getContentProperties(properties);
                        PropertiesUtils.ingestProperties(rd, properties, contentProperties);

                        // binaries ingestion - in Alfresco we could have more than one binary for each node (custom content models)
                        for (NamedValue contentProperty : contentProperties) {
                            //we are ingesting all the binaries defined as d:content property in the Alfresco content model
                            Content binary = ContentReader.read(endpoint, username, password, socketTimeout,
                                    session, predicate, contentProperty.getName());
                            fileLength = binary.getLength();
                            is = ContentReader.getBinary(endpoint, binary, username, password, socketTimeout,
                                    session);
                            rd.setBinary(is, fileLength);

                            //id is the node reference only if the node has a unique content stream
                            //For a node with a single d:content property: id = node reference
                            String id = PropertiesUtils.getNodeReference(properties);

                            //For a node with multiple d:content properties: id = node reference;QName
                            //The QName of a property of type d:content will be appended to the node reference
                            if (contentProperties.size() > 1) {
                                id = id + INGESTION_SEPARATOR_FOR_MULTI_BINARY + contentProperty.getName();
                            }

                            //the document uri is related to the specific d:content property available in the node
                            //we want to ingest each content stream that is nested in a single node
                            String documentURI = binary.getUrl();
                            activities.ingestDocumentWithException(documentIdentifier, id, versionString,
                                    documentURI, rd);
                            fileLengthLong = new Long(fileLength);
                        }

                        AuthenticationUtils.endSession();

                    } catch (ParseException e) {
                        errorCode = "PARSEEXCEPTION";
                        errorDesc = e.getMessage();
                        Logging.connectors.warn(
                                "Alfresco: Error during the reading process of dates: " + e.getMessage(), e);
                        handleParseException(e);
                    } catch (IOException e) {
                        errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
                        errorDesc = e.getMessage();
                        Logging.connectors.warn("Alfresco: IOException: " + e.getMessage(), e);
                        handleIOException(e);
                    } finally {
                        session = null;
                        try {
                            if (is != null) {
                                is.close();
                            }
                        } catch (InterruptedIOException e) {
                            errorCode = null;
                            throw new ManifoldCFException(e.getMessage(), e, ManifoldCFException.INTERRUPTED);
                        } catch (IOException e) {
                            errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
                            errorDesc = e.getMessage();
                            Logging.connectors.warn(
                                    "Alfresco: IOException closing file input stream: " + e.getMessage(), e);
                            handleIOException(e);
                        }
                    }

                }
            } catch (ManifoldCFException e) {
                if (e.getErrorCode() == ManifoldCFException.INTERRUPTED)
                    errorCode = null;
                throw e;
            } finally {
                if (errorCode != null)
                    activities.recordActivity(new Long(startTime), ACTIVITY_READ, fileLengthLong, nodeReference,
                            errorCode, errorDesc, null);
            }
        }
    }

}

From source file:org.kuali.ole.module.purap.service.impl.ElectronicInvoiceHelperServiceImpl.java

@Override
public ElectronicInvoiceLoad loadElectronicInvoices() {

    //add a step to check for directory paths
    prepareDirectories(getRequiredDirectoryNames());

    String baseDirName = getBaseDirName();
    String rejectDirName = getRejectDirName();
    String acceptDirName = getAcceptDirName();
    emailTextErrorList = new StringBuffer();

    boolean moveFiles = SpringContext.getBean(ParameterService.class).getParameterValueAsBoolean(
            ElectronicInvoiceStep.class,
            PurapParameterConstants.ElectronicInvoiceParameters.FILE_MOVE_AFTER_LOAD_IND);

    int failedCnt = 0;

    if (LOG.isInfoEnabled()) {
        LOG.info("Invoice Base Directory - " + electronicInvoiceInputFileType.getDirectoryPath());
        LOG.info("Invoice Accept Directory - " + acceptDirName);
        LOG.info("Invoice Reject Directory - " + rejectDirName);
        LOG.info("Is moving files allowed - " + moveFiles);
    }

    if (StringUtils.isBlank(rejectDirName)) {
        throw new RuntimeException("Reject directory name should not be empty");
    }

    if (StringUtils.isBlank(acceptDirName)) {
        throw new RuntimeException("Accept directory name should not be empty");
    }

    File baseDir = new File(baseDirName);
    if (!baseDir.exists()) {
        throw new RuntimeException("Base dir [" + baseDirName + "] doesn't exists in the system");
    }

    File[] filesToBeProcessed = baseDir.listFiles(new FileFilter() {
        @Override
        public boolean accept(File file) {
            String fullPath = FilenameUtils.getFullPath(file.getAbsolutePath());
            String fileName = FilenameUtils.getBaseName(file.getAbsolutePath());
            File processedFile = new File(fullPath + File.separator + fileName + ".processed");
            return (!file.isDirectory() && file.getName().endsWith(".xml") && !processedFile.exists());
        }
    });

    ElectronicInvoiceLoad eInvoiceLoad = new ElectronicInvoiceLoad();

    if (filesToBeProcessed == null || filesToBeProcessed.length == 0) {

        StringBuffer mailText = new StringBuffer();

        mailText.append("\n\n");
        mailText.append(PurapConstants.ElectronicInvoice.NO_FILES_PROCESSED_EMAIL_MESSAGE);
        mailText.append("\n\n");

        sendSummary(mailText);
        return eInvoiceLoad;
    }

    try {
        /**
         * Create, if not there
         */
        FileUtils.forceMkdir(new File(acceptDirName));
        FileUtils.forceMkdir(new File(rejectDirName));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }

    if (LOG.isInfoEnabled()) {
        LOG.info(filesToBeProcessed.length + " file(s) available for processing");
    }

    StringBuilder emailMsg = new StringBuilder();

    for (File element2 : filesToBeProcessed) {

        // MSU Contribution DTT-3014 OLEMI-8483 OLECNTRB-974
        File xmlFile = element2;
        LOG.info("Processing " + xmlFile.getName() + "....");

        byte[] modifiedXML = null;
        // process only if file exists and not empty
        if (xmlFile.length() != 0L) {
            modifiedXML = addNamespaceDefinition(eInvoiceLoad, xmlFile);
        }

        boolean isRejected = false;

        if (modifiedXML == null) {//Not able to parse the xml
            isRejected = true;
        } else {
            try {
                isRejected = processElectronicInvoice(eInvoiceLoad, xmlFile, modifiedXML);
            } catch (Exception e) {
                String msg = xmlFile.getName() + "\n";
                LOG.error(msg);

                //since getMessage() is empty we'll compose the stack trace and nicely format it.
                StackTraceElement[] elements = e.getStackTrace();
                StringBuffer trace = new StringBuffer();
                trace.append(e.getClass().getName());
                if (e.getMessage() != null) {
                    trace.append(": ");
                    trace.append(e.getMessage());
                }
                trace.append("\n");
                for (StackTraceElement element : elements) {
                    trace.append("    at ");
                    trace.append(describeStackTraceElement(element));
                    trace.append("\n");
                }

                LOG.error(trace);
                emailMsg.append(msg);
                msg += "\n--------------------------------------------------------------------------------------\n"
                        + trace;
                logProcessElectronicInvoiceError(msg);
                failedCnt++;

                /**
                 * Clear the error map, so that subsequent EIRT routing isn't prevented since rice is throwing a
                 * ValidationException if the error map is not empty before routing the doc.
                 */
                GlobalVariables.getMessageMap().clearErrorMessages();

                //Do not execute rest of code below
                continue;
            }
        }

        /**
         * If a single order in an invoice file has rejects while the remaining orders are accepted,
         * the entire file is moved to the reject dir.
         */
        if (isRejected) {
            if (LOG.isInfoEnabled()) {
                LOG.info(xmlFile.getName() + " has been rejected");
            }
            if (moveFiles) {
                if (LOG.isInfoEnabled()) {
                    LOG.info(xmlFile.getName() + " has been marked to move to " + rejectDirName);
                }
                eInvoiceLoad.addRejectFileToMove(xmlFile, rejectDirName);
            }
        } else {
            if (LOG.isInfoEnabled()) {
                LOG.info(xmlFile.getName() + " has been accepted");
            }
            if (moveFiles) {
                if (!moveFile(xmlFile, acceptDirName)) {
                    String msg = xmlFile.getName() + " unable to move";
                    LOG.error(msg);
                    throw new PurError(msg);
                }
            }
        }

        if (!moveFiles) {
            String fullPath = FilenameUtils.getFullPath(xmlFile.getAbsolutePath());
            String fileName = FilenameUtils.getBaseName(xmlFile.getAbsolutePath());
            File processedFile = new File(fullPath + File.separator + fileName + ".processed");
            try {
                FileUtils.touch(processedFile);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        //  delete the .done file
        deleteDoneFile(xmlFile);
    }

    emailTextErrorList.append("\nFAILED FILES\n");
    emailTextErrorList.append("-----------------------------------------------------------\n\n");
    emailTextErrorList.append(emailMsg);
    emailTextErrorList.append("\nTOTAL COUNT\n");
    emailTextErrorList.append("===========================\n");
    emailTextErrorList.append("      " + failedCnt + " FAILED\n");
    emailTextErrorList.append("===========================\n");

    StringBuffer summaryText = saveLoadSummary(eInvoiceLoad);

    StringBuffer finalText = new StringBuffer();
    finalText.append(summaryText);
    finalText.append("\n");
    finalText.append(emailTextErrorList);
    sendSummary(finalText);

    LOG.info("Processing completed");

    return eInvoiceLoad;

}

From source file:org.kuali.kfs.module.purap.service.impl.ElectronicInvoiceHelperServiceImpl.java

@Override
    @NonTransactional
public ElectronicInvoiceLoad loadElectronicInvoices() {

    //add a step to check for directory paths
    prepareDirectories(getRequiredDirectoryNames());

    String rejectDirName = getRejectDirName();
    String acceptDirName = getAcceptDirName();
    emailTextErrorList = new StringBuffer();

    boolean moveFiles = SpringContext.getBean(ParameterService.class).getParameterValueAsBoolean(
            ElectronicInvoiceStep.class,
            PurapParameterConstants.ElectronicInvoiceParameters.FILE_MOVE_AFTER_LOAD_IND);

    int failedCnt = 0;

    if (LOG.isInfoEnabled()) {
        LOG.info("Invoice Base Directory - " + electronicInvoiceInputFileType.getDirectoryPath());
        LOG.info("Invoice Accept Directory - " + acceptDirName);
        LOG.info("Invoice Reject Directory - " + rejectDirName);
        LOG.info("Is moving files allowed - " + moveFiles);
    }

    if (StringUtils.isBlank(rejectDirName)) {
        throw new RuntimeException("Reject directory name should not be empty");
    }

    if (StringUtils.isBlank(acceptDirName)) {
        throw new RuntimeException("Accept directory name should not be empty");
    }

    File[] filesToBeProcessed = getFilesToBeProcessed();
    ElectronicInvoiceLoad eInvoiceLoad = new ElectronicInvoiceLoad();

    if (filesToBeProcessed == null || filesToBeProcessed.length == 0) {

        StringBuffer mailText = new StringBuffer();

        mailText.append("\n\n");
        mailText.append(PurapConstants.ElectronicInvoice.NO_FILES_PROCESSED_EMAIL_MESSAGE);
        mailText.append("\n\n");

        sendSummary(mailText);
        return eInvoiceLoad;
    }

    try {
        /**
         * Create, if not there
         */
        FileUtils.forceMkdir(new File(acceptDirName));
        FileUtils.forceMkdir(new File(rejectDirName));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }

    if (LOG.isInfoEnabled()) {
        LOG.info(filesToBeProcessed.length + " file(s) available for processing");
    }

    StringBuilder emailMsg = new StringBuilder();

    for (int i = 0; i < filesToBeProcessed.length; i++) {

        File xmlFile = filesToBeProcessed[i];
        LOG.info("Processing " + xmlFile.getName() + "....");

        byte[] modifiedXML = null;
        //process only if file exists and not empty
        if (xmlFile.length() != 0L) {
            modifiedXML = addNamespaceDefinition(eInvoiceLoad, xmlFile);
        }

        boolean isRejected = false;

        if (modifiedXML == null) {//Not able to parse the xml
            isRejected = true;
        } else {
            try {
                isRejected = processElectronicInvoice(eInvoiceLoad, xmlFile, modifiedXML);
            } catch (Exception e) {
                String msg = xmlFile.getName() + "\n";
                LOG.error(msg);

                //since getMessage() is empty we'll compose the stack trace and nicely format it.
                StackTraceElement[] elements = e.getStackTrace();
                StringBuffer trace = new StringBuffer();
                trace.append(e.getClass().getName());
                if (e.getMessage() != null) {
                    trace.append(": ");
                    trace.append(e.getMessage());
                }
                trace.append("\n");
                for (int j = 0; j < elements.length; ++j) {
                    StackTraceElement element = elements[j];

                    trace.append("    at ");
                    trace.append(describeStackTraceElement(element));
                    trace.append("\n");
                }

                LOG.error(trace);
                emailMsg.append(msg);
                msg += "\n--------------------------------------------------------------------------------------\n"
                        + trace;
                logProcessElectronicInvoiceError(msg);
                failedCnt++;

                /**
                 * Clear the error map, so that subsequent EIRT routing isn't prevented since rice
                 * is throwing a ValidationException if the error map is not empty before routing the doc.
                 */
                GlobalVariables.getMessageMap().clearErrorMessages();

                //Do not execute rest of code below
                continue;
            }
        }

        /**
         * If a single order in an invoice file has rejects while the remaining orders are accepted,
         * the entire file is moved to the reject dir.
         */
        if (isRejected) {
            if (LOG.isInfoEnabled()) {
                LOG.info(xmlFile.getName() + " has been rejected");
            }
            if (moveFiles) {
                if (LOG.isInfoEnabled()) {
                    LOG.info(xmlFile.getName() + " has been marked to move to " + rejectDirName);
                }
                eInvoiceLoad.addRejectFileToMove(xmlFile, rejectDirName);
            }
        } else {
            if (LOG.isInfoEnabled()) {
                LOG.info(xmlFile.getName() + " has been accepted");
            }
            if (moveFiles) {
                if (!moveFile(xmlFile, acceptDirName)) {
                    String msg = xmlFile.getName() + " unable to move";
                    LOG.error(msg);
                    throw new PurError(msg);
                }
            }
        }

        if (!moveFiles) {
            String fullPath = FilenameUtils.getFullPath(xmlFile.getAbsolutePath());
            String fileName = FilenameUtils.getBaseName(xmlFile.getAbsolutePath());
            File processedFile = new File(fullPath + File.separator + fileName + ".processed");
            try {
                FileUtils.touch(processedFile);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        //  delete the .done file
        deleteDoneFile(xmlFile);
    }

    emailTextErrorList.append("\nFAILED FILES\n");
    emailTextErrorList.append("-----------------------------------------------------------\n\n");
    emailTextErrorList.append(emailMsg);
    emailTextErrorList.append("\nTOTAL COUNT\n");
    emailTextErrorList.append("===========================\n");
    emailTextErrorList.append("      " + failedCnt + " FAILED\n");
    emailTextErrorList.append("===========================\n");

    StringBuffer summaryText = saveLoadSummary(eInvoiceLoad);

    StringBuffer finalText = new StringBuffer();
    finalText.append(summaryText);
    finalText.append("\n");
    finalText.append(emailTextErrorList);
    sendSummary(finalText);

    LOG.info("Processing completed");

    return eInvoiceLoad;

}

From source file:org.apache.hadoop.hdfs.nfs.nfs3.RpcProgramNfs3.java

public READLINK3Response readlink(XDR xdr, SecurityHandler securityHandler, InetAddress client) {
    READLINK3Response response = new READLINK3Response(Nfs3Status.NFS3_OK);

    if (!checkAccessPrivilege(client, AccessPrivilege.READ_ONLY)) {
        response.setStatus(Nfs3Status.NFS3ERR_ACCES);
        return response;
    }

    DFSClient dfsClient = clientCache.getDfsClient(securityHandler.getUser());
    if (dfsClient == null) {
        response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
        return response;
    }

    READLINK3Request request = null;

    try {
        request = new READLINK3Request(xdr);
    } catch (IOException e) {
        LOG.error("Invalid READLINK request");
        return new READLINK3Response(Nfs3Status.NFS3ERR_INVAL);
    }

    FileHandle handle = request.getHandle();
    if (LOG.isDebugEnabled()) {
        LOG.debug("NFS READLINK fileId: " + handle.getFileId());
    }

    String fileIdPath = Nfs3Utils.getFileIdPath(handle);
    try {
        String target = dfsClient.getLinkTarget(fileIdPath);

        Nfs3FileAttributes postOpAttr = Nfs3Utils.getFileAttr(dfsClient, fileIdPath, iug);
        if (postOpAttr == null) {
            LOG.info("Can't get path for fileId:" + handle.getFileId());
            return new READLINK3Response(Nfs3Status.NFS3ERR_STALE);
        }
        if (postOpAttr.getType() != NfsFileType.NFSLNK.toValue()) {
            LOG.error("Not a symlink, fileId:" + handle.getFileId());
            return new READLINK3Response(Nfs3Status.NFS3ERR_INVAL);
        }
        if (target == null) {
            LOG.error("Symlink target should not be null, fileId:" + handle.getFileId());
            return new READLINK3Response(Nfs3Status.NFS3ERR_SERVERFAULT);
        }
        int rtmax = config.getInt(Nfs3Constant.MAX_READ_TRANSFER_SIZE_KEY,
                Nfs3Constant.MAX_READ_TRANSFER_SIZE_DEFAULT);
        if (rtmax < target.getBytes().length) {
            LOG.error("Link size: " + target.getBytes().length + " is larger than max transfer size: " + rtmax);
            return new READLINK3Response(Nfs3Status.NFS3ERR_IO, postOpAttr, new byte[0]);
        }

        return new READLINK3Response(Nfs3Status.NFS3_OK, postOpAttr, target.getBytes());

    } catch (IOException e) {
        LOG.warn("Readlink error: " + e.getClass(), e);
        if (e instanceof FileNotFoundException) {
            return new READLINK3Response(Nfs3Status.NFS3ERR_STALE);
        } else if (e instanceof AccessControlException) {
            return new READLINK3Response(Nfs3Status.NFS3ERR_ACCES);
        }
        return new READLINK3Response(Nfs3Status.NFS3ERR_IO);
    }
}

From source file:org.paxle.crawler.http.impl.HttpCrawler.java

public ICrawlerDocument request(URI requestUri) {
    if (requestUri == null)
        throw new NullPointerException("URL was null");
    this.logger.debug(String.format("Crawling URL '%s' ...", requestUri));

    ICrawlerDocument doc = null;
    HttpMethod method = null;
    try {
        final ICrawlerContext ctx = this.contextLocal.getCurrentContext();

        // creating an empty crawler-document
        doc = ctx.createDocument();
        doc.setLocation(requestUri);

        final String uriAsciiString = requestUri.toASCIIString();

        /* ==============================================================================
         * HTTP HEAD request
         * 
         * first use the HEAD method to determine whether the MIME-type is supported
         * and to compare the content-length with the maximum allowed download size
         * (both only if the server provides this information, if not, the file is
         * fetched)
         * ============================================================================== */
        method = new HeadMethod(uriAsciiString); // automatically follows redirects
        this.initRequestMethod(method);
        int statusCode = this.getHttpClient().executeMethod(method);

        final boolean headUnsupported = (statusCode == HttpStatus.SC_METHOD_FAILURE
                || statusCode == HttpStatus.SC_METHOD_NOT_ALLOWED);
        if (!headUnsupported) {
            if (statusCode != HttpStatus.SC_OK) {
                // RFC 2616 states that the GET and HEAD methods _must_ be supported by any
                // general purpose servers (which are in fact the ones we are connecting to here)

                if (statusCode == HttpStatus.SC_NOT_FOUND) {
                    doc.setStatus(ICrawlerDocument.Status.NOT_FOUND);
                } else {
                    doc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE,
                            String.format("Server returned: %s", method.getStatusLine()));
                }

                this.logger.warn(String.format("Crawling of URL '%s' failed. Server returned: %s", requestUri,
                        method.getStatusLine()));
                return doc;
            }

            // getting the mimetype and charset
            Header contentTypeHeader = method.getResponseHeader(HTTPHEADER_CONTENT_TYPE);
            if (!handleContentTypeHeader(contentTypeHeader, doc))
                return doc;

            // reject the document if content-length is above our limit
            Header contentLengthHeader = method.getResponseHeader(HTTPHEADER_CONTENT_LENGTH);
            if (!handleContentLengthHeader(contentLengthHeader, doc))
                return doc;

            // FIXME: we've been redirected, re-enqueue the new URL and abort processing
            //if (!requestUri.equals(method.getURI())) ;            
        }

        /* ==============================================================================
         * HTTP GET request
         * 
         * secondly - if everything is alright up to now - proceed with getting the 
         * actual document
         * ============================================================================== */
        HttpMethod getMethod = new GetMethod(uriAsciiString); // automatically follows redirects
        method.releaseConnection();

        method = getMethod;
        this.initRequestMethod(method);

        // send the request to the server
        statusCode = this.getHttpClient().executeMethod(method);

        // check the response status code
        if (statusCode != HttpStatus.SC_OK) {
            if (statusCode == HttpStatus.SC_NOT_FOUND) {
                doc.setStatus(ICrawlerDocument.Status.NOT_FOUND);
            } else {
                doc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE,
                        String.format("Server returned: %s", method.getStatusLine()));
            }

            this.logger.warn(String.format("Crawling of URL '%s' failed. Server returned: %s", requestUri,
                    method.getStatusLine()));
            return doc;
        }

        // FIXME: we've been redirected, re-enqueue the new URL and abort processing
        // if (!requestUri.equals(method.getURI())) ; 

        /*
         * HTTP Content-Type
         * - getting the mimetype and charset
         */
        Header contentTypeHeader = method.getResponseHeader(HTTPHEADER_CONTENT_TYPE);
        if (!handleContentTypeHeader(contentTypeHeader, doc))
            return doc;

        /* 
         * HTTP Content-Length
         * - Reject the document if content-length is above our limit
         * 
         *   We do this a second time here because some servers may have set the content-length
         *   of the head response to <code>0</code>
         */
        Header contentLengthHeader = method.getResponseHeader(HTTPHEADER_CONTENT_LENGTH);
        if (!handleContentLengthHeader(contentLengthHeader, doc))
            return doc;

        extractHttpHeaders(method, doc); // externalised into this method to cleanup here a bit

        // getting the response body
        InputStream respBody = method.getResponseBodyAsStream();

        // handle the content-encoding, i.e. decompress the server's response
        Header contentEncodingHeader = method.getResponseHeader(HTTPHEADER_CONTENT_ENCODING);
        try {
            respBody = handleContentEncoding(contentEncodingHeader, respBody);

            /* Limit the max allowed length of the content to copy. -1 is used for no limit.
             * 
             * We need to set a limit if:
             * a) the user has configured a max-download-size AND
             * b) the server returned no content-length header
             */
            int copyLimit = (this.maxDownloadSize <= 0 || contentLengthHeader != null) ? -1
                    : this.maxDownloadSize;

            // copy the content to file
            final ICrawlerTools crawlerTools = ctx.getCrawlerTools();
            crawlerTools.saveInto(doc, respBody, lrc, copyLimit);

            doc.setStatus(ICrawlerDocument.Status.OK);
            this.logger.debug(String.format("Crawling of URL '%s' finished.", requestUri));
        } catch (IOException e) {
            String msg = e.getMessage();
            if (msg == null || !msg.equals("Corrupt GZIP trailer"))
                throw e;

            setHostSetting(method.getURI().getHost(), PREF_NO_ENCODING);
            msg = String.format("server sent a corrupt gzip trailer at URL '%s'", requestUri);
            logger.warn(msg);

            // FIXME re-enqueue command
            doc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE, msg);
        } finally {
            respBody.close();
        }
    } catch (NoRouteToHostException e) {
        this.logger.warn(String.format("Error crawling %s: %s", requestUri, e.getMessage()));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (UnknownHostException e) {
        this.logger.warn(String.format("Error crawling %s: Unknown host.", requestUri));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (ConnectException e) {
        this.logger.warn(String.format("Error crawling %s: Unable to connect to host.", requestUri));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (ConnectTimeoutException e) {
        this.logger.warn(String.format("Error crawling %s: %s.", requestUri, e.getMessage()));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (SocketTimeoutException e) {
        this.logger.warn(String.format("Error crawling %s: Connection timeout.", requestUri));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (CircularRedirectException e) {
        this.logger.warn(String.format("Error crawling %s: %s", requestUri, e.getMessage()));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (NoHttpResponseException e) {
        this.logger.warn(String.format("Error crawling %s: %s", requestUri, e.getMessage()));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (ContentLengthLimitExceededException e) {
        this.logger.warn(String.format("Error crawling %s: %s", requestUri, e.getMessage()));
        doc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE, e.getMessage());
    } catch (Throwable e) {
        String errorMsg;
        if (e instanceof HttpException) {
            errorMsg = "Unrecovered protocol exception: [%s] %s";
        } else if (e instanceof IOException) {
            errorMsg = "Transport exceptions: [%s] %s";
        } else {
            errorMsg = "Unexpected exception: [%s] %s";
        }
        errorMsg = String.format(errorMsg, e.getClass().getName(), e.getMessage());

        this.logger.error(String.format("Error crawling %s: %s", requestUri, errorMsg));
        doc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE, errorMsg);
        e.printStackTrace();
    } finally {
        if (method != null)
            method.releaseConnection();
    }

    return doc;
}

From source file:com.buaa.cfs.nfs3.RpcProgramNfs3.java

@VisibleForTesting
READLINK3Response readlink(XDR xdr, SecurityHandler securityHandler, SocketAddress remoteAddress) {
    READLINK3Response response = new READLINK3Response(Nfs3Status.NFS3_OK);

    if (!checkAccessPrivilege(remoteAddress, AccessPrivilege.READ_ONLY)) {
        response.setStatus(Nfs3Status.NFS3ERR_ACCES);
        return response;
    }

    DFSClient dfsClient = clientCache.getDfsClient(securityHandler.getUser());
    if (dfsClient == null) {
        response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
        return response;
    }

    READLINK3Request request;

    try {
        request = READLINK3Request.deserialize(xdr);
    } catch (IOException e) {
        LOG.error("Invalid READLINK request");
        return new READLINK3Response(Nfs3Status.NFS3ERR_INVAL);
    }

    FileHandle handle = request.getHandle();
    if (LOG.isDebugEnabled()) {
        LOG.debug("NFS READLINK fileId: " + handle.getFileId() + " client: " + remoteAddress);
    }

    String fileIdPath = Nfs3Utils.getFileIdPath(handle);
    try {
        String target = dfsClient.getLinkTarget(fileIdPath);

        Nfs3FileAttributes postOpAttr = Nfs3Utils.getFileAttr(dfsClient, fileIdPath, iug);
        if (postOpAttr == null) {
            LOG.info("Can't get path for fileId: " + handle.getFileId());
            return new READLINK3Response(Nfs3Status.NFS3ERR_STALE);
        }
        if (postOpAttr.getType() != NfsFileType.NFSLNK.toValue()) {
            LOG.error("Not a symlink, fileId: " + handle.getFileId());
            return new READLINK3Response(Nfs3Status.NFS3ERR_INVAL);
        }
        if (target == null) {
            LOG.error("Symlink target should not be null, fileId: " + handle.getFileId());
            return new READLINK3Response(Nfs3Status.NFS3ERR_SERVERFAULT);
        }
        int rtmax = config.getInt(NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_KEY,
                NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_DEFAULT);
        if (rtmax < target.getBytes(Charset.forName("UTF-8")).length) {
            LOG.error("Link size: " + target.getBytes(Charset.forName("UTF-8")).length
                    + " is larger than max transfer size: " + rtmax);
            return new READLINK3Response(Nfs3Status.NFS3ERR_IO, postOpAttr, new byte[0]);
        }

        return new READLINK3Response(Nfs3Status.NFS3_OK, postOpAttr, target.getBytes(Charset.forName("UTF-8")));

    } catch (IOException e) {
        LOG.warn("Readlink error: " + e.getClass(), e);
        int status = mapErrorStatus(e);
        return new READLINK3Response(status);
    }
}

From source file:org.ecoinformatics.datamanager.download.DownloadHandler.java

/**
 * Gets content from the given source and writes it to the DataStorageInterface
 * for storage. This method will be called by run().
 *
 * @param resourceName  the URL to the source data to be retrieved
 */
protected boolean getContentFromSource(String resourceName) {
    boolean successFlag = false;
    QualityCheck onlineURLsQualityCheck = null;
    boolean onlineURLsException = false; // used to determine status of onlineURLs quality check

    if (resourceName != null) {
        resourceName = resourceName.trim();
    }

    if (resourceName != null && (resourceName.startsWith("http://") || resourceName.startsWith("https://")
            || resourceName.startsWith("file://") || resourceName.startsWith("ftp://"))) {
        // get the data from a URL
        int responseCode = 0;
        String responseMessage = null;

        try {
            URL url = new URL(resourceName);
            boolean isFTP = false;

            if (entity != null) {
                String contentType = null;

                // Find the right MIME type and set it as content type
                if (resourceName.startsWith("http")) {
                    HttpURLConnection httpURLConnection = (HttpURLConnection) url.openConnection();
                    httpURLConnection.setRequestMethod("HEAD");
                    httpURLConnection.connect();
                    contentType = httpURLConnection.getContentType();
                    responseCode = httpURLConnection.getResponseCode();
                    responseMessage = httpURLConnection.getResponseMessage();
                } else if (resourceName.startsWith("file")) {
                    URLConnection urlConnection = url.openConnection();
                    urlConnection.connect();
                    contentType = urlConnection.getContentType();
                } else { // FTP
                    isFTP = true;
                    contentType = "application/octet-stream";
                }

                entity.setUrlContentType(contentType);
            }

            if (!isFTP) { // HTTP(S) or FILE
                InputStream filestream = url.openStream();

                try {
                    successFlag = this.writeRemoteInputStreamIntoDataStorage(filestream);
                } catch (IOException e) {
                    exception = e;
                    String errorMessage = e.getMessage();
                    if (errorMessage.startsWith(ONLINE_URLS_EXCEPTION_MESSAGE)) {
                        onlineURLsException = true;
                    }
                } finally {
                    filestream.close();
                }
            } else { // FTP
                String[] urlParts = resourceName.split("/");
                String address = urlParts[2];
                String dir = "/";
                for (int i = 3; i < urlParts.length - 1; i++) {
                    dir += urlParts[i] + "/";
                }
                String fileName = urlParts[urlParts.length - 1];
                FTPClient ftpClient = new FTPClient();
                ftpClient.connect(address);
                ftpClient.login(ANONYMOUS, anonymousFtpPasswd);
                ftpClient.changeWorkingDirectory(dir);
                ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
                ftpClient.enterLocalPassiveMode(); // necessary to avoid firewall blocking
                InputStream filestream = ftpClient.retrieveFileStream(fileName);
                try {
                    successFlag = this.writeRemoteInputStreamIntoDataStorage(filestream);
                } catch (IOException e) {
                    exception = e;
                    String errorMessage = e.getMessage();
                    if (errorMessage.startsWith(ONLINE_URLS_EXCEPTION_MESSAGE)) {
                        onlineURLsException = true;
                    }
                } finally {
                    try {
                        filestream.close();
                    } catch (IOException e) {
                        exception = new DataSourceNotFoundException(String
                                .format("Error closing local file '%s': %s", resourceName, e.getMessage()));
                        onlineURLsException = true;
                    }
                }

                // logout and disconnect if FTP session
                if (resourceName.startsWith("ftp") && ftpClient != null) {
                    try {
                        ftpClient.enterLocalActiveMode();
                        ftpClient.logout();
                        ftpClient.disconnect();
                    } catch (IOException e) {
                        exception = new DataSourceNotFoundException(
                                String.format("Error disconnecting from FTP with resource '%s': %s",
                                        resourceName, e.getMessage()));
                        onlineURLsException = true;
                    }
                }
            }
        } catch (MalformedURLException e) {
            String eClassName = e.getClass().getName();
            String eMessage = String.format("%s: %s", eClassName, e.getMessage());
            exception = new DataSourceNotFoundException(
                    String.format("The URL '%s' is a malformed URL: %s", resourceName, eMessage));
        } catch (IOException e) {
            String eClassName = e.getClass().getName();
            String eMessage = String.format("%s: %s", eClassName, e.getMessage());
            if (responseCode > 0) {
                eMessage = String.format("Response Code: %d %s; %s", responseCode, responseMessage, eMessage);
            }
            exception = new DataSourceNotFoundException(
                    String.format("The URL '%s' is not reachable: %s", resourceName, eMessage));
        }

        // Initialize the "Online URLs are live" quality check
        String qualityCheckIdentifier = "onlineURLs";
        QualityCheck qualityCheckTemplate = QualityReport.getQualityCheckTemplate(qualityCheckIdentifier);
        onlineURLsQualityCheck = new QualityCheck(qualityCheckIdentifier, qualityCheckTemplate);

        if (QualityCheck.shouldRunQualityCheck(entity, onlineURLsQualityCheck)) {
            String resourceNameEscaped = embedInCDATA(resourceName);

            if (!onlineURLsException) {
                onlineURLsQualityCheck.setStatus(Status.valid);
                onlineURLsQualityCheck.setFound("true");
                onlineURLsQualityCheck.setExplanation("Succeeded in accessing URL: " + resourceNameEscaped);
            } else {
                onlineURLsQualityCheck.setFailedStatus();
                onlineURLsQualityCheck.setFound("false");
                String explanation = "Failed to access URL: " + resourceNameEscaped;
                explanation = explanation + "; " + embedInCDATA(exception.getMessage());
                onlineURLsQualityCheck.setExplanation(explanation);
            }

            entity.addQualityCheck(onlineURLsQualityCheck);
        }

        return successFlag;
    } else if (resourceName != null && resourceName.startsWith("ecogrid://")) {
        // get the docid from url
        int start = resourceName.indexOf("/", 11) + 1;
        //log.debug("start: " + start);
        int end = resourceName.indexOf("/", start);

        if (end == -1) {
            end = resourceName.length();
        }

        //log.debug("end: " + end);
        String ecogridIdentifier = resourceName.substring(start, end);
        // pass this docid and get data item
        //System.out.println("the endpoint is "+ECOGRIDENDPOINT);
        //System.out.println("The identifier is "+ecogridIdentifier);
        //return false;
        return getContentFromEcoGridSource(ecogridEndPoint, ecogridIdentifier);
    } else if (resourceName != null && resourceName.startsWith("srb://")) {
        // get srb docid from the url
        String srbIdentifier = transformSRBurlToDocid(resourceName);
        // reset endpoint for srb (this is a hack; we need to figure out
        // a more elegant way to do this)
        //mEndPoint = Config.getValue("//ecogridService/srb/endPoint");
        // pass this docid and get data item
        //log.debug("before get srb data");
        return getContentFromEcoGridSource(SRBENDPOINT, srbIdentifier);
    } else {
        successFlag = false;
        return successFlag;
    }
}