Example usage for java.io IOException getClass

List of usage examples for java.io IOException getClass

Introduction

On this page you can find example usages of java.io.IOException.getClass().

Prototype

@HotSpotIntrinsicCandidate
public final native Class<?> getClass();

Document

Returns the runtime class of this Object.
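
For orientation, here is a minimal sketch of the pattern most of the examples below share: catching an IOException and calling getClass() on it to report the concrete runtime exception type alongside the message. The class name and the file name "data.txt" are hypothetical.

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class GetClassExample {
    public static void main(String[] args) {
        try (BufferedReader reader = new BufferedReader(new FileReader("data.txt"))) {
            System.out.println(reader.readLine());
        } catch (IOException e) {
            // getClass() returns the runtime class, e.g. java.io.FileNotFoundException,
            // which can be more specific than the declared IOException type.
            String eClassName = e.getClass().getName();
            System.err.println(String.format("%s: %s", eClassName, e.getMessage()));
        }
    }
}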

Usage

From source file:edu.lternet.pasta.dml.download.DownloadHandler.java

/**
 * Gets content from the given source and writes it to the DataStorageInterface
 * for storage. This method is called by run().
 *
 * @param resourceName  the URL to the source data to be retrieved
 * @return true if the content was successfully retrieved and stored, else false
 */
protected boolean getContentFromSource(String resourceName) {
    boolean successFlag = false;
    QualityCheck onlineURLsQualityCheck = null;
    boolean onlineURLsException = false; // used to determine status of onlineURLs quality check

    if (resourceName != null) {
        resourceName = resourceName.trim();
    }

    if (resourceName != null && (resourceName.startsWith("http://") || resourceName.startsWith("https://")
            || resourceName.startsWith("file://") || resourceName.startsWith("ftp://"))) {
        // get the data from a URL
        int responseCode = 0;
        String responseMessage = null;

        try {
            URL url = new URL(resourceName);
            boolean isFTP = false;

            if (entity != null) {
                String contentType = null;

                // Find the right MIME type and set it as content type
                if (resourceName.startsWith("http")) {
                    HttpURLConnection httpURLConnection = (HttpURLConnection) url.openConnection();
                    httpURLConnection.setRequestMethod("HEAD");
                    httpURLConnection.connect();
                    contentType = httpURLConnection.getContentType();
                    responseCode = httpURLConnection.getResponseCode();
                    responseMessage = httpURLConnection.getResponseMessage();
                } else if (resourceName.startsWith("file")) {
                    URLConnection urlConnection = url.openConnection();
                    urlConnection.connect();
                    contentType = urlConnection.getContentType();
                } else { // FTP
                    isFTP = true;
                    contentType = "application/octet-stream";
                }

                entity.setUrlContentType(contentType);
            }

            if (!isFTP) { // HTTP(S) or FILE
                InputStream filestream = url.openStream();

                try {
                    successFlag = this.writeRemoteInputStreamIntoDataStorage(filestream);
                } catch (IOException e) {
                    exception = e;
                    String errorMessage = e.getMessage();
                    if (errorMessage.startsWith(ONLINE_URLS_EXCEPTION_MESSAGE)) {
                        onlineURLsException = true;
                    }
                } finally {
                    filestream.close();
                }
            } else { // FTP
                String[] urlParts = resourceName.split("/");
                String address = urlParts[2];
                String dir = "/";
                for (int i = 3; i < urlParts.length - 1; i++) {
                    dir += urlParts[i] + "/";
                }
                String fileName = urlParts[urlParts.length - 1];
                FTPClient ftpClient = new FTPClient();
                ftpClient.connect(address);
                ftpClient.login(ANONYMOUS, anonymousFtpPasswd);
                ftpClient.changeWorkingDirectory(dir);
                ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
                ftpClient.enterLocalPassiveMode(); // necessary to avoid firewall blocking
                InputStream filestream = ftpClient.retrieveFileStream(fileName);
                try {
                    successFlag = this.writeRemoteInputStreamIntoDataStorage(filestream);
                } catch (IOException e) {
                    exception = e;
                    String errorMessage = e.getMessage();
                    if (errorMessage.startsWith(ONLINE_URLS_EXCEPTION_MESSAGE)) {
                        onlineURLsException = true;
                    }
                } finally {
                    try {
                        filestream.close();
                    } catch (IOException e) {
                        exception = new DataSourceNotFoundException(String
                                .format("Error closing local file '%s': %s", resourceName, e.getMessage()));
                        onlineURLsException = true;
                    }
                }

                // logout and disconnect if FTP session
                if (resourceName.startsWith("ftp") && ftpClient != null) {
                    try {
                        ftpClient.enterLocalActiveMode();
                        ftpClient.logout();
                        ftpClient.disconnect();
                    } catch (IOException e) {
                        exception = new DataSourceNotFoundException(
                                String.format("Error disconnecting from FTP with resource '%s': %s",
                                        resourceName, e.getMessage()));
                        onlineURLsException = true;
                    }
                }
            }
        } catch (MalformedURLException e) {
            String eClassName = e.getClass().getName();
            String eMessage = String.format("%s: %s", eClassName, e.getMessage());
            onlineURLsException = true;
            exception = new DataSourceNotFoundException(
                    String.format("The URL '%s' is a malformed URL: %s", resourceName, eMessage));
        } catch (IOException e) {
            String eClassName = e.getClass().getName();
            String eMessage = String.format("%s: %s", eClassName, e.getMessage());
            if (responseCode > 0) {
                eMessage = String.format("Response Code: %d %s; %s", responseCode, responseMessage, eMessage);
            }
            onlineURLsException = true;
            exception = new DataSourceNotFoundException(
                    String.format("The URL '%s' is not reachable: %s", resourceName, eMessage));
        }

        // Initialize the "Online URLs are live" quality check
        String qualityCheckIdentifier = "onlineURLs";
        QualityCheck qualityCheckTemplate = QualityReport.getQualityCheckTemplate(qualityCheckIdentifier);
        onlineURLsQualityCheck = new QualityCheck(qualityCheckIdentifier, qualityCheckTemplate);

        if (QualityCheck.shouldRunQualityCheck(entity, onlineURLsQualityCheck)) {
            String resourceNameEscaped = embedInCDATA(resourceName);

            if (!onlineURLsException) {
                onlineURLsQualityCheck.setStatus(Status.valid);
                onlineURLsQualityCheck.setFound("true");
                onlineURLsQualityCheck.setExplanation("Succeeded in accessing URL: " + resourceNameEscaped);
            } else {
                onlineURLsQualityCheck.setFailedStatus();
                onlineURLsQualityCheck.setFound("false");
                String explanation = "Failed to access URL: " + resourceNameEscaped;
                explanation = explanation + "; " + embedInCDATA(exception.getMessage());
                onlineURLsQualityCheck.setExplanation(explanation);
            }

            entity.addQualityCheck(onlineURLsQualityCheck);
        }

        return successFlag;
    } else if (resourceName != null && resourceName.startsWith("ecogrid://")) {
        // get the docid from url
        int start = resourceName.indexOf("/", 11) + 1;
        //log.debug("start: " + start);
        int end = resourceName.indexOf("/", start);

        if (end == -1) {
            end = resourceName.length();
        }

        //log.debug("end: " + end);
        String ecogridIdentifier = resourceName.substring(start, end);
        // pass this docid and get data item
        //System.out.println("the endpoint is "+ECOGRIDENDPOINT);
        //System.out.println("The identifier is "+ecogridIdentifier);
        //return false;
        return getContentFromEcoGridSource(ecogridEndPoint, ecogridIdentifier);
    } else if (resourceName != null && resourceName.startsWith("srb://")) {
        // get srb docid from the url
        String srbIdentifier = transformSRBurlToDocid(resourceName);
        // reset endpoint for srb (this is a hack; we need to figure out an
        // elegant way to do this)
        //mEndPoint = Config.getValue("//ecogridService/srb/endPoint");
        // pass this docid and get data item
        //log.debug("before get srb data");
        return getContentFromEcoGridSource(SRBENDPOINT, srbIdentifier);
    } else {
        successFlag = false;
        return successFlag;
    }
}

From source file:com.kkbox.toolkit.internal.api.APIRequest.java

@Override
public Void doInBackground(Object... params) {
    int readLength;
    final byte[] buffer = new byte[128];
    listener = (APIRequestListener) params[0];
    int retryTimes = 0;
    File cacheFile = null;
    ConnectivityManager connectivityManager = null;
    if (context != null) {
        final File cacheDir = new File(context.getCacheDir().getAbsolutePath() + File.separator + "api");
        if (!cacheDir.exists()) {
            cacheDir.mkdir();
        }
        cacheFile = new File(
                cacheDir.getAbsolutePath() + File.separator + StringUtils.getMd5Hash(url + getParams));
        connectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
    }

    if (context != null && cacheTimeOut > 0 && cacheFile.exists()
            && ((System.currentTimeMillis() - cacheFile.lastModified() < cacheTimeOut)
                    || connectivityManager.getActiveNetworkInfo() == null)) {
        try {
            parseInputStream(new FileInputStream(cacheFile), cipher);
        } catch (IOException e) {
            isNetworkError = true;
        } catch (Exception e) {
            e.printStackTrace();
        }
    } else {
        do {
            try {
                KKDebug.i("Connect API url " + url + getParams);
                if (postParams != null || multipartEntity != null || stringEntity != null || fileEntity != null
                        || byteArrayEntity != null || gzipStreamEntity != null
                        || (headerParams != null && postParams != null)) {
                    final HttpPost httppost = new HttpPost(url + getParams);
                    if (postParams != null) {
                        httppost.setEntity(new UrlEncodedFormEntity(postParams, HTTP.UTF_8));
                    }
                    if (multipartEntity != null) {
                        httppost.setEntity(multipartEntity);
                    }
                    if (stringEntity != null) {
                        httppost.setEntity(stringEntity);
                    }
                    if (fileEntity != null) {
                        httppost.setEntity(fileEntity);
                    }
                    if (byteArrayEntity != null) {
                        httppost.setEntity(byteArrayEntity);
                    }
                    if (gzipStreamEntity != null) {
                        httppost.setHeader("Accept-Encoding", "gzip");
                        httppost.setEntity(gzipStreamEntity);
                    }
                    if (headerParams != null) {
                        for (NameValuePair header : headerParams) {
                            httppost.setHeader(header.getName(), header.getValue());
                        }
                    }
                    response = httpclient.execute(httppost);
                } else {
                    final HttpGet httpGet = new HttpGet(url + getParams);
                    if (headerParams != null) {
                        for (NameValuePair header : headerParams) {
                            httpGet.setHeader(header.getName(), header.getValue());
                        }
                    }
                    response = httpclient.execute(httpGet);
                }
                httpStatusCode = response.getStatusLine().getStatusCode();
                int httpStatusType = httpStatusCode / 100;
                switch (httpStatusType) {
                case 2:
                    is = getInputStreamFromHttpResponse();
                    isNetworkError = false;
                    break;
                case 4:
                    KKDebug.w("Get client error " + httpStatusCode + " with connection : " + url + getParams);
                    is = getInputStreamFromHttpResponse();
                    isHttpStatusError = true;
                    isNetworkError = false;
                    break;
                case 5:
                    KKDebug.w("Get server error " + httpStatusCode + " with connection : " + url + getParams);
                    is = getInputStreamFromHttpResponse();
                    isHttpStatusError = true;
                    isNetworkError = false;
                    break;
                default:
                    KKDebug.w("connection to " + url + getParams + " returns " + httpStatusCode);
                    retryTimes++;
                    isNetworkError = true;
                    SystemClock.sleep(1000);
                    break;
                }
            } catch (final SSLException e) {
                KKDebug.w("connection to " + url + getParams + " failed with " + e.getClass().getName());
                isNetworkError = true;
                errorMessage = e.getClass().getName();
                return null;
            } catch (final Exception e) {
                KKDebug.w("connection to " + url + getParams + " failed!");
                retryTimes++;
                isNetworkError = true;
                SystemClock.sleep(1000);
            }
        } while (isNetworkError && retryTimes < retryLimit);

        try {
            if (!isNetworkError && !isHttpStatusError && listener != null) {
                if (cacheTimeOut > 0) {
                    FileOutputStream fileOutputStream = new FileOutputStream(cacheFile);
                    while ((readLength = is.read(buffer, 0, buffer.length)) != -1) {
                        fileOutputStream.write(buffer, 0, readLength);
                    }
                    fileOutputStream.close();
                    parseInputStream(new FileInputStream(cacheFile), cipher);
                } else {
                    parseInputStream(is, cipher);
                }
            } else if (isHttpStatusError) {
                ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                while ((readLength = is.read(buffer, 0, buffer.length)) != -1) {
                    byteArrayOutputStream.write(buffer, 0, readLength);
                }
                byteArrayOutputStream.flush();
                errorMessage = byteArrayOutputStream.toString();
            }
            response.getEntity().consumeContent();
        } catch (IOException e) {
            isNetworkError = true;
        } catch (Exception e) {
        }
    }
    return null;
}

From source file:org.apache.axis2.context.MessageContext.java

/**
 * Calls the serializeSelfManagedData() method of each handler that
 * implements the <b>SelfManagedDataManager</b> interface.
 * Handlers for this message context are identified via the
 * executionChain list.
 *
 * @param out The output stream
 */
private void serializeSelfManagedData(ObjectOutput out) {
    selfManagedDataHandlerCount = 0;

    try {
        if ((selfManagedDataMap == null) || (executionChain == null) || (selfManagedDataMap.size() == 0)
                || (executionChain.size() == 0)) {
            out.writeBoolean(ExternalizeConstants.EMPTY_OBJECT);

            if (DEBUG_ENABLED && log.isTraceEnabled()) {
                log.trace(getLogIDString() + ":serializeSelfManagedData(): No data : END");
            }

            return;
        }

        // let's create a temporary list with the handlers
        ArrayList<Handler> flatExecChain = flattenPhaseListToHandlers(executionChain, null);

        //ArrayList selfManagedDataHolderList = serializeSelfManagedDataHelper(flatExecChain.iterator(), new ArrayList());
        ArrayList<SelfManagedDataHolder> selfManagedDataHolderList = serializeSelfManagedDataHelper(
                flatExecChain);

        if (selfManagedDataHolderList.size() == 0) {
            out.writeBoolean(ExternalizeConstants.EMPTY_OBJECT);

            if (DEBUG_ENABLED && log.isTraceEnabled()) {
                log.trace(getLogIDString() + ":serializeSelfManagedData(): No data : END");
            }

            return;
        }

        out.writeBoolean(ExternalizeConstants.ACTIVE_OBJECT);

        // SelfManagedData can be binary so won't be able to treat it as a
        // string - need to treat it as a byte []

        // how many handlers actually
        // returned serialized SelfManagedData
        out.writeInt(selfManagedDataHolderList.size());

        for (int i = 0; i < selfManagedDataHolderList.size(); i++) {
            out.writeObject(selfManagedDataHolderList.get(i));
        }

    } catch (IOException e) {
        if (DEBUG_ENABLED && log.isTraceEnabled()) {
            log.trace("MessageContext:serializeSelfManagedData(): Exception [" + e.getClass().getName()
                    + "]  description [" + e.getMessage() + "]", e);
        }
    }

}

From source file:ca.uhn.fhir.tinder.ant.TinderGeneratorTask.java

@Override
public void execute() throws BuildException {
    validateAttributes();

    try {

        if (baseResourceNames == null || baseResourceNames.isEmpty()) {
            baseResourceNames = new ArrayList<String>();

            log("No resource names supplied, going to use all resources from version: "
                    + fhirContext.getVersion().getVersion());

            Properties p = new Properties();
            try {
                p.load(fhirContext.getVersion().getFhirVersionPropertiesFile());
            } catch (IOException e) {
                throw new BuildException("Failed to load version property file", e);
            }

            if (verbose) {
                log("Property file contains: " + p);
            }

            for (Object next : p.keySet()) {
                if (((String) next).startsWith("resource.")) {
                    baseResourceNames.add(((String) next).substring("resource.".length()).toLowerCase());
                }
            }
        } else {
            for (int i = 0; i < baseResourceNames.size(); i++) {
                baseResourceNames.set(i, baseResourceNames.get(i).toLowerCase());
            }
        }

        if (excludeResourceNames != null) {
            for (int i = 0; i < excludeResourceNames.size(); i++) {
                baseResourceNames.remove(excludeResourceNames.get(i).toLowerCase());
            }
        }

        log("Including the following resources: " + baseResourceNames);

        ResourceGeneratorUsingSpreadsheet gen = new ResourceGeneratorUsingSpreadsheet(version, projectHome);
        gen.setBaseResourceNames(baseResourceNames);

        try {
            gen.parse();

            //            gen.setFilenameSuffix("ResourceProvider");
            //            gen.setTemplate("/vm/jpa_daos.vm");
            //            gen.writeAll(packageDirectoryBase, null,packageBase);

            // gen.setFilenameSuffix("ResourceTable");
            // gen.setTemplate("/vm/jpa_resource_table.vm");
            // gen.writeAll(directoryBase, packageBase);

        } catch (Exception e) {
            throw new BuildException("Failed to parse FHIR metadata", e);
        }

        try {
            VelocityContext ctx = new VelocityContext();
            ctx.put("resources", gen.getResources());
            ctx.put("packageBase", packageBase);
            ctx.put("targetPackage", targetPackage);
            ctx.put("version", version);
            ctx.put("esc", new EscapeTool());

            String capitalize = WordUtils.capitalize(version);
            if ("Dstu".equals(capitalize)) {
                capitalize = "Dstu1";
            }
            ctx.put("versionCapitalized", capitalize);

            VelocityEngine v = new VelocityEngine();
            v.setProperty("resource.loader", "cp");
            v.setProperty("cp.resource.loader.class",
                    "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
            v.setProperty("runtime.references.strict", Boolean.TRUE);

            targetDirectoryFile.mkdirs();

            if (targetFile != null) {
                InputStream templateIs = null;
                if (templateFileFile != null) {
                    templateIs = new FileInputStream(templateFileFile);
                } else {
                    templateIs = ResourceGeneratorUsingSpreadsheet.class.getResourceAsStream(template);
                }
                InputStreamReader templateReader = new InputStreamReader(templateIs);

                File target = null;
                if (targetPackage != null) {
                    target = new File(targetDir, targetPackage.replace('.', File.separatorChar));
                } else {
                    target = new File(targetDir);
                }
                target.mkdirs();
                File f = new File(target, targetFile);
                OutputStreamWriter w = new OutputStreamWriter(new FileOutputStream(f, false), "UTF-8");

                v.evaluate(ctx, w, "", templateReader);
                w.close();

            } else {
                File packageDirectoryBase = new File(targetDir,
                        packageBase.replace(".", File.separatorChar + ""));
                packageDirectoryBase.mkdirs();

                gen.setFilenameSuffix(targetClassSuffix);
                gen.setTemplate(template);
                gen.setTemplateFile(templateFileFile);
                gen.writeAll(packageDirectoryBase, null, packageBase);

            }

        } catch (Exception e) {
            log("Caught exception: " + e.getClass().getName() + " [" + e.getMessage() + "]", 1);
            e.printStackTrace();
            throw new BuildException("Failed to generate file(s)", e);
        }

        cleanup();

    } catch (Exception e) {
        if (e instanceof BuildException) {
            throw (BuildException) e;
        }
        log("Caught exception: " + e.getClass().getName() + " [" + e.getMessage() + "]", 1);
        e.printStackTrace();
        throw new BuildException("Error processing " + getTaskName() + " task.", e);
    }
}

From source file:fr.paris.lutece.plugins.document.service.docsearch.DocSearchService.java

/**
 * Indexes documents for searching.
 * @param bCreate tells whether to perform a total indexing rather than an incremental one (total = true)
 * @return indexing logs
 */
public String processIndexing(boolean bCreate) {
    StringBuilder sbLogs = new StringBuilder();

    IndexWriter writer = null;
    boolean bCreateIndex = bCreate;

    try {
        sbLogs.append("\r\nIndexing all contents ...\r\n");

        Directory dir = NIOFSDirectory.open(new File(_strIndex));

        if (!DirectoryReader.indexExists(dir)) { //init index
            bCreateIndex = true;
        }

        Date start = new Date();
        IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_46, _analyzer);

        if (bCreateIndex) {
            conf.setOpenMode(OpenMode.CREATE);
        } else {
            conf.setOpenMode(OpenMode.APPEND);
        }

        writer = new IndexWriter(dir, conf);

        if (!bCreateIndex) {
            //incremental indexing

            //add all document which must be add
            for (IndexerAction action : getAllIndexerActionByTask(IndexerAction.TASK_CREATE)) {
                try {
                    ArrayList<Integer> luceneDocumentId = new ArrayList<Integer>();
                    luceneDocumentId.add(action.getIdDocument());

                    List<org.apache.lucene.document.Document> luceneDocument = _indexer
                            .getDocuments(luceneDocumentId);

                    if ((luceneDocument != null) && (luceneDocument.size() > 0)) {
                        Iterator<org.apache.lucene.document.Document> it = luceneDocument.iterator();

                        while (it.hasNext()) {
                            org.apache.lucene.document.Document doc = it.next();
                            writer.addDocument(doc);
                            sbLogs.append("Adding ");
                            sbLogs.append(doc.get(DocSearchItem.FIELD_TYPE));
                            sbLogs.append(" #");
                            sbLogs.append(doc.get(DocSearchItem.FIELD_UID));
                            sbLogs.append(" - ");
                            sbLogs.append(doc.get(DocSearchItem.FIELD_TITLE));
                            sbLogs.append("\r\n");
                        }
                    }
                } catch (IOException e) {
                    sbLogs.append("Error durign document indexation parsing.");
                    sbLogs.append("\r\n");
                }

                removeIndexerAction(action.getIdAction());
            }

            //Update all document which must be update
            for (IndexerAction action : getAllIndexerActionByTask(IndexerAction.TASK_MODIFY)) {
                try {
                    ArrayList<Integer> luceneDocumentId = new ArrayList<Integer>();
                    luceneDocumentId.add(action.getIdDocument());

                    List<org.apache.lucene.document.Document> luceneDocument = _indexer
                            .getDocuments(luceneDocumentId);

                    if ((luceneDocument != null) && (luceneDocument.size() > 0)) {
                        Iterator<org.apache.lucene.document.Document> it = luceneDocument.iterator();

                        while (it.hasNext()) {
                            org.apache.lucene.document.Document doc = it.next();
                            writer.updateDocument(
                                    new Term(DocSearchItem.FIELD_UID, Integer.toString(action.getIdDocument())),
                                    doc);
                            sbLogs.append("Updating ");
                            sbLogs.append(doc.get(DocSearchItem.FIELD_TYPE));
                            sbLogs.append(" #");
                            sbLogs.append(doc.get(DocSearchItem.FIELD_UID));
                            sbLogs.append(" - ");
                            sbLogs.append(doc.get(DocSearchItem.FIELD_TITLE));
                            sbLogs.append("\r\n");
                        }
                    }
                } catch (IOException e) {
                    sbLogs.append("Error durign document indexation parsing.");
                    sbLogs.append("\r\n");
                }

                removeIndexerAction(action.getIdAction());
            }

            //delete all document which must be delete
            for (IndexerAction action : getAllIndexerActionByTask(IndexerAction.TASK_DELETE)) {
                writer.deleteDocuments(
                        new Term(DocSearchItem.FIELD_UID, Integer.toString(action.getIdDocument())));
                sbLogs.append("Deleting ");
                sbLogs.append(" #");
                sbLogs.append(action.getIdDocument());
                sbLogs.append("\r\n");

                removeIndexerAction(action.getIdAction());
            }
        } else {
            //delete all incremental action
            removeAllIndexerAction();

            Collection<Integer> listIdDocuments = DocumentHome.findAllPrimaryKeys();
            ArrayList<Integer> luceneDocumentId;

            for (Integer nIdDocument : listIdDocuments) {
                try {
                    luceneDocumentId = new ArrayList<Integer>();
                    luceneDocumentId.add(nIdDocument);

                    List<Document> listDocuments = _indexer.getDocuments(luceneDocumentId);

                    for (Document doc : listDocuments) {
                        writer.addDocument(doc);
                        sbLogs.append("Indexing ");
                        sbLogs.append(doc.get(DocSearchItem.FIELD_TYPE));
                        sbLogs.append(" #");
                        sbLogs.append(doc.get(DocSearchItem.FIELD_UID));
                        sbLogs.append(" - ");
                        sbLogs.append(doc.get(DocSearchItem.FIELD_TITLE));
                        sbLogs.append("\r\n");
                    }
                } catch (IOException e) {
                    sbLogs.append("Error durign document indexation parsing.");
                    sbLogs.append("\r\n");
                }
            }
        }

        Date end = new Date();
        sbLogs.append("Duration of the treatment : ");
        sbLogs.append(end.getTime() - start.getTime());
        sbLogs.append(" milliseconds\r\n");
    } catch (Exception e) {
        sbLogs.append(" caught a ");
        sbLogs.append(e.getClass());
        sbLogs.append("\n with message: ");
        sbLogs.append(e.getMessage());
        sbLogs.append("\r\n");
        AppLogService.error("Indexing error : " + e.getMessage(), e);
    } finally {
        try {
            if (writer != null) {
                writer.close();
            }
        } catch (IOException e) {
            AppLogService.error(e.getMessage(), e);
        }
    }

    return sbLogs.toString();
}

From source file:org.apache.hadoop.hdfs.nfs.nfs3.RpcProgramNfs3.java

@Override
public READ3Response read(XDR xdr, SecurityHandler securityHandler, InetAddress client) {
    READ3Response response = new READ3Response(Nfs3Status.NFS3_OK);
    final String userName = securityHandler.getUser();

    if (!checkAccessPrivilege(client, AccessPrivilege.READ_ONLY)) {
        response.setStatus(Nfs3Status.NFS3ERR_ACCES);
        return response;
    }

    DFSClient dfsClient = clientCache.getDfsClient(userName);
    if (dfsClient == null) {
        response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
        return response;
    }

    READ3Request request = null;

    try {
        request = new READ3Request(xdr);
    } catch (IOException e) {
        LOG.error("Invalid READ request");
        return new READ3Response(Nfs3Status.NFS3ERR_INVAL);
    }

    long offset = request.getOffset();
    int count = request.getCount();

    FileHandle handle = request.getHandle();
    if (LOG.isDebugEnabled()) {
        LOG.debug("NFS READ fileId: " + handle.getFileId() + " offset: " + offset + " count: " + count);
    }

    Nfs3FileAttributes attrs;
    boolean eof;
    if (count == 0) {
        // Only do access check.
        try {
            // Don't read from cache. Client may not have read permission.
            attrs = Nfs3Utils.getFileAttr(dfsClient, Nfs3Utils.getFileIdPath(handle), iug);
        } catch (IOException e) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Get error accessing file, fileId:" + handle.getFileId(), e);
            }
            return new READ3Response(Nfs3Status.NFS3ERR_IO);
        }
        if (attrs == null) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Can't get path for fileId:" + handle.getFileId());
            }
            return new READ3Response(Nfs3Status.NFS3ERR_NOENT);
        }
        int access = Nfs3Utils.getAccessRightsForUserGroup(securityHandler.getUid(), securityHandler.getGid(),
                attrs);
        if ((access & Nfs3Constant.ACCESS3_READ) != 0) {
            eof = offset >= attrs.getSize();
            return new READ3Response(Nfs3Status.NFS3_OK, attrs, 0, eof, ByteBuffer.wrap(new byte[0]));
        } else {
            return new READ3Response(Nfs3Status.NFS3ERR_ACCES);
        }
    }

    // In case there is buffered data for the same file, flush it. This can be
    // optimized later by reading from the cache.
    int ret = writeManager.commitBeforeRead(dfsClient, handle, offset + count);
    if (ret != Nfs3Status.NFS3_OK) {
        LOG.warn("commitBeforeRead didn't succeed with ret=" + ret + ". Read may not get most recent data.");
    }

    try {
        int rtmax = config.getInt(Nfs3Constant.MAX_READ_TRANSFER_SIZE_KEY,
                Nfs3Constant.MAX_READ_TRANSFER_SIZE_DEFAULT);
        int buffSize = Math.min(rtmax, count);
        byte[] readbuffer = new byte[buffSize];

        int readCount = 0;
        /**
         * Retry exactly once because the DFSInputStream can be stale.
         */
        for (int i = 0; i < 1; ++i) {
            FSDataInputStream fis = clientCache.getDfsInputStream(userName, Nfs3Utils.getFileIdPath(handle));

            try {
                readCount = fis.read(offset, readbuffer, 0, count);
            } catch (IOException e) {
                // TODO: A cleaner way is to throw a new type of exception
                // which requires incompatible changes.
                if ("Stream closed".equals(e.getMessage())) {
                    clientCache.invalidateDfsInputStream(userName, Nfs3Utils.getFileIdPath(handle));
                    continue;
                } else {
                    throw e;
                }
            }
        }

        attrs = Nfs3Utils.getFileAttr(dfsClient, Nfs3Utils.getFileIdPath(handle), iug);
        if (readCount < count) {
            LOG.info("Partical read. Asked offset:" + offset + " count:" + count + " and read back:" + readCount
                    + "file size:" + attrs.getSize());
        }
        // HDFS returns -1 for read beyond file size.
        if (readCount < 0) {
            readCount = 0;
        }
        eof = (offset + readCount) >= attrs.getSize();
        return new READ3Response(Nfs3Status.NFS3_OK, attrs, readCount, eof, ByteBuffer.wrap(readbuffer));

    } catch (IOException e) {
        LOG.warn("Read error: " + e.getClass() + " offset: " + offset + " count: " + count, e);
        return new READ3Response(Nfs3Status.NFS3ERR_IO);
    }
}

From source file:com.buaa.cfs.nfs3.RpcProgramNfs3.java

@VisibleForTesting
READ3Response read(XDR xdr, SecurityHandler securityHandler, SocketAddress remoteAddress) {
    READ3Response response = new READ3Response(Nfs3Status.NFS3_OK);
    final String userName = securityHandler.getUser();

    if (!checkAccessPrivilege(remoteAddress, AccessPrivilege.READ_ONLY)) {
        response.setStatus(Nfs3Status.NFS3ERR_ACCES);
        return response;
    }

    DFSClient dfsClient = clientCache.getDfsClient(userName);
    if (dfsClient == null) {
        response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
        return response;
    }

    READ3Request request;

    try {
        request = READ3Request.deserialize(xdr);
    } catch (IOException e) {
        LOG.error("Invalid READ request");
        return new READ3Response(Nfs3Status.NFS3ERR_INVAL);
    }

    long offset = request.getOffset();
    int count = request.getCount();

    FileHandle handle = request.getHandle();
    if (LOG.isDebugEnabled()) {
        LOG.debug("NFS READ fileId: " + handle.getFileId() + " offset: " + offset + " count: " + count
                + " client: " + remoteAddress);
    }

    Nfs3FileAttributes attrs;
    boolean eof;
    if (count == 0) {
        // Only do access check.
        try {
            // Don't read from cache. Client may not have read permission.
            attrs = Nfs3Utils.getFileAttr(dfsClient, Nfs3Utils.getFileIdPath(handle), iug);
        } catch (IOException e) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Get error accessing file, fileId: " + handle.getFileId(), e);
            }
            return new READ3Response(Nfs3Status.NFS3ERR_IO);
        }
        if (attrs == null) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Can't get path for fileId: " + handle.getFileId());
            }
            return new READ3Response(Nfs3Status.NFS3ERR_NOENT);
        }
        int access = Nfs3Utils.getAccessRightsForUserGroup(securityHandler.getUid(), securityHandler.getGid(),
                securityHandler.getAuxGids(), attrs);
        if ((access & Nfs3Constant.ACCESS3_READ) != 0) {
            eof = offset >= attrs.getSize();
            return new READ3Response(Nfs3Status.NFS3_OK, attrs, 0, eof, ByteBuffer.wrap(new byte[0]));
        } else {
            return new READ3Response(Nfs3Status.NFS3ERR_ACCES);
        }
    }

    // In case there is buffered data for the same file, flush it. This can be
    // optimized later by reading from the cache.
    int ret = writeManager.commitBeforeRead(dfsClient, handle, offset + count);
    if (ret != Nfs3Status.NFS3_OK) {
        LOG.warn("commitBeforeRead didn't succeed with ret=" + ret + ". Read may not get most recent data.");
    }

    try {
        int rtmax = config.getInt(NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_KEY,
                NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_DEFAULT);
        int buffSize = Math.min(rtmax, count);
        byte[] readbuffer = new byte[buffSize];

        int readCount = 0;
        /**
         * Retry exactly once because the DFSInputStream can be stale.
         */
        for (int i = 0; i < 1; ++i) {
            FSDataInputStream fis = clientCache.getDfsInputStream(userName, Nfs3Utils.getFileIdPath(handle));

            if (fis == null) {
                return new READ3Response(Nfs3Status.NFS3ERR_ACCES);
            }

            try {
                readCount = fis.read(offset, readbuffer, 0, count);
                //                    metrics.incrBytesRead(readCount);
            } catch (IOException e) {
                // TODO: A cleaner way is to throw a new type of exception
                // which requires incompatible changes.
                if (e.getMessage().equals("Stream closed")) {
                    clientCache.invalidateDfsInputStream(userName, Nfs3Utils.getFileIdPath(handle));
                    continue;
                } else {
                    throw e;
                }
            }
        }

        attrs = Nfs3Utils.getFileAttr(dfsClient, Nfs3Utils.getFileIdPath(handle), iug);
        if (readCount < count) {
            LOG.info("Partical read. Asked offset: " + offset + " count: " + count + " and read back: "
                    + readCount + " file size: " + attrs.getSize());
        }
        // HDFS returns -1 for read beyond file size.
        if (readCount < 0) {
            readCount = 0;
        }
        eof = (offset + readCount) >= attrs.getSize();
        return new READ3Response(Nfs3Status.NFS3_OK, attrs, readCount, eof, ByteBuffer.wrap(readbuffer));

    } catch (IOException e) {
        LOG.warn("Read error: " + e.getClass() + " offset: " + offset + " count: " + count, e);
        int status = mapErrorStatus(e);
        return new READ3Response(status);
    }
}

From source file:org.apache.manifoldcf.crawler.connectors.webcrawler.WebcrawlerConnector.java

protected void processDocument(IProcessActivity activities, String documentIdentifier, String versionString,
        boolean indexDocument, Map<String, Set<String>> metaHash, String[] acls, DocumentURLFilter filter)
        throws ManifoldCFException, ServiceInterruption {
    // Consider this document for ingestion.
    String errorCode = null;
    String errorDesc = null;
    Long fileLengthLong = null;
    long startTime = System.currentTimeMillis();

    try {
        // We can exclude it if it does not seem to be a kind of document that the ingestion system knows
        // about.

        if (!indexDocument) {
            errorCode = "CONTENTNOTINDEXABLE";
            errorDesc = "Content not indexable";
            activities.noDocument(documentIdentifier, versionString);
            return;
        }

        int responseCode = cache.getResponseCode(documentIdentifier);
        if (responseCode != 200) {
            if (Logging.connectors.isDebugEnabled())
                Logging.connectors.debug("Web: For document '" + documentIdentifier
                        + "', not indexing because response code not indexable: " + responseCode);
            errorCode = "RESPONSECODENOTINDEXABLE";
            errorDesc = "HTTP response code not indexable (" + responseCode + ")";
            activities.noDocument(documentIdentifier, versionString);
            return;
        }

        long dataLength = cache.getDataLength(documentIdentifier);
        if (!activities.checkLengthIndexable(dataLength)) {
            if (Logging.connectors.isDebugEnabled())
                Logging.connectors.debug("Web: For document '" + documentIdentifier
                        + "', not indexing because pipeline thinks length " + dataLength
                        + " is not acceptable");
            errorCode = activities.EXCLUDED_LENGTH;
            errorDesc = "Rejected due to length (" + dataLength + ")";
            activities.noDocument(documentIdentifier, versionString);
            return;
        }

        if (!activities.checkURLIndexable(documentIdentifier)) {
            if (Logging.connectors.isDebugEnabled())
                Logging.connectors.debug("Web: For document '" + documentIdentifier
                        + "', not indexing because output connector does not want URL");
            errorCode = activities.EXCLUDED_URL;
            errorDesc = "Rejected due to URL ('" + documentIdentifier + "')";
            activities.noDocument(documentIdentifier, versionString);
            return;
        }

        String ingestURL = filter.isDocumentIndexable(documentIdentifier);
        if (ingestURL == null) {
            if (Logging.connectors.isDebugEnabled())
                Logging.connectors.debug("Web: For document '" + documentIdentifier
                        + "', not indexing because document does not match web job constraints");
            errorCode = "JOBRESTRICTION";
            errorDesc = "Rejected because job excludes this URL ('" + documentIdentifier + "')";
            activities.noDocument(documentIdentifier, versionString);
            return;
        }

        // Check if it's a recognized content type
        String contentType = cache.getContentType(documentIdentifier);

        // Some sites have multiple content types.  We just look at the LAST one in that case.
        if (contentType != null) {
            String[] contentTypes = contentType.split(",");
            if (contentTypes.length > 0)
                contentType = contentTypes[contentTypes.length - 1].trim();
            else
                contentType = null;
        }

        if (contentType != null) {
            int pos = contentType.indexOf(";");
            if (pos != -1)
                contentType = contentType.substring(0, pos);
            contentType = contentType.trim();
        }

        if (!activities.checkMimeTypeIndexable(contentType)) {
            if (Logging.connectors.isDebugEnabled())
                Logging.connectors.debug("Web: For document '" + documentIdentifier
                        + "', not indexing because output connector does not want mime type '" + contentType
                        + "'");
            errorCode = activities.EXCLUDED_MIMETYPE;
            errorDesc = "Rejected because of mime type (" + contentType + ")";
            activities.noDocument(documentIdentifier, versionString);
            return;
        }

        // Ingest the document
        if (Logging.connectors.isDebugEnabled())
            Logging.connectors.debug("WEB: Decided to ingest '" + documentIdentifier + "'");

        RepositoryDocument rd = new RepositoryDocument();

        // Set the file name
        String fileName = "";
        try {
            fileName = documentIdentifiertoFileName(documentIdentifier);
        } catch (URISyntaxException e1) {
            fileName = "";
        }
        if (fileName.length() > 0) {
            rd.setFileName(fileName);
        }

        // Set the content type
        String mimeType = cache.getContentType(documentIdentifier);
        if (mimeType != null)
            rd.setMimeType(mimeType);

        // Turn into acls and add into description
        String[] denyAcls;
        if (acls == null)
            denyAcls = null;
        else {
            if (acls.length > 0)
                denyAcls = new String[] { defaultAuthorityDenyToken };
            else
                denyAcls = new String[0];
        }

        if (acls != null && denyAcls != null)
            rd.setSecurity(RepositoryDocument.SECURITY_TYPE_DOCUMENT, acls, denyAcls);

        // Grab metadata
        for (String key : metaHash.keySet()) {
            Set<String> metaList = metaHash.get(key);
            String[] values = new String[metaList.size()];
            int k = 0;
            for (String value : metaList) {
                values[k++] = value;
            }
            rd.addField(key, values);
        }

        InputStream is = cache.getData(documentIdentifier);

        if (is != null) {
            try {
                rd.setBinary(is, dataLength);
                try {
                    activities.ingestDocumentWithException(documentIdentifier, versionString, ingestURL, rd);
                    errorCode = "OK";
                    fileLengthLong = new Long(dataLength);
                } catch (IOException e) {
                    errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
                    errorDesc = e.getMessage();
                    handleIOException(e, "reading data");
                }
            } finally {
                try {
                    is.close();
                } catch (IOException e) {
                    errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
                    errorDesc = e.getMessage();
                    handleIOException(e, "closing stream");
                }
            }
        } else
            Logging.connectors.error(
                    "WEB: Expected a cached document for '" + documentIdentifier + "', but none present!");

    } catch (ManifoldCFException e) {
        if (e.getErrorCode() == ManifoldCFException.INTERRUPTED)
            errorCode = null;
        throw e;
    } finally {
        if (errorCode != null)
            activities.recordActivity(new Long(startTime), ACTIVITY_PROCESS, fileLengthLong, documentIdentifier,
                    errorCode, errorDesc, null);
    }

}

From source file:fr.certu.chouette.gui.command.Command.java

/**
 * import command (-fileFormat is used when the file extension does not reflect the actual format):
 * -c import -o line -format XXX -inputFile YYYY [-fileFormat TTT] -importId ZZZ ...
 * @param manager
 * @param parameters
 * @return 0 on success, 1 on failure
 */
private int executeImport(INeptuneManager<NeptuneIdentifiedObject> manager,
        Map<String, List<String>> parameters) {
    parameters.put("reportforsave", Arrays.asList(new String[] { "true" }));
    // parameters.put("validate",Arrays.asList(new String[]{"true"})); // force validation if possible

    GuiReport saveReport = new GuiReport("SAVE", Report.STATE.OK);
    Report importReport = null;

    List<Report> reports = new ArrayList<Report>();
    // check if import exists and accept unzip before call
    String format = getSimpleString(parameters, "format");
    String inputFile = getSimpleString(parameters, "inputfile");
    // String fileFormat = getSimpleString(parameters,"fileformat","");
    Long importId = Long.valueOf(getSimpleString(parameters, "importid"));
    if (!importDao.exists(importId)) {
        // error import not found
        logger.error("import not found " + importId);
        return 1;
    }
    GuiImport guiImport = importDao.get(importId);
    logger.info("Export data for export id " + importId);
    logger.info("  type : " + guiImport.getType());
    logger.info("  options : " + guiImport.getOptions());

    Referential referential = referentialDao.get(guiImport.getReferentialId());
    logger.info("Referential " + guiImport.getReferentialId());
    logger.info("  name : " + referential.getName());
    logger.info("  slug : " + referential.getSlug());
    logger.info("  projection type : " + referential.getProjectionType());

    String projectionType = null;
    if (referential.getProjectionType() != null && !referential.getProjectionType().isEmpty()) {
        logger.info("  projection type for export: " + referential.getProjectionType());
        projectionType = referential.getProjectionType();
        parameters.put("srid", Arrays.asList(new String[] { projectionType }));
    }
    // set projection for import (inactive if not set)
    geographicService.switchProjection(projectionType);

    int beanCount = 0;

    boolean zipped = (inputFile.toLowerCase().endsWith(".zip"));

    try {
        List<FormatDescription> formats = manager.getImportFormats(null);
        FormatDescription description = null;

        for (FormatDescription formatDescription : formats) {
            if (formatDescription.getName().equalsIgnoreCase(format)) {
                description = formatDescription;
                break;
            }
        }
        if (description == null) {
            throw new IllegalArgumentException("format " + format + " unavailable");
        }
        List<String> suffixes = new ArrayList<String>();
        for (ParameterDescription desc : description.getParameterDescriptions()) {
            if (desc.getName().equalsIgnoreCase("inputfile")) {
                suffixes = desc.getAllowedExtensions();
                break;
            }
        }
        List<ParameterValue> values = populateParameters(description, parameters, "inputfile", "fileformat");
        if (zipped && description.isUnzipAllowed()) {
            SimpleParameterValue inputFileParam = new SimpleParameterValue("inputFile");
            values.add(inputFileParam);
            // unzip files, import and save contents
            ZipFile zip = null;
            File temp = null;
            File tempRep = new File(FileUtils.getTempDirectory(), "massImport" + importId);
            if (!tempRep.exists())
                tempRep.mkdirs();
            try {

                zip = new ZipFile(inputFile);
                for (Enumeration<? extends ZipEntry> entries = zip.entries(); entries.hasMoreElements();) {
                    ZipEntry entry = entries.nextElement();

                    if (entry.isDirectory()) {
                        File dir = new File(tempRep, entry.getName());
                        dir.mkdirs();
                        continue;
                    }
                    if (!FilenameUtils.isExtension(entry.getName().toLowerCase(), suffixes)) {
                        logger.error("entry " + entry.getName() + " ignored, unknown extension");
                        continue;
                    }
                    InputStream stream = null;
                    try {
                        stream = zip.getInputStream(entry);
                    } catch (IOException e) {
                        logger.error("entry " + entry.getName() + " cannot read");
                        continue;
                    }
                    byte[] bytes = new byte[4096];
                    int len = stream.read(bytes);
                    temp = new File(tempRep, entry.getName());
                    FileOutputStream fos = new FileOutputStream(temp);
                    while (len > 0) {
                        fos.write(bytes, 0, len);
                        len = stream.read(bytes);
                    }
                    fos.close();

                    // import
                    if (verbose)
                        System.out.println("import file " + entry.getName());
                    logger.info("import file " + entry.getName());
                    inputFileParam.setFilepathValue(temp.getAbsolutePath());
                    ReportHolder holder = new ReportHolder();
                    List<NeptuneIdentifiedObject> beans = manager.doImport(null, format, values, holder);
                    if (holder.getReport() != null) {
                        if (importReport == null) {
                            importReport = holder.getReport();
                            reports.add(importReport);
                        } else {
                            importReport.addAll(holder.getReport().getItems());
                        }

                    }
                    // save
                    if (beans != null && !beans.isEmpty()) {

                        for (NeptuneIdentifiedObject bean : beans) {
                            if (verbose) {
                                System.out.println("save " + bean.getName() + " (" + bean.getObjectId() + ")");
                            }
                            logger.info("save " + bean.getName() + " (" + bean.getObjectId() + ")");
                            // check all stopareas
                            if (bean instanceof Line) {
                                Line line = (Line) bean;
                                checkProjection(line);
                            }
                        }
                        try {
                            manager.saveAll(null, beans, true, true);
                            for (NeptuneIdentifiedObject bean : beans) {
                                GuiReportItem item = new GuiReportItem("SAVE_OK", Report.STATE.OK,
                                        bean.getName());
                                importReport.addItem(item);
                                beanCount++;
                            }
                        } catch (Exception e) {
                            logger.error("fail to save data :" + e.getMessage(), e);
                            for (NeptuneIdentifiedObject bean : beans) {
                                GuiReportItem item = new GuiReportItem("SAVE_ERROR", Report.STATE.ERROR,
                                        bean.getName(), filter_chars(e.getMessage()));
                                importReport.addItem(item);
                            }
                        }
                    }
                    temp.delete();
                }
                try {
                    zip.close();
                } catch (IOException e) {
                    logger.info("cannot close zip file");
                }
            } catch (IOException e) {
                //reports.add(saveReport);
                System.out.println("import failed " + e.getMessage());
                logger.error("import failed " + e.getMessage(), e);
                saveImportReports(importId, format, reports);
                return 1;
            } finally {
                try {
                    FileUtils.deleteDirectory(tempRep);
                } catch (IOException e) {
                    logger.warn("temporary directory " + tempRep.getAbsolutePath() + " could not be deleted");
                }
            }

        } else {
            SimpleParameterValue inputFileParam = new SimpleParameterValue("inputFile");
            inputFileParam.setFilepathValue(inputFile);
            values.add(inputFileParam);
            //            if (!fileFormat.isEmpty())
            //            {
            //               SimpleParameterValue fileFormatParam = new SimpleParameterValue("fileFormat");
            //               fileFormatParam.setStringValue(fileFormat);
            //               values.add(fileFormatParam);
            //            }
            // surround with try catch 
            ReportHolder holder = new ReportHolder();
            List<NeptuneIdentifiedObject> beans = manager.doImport(null, format, values, holder);
            if (holder.getReport() != null) {
                importReport = holder.getReport();
                reports.add(holder.getReport());
            }
            logger.info("imported Lines " + beans.size());

            for (NeptuneIdentifiedObject bean : beans) {
                if (bean instanceof Line) {
                    Line line = (Line) bean;
                    checkProjection(line);
                }
                List<NeptuneIdentifiedObject> oneBean = new ArrayList<NeptuneIdentifiedObject>();
                oneBean.add(bean);
                try {
                    logger.info("save  Line " + bean.getName());
                    manager.saveAll(null, oneBean, true, true);
                    GuiReportItem item = new GuiReportItem("SAVE_OK", Report.STATE.OK, bean.getName());
                    saveReport.addItem(item);
                    beanCount++;
                } catch (Exception e) {
                    logger.error("save failed " + e.getMessage(), e);
                    GuiReportItem item = new GuiReportItem("SAVE_ERROR", Report.STATE.ERROR, bean.getName(),
                            e.getMessage());
                    saveReport.addItem(item);
                }
            }
        }
    } catch (Exception e) {
        // fill report with error
        if (saveReport.getItems() != null && !saveReport.getItems().isEmpty())
            reports.add(saveReport);
        String msg = e.getMessage();
        if (msg == null)
            msg = e.getClass().getName();
        System.out.println("import failed " + msg);
        logger.error("import failed " + msg, e);
        GuiReport errorReport = new GuiReport("IMPORT_ERROR", Report.STATE.ERROR);
        GuiReportItem item = new GuiReportItem("EXCEPTION", Report.STATE.ERROR, msg);
        errorReport.addItem(item);
        reports.add(errorReport);
        saveImportReports(importId, format, reports);

        return 1;
    }
    if (saveReport.getItems() != null && !saveReport.getItems().isEmpty())
        reports.add(saveReport);
    saveImportReports(importId, format, reports);
    return (beanCount == 0 ? 1 : 0);

}