Example usage for java.lang.InterruptedException.toString()

Introduction

On this page you can find example usages of java.lang.InterruptedException.toString().

Prototype

public String toString() 

Document

Returns a short description of this throwable.
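
As a quick orientation before the project examples below, here is a minimal, self-contained sketch (not taken from any of the projects on this page) of what the returned string typically looks like: the exception's class name, followed by its detail message when one is present.

public class InterruptedToStringDemo {
    public static void main(String[] args) {
        try {
            Thread.currentThread().interrupt(); // set this thread's interrupt flag
            Thread.sleep(100);                  // sleep() observes the flag and throws
        } catch (InterruptedException e) {
            // Typically prints something like: java.lang.InterruptedException: sleep interrupted
            System.out.println(e.toString());
            Thread.currentThread().interrupt(); // restore the interrupt status for callers
        }
    }
}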

Usage

From source file:se.lu.nateko.edca.BackboneSvc.java

/** Start the Service's RotateAnimation. */
public void startAnimation() {
    //      Log.d(TAG, "startAnimation() called.");
    /* Start the animation in a separate thread to avoid blocking more crucial threads. */
    new Thread(new Runnable() {
        public void run() {
            try {
                Log.v(TAG, "Thread " + Thread.currentThread().getId() + " starting animation.");
                getAnimation().setRepeatCount(Animation.INFINITE); // Start the animation showing that a web communicating thread is active.
                unlockAnimation();
            } catch (InterruptedException e) {
                Log.w(TAG, "Thread " + Thread.currentThread().getId() + " interrupted. " + e.toString());
            }
        }
    }).start();
}

From source file:se.lu.nateko.edca.BackboneSvc.java

/** Stop the Service's RotateAnimation. */
public void stopAnimation() {
    //      Log.d(TAG, "stopAnimation() called.");
    /* Stop the animation in a separate thread to avoid blocking more crucial threads. */
    new Thread(new Runnable() {
        public void run() {
            try {
                Log.v(TAG, "Thread " + Thread.currentThread().getId() + " stopping animation.");
                getAnimation().setRepeatCount(0); // Stop the animation showing that a web communicating thread has finished.
                unlockAnimation();
            } catch (InterruptedException e) {
                Log.w(TAG, "Thread " + Thread.currentThread().getId() + " interrupted. " + e.toString());
            }
        }
    }).start();
}

From source file:org.apache.hadoop.mapred.util.Shell.java

/** Run a command */
private void runCommand() throws IOException {
    ProcessBuilder builder = new ProcessBuilder(getExecString());
    Timer timeOutTimer = null;
    ShellTimeoutTimerTask timeoutTimerTask = null;
    timedOut = new AtomicBoolean(false);
    completed = new AtomicBoolean(false);

    if (environment != null) {
        builder.environment().putAll(this.environment);
    }
    if (dir != null) {
        builder.directory(this.dir);
    }

    process = builder.start();
    if (timeOutInterval > 0) {
        timeOutTimer = new Timer();
        timeoutTimerTask = new ShellTimeoutTimerTask(this);
        //One time scheduling.
        timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
    }
    final BufferedReader errReader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
    BufferedReader inReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
    final StringBuffer errMsg = new StringBuffer();

    // read error and input streams as this would free up the buffers
    // free the error stream buffer
    Thread errThread = new Thread() {
        @Override
        public void run() {
            try {
                String line = errReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    errMsg.append(line);
                    errMsg.append(System.getProperty("line.separator"));
                    line = errReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the error stream", ioe);
            }
        }
    };
    try {
        errThread.start();
    } catch (IllegalStateException ise) {
    }
    try {
        parseExecResult(inReader); // parse the output
        // clear the input stream buffer
        String line = inReader.readLine();
        while (line != null) {
            line = inReader.readLine();
        }
        // wait for the process to finish and check the exit code
        exitCode = process.waitFor();
        try {
            // make sure that the error thread exits
            errThread.join();
        } catch (InterruptedException ie) {
            LOG.warn("Interrupted while reading the error stream", ie);
        }
        completed.set(true);
        //the timeout thread handling
        //taken care in finally block
        if (exitCode != 0) {
            throw new ExitCodeException(exitCode, errMsg.toString());
        }
    } catch (InterruptedException ie) {
        throw new IOException(ie.toString());
    } finally {
        if ((timeOutTimer != null) && !timedOut.get()) {
            timeOutTimer.cancel();
        }
        // close the input stream
        try {
            inReader.close();
        } catch (IOException ioe) {
            LOG.warn("Error while closing the input stream", ioe);
        }
        if (!completed.get()) {
            errThread.interrupt();
        }
        try {
            errReader.close();
        } catch (IOException ioe) {
            LOG.warn("Error while closing the error stream", ioe);
        }
        process.destroy();
        lastTime = System.currentTimeMillis();
    }
}

From source file:com.nubits.nubot.trading.wrappers.BterWrapper.java

@Override
public ApiResponse clearOrders(CurrencyPair pair) {
    // Since there is no API entry point for that, this call will iterate over the active orders and cancel them one by one.
    ApiResponse toReturn = new ApiResponse();
    boolean ok = true;

    ApiResponse activeOrdersResponse = getActiveOrders();
    if (activeOrdersResponse.isPositive()) {
        ArrayList<Order> orderList = (ArrayList<Order>) activeOrdersResponse.getResponseObject();
        for (int i = 0; i < orderList.size(); i++) {
            Order tempOrder = orderList.get(i);

            //Wait to avoid placing requests too fast
            try {
                Thread.sleep(1200);
            } catch (InterruptedException ex) {
                LOG.error(ex.toString());
            }

            ApiResponse deleteOrderResponse = cancelOrder(tempOrder.getId(), null);
            if (deleteOrderResponse.isPositive()) {
                boolean deleted = (boolean) deleteOrderResponse.getResponseObject();

                if (deleted) {
                    LOG.warn("Order " + tempOrder.getId() + " deleted succesfully");
                } else {
                    LOG.warn("Could not delete order " + tempOrder.getId() + "");
                    ok = false;
                }

            } else {
                LOG.error(deleteOrderResponse.getError().toString());
                toReturn.setError(deleteOrderResponse.getError());
                return toReturn;
            }
        }
        //Wait to avoid placing requests too fast
        try {
            Thread.sleep(800);
        } catch (InterruptedException ex) {
            LOG.error(ex.toString());
        }
        toReturn.setResponseObject(ok);
    } else {
        LOG.error(activeOrdersResponse.getError().toString());
        toReturn.setError(activeOrdersResponse.getError());
        return toReturn;
    }

    return toReturn;
}

From source file:org.pbccrc.zsls.utils.Shell.java

/** Run a command */
private void runCommand() throws IOException {
    ProcessBuilder builder = new ProcessBuilder(getExecString());
    Timer timeOutTimer = null;
    ShellTimeoutTimerTask timeoutTimerTask = null;
    timedOut = new AtomicBoolean(false);
    completed = new AtomicBoolean(false);

    if (environment != null) {
        builder.environment().putAll(this.environment);
    }
    if (dir != null) {
        builder.directory(this.dir);
    }

    builder.redirectErrorStream(redirectErrorStream);

    if (Shell.WINDOWS) {
        synchronized (WindowsProcessLaunchLock) {
            // To workaround the race condition issue with child processes
            // inheriting unintended handles during process launch that can
            // lead to hangs on reading output and error streams, we
            // serialize process creation. More info available at:
            // http://support.microsoft.com/kb/315939
            process = builder.start();
        }
    } else {
        process = builder.start();
    }

    if (timeOutInterval > 0) {
        timeOutTimer = new Timer("Shell command timeout");
        timeoutTimerTask = new ShellTimeoutTimerTask(this);
        //One time scheduling.
        timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
    }
    final BufferedReader errReader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
    BufferedReader inReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
    final StringBuffer errMsg = new StringBuffer();

    // read error and input streams as this would free up the buffers
    // free the error stream buffer
    Thread errThread = new Thread() {
        @Override
        public void run() {
            boolean overErrMsg = false;
            try {
                String line = errReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    if (!overErrMsg) {
                        if (line.length() + errMsg.length() > ERR_MSG_BUFF_SIZE)
                            overErrMsg = true;
                        else {
                            errMsg.append(line);
                            errMsg.append(System.getProperty("line.separator"));
                        }
                    }
                    line = errReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the error stream", ioe);
            }
        }
    };
    try {
        errThread.start();
    } catch (IllegalStateException ise) {
    }
    try {
        parseExecResult(inReader); // parse the output
        // clear the input stream buffer
        String line = inReader.readLine();
        while (line != null) {
            line = inReader.readLine();
        }
        // wait for the process to finish and check the exit code
        exitCode = process.waitFor();
        // make sure that the error thread exits
        joinThread(errThread);
        completed.set(true);
        //the timeout thread handling
        //taken care in finally block
        if (exitCode != 0) {
            throw new ExitCodeException(exitCode, errMsg.toString());
        }
    } catch (InterruptedException ie) {
        throw new IOException(ie.toString());
    } finally {
        if (timeOutTimer != null) {
            timeOutTimer.cancel();
        }
        // close the input stream
        try {
            // JDK 7 tries to automatically drain the input streams for us
            // when the process exits, but since close is not synchronized,
            // it creates a race if we close the stream first and the same
            // fd is recycled.  the stream draining thread will attempt to
            // drain that fd!!  it may block, OOM, or cause bizarre behavior
            // see: https://bugs.openjdk.java.net/browse/JDK-8024521
            //      issue is fixed in build 7u60
            InputStream stdout = process.getInputStream();
            synchronized (stdout) {
                inReader.close();
            }
        } catch (IOException ioe) {
            LOG.warn("Error while closing the input stream", ioe);
        }
        if (!completed.get()) {
            errThread.interrupt();
            joinThread(errThread);
        }
        try {
            InputStream stderr = process.getErrorStream();
            synchronized (stderr) {
                errReader.close();
            }
        } catch (IOException ioe) {
            LOG.warn("Error while closing the error stream", ioe);
        }
        process.destroy();
        lastTime = clock.getTime();
    }
}

From source file:org.pentaho.di.job.entries.shell.JobEntryShell.java

private void executeShell(Result result, List<RowMetaAndData> cmdRows, String[] args) {
    FileObject fileObject = null;
    String realScript = null;
    FileObject tempFile = null;

    try {
        // What's the exact command?
        String[] base = null;
        List<String> cmds = new ArrayList<String>();

        if (log.isBasic()) {
            logBasic(BaseMessages.getString(PKG, "JobShell.RunningOn", Const.getOS()));
        }

        if (insertScript) {
            realScript = environmentSubstitute(script);
        } else {
            String realFilename = environmentSubstitute(getFilename());
            fileObject = KettleVFS.getFileObject(realFilename, this);
        }

        if (Const.getOS().equals("Windows 95")) {
            base = new String[] { "command.com", "/C" };
            if (insertScript) {
                tempFile = KettleVFS.createTempFile("kettle", "shell.bat", environmentSubstitute(workDirectory),
                        this);
                fileObject = createTemporaryShellFile(tempFile, realScript);
            }
        } else if (Const.getOS().startsWith("Windows")) {
            base = new String[] { "cmd.exe", "/C" };
            if (insertScript) {
                tempFile = KettleVFS.createTempFile("kettle", "shell.bat", environmentSubstitute(workDirectory),
                        this);
                fileObject = createTemporaryShellFile(tempFile, realScript);
            }
        } else {
            if (insertScript) {
                tempFile = KettleVFS.createTempFile("kettle", "shell", environmentSubstitute(workDirectory),
                        this);
                fileObject = createTemporaryShellFile(tempFile, realScript);
            }
            base = new String[] { KettleVFS.getFilename(fileObject) };
        }

        // Construct the arguments...
        if (argFromPrevious && cmdRows != null) {
            // Add the base command...
            for (int i = 0; i < base.length; i++) {
                cmds.add(base[i]);
            }

            if (Const.getOS().equals("Windows 95") || Const.getOS().startsWith("Windows")) {
                // for windows all arguments including the command itself
                // need to be
                // included in 1 argument to cmd/command.

                StringBuffer cmdline = new StringBuffer(300);

                cmdline.append('"');
                cmdline.append(Const.optionallyQuoteStringByOS(KettleVFS.getFilename(fileObject)));
                // Add the arguments from previous results...
                for (int i = 0; i < cmdRows.size(); i++) {
                    // Normally just one row, but once in a while to remain compatible we have multiple.

                    RowMetaAndData r = cmdRows.get(i);
                    for (int j = 0; j < r.size(); j++) {
                        cmdline.append(' ');
                        cmdline.append(Const.optionallyQuoteStringByOS(r.getString(j, null)));
                    }
                }
                cmdline.append('"');
                cmds.add(cmdline.toString());
            } else {
                // Add the arguments from previous results...
                for (int i = 0; i < cmdRows.size(); i++) {
                    // Normally just one row, but once in a while to remain compatible we have multiple.

                    RowMetaAndData r = cmdRows.get(i);
                    for (int j = 0; j < r.size(); j++) {
                        cmds.add(Const.optionallyQuoteStringByOS(r.getString(j, null)));
                    }
                }
            }
        } else if (args != null) {
            // Add the base command...
            for (int i = 0; i < base.length; i++) {
                cmds.add(base[i]);
            }

            if (Const.getOS().equals("Windows 95") || Const.getOS().startsWith("Windows")) {
                // for windows all arguments including the command itself
                // need to be
                // included in 1 argument to cmd/command.

                StringBuffer cmdline = new StringBuffer(300);

                cmdline.append('"');
                cmdline.append(Const.optionallyQuoteStringByOS(KettleVFS.getFilename(fileObject)));

                for (int i = 0; i < args.length; i++) {
                    cmdline.append(' ');
                    cmdline.append(Const.optionallyQuoteStringByOS(args[i]));
                }
                cmdline.append('"');
                cmds.add(cmdline.toString());
            } else {
                for (int i = 0; i < args.length; i++) {
                    cmds.add(args[i]);
                }
            }
        }

        StringBuffer command = new StringBuffer();

        Iterator<String> it = cmds.iterator();
        boolean first = true;
        while (it.hasNext()) {
            if (!first) {
                command.append(' ');
            } else {
                first = false;
            }
            command.append(it.next());
        }
        if (log.isBasic()) {
            logBasic(BaseMessages.getString(PKG, "JobShell.ExecCommand", command.toString()));
        }

        // Build the environment variable list...
        ProcessBuilder procBuilder = new ProcessBuilder(cmds);
        Map<String, String> env = procBuilder.environment();
        String[] variables = listVariables();
        for (int i = 0; i < variables.length; i++) {
            env.put(variables[i], getVariable(variables[i]));
        }

        if (getWorkDirectory() != null && !Const.isEmpty(Const.rtrim(getWorkDirectory()))) {
            String vfsFilename = environmentSubstitute(getWorkDirectory());
            File file = new File(KettleVFS.getFilename(KettleVFS.getFileObject(vfsFilename, this)));
            procBuilder.directory(file);
        }
        Process proc = procBuilder.start();

        // any error message?
        StreamLogger errorLogger = new StreamLogger(log, proc.getErrorStream(), "(stderr)", true);

        // any output?
        StreamLogger outputLogger = new StreamLogger(log, proc.getInputStream(), "(stdout)");

        // kick them off
        Thread errorLoggerThread = new Thread(errorLogger);
        errorLoggerThread.start();
        Thread outputLoggerThread = new Thread(outputLogger);
        outputLoggerThread.start();

        proc.waitFor();
        if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "JobShell.CommandFinished", command.toString()));
        }

        // What's the exit status?
        result.setExitStatus(proc.exitValue());
        if (result.getExitStatus() != 0) {
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobShell.ExitStatus",
                        environmentSubstitute(getFilename()), "" + result.getExitStatus()));
            }

            result.setNrErrors(1);
        }

        // wait until loggers read all data from stdout and stderr
        errorLoggerThread.join();
        outputLoggerThread.join();

        // close the streams
        // otherwise you get "Too many open files, java.io.IOException" after a lot of iterations
        proc.getErrorStream().close();
        proc.getOutputStream().close();

    } catch (IOException ioe) {
        logError(BaseMessages.getString(PKG, "JobShell.ErrorRunningShell", environmentSubstitute(getFilename()),
                ioe.toString()), ioe);
        result.setNrErrors(1);
    } catch (InterruptedException ie) {
        logError(BaseMessages.getString(PKG, "JobShell.Shellinterupted", environmentSubstitute(getFilename()),
                ie.toString()), ie);
        result.setNrErrors(1);
    } catch (Exception e) {
        logError(BaseMessages.getString(PKG, "JobShell.UnexpectedError", environmentSubstitute(getFilename()),
                e.toString()), e);
        result.setNrErrors(1);
    } finally {
        // If we created a temporary file, remove it...
        //
        if (tempFile != null) {
            try {
                tempFile.delete();
            } catch (Exception e) {
                BaseMessages.getString(PKG, "JobShell.UnexpectedError", tempFile.toString(), e.toString());
            }
        }
    }

    if (result.getNrErrors() > 0) {
        result.setResult(false);
    } else {
        result.setResult(true);
    }
}

From source file:com.chinamobile.bcbsp.pipes.TaskHandler.java

@Override
public boolean saveResult(String vertex) {
    try {
        try {
            String vertexEdge;

            // String path = "/home/bcbsp/vertex.txt";
            //File filename = new File(path);
            // filename.createNewFile();
            // RandomAccessFile mm = null;

            // mm = new RandomAccessFile(filename, "rw");
            //  mm.writeBytes(output.toString());
            // mm.writeBytes("vertexEdge = " + vertex);
            vertexEdge = vertex;
            output.write(new Text(vertex));
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    } catch (IOException e) {
        LOG.error("Exception has been catched in BSPStaff--saveResult !", e);
        BSPConfiguration conf = new BSPConfiguration();
        if (staff.getRecoveryTimes() < conf.getInt(Constants.BC_BSP_JOB_RECOVERY_ATTEMPT_MAX, 0)) {
            staff.recovery(job, staff, workerAgent);
        } else {
            workerAgent.setStaffStatus(sid, Constants.SATAFF_STATUS.FAULT,
                    new Fault(Fault.Type.DISK, Fault.Level.INDETERMINATE,
                            workerAgent.getWorkerManagerName(job.getJobID(), sid), e.toString(), job.toString(),
                            sid.toString()),
                    2);
            LOG.info("=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=" + "*=*=*=*=*=*");
            LOG.error("Other Exception has happened and been catched, "
                    + "the exception will be reported to WorkerManager", e);
        }
    }
    return true;
}

From source file:org.apache.hadoop.mapreduce.filecache.TrackerDistributedCacheManager.java

private void setPermissions(Configuration conf, CacheStatus cacheStatus, boolean isPublic) throws IOException {
    if (isPublic) {
        Path localizedUniqueDir = cacheStatus.getLocalizedUniqueDir();
        LOG.info("Doing chmod on localdir :" + localizedUniqueDir);
        try {
            FileUtil.chmod(localizedUniqueDir.toString(), "ugo+rx", true);
        } catch (InterruptedException e) {
            LOG.warn("Exception in chmod" + e.toString());
            throw new IOException(e);
        }
    } else {
        // invoke taskcontroller to set permissions
        DistributedCacheFileContext context = new DistributedCacheFileContext(conf.get(MRJobConfig.USER_NAME),
                new File(cacheStatus.localizedBaseDir.toString()), cacheStatus.localizedBaseDir,
                cacheStatus.uniqueString);
        taskController.initializeDistributedCacheFile(context);
    }
}

From source file:com.streamsets.pipeline.lib.dirspooler.SpoolDirRunnable.java

private Offset produce(Offset lastSourceOffset, BatchContext batchContext) throws StageException {

    // if lastSourceOffset is NULL (beginning of source) it returns NULL
    String file = lastSourceOffset.getRawFile();
    String lastSourceFile = file;
    String fullPath = (file != null) ? spooler.getSpoolDir() + FILE_SEPARATOR + file : null;
    // if lastSourceOffset is NULL (beginning of source) it returns 0
    String offset = lastSourceOffset.getOffset();

    try {
        if (hasToFetchNextFileFromSpooler(file, offset)) {
            updateGauge(Status.SPOOLING, null);
            currentFile = null;
            try {
                WrappedFile nextAvailFile = null;
                do {
                    if (nextAvailFile != null) {
                        LOG.warn("Ignoring file '{}' in spool directory as is lesser than offset file '{}'",
                                nextAvailFile.getAbsolutePath(), fullPath);
                    }
                    nextAvailFile = spooler.poolForFile(conf.poolingTimeoutSecs, TimeUnit.SECONDS);
                } while (!isFileFromSpoolerEligible(nextAvailFile, fullPath, offset));

                if (nextAvailFile == null) {
                    // no file to process
                    LOG.debug("No new file available in spool directory after '{}' secs, producing empty batch",
                            conf.poolingTimeoutSecs);

                    // no-more-data event needs to be sent.
                    shouldSendNoMoreDataEvent = true;

                } else {
                    // since we have data to process, don't trigger the no-more-data event.
                    shouldSendNoMoreDataEvent = false;

                    // file to process
                    currentFile = nextAvailFile;

                    // if the current offset file is null or the file returned by the dirspooler is greater than the current offset
                    // file we take the file returned by the dirspooler as the new file and set the offset to zero
                    // if not, it means the dirspooler returned us the current file, we just keep processing it from the last
                    // offset we processed (known via offset tracking)
                    boolean pickFileFromSpooler = false;
                    if (file == null) {
                        pickFileFromSpooler = true;
                    } else if (useLastModified) {
                        WrappedFile fileObject = fs.getFile(spooler.getSpoolDir(), file);
                        if (SpoolDirUtil.compareFiles(fs, nextAvailFile, fileObject)) {
                            pickFileFromSpooler = true;
                        }
                    } else if (nextAvailFile.getFileName().compareTo(file) > 0) {
                        pickFileFromSpooler = true;
                    }

                    if (pickFileFromSpooler) {
                        file = currentFile.getAbsolutePath();
                        if (conf.processSubdirectories) {
                            // Since you are working from the specified path, all the paths MUST be relative to this base
                            // path that's why we are removing the first '/'
                            file = currentFile.toString().replaceFirst(spooler.getSpoolDir() + FILE_SEPARATOR,
                                    "");
                        }
                        if (offsets.containsKey(file)) {
                            offset = offsets.get(file).getOffset();
                        } else {
                            offset = ZERO;
                        }
                    }
                }

                if (currentFile != null && !offset.equals(MINUS_ONE)) {
                    perFileRecordCount = 0;
                    perFileErrorCount = 0;
                    SpoolDirEvents.NEW_FILE.create(context, batchContext)
                            .with("filepath", currentFile.getAbsolutePath()).createAndSend();
                    noMoreDataFileCount++;
                    totalFiles++;
                }

            } catch (InterruptedException ex) {
                // the dirspooler was interrupted while waiting for a file, we log and return, the pipeline agent will invoke us
                // again to wait for a file again
                LOG.warn("Pooling interrupted");
                Thread.currentThread().interrupt();
            }
        }
    } catch (IOException ex) {
        LOG.error(ex.toString(), ex);
    }

    if (currentFile != null && !offset.equals(MINUS_ONE)) {
        // we have a file to process (from before or new from dirspooler)
        try {
            updateGauge(Status.READING, offset);

            // we ask for a batch from the currentFile starting at offset
            offset = generateBatch(currentFile, offset, batchSize, batchContext.getBatchMaker());

            if (MINUS_ONE.equals(offset)) {
                SpoolDirEvents.FINISHED_FILE.create(context, batchContext)
                        .with("filepath", currentFile.getAbsolutePath()).with("error-count", perFileErrorCount)
                        .with("record-count", perFileRecordCount).createAndSend();

                LineageEvent event = context.createLineageEvent(LineageEventType.ENTITY_READ);
                event.setSpecificAttribute(LineageSpecificAttribute.ENTITY_NAME, currentFile.getAbsolutePath());
                event.setSpecificAttribute(LineageSpecificAttribute.ENDPOINT_TYPE,
                        EndPointType.LOCAL_FS.name());
                event.setSpecificAttribute(LineageSpecificAttribute.DESCRIPTION, conf.filePattern);
                Map<String, String> props = new HashMap<>();
                props.put("Record Count", Long.toString(perFileRecordCount));
                event.setProperties(props);
                context.publishLineageEvent(event);
            }
        } catch (BadSpoolFileException ex) {
            LOG.error(Errors.SPOOLDIR_01.getMessage(), ex.getFile(), ex.getPos(), ex.toString(), ex);
            context.reportError(Errors.SPOOLDIR_01, ex.getFile(), ex.getPos(), ex.toString(), ex);

            try {
                // then we ask the dirspooler to error handle the failed file
                spooler.handleCurrentFileAsError();

            } catch (IOException ex1) {
                throw new StageException(Errors.SPOOLDIR_00, currentFile, ex1.toString(), ex1);
            }
            // we set the offset to -1 to indicate we are done with the file and we should fetch a new one from the dirspooler
            offset = MINUS_ONE;
        }
    }

    if (shouldSendNoMoreDataEvent) {
        LOG.info("sending no-more-data event.  records {} errors {} files {} ", noMoreDataRecordCount,
                noMoreDataErrorCount, noMoreDataFileCount);
        SpoolDirEvents.NO_MORE_DATA.create(context, batchContext).with("record-count", noMoreDataRecordCount)
                .with("error-count", noMoreDataErrorCount).with("file-count", noMoreDataFileCount)
                .createAndSend();
        shouldSendNoMoreDataEvent = false;
        noMoreDataRecordCount = 0;
        noMoreDataErrorCount = 0;
        noMoreDataFileCount = 0;
    }

    Offset newOffset = new Offset(Offset.VERSION_ONE, file, offset);

    // Process And Commit offsets
    boolean batchProcessed = context.processBatch(batchContext, newOffset.getFile(),
            newOffset.getOffsetString());

    // Commit offset and perform post-processing only if the batch was properly processed
    if (batchProcessed && !context.isPreview()) {
        if (lastSourceFile != null && !lastSourceFile.equals(newOffset.getFile())) {
            context.commitOffset(lastSourceFile, null);
        }

        // if this is the end of the file, do post processing
        if (currentFile != null && newOffset.getOffset().equals(MINUS_ONE)) {
            spooler.doPostProcessing(fs.getFile(conf.spoolDir, newOffset.getFile()));
        }
    }

    updateGauge(Status.BATCH_GENERATED, offset);
    return newOffset;
}

From source file:eionet.cr.web.action.factsheet.FolderActionBean.java

private void saveAndHarvest() throws IOException, DAOException {
    // start the thread that saves the file's content and attempts to harvest it
    Thread thread = new Thread(this);
    thread.start();

    // check the thread after every second, exit loop if it hasn't finished in 15 seconds
    for (int loopCount = 0; thread.isAlive() && loopCount < 15; loopCount++) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            throw new CRRuntimeException(e.toString(), e);
        }
    }

    // if the thread reported an exception, throw it
    if (saveAndHarvestException != null) {
        if (saveAndHarvestException instanceof DAOException) {
            throw (DAOException) saveAndHarvestException;
        } else if (saveAndHarvestException instanceof IOException) {
            throw (IOException) saveAndHarvestException;
        } else if (saveAndHarvestException instanceof RuntimeException) {
            throw (RuntimeException) saveAndHarvestException;
        } else {
            throw new CRRuntimeException(saveAndHarvestException.getMessage(), saveAndHarvestException);
        }
    }

    // add feedback message to the bean's context
    if (!thread.isAlive()) {
        addSystemMessage("File saved and harvested!");
    } else {
        if (!contentSaved) {
            addSystemMessage("Saving and harvesting the file continues in the background!");
        } else {
            addSystemMessage("File content saved, but harvest continues in the background!");
        }
    }
}