Example usage for java.lang InterruptedException InterruptedException

List of usage examples for java.lang InterruptedException InterruptedException

Introduction

On this page you can find example usage of the java.lang.InterruptedException constructor InterruptedException(String).

Prototype

public InterruptedException(String s) 

Document

Constructs an InterruptedException with the specified detail message.
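
To illustrate the constructor on its own, here is a minimal sketch (the class and method names are invented for this illustration and are not taken from the source files below): a worker checks its thread's interrupt flag and, when the flag is set, constructs an InterruptedException with a detail message describing what was interrupted.

public class InterruptCheckExample {

    // Hypothetical worker that performs its work in small steps and checks for interruption.
    static void doWork() throws InterruptedException {
        for (int i = 0; i < 10; i++) {
            if (Thread.currentThread().isInterrupted()) {
                // Supply a detail message explaining what was interrupted and where.
                throw new InterruptedException("work loop interrupted at step " + i);
            }
            // ... perform one unit of work ...
        }
    }

    public static void main(String[] args) throws Exception {
        Thread worker = new Thread(() -> {
            try {
                doWork();
            } catch (InterruptedException e) {
                // The detail message passed to the constructor is available via getMessage().
                System.out.println("Caught: " + e.getMessage());
            }
        });
        worker.start();
        worker.interrupt(); // request interruption; doWork() reports it through the exception message
        worker.join();
    }
}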

Usage

From source file:com.baidu.asynchttpclient.AsyncHttpResponseHandler.java

void sendResponseMessage(HttpResponse response) {
    StatusLine status = response.getStatusLine();
    int statusCode = status.getStatusCode();

    String responseBody = null;
    try {
        HttpEntity entity = response.getEntity();
        if (statusCode >= 300) {
            responseBody = EntityUtils.toString(entity);
        } else {
            if (entity == null) {
                throw new IllegalArgumentException("HTTP entity may not be null");
            }
            // 
            InputStream instream = entity.getContent();
            if (instream == null) {
                sendReceiveStartMessage(0, null/* , headers */);
                sendReceiveUpdateMessage(new byte[0], 0);
                sendReceiveEndMessage();
                return;
            }
            // ///////////////phase 1////////////////////////////
            if (entity.getContentLength() > Integer.MAX_VALUE) {
                throw new IllegalArgumentException("HTTP entity too large to be buffered in memory");
            }

            int i = (int) entity.getContentLength();
            if (i < 0) {
                i = 4096;
            }
            String charset = null;
            if (entity.getContentType() != null) {
                HeaderElement values[] = entity.getContentType().getElements();
                if (values.length > 0) {
                    NameValuePair param = values[0].getParameterByName("charset");
                    if (param != null) {
                        charset = param.getValue();
                    }
                }
            }
            sendReceiveStartMessage(i, charset/* , headers */);
            // ////////////////////////phase 2//////////////////////////

            final int _1KSize = 1024;
            // final int _100KSize = _1KSize * 100;
            int tmpSize = _1KSize;
            // if (i > _100KSize) {
            // tmpSize = _1KSize * 2;
            // }

            boolean readDone = false;
            byte[] tmp = null;
            int remain = 0;
            do {
                if (Thread.currentThread().isInterrupted()) {
                    sendFailureMessage(new InterruptedException("request interrupted!"), null);
                    return;
                }
                if (tmp == null) {
                    tmp = new byte[tmpSize];
                }
                int offset = 0;
                remain = tmpSize;
                do {
                    if (Thread.currentThread().isInterrupted()) {
                        sendFailureMessage(new InterruptedException("request interrupted!"), null);
                        return;
                    }
                    int length = instream.read(tmp, offset, remain);
                    if (length != -1) {
                        offset += length;
                        remain -= length;
                    } else {
                        readDone = true;
                        break;
                    }
                } while (remain > 0);

                if (offset >= 0) {
                    sendReceiveUpdateMessage(tmp, offset);
                }
                tmp = null;
            } while (!readDone);
            // ////////////////////////phase 3//////////////////////////
            sendReceiveEndMessage();
        }

    } catch (IOException e) {
        sendFailureMessage(e, null);
    }

    if (status.getStatusCode() >= 300) {
        sendFailureMessage(new HttpResponseException(status.getStatusCode(), status.getReasonPhrase()),
                responseBody);
    }
}

From source file:it.eng.spagobi.studio.core.wizards.deployDatasetWizard.SpagoBIDeployDatasetWizardFormPage.java

/**
 * Fill the values.
 */
public void fillValues() {
    logger.debug("IN");
    IFile fileSel = (IFile) selection.toList().get(0);
    String queryStr = DeployDatasetService.getMetaQuery(fileSel);
    logger.debug("Query in file is " + queryStr);
    queryStr = queryStr != null ? queryStr : "";
    //      queryText.setText(queryStr);
    query = queryStr;

    // first of all get info from server      
    final SpagoBIServerObjectsFactory proxyObjects;
    SDKProxyFactory proxyFactory = null;
    try {
        proxyObjects = new SpagoBIServerObjectsFactory(projectName);
    } catch (NoActiveServerException e1) {
        logger.error("No active server found", e1);
        MessageDialog.openError(PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(), "Error",
                "No active server found");
        return;
    }

    // progress monitor to recover datasource information
    IRunnableWithProgress op = new IRunnableWithProgress() {
        public void run(IProgressMonitor monitor) throws InvocationTargetException {
            monitor.beginTask("Deploy a new dataset: retrieve data sources ", IProgressMonitor.UNKNOWN);

            try {

                datasourceList = proxyObjects.getServerDataSources().getDataSourceList();
                logger.debug("datasources retrieved");
                domainList = proxyObjects.getServerDomains().getDomainsListByDomainCd("CATEGORY_TYPE");
                logger.debug("domains retrieved");

            } catch (Exception e) {
                logger.error("No comunication with SpagoBI server", e);
                MessageDialog.openError(getShell(), "No comunication with server",
                        "Error in comunication with SpagoBi Server; check its definition and check if the service is avalaible");
                return;
            }
            monitor.done();
            if (monitor.isCanceled())
                logger.error("Operation not ended",
                        new InterruptedException("The long running operation was cancelled"));
        }
    };

    ProgressMonitorDialog dialog = new ProgressMonitorDialog(getShell());
    try {
        dialog.run(true, true, op);
    } catch (InvocationTargetException e1) {
        logger.error(
                "Error in communication with SpagoBI Server; check its definition and check if the service is available",
                e1);
        dialog.close();
        MessageDialog.openError(getShell(), "No communication with server",
                "Error in communication with SpagoBI Server; check its definition and check if the service is available");
        return;
    } catch (InterruptedException e1) {
        logger.error("No communication with SpagoBI server", e1);
        dialog.close();
        MessageDialog.openError(getShell(), "No communication with server",
                "Error in communication with SpagoBI Server; check its definition and check if the service is available");
        return;
    }
    dialog.close();

    dataSourceLabelIdMap = new HashMap<String, Integer>();
    String[] datasourceLabels = new String[datasourceList.length];
    for (int i = 0; i < datasourceLabels.length; i++) {
        DataSource dataSource = datasourceList[i];
        logger.debug("Datasource " + dataSource.getName());
        datasourceLabels[i] = dataSource.getLabel();
        dataSourceLabelIdMap.put(dataSource.getLabel(), dataSource.getId());
    }
    Arrays.sort(datasourceLabels);
    dataSourceCombo.setItems(datasourceLabels);

    logger.debug("Datasources combo filled");

    domainsLabelIdMap = new HashMap<String, Integer>();
    String[] domainsLabels = new String[domainList.length];
    for (int i = 0; i < domainsLabels.length; i++) {
        Domain domain = domainList[i];
        logger.debug("Domain " + domain.getValueCd());
        domainsLabels[i] = domain.getValueCd();
        domainsLabelIdMap.put(domain.getValueCd(), domain.getValueId());
    }
    Arrays.sort(domainsLabels);
    categoryCombo.setItems(domainsLabels);

    publicCombo.add("Private");
    publicCombo.add("Public");
    publicCombo.select(0);
    logger.debug("OUT");

}

From source file:org.apache.hama.pipes.protocol.BinaryProtocol.java

@Override
public boolean waitForFinish() throws IOException, InterruptedException {
    // LOG.debug("waitForFinish... " + hasTask);
    synchronized (this.hasTaskLock) {

        while (this.hasTask) {
            this.hasTaskLock.wait(); // this call blocks
        }

        // Check if UplinkReader thread has thrown exception
        if (uplinkException != null) {
            throw new InterruptedException(StringUtils.stringifyException(uplinkException));
        }
    }
    return hasTask;
}

From source file:org.alfresco.extension.bulkimport.impl.Scanner.java

/**
 * @see org.alfresco.extension.bulkimport.BulkImportCallback#submit(org.alfresco.extension.bulkimport.source.BulkImportItem)
 */
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
public synchronized void submit(final BulkImportItem item) throws InterruptedException {
    // PRECONDITIONS
    if (item == null) {
        throw new IllegalArgumentException("Import source '" + source.getName()
                + "' has logic errors - a null import item was submitted.");
    }

    if (item.getVersions() == null || item.getVersions().size() <= 0) {
        throw new IllegalArgumentException("Import source '" + source.getName()
                + "' has logic errors - an empty import item was submitted.");
    }

    // Body
    if (importStatus.isStopping() || Thread.currentThread().isInterrupted())
        throw new InterruptedException(
                Thread.currentThread().getName() + " was interrupted. Terminating early.");

    // If the weight of the new item would blow out the current batch, submit the batch as-is (i.e. *before* adding the newly submitted item).
    // This ensures that heavy items start a new batch (and possibly end up in a batch by themselves).
    int weight = weight(item);

    if (weightOfCurrentBatch + weight > batchWeight) {
        submitCurrentBatch();
    }

    // Create a new batch, if necessary
    if (currentBatch == null) {
        currentBatchNumber++;
        currentBatch = new ArrayList<>(batchWeight);
        weightOfCurrentBatch = 0;
    }

    // Finally, add the item to the current batch
    currentBatch.add(item);
    weightOfCurrentBatch += weight;
}

From source file:ch.epfl.scapetoad.CartogramGastner.java

/**
 * Integrates the non-linear Volterra equation.
 * @return true if the displacement field has converged, false otherwise.
 */
private boolean integrateNonlinearVolterraEquation() throws InterruptedException {
    boolean stepsize_ok;
    double h, maxchange = this.INFTY, t, vxplus, vyplus, xguess, yguess;
    int i, j, k;

    do {
        this.initcond();
        this.nblurs++;
        //if (this.minpop < 0.0)
        //   double sigmaVal = SIGMA * Math.pow(this.SIGMAFAC, this.nblurs);

    } while (this.minpop < 0.0);

    h = HINITIAL;
    t = 0;

    for (j = 0; j <= this.lx; j++) {
        for (k = 0; k <= this.ly; k++) {
            this.x[j][k] = j;
            this.y[j][k] = k;
        }
    }

    this.calculateVelocityField(0.0);

    for (j = 0; j <= this.lx; j++) {
        for (k = 0; k <= this.ly; k++) {
            vx[j][k] = gridvx[j][k];
            vy[j][k] = gridvy[j][k];
        }
    }

    i = 1;

    do {
        // Stop if the user has interrupted the process.
        if (Thread.interrupted()) {
            // Raise an InterruptedException.
            throw new InterruptedException("Computation has been interrupted by the user.");
        }

        stepsize_ok = true;
        this.calculateVelocityField(t + h);

        for (j = 0; j <= this.lx; j++) {
            for (k = 0; k <= this.ly; k++) {

                double xinterpol = this.x[j][k] + (h * this.vx[j][k]);
                double yinterpol = this.y[j][k] + (h * this.vy[j][k]);
                if (xinterpol < 0.0 || yinterpol < 0.0) {
                    if (AppContext.DEBUG)
                        System.out.println("[ERROR] Cartogram out of bounds !");
                }

                vxplus = this.interpolateBilinear(this.gridvx, xinterpol, yinterpol);

                vyplus = this.interpolateBilinear(this.gridvy, xinterpol, yinterpol);

                xguess = this.x[j][k] + (0.5 * h * (this.vx[j][k] + vxplus));

                yguess = this.y[j][k] + (0.5 * h * (this.vy[j][k] + vyplus));

                double[] ptappr = new double[2];
                ptappr[0] = this.xappr[j][k];
                ptappr[1] = this.yappr[j][k];
                boolean solving_ok = this.newt2(h, ptappr, xguess, yguess, j, k);
                this.xappr[j][k] = ptappr[0];
                this.yappr[j][k] = ptappr[1];
                if (solving_ok == false)
                    return false;

                if (((xguess - this.xappr[j][k]) * (xguess - this.xappr[j][k]))
                        + ((yguess - this.yappr[j][k]) * (yguess - this.yappr[j][k])) > this.TOLINT) {
                    if (h < this.MINH) {
                        //double sigmaVal = this.SIGMA * Math.pow(
                        //   this.SIGMAFAC, this.nblurs);
                        this.nblurs++;
                        return false;
                    }
                    h = h / 10;
                    stepsize_ok = false;
                    break;
                }

            } // for (k = 0; k <= this.ly; k++)

        } // for (j = 0; j <= this.lx; j++)

        if (!stepsize_ok) {
            continue;
        } else {
            t += h;
            maxchange = 0.0;

            for (j = 0; j <= this.lx; j++) {
                for (k = 0; k <= this.ly; k++) {
                    if (((this.x[j][k] - this.xappr[j][k]) * (this.x[j][k] - this.xappr[j][k]))
                            + ((this.y[j][k] - this.yappr[j][k])
                                    * (this.y[j][k] - this.yappr[j][k])) > maxchange) {
                        maxchange = ((this.x[j][k] - this.xappr[j][k]) * (this.x[j][k] - this.xappr[j][k]))
                                + ((this.y[j][k] - this.yappr[j][k]) * (this.y[j][k] - this.yappr[j][k]));
                    }

                    this.x[j][k] = this.xappr[j][k];
                    this.y[j][k] = this.yappr[j][k];
                    this.vx[j][k] = this.interpolateBilinear(this.gridvx, this.xappr[j][k], this.yappr[j][k]);
                    this.vy[j][k] = this.interpolateBilinear(this.gridvy, this.xappr[j][k], this.yappr[j][k]);

                } // for (k=0; k<=ly; k++)

            } // for (j = 0; j <= this.lx; j++)

        }

        h = 1.2 * h;

        int progress = mProgressEnd;
        if (i < 200)
            progress = mProgressStart + (i * ((mProgressEnd - mProgressStart) / 200));
        mCartogramWizard.updateRunningStatus(progress, mProgressText, "Doing time step " + i);

        i++;

    } while (i < this.MAXINTSTEPS && t < this.TIMELIMIT && maxchange > this.CONVERGENCE);

    return true;

}

From source file:org.alfresco.extension.bulkimport.source.fs.DirectoryAnalyser.java

private final NavigableSet<FilesystemBulkImportItemVersion> constructImportItemVersions(
        final SortedMap<BigDecimal, Pair<File, File>> itemVersions) throws InterruptedException {
    // PRECONDITIONS
    if (itemVersions == null)
        throw new IllegalArgumentException("itemVersions cannot be null.");
    if (itemVersions.size() <= 0)
        throw new IllegalArgumentException("itemVersions cannot be empty.");

    // Body
    final NavigableSet<FilesystemBulkImportItemVersion> result = new TreeSet<>();

    for (final BigDecimal versionNumber : itemVersions.keySet()) {
        if (importStatus.isStopping() || Thread.currentThread().isInterrupted())
            throw new InterruptedException(
                    Thread.currentThread().getName() + " was interrupted. Terminating early.");

        final Pair<File, File> contentAndMetadataFiles = itemVersions.get(versionNumber);
        final FilesystemBulkImportItemVersion version = new FilesystemBulkImportItemVersion(serviceRegistry,
                configuredContentStore, metadataLoader, versionNumber, contentAndMetadataFiles.getFirst(),
                contentAndMetadataFiles.getSecond());

        result.add(version);
    }

    return (result);
}

From source file:org.alfresco.repo.content.transform.AbstractContentTransformer2.java

/**
 * @see org.alfresco.repo.content.transform.ContentTransformer#transform(org.alfresco.service.cmr.repository.ContentReader, org.alfresco.service.cmr.repository.ContentWriter, org.alfresco.service.cmr.repository.TransformationOptions)
 */
public final void transform(ContentReader reader, ContentWriter writer, TransformationOptions options)
        throws ContentIOException {
    try {
        depth.set(depth.get() + 1);

        // begin timing
        long before = System.currentTimeMillis();

        String sourceMimetype = reader.getMimetype();
        String targetMimetype = writer.getMimetype();

        // check options map
        if (options == null) {
            options = new TransformationOptions();
        }

        try {
            if (transformerDebug.isEnabled()) {
                transformerDebug.pushTransform(this, reader.getContentUrl(), sourceMimetype, targetMimetype,
                        reader.getSize(), options);
            }

            // MNT-16381: check the mimetype of the file supplied by the user
            // matches the sourceMimetype of the reader. Intermediate files are
            // not checked.
            strictMimetypeCheck(reader, options, sourceMimetype);

            // Check the transformability
            checkTransformable(reader, writer, options);

            // Pass on any limits to the reader
            setReaderLimits(reader, writer, options);

            // Transform
            // MNT-12238: CLONE - CLONE - Upload of PPTX causes very high memory usage leading to system instability
            // Limiting transformation up to configured amount of milliseconds to avoid very high RAM consumption
            // and OOM during transforming problematic documents
            TransformationOptionLimits limits = getLimits(reader.getMimetype(), writer.getMimetype(), options);

            long timeoutMs = limits.getTimeoutMs();
            if (!useTimeoutThread || (null == limits) || (-1 == timeoutMs)) {
                transformInternal(reader, writer, options);
            } else {
                Future<?> submittedTask = null;
                StreamAwareContentReaderProxy proxiedReader = new StreamAwareContentReaderProxy(reader);
                StreamAwareContentWriterProxy proxiedWriter = new StreamAwareContentWriterProxy(writer);

                try {
                    submittedTask = getExecutorService()
                            .submit(new TransformInternalCallable(proxiedReader, proxiedWriter, options));
                    submittedTask.get(timeoutMs + additionalThreadTimout, TimeUnit.MILLISECONDS);
                } catch (TimeoutException e) {
                    releaseResources(submittedTask, proxiedReader, proxiedWriter);
                    throw new TimeoutException("Transformation failed due to timeout limit");
                } catch (InterruptedException e) {
                    releaseResources(submittedTask, proxiedReader, proxiedWriter);
                    throw new InterruptedException(
                            "Transformation failed, because the thread of the transformation was interrupted");
                } catch (ExecutionException e) {
                    Throwable cause = e.getCause();
                    if (cause instanceof TransformInternalCallableException) {
                        cause = ((TransformInternalCallableException) cause).getCause();
                    }

                    throw cause;
                }
            }

            // record time
            long after = System.currentTimeMillis();
            recordTime(sourceMimetype, targetMimetype, after - before);
        } catch (ContentServiceTransientException cste) {
            // A transient failure has occurred within the content transformer.
            // This should not be interpreted as a failure and therefore we should not
            // update the transformer's average time.
            if (logger.isDebugEnabled()) {
                logger.debug("Transformation has been transiently declined: \n" + "   reader: " + reader + "\n"
                        + "   writer: " + writer + "\n" + "   options: " + options + "\n" + "   transformer: "
                        + this);
            }
            // the finally block below will still perform tidyup. Otherwise we're done.
            // We rethrow the exception
            throw cste;
        } catch (UnsupportedTransformationException e) {
            // Don't record an error or even the time, as this is normal in compound transformations.
            transformerDebug.debug("          Failed", e);
            throw e;
        } catch (Throwable e) {
            // Make sure that this transformation gets set back i.t.o. time taken.
            // This will ensure that transformers that compete for the same transformation
            // will be prejudiced against transformers that tend to fail
            long after = System.currentTimeMillis();
            recordError(sourceMimetype, targetMimetype, after - before);

            // Ask Tika to detect the document, and report back on if
            //  the current mime type is plausible
            String differentType = getMimetypeService().getMimetypeIfNotMatches(reader.getReader());

            // Report the error
            if (differentType == null) {
                transformerDebug.debug("          Failed", e);
                throw new ContentIOException("Content conversion failed: \n" + "   reader: " + reader + "\n"
                        + "   writer: " + writer + "\n" + "   options: " + options.toString(false) + "\n"
                        + "   limits: " + getLimits(reader, writer, options), e);
            } else {
                transformerDebug.debug("          Failed: Mime type was '" + differentType + "'", e);

                if (retryTransformOnDifferentMimeType) {
                    // MNT-11015 fix.
                    // Set a new reader to refresh the input stream.
                    reader = reader.getReader();
                    // set the actual file MIME type detected by Tika for content reader
                    reader.setMimetype(differentType);

                    // Get correct transformer according actual file MIME type and try to transform file with
                    // actual transformer
                    ContentTransformer transformer = this.registry.getTransformer(differentType,
                            reader.getSize(), targetMimetype, options);
                    if (null != transformer) {
                        transformer.transform(reader, writer, options);
                    } else {
                        transformerDebug.debug("          Failed", e);
                        throw new ContentIOException("Content conversion failed: \n" + "   reader: " + reader
                                + "\n" + "   writer: " + writer + "\n" + "   options: "
                                + options.toString(false) + "\n" + "   limits: "
                                + getLimits(reader, writer, options) + "\n" + "   claimed mime type: "
                                + reader.getMimetype() + "\n" + "   detected mime type: " + differentType + "\n"
                                + "   transformer not found" + "\n", e);
                    }
                } else {
                    throw new ContentIOException("Content conversion failed: \n" + "   reader: " + reader + "\n"
                            + "   writer: " + writer + "\n" + "   options: " + options.toString(false) + "\n"
                            + "   limits: " + getLimits(reader, writer, options) + "\n"
                            + "   claimed mime type: " + reader.getMimetype() + "\n" + "   detected mime type: "
                            + differentType, e);
                }
            }
        } finally {
            transformerDebug.popTransform();

            // check that the reader and writer are both closed
            if (reader.isChannelOpen()) {
                logger.error("Content reader not closed by transformer: \n" + "   reader: " + reader + "\n"
                        + "   transformer: " + this);
            }
            if (writer.isChannelOpen()) {
                logger.error("Content writer not closed by transformer: \n" + "   writer: " + writer + "\n"
                        + "   transformer: " + this);
            }
        }

        // done
        if (logger.isDebugEnabled()) {
            logger.debug("Completed transformation: \n" + "   reader: " + reader + "\n" + "   writer: " + writer
                    + "\n" + "   options: " + options + "\n" + "   transformer: " + this);
        }
    } finally {
        depth.set(depth.get() - 1);
    }
}

From source file:com.siberhus.tdfl.DataFileLoader.java

/**
 *
 * @param dataContext
 * @param reader
 * @param successWriter
 * @param errorWriter
 * @throws Exception
 */
private void _doReadProcessWrite(DataContext dataContext, DataFileReader reader, DataFileWriter successWriter,
        DataFileWriter errorWriter) throws Exception {

    boolean successLabeled = false, errorLabeled = false;

    reader.open(dataContext);

    String labels[] = null, values[] = null;
    if (reader.isReadLabels()) {
        labels = reader.read();
    }

    int successCount = 0, lineCount = 0;
    while ((values = reader.read()) != null) {
        lineCount++;
        dataContext.linesRead += reader.getLinesRead();
        FieldSet fieldSet = null;
        if (labels != null) {
            if (values.length > labels.length) {
                List<String> vList = new ArrayList<String>();
                for (int i = 0; i < labels.length; i++) {
                    vList.add(values[i]);
                }
                values = vList.toArray(new String[0]);
            }
            fieldSet = new DefaultFieldSet(values, labels);
        } else {
            fieldSet = new DefaultFieldSet(values);
        }

        FieldDataException fde = new FieldDataException();
        T item = null;
        try {
            if (Thread.currentThread().isInterrupted()) {
                throw new InterruptedException("Current thread is interrupted");
            }
            item = dataFileProcessor.mapLine(fieldSet, fde);
            if (item == null) {
                //skip
                continue;
            }
            dataFileProcessor.validateItem(item, fde);
            if (fde.hasErrors()) {
                if (fde.isForceProcess()) {
                    dataFileProcessor.processItem(item);
                }
                throw fde;
            }
            dataFileProcessor.processItem(item);
            successCount++;
            if (successCount == updateInterval) {
                successCount = 0;//reset
                dataFileProcessor.update();
                reader.update(dataContext);
                if (successWriter != null)
                    successWriter.update(dataContext);
                if (errorWriter != null)
                    errorWriter.update(dataContext);
                //TODO: save dataContext here
            }
            dataContext.itemSuccessCount++;
            dataFileProcessor.onItemSuccess(item);
            if (!successLabeled && labels != null) {
                lazyOpenWriterThenWriteLabels(dataContext, successWriter, labels);
                successLabeled = true;
            }
            lazyOpenWriterThenWrite(dataContext, successWriter, values);
            if (successWriter != null)
                dataContext.successLinesWritten += successWriter.getLinesWritten();
        } catch (InterruptedException e) {
            logger.info("Data processing is interrupted on file " + dataContext.getResource().getFilename());
            dataFileProcessor.onInterrupt(e);
            throw new DataFileLoaderException(e);
        } catch (Exception e) {
            if (!(e instanceof FieldDataException)) {
                if (e instanceof CancelException) {
                    logger.info(
                            "Data processing is cancelled on file " + dataContext.getResource().getFilename());
                    dataFileProcessor.onCancel((CancelException) e);
                    //                  break;
                    throw e;
                }
                logger.error(e.getMessage(), e);
            }
            dataContext.itemErrorCount++;
            dataFileProcessor.onItemError(item, e);

            if (!errorLabeled && labels != null) {
                lazyOpenWriterThenWriteLabels(dataContext, errorWriter, labels);
                errorLabeled = true;
            }
            String valuesWithErr[] = (String[]) ArrayUtils.add(values, e.toString());
            lazyOpenWriterThenWrite(dataContext, errorWriter, valuesWithErr);
            if (errorWriter != null)
                dataContext.errorLinesWritten += errorWriter.getLinesWritten();
        } finally {
            if (lineCount > 0) {
                //do again in case there are no success item.
                dataFileProcessor.update();
                reader.update(dataContext);
                if (successWriter != null)
                    successWriter.update(dataContext);
                if (errorWriter != null)
                    errorWriter.update(dataContext);
                //TODO: save dataContext here
            }
        }
    } //end while
}

From source file:org.apache.storm.kafka.spout.KafkaSpout.java

private void throwKafkaConsumerInterruptedException() {
    // Kafka throws its own type of exception when interrupted.
    // Wrap a new Java InterruptedException in a RuntimeException so Storm can recognize it as a reaction to an interrupt.
    throw new RuntimeException(new InterruptedException("Kafka consumer was interrupted"));
}

From source file:org.deri.iris.performance.IRISPerformanceTest.java

/**
 * Executes a set of datalog queries using the given configuration
 * @param queries The set of Datalog queries
 * @param config The configuration for the test suite
 * @return a list of IRISTestCase objects with the result of the test campaign
 */
public List<IRISTestCase> executeTests(final List<String> queries, final TestConfiguration config) {

    // Get the logger
    LOGGER = Logger.getLogger(IRISPerformanceTest.class.getName());

    // Construct a valid IRIS+- program using the queries and the configuration file
    String program = "";

    // add the query and its IRIS execution command to the program
    program += "/// Query ///\n";
    for (final String s : queries) {
        program += s + "\n";
        program += "?-" + s.substring(0, s.indexOf(":-")) + ".\n";
    }
    program += "\n";

    // If reasoning is enabled, add the TBOX to the program
    program += "/// TBox ///\n";
    if (config.getReasoning()) {
        String tboxPath = config.getTestHomePath() + "/" + config.getDataset() + "/tbox";
        if (config.getExpressiveness().compareTo("RDFS") == 0) {
            tboxPath += "/rdfs";
        }
        if (config.getExpressiveness().compareTo("OWL-QL") == 0) {
            tboxPath += "/owlql";
        }
        final String tbox = loadFile(tboxPath + "/" + config.getDataset() + ".dtg");
        program += tbox + "\n";
    } else {
        program += "/// EMPTY ///\n";
    }

    // Add the SBox
    program += "/// SBox ///\n";
    String sboxPath = config.getTestHomePath() + "/" + config.getDataset() + "/sbox";
    if (config.getExpressiveness().compareTo("RDFS") == 0) {
        sboxPath += "/rdfs";
    }
    if (config.getExpressiveness().compareTo("OWL-QL") == 0) {
        sboxPath += "/owlql";
    }
    final String sbox = loadFile(sboxPath + "/" + config.getDataset() + ".dtg");
    program += sbox + "\n\n";

    LOGGER.debug(program);

    // Get the parser
    final Parser parser = new Parser();

    // Parse the program
    try {
        parser.parse(program);
    } catch (final ParserException e) {
        e.printStackTrace();
    }

    // Get the TGDs from the set of rules
    final List<IRule> tgds = RewritingUtils.getTGDs(parser.getRules(), parser.getQueries());

    // Get the query bodies
    final List<IRule> bodies = new ArrayList<IRule>(parser.getRules());
    final List<IRule> datalogQueries = RewritingUtils.getQueries(bodies, parser.getQueries());

    // Get the constraints from the set of rules
    final Set<IRule> constraints = RewritingUtils.getConstraints(parser.getRules(), parser.getQueries());

    // Get the SBox rules from the set of rules
    final List<IRule> storageRules = RewritingUtils.getSBoxRules(parser.getRules(), parser.getQueries());

    // Check that the TBox is FO-reducible
    IRuleSafetyProcessor ruleProc = new LinearReducibleRuleSafetyProcessor();
    try {
        ruleProc.process(tgds);
    } catch (final RuleUnsafeException e) {
        e.printStackTrace();
    }

    // Check that the SBox rules are Safe Datalog
    ruleProc = new StandardRuleSafetyProcessor();
    try {
        ruleProc.process(storageRules);
    } catch (final RuleUnsafeException e) {
        e.printStackTrace();
    }

    // Connect to the storage
    StorageManager.getInstance();
    try {
        StorageManager.connect(config.getDBVendor(), config.getDBProtocol(), config.getDBHost(),
                config.getDBPort(), config.getDBName(), config.getSchemaName(), config.getDBUsername(),
                config.getDBPassword());
    } catch (final SQLException e) {
        e.printStackTrace();
    }

    // Evaluate the queries
    final List<IRISTestCase> output = new LinkedList<IRISTestCase>();
    for (final IQuery q : parser.getQueries()) {
        // Generate a new test-case
        final IRISTestCase currentTest = new IRISTestCase();
        int nTask = -10;

        // Get the Factories
        final IRelationFactory rf = new RelationFactory();

        // Get the Rewriter Engine
        final ParallelRewriter rewriter = new ParallelRewriter(DecompositionStrategy.DECOMPOSE,
                RewritingLanguage.UCQ, SubCheckStrategy.TAIL, NCCheck.TAIL);

        // Get and log the rule corresponding to the query
        final IRule ruleQuery = getRuleQuery(q, datalogQueries);
        currentTest.setQuery(ruleQuery);

        final Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils
                .computePositionDependencyGraph(tgds);

        final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);

        // Compute and log the FO-Rewriting
        LOGGER.info("Computing TBox Rewriting");
        float duration = -System.nanoTime();
        final Set<IRule> rewriting = rewriter.getRewriting(ruleQuery, tgds, constraints, deps, exprs);
        duration = ((duration + System.nanoTime()) / 1000000);
        currentTest.getTasks()
                .add(new Task(nTask++, "TBox Rewriting", duration, 0, 0, "ms", rewriting.toString()));
        LOGGER.info("done.");
        int count = 0;
        for (final IRule r : rewriting) {
            LOGGER.debug("(Qr" + ++count + ")" + r);
        }

        // Produce the rewriting according to the Nyaya Data Model
        final IQueryRewriter ndmRewriter = new NDMRewriter(storageRules);

        // Create a buffer for the output
        final IRelation outRelation = rf.createRelation();

        // Get the SBox rewriting
        try {
            LOGGER.info("Computing SBox Rewriting");
            final Set<IRule> sboxRewriting = new LinkedHashSet<IRule>();
            duration = -System.nanoTime();
            for (final IRule pr : rewriting) {
                sboxRewriting.addAll(ndmRewriter.getRewriting(pr));
            }
            duration = ((duration + System.nanoTime()) / 1000000);
            currentTest.getTasks()
                    .add(new Task(nTask++, "SBox Rewriting", duration, 0, 0, "ms", sboxRewriting.toString()));
            LOGGER.info("done.");
            count = 0;
            for (final IRule n : sboxRewriting) {
                LOGGER.debug("(Qn" + ++count + ")" + n);
            }

            // Produce the SQL rewriting for each query in the program
            final SQLRewriter sqlRewriter = new SQLRewriter(sboxRewriting);

            // Get the SQL rewriting as Union of Conjunctive Queries (UCQ)
            LOGGER.info("Computing SQL Rewriting");
            duration = -System.nanoTime();
            final List<String> ucqSQLRewriting = new LinkedList<String>();
            ucqSQLRewriting.add(sqlRewriter.getUCQSQLRewriting("", 10000, 0));
            duration = ((duration + System.nanoTime()) / 1000000);
            currentTest.getTasks()
                    .add(new Task(nTask++, "SQL Rewriting", duration, 0, 0, "ms", ucqSQLRewriting.toString()));
            LOGGER.info("done.");
            count = 0;
            for (final String s : ucqSQLRewriting) {
                LOGGER.debug("(Qs" + ++count + ") " + s);
            }

            // Execute the UCQ
            LOGGER.info("Executing SQL");

            // float ansConstructOverall = 0;

            // The synchronized structure to store the output tuples
            final Set<ITuple> result = Collections.synchronizedSet(new HashSet<ITuple>());

            /*
             * Prepare a set of runnable objects representing each partial rewriting to be executed in parallel
             */
            final List<RunnableQuery> rql = new LinkedList<RunnableQuery>();
            for (final String cq : ucqSQLRewriting) {
                // Construct a Runnable Query
                rql.add(new RunnableQuery(cq, result, currentTest.getTasks()));
            }

            // Get an executor that allows a number of parallel threads equals to the number of available processors
            // ExecutorService queryExecutor =
            // Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()*5);
            final ExecutorService queryExecutor = Executors.newSingleThreadScheduledExecutor();

            // Execute all the partial rewritings in parallel
            float ucqExecOverall = -System.nanoTime();
            for (final RunnableQuery rq : rql) {
                queryExecutor.execute(rq);
            }
            queryExecutor.shutdown();
            if (queryExecutor.awaitTermination(1, TimeUnit.DAYS)) {
                LOGGER.info("done.");
            } else
                throw new InterruptedException("Timeout Occured");
            ucqExecOverall = ((ucqExecOverall + System.nanoTime()) / 1000000);
            StorageManager.disconnect();

            // begin added section
            float minTime = System.nanoTime();
            float maxTime = 0;
            float avgTime = 0;
            int n = 0;
            for (final Task t : currentTest.getTasks()) {
                if (t.getName().contains("Execution")) {
                    avgTime += (t.getFinalTime() - t.getInitTime()) / 1000000;
                    n++;
                    if (t.getFinalTime() > maxTime) {
                        maxTime = t.getFinalTime();
                    }
                    if (t.getInitTime() < minTime) {
                        minTime = t.getInitTime();
                    }
                }
            }
            ucqExecOverall = (maxTime - minTime) / 1000000;
            // end added section

            currentTest.getTasks()
                    .add(new Task(nTask++, "UCQ Overall Execution Time", ucqExecOverall, 0, 0, "ms"));

            // begin added section
            avgTime = (avgTime / n);
            System.out.println(n);
            currentTest.getTasks().add(new Task(nTask++, "UCQ Average Execution Time", avgTime, 0, 0, "ms"));
            Collections.sort(currentTest.getTasks());
            // end added section

            for (final ITuple t : result) {
                outRelation.add(t);
            }

        } catch (final SQLException e) {
            e.printStackTrace();
        } catch (final EvaluationException e) {
            e.printStackTrace();
        } catch (final InterruptedException e) {
            e.printStackTrace();
        }
        currentTest.setAnswer(outRelation);
        output.add(currentTest);
    }
    return (output);
}