Example usage for java.lang InterruptedException InterruptedException

List of usage examples for java.lang InterruptedException InterruptedException

Introduction

On this page you can find usage examples for the java.lang InterruptedException(String) constructor.

Prototype

public InterruptedException(String s) 

Document

Constructs an InterruptedException with the specified detail message.
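
As a quick illustration of the prototype above, the detail message passed to the constructor is what callers later retrieve via getMessage(). A minimal, self-contained sketch (the class name and message are hypothetical):

public class InterruptedExceptionDemo {
    public static void main(String[] args) {
        try {
            // Construct the exception with a descriptive detail message.
            throw new InterruptedException("operation cancelled by user");
        } catch (InterruptedException e) {
            // Prints: operation cancelled by user
            System.out.println(e.getMessage());
        }
    }
}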

Usage

From source file:org.geopublishing.atlasStyler.classification.FeatureClassification.java

/**
 * This is where the magic happens. Here the attributes of the features are
 * summarized in a {@link DynamicBin1D} class.
 *
 * @throws IOException
 */
@Override
synchronized public DynamicBin1D getStatistics() throws InterruptedException, IOException {

    cancelCalculation.set(false);

    if (value_field_name == null)
        throw new IllegalArgumentException("value field has to be set");
    if (value_field_name.equals(normalizer_field_name))
        throw new RuntimeException("value field and the normalizer field may not be equal.");

    stats = staticStatsCache.get(getKey());

    if (stats == null || !cacheEnabled) {
        // Query only the attributes that are needed, rather than all of them.

        Filter filter = getStyledFeatures().getFilter();
        DefaultQuery query = new DefaultQuery(getStyledFeatures().getSchema().getTypeName(), filter);
        List<String> propNames = new ArrayList<String>();
        propNames.add(value_field_name);
        if (normalizer_field_name != null)
            propNames.add(normalizer_field_name);
        query.setPropertyNames(propNames);
        FeatureCollection<SimpleFeatureType, SimpleFeature> features = getStyledFeatures().getFeatureSource()
                .getFeatures(query);

        // Forget about the count of NODATA values
        noDataValuesCount.set(0);

        final DynamicBin1D stats_local = new DynamicBin1D();

        // get the AttributeMetaData for the given attribute to filter
        // NODATA values
        final AttributeMetadataImpl amd = getStyledFeatures().getAttributeMetaDataMap().get(value_field_name);
        final AttributeMetadataImpl amdNorm = getStyledFeatures().getAttributeMetaDataMap()
                .get(normalizer_field_name);

        // Iterate over the values and insert them into the statistics.
        final FeatureIterator<SimpleFeature> iterator = features.features();
        try {
            Double numValue, valueNormDivider;
            while (iterator.hasNext()) {

                // The calculation has been cancelled externally.
                if (cancelCalculation.get()) {
                    stats = null;
                    throw new InterruptedException(
                            "The statistics calculation has been externally interrupted by setting the 'cancelCalculation' flag.");
                }

                final SimpleFeature f = iterator.next();

                // Filter VALUE for NODATA
                final Object filtered = amd.fiterNodata(f.getAttribute(value_field_name));
                if (filtered == null) {
                    noDataValuesCount.incrementAndGet();
                    continue;
                }

                numValue = ((Number) filtered).doubleValue();

                if (normalizer_field_name != null) {

                    // Filter NORMALIZATION DIVIDER for NODATA
                    Object filteredNorm = amdNorm.fiterNodata(f.getAttribute(normalizer_field_name));
                    if (filteredNorm == null) {
                        noDataValuesCount.incrementAndGet();
                        continue;
                    }

                    valueNormDivider = ((Number) filteredNorm).doubleValue();
                    if (valueNormDivider == 0. || valueNormDivider.isInfinite() || valueNormDivider.isNaN()) {
                        // Even if it is not defined as a NODATA value,
                        // division by zero is not defined.
                        noDataValuesCount.incrementAndGet();
                        continue;
                    }

                    numValue = numValue / valueNormDivider;
                }

                stats_local.add(numValue);

            }

            stats = stats_local;

            if (cacheEnabled)
                staticStatsCache.put(getKey(), stats);

        } finally {
            features.close(iterator);
        }
    }

    return stats;
}
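
The example above relies on a cooperative-cancellation pattern: a flag is set from another thread, the worker polls it, and a descriptive InterruptedException is thrown when it is seen. Distilled into a minimal sketch (class and field names are illustrative, not from the original source):

import java.util.concurrent.atomic.AtomicBoolean;

public class CancellableCalculation {
    // Set from another thread (e.g. a UI) to request cancellation.
    private final AtomicBoolean cancelCalculation = new AtomicBoolean(false);

    public double calculate() throws InterruptedException {
        double sum = 0;
        for (int i = 0; i < 1_000_000; i++) {
            if (cancelCalculation.get()) {
                throw new InterruptedException(
                        "The calculation has been externally interrupted by setting the 'cancelCalculation' flag.");
            }
            sum += i; // one unit of work
        }
        return sum;
    }

    public void cancel() {
        cancelCalculation.set(true);
    }
}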

From source file:org.apache.giraph.rexster.utils.RexsterUtils.java

/**
 * Utility to handle the output response in case of errors.
 *
 * @param conn connection to the Rexster Interface
 * @param type type of data saved (vertices or edges)
 */
private static void handleResponse(HttpURLConnection conn, String type)
        throws IOException, InterruptedException {

    if (conn.getResponseCode() != 200) {
        InputStream is = conn.getErrorStream();
        BufferedReader rd = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8")));

        JSONObject obj = new JSONObject(rd);
        StringBuffer sb = new StringBuffer("Error occurred while saving " + type + ";");
        String aux;
        while ((aux = rd.readLine()) != null) {
            sb.append(aux);
        }
        sb.append(obj);

        rd.close();

        throw new InterruptedException(sb.toString());
    }
}

From source file:com.clustercontrol.collect.dialog.ExportDialog.java

/**
 * Customize button bar
 */
@Override
protected void createButtonsForButtonBar(Composite parent) {
    // Create the export button
    Button exportButton = this.createButton(parent, IDialogConstants.OPEN_ID, Messages.getString("export"),
            true);
    WidgetTestUtil.setTestId(this, "export", exportButton);

    this.getButton(IDialogConstants.OPEN_ID).addSelectionListener(new SelectionAdapter() {
        private FileDialog saveDialog;

        @Override
        public void widgetSelected(SelectionEvent e) {
            // Open a file dialog to choose the destination file
            this.saveDialog = new FileDialog(getShell(), SWT.SAVE);
            boolean headerFlag = ExportDialog.this.headerCheckbox.getSelection();

            // Build the default file name: <summaryType>_<timestamp>.zip
            SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
            String defaultDateStr = sdf.format(new Date(System.currentTimeMillis()));
            String defaultFileName = SummaryTypeMessage.typeToStringEN(m_summaryType) + '_' + defaultDateStr;
            this.saveDialog.setFilterExtensions(new String[] { "*.zip" });
            defaultFileName += ".zip";
            // Remove any spaces from the file name
            defaultFileName = defaultFileName.replaceAll(" ", "");
            this.saveDialog.setFileName(defaultFileName);

            String filePath = this.saveDialog.open();
            if (filePath != null) {
                m_log.debug("filePath = " + filePath + ", defaultFileName = " + defaultFileName);
                output(m_managerFacilityIdNameMap, m_summaryType, m_collectKeyInfoPkList,
                        m_targetManagerFacilityMap, headerFlag, filePath, defaultFileName, defaultDateStr);
            }
        }

        /**
         * Output
         */
        protected void output(TreeMap<String, String> managerFacilityIdNameMap, Integer summaryType,
                List<CollectKeyInfoPK> targetCollectKeyInfoList,
                TreeMap<String, List<String>> targetManagerFacilityMap, boolean headerFlag, String filePath,
                String fileName, String defaultDateStr) {

            // Create the record data writer
            writer = new RecordDataWriter(managerFacilityIdNameMap, summaryType, targetCollectKeyInfoList,
                    targetManagerFacilityMap, headerFlag, filePath, defaultDateStr);

            // Run the export and track its progress
            try {
                IRunnableWithProgress op = new IRunnableWithProgress() {
                    @Override
                    public void run(IProgressMonitor monitor)
                            throws InvocationTargetException, InterruptedException {
                        // Propagate the service context to the writer and start it
                        ServiceContext context = ContextProvider.getContext();
                        writer.setContext(context);
                        Thread exportThread = new Thread(writer);
                        exportThread.start();
                        Thread.sleep(3000);
                        monitor.beginTask(Messages.getString("export"), 100);

                        int progress = 0;
                        int buff = 0;
                        while (progress < 100) {
                            progress = writer.getProgress();

                            if (monitor.isCanceled()) {
                                throw new InterruptedException("");
                            }
                            if (writer.isCanceled()) {
                                throw new InterruptedException(writer.getCancelMessage());
                            }
                            Thread.sleep(50);
                            monitor.worked(progress - buff);
                            buff = progress;
                        }
                        monitor.done();
                    }
                };

                // Run the export task in a progress dialog
                new ProgressMonitorDialog(getShell()).run(true, true, op);

                // Start download file
                if (ClusterControlPlugin.isRAP()) {
                    FileDownloader.openBrowser(PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(),
                            filePath, fileName);
                } else {
                    MessageDialog.openInformation(getShell(), Messages.getString("confirmed"),
                            Messages.getString("performance.export.success"));
                }
            } catch (InterruptedException e) {
                // The export was cancelled by the user
                MessageDialog.openInformation(getShell(), Messages.getString("confirmed"),
                        Messages.getString("performance.export.cancel") + " : " + e.getMessage());
            } catch (Exception e) {
                // Unexpected error
                m_log.warn("output() : " + e.getMessage(), e);
                MessageDialog.openInformation(getShell(), Messages.getString("confirmed"),
                        Messages.getString("performance.export.cancel") + " : " + e.getMessage() + "("
                                + e.getClass().getName() + ")");
            } finally {
                writer.setCanceled(true);
                if (ClusterControlPlugin.isRAP()) {
                    FileDownloader.cleanup(filePath);
                }
            }
        }
    });
    createButton(parent, IDialogConstants.CANCEL_ID, "close", false);
}
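
In JFace, throwing InterruptedException from IRunnableWithProgress.run is the documented way for a long-running operation to report cancellation, which is what the loop above does. Reduced to its essential shape (a sketch only, reusing the types from the example):

// Illustrative fragment of the IRunnableWithProgress.run(...) loop above.
private void pollUntilDone(IProgressMonitor monitor, RecordDataWriter writer)
        throws InterruptedException {
    int progress = 0;
    while (progress < 100) {
        progress = writer.getProgress();
        if (monitor.isCanceled()) {
            // ProgressMonitorDialog.run(...) reports this as a cancelled operation.
            throw new InterruptedException("export cancelled");
        }
        Thread.sleep(50);
    }
}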

From source file:com.nttec.everychan.chans.dvach.DvachModule.java

@Override
public String sendPost(SendPostModel model, ProgressListener listener, CancellableTask task) throws Exception {
    String url = getUsingUrl() + model.boardName + "/post";
    ExtendedMultipartBuilder postEntityBuilder = ExtendedMultipartBuilder.create().setDelegates(listener, task)
            .addString("parent", model.threadNumber != null ? model.threadNumber : "0")
            .addString("name", model.name).addString("email", model.email).addString("subject", model.subject)
            .addString("message", model.comment)
            .addString("captcha", TextUtils.isEmpty(model.captchaAnswer) ? "" : model.captchaAnswer)
            .addString("password", model.password);
    if (model.threadNumber != null)
        postEntityBuilder.addString("noko", "on");
    if (model.attachments != null && model.attachments.length > 0)
        postEntityBuilder.addFile("file", model.attachments[0], model.randomHash);

    try {
        cssTest(model.boardName, task);
    } catch (Exception e) {
        Logger.e(TAG, "csstest failed", e);
    }

    if (task != null && task.isCancelled())
        throw new InterruptedException("interrupted");

    HttpRequestModel request = HttpRequestModel.builder().setPOST(postEntityBuilder.build()).setNoRedirect(true)
            .build();
    HttpResponseModel response = null;
    try {
        response = HttpStreamer.getInstance().getFromUrl(url, request, httpClient, null, task);
        if (response.statusCode == 200) {
            ByteArrayOutputStream output = new ByteArrayOutputStream(1024);
            IOUtils.copyStream(response.stream, output);
            String htmlResponse = output.toString("UTF-8");
            if (htmlResponse.contains("?")) {
                if (model.threadNumber == null) {
                    Matcher redirectMatcher = REDIRECT_PATTERN.matcher(htmlResponse);
                    if (redirectMatcher.find()) {
                        UrlPageModel redirModel = new UrlPageModel();
                        redirModel.chanName = CHAN_NAME;
                        redirModel.type = UrlPageModel.TYPE_THREADPAGE;
                        redirModel.boardName = model.boardName;
                        redirModel.threadNumber = redirectMatcher.group(1);
                        return buildUrl(redirModel);
                    }
                }
                return null;
            }
            Matcher errorMatcher = ERROR_PATTERN.matcher(htmlResponse);
            if (errorMatcher.find()) {
                throw new Exception(errorMatcher.group(1));
            }
        } else
            throw new Exception(response.statusCode + " - " + response.statusReason);
    } finally {
        if (response != null)
            response.release();
    }
    return null;
}

From source file:com.bittorrent.mpetazzoni.client.SharedTorrent.java

/**
 * Build this torrent's pieces array.
 *
 * <p>
 * Hash and verify any potentially present local data and create this
 * torrent's pieces array from their respective hash provided in the
 * torrent meta-info.
 * </p>
 *
 * <p>
 * This function should be called soon after the constructor to initialize
 * the pieces array.
 * </p>
 */
public synchronized void init() throws InterruptedException, IOException {
    if (this.isInitialized()) {
        throw new IllegalStateException("Torrent was already initialized!");
    }

    int threads = getHashingThreadsCount();
    int nPieces = (int) (Math.ceil((double) this.getSize() / this.pieceLength));
    int step = 10;

    this.pieces = new Piece[nPieces];
    this.completedPieces = new BitSet(nPieces);
    this.piecesHashes.clear();

    ExecutorService executor = Executors.newFixedThreadPool(threads);
    List<Future<Piece>> results = new LinkedList<Future<Piece>>();

    try {
        logger.info("Analyzing local data for {} with {} threads ({} pieces)...",
                new Object[] { this.getName(), threads, nPieces });
        for (int idx = 0; idx < nPieces; idx++) {
            byte[] hash = new byte[Torrent.PIECE_HASH_SIZE];
            this.piecesHashes.get(hash);

            // The last piece may be shorter than the torrent's global piece
            // length. Let's make sure we get the right piece length in any
            // situation.
            long off = ((long) idx) * this.pieceLength;
            long len = Math.min(this.bucket.size() - off, this.pieceLength);

            this.pieces[idx] = new Piece(this.bucket, idx, off, len, hash, this.isSeeder());

            Callable<Piece> hasher = new Piece.CallableHasher(this.pieces[idx]);
            results.add(executor.submit(hasher));

            if (results.size() >= threads) {
                this.validatePieces(results);
            }

            if (idx / (float) nPieces * 100f > step) {
                logger.info("  ... {}% complete", step);
                step += 10;
            }
        }

        this.validatePieces(results);
    } finally {
        // Request orderly executor shutdown and wait for hashing tasks to
        // complete.
        executor.shutdown();
        while (!executor.isTerminated()) {
            if (this.stop) {
                throw new InterruptedException("Torrent data analysis " + "interrupted.");
            }

            Thread.sleep(10);
        }
    }

    logger.debug("{}: we have {}/{} bytes ({}%) [{}/{} pieces].",
            new Object[] { this.getName(), (this.getSize() - this.left), this.getSize(),
                    String.format("%.1f", (100f * (1f - this.left / (float) this.getSize()))),
                    this.completedPieces.cardinality(), this.pieces.length });
    this.initialized = true;
}
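
A note on the shutdown loop in the finally block: ExecutorService.awaitTermination offers a blocking alternative to polling isTerminated with Thread.sleep. A sketch of the same logic, assuming the executor and stop flag from the example:

executor.shutdown();
// Block for up to one second at a time so the stop flag is still checked regularly.
while (!executor.awaitTermination(1, java.util.concurrent.TimeUnit.SECONDS)) {
    if (this.stop) {
        throw new InterruptedException("Torrent data analysis interrupted.");
    }
}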

From source file:com.p2p.peercds.client.SharedTorrent.java

/**
 * Build this torrent's pieces array.
 *
 * <p>
 * Hash and verify any potentially present local data and create this
 * torrent's pieces array from their respective hash provided in the
 * torrent meta-info.
 * </p>
 *
 * <p>
 * This function should be called soon after the constructor to initialize
 * the pieces array.
 * </p>
 */
public synchronized void init() throws InterruptedException, IOException {
    //      if (this.isInitialized()) {
    //         throw new IllegalStateException("Torrent was already initialized!");
    //      }

    int threads = getHashingThreadsCount();
    int nPieces = (int) (Math.ceil((double) this.getSize() / this.pieceLength));
    int step = 10;

    this.pieces = new Piece[nPieces];
    this.completedPieces = new BitSet(nPieces);
    this.piecesHashes.clear();
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    List<Future<Piece>> results = new LinkedList<Future<Piece>>();

    try {
        logger.info("Analyzing local data for {} with {} threads ({} pieces)...",
                new Object[] { this.getName(), threads, nPieces });
        for (int idx = 0; idx < nPieces; idx++) {
            byte[] hash = new byte[PIECE_HASH_SIZE];
            this.piecesHashes.get(hash);

            // The last piece may be shorter than the torrent's global piece
            // length. Let's make sure we get the right piece length in any
            // situation.
            long off = ((long) idx) * this.pieceLength;
            long len = Math.min(this.bucket.size() - off, this.pieceLength);

            this.pieces[idx] = new Piece(this.bucket, idx, off, len, hash, this.isSeeder());

            Callable<Piece> hasher = new Piece.CallableHasher(this.pieces[idx]);
            results.add(executor.submit(hasher));

            if (results.size() >= threads) {
                this.validatePieces(results);
            }

            if (idx / (float) nPieces * 100f > step) {
                logger.info("  ... {}% complete", step);
                step += 10;
            }
        }

        this.validatePieces(results);
    } finally {
        // Request orderly executor shutdown and wait for hashing tasks to
        // complete.
        executor.shutdown();
        while (!executor.isTerminated()) {
            if (this.stop) {
                throw new InterruptedException("Torrent data analysis " + "interrupted.");
            }

            Thread.sleep(10);
        }
    }

    logger.debug("{}: we have {}/{} bytes ({}%) [{}/{} pieces].",
            new Object[] { this.getName(), (this.getSize() - this.left), this.getSize(),
                    String.format("%.1f", (100f * (1f - this.left / (float) this.getSize()))),
                    this.completedPieces.cardinality(), this.pieces.length });
    this.initialized = true;
}

From source file:com.twitter.common.zookeeper.ZooKeeperClient.java

/**
 * Returns the current active ZK connection or establishes a new one if none has yet been
 * established or a previous connection was disconnected or had its session time out.  This method
 * will attempt to re-use sessions when possible.  Equivalent to:
 * <pre>get(Amount.of(0L, ...)</pre>.
 *
 * @return a connected ZooKeeper client
 * @throws ZooKeeperConnectionException if there was a problem connecting to the ZK cluster
 * @throws InterruptedException if interrupted while waiting for a connection to be established
 */
public synchronized ZooKeeper get() throws ZooKeeperConnectionException, InterruptedException {
    try {
        return get(WAIT_FOREVER);
    } catch (TimeoutException e) {
        InterruptedException interruptedException = new InterruptedException(
                "Got an unexpected TimeoutException for 0 wait");
        interruptedException.initCause(e);
        throw interruptedException;
    }
}
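
Note the initCause call above: InterruptedException provides only the no-argument and (String) constructors, so a cause cannot be passed at construction time and has to be attached in a separate step. A small illustrative helper (not part of the original source):

private static InterruptedException asInterrupted(String message, Throwable cause) {
    // No InterruptedException(String, Throwable) constructor exists,
    // so the cause is attached after construction.
    InterruptedException ie = new InterruptedException(message);
    ie.initCause(cause);
    return ie;
}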

From source file:guru.benson.pinch.Pinch.java

/**
 * Download and inflate file from a ZIP stored on a HTTP server.
 *
 * @param entry
 *     Entry representing the file to download.
 * @param dir
 *     Directory in which to store the downloaded file, or null for the
 *     working directory.
 * @param name
 *     Name of the downloaded file.
 * @param listener
 *     Progress listener, or null if no progress reporting is needed.
 *
 * @throws IOException
 *     If an error occurred while reading from network or writing to disk.
 * @throws InterruptedException
 *     If the thread was interrupted.
 */
public void downloadFile(ExtendedZipEntry entry, String dir, String name, ProgressListener listener)
        throws IOException, InterruptedException {
    HttpURLConnection conn = null;
    InputStream is = null;
    FileOutputStream fos = null;

    try {
        File outFile = new File(dir != null ? dir + File.separator + name : name);

        if (!outFile.exists()) {
            if (outFile.getParentFile() != null) {
                outFile.getParentFile().mkdirs();
            }
        }

        // no need to download 0 byte size directories
        if (entry.isDirectory()) {
            return;
        }

        fos = new FileOutputStream(outFile);

        byte[] buf = new byte[2048];
        int read, bytes = 0;

        conn = getEntryInputStream(entry);

        // this is a stored (non-deflated) file, read it raw without inflating it
        if (entry.getMethod() == ZipEntry.STORED) {
            is = new BufferedInputStream(conn.getInputStream());
        } else {
            is = new InflaterInputStream(conn.getInputStream(), new Inflater(true));
        }

        long totalSize = entry.getSize();
        while ((read = is.read(buf)) != -1) {
            if (Thread.currentThread().isInterrupted()) {
                throw new InterruptedException("Download was interrupted");
            }
            // Ignore any extra data
            if (totalSize < read + bytes) {
                read = ((int) totalSize) - bytes;
            }

            fos.write(buf, 0, read);
            bytes += read;
            if (listener != null) {
                listener.onProgress(bytes, read, totalSize);
            }
        }

        log("Wrote " + bytes + " bytes to " + name);
    } finally {
        close(fos);
        close(is);
        disconnect(conn);
    }
}
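
The interruption check above uses Thread.currentThread().isInterrupted(), which tests the thread's interrupt status without clearing it. The static Thread.interrupted() tests and clears the status, which is sometimes preferred when the InterruptedException is meant to take over carrying that signal; a short sketch of the alternative:

if (Thread.interrupted()) {
    // The interrupt status is now cleared; the exception carries the signal instead.
    throw new InterruptedException("Download was interrupted");
}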

From source file:org.jumpmind.symmetric.SymmetricWebServer.java

public void waitForEnginesToComeOnline(long maxWaitTimeInMs) throws InterruptedException {
    long startTime = System.currentTimeMillis();
    ServletContext servletContext = getServletContext();
    if (servletContext != null) {
        SymmetricEngineHolder engineHolder = ServletUtils.getSymmetricEngineHolder(servletContext);
        while (engineHolder.areEnginesStarting()) {
            AppUtils.sleep(500);
            if ((System.currentTimeMillis() - startTime) > maxWaitTimeInMs) {
                throw new InterruptedException("Timed out waiting for engines to start");
            }
        }
    }
}

From source file:org.apache.mahout.classifier.sequencelearning.hmm.hadoop.BaumWelchDriver.java

/**
 * Run one iteration of the Baum-Welch Map Reduce algorithm using the supplied arguments
 *
 * @param conf                the Configuration to use
 * @param input               the Path to the directory containing input
 * @param modelIn             the Path to the HmmModel
 * @param modelOut            the Path to the output directory
 * @param hiddenStateToIdMap  the Path to the map of hidden states to ids
 * @param emittedStateToIdMap the Path to the map of emitted states to ids
 * @param numHidden           the number of Hidden states
 * @param numObserved         the number of Observed states
 * @param scaling             name of the scaling method
 * @param delta               the convergence delta value
 * @return true or false depending on convergence check
 */

private static boolean runIteration(Configuration conf, Path input, Path modelIn, Path modelOut,
        Path hiddenStateToIdMap, Path emittedStateToIdMap, int numHidden, int numObserved, String scaling,
        String delta) throws IOException, InterruptedException, ClassNotFoundException {

    conf.set(BaumWelchConfigKeys.EMITTED_STATES_MAP_PATH, emittedStateToIdMap.toString());
    conf.set(BaumWelchConfigKeys.HIDDEN_STATES_MAP_PATH, hiddenStateToIdMap.toString());
    conf.set(BaumWelchConfigKeys.SCALING_OPTION_KEY, scaling);
    conf.set(BaumWelchConfigKeys.MODEL_PATH_KEY, modelIn.toString());
    conf.set(BaumWelchConfigKeys.NUMBER_OF_HIDDEN_STATES_KEY, ((Integer) numHidden).toString());
    conf.set(BaumWelchConfigKeys.NUMBER_OF_EMITTED_STATES_KEY, ((Integer) numObserved).toString());
    conf.set(BaumWelchConfigKeys.MODEL_CONVERGENCE_KEY, delta);

    Job job = new Job(conf, "Baum-Welch Driver running runIteration over modelIn: "
            + conf.get(BaumWelchConfigKeys.MODEL_PATH_KEY));
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(MapWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(MapWritable.class);

    job.setInputFormatClass(SequenceFileInputFormat.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    job.setMapperClass(BaumWelchMapper.class);
    job.setCombinerClass(BaumWelchCombiner.class);
    job.setReducerClass(BaumWelchReducer.class);

    FileInputFormat.addInputPath(job, input);
    FileOutputFormat.setOutputPath(job, modelOut);

    job.setJarByClass(BaumWelchDriver.class);
    HadoopUtil.delete(conf, modelOut);
    if (!job.waitForCompletion(true)) {
        throw new InterruptedException("Baum-Welch Iteration failed processing " + modelIn);
    }

    return isConverged(modelIn, modelOut, numHidden, numObserved, conf);
}