Example usage for java.lang InterruptedException InterruptedException

List of usage examples for java.lang InterruptedException InterruptedException

Introduction

On this page you can find example usage for the java.lang InterruptedException(String) constructor.

Prototype

public InterruptedException(String s) 

Source Link

Document

Constructs an InterruptedException with the specified detail message.

Usage

From source file:net.sourceforge.sqlexplorer.sessiontree.model.utility.Dictionary.java

/**
 * Perform full load of dictionary for dbNode
 *
 * @param dbNode DatabaseNode of which to load dictionary information
 * @param monitor ProgressMonitor displayed whilst loading
 * @throws InterruptedException If user cancelled loading
 */
public void load(DatabaseNode dbNode, IProgressMonitor monitor) throws InterruptedException {

    try {

        // check for cancellation by user
        if (monitor.isCanceled()) {
            throw new InterruptedException(Messages.getString("Progress.Dictionary.Cancelled"));
        }

        INode[] children = dbNode.getChildNodes();

        if (children == null) {
            return;
        }

        // start the task with 1000 work units for every root node
        monitor.beginTask(dbNode.getSession().toString(), children.length * ROOT_WORK_UNIT);

        for (int i = 0; i < children.length; i++) {

            // check for cancellation by user
            if (monitor.isCanceled()) {
                throw new InterruptedException(Messages.getString("Progress.Dictionary.Cancelled"));
            }

            INode node = children[i];

            if (node instanceof SchemaNode || node instanceof CatalogNode) {
                loadSchemaCatalog(node, monitor);
            }

        }

        // store the dictionary immediately so that
        // we can reuse it if a second session is opened
        store();

    } finally {
        monitor.done();
    }

}

From source file:com.aol.advertising.qiao.injector.file.AbstractFileReader.java

protected RandomAccessFile openFile(File file) throws InterruptedException {
    RandomAccessFile reader = null;
    while (running && reader == null) {
        try {
            reader = new RandomAccessFile(file, RAF_MODE);
        } catch (FileNotFoundException e) {
            if (dataHandler != null)
                dataHandler.fileNotFound();
        }

        if (reader != null)
            return reader;

        boolean timedOut = CommonUtils.sleepQuietly(fileCheckDelayMillis);
        if (!timedOut) // sleep was interrupted
            break;
    }

    throw new InterruptedException("interrupted");

}

From source file:com.flipkart.poseidon.serviceclients.FutureTaskResultToDomainObjectPromiseWrapper.java

public Map<String, String> getHeaders() throws PromiseBrokenException, InterruptedException {
    try {
        TaskResult taskResult = futureList.get(0).get();
        if (taskResult == null) {
            throw new PromiseBrokenException("Task result is null");
        }

        ServiceResponse<DomainObject> response = (ServiceResponse<DomainObject>) taskResult.getData();
        return response.getHeaders();
    } catch (ExecutionException exception) {
        checkAndThrowServiceClientException(exception);
        promiseBrokenException = new PromiseBrokenException(exception);
        throw new InterruptedException(exception.getMessage());
    } catch (CancellationException exception) {
        promiseBrokenException = new PromiseBrokenException(exception);
        throw new PromiseBrokenException(promiseBrokenException);
    }
}

From source file:org.eclipse.osee.ote.core.test.shells.TelnetShell.java

public synchronized String captureTo(String string) throws InterruptedException {
    int index = inputBuffer.waitFor(string, false, MAX_RESPONSE_TIME);
    if (index < 0) {
        throw new InterruptedException(
                "Waiting for '" + string + "' took longer than " + MAX_RESPONSE_TIME + " milliseconds.");
    }
    return inputBuffer.subString(0, index);
}

From source file:org.voyanttools.trombone.input.index.LuceneIndexer.java

public String index(List<StoredDocumentSource> storedDocumentSources) throws IOException {

    // let's check if we need to create new sources because of tokenization parameters
    if (!parameters.getParameterValue("tokenization", "").isEmpty()) {
        StoredDocumentSourceStorage sourceDocumentSourceStorage = storage.getStoredDocumentSourceStorage();
        String params = parameters.getParameterValue("tokenization");
        for (int i = 0, len = storedDocumentSources.size(); i < len; i++) {
            StoredDocumentSource storedDocumentSource = storedDocumentSources.get(i);
            String id = storedDocumentSource.getId();
            String newId = DigestUtils.md5Hex(id + params);
            InputStream inputStream = sourceDocumentSourceStorage.getStoredDocumentSourceInputStream(id);
            DocumentMetadata metadata = storedDocumentSource.getMetadata();
            metadata.setLastTokenPositionIndex(TokenType.lexical, 0); // crucial to ensure the document is re-analyzed and its metadata re-written
            InputSource inputSource = new InputStreamInputSource(newId, metadata, inputStream);
            storedDocumentSources.set(i, sourceDocumentSourceStorage.getStoredDocumentSource(inputSource));
            inputStream.close();
        }
    }

    List<String> ids = new ArrayList<String>();
    for (StoredDocumentSource storedDocumentSource : storedDocumentSources) {
        ids.add(storedDocumentSource.getId());
    }
    String corpusId = storage.storeStrings(ids);

    // determine if we need to modify the Lucene index
    Collection<StoredDocumentSource> storedDocumentSourceForLucene = new ArrayList<StoredDocumentSource>();
    if (storage.getLuceneManager().directoryExists()) {
        LeafReader reader = SlowCompositeReaderWrapper.wrap(storage.getLuceneManager().getDirectoryReader());
        Terms terms = reader.terms("id");
        if (terms == null) {
            storedDocumentSourceForLucene.addAll(storedDocumentSources);
        } else {
            TermsEnum termsEnum = terms.iterator();
            for (StoredDocumentSource storedDocumentSource : storedDocumentSources) {
                String id = storedDocumentSource.getId();
                if (!termsEnum.seekExact(new BytesRef(id))) {
                    storedDocumentSourceForLucene.add(storedDocumentSource);
                }
            }
        }
    } else {
        storedDocumentSourceForLucene.addAll(storedDocumentSources);
    }

    if (!storedDocumentSourceForLucene.isEmpty()) {

        // index documents (or at least add corpus to document if not already there), we need to get a new writer
        IndexWriter indexWriter = storage.getLuceneManager().getIndexWriter();
        DirectoryReader indexReader = DirectoryReader.open(indexWriter, true);
        IndexSearcher indexSearcher = new IndexSearcher(indexReader);
        boolean verbose = parameters.getParameterBooleanValue("verbose");
        int processors = Runtime.getRuntime().availableProcessors();
        ExecutorService executor;

        // index
        executor = Executors.newFixedThreadPool(processors);
        for (StoredDocumentSource storedDocumentSource : storedDocumentSourceForLucene) {
            Runnable worker = new StoredDocumentSourceIndexer(storage, indexWriter, indexSearcher,
                    storedDocumentSource, corpusId, verbose);
            executor.execute(worker);
        }
        executor.shutdown();
        try {
            if (!executor.awaitTermination(parameters.getParameterIntValue("luceneIndexingTimeout", 60 * 10),
                    TimeUnit.SECONDS)) { // default 10 minutes
                throw new InterruptedException("Lucene indexing has run out of time.");
            }
        } catch (InterruptedException e) {
            throw new RuntimeException("Lucene indexing has been interrupted.", e);
        } finally {

            try {
                indexWriter.commit();
            } catch (IOException e) {
                indexWriter.close(); // this may also throw an exception, but docs say to close on commit error
                throw e;
            }
        }

        // this should almost never be called
        if (parameters.containsKey("forceMerge")) {
            indexWriter.forceMerge(parameters.getParameterIntValue("forceMerge"));
        }

        indexReader = DirectoryReader.open(indexWriter, true);
        storage.getLuceneManager().setDirectoryReader(indexReader); // make sure it's available afterwards            

        // now determine which documents need to be analyzed
        Collection<StoredDocumentSource> storedDocumentSourceForAnalysis = new ArrayList<StoredDocumentSource>();
        for (StoredDocumentSource storedDocumentSource : storedDocumentSourceForLucene) {
            if (storedDocumentSource.getMetadata().getLastTokenPositionIndex(TokenType.lexical) == 0) { // don't re-analyze
                storedDocumentSourceForAnalysis.add(storedDocumentSource);
            }
        }

        if (!storedDocumentSourceForAnalysis.isEmpty()) {
            indexSearcher = new IndexSearcher(indexReader);
            executor = Executors.newFixedThreadPool(processors);
            for (StoredDocumentSource storedDocumentSource : storedDocumentSourceForAnalysis) {
                if (storedDocumentSource.getMetadata().getLastTokenPositionIndex(TokenType.lexical) == 0) { // don't re-analyze
                    Runnable worker = new IndexedDocumentAnalyzer(storage, indexSearcher, storedDocumentSource,
                            corpusId, verbose);
                    executor.execute(worker);
                }
            }
            executor.shutdown();
            try {
                if (!executor.awaitTermination(
                        parameters.getParameterIntValue("luceneAnalysisTimeout", 60 * 10), TimeUnit.SECONDS)) { // default 10 minutes
                    throw new InterruptedException("Lucene analysis has run out of time.");
                }
            } catch (InterruptedException e) {
                throw new RuntimeException("Lucene document analysis has been interrupted.", e);
            }
        }

    }

    return corpusId;

}

From source file:org.apache.hadoop.hbase.regionserver.ServerNonceManager.java

/**
 * Starts the operation if operation with such nonce has not already succeeded. If the
 * operation is in progress, waits for it to end and checks whether it has succeeded.
 * @param group Nonce group.
 * @param nonce Nonce.
 * @param stoppable Stoppable that terminates waiting (if any) when the server is stopped.
 * @return true if the operation has not already succeeded and can proceed; false otherwise.
 */
public boolean startOperation(long group, long nonce, Stoppable stoppable) throws InterruptedException {
    if (nonce == HConstants.NO_NONCE)
        return true;
    NonceKey nk = new NonceKey(group, nonce);
    OperationContext ctx = new OperationContext();
    while (true) {
        OperationContext oldResult = nonces.putIfAbsent(nk, ctx);
        if (oldResult == null)
            return true;

        // Collision with some operation - should be extremely rare.
        synchronized (oldResult) {
            int oldState = oldResult.getState();
            LOG.debug("Conflict detected by nonce: " + nk + ", " + oldResult);
            if (oldState != OperationContext.WAIT) {
                return oldState == OperationContext.PROCEED; // operation ended
            }
            oldResult.setHasWait();
            oldResult.wait(this.conflictWaitIterationMs); // operation is still active... wait and loop
            if (stoppable.isStopped()) {
                throw new InterruptedException("Server stopped");
            }
        }
    }
}

From source file:org.sakuli.starter.SahiConnector.java

/**
 * reconnect method for method {@link #startSahiTestSuite()}
 *
 * @param e the thrown ConnectException or IllegalMonitorStateException
 * @throws InterruptedException if the reconnect delay is interrupted or the maximum number of connect tries is exceeded
 */
protected void reconnect(Exception e) throws InterruptedException, SakuliException {
    logger.warn("Cannot connect to sahi proxy - start Proxy.main()");
    if (countConnections <= sahiProxyProperties.getMaxConnectTries()) {
        logger.info("RECONNECT to sahi proxy in " + sahiProxyProperties.getReconnectSeconds() + " seconds");

        //send thread to sleep
        Thread.sleep(TimeUnit.SECONDS.toMillis(sahiProxyProperties.getReconnectSeconds()));
        this.startSahiTestSuite();
    } else {
        logger.info("Reconnect to sahi proxy unsuccessful - Connection refused");
        throw new InterruptedException(e.getMessage());
    }
}

From source file:Sandbox.ZipExtractor.ProcessZip.java

private void callableWriteToFile() throws FileNotFoundException, InterruptedException, ExecutionException {
    //private void callableWriteToFile(List<ZipFileEntry> partition, int partNo, ExecutorService refreshExecutor) {
    //        int  corePoolSize  = 100;
    //        int  maxPoolSize   = 500;
    //        long keepAliveTime = 5000L;
    //
    //        // Create thread pool for any Manual Refreshes.
    //        ThreadPoolExecutor refreshExecutor = new ThreadPoolExecutor(
    //                corePoolSize,
    //                maxPoolSize,
    //                keepAliveTime,
    //                TimeUnit.MILLISECONDS,
    //                new LinkedBlockingQueue<Runnable>());

    //ExecutorService executorServiceA = Executors.newFixedThreadPool(10);

    ExecutorService executorService = Executors.newCachedThreadPool();

    // List to keep track of all futures (results of Callable)
    List<Future<Boolean>> futures = new ArrayList<Future<Boolean>>();

    int counter = 0;

    for (ZipFileEntry zipFileEntry : practiceLicences) {
        //for (ZipFileEntry zipFileEntry : partition) {
        //System.out.println("\tPartition No: " + partNo + " Processing: " + ++counter);
        //Runnable worker = new WriteToFile("C:\\Temp\\App_Data\\TEST", zipFileEntry);
        //refreshExecutor.execute(worker);

        Callable<Boolean> worker = new WriteToFile("C:\\Temp\\App_Data\\TEST", zipFileEntry);
        Future<Boolean> submit = executorService.submit(worker);

        futures.add(submit);
    }

    // Process futures to create combined list
    for (Future<Boolean> future : futures) {
        try {
            if (future.get().equals(Boolean.FALSE)) {
                throw new FileNotFoundException("Failed to create file.");
            }
        } catch (InterruptedException e) {
            // Write error to log file then re-throw
            throw new InterruptedException(e.getMessage());
        } catch (ExecutionException e) {
            // Write error to log file then re-throw
            throw new ExecutionException(e.getMessage(), e.getCause());
        }
    }

    executorService.shutdown();

    //while(!refreshExecutor.isTerminated()) {
    //}

    //System.out.println("Finished all threads!");
}

From source file:org.alfresco.extension.bulkimport.source.fs.FilesystemBulkImportSource.java

/**
 * This method actually does the work of scanning.
 */
private void scanDirectory(final BulkImportSourceStatus status, final BulkImportCallback callback,
        final File sourceDirectory, final File directory, final boolean submitFiles)
        throws InterruptedException {
    // PRECONDITIONS
    if (sourceDirectory == null)
        throw new IllegalArgumentException("sourceDirectory cannot be null.");
    if (directory == null)
        throw new IllegalArgumentException("directory cannot be null.");

    // Body
    if (debug(log))
        debug(log, "Scanning directory " + directory.getAbsolutePath() + " for "
                + (submitFiles ? "Files" : "Folders") + "...");

    status.setCurrentlyScanning(sourceDirectory.getAbsolutePath());

    final Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> analysedDirectory = directoryAnalyser
            .analyseDirectory(sourceDirectory, directory);

    if (analysedDirectory != null) {
        final List<FilesystemBulkImportItem> directoryItems = analysedDirectory.getFirst();
        final List<FilesystemBulkImportItem> fileItems = analysedDirectory.getSecond();

        if (!submitFiles && directoryItems != null) {
            for (final FilesystemBulkImportItem directoryItem : directoryItems) {
                if (importStatus.isStopping() || Thread.currentThread().isInterrupted())
                    throw new InterruptedException(
                            Thread.currentThread().getName() + " was interrupted. Terminating early.");

                if (!filter(directoryItem)) {
                    callback.submit(directoryItem);
                }
            }
        }

        if (submitFiles && fileItems != null) {
            for (final FilesystemBulkImportItem fileItem : fileItems) {
                if (importStatus.isStopping() || Thread.currentThread().isInterrupted())
                    throw new InterruptedException(
                            Thread.currentThread().getName() + " was interrupted. Terminating early.");

                if (!filter(fileItem)) {
                    callback.submit(fileItem);
                }
            }
        }

        if (debug(log))
            debug(log, "Finished scanning directory " + directory.getAbsolutePath() + ".");

        // Recurse into subdirectories and scan them too
        if (directoryItems != null && directoryItems.size() > 0) {
            if (debug(log))
                debug(log, "Recursing into " + directoryItems.size() + " subdirectories of "
                        + directory.getAbsolutePath());

            for (final FilesystemBulkImportItem directoryItem : directoryItems) {
                if (importStatus.isStopping() || Thread.currentThread().isInterrupted())
                    throw new InterruptedException(
                            Thread.currentThread().getName() + " was interrupted. Terminating early.");

                if (!filter(directoryItem)) {
                    final FilesystemBulkImportItemVersion lastVersion = directoryItem.getVersions().last(); // Directories shouldn't have versions, but grab the last one (which will have the directory file pointer) just in case...

                    if (lastVersion.getContentFile() != null) {
                        scanDirectory(status, callback, sourceDirectory, lastVersion.getContentFile(),
                                submitFiles);
                    } else {
                        if (info(log))
                            info(log, "Directory " + directoryItem.getName()
                                    + " is metadata only - scan will be skipped.");
                    }
                }
            }
        } else {
            if (debug(log))
                debug(log, directory.getAbsolutePath() + " has no subdirectories.");
        }
    }
}

From source file:org.jenkinsmvn.jenkins.api.JenkinsClient.java

public void waitTillAllBuildsDone(String jobName, long pollTimeMillis, long timeOutInMillis)
        throws IOException, InterruptedException {
    JobDetails jobDetails = getJobDetails(jobName, false);

    if (!jobDetails.getBuildable()) {
        return;
    }

    synchronized (jobsCache.get(jobName)) {
        long start = System.currentTimeMillis();
        do {
            if (jobDetails.getQueueItem() == null) {
                if (jobDetails.getLastBuild() == null) {
                    return;
                }

                BuildDetails buildDetails = getBuildDetails(jobDetails.getLastBuild());

                if (!buildDetails.getBuilding()) {
                    return;
                }
            }

            if (System.currentTimeMillis() - start > timeOutInMillis) {
                throw new InterruptedException(String.format("Timeout of %dms reached.", timeOutInMillis));
            }

            Thread.sleep(pollTimeMillis);
            jobDetails = getJobDetails(jobName, false);
        } while (true);
    }
}