List of usage examples for java.lang.InterruptedException
public InterruptedException(String s)

Constructs an InterruptedException with the specified detail message.
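Before the examples, a minimal self-contained sketch (all names hypothetical) of how the detail message passed to this constructor surfaces via getMessage(), together with the standard idiom of restoring the interrupt flag in the handler:

    public class InterruptedExceptionDemo {

        public static void main(String[] args) {
            Thread worker = new Thread(() -> {
                try {
                    doWork();
                } catch (InterruptedException e) {
                    // Standard idiom: restore the interrupt flag for code further up the stack.
                    Thread.currentThread().interrupt();
                    System.out.println("stopped: " + e.getMessage());
                }
            });
            worker.start();
            worker.interrupt();
        }

        static void doWork() throws InterruptedException {
            // Thread.interrupted() clears the flag, so an exception is thrown in its place.
            if (Thread.interrupted()) {
                // The detail message passed here is what getMessage() returns.
                throw new InterruptedException("work cancelled before it started");
            }
            Thread.sleep(1000); // sleep() itself throws InterruptedException if interrupted
        }
    }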
From source file: gda.device.detector.addetector.filewriter.SingleImagePerFileWriter.java

    private void waitForFile(String fullFilePath) throws DeviceException {
        try {
            File f = new File(fullFilePath);
            long numChecks = 0;
            // TODO: add a timeout here in case the file system has gone down
            while (!f.exists()) {
                numChecks++;
                try {
                    Thread.sleep(MILLI_SECONDS_BETWEEN_POLLS);
                } catch (InterruptedException e) {
                    throw new InterruptedException(
                            "ScanBase is interrupted whilst waiting for '" + fullFilePath + "'");
                }
                checkErrorStatus();
                if ((numChecks * MILLI_SECONDS_BETWEEN_POLLS / 1000) > SECONDS_BETWEEN_SLOW_FILE_ARRIVAL_MESSAGES) {
                    InterfaceProvider.getTerminalPrinter()
                            .print("Waiting for file '" + fullFilePath + "' to be created");
                    numChecks = 0;
                }
            }
        } catch (Exception e) {
            throw new DeviceException("Error checking for existence of file '" + fullFilePath + "'", e);
        }
    }
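Note that the fresh InterruptedException thrown in the catch block above is itself caught by the enclosing catch (Exception e) and wrapped in a DeviceException, and the thread's interrupt flag stays cleared. A sketch of the more conventional idiom, with the example's constants swapped for parameters (the method name is an assumption), that restores the flag and chains the cause:

    // Sketch only: sleep between polls, translating an interrupt into an
    // InterruptedException that keeps the original stack trace and leaves
    // the interrupt flag set for callers.
    private static void sleepBetweenPolls(String fullFilePath, long pollMillis) throws InterruptedException {
        try {
            Thread.sleep(pollMillis);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the flag
            InterruptedException ie = new InterruptedException(
                    "interrupted whilst waiting for '" + fullFilePath + "'");
            ie.initCause(e); // InterruptedException has no (String, Throwable) constructor
            throw ie;
        }
    }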
From source file: ch.zhaw.iamp.rct.Controller.java
    private void verifyBinaryState() throws InterruptedException {
        File binary = new File(mainWindow.getExecutableFilePath());

        if (!binary.exists() || !binary.canExecute()) {
            throw new InterruptedException(
                    "The binary at '" + binary.getAbsolutePath() + "' has to be existing and executable.");
        } else {
            System.out.println("[J] The binary at '" + binary.getAbsolutePath() + "' seems to be executable.");
        }
    }
From source file: org.alfresco.extension.bulkfilesystemimport.impl.AbstractBulkFilesystemImporter.java
    private final List<ImportableItem> filterImportableItems(final List<ImportableItem> importableItems)
            throws InterruptedException {
        List<ImportableItem> result = new ArrayList<ImportableItem>();

        if (importableItems != null && importableItems.size() > 0) {
            if (importFilters == null || importFilters.size() == 0) {
                result.addAll(importableItems);
            } else {
                for (final ImportableItem importableItem : importableItems) {
                    if (importStatus.isStopping() || Thread.currentThread().isInterrupted())
                        throw new InterruptedException(
                                Thread.currentThread().getName() + " was interrupted. Terminating early.");

                    boolean filterImportableItem = false;

                    for (final ImportFilter filter : importFilters) {
                        if (filter.shouldFilter(importableItem)) {
                            filterImportableItem = true;
                            break;
                        }
                    }

                    if (!filterImportableItem) {
                        result.add(importableItem);
                    }
                }
            }
        }

        return (result);
    }
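The guard at the top of the loop (application stop flag or thread interrupt flag, then throw) recurs in several of the importer methods below. Factored into a helper, the pattern might look like the sketch that follows; BooleanSupplier stands in for the importer's own status object, which is an assumption:

    import java.util.function.BooleanSupplier;

    final class CancellationGuard {
        private CancellationGuard() {}

        // Cooperative cancellation: long-running loops call this once per iteration
        // and exit early via InterruptedException when asked to stop.
        static void checkCancelled(BooleanSupplier isStopping) throws InterruptedException {
            if (isStopping.getAsBoolean() || Thread.currentThread().isInterrupted()) {
                throw new InterruptedException(
                        Thread.currentThread().getName() + " was interrupted. Terminating early.");
            }
        }
    }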
From source file: net.jodah.failsafe.FailsafeController.java
    /**
     * Checks if the controller was shut down and throws an InterruptedException if it has.
     *
     * @throws InterruptedException if the controller was shut down
     */
    private synchronized void interruptIfShutdown() throws InterruptedException {
        if (shutdownFailure != null) {
            throw new InterruptedException(shutdownFailure.getMessage());
        }
    }
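A guard like this is typically called from inside a wait loop so that a shutdown wakes blocked callers promptly. A hypothetical call site (awaitResult() and the result field are illustrative, not FailsafeController's actual API):

    // Hypothetical caller: block until a result is delivered, failing fast with an
    // InterruptedException once the controller has been shut down.
    private synchronized Object awaitResult() throws InterruptedException {
        while (result == null) {
            interruptIfShutdown(); // throws once shutdownFailure is set
            wait(100);             // wake periodically to re-check both conditions
        }
        return result;
    }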
From source file: com.nttec.everychan.chans.dvach.DvachModule.java
    private void cssTest(String boardName, final CancellableTask task) throws Exception { /* =*.*= */
        class CSSCodeHolder {
            private volatile String cssCode = null;

            public synchronized void setCode(String code) {
                Logger.d(TAG, "set CSS code: " + code);
                if (cssCode == null)
                    cssCode = code;
            }

            public boolean isSet() {
                return cssCode != null;
            }

            public String getCode() {
                return cssCode;
            }
        }

        class WebViewHolder {
            private WebView webView = null;
        }

        final CSSCodeHolder holder = new CSSCodeHolder();
        final WebViewHolder wv = new WebViewHolder();
        final String cssTest = HttpStreamer.getInstance().getStringFromUrl(
                getUsingUrl() + boardName + "/csstest.foo", HttpRequestModel.DEFAULT_GET, httpClient, null,
                task, false);
        long startTime = System.currentTimeMillis();

        Async.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                wv.webView = new WebView(MainApplication.getInstance());
                wv.webView.setWebViewClient(new WebViewClient() {
                    @Override
                    public void onLoadResource(WebView view, String url) {
                        if (url.contains("?code=") && !task.isCancelled()) {
                            holder.setCode(url.substring(url.indexOf("?code=") + 6));
                        }
                    }
                });
                wv.webView.loadDataWithBaseURL("http://127.0.0.1/csstest.foo", cssTest, "text/html", "UTF-8", "");
            }
        });

        while (!holder.isSet()) {
            long time = System.currentTimeMillis() - startTime;
            if ((task != null && task.isCancelled()) || time > 5000)
                break;
            Thread.yield();
        }

        Async.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                try {
                    wv.webView.stopLoading();
                    wv.webView.clearCache(true);
                    wv.webView.destroy();
                } catch (Exception e) {
                    Logger.e(TAG, e);
                }
            }
        });

        if (task != null && task.isCancelled())
            throw new InterruptedException("interrupted");

        String cssCode = holder.getCode();
        if (cssCode != null) {
            HttpStreamer.getInstance().getBytesFromUrl(getUsingUrl() + boardName + "/csstest.foo?code=" + cssCode,
                    HttpRequestModel.DEFAULT_GET, httpClient, null, task, false);
        }
    }
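The busy-wait loop above spins on Thread.yield() until the WebView callback fires or five seconds elapse. The same one-shot handoff can be expressed without spinning using a CountDownLatch; a minimal sketch of that idea (class and method names are illustrative):

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    // Sketch: block until a value is delivered from another thread (e.g. a
    // WebView callback) or a timeout expires, without spinning on Thread.yield().
    class OneShotValue {
        private final CountDownLatch latch = new CountDownLatch(1);
        private volatile String value;

        void set(String v) {
            value = v;
            latch.countDown();
        }

        // await() itself throws InterruptedException if the waiting thread is interrupted.
        String await(long timeoutMillis) throws InterruptedException {
            latch.await(timeoutMillis, TimeUnit.MILLISECONDS);
            return value; // null if the timeout expired first
        }
    }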
From source file: org.alfresco.extension.bulkimport.impl.BatchImporterImpl.java
    private final void importFile(final NodeRef nodeRef, final BulkImportItem<BulkImportItemVersion> item,
            final boolean dryRun) throws InterruptedException {
        final int numberOfVersions = item.getVersions().size();

        if (numberOfVersions == 0) {
            throw new IllegalStateException(
                    item.getName() + " (being imported into " + String.valueOf(nodeRef) + ") has no versions.");
        } else if (numberOfVersions == 1) {
            importVersion(nodeRef, null, item.getVersions().first(), dryRun, true);
        } else {
            final BulkImportItemVersion firstVersion = item.getVersions().first();
            BulkImportItemVersion previousVersion = null;

            // Add the cm:versionable aspect if it isn't already there
            if (firstVersion.getAspects() == null || firstVersion.getAspects().isEmpty()
                    || (!firstVersion.getAspects().contains(ContentModel.ASPECT_VERSIONABLE.toString())
                            && !firstVersion.getAspects().contains(ContentModel.ASPECT_VERSIONABLE.toPrefixString()))) {
                if (debug(log))
                    debug(log, item.getName() + " has versions but is missing the cm:versionable aspect. Adding it.");

                nodeService.addAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE, null);
            }

            for (final BulkImportItemVersion version : item.getVersions()) {
                if (importStatus.isStopping() || Thread.currentThread().isInterrupted())
                    throw new InterruptedException(
                            Thread.currentThread().getName() + " was interrupted. Terminating early.");

                importVersion(nodeRef, previousVersion, version, dryRun, false);
                previousVersion = version;
            }
        }

        if (trace(log))
            trace(log, "Finished importing " + numberOfVersions + " version" + (numberOfVersions == 1 ? "" : "s")
                    + " of file " + item.getName() + ".");
    }
From source file: org.alfresco.extension.bulkfilesystemimport.impl.AbstractBulkFilesystemImporter.java
    private final List<List<ImportableItem>> batchImportableItems(final List<ImportableItem> importableItems)
            throws InterruptedException {
        List<List<ImportableItem>> result = new ArrayList<List<ImportableItem>>();
        int currentBatch = 0;
        int currentBatchWeight = 0;

        result.add(new ArrayList<ImportableItem>());

        for (final ImportableItem importableItem : importableItems) {
            if (importStatus.isStopping() || Thread.currentThread().isInterrupted())
                throw new InterruptedException(
                        Thread.currentThread().getName() + " was interrupted. Terminating early.");

            result.get(currentBatch).add(importableItem);
            currentBatchWeight += importableItem.weight();

            if (currentBatchWeight >= batchWeight) {
                result.add(new ArrayList<ImportableItem>());
                currentBatch++;
                currentBatchWeight = 0;
            }
        }

        return (result);
    }
From source file: org.alfresco.extension.bulkfilesystemimport.impl.AbstractBulkFilesystemImporter.java
    private final List<Pair<NodeRef, File>> importImportableItemBatches(final NodeRef target,
            final String sourceRoot, final List<List<ImportableItem>> batches, final boolean replaceExisting,
            final boolean inPlaceImport) throws InterruptedException {
        List<Pair<NodeRef, File>> result = new ArrayList<Pair<NodeRef, File>>();

        if (batches != null) {
            for (final List<ImportableItem> batch : batches) {
                if (importStatus.isStopping() || Thread.currentThread().isInterrupted())
                    throw new InterruptedException(
                            Thread.currentThread().getName() + " was interrupted. Terminating early.");

                result.addAll(importBatchInTxn(target, sourceRoot, batch, replaceExisting, inPlaceImport));
            }
        }

        return (result);
    }
From source file: org.apache.marmotta.kiwi.sparql.persistence.KiWiSparqlConnection.java
    /**
     * Evaluate a statement pattern join or filter on the database by translating it into an
     * appropriate SQL statement. Copied and adapted from KiWiReasoningConnection.query()
     *
     * @param join
     * @param dataset
     * @return
     */
    public CloseableIteration<BindingSet, SQLException> evaluateJoin(TupleExpr join, final BindingSet bindings,
            final Dataset dataset) throws SQLException, InterruptedException {
        Preconditions.checkArgument(join instanceof Join || join instanceof Filter
                || join instanceof StatementPattern || join instanceof Distinct || join instanceof Slice
                || join instanceof Reduced);

        // some definitions
        String[] positions = new String[] { "subject", "predicate", "object", "context" };

        // collect all patterns in a list, using depth-first search over the join
        List<StatementPattern> patterns = new PatternCollector(join).patterns;

        long offset = new LimitFinder(join).offset;
        long limit = new LimitFinder(join).limit;

        boolean distinct = new DistinctFinder(join).distinct;

        // associate a name with each pattern; the names are used in the database query to refer to the triple
        // that matched this pattern and in the construction of variable names for the HQL query
        int patternCount = 0;
        final Map<StatementPattern, String> patternNames = new HashMap<StatementPattern, String>();
        for (StatementPattern p : patterns) {
            patternNames.put(p, "P" + (++patternCount));
        }

        // find all variables occurring in the patterns and create a map to map them to
        // field names in the database query; each variable will have one or several field names,
        // one for each pattern it occurs in; field names are constructed automatically by a counter
        // and the pattern name to ensure the name is a valid HQL identifier
        int variableCount = 0;

        // a map for the variable names; will look like { ?x -> "V1", ?y -> "V2", ... }
        final Map<Var, String> variableNames = new HashMap<>();

        // a map for mapping variables to field names; each variable might have one or more field names,
        // depending on the number of patterns it occurs in; will look like
        // { ?x -> ["P1_V1", "P2_V1"], ?y -> ["P2_V2"], ... }
        Map<Var, List<String>> queryVariables = new HashMap<>();
        Map<Var, List<String>> queryVariableIds = new HashMap<>();

        // a map for defining alternative context values for each variable used in the context part of a pattern
        Map<StatementPattern, List<Resource>> variableContexts = new HashMap<>();

        for (StatementPattern p : patterns) {
            // check graph restrictions in datasets (MARMOTTA-340)
            Resource[] contexts;
            Value contextValue = p.getContextVar() != null ? p.getContextVar().getValue() : null;

            Set<URI> graphs = null;
            boolean emptyGraph = false;

            if (dataset != null) {
                if (p.getScope() == StatementPattern.Scope.DEFAULT_CONTEXTS) {
                    graphs = dataset.getDefaultGraphs();
                    emptyGraph = graphs.isEmpty() && !dataset.getNamedGraphs().isEmpty();
                } else {
                    graphs = dataset.getNamedGraphs();
                    emptyGraph = graphs.isEmpty() && !dataset.getDefaultGraphs().isEmpty();
                }
            }

            if (emptyGraph) {
                // Search zero contexts
                return new EmptyIteration<BindingSet, SQLException>();
            } else if (graphs == null || graphs.isEmpty()) {
                if (contextValue != null) {
                    contexts = new Resource[] { (Resource) contextValue };
                } else {
                    contexts = new Resource[0];
                }
            } else if (contextValue != null) {
                if (graphs.contains(contextValue)) {
                    contexts = new Resource[] { (Resource) contextValue };
                } else {
                    // Statement pattern specifies a context that is not part of the dataset
                    return new EmptyIteration<BindingSet, SQLException>();
                }
            } else {
                contexts = new Resource[graphs.size()];
                int i = 0;
                for (URI graph : graphs) {
                    URI context = null;
                    if (!SESAME.NIL.equals(graph)) {
                        context = graph;
                    }
                    contexts[i++] = context;
                }
            }

            // build pattern
            Var[] fields = new Var[] { p.getSubjectVar(), p.getPredicateVar(), p.getObjectVar(), p.getContextVar() };
            for (int i = 0; i < fields.length; i++) {
                if (fields[i] != null && !fields[i].hasValue()) {
                    Var v = fields[i];
                    if (variableNames.get(v) == null) {
                        variableNames.put(v, "V" + (++variableCount));
                        queryVariables.put(v, new LinkedList<String>());
                        queryVariableIds.put(v, new LinkedList<String>());
                    }
                    String pName = patternNames.get(p);
                    String vName = variableNames.get(v);
                    if (hasNodeCondition(fields[i], join)) {
                        queryVariables.get(v).add(pName + "_" + positions[i] + "_" + vName);
                    }
                    queryVariableIds.get(v).add(pName + "." + positions[i]);
                }
            }

            // build an OR query for the value of the context variable
            if (contexts.length > 0) {
                variableContexts.put(p, Arrays.asList(contexts));
            }
        }

        // build the select clause by projecting for each query variable the first name
        StringBuilder selectClause = new StringBuilder();
        if (distinct) {
            selectClause.append("DISTINCT ");
        }
        final List<Var> selectVariables = new LinkedList<Var>();
        for (Iterator<Var> it = queryVariableIds.keySet().iterator(); it.hasNext();) {
            Var v = it.next();
            String projectedName = variableNames.get(v);
            String fromName = queryVariableIds.get(v).get(0);
            selectClause.append(fromName);
            selectClause.append(" as ");
            selectClause.append(projectedName);
            if (it.hasNext()) {
                selectClause.append(", ");
            }
            selectVariables.add(v);
        }

        // build the from-clause of the query; the from clause is constructed as follows:
        // 1. for each pattern P, there will be a "KiWiTriple P" in the from clause
        // 2. for each variable V in P occurring in
        //    - subject, there will be a "inner join P.subject as P_S_V" or "left outer join P.subject as P_S_V",
        //      depending on whether the "optional" parameter is false or true
        //    - property, there will be a "inner join P.property as P_P_V" or "left outer join p.property as P_P_V"
        //    - object, there will be a "inner join P.object as P_O_V" or "left outer join p.object as P_O_V"
        //    - context, there will be a "inner join P.context as P_C_V" or "left outer join p.context as P_C_V"
        StringBuilder fromClause = new StringBuilder();
        for (Iterator<StatementPattern> it = patterns.iterator(); it.hasNext();) {
            StatementPattern p = it.next();
            String pName = patternNames.get(p);
            fromClause.append("triples " + pName);

            Var[] fields = new Var[] { p.getSubjectVar(), p.getPredicateVar(), p.getObjectVar(), p.getContextVar() };
            for (int i = 0; i < fields.length; i++) {
                if (fields[i] != null && !fields[i].hasValue() && hasNodeCondition(fields[i], join)) {
                    String vName = variableNames.get(fields[i]);
                    fromClause.append(" INNER JOIN nodes AS ");
                    fromClause.append(pName + "_" + positions[i] + "_" + vName);
                    fromClause.append(" ON " + pName + "." + positions[i] + " = ");
                    fromClause.append(pName + "_" + positions[i] + "_" + vName + ".id ");
                }
            }

            if (it.hasNext()) {
                fromClause.append(",\n ");
            }
        }

        // build the where clause as follows:
        // 1. iterate over all patterns and for each resource and literal field in subject,
        //    property, object, or context, and set a query condition according to the
        //    nodes given in the pattern
        // 2. for each variable that has more than one occurrences, add a join condition
        // 3. for each variable in the initialBindings, add a condition to the where clause

        // list of where conditions that will later be connected by AND
        List<String> whereConditions = new LinkedList<String>();

        // 1. iterate over all patterns and for each resource and literal field in subject,
        //    property, object, or context, and set a query condition according to the
        //    nodes given in the pattern
        for (StatementPattern p : patterns) {
            String pName = patternNames.get(p);
            Var[] fields = new Var[] { p.getSubjectVar(), p.getPredicateVar(), p.getObjectVar(), p.getContextVar() };
            for (int i = 0; i < fields.length; i++) {
                // find node id of the resource or literal field and use it in the where clause
                // in this way we can avoid setting too many query parameters
                long nodeId = -1;
                if (fields[i] != null && fields[i].hasValue()) {
                    Value v = valueFactory.convert(fields[i].getValue());
                    if (v instanceof KiWiNode) {
                        nodeId = ((KiWiNode) v).getId();
                    } else {
                        throw new IllegalArgumentException(
                                "the values in this query have not been created by the KiWi value factory");
                    }

                    if (nodeId >= 0) {
                        String condition = pName + "." + positions[i] + " = " + nodeId;
                        whereConditions.add(condition);
                    }
                }
            }
        }

        // 2. for each variable that has more than one occurrences, add a join condition
        for (Var v : queryVariableIds.keySet()) {
            List<String> vNames = queryVariableIds.get(v);
            for (int i = 1; i < vNames.size(); i++) {
                String vName1 = vNames.get(i - 1);
                String vName2 = vNames.get(i);
                whereConditions.add(vName1 + " = " + vName2);
            }
        }

        // 3. for each variable in the initialBindings, add a condition to the where clause setting it
        //    to the node given as binding
        if (bindings != null) {
            for (String v : bindings.getBindingNames()) {
                for (Map.Entry<Var, List<String>> entry : queryVariableIds.entrySet()) {
                    if (entry.getKey().getName() != null && entry.getKey().getName().equals(v)
                            && entry.getValue() != null && entry.getValue().size() > 0) {
                        List<String> vNames = entry.getValue();
                        String vName = vNames.get(0);
                        Value binding = valueFactory.convert(bindings.getValue(v));
                        if (binding instanceof KiWiNode) {
                            whereConditions.add(vName + " = " + ((KiWiNode) binding).getId());
                        } else {
                            throw new IllegalArgumentException(
                                    "the values in this binding have not been created by the KiWi value factory");
                        }
                    }
                }
            }
        }

        // 4. for each pattern, ensure that the matched triple is not marked as deleted
        for (StatementPattern p : patterns) {
            String pName = patternNames.get(p);
            whereConditions.add(pName + ".deleted = false");
        }

        // 5. for each filter condition, add a statement to the where clause
        List<ValueExpr> filters = new FilterCollector(join).filters;
        for (ValueExpr expr : filters) {
            whereConditions.add(evaluateExpression(expr, queryVariables, null));
        }

        // 6. for each context variable with a restricted list of contexts, we add a condition to the where clause
        //    of the form (V.id = R1.id OR V.id = R2.id ...)
        for (Map.Entry<StatementPattern, List<Resource>> vctx : variableContexts.entrySet()) {
            // the variable
            String varName = patternNames.get(vctx.getKey());

            // the string we are building
            StringBuilder cCond = new StringBuilder();
            cCond.append("(");
            for (Iterator<Resource> it = vctx.getValue().iterator(); it.hasNext();) {
                Value v = valueFactory.convert(it.next());
                if (v instanceof KiWiNode) {
                    long nodeId = ((KiWiNode) v).getId();
                    cCond.append(varName);
                    cCond.append(".context = ");
                    cCond.append(nodeId);
                    if (it.hasNext()) {
                        cCond.append(" OR ");
                    }
                } else {
                    throw new IllegalArgumentException(
                            "the values in this query have not been created by the KiWi value factory");
                }
            }
            cCond.append(")");
            whereConditions.add(cCond.toString());
        }

        // construct the where clause
        StringBuilder whereClause = new StringBuilder();
        for (Iterator<String> it = whereConditions.iterator(); it.hasNext();) {
            whereClause.append(it.next());
            whereClause.append("\n ");
            if (it.hasNext()) {
                whereClause.append("AND ");
            }
        }

        // construct limit and offset
        StringBuilder limitClause = new StringBuilder();
        if (limit > 0) {
            limitClause.append("LIMIT ");
            limitClause.append(limit);
            limitClause.append(" ");
        }
        if (offset >= 0) {
            limitClause.append("OFFSET ");
            limitClause.append(offset);
            limitClause.append(" ");
        }

        // build the query string
        String queryString = "SELECT " + selectClause + "\n " + "FROM " + fromClause + "\n " + "WHERE "
                + whereClause + "\n " + limitClause;

        log.debug("original SPARQL syntax tree:\n {}", join);
        log.debug("constructed SQL query string:\n {}", queryString);
        log.debug("SPARQL -> SQL node variable mappings:\n {}", queryVariables);
        log.debug("SPARQL -> SQL ID variable mappings:\n {}", queryVariableIds);

        final PreparedStatement queryStatement = parent.getJDBCConnection().prepareStatement(queryString);
        if (parent.getDialect().isCursorSupported()) {
            queryStatement.setFetchSize(parent.getConfiguration().getCursorSize());
        }

        Future<ResultSet> queryFuture = executorService.submit(new Callable<ResultSet>() {
            @Override
            public ResultSet call() throws Exception {
                try {
                    return queryStatement.executeQuery();
                } catch (SQLException ex) {
                    if (Thread.interrupted()) {
                        log.info("SQL query execution cancelled; not returning result (Thread={})",
                                Thread.currentThread());
                        throw new InterruptedException("SPARQL query execution cancelled");
                    } else {
                        throw ex;
                    }
                }
            }
        });

        try {
            ResultSet result = queryFuture.get();

            ResultSetIteration<BindingSet> it = new ResultSetIteration<BindingSet>(result, true,
                    new ResultTransformerFunction<BindingSet>() {
                        @Override
                        public BindingSet apply(ResultSet row) throws SQLException {
                            MapBindingSet resultRow = new MapBindingSet();

                            long[] nodeIds = new long[selectVariables.size()];
                            for (int i = 0; i < selectVariables.size(); i++) {
                                nodeIds[i] = row.getLong(variableNames.get(selectVariables.get(i)));
                            }
                            KiWiNode[] nodes = parent.loadNodesByIds(nodeIds);

                            for (int i = 0; i < selectVariables.size(); i++) {
                                Var v = selectVariables.get(i);
                                resultRow.addBinding(v.getName(), nodes[i]);
                            }

                            if (bindings != null) {
                                for (Binding binding : bindings) {
                                    resultRow.addBinding(binding);
                                }
                            }
                            return resultRow;
                        }
                    });

            // materialize result to avoid having more than one result set open at the same time
            return new CloseableIteratorIteration<BindingSet, SQLException>(Iterations.asList(it).iterator());
        } catch (InterruptedException | CancellationException e) {
            log.info("SPARQL query execution cancelled");
            queryFuture.cancel(true);
            queryStatement.cancel();
            queryStatement.close();

            throw new InterruptedException("SPARQL query execution cancelled");
        } catch (ExecutionException e) {
            log.error("error executing SPARQL query", e.getCause());
            if (e.getCause() instanceof SQLException) {
                throw (SQLException) e.getCause();
            } else if (e.getCause() instanceof InterruptedException) {
                throw (InterruptedException) e.getCause();
            } else {
                throw new SQLException("error executing SPARQL query", e);
            }
        }
    }
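The cancellation handling above (cancel the Future, then cancel and close the statement, then rethrow InterruptedException) generalises to any JDBC query submitted to an executor. A condensed sketch of the same shape, assuming the caller supplies the executor and an already-prepared statement (names are illustrative):

    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.concurrent.CancellationException;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Future;

    final class CancellableQuery {
        private CancellableQuery() {}

        // Run the query on a worker thread; if the waiting thread is interrupted or
        // the task is cancelled, abort both the task and the server-side query, then
        // signal the caller with an InterruptedException.
        static ResultSet execute(ExecutorService executor, PreparedStatement stmt)
                throws SQLException, InterruptedException {
            Future<ResultSet> future = executor.submit(stmt::executeQuery);
            try {
                return future.get();
            } catch (InterruptedException | CancellationException e) {
                future.cancel(true); // interrupt the worker thread
                stmt.cancel();       // ask the JDBC driver to abort the running query
                stmt.close();
                throw new InterruptedException("query execution cancelled");
            } catch (ExecutionException e) {
                if (e.getCause() instanceof SQLException) {
                    throw (SQLException) e.getCause();
                }
                throw new SQLException("error executing query", e);
            }
        }
    }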
From source file: org.alfresco.extension.bulkfilesystemimport.impl.AbstractBulkFilesystemImporter.java
    private final List<Pair<NodeRef, File>> importBatch(final NodeRef target, final String sourcePath,
            final List<ImportableItem> batch, final boolean replaceExisting, final boolean inPlaceImport)
            throws InterruptedException {
        List<Pair<NodeRef, File>> result = new ArrayList<Pair<NodeRef, File>>();

        for (final ImportableItem importableItem : batch) {
            if (importStatus.isStopping() || Thread.currentThread().isInterrupted())
                throw new InterruptedException(
                        Thread.currentThread().getName() + " was interrupted. Terminating early.");

            NodeRef nodeRef = importImportableItem(target, sourcePath, importableItem, replaceExisting,
                    inPlaceImport);

            // If it's a directory, add it to the list of sub-directories to be processed
            if (nodeRef != null && importableItem.getHeadRevision().contentFileExists()
                    && ImportableItem.FileType.DIRECTORY.equals(importableItem.getFileType())) {
                result.add(new Pair<NodeRef, File>(nodeRef, importableItem.getHeadRevision().getContentFile()));
            }
        }

        return (result);
    }