List of usage examples for java.lang.InterruptedException
public InterruptedException(String s)
Constructs an InterruptedException with the specified detail message.
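A minimal standalone illustration of constructing and catching the exception with a detail message (class name and message text are hypothetical, not drawn from the sources below):

    public class InterruptedExceptionDemo {
        public static void main(String[] args) {
            try {
                // construct with a detail message describing why the wait was abandoned
                throw new InterruptedException("gave up waiting for worker shutdown");
            } catch (InterruptedException e) {
                System.out.println(e.getMessage()); // prints the detail message
            }
        }
    }

From source file: org.apache.hadoop.hdfs.PeerCache.java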
    /**
     * Periodically check the cache and expire the entries
     * older than expiryPeriod milliseconds.
     */
    private void run() throws InterruptedException {
        for (long lastExpiryTime = Time.monotonicNow(); !Thread.interrupted(); Thread.sleep(expiryPeriod)) {
            final long elapsed = Time.monotonicNow() - lastExpiryTime;
            if (elapsed >= expiryPeriod) {
                evictExpired(expiryPeriod);
                lastExpiryTime = Time.monotonicNow();
            }
        }
        clear();
        throw new InterruptedException("Daemon Interrupted");
    }
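Both exits from this loop are interrupt-driven: Thread.interrupted() notices an interrupt delivered between sleeps, and Thread.sleep() throws if one arrives mid-sleep. A minimal, self-contained sketch of the same shutdown pattern (the class and messages here are hypothetical, not from PeerCache):

    public class DaemonInterruptDemo {
        public static void main(String[] args) throws InterruptedException {
            Thread daemon = new Thread(() -> {
                try {
                    while (!Thread.interrupted()) {
                        Thread.sleep(100); // throws if interrupted while sleeping
                    }
                    // interrupted() returned true: the flag was set between sleeps
                    throw new InterruptedException("Daemon Interrupted");
                } catch (InterruptedException e) {
                    System.out.println("daemon stopped: " + e.getMessage());
                }
            });
            daemon.start();
            Thread.sleep(300);   // let the daemon run a few iterations
            daemon.interrupt();  // request shutdown
            daemon.join();
        }
    }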
From source file: org.openscore.worker.management.services.InBuffer.java

    public void addExecutionMessage(ExecutionMessage msg) throws InterruptedException {
        syncManager.startGetMessages();
        // this is a public method that can push new executions from outside - from execution threads
        // We need to check if the current execution thread was interrupted while waiting for the lock
        if (Thread.currentThread().isInterrupted()) {
            throw new InterruptedException(
                    "Thread was interrupted while waiting on the lock in fillBufferPeriodically()!");
        }
        addExecutionMessageInner(msg);
        // note: finishGetMessages() is skipped when the exception above is thrown;
        // the io.cloudslang variant below wraps the same logic in try/finally
        syncManager.finishGetMessages();
    }
From source file: org.firmata4j.firmata.FirmataDevice.java

    @Override
    public void ensureInitializationIsDone() throws InterruptedException {
        if (!started.get()) {
            try {
                start();
            } catch (IOException ex) {
                // converts the I/O failure into an InterruptedException,
                // keeping only its message
                throw new InterruptedException(ex.getMessage());
            }
        }
        long timePassed = 0L;
        long timeout = 100;
        while (!isReady()) {
            if (timePassed >= deviceConfiguration.getInitializationTimeout()) {
                throw new InterruptedException("Connection timeout");
            }
            timePassed += timeout;
            Thread.sleep(timeout);
        }
    }
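InterruptedException has no constructor that accepts a cause, so wrapping the IOException as above discards its stack trace. If the original failure matters, Throwable.initCause(...) can attach it. A sketch under the same assumptions as the method above (start() and the surrounding class are assumed, as in FirmataDevice):

    private void startOrInterrupt() throws InterruptedException {
        try {
            start(); // assumed to exist and to throw IOException, as in FirmataDevice
        } catch (IOException ex) {
            InterruptedException ie = new InterruptedException(ex.getMessage());
            ie.initCause(ex); // legal here: the constructor above sets no cause
            throw ie;
        }
    }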
From source file: io.cloudslang.worker.management.services.InBuffer.java

    public void addExecutionMessage(ExecutionMessage msg) throws InterruptedException {
        try {
            syncManager.startGetMessages();
            // this is a public method that can push new executions from outside - from execution threads
            // We need to check if the current execution thread was interrupted while waiting for the lock
            if (Thread.currentThread().isInterrupted()) {
                throw new InterruptedException(
                        "Thread was interrupted while waiting on the lock in fillBufferPeriodically()!");
            }
            addExecutionMessageInner(msg);
        } finally {
            syncManager.finishGetMessages();
        }
    }
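On the caller side, a method that declares throws InterruptedException like this one is usually either propagated or swallowed only after restoring the interrupt flag. A hypothetical caller (inBuffer and submit() are illustrative names, not from the cloudslang source):

    // Hypothetical caller: swallows the exception, but first restores the interrupt
    // flag so code further up the call stack can still observe the interruption.
    void submit(ExecutionMessage msg) {
        try {
            inBuffer.addExecutionMessage(msg);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the flag before returning
        }
    }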
From source file: org.apache.mahout.regression.penalizedlinear.PenalizedLinearDriver.java

    private void buildRegressionModelMR(PenalizedLinearParameter parameter, Path input, Path output)
            throws IOException, InterruptedException, ClassNotFoundException {
        Job job = prepareJob(input, output, SequenceFileInputFormat.class, PenalizedLinearMapper.class,
                Text.class, VectorWritable.class, PenalizedLinearReducer.class, Text.class,
                VectorWritable.class, SequenceFileOutputFormat.class);
        job.setJobName("Penalized Linear Regression Driver running over input: " + input);
        job.setNumReduceTasks(1);
        job.setJarByClass(PenalizedLinearDriver.class);
        Configuration conf = job.getConfiguration();
        conf.setInt(PenalizedLinearKeySet.NUM_CV, parameter.getNumOfCV());
        conf.setFloat(PenalizedLinearKeySet.ALPHA, parameter.getAlpha());
        conf.set(PenalizedLinearKeySet.LAMBDA, parameter.getLambda());
        conf.setBoolean(PenalizedLinearKeySet.INTERCEPT, parameter.isIntercept());
        if (!job.waitForCompletion(true)) {
            throw new InterruptedException("Penalized Linear Regression Job failed processing " + input);
        }
    }
From source file: it.eng.spagobi.studio.core.services.dataset.DeployDatasetService.java

    /**
     * If the document has metadata associated, do the automated deploy.
     *
     * @return whether the automated deploy has been done
     */
    public boolean tryAutomaticDeploy() {
        logger.debug("IN");
        IStructuredSelection sel = (IStructuredSelection) selection;
        // go on only if you selected a document
        Object objSel = sel.toList().get(0);
        org.eclipse.core.internal.resources.File fileSel =
                (org.eclipse.core.internal.resources.File) objSel;
        projectname = fileSel.getProject().getName();
        // if the file has dataset metadata associated, upload it; else call the wizard
        String datasetId = null;
        String datasetLabel = null;
        String datasetCategory = null;
        try {
            datasetId = fileSel.getPersistentProperty(SpagoBIStudioConstants.DATASET_ID);
            datasetLabel = fileSel.getPersistentProperty(SpagoBIStudioConstants.DATASET_LABEL);
            datasetCategory = fileSel.getPersistentProperty(SpagoBIStudioConstants.DATASET_CATEGORY);
        } catch (CoreException e) {
            logger.error("Error in retrieving dataset Label", e);
        }
        // if the selected file already has a dataset id associated, do the upload
        // without asking for further information
        boolean automatic = false;
        boolean newDeployFromOld = false;
        if (datasetId != null) {
            logger.debug("Query already associated to dataset " + datasetId + " - " + datasetLabel
                    + " - of category " + datasetCategory);
            final Integer idInteger = Integer.valueOf(datasetId);
            final String label2 = datasetLabel;
            final org.eclipse.core.internal.resources.File fileSel2 = fileSel;
            final NoDocumentException datasetException = new NoDocumentException();
            final NoActiveServerException noActiveServerException = new NoActiveServerException();
            final NotAllowedOperationStudioException notAllowedOperationStudioException =
                    new NotAllowedOperationStudioException();
            IRunnableWithProgress op = new IRunnableWithProgress() {
                public void run(IProgressMonitor monitor) throws InvocationTargetException {
                    monitor.beginTask("Deploying to existing dataset with label: " + label2
                            + " and ID: " + idInteger, IProgressMonitor.UNKNOWN);
                    if (projectname == null) {
                        projectname = fileSel2.getProject().getName();
                    }
                    try {
                        logger.debug("dataset associated, upload the query to dataset " + label2);
                        SpagoBIServerObjectsFactory spagoBIServerObjects =
                                new SpagoBIServerObjectsFactory(projectname);
                        // check the dataset still exists
                        IDataSet ds = spagoBIServerObjects.getServerDatasets().getDataSet(idInteger);
                        if (ds == null) {
                            datasetException.setNoDocument(true);
                            logger.warn("Dataset no more present on server: with id " + idInteger);
                        } else {
                            logger.debug("update query to dataset");
                            String queryStr = null;
                            String adaptedQueryStrList = null;
                            try {
                                JSONObject obj = JSONReader.createJSONObject(fileSel2);
                                queryStr = obj.optString("query");
                                logger.debug("query is " + queryStr);
                                adaptedQueryStrList = DeployDatasetService.adaptQueryToList(queryStr);
                                logger.debug("adapted query list is " + adaptedQueryStrList);
                                // only the query may be modified by the metadata, so it is
                                // the only part refreshed
                                ds.addToConfiguration(Dataset.QBE_JSON_QUERY, adaptedQueryStrList);
                                // ds.setJsonQuery(adaptedQueryStrList);
                                datasetException.setNoDocument(false);
                                Integer dsId = spagoBIServerObjects.getServerDatasets().saveDataSet(ds);
                                if (dsId == null) {
                                    logger.error("Error while uploading dataset on server; check log on server");
                                    MessageDialog.openError(
                                            PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(),
                                            "Error on deploy",
                                            "Error while uploading dataset; check server log to have details");
                                }
                                BiObjectUtilities.setFileDataSetMetaData(fileSel2, ds);
                            } catch (Exception e) {
                                logger.error("error in reading JSON object, update failed", e);
                            }
                        }
                    } catch (NoActiveServerException e1) {
                        // no active server found
                        noActiveServerException.setNoServer(true);
                    } catch (RemoteException e) {
                        if (e.getClass().toString().equalsIgnoreCase(
                                "class it.eng.spagobi.sdk.exceptions.NotAllowedOperationException")) {
                            logger.error("Current User has no permission to deploy dataset", e);
                            notAllowedOperationStudioException.setNotAllowed(true);
                        } else {
                            logger.error("Error communicating with server", e);
                            MessageDialog.openError(
                                    PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(),
                                    "Error communicating with server",
                                    "Error while uploading the template: missing communication with server");
                        }
                    }
                    monitor.done();
                    if (monitor.isCanceled()) {
                        logger.error("Operation not ended",
                                new InterruptedException("The long running operation was cancelled"));
                    }
                }
            };
            ProgressMonitorDialog dialog = new ProgressMonitorDialog(
                    PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell());
            try {
                dialog.run(true, true, op);
            } catch (InvocationTargetException e1) {
                logger.error("Error communicating with server", e1);
                MessageDialog.openError(PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(),
                        "Error",
                        "Missing communication with server; check server definition and if service is available");
                dialog.close();
                return false;
            } catch (InterruptedException e1) {
                logger.error("Error communicating with server", e1);
                MessageDialog.openError(PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(),
                        "Error",
                        "Missing communication with server; check server definition and if service is available");
                dialog.close();
                return false;
            }
            if (datasetException.isNoDocument()) {
                logger.error("Document no more present");
                MessageDialog.openError(PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(),
                        "Error upload",
                        "Dataset with ID " + idInteger + " no more present on server; you can do a new deploy");
                sbdw.setNewDeployFromOld(true);
                newDeployFromOld = true;
            }
            if (noActiveServerException.isNoServer()) {
                logger.error("No server is defined active");
                MessageDialog.openError(PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(),
                        "Error", "No server is defined active");
                return false;
            }
            dialog.close();
            if (notAllowedOperationStudioException.isNotAllowed()) {
                logger.error("Current User has no permission to deploy dataset");
                MessageDialog.openError(PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(),
                        "", "Current user has no permission to deploy dataset");
                return false;
            }
            if (!newDeployFromOld) {
                MessageDialog.openInformation(
                        PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(),
                        "Deploy successful",
                        "Deploy to the associated dataset with label: " + datasetLabel + " and ID: "
                                + idInteger + " was successful");
                logger.debug("Deploy to the associated document " + datasetLabel + " successful; ID is "
                        + idInteger);
                automatic = true;
            }
        } else {
            automatic = false;
        }
        if (!automatic || newDeployFromOld) {
            logger.debug("deploy a new Dataset: start wizard");
            // init wizard
            sbdw.init(PlatformUI.getWorkbench(), sel);
            // Create the wizard dialog
            WizardDialog dialog = new WizardDialog(
                    PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(), sbdw);
            dialog.setPageSize(650, 300);
            // Open the wizard dialog
            dialog.open();
        }
        logger.debug("OUT");
        return automatic;
    }
From source file: org.openhab.io.neeo.internal.NeeoUtil.java

    /**
     * Checks whether the current thread has been interrupted and throws
     * {@link InterruptedException} if it's been interrupted
     *
     * @throws InterruptedException the interrupted exception
     */
    static void checkInterrupt() throws InterruptedException {
        if (Thread.currentThread().isInterrupted()) {
            throw new InterruptedException("thread interrupted");
        }
    }
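A helper like checkInterrupt() is typically invoked at the top of each iteration of long-running work so the task stays responsive to cancellation. A hypothetical caller, assuming the helper is visible (transferAll() and process() are illustrative names, not from the openHAB source):

    // Hypothetical long-running task that stays cancellable: the helper polls the
    // interrupt flag once per item and aborts the whole loop with an exception.
    void transferAll(java.util.List<String> items) throws InterruptedException {
        for (String item : items) {
            checkInterrupt(); // throws if another thread called interrupt()
            process(item);    // assumed: some potentially slow, non-blocking step
        }
    }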
From source file: net.sourceforge.sqlexplorer.sessiontree.model.utility.Dictionary.java

    /**
     * Load dictionary data for catalog
     *
     * @param iNode catalog node to load
     * @param monitor ProgressMonitor displayed whilst loading
     * @throws InterruptedException If user cancelled loading
     */
    private void loadSchemaCatalog(INode iNode, IProgressMonitor monitor) throws InterruptedException {
        if (_logger.isDebugEnabled()) {
            _logger.debug("Loading dictionary: " + iNode.getName());
        }
        // check for cancellation by user
        if (monitor.isCanceled()) {
            throw new InterruptedException(Messages.getString("Progress.Dictionary.Cancelled"));
        }
        putCatalogSchemaName(iNode.toString(), iNode);
        monitor.subTask(iNode.getName());
        INode[] children = iNode.getChildNodes();
        if (children != null) {
            // check for cancellation by user
            if (monitor.isCanceled()) {
                throw new InterruptedException(Messages.getString("Progress.Dictionary.Cancelled"));
            }
            // divide work equally between type nodes
            int typeNodeWorkUnit = ROOT_WORK_UNIT / SUPPORTED_CONTENT_ASSIST_TYPES.length;
            int typeNodeWorkCompleted = 0;
            for (int i = 0; i < children.length; i++) {
                INode typeNode = children[i];
                if (_logger.isDebugEnabled()) {
                    _logger.debug("Loading dictionary: " + typeNode.getName());
                }
                // only load a few types like table and view nodes into the dictionary
                boolean isIncludedInContentAssist = false;
                for (int j = 0; j < SUPPORTED_CONTENT_ASSIST_TYPES.length; j++) {
                    if (typeNode.getType().equalsIgnoreCase(SUPPORTED_CONTENT_ASSIST_TYPES[j])) {
                        isIncludedInContentAssist = true;
                    }
                }
                if (!isIncludedInContentAssist) {
                    continue;
                }
                monitor.subTask(typeNode.getName());
                // check for cancellation by user
                if (monitor.isCanceled()) {
                    throw new InterruptedException(Messages.getString("Progress.Dictionary.Cancelled"));
                }
                INode tableNodes[] = typeNode.getChildNodes();
                if (tableNodes != null) {
                    // check for cancellation by user
                    if (monitor.isCanceled()) {
                        throw new InterruptedException(Messages.getString("Progress.Dictionary.Cancelled"));
                    }
                    int tableNodeWorkUnit = typeNodeWorkUnit / tableNodes.length;
                    for (int j = 0; j < tableNodes.length; j++) {
                        INode tableNode = tableNodes[j];
                        if (_logger.isDebugEnabled()) {
                            _logger.debug("Loading dictionary: " + tableNode.getName());
                        }
                        if (monitor != null) {
                            monitor.worked(tableNodeWorkUnit);
                            typeNodeWorkCompleted = typeNodeWorkCompleted + tableNodeWorkUnit;
                            if (_logger.isDebugEnabled()) {
                                _logger.debug("worked table: " + tableNodeWorkUnit + ", total type work: "
                                        + typeNodeWorkCompleted);
                            }
                            monitor.subTask(tableNode.getQualifiedName());
                            // check for cancellation by user
                            if (monitor.isCanceled()) {
                                throw new InterruptedException(
                                        Messages.getString("Progress.Dictionary.Cancelled"));
                            }
                        }
                        // add table name
                        ArrayList tableDetails = (ArrayList) getByTableName(tableNode.getName());
                        if (tableDetails == null) {
                            tableDetails = new ArrayList();
                            putTableName(tableNode.getName(), tableDetails);
                        }
                        tableDetails.add(tableNode);
                        // add column names
                        if (tableNode instanceof TableNode) {
                            TreeSet columnNames = new TreeSet();
                            List columns = ((TableNode) tableNode).getColumnNames();
                            if (columns != null) {
                                Iterator it = columns.iterator();
                                while (it.hasNext()) {
                                    columnNames.add(it.next());
                                }
                            }
                            putColumnsByTableName(tableNode.getName(), columnNames);
                        }
                    }
                }
                if (typeNodeWorkCompleted < typeNodeWorkUnit) {
                    // consume remainder of work for this type node
                    if (_logger.isDebugEnabled()) {
                        _logger.debug("consuming remainder: " + (typeNodeWorkUnit - typeNodeWorkCompleted));
                    }
                    monitor.worked(typeNodeWorkUnit - typeNodeWorkCompleted);
                }
                typeNodeWorkCompleted = 0;
            }
        }
    }
From source file: com.thoughtworks.go.agent.AgentProcessParentImplTest.java

    @Test
    public void shouldLogInterruptOnAgentProcess() throws InterruptedException {
        final List<String> cmd = new ArrayList<>();
        try (LogFixture logFixture = logFixtureFor(AgentProcessParentImpl.class, Level.DEBUG)) {
            Process subProcess = mockProcess();
            when(subProcess.waitFor()).thenThrow(new InterruptedException("bang bang!"));
            AgentProcessParentImpl bootstrapper = createBootstrapper(cmd, subProcess);
            int returnCode = bootstrapper.run("bootstrapper_version", "bar", getURLGenerator(),
                    new HashMap<>(), context());
            assertThat(returnCode, is(0));
            assertThat(logFixture.contains(Level.ERROR,
                    "Agent was interrupted. Terminating agent and respawning. java.lang.InterruptedException: bang bang!"),
                    is(true));
            verify(subProcess).destroy();
        }
    }
From source file: org.apache.giraph.rexster.utils.RexsterUtils.java

    /**
     * Opens an HTTP connection to the specified Rexster server.
     *
     * @param conf giraph configuration
     * @param start start index of the Rexster page split
     * @param end end index of the Rexster page split
     * @param type stream type (vertices or edges) needed for the REST URL
     * @param gremlinScript gremlin script. If set to null, will be ignored.
     * @return BufferedReader the object used to retrieve the HTTP response content
     */
    // CHECKSTYLE: stop IllegalCatch
    public static BufferedReader openInputStream(ImmutableClassesGiraphConfiguration conf,
            long start, long end, String type, String gremlinScript) throws InterruptedException {
        String uriScriptFormat = "/graphs/%s/tp/gremlin?script=%s"
                + "&rexster.offset.start=%s&rexster.offset.end=%s";
        String uriFormat = "/graphs/%s/tp/giraph/%s/"
                + "?rexster.offset.start=%s&rexster.offset.end=%s";
        String endpoint = GIRAPH_REXSTER_HOSTNAME.get(conf);
        try {
            boolean isSsl = GIRAPH_REXSTER_USES_SSL.get(conf);
            int port = GIRAPH_REXSTER_PORT.get(conf);
            String graph = GIRAPH_REXSTER_INPUT_GRAPH.get(conf);
            URL url;
            if (gremlinScript != null && !gremlinScript.isEmpty()) {
                url = new URL(isSsl ? "https" : "http", endpoint, port,
                        String.format(uriScriptFormat, graph, gremlinScript, start, end));
            } else {
                url = new URL(isSsl ? "https" : "http", endpoint, port,
                        String.format(uriFormat, graph, type, start, end));
            }
            LOG.info(url);
            String username = GIRAPH_REXSTER_USERNAME.get(conf);
            String password = GIRAPH_REXSTER_PASSWORD.get(conf);
            String auth = getHTTPAuthString(username, password);
            HttpURLConnection connection = createConnection(url, "GET", auth);
            connection.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
            connection.setDoInput(true);
            connection.setDoOutput(false);
            RexsterUtils.handleResponse(connection, type);
            InputStream is = connection.getInputStream();
            InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8"));
            return new BufferedReader(isr);
        } catch (Exception e) {
            // flattens any failure (including IOException) into an
            // InterruptedException, keeping only the message
            throw new InterruptedException(e.getMessage());
        }
    }