List of usage examples for java.lang.InterruptedException

public InterruptedException()
Constructs an InterruptedException with no detail message.
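Before the examples from real projects below, here is a minimal sketch of the pattern this constructor supports (class and method names are illustrative only, not from any of the sources listed): loop-driven code polls its interrupt flag, throws a fresh InterruptedException, and the caller restores the flag.

// Minimal illustrative sketch (hypothetical names): the usual pattern
// around "new InterruptedException()" in cancellable worker code.
public class InterruptibleCopy {

    // A cancellable step: Thread.interrupted() clears the thread's
    // interrupt flag, so we re-signal cancellation by throwing a
    // fresh InterruptedException with no detail message.
    static void copyChunk() throws InterruptedException {
        if (Thread.interrupted()) {
            throw new InterruptedException();
        }
        // ... copy one chunk of data ...
    }

    public static void main(String[] args) {
        try {
            copyChunk();
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers further up can see it.
            Thread.currentThread().interrupt();
        }
    }
}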
From source file:com.splout.db.dnode.Fetcher.java

private File s3Fetch(URI uri, Reporter reporter) throws IOException, InterruptedException {
    String bucketName = uri.getHost();
    String path = uri.getPath();
    UUID uniqueId = UUID.randomUUID();
    File destFolder = new File(tempDir, uniqueId.toString() + "/" + path);
    if (destFolder.exists()) {
        FileUtils.deleteDirectory(destFolder);
    }
    destFolder.mkdirs();

    Throttler throttler = new Throttler((double) bytesPerSecThrottle);

    boolean done = false;
    try {
        s3Service = new RestS3Service(getCredentials());
        if (s3Service.checkBucketStatus(bucketName) != RestS3Service.BUCKET_STATUS__MY_BUCKET) {
            throw new IOException("Bucket doesn't exist or is already claimed: " + bucketName);
        }
        if (path.startsWith("/")) {
            path = path.substring(1, path.length());
        }
        for (S3Object object : s3Service.listObjects(new S3Bucket(bucketName), path, "")) {
            long bytesSoFar = 0;

            String fileName = path;
            if (path.contains("/")) {
                fileName = path.substring(path.lastIndexOf("/") + 1, path.length());
            }
            File fileDest = new File(destFolder, fileName);
            log.info("Downloading " + object.getKey() + " to " + fileDest + " ...");
            if (fileDest.exists()) {
                fileDest.delete();
            }

            object = s3Service.getObject(new S3Bucket(bucketName), object.getKey());
            InputStream iS = object.getDataInputStream();
            FileOutputStream writer = new FileOutputStream(fileDest);
            byte[] buffer = new byte[downloadBufferSize];

            int nRead;
            while ((nRead = iS.read(buffer, 0, buffer.length)) != -1) {
                // Needed to be able to be interrupted at any moment.
                if (Thread.interrupted()) {
                    iS.close();
                    writer.close();
                    cleanDirNoExceptions(destFolder);
                    throw new InterruptedException();
                }
                bytesSoFar += nRead;
                writer.write(buffer, 0, nRead);
                throttler.incrementAndThrottle(nRead);
                if (bytesSoFar >= bytesToReportProgress) {
                    reporter.progress(bytesSoFar);
                    bytesSoFar = 0L;
                }
            }

            if (reporter != null) {
                reporter.progress(bytesSoFar);
            }

            writer.close();
            iS.close();
            done = true;
        }

        if (!done) {
            throw new IOException("Bucket is empty! " + bucketName + " path: " + path);
        }
    } catch (S3ServiceException e) {
        throw new IOException(e);
    }
    return destFolder;
}
From source file:com.epam.reportportal.apache.http.impl.conn.PoolingHttpClientConnectionManager.java
protected HttpClientConnection leaseConnection(final Future<CPoolEntry> future, final long timeout,
        final TimeUnit tunit) throws InterruptedException, ExecutionException, ConnectionPoolTimeoutException {
    final CPoolEntry entry;
    try {
        entry = future.get(timeout, tunit);
        if (entry == null || future.isCancelled()) {
            throw new InterruptedException();
        }
        Asserts.check(entry.getConnection() != null, "Pool entry with no connection");
        if (this.log.isDebugEnabled()) {
            this.log.debug("Connection leased: " + format(entry) + formatStats(entry.getRoute()));
        }
        return CPoolProxy.newProxy(entry);
    } catch (final TimeoutException ex) {
        throw new ConnectionPoolTimeoutException("Timeout waiting for connection from pool");
    }
}
From source file:tap.formatter.JSONFormat.java
/**
 * Write the whole data part of the JSON file.
 *
 * @param result          Result to write.
 * @param selectedColumns All columns' metadata.
 * @param out             Output stream in which the data must be written.
 * @param execReport      Execution report (which contains the maximum allowed number of records to output).
 * @param thread          Thread which has asked for this formatting (it must be used in order to test the
 *                        {@link Thread#isInterrupted()} flag and so interrupt everything if needed).
 *
 * @throws IOException          If there is an error while writing something in the output stream.
 * @throws InterruptedException If the thread has been interrupted.
 * @throws JSONException        If there is an error while formatting something in JSON.
 * @throws TAPException         If any other error occurs.
 */
protected void writeData(TableIterator result, DBColumn[] selectedColumns, JSONWriter out,
        TAPExecutionReport execReport, Thread thread)
        throws IOException, TAPException, InterruptedException, JSONException {
    // [
    out.array();
    execReport.nbRows = 0;

    while (result.nextRow()) {
        // Stop the formatting right now if the job has been aborted/canceled/interrupted:
        if (thread.isInterrupted())
            throw new InterruptedException();

        // Deal with OVERFLOW, if needed:
        if (execReport.parameters.getMaxRec() > 0 && execReport.nbRows >= execReport.parameters.getMaxRec())
            break;

        // [
        out.array();
        int indCol = 0;
        while (result.hasNextCol())
            // ...
            writeFieldValue(result.nextCol(), selectedColumns[indCol++], out);
        // ]
        out.endArray();
        execReport.nbRows++;
    }

    // ]
    out.endArray();
}
From source file:com.asakusafw.testdriver.inprocess.InProcessJobExecutorTest.java
/**
 * Test method for executing emulated command.
 */
@Test
public void executeCommand_interrupt() {
    MockCommandEmulator.callback(args -> {
        throw new InterruptedException();
    });
    JobExecutor executor = new InProcessJobExecutor(context);
    try {
        executor.execute(command("mock"), Collections.emptyMap());
        throw new IllegalStateException();
    } catch (IOException e) {
        // ok.
    } catch (AssertionError e) {
        // ok.
    }
}
From source file:net.bpelunit.framework.control.run.TestCaseRunner.java
/**
 * Sends a synchronous message, waits for the result, and returns the
 * result. This method blocks until either a result has been retrieved, a
 * send error has occurred, or the thread was interrupted.
 *
 * @param message the message to be sent
 * @return the resulting incoming message
 * @throws SynchronousSendException
 * @throws InterruptedException
 */
public IncomingMessage sendMessageSynchronous(OutgoingMessage message)
        throws SynchronousSendException, InterruptedException {

    PostMethod method = new PostMethod(message.getTargetURL());

    // Set parameters:
    // -> Do not retry
    // -> Socket timeout to default timeout value.
    method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
            new DefaultHttpMethodRetryHandler(1, false));
    method.getParams().setParameter(HttpMethodParams.SO_TIMEOUT,
            Integer.valueOf(BPELUnitRunner.getTimeout()));
    method.addRequestHeader("SOAPAction", "\"" + message.getSOAPHTTPAction() + "\"");

    RequestEntity entity;
    try {
        entity = new StringRequestEntity(message.getMessageAsString(),
                BPELUnitConstants.TEXT_XML_CONTENT_TYPE, BPELUnitConstants.DEFAULT_HTTP_CHARSET);
    } catch (UnsupportedEncodingException e) {
        // cannot happen since we use the default HTTP Encoding.
        throw new SynchronousSendException(
                "Unsupported encoding when trying to post message to web service.", e);
    }

    for (String option : message.getProtocolOptionNames()) {
        method.addRequestHeader(option, message.getProtocolOption(option));
    }

    method.setRequestEntity(entity);

    try {
        // Execute the method.
        int statusCode = fClient.executeMethod(method);
        InputStream in = method.getResponseBodyAsStream();
        IncomingMessage returnMsg = new IncomingMessage();
        returnMsg.setStatusCode(statusCode);
        returnMsg.setMessage(in);
        return returnMsg;
    } catch (Exception e) {
        if (isAborting()) {
            throw new InterruptedException();
        } else {
            throw new SynchronousSendException(e);
        }
    } finally {
        // Release the connection.
        method.releaseConnection();
    }
}
From source file:org.executequery.gui.importexport.ImportDelimitedWorker.java
private Object doWork() {
    // the process result
    String processResult = null;
    // are we halting on any error
    int onError = getParent().getOnError();
    haltOnError = (onError == ImportExportProcess.STOP_TRANSFER);
    boolean isBatch = getParent().runAsBatchProcess();

    appendProgressText("Beginning import from delimited file process...");
    appendProgressText("Using connection: " + getParent().getDatabaseConnection().getName());

    // ---------------------------------------
    // table specific counters

    // the table statement result
    int tableInsertCount = 0;
    // the records processed for this table
    int tableRowCount = 0;
    // the table commit count
    int tableCommitCount = 0;

    // ---------------------------------------
    // total import process counters

    // the current commit block size
    int commitCount = 0;
    // the total records inserted
    int totalInsertCount = 0;
    // the total records processed
    int totalRecordCount = 0;
    // the error count
    int errorCount = 0;
    // the current line number
    int lineNumber = 0;

    int rollbackSize = getParent().getRollbackSize();
    int rollbackCount = 0;

    FileReader fileReader = null;
    BufferedReader reader = null;
    DateFormat dateFormat = null;

    try {
        // retrieve the import files
        Vector files = getParent().getDataFileVector();
        int fileCount = files.size();

        // whether to trim whitespace
        boolean trimWhitespace = getParent().trimWhitespace();

        // whether this table has a date/time field
        boolean hasDate = false;

        // whether we are parsing date formats
        boolean parsingDates = parseDateValues();

        // column names are first row
        boolean hasColumnNames = getParent().includeColumnNames();

        // currently bound variables in the prepared statement
        Map<ColumnData, String> boundVariables = null;
        // ignored indexes of columns from the file
        List<Integer> ignoredIndexes = null;
        if (hasColumnNames) {
            boundVariables = new HashMap<ColumnData, String>();
            ignoredIndexes = new ArrayList<Integer>();
            appendProgressText("Using column names from input file's first row.");
        }

        // columns to be imported that are in the file
        Map<ColumnData, String> fileImportedColumns = new HashMap<ColumnData, String>();

        // whether the data format failed (switch structure)
        boolean failed = false;

        // define the delimiter
        String delim = getParent().getDelimiter();

        // ---------------------------
        // --- initialise counters ---
        // ---------------------------

        // the table's column count
        int columnCount = -1;
        // the length of each line in the file
        int rowLength = -1;
        // progress bar values
        int progressStatus = -1;
        // ongoing progress value
        int progressCheck = -1;
        // the import file size
        long fileSize = -1;

        // set the date format
        if (parseDateValues()) {
            try {
                dateFormat = createDateFormatter();
            } catch (IllegalArgumentException e) {
                errorCount++;
                outputExceptionError("Error applying date mask", e);
                return FAILED;
            }
        }

        // record the start time
        start();

        // setup the regex matcher for delims

        // ----------------------------------------------------------------
        // below was the original pattern from oreilly book.
        // discovered issues when parsing values with quotes
        // in them - not only around them.
        /*
        String regex =
            "(?:^|\\" + delim + ") (?: \" ( (?> [^\"]*+ ) (?> \"\" [^\"]*+ )*+ ) \" | ( [^\"\\" + delim + "]*+ ) )";
        Matcher matcher = Pattern.compile(regex, Pattern.COMMENTS).matcher("");
        Matcher qMatcher = Pattern.compile("\"\"", Pattern.COMMENTS).matcher("");
        */
        // ----------------------------------------------------------------
        // modified above to regex below
        // added the look-ahead after the close quote
        // and removed the quote from the last regex pattern

        String escapedDelim = escapeDelim(delim);
        String regex = "(?:^|" + escapedDelim + ") (?: \" ( (?> [^\"]*+ ) (?> \"\" [^\"]*+ )*+ ) \"(?="
                + escapedDelim + "?) | ( [^" + escapedDelim + "]*+ ) )";

        // ----------------------------------------------------------------
        // changed above to the following - seems to work for now
        // regex pattern in full - where <delim> is the delimiter to use
        // \"([^\"]+?)\"<delim>?|([^<delim>]+)<delim>?|<delim>
        //
        // fixed oreilly one - not running this one
        // ----------------------------------------------------------------

        Matcher matcher = Pattern.compile(regex, Pattern.COMMENTS).matcher("");
        Matcher qMatcher = Pattern.compile("\"\"", Pattern.COMMENTS).matcher("");

        // ----------------------------------------
        // --- begin looping through the tables ---
        // ----------------------------------------

        // ensure the connection has auto-commit to false
        conn = getConnection();
        conn.setAutoCommit(false);

        int currentRowLength = 0;
        boolean insertLine = false;

        // the number of columns actually available in the file
        int filesColumnCount = 0;

        for (int i = 0; i < fileCount; i++) {
            lineNumber = 0;
            tableInsertCount = 0;
            tableCommitCount = 0;
            rollbackCount = 0;
            tableRowCount = 0;
            rowLength = 0;

            if (Thread.interrupted()) {
                setProgressStatus(100);
                throw new InterruptedException();
            }

            tableCount++;

            DataTransferObject dto = (DataTransferObject) files.elementAt(i);

            // initialise the file object
            File inputFile = new File(dto.getFileName());

            outputBuffer.append("---------------------------\nTable: ");
            outputBuffer.append(dto.getTableName());
            outputBuffer.append("\nImport File: ");
            outputBuffer.append(inputFile.getName());
            appendProgressText(outputBuffer);

            // setup the reader objects
            fileReader = new FileReader(inputFile);
            reader = new BufferedReader(fileReader);

            // retrieve the columns to be imported (or all)
            Vector<ColumnData> columns = getColumns(dto.getTableName());
            columnCount = columns.size();
            filesColumnCount = columnCount;

            // the entire row read
            String row = null;
            // the current delimited value
            String value = null;
            // the ignored column count
            int ignoredCount = 0;

            // clear the file columns cache
            fileImportedColumns.clear();

            // if the first row in the file has the column
            // names compare these with the columns selected
            if (hasColumnNames) {

                // init the bound vars cache with the selected columns
                boundVariables.clear();
                for (int k = 0; k < columnCount; k++) {
                    boundVariables.put(columns.get(k), VARIABLE_NOT_BOUND);
                }

                row = reader.readLine();
                lineNumber++;

                String[] _columns = MiscUtils.splitSeparatedValues(row, delim);
                if (_columns != null && _columns.length > 0) {
                    filesColumnCount = _columns.length;

                    // --------------------------------------
                    // first determine if we have any columns in the
                    // input file that were not selected for import

                    // reset the ignored columns
                    ignoredIndexes.clear();

                    // set up another list to re-add the columns in
                    // the order in which they appear in the file.
                    // all other columns will be added to the end
                    Vector<ColumnData> temp = new Vector<ColumnData>(columnCount);

                    ColumnData cd = null;
                    int ignoredIndex = -1;
                    for (int j = 0; j < _columns.length; j++) {
                        ignoredIndex = j;
                        String column = _columns[j];

                        for (int k = 0; k < columnCount; k++) {
                            cd = columns.get(k);
                            String _column = cd.getColumnName();
                            if (_column.equalsIgnoreCase(column)) {
                                temp.add(cd);
                                fileImportedColumns.put(cd, INCLUDED_COLUMN);
                                ignoredIndex = -1;
                                break;
                            }
                        }

                        if (ignoredIndex != -1) {
                            ignoredIndexes.add(Integer.valueOf(ignoredIndex));
                        }
                    }
                    ignoredCount = ignoredIndexes.size();

                    // if we didn't find any columns at all, show warning
                    if (temp.isEmpty()) {
                        String message = "No matching column names were "
                                + "found within the specified file's first line.\n"
                                + "The current file will be ignored.";
                        outputBuffer.append(message);
                        appendProgressWarningText(outputBuffer);

                        int yesNo = GUIUtilities.displayYesNoDialog(
                                message + "\nDo you wish to continue?", "Warning");
                        if (yesNo == JOptionPane.YES_OPTION) {
                            continue;
                        } else {
                            throw new InterruptedException();
                        }
                    } else {
                        // add any other selected columns to the
                        // end of the temp list with the columns
                        // available in the file
                        boolean addColumn = false;
                        for (int k = 0; k < columnCount; k++) {
                            addColumn = false;
                            cd = columns.get(k);
                            for (int j = 0, n = temp.size(); j < n; j++) {
                                addColumn = true;
                                if (temp.get(j) == cd) {
                                    addColumn = false;
                                    break;
                                }
                            }
                            if (addColumn) {
                                temp.add(cd);
                            }
                        }
                        columns = temp;
                        // note: size should not have changed
                    }
                }
            }
            // otherwise just populate the columns in the file
            // with all the selected columns
            else {
                for (int j = 0; j < columnCount; j++) {
                    fileImportedColumns.put(columns.get(j), INCLUDED_COLUMN);
                }
            }

            /*
            Log.debug("ignored count: " + ignoredCount);
            for (int j = 0; j < columnCount; j++) {
                Log.debug("column: " + columns.get(j));
            }
            */

            fileSize = inputFile.length();
            progressStatus = 10;
            progressCheck = (int) (fileSize / progressStatus);

            // prepare the statement
            prepareStatement(dto.getTableName(), columns);

            if (parsingDates && dateFormat == null) {
                // check for a date data type
                for (int j = 0; j < columnCount; j++) {
                    if (dateFormat == null && !hasDate) {
                        ColumnData cd = columns.get(j);
                        if (fileImportedColumns.containsKey(cd)) {
                            if (cd.isDateDataType()) {
                                hasDate = true;
                                break;
                            }
                        }
                    }
                }
                if (hasDate && dateFormat == null) {
                    String pattern = verifyDate();
                    if (StringUtils.isNotBlank(pattern)) {
                        fileReader.close();
                        setProgressStatus(100);
                        throw new InterruptedException();
                    }
                    dateFormat = createDateFormatter(pattern);
                }
            }

            rowLength = 0;

            while ((row = reader.readLine()) != null) {
                insertLine = true;
                lineNumber++;
                tableRowCount++;
                totalRecordCount++;

                if (Thread.interrupted()) {
                    fileReader.close();
                    printTableResult(tableRowCount, tableCommitCount, dto.getTableName());
                    setProgressStatus(100);
                    throw new InterruptedException();
                }

                currentRowLength = row.length();

                if (currentRowLength == 0) {
                    outputBuffer.append("Line ");
                    outputBuffer.append(lineNumber);
                    outputBuffer.append(" contains no delimited values");
                    appendProgressWarningText(outputBuffer);

                    int yesNo = GUIUtilities.displayYesNoDialog(
                            "No values provided from line " + lineNumber + " - the row is blank.\n"
                                    + "Do you wish to continue?", "Warning");
                    if (yesNo == JOptionPane.YES_OPTION) {
                        continue;
                    } else {
                        throw new InterruptedException();
                    }
                }

                rowLength += currentRowLength;
                if (progressCheck < rowLength) {
                    setProgressStatus(progressStatus);
                    progressStatus += 10;
                    rowLength = 0;
                }

                // reset matcher with current row
                matcher.reset(row);

                int index = 0;
                int lastIndex = -1;
                int loopIgnoredCount = 0;

                // Log.debug(row);

                for (int j = 0; j < filesColumnCount; j++) {
                    if (matcher.find(index)) {
                        String first = matcher.group(2);
                        if (first != null) {
                            value = first;
                        } else {
                            qMatcher.reset(matcher.group(1));
                            value = qMatcher.replaceAll("\"");
                        }
                        index = matcher.end();

                        // check if it's an ignored column
                        if (ignoredCount > 0) {
                            if (isIndexIgnored(ignoredIndexes, j)) {
                                loopIgnoredCount++;
                                continue;
                            }
                        }
                    } else {
                        // not enough delims check
                        if (j < (filesColumnCount - 1) && index > (currentRowLength - 1)) {
                            outputBuffer.append("Insufficient number of column ");
                            outputBuffer.append("values provided at line ");
                            outputBuffer.append(lineNumber);
                            appendProgressErrorText(outputBuffer);

                            int yesNo = GUIUtilities.displayYesNoDialog(
                                    "Insufficient number of values provided from line " + lineNumber + ".\n"
                                            + "Do you wish to continue?", "Warning");
                            if (yesNo == JOptionPane.YES_OPTION) {
                                insertLine = false;
                                break;
                            } else {
                                throw new InterruptedException();
                            }
                        } else {
                            // check if we're on a delim the matcher didn't pick up
                            int delimLength = delim.length();
                            if (row.substring(index, index + delimLength).equals(delim)) {
                                // increment index
                                index++;
                                // assign as null value
                                value = null;
                            }
                        }
                    }

                    // check if we landed on the same index - likely null value
                    if (index == lastIndex) {
                        index++;
                    }
                    lastIndex = index;

                    if (value != null && value.trim().length() == 0) {
                        value = null;
                    }

                    try {
                        ColumnData cd = columns.get(j - loopIgnoredCount);
                        setValue(value, getIndexOfColumn(columns, cd) + 1, cd.getSQLType(),
                                trimWhitespace, dateFormat);
                        if (hasColumnNames) {
                            boundVariables.put(cd, VARIABLE_BOUND);
                        }
                    } catch (ParseException e) {
                        errorCount++;
                        failed = true;
                        outputBuffer.append("Error parsing date value - ");
                        outputBuffer.append(value);
                        outputBuffer.append(" - on line ");
                        outputBuffer.append(lineNumber);
                        outputBuffer.append(" at position ");
                        outputBuffer.append(j);
                        outputExceptionError(null, e);
                        break;
                    } catch (NumberFormatException e) {
                        errorCount++;
                        failed = true;
                        outputBuffer.append("Error parsing value - ");
                        outputBuffer.append(value);
                        outputBuffer.append(" - on line ");
                        outputBuffer.append(lineNumber);
                        outputBuffer.append(" at position ");
                        outputBuffer.append(j);
                        outputExceptionError(null, e);
                        break;
                    }
                }

                if (!insertLine) {
                    prepStmnt.clearParameters();
                    continue;
                }

                if (failed && haltOnError) {
                    processResult = FAILED;
                    break;
                }

                // execute the statement
                try {
                    // check all variables are bound if we used
                    // the column names from the first row
                    if (hasColumnNames) {
                        index = 0;
                        // check all variables are bound - insert NULL otherwise
                        for (Map.Entry<ColumnData, String> entry : boundVariables.entrySet()) {
                            ColumnData cd = entry.getKey();
                            if (VARIABLE_NOT_BOUND.equals(entry.getValue())) {
                                index = getIndexOfColumn(columns, cd);
                                prepStmnt.setNull(index + 1, cd.getSQLType());
                            }
                        }
                    }

                    if (isBatch) {
                        prepStmnt.addBatch();
                    } else {
                        int result = prepStmnt.executeUpdate();
                        tableInsertCount += result;
                        commitCount += result;
                    }

                    rollbackCount++;
                    // check the rollback segment
                    if (rollbackCount == rollbackSize) {
                        if (isBatch) {
                            int result = getBatchResult(prepStmnt.executeBatch())[0];
                            tableInsertCount += result;
                            commitCount += result;
                            prepStmnt.clearBatch();
                        }
                        conn.commit();
                        totalInsertCount += commitCount;
                        tableCommitCount = tableInsertCount;
                        rollbackCount = 0;
                        commitCount = 0;
                    }

                    // reset bound variables
                    if (hasColumnNames) {
                        for (int k = 0; k < columnCount; k++) {
                            boundVariables.put(columns.get(k), VARIABLE_NOT_BOUND);
                        }
                    }
                } catch (SQLException e) {
                    logException(e);
                    errorCount++;

                    if (!isBatch) {
                        outputBuffer.append("Error inserting data from line ");
                        outputBuffer.append(lineNumber);
                        outputExceptionError(null, e);
                    } else {
                        outputBuffer.append("Error on last batch execution");
                        outputExceptionError(null, e);
                    }

                    if (haltOnError) {
                        processResult = FAILED;
                        conn.rollback();
                        getParent().cancelTransfer();
                        throw new InterruptedException();
                    }
                }
            }

            // ----------------------------
            // file/table has ended here

            if (isBatch) {
                int[] batchResult = null;
                try {
                    batchResult = getBatchResult(prepStmnt.executeBatch());
                    int result = batchResult[0];
                    tableInsertCount += result;
                    commitCount += result;
                    tableCommitCount = tableInsertCount;
                } catch (BatchUpdateException e) {
                    logException(e);
                    int[] updateCounts = e.getUpdateCounts();
                    batchResult = getBatchResult(updateCounts);
                    errorCount += batchResult[1];
                    if (errorCount == 0) {
                        errorCount = 1;
                    }

                    outputBuffer.append("An error occurred during the batch process: ");
                    outputBuffer.append(e.getMessage());

                    SQLException _e = e.getNextException();
                    while (_e != null) {
                        outputBuffer.append("\nNext Exception: ");
                        outputBuffer.append(_e.getMessage());
                        _e = _e.getNextException();
                    }

                    outputBuffer.append("\n\nRecords processed to the point ");
                    outputBuffer.append("where this error occurred: ");
                    outputBuffer.append(updateCounts.length);
                    appendProgressErrorText(outputBuffer);
                    processResult = FAILED;
                }

                // Log.debug("commitCount: " + commitCount +
                //           " batch: " + batchResult[0]);

                if (tableRowCount != tableInsertCount) {
                    conn.rollback();

                    if (onError == ImportExportProcess.STOP_TRANSFER) {
                        getParent().cancelTransfer();
                        processResult = FAILED;
                        throw new InterruptedException();
                    }
                }
            }

            boolean doCommit = true;
            if (failed && !isBatch && rollbackSize != ImportExportProcess.COMMIT_END_OF_ALL_FILES) {
                int yesNo = GUIUtilities.displayYesNoDialog(
                        "The process completed with errors.\n" + "Do you wish to commit the last block?",
                        "Confirm commit");
                doCommit = (yesNo == JOptionPane.YES_OPTION);
            }

            // do the commit if ok from above
            // and if rollback size selected is end of file
            if (rollbackSize == ImportExportProcess.COMMIT_END_OF_FILE) {
                if (doCommit) {
                    conn.commit();
                    totalInsertCount += commitCount;
                    tableCommitCount = tableInsertCount;
                    commitCount = 0;
                } else {
                    conn.rollback();
                }
            }

            // update the progress display
            printTableResult(tableRowCount, tableInsertCount, dto.getTableName());
            setProgressStatus(100);

            // reset the checks
            hasDate = false;
            failed = false;
        }

        // commit the last remaining block or where
        // set to commit at the end of all files
        if (rollbackSize != ImportExportProcess.COMMIT_END_OF_FILE) {
            setProgressStatus(100);
            boolean doCommit = true;
            if (errorCount > 0 && errorCount != totalRecordCount) {
                int yesNo = GUIUtilities.displayYesNoDialog(
                        "The process completed with errors.\n" + "Do you wish to commit the changes?",
                        "Confirm commit");
                doCommit = (yesNo == JOptionPane.YES_OPTION);
            }

            if (doCommit) {
                conn.commit();
                totalInsertCount += commitCount;
            } else {
                conn.rollback();
            }
        }

        processResult = SUCCESS;
    } catch (InterruptedException e) {
        if (processResult != FAILED) {
            processResult = CANCELLED;
        }
        try {
            if (prepStmnt != null) {
                prepStmnt.cancel();
            }
            if (conn != null) {
                conn.rollback();
            }
        } catch (SQLException e2) {
            outputExceptionError("Error rolling back transaction", e);
        }
    } catch (Exception e) {
        logException(e);
        outputBuffer.append("Error processing data from line ");
        outputBuffer.append(lineNumber);
        outputExceptionError("\nUnrecoverable error importing table data from file", e);

        int yesNo = GUIUtilities.displayYesNoDialog(
                "The process encountered errors.\n" + "Do you wish to commit the last transaction block?",
                "Confirm commit");
        boolean doCommit = (yesNo == JOptionPane.YES_OPTION);
        try {
            if (doCommit) {
                conn.commit();
                totalInsertCount += commitCount;
            } else {
                conn.rollback();
            }
        } catch (SQLException e2) {
            logException(e2);
            outputExceptionError("Error processing last transaction block", e2);
        }
        processResult = FAILED;
    } finally {
        finish();
        releaseResources(getParent().getDatabaseConnection());

        if (totalRecordCount == 0 || errorCount > 0) {
            processResult = FAILED;
        }

        setTableCount(tableCount);
        setRecordCount(totalRecordCount);
        setRecordCountProcessed(totalInsertCount);
        setErrorCount(errorCount);

        setProgressStatus(100);
        GUIUtilities.scheduleGC();

        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
            }
        }
        if (fileReader != null) {
            try {
                fileReader.close();
            } catch (IOException e) {
            }
        }
        if (prepStmnt != null) {
            try {
                prepStmnt.close();
            } catch (SQLException e) {
            }
        }
    }
    return processResult;
}
From source file:com.alibaba.otter.node.etl.select.selector.canal.CanalEmbedSelector.java
public Message<EventData> selector() throws InterruptedException {
    int emptyTimes = 0;
    com.alibaba.otter.canal.protocol.Message message = null;
    if (batchTimeout < 0) { // no batch timeout configured: poll with back-off until data arrives
        while (running) {
            message = canalServer.getWithoutAck(clientIdentity, batchSize);
            if (message == null || message.getId() == -1L) { // empty batch
                applyWait(emptyTimes++);
            } else {
                break;
            }
        }
        if (!running) {
            throw new InterruptedException();
        }
    } else { // let the canal server block for up to batchTimeout
        while (running) {
            message = canalServer.getWithoutAck(clientIdentity, batchSize, batchTimeout, TimeUnit.MILLISECONDS);
            if (message == null || message.getId() == -1L) { // empty batch
                continue;
            } else {
                break;
            }
        }
        if (!running) {
            throw new InterruptedException();
        }
    }

    List<EventData> eventDatas = messageParser.parse(pipelineId, message.getEntries());
    // wrap the parsed events with the original batch id
    Message<EventData> result = new Message<EventData>(message.getId(), eventDatas);
    // remember the execute time of the last entry
    if (!CollectionUtils.isEmpty(message.getEntries())) {
        long lastEntryTime = message.getEntries().get(message.getEntries().size() - 1).getHeader()
                .getExecuteTime();
        if (lastEntryTime > 0) { // oracle entries may report an execute time of 0
            this.lastEntryTime = lastEntryTime;
        }
    }

    if (dump && logger.isInfoEnabled()) {
        String startPosition = null;
        String endPosition = null;
        if (!CollectionUtils.isEmpty(message.getEntries())) {
            startPosition = buildPositionForDump(message.getEntries().get(0));
            endPosition = buildPositionForDump(message.getEntries().get(message.getEntries().size() - 1));
        }
        dumpMessages(result, startPosition, endPosition, message.getEntries().size());
    }
    return result;
}
From source file:org.kairosdb.plugin.rabbitmq.core.RabbitmqConsumer.java
/**
 * Critical method. Reads configurations from file and configures bindings. If
 * any Exception occurs, execution is interrupted.
 *
 * @throws InterruptedException
 *             the interrupted exception
 */
private void readConfigurations() throws InterruptedException {
    try {
        // Channel connection with broker
        connection = factory.newConnection();
        channel = connection.createChannel();

        // Declare consumer
        consumer = new QueueingConsumer(channel);

        // Read binding configuration file and bind queues to exchanges
        configureBindings();

        isAlive = true;
    } catch (Exception e) {
        // Interrupt start if anything goes wrong
        LOGGER.error("[KRMQ] An error occurred: ", e);
        throw new InterruptedException();
    }
}
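A side note on the conversion in the example above: the no-argument constructor records neither a message nor a cause, so the original exception survives only in the log. Because InterruptedException has no cause-taking constructor, its cause is left unset at construction and Throwable.initCause may be attached once afterwards. A minimal self-contained sketch under that assumption (all names here are hypothetical, not from the source above):

// Illustrative sketch only: keep the original failure attached when
// translating it into an InterruptedException.
public class CauseKeeping {

    static void connect() throws Exception {
        throw new Exception("broker unreachable"); // stand-in failure
    }

    static void start() throws InterruptedException {
        try {
            connect();
        } catch (Exception e) {
            InterruptedException ie = new InterruptedException();
            ie.initCause(e); // legal: the no-arg constructor sets no cause
            throw ie;
        }
    }

    public static void main(String[] args) {
        try {
            start();
        } catch (InterruptedException ie) {
            ie.printStackTrace(); // the trace now includes "Caused by: ..."
        }
    }
}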
From source file:savant.data.sources.TabixDataSource.java
/**
 * {@inheritDoc}
 */
@Override
public List<TabixIntervalRecord> getRecords(String ref, RangeAdapter r, Resolution res,
        RecordFilterAdapter filt) throws IOException, InterruptedException {
    List<TabixIntervalRecord> result = new ArrayList<TabixIntervalRecord>();
    try {
        TabixReader.Iterator i = reader
                .query(MiscUtils.homogenizeSequence(ref) + ":" + r.getFrom() + "-" + (r.getTo() + 1));
        if (i != null) {
            String line = null;
            int start = -1;
            int end = -1;
            Map<Integer, Integer> ends = new HashMap<Integer, Integer>();
            while ((line = i.next()) != null) {
                // Note: count is used to uniquely identify records in same location
                // Assumption is that iterator will always give records in same order
                TabixIntervalRecord rec = TabixIntervalRecord.valueOf(line, mapping);
                if (filt == null || filt.accept(rec)) {
                    RangeAdapter r2 = rec.getExpandedRange(r);
                    if (r2 != null) {
                        return getRecords(ref, r2, res, filt);
                    }
                    if (rec.getInterval().getStart() == start) {
                        end = rec.getInterval().getEnd();
                        if (ends.get(end) == null) {
                            ends.put(end, 0);
                        } else {
                            int count = ends.get(end) + 1;
                            ends.put(end, count);
                            rec.setCount(count);
                        }
                    } else {
                        start = rec.getInterval().getStart();
                        end = rec.getInterval().getEnd();
                        ends.clear();
                        ends.put(end, 0);
                        rec.setCount(0);
                    }
                    if (!absorbGFFRecord(rec, result)) {
                        result.add(rec);
                    }
                }
                if (Thread.interrupted()) {
                    throw new InterruptedException();
                }
            }
        }
    } catch (ArrayIndexOutOfBoundsException x) {
        // If the chromosome isn't found, the Tabix library manifests it by throwing
        // an ArrayIndexOutOfBoundsException.
        LOG.info(String.format("Reference \"%s\" not found.", ref));
    }
    return result;
}
From source file:org.roda_project.commons_ip.model.impl.eark.EARKUtils.java
protected static void addSchemasToZipAndMETS(Map<String, ZipEntryInfo> zipEntries, MetsWrapper metsWrapper,
        List<IPFile> schemas, String representationId) throws IPException, InterruptedException {
    if (schemas != null && !schemas.isEmpty()) {
        for (IPFile schema : schemas) {
            if (Thread.interrupted()) {
                throw new InterruptedException();
            }
            String schemaFilePath = IPConstants.SCHEMAS_FOLDER
                    + ModelUtils.getFoldersFromList(schema.getRelativeFolders()) + schema.getFileName();
            FileType fileType = EARKMETSUtils.addSchemaFileToMETS(metsWrapper, schemaFilePath, schema.getPath());

            if (representationId != null) {
                schemaFilePath = IPConstants.REPRESENTATIONS_FOLDER + representationId
                        + IPConstants.ZIP_PATH_SEPARATOR + schemaFilePath;
            }
            ZIPUtils.addFileTypeFileToZip(zipEntries, schema.getPath(), schemaFilePath, fileType);
        }
    }
}