List of usage examples for java.lang.InterruptedException

InterruptedException
public InterruptedException(String s)
Constructs an InterruptedException with the specified detail message.
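A minimal sketch of the constructor in use before the project examples below (the class and member names here are hypothetical, not taken from any of those projects): a worker checks a cancellation flag and throws an InterruptedException whose detail message is later available via getMessage().

// Hypothetical illustration of InterruptedException(String s).
public class CancellableWorker {
    private volatile boolean cancelled = false;

    public void cancel() {
        cancelled = true;
    }

    public void doWork() throws InterruptedException {
        for (int i = 0; i < 100; i++) {
            if (cancelled) {
                // The detail message passed to the constructor is returned by getMessage().
                throw new InterruptedException("work cancelled after " + i + " steps");
            }
            // ... one unit of work per iteration ...
        }
    }
}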
From source file: org.apache.carbondata.spark.vectorreader.VectorizedCarbonRecordReader.java

/**
 * Implementation of the RecordReader API.
 */
@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
        throws IOException, InterruptedException, UnsupportedOperationException {
    // The input split can contain a single HDFS block or multiple blocks, so first get all the
    // blocks and then set them in the query model.
    List<CarbonInputSplit> splitList;
    if (inputSplit instanceof CarbonInputSplit) {
        splitList = new ArrayList<>(1);
        splitList.add((CarbonInputSplit) inputSplit);
    } else if (inputSplit instanceof CarbonMultiBlockSplit) {
        // Contains multiple blocks; this is an optimization for concurrent queries.
        CarbonMultiBlockSplit multiBlockSplit = (CarbonMultiBlockSplit) inputSplit;
        splitList = multiBlockSplit.getAllSplits();
    } else {
        throw new RuntimeException("unsupported input split type: " + inputSplit);
    }
    List<TableBlockInfo> tableBlockInfoList = CarbonInputSplit.createBlocks(splitList);
    queryModel.setTableBlockInfos(tableBlockInfoList);
    queryModel.setVectorReader(true);
    try {
        queryExecutor = QueryExecutorFactory.getQueryExecutor(queryModel, taskAttemptContext.getConfiguration());
        iterator = (AbstractDetailQueryResultIterator) queryExecutor.execute(queryModel);
    } catch (QueryExecutionException e) {
        if (ExceptionUtils.indexOfThrowable(e, FileNotFoundException.class) > 0) {
            LOGGER.error(e.getMessage(), e);
            throw new InterruptedException(
                    "Insert overwrite may be in progress. Please check " + e.getMessage());
        }
        throw new InterruptedException(e.getMessage());
    } catch (Exception e) {
        if (ExceptionUtils.indexOfThrowable(e, FileNotFoundException.class) > 0) {
            LOGGER.error(e.getMessage(), e);
            throw new InterruptedException(
                    "Insert overwrite may be in progress. Please check " + e.getMessage());
        }
        throw e;
    }
}
From source file: org.wso2.carbon.registry.subscription.test.util.JMXClient.java
/**
 * Listens to all the JMX notifications until the required notification is
 * captured.
 *
 * @throws InterruptedException
 */
public boolean getNotifications() throws InterruptedException {
    Calendar startTime = Calendar.getInstance();
    try {
        while (!isSuccess()) {
            if ((Calendar.getInstance().getTimeInMillis() - startTime.getTimeInMillis()) < 60000) {
                Thread.sleep(1000);
            } else {
                break;
            }
        }
        return isSuccess();
    } catch (InterruptedException e) {
        log.error("JMX notification listener interrupted");
        throw new InterruptedException("JMX notification listener interrupted: " + e.getMessage());
    }
}
From source file: org.alfresco.extension.bulkimport.impl.BatchImporterImpl.java
private final void importBatchImpl(final NodeRef target, final Batch batch,
        final boolean replaceExisting, final boolean dryRun) throws InterruptedException {
    if (batch != null) {
        for (final BulkImportItem<BulkImportItemVersion> item : batch) {
            if (importStatus.isStopping() || Thread.currentThread().isInterrupted())
                throw new InterruptedException(
                        Thread.currentThread().getName() + " was interrupted. Terminating early.");

            importItem(target, item, replaceExisting, dryRun);
        }
    }
}
From source file: org.apache.mahout.regression.penalizedlinear.LinearCrossValidation.java
private void runPenalizedLinear() throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = getConf();
    conf.setInt(PenalizedLinearKeySet.NUM_CV, parameter.numOfCV);
    conf.setFloat(PenalizedLinearKeySet.ALPHA, parameter.alpha);
    conf.set(PenalizedLinearKeySet.LAMBDA, parameter.lambda);
    conf.setBoolean(PenalizedLinearKeySet.INTERCEPT, parameter.intercept);
    Job job = new Job(conf, "Penalized Linear Regression Driver running over input: " + input);
    job.setInputFormatClass(SequenceFileInputFormat.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    job.setMapperClass(PenalizedLinearMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(VectorWritable.class);
    job.setReducerClass(PenalizedLinearReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(VectorWritable.class);
    job.setCombinerClass(PenalizedLinearReducer.class);
    job.setNumReduceTasks(1);
    job.setJarByClass(LinearRegularizePath.class);
    FileInputFormat.addInputPath(job, new Path(output, DIRECTORY_CONTAINING_CONVERTED_INPUT));
    FileOutputFormat.setOutputPath(job, new Path(output, "output"));
    if (!job.waitForCompletion(true)) {
        throw new InterruptedException("Penalized Linear Regression Job failed processing " + input);
    }
    solver = new PenalizedLinearSolver();
    solver.setAlpha(parameter.alpha);
    solver.setIntercept(parameter.intercept);
    solver.setLambdaString(parameter.lambda);
    solver.initSolver(new Path(output, "output"), getConf());
    solver.crossValidate();
    printInfo(parameter, solver);
}
From source file: org.apache.mahout.regression.penalizedlinear.LinearRegularizePath.java
private void runPenalizedLinear() throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = getConf();
    conf.setInt(PenalizedLinearKeySet.NUM_CV, parameter.numOfCV);
    conf.setFloat(PenalizedLinearKeySet.ALPHA, parameter.alpha);
    conf.set(PenalizedLinearKeySet.LAMBDA, parameter.lambda);
    conf.setBoolean(PenalizedLinearKeySet.INTERCEPT, parameter.intercept);
    Job job = new Job(conf, "Penalized Linear Regression Driver running over input: " + input);
    job.setInputFormatClass(SequenceFileInputFormat.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    job.setMapperClass(PenalizedLinearMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(VectorWritable.class);
    job.setReducerClass(PenalizedLinearReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(VectorWritable.class);
    job.setCombinerClass(PenalizedLinearReducer.class);
    job.setNumReduceTasks(1);
    job.setJarByClass(LinearRegularizePath.class);
    FileInputFormat.addInputPath(job, new Path(output, DIRECTORY_CONTAINING_CONVERTED_INPUT));
    FileOutputFormat.setOutputPath(job, new Path(output, "output"));
    if (!job.waitForCompletion(true)) {
        throw new InterruptedException("Penalized Linear Regression Job failed processing " + input);
    }
    solver = new PenalizedLinearSolver();
    solver.setAlpha(parameter.alpha);
    solver.setIntercept(parameter.intercept);
    solver.setLambdaString(parameter.lambda);
    solver.initSolver(new Path(output, "output"), getConf());
    solver.regularizePath(solver.getLambda());
    printInfo(parameter, solver);
}
From source file: org.kawanfw.file.api.util.client.StreamsTransferWithProgress.java
/**
 * Downloads the list of remote files using {@code RemoteInputStream}. <br>
 * Will set the progress indicator's shareable variable
 * {@link ConnectionHttp#getProgress()}. <br>
 * Will also test the value of {@link ConnectionHttp#getCancelled()} to
 * throw an {@code InterruptedException} if necessary. <br>
 * <br>
 * Warning: streams are not closed and must be closed by the caller.
 *
 * @param inStreams
 *            the RemoteInputStream list to read for download
 * @param files
 *            the files to create
 * @param totalLength
 *            the total length of the files
 *
 * @throws IOException
 * @throws RemoteException
 * @throws SocketException
 * @throws UnknownHostException
 * @throws InvalidLoginException
 * @throws IllegalArgumentException
 * @throws ConnectException
 */
public void download(List<InputStream> inStreams, List<File> files, long totalLength)
        throws ConnectException, IllegalArgumentException, InvalidLoginException, UnknownHostException,
        SocketException, RemoteException, IOException, InterruptedException {
    if (inStreams == null) {
        throw new IllegalArgumentException("inStreams can not be null!");
    }
    if (files == null) {
        throw new IllegalArgumentException("files can not be null!");
    }
    InputStream in = null;
    OutputStream out = null;
    for (int i = 0; i < inStreams.size(); i++) {
        try {
            if (inStreams.get(i) == null) {
                continue;
            }
            String pathname = ((RemoteInputStream) inStreams.get(i)).getPathname();
            debug("Downloading remoteFile with RemoteInputStream: " + pathname + " progress: " + progress.get());
            currentPathnameDownload = pathname;
            in = inStreams.get(i);
            out = new BufferedOutputStream(new FileOutputStream(files.get(i)));
            int tempLen = 0;
            byte[] buffer = new byte[1024 * 4];
            int n = 0;
            while ((n = in.read(buffer)) != -1) {
                tempLen += n;
                if (totalLength > 0 && tempLen > totalLength / 100) {
                    tempLen = 0;
                    int cpt = progress.get();
                    cpt++;
                    // Update the progress value for the progress indicator
                    progress.set(Math.min(99, cpt));
                }
                // If the progress indicator says that the user has cancelled the
                // download, stop now!
                if (cancelled.get()) {
                    throw new InterruptedException("File download cancelled by user.");
                }
                out.write(buffer, 0, n);
            }
        } finally {
            IOUtils.closeQuietly(in);
            IOUtils.closeQuietly(out);
            inStreams.set(i, null);
        }
    }
}
From source file: org.eclipse.osee.ote.core.test.shells.TelnetShell.java
public synchronized void waitForTransmission(int millis) throws InterruptedException {
    if (!inputBuffer.waitFor(millis)) {
        throw new InterruptedException("Waiting for transmission took longer than " + millis + " milliseconds.");
    }
}
From source file: org.alfresco.extension.bulkimport.source.fs.DirectoryAnalyser.java
private Map<String, SortedMap<BigDecimal, Pair<File, File>>> categoriseFiles(final File[] directoryListing)
        throws InterruptedException {
    Map<String, SortedMap<BigDecimal, Pair<File, File>>> result = null;
    if (directoryListing != null) {
        result = new HashMap<>();
        for (final File file : directoryListing) {
            if (importStatus.isStopping() || Thread.currentThread().isInterrupted())
                throw new InterruptedException(
                        Thread.currentThread().getName() + " was interrupted. Terminating early.");

            categoriseFile(result, file);
        }
    }
    return (result);
}
From source file: com.marklogic.contentpump.MultithreadedMapper.java
/**
 * Run the application's maps using a thread pool.
 */
@Override
public void run(Context context) throws IOException, InterruptedException {
    outer = context;
    int numberOfThreads = getThreadCount(context);
    mapClass = getMapperClass(context);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Running with " + numberOfThreads + " threads");
    }
    // The current mapper takes one thread.
    numberOfThreads--;
    InputSplit split = context.getInputSplit();
    // Submit the runners.
    try {
        List<Future<?>> taskList = null;
        if (threadPool != null) {
            taskList = new ArrayList<Future<?>>();
            synchronized (threadPool) {
                for (int i = 0; i < numberOfThreads; ++i) {
                    MapRunner runner = new MapRunner();
                    BaseMapper<K1, V1, K2, V2> mapper = runner.getMapper();
                    if (!threadPool.isShutdown()) {
                        Collection<Future<Object>> tasks = mapper.submitTasks(threadPool, split);
                        taskList.addAll(tasks);
                        numberOfThreads -= tasks.size();
                        Future<?> future = threadPool.submit(runner);
                        taskList.add(future);
                    } else {
                        throw new InterruptedException("Thread Pool has been shut down");
                    }
                }
                threadPool.notify();
            }
            // MapRunner that runs in the current thread.
            MapRunner r = new MapRunner();
            r.run();
            for (Future<?> f : taskList) {
                f.get();
            }
        } else {
            runners = new ArrayList<MapRunner>(numberOfThreads);
            for (int i = 0; i < numberOfThreads; ++i) {
                MapRunner thread = new MapRunner();
                thread.start();
                runners.add(i, thread);
            }
            // MapRunner that runs in the current thread.
            MapRunner r = new MapRunner();
            r.run();
            for (int i = 0; i < numberOfThreads; ++i) {
                MapRunner thread = runners.get(i);
                thread.join();
                Throwable th = thread.throwable;
                if (th != null) {
                    if (th instanceof IOException) {
                        throw (IOException) th;
                    } else if (th instanceof InterruptedException) {
                        throw (InterruptedException) th;
                    } else {
                        throw new RuntimeException(th);
                    }
                }
            }
        }
    } catch (ClassNotFoundException e) {
        LOG.error("MapRunner class not found", e);
    } catch (ExecutionException e) {
        LOG.error("Error waiting for MapRunner threads to complete", e);
    }
}
From source file: org.apache.mahout.classifier.sequencelearning.crf.InputDriver.java
public static void buildTaggerImpl(Configuration conf, Path input, Path output, int xsize)
        throws IOException, InterruptedException, ClassNotFoundException {
    conf.set("xsize", String.valueOf(xsize));
    Path CRFModelInitialPath = new Path(output, "CRFModelInitial");
    conf.set("CRFModelInitialPath", CRFModelInitialPath.toString());
    Path TaggerImplPath = new Path(output, "TaggerImpl");
    conf.set("TaggerImplPath", TaggerImplPath.toString());
    String jobName = "buildTagger";
    System.out.println(jobName);
    Job job = new Job(conf, jobName);
    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(TaggerImplWritable.class);
    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(TaggerImplWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    job.setMapperClass(BuildTaggerImplMapper.class);
    job.setReducerClass(BuildTaggerImplReducer.class);
    job.setNumReduceTasks(1);
    job.setJarByClass(InputDriver.class);
    FileInputFormat.addInputPath(job, input);
    FileOutputFormat.setOutputPath(job, TaggerImplPath);
    if (!job.waitForCompletion(true)) {
        throw new InterruptedException("");
    }
}
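Several of the examples above reuse InterruptedException to signal unrelated failures (failed Hadoop jobs, user-cancelled downloads). The conventional pattern, sketched below with hypothetical class and method names rather than code from any of the projects above, is to check the thread's interrupt status, throw with a detail message, and restore the flag when catching so code further up the stack still sees the interruption.

// Hypothetical sketch of the conventional interrupt-handling pattern.
public class InterruptAwareCopy {

    void copyChunks(java.util.List<byte[]> chunks) throws InterruptedException {
        for (byte[] chunk : chunks) {
            // Cooperative cancellation: honor the thread's interrupt status.
            if (Thread.currentThread().isInterrupted()) {
                throw new InterruptedException("copy interrupted before completion");
            }
            process(chunk);
        }
    }

    void copyQuietly(java.util.List<byte[]> chunks) {
        try {
            copyChunks(chunks);
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers further up the stack see it.
            Thread.currentThread().interrupt();
        }
    }

    private void process(byte[] chunk) {
        // ... one unit of work per chunk ...
    }
}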