List of usage examples for java.util.concurrent.BlockingQueue.add(E)
boolean add(E e);
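For reference before the examples: on a capacity-constrained queue, add differs from offer and put only in how it reports a full queue. add throws IllegalStateException, offer returns false, and put blocks. A minimal, self-contained sketch (not taken from any of the sources below):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class AddVsOffer {
    public static void main(String[] args) {
        BlockingQueue<String> queue = new ArrayBlockingQueue<>(1);
        System.out.println(queue.add("first"));    // true: capacity available
        System.out.println(queue.offer("second")); // false: queue is full
        try {
            queue.add("third");                    // add reports a full queue by exception
        } catch (IllegalStateException e) {
            System.out.println("add on a full queue: " + e);
        }
    }
}

On unbounded implementations such as LinkedBlockingQueue with its default capacity of Integer.MAX_VALUE, or LinkedTransferQueue, add can never hit the capacity limit in practice, which is why several of the examples below call it without guarding against IllegalStateException.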
From source file:io.orchestrate.client.itest.KvTest.java
@Theory
public void pojoKvPutAsync(@ForAll(sampleSize = 10) final String key) throws InterruptedException {
    assumeThat(key, not(isEmptyString()));

    final BlockingQueue<KvMetadata> queue = DataStructures.getLTQInstance(KvMetadata.class);
    User user = new User("test1", "Some description");
    final OrchestrateRequest<KvMetadata> addUserRequest = client.kv(collection(), key)
            .put(user)
            .on(new ResponseAdapter<KvMetadata>() {
                @Override
                public void onFailure(final Throwable error) {
                    // handle error condition
                }

                @Override
                public void onSuccess(final KvMetadata userKvMeta) {
                    queue.add(userKvMeta);
                }
            });

    @SuppressWarnings("unchecked")
    final KvMetadata kvMetadata = queue.poll(5000, TimeUnit.MILLISECONDS);
    assertNotNull(kvMetadata);
}
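The test uses the queue as a one-shot bridge between the asynchronous Orchestrate callback and the test thread: onSuccess deposits the metadata with add, and the test blocks on poll with a five-second timeout. Since DataStructures.getLTQInstance appears to return an unbounded LinkedTransferQueue (per the LTQ name), the add cannot fail with IllegalStateException.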
From source file:gobblin.couchbase.writer.CouchbaseWriterTest.java
private List<Pair<AbstractDocument, Future>> writeRecords(Iterator<AbstractDocument> recordIterator,
        CouchbaseWriter writer, int outstandingRequests, long kvTimeout, TimeUnit kvTimeoutUnit)
        throws DataConversionException, UnsupportedEncodingException {

    final BlockingQueue<Pair<AbstractDocument, Future>> outstandingCallQueue =
            new LinkedBlockingDeque<>(outstandingRequests);
    final List<Pair<AbstractDocument, Future>> failedFutures = new ArrayList<>(outstandingRequests);

    int index = 0;
    long runTime = 0;
    final AtomicInteger callbackSuccesses = new AtomicInteger(0);
    final AtomicInteger callbackFailures = new AtomicInteger(0);
    final ConcurrentLinkedDeque<Throwable> callbackExceptions = new ConcurrentLinkedDeque<>();
    Verifier verifier = new Verifier();

    while (recordIterator.hasNext()) {
        AbstractDocument doc = recordIterator.next();
        index++;
        verifier.onWrite(doc);
        final long startTime = System.nanoTime();
        Future callFuture = writer.write(doc, new WriteCallback<TupleDocument>() {
            @Override
            public void onSuccess(WriteResponse<TupleDocument> writeResponse) {
                callbackSuccesses.incrementAndGet();
            }

            @Override
            public void onFailure(Throwable throwable) {
                callbackFailures.incrementAndGet();
                callbackExceptions.add(throwable);
            }
        });
        drainQueue(outstandingCallQueue, 1, kvTimeout, kvTimeoutUnit, failedFutures);
        outstandingCallQueue.add(new Pair<>(doc, callFuture));
        runTime += System.nanoTime() - startTime;
    }

    int failedWrites = 0;
    long responseStartTime = System.nanoTime();
    drainQueue(outstandingCallQueue, outstandingRequests, kvTimeout, kvTimeoutUnit, failedFutures);
    runTime += System.nanoTime() - responseStartTime;

    for (Throwable failure : callbackExceptions) {
        System.out.println(failure.getClass() + " : " + failure.getMessage());
    }
    failedWrites += failedFutures.size();
    System.out.println("Total time to send " + index + " records = " + runTime / 1000000.0 + "ms, "
            + "Failed writes = " + failedWrites + " Callback Successes = " + callbackSuccesses.get()
            + " Callback Failures = " + callbackFailures.get());

    verifier.verify(writer.getBucket());
    return failedFutures;
}
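The bounded LinkedBlockingDeque here acts as a window of in-flight writes: drainQueue retires at least one completed future before each add, so the capacity-constrained deque always has room and the add cannot throw. The same windowing idiom can be written against plain java.util.concurrent types; a minimal sketch under that assumption (all names hypothetical, not from the Gobblin sources):

import java.util.concurrent.*;

public class InFlightWindow {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        // Capacity bounds the number of outstanding tasks; the add() below never
        // throws because we always free one slot first once the window is full.
        BlockingQueue<Future<Integer>> window = new ArrayBlockingQueue<>(8);
        for (int i = 0; i < 100; i++) {
            final int n = i;
            if (window.remainingCapacity() == 0) {
                window.take().get();          // wait for the oldest task before submitting more
            }
            window.add(pool.submit(() -> n * n));
        }
        for (Future<Integer> f : window) {
            f.get();                          // drain the tail of the window
        }
        pool.shutdown();
    }
}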
From source file:ubic.gemma.core.loader.expression.arrayDesign.ArrayDesignProbeMapperServiceImpl.java
@Override
public void processArrayDesign(ArrayDesign arrayDesign, ProbeMapperConfig config, boolean useDB) {
    assert config != null;
    if (arrayDesign.getTechnologyType().equals(TechnologyType.GENELIST)
            || arrayDesign.getTechnologyType().equals(TechnologyType.SEQUENCING)
            || arrayDesign.getTechnologyType().equals(TechnologyType.OTHER)) {
        throw new IllegalArgumentException(
                "Do not use this service to process platforms that do not use a probe-based technology.");
    }

    Collection<Taxon> taxa = arrayDesignService.getTaxa(arrayDesign.getId());
    Taxon taxon = arrayDesign.getPrimaryTaxon();
    if (taxa.size() > 1 && taxon == null) {
        throw new IllegalArgumentException(
                "Array design has sequence from multiple taxa and has no primary taxon set: " + arrayDesign);
    }

    GoldenPathSequenceAnalysis goldenPathDb = new GoldenPathSequenceAnalysis(taxon);
    BlockingQueue<BACS> persistingQueue = new ArrayBlockingQueue<>(ArrayDesignProbeMapperServiceImpl.QUEUE_SIZE);
    AtomicBoolean generatorDone = new AtomicBoolean(false);
    AtomicBoolean loaderDone = new AtomicBoolean(false);

    this.load(persistingQueue, generatorDone, loaderDone, useDB);

    if (useDB) {
        ArrayDesignProbeMapperServiceImpl.log.info("Removing any old associations");
        arrayDesignService.deleteGeneProductAssociations(arrayDesign);
    }

    int count = 0;
    int hits = 0;
    int numWithNoResults = 0;
    ArrayDesignProbeMapperServiceImpl.log
            .info("Start processing " + arrayDesign.getCompositeSequences().size() + " probes ...");

    for (CompositeSequence compositeSequence : arrayDesign.getCompositeSequences()) {
        Map<String, Collection<BlatAssociation>> results = this.processCompositeSequence(config, taxon,
                goldenPathDb, compositeSequence);
        if (results == null) {
            numWithNoResults++;
            continue;
        }
        for (Collection<BlatAssociation> col : results.values()) {
            for (BlatAssociation association : col) {
                if (ArrayDesignProbeMapperServiceImpl.log.isDebugEnabled())
                    ArrayDesignProbeMapperServiceImpl.log.debug(association);
                persistingQueue.add(new BACS(compositeSequence, association));
            }
            ++hits;
        }
        if (++count % 200 == 0) {
            ArrayDesignProbeMapperServiceImpl.log.info("Processed " + count + " composite sequences"
                    + " with blat results; " + hits + " mappings found.");
        }
    }

    generatorDone.set(true);
    ArrayDesignProbeMapperServiceImpl.log.info("Waiting for loading to complete ...");
    while (!loaderDone.get()) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    ArrayDesignProbeMapperServiceImpl.log.info(
            "Processed " + count + " composite sequences with blat results; " + hits + " mappings found.");
    if (numWithNoResults > 0) {
        ArrayDesignProbeMapperServiceImpl.log.info(numWithNoResults + " had no blat results");
    }

    arrayDesignReportService.generateArrayDesignReport(arrayDesign.getId());
    this.deleteOldFiles(arrayDesign);
}
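This is a classic single-producer/single-consumer setup: the mapping loop feeds a bounded ArrayBlockingQueue while a loader thread (started by this.load) drains it, with AtomicBoolean flags signaling completion in each direction. One caveat worth noting: because the queue is bounded, add will throw IllegalStateException if the loader falls behind by QUEUE_SIZE items, whereas put would simply block the producer. A minimal sketch of the same flags-plus-queue shape (all names hypothetical, using put for back-pressure):

import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;

public class ProducerConsumerFlags {
    public static void main(String[] args) throws Exception {
        BlockingQueue<Integer> queue = new ArrayBlockingQueue<>(100);
        AtomicBoolean generatorDone = new AtomicBoolean(false);
        AtomicBoolean loaderDone = new AtomicBoolean(false);

        Thread loader = new Thread(() -> {
            try {
                // Keep draining until the producer is done AND the queue is empty.
                while (!(generatorDone.get() && queue.isEmpty())) {
                    Integer item = queue.poll(100, TimeUnit.MILLISECONDS);
                    if (item != null) {
                        System.out.println("persisted " + item);
                    }
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            } finally {
                loaderDone.set(true);
            }
        });
        loader.start();

        for (int i = 0; i < 1000; i++) {
            queue.put(i); // put blocks when full; add would throw IllegalStateException instead
        }
        generatorDone.set(true);

        while (!loaderDone.get()) {
            Thread.sleep(50);
        }
    }
}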
From source file:coral.service.ExpServer.java
public void process(Integer client, String request, BlockingQueue<Message> outQueue) {
    ExpServiceImpl service = ((ExpServiceImpl) ch.getService());

    int qindex = request.indexOf('?');
    String arg = "";
    if (qindex < 1) {
        qindex = request.length();
    } else {
        arg = request.substring(qindex + 1);
    }

    logger.info(" request: " + request + " qindex: " + qindex);
    String cmd = request.substring(9, qindex);

    // get args
    logger.debug("arg: ###" + arg + "###");
    Map<String, String> args = new HashMap<String, String>();
    for (String s : arg.split("&")) {
        String[] ss = s.split("=");
        if (ss[0] != null) {
            args.put(ss[0], (ss.length > 1) ? ss[1] : "");
        }
    }

    logger.info("server command " + cmd);
    String output = null;

    // do the deed
    if (cmd.length() > 0) {
        StringBuilder msg = new StringBuilder();
        Map<Integer, ExpData> data = service.getAllData();
        Map<String, Object> adds = new HashMap<String, Object>();
        adds.put("_agentdata", data);
        adds.put("_stages", service.getStages());
        adds.put("_query", args);
        // adds.put("_clients", ch.getClientInfoMapList());
        String content = ExpTemplateUtil.evalVM(cmd, data, null, service, adds);
        msg.append(content);
        output = msg.toString();
    }

    if (args.containsKey("debug")) {
        System.out.println("" + args.get("debug") + " - serv: " + service.debug);
        if (args.get("debug").equals("ON")) {
            service.debug = true;
            outQueue.add(new Message("info:?debug=true"));
        }
        if (args.get("debug").equals("OFF")) {
            service.debug = false;
            outQueue.add(new Message("info:?debug=false"));
        }
        System.out.println("" + args.get("debug") + " - serv: " + service.debug);
    }

    if (args.containsKey("export")) {
        String filename = args.get("export");
        if (filename != null && !filename.equals("")) {
            File file = new File(filename);
            String msg = "Exporting data to " + file.getAbsolutePath() + ", please wait...";
            outQueue.add(new Message("vset", "_exp.html", "text/plain", "YES", msg.getBytes()));
            try {
                Thread.sleep(100);
            } catch (InterruptedException e2) {
                e2.printStackTrace();
            }

            CSVWriter writer;
            try {
                List<String> headers = dataService.getAllVariableNames();
                writer = new CSVWriter(file, "UTF-8");
                List<String[]> stages = dataService.stageInfos();
                headers.add(0, "_id");
                headers.add(1, "_collection");
                headers.add(2, "_template");
                headers.add(3, "_stage");
                headers.add(4, "_inmsg");
                System.out.println(Arrays.toString(headers.toArray()));
                writer.writeHeader(headers.toArray(new String[] {}));
                for (String[] s : stages) {
                    String[] data = new String[headers.size()];
                    for (int i = 0; i < s.length; i++) {
                        data[i] = s[i];
                    }
                    Map<String, String> map = dataService.getMap(s[1], Long.parseLong(s[0]));
                    for (Map.Entry<String, String> e : map.entrySet()) {
                        data[headers.indexOf(e.getKey())] = e.getValue();
                    }
                    writer.writeData(data);
                }
                writer.close();
            } catch (IOException | NumberFormatException | SQLException e1) {
                e1.printStackTrace();
            }

            String msg2 = "Exporting data to " + file.getAbsolutePath() + ", successful...";
            outQueue.add(new Message("vset", "_exp.html", "text/plain", "YES", msg2.getBytes()));
        }
    }

    if (args.containsKey("makeclient")) {
        String props = args.get("makeclient");
        try {
            props = URLDecoder.decode(props, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
        logger.debug("make client " + props);

        Properties generalProp = new Properties();
        InputStream is = new ByteArrayInputStream(props.getBytes());
        try {
            generalProp.load(is);
            is.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        // generalProp.put("coral.polyp.ontop", "false");
        CoralPolyp.invokeClient(generalProp);
        outQueue.add(new Message("info:?client=started", new byte[] {}));
    }

    if (output != null) {
        outQueue.add(new Message("vset", "_exp.html", "text/html", "YES", output.getBytes()));
    }
}
From source file:gobblin.couchbase.writer.CouchbaseWriter.java
@Override
public Future<WriteResponse> write(final D record, final WriteCallback callback) {
    assertRecordWritable(record);
    if (record instanceof TupleDocument) {
        ((TupleDocument) record).content().value1().retain();
    }
    Observable<D> observable = _bucket.async().upsert(record);
    if (callback == null) {
        return new WriteResponseFuture<>(
                observable.timeout(_operationTimeout, _operationTimeunit).toBlocking().toFuture(),
                _defaultWriteResponseMapper);
    } else {
        final AtomicBoolean callbackFired = new AtomicBoolean(false);
        final BlockingQueue<Pair<WriteResponse, Throwable>> writeResponseQueue = new ArrayBlockingQueue<>(1);

        final Future<WriteResponse> writeResponseFuture = new Future<WriteResponse>() {
            @Override
            public boolean cancel(boolean mayInterruptIfRunning) {
                return false;
            }

            @Override
            public boolean isCancelled() {
                return false;
            }

            @Override
            public boolean isDone() {
                return callbackFired.get();
            }

            @Override
            public WriteResponse get() throws InterruptedException, ExecutionException {
                Pair<WriteResponse, Throwable> writeResponseThrowablePair = writeResponseQueue.take();
                return getWriteResponseorThrow(writeResponseThrowablePair);
            }

            @Override
            public WriteResponse get(long timeout, TimeUnit unit)
                    throws InterruptedException, ExecutionException, TimeoutException {
                Pair<WriteResponse, Throwable> writeResponseThrowablePair = writeResponseQueue.poll(timeout, unit);
                if (writeResponseThrowablePair == null) {
                    throw new TimeoutException("Timeout exceeded while waiting for future to be done");
                } else {
                    return getWriteResponseorThrow(writeResponseThrowablePair);
                }
            }
        };

        observable.timeout(_operationTimeout, _operationTimeunit).subscribe(new Subscriber<D>() {
            @Override
            public void onCompleted() {
            }

            @Override
            public void onError(Throwable e) {
                callbackFired.set(true);
                writeResponseQueue.add(new Pair<WriteResponse, Throwable>(null, e));
                callback.onFailure(e);
            }

            @Override
            public void onNext(D doc) {
                try {
                    callbackFired.set(true);
                    WriteResponse writeResponse = new GenericWriteResponse<D>(doc);
                    writeResponseQueue.add(new Pair<WriteResponse, Throwable>(writeResponse, null));
                    callback.onSuccess(writeResponse);
                } finally {
                    if (doc instanceof TupleDocument) {
                        ((TupleDocument) doc).content().value1().release();
                    }
                }
            }
        });
        return writeResponseFuture;
    }
}
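Here a single-slot ArrayBlockingQueue(1) turns the Rx subscription into a hand-rolled Future: exactly one outcome, either the (writeResponse, null) pair from onNext or the (null, error) pair from onError, is deposited with add, and get() blocks on take() or poll(timeout) until it arrives. The add is safe despite the capacity of one because, under the Rx contract for a single-document upsert, the observable emits at most one item and at most one terminal error, so the queue never sees a second insertion.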
From source file:com.alibaba.otter.node.etl.common.pipe.impl.http.archive.ArchiveBean.java
@SuppressWarnings("resource")
private boolean doPack(final File targetArchiveFile, List<FileData> fileDatas,
        final ArchiveRetriverCallback<FileData> callback) {
    if (targetArchiveFile.exists() && !NioUtils.delete(targetArchiveFile, 3)) {
        throw new ArchiveException(
                String.format("[%s] exists and delete failed", targetArchiveFile.getAbsolutePath()));
    }

    boolean exist = false;
    ZipOutputStream zipOut = null;
    Set<String> entryNames = new HashSet<String>();
    BlockingQueue<Future<ArchiveEntry>> queue = new LinkedBlockingQueue<Future<ArchiveEntry>>();
    ExecutorCompletionService<ArchiveEntry> completionService =
            new ExecutorCompletionService<ArchiveEntry>(executor, queue);

    final File targetDir = new File(targetArchiveFile.getParentFile(),
            FilenameUtils.getBaseName(targetArchiveFile.getPath()));
    try {
        FileUtils.forceMkdir(targetDir);
        zipOut = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(targetArchiveFile)));
        zipOut.setLevel(Deflater.BEST_SPEED);
        for (final FileData fileData : fileDatas) {
            if (fileData.getEventType().isDelete()) {
                continue; // delete events carry no file content to pack
            }

            String namespace = fileData.getNameSpace();
            String path = fileData.getPath();
            boolean isLocal = StringUtils.isBlank(namespace);
            String entryName = null;
            if (isLocal) {
                entryName = FilenameUtils.getPath(path) + FilenameUtils.getName(path);
            } else {
                entryName = namespace + File.separator + path;
            }

            // skip duplicate entry names
            if (!entryNames.contains(entryName)) {
                entryNames.add(entryName);
            } else {
                continue;
            }

            final String name = entryName;
            if (isLocal && !useLocalFileMutliThread) {
                // local files are resolved inline; wrap the result so the
                // draining loop below can treat both paths uniformly
                queue.add(new DummyFuture(new ArchiveEntry(name, callback.retrive(fileData))));
            } else {
                completionService.submit(new Callable<ArchiveEntry>() {
                    public ArchiveEntry call() throws Exception {
                        InputStream input = null;
                        OutputStream output = null;
                        try {
                            input = callback.retrive(fileData);
                            if (input instanceof LazyFileInputStream) {
                                input = ((LazyFileInputStream) input).getInputSteam();
                            }
                            if (input != null) {
                                File tmp = new File(targetDir, name);
                                NioUtils.create(tmp.getParentFile(), false, 3);
                                output = new FileOutputStream(tmp);
                                NioUtils.copy(input, output);
                                return new ArchiveEntry(name, new File(targetDir, name));
                            } else {
                                return new ArchiveEntry(name);
                            }
                        } finally {
                            IOUtils.closeQuietly(input);
                            IOUtils.closeQuietly(output);
                        }
                    }
                });
            }
        }

        for (int i = 0; i < entryNames.size(); i++) {
            ArchiveEntry input = null;
            InputStream stream = null;
            try {
                input = queue.take().get();
                if (input == null) {
                    continue;
                }
                stream = input.getStream();
                if (stream == null) {
                    continue;
                }
                if (stream instanceof LazyFileInputStream) {
                    stream = ((LazyFileInputStream) stream).getInputSteam();
                }
                exist = true;
                zipOut.putNextEntry(new ZipEntry(input.getName()));
                NioUtils.copy(stream, zipOut);
                zipOut.closeEntry();
            } finally {
                IOUtils.closeQuietly(stream);
            }
        }

        if (exist) {
            zipOut.finish();
        }
    } catch (Exception e) {
        throw new ArchiveException(e);
    } finally {
        IOUtils.closeQuietly(zipOut);
        try {
            FileUtils.deleteDirectory(targetDir);
        } catch (IOException e) {
            // ignore
        }
    }
    return exist;
}
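A detail that makes the draining loop work: the ExecutorCompletionService is constructed with the same LinkedBlockingQueue the method later polls, so completed asynchronous retrievals land on queue automatically, while locally resolved entries are injected synchronously via queue.add(new DummyFuture(...)). Both paths then converge in the single queue.take().get() loop, one take per recorded entry name. Because the queue is unbounded, neither the executor's completions nor the direct add calls can fail on capacity.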
From source file:org.apache.falcon.service.FeedSLAMonitoringService.java
void addNewPendingFeedInstances(Date from, Date to) throws FalconException {
    Set<String> currentClusters = DeploymentUtil.getCurrentClusters();
    for (String feedName : monitoredFeeds) {
        Feed feed = EntityUtil.getEntity(EntityType.FEED, feedName);
        for (Cluster feedCluster : feed.getClusters().getClusters()) {
            if (currentClusters.contains(feedCluster.getName())) {
                Date nextInstanceTime = from;
                Pair<String, String> key = new Pair<>(feed.getName(), feedCluster.getName());
                BlockingQueue<Date> instances = pendingInstances.get(key);
                if (instances == null) {
                    instances = new LinkedBlockingQueue<>(queueSize);
                    Date feedStartTime = feedCluster.getValidity().getStart();
                    Frequency retentionFrequency = FeedHelper.getRetentionFrequency(feed, feedCluster);
                    ExpressionHelper evaluator = ExpressionHelper.get();
                    ExpressionHelper.setReferenceDate(new Date());
                    Date retention = new Date(evaluator.evaluate(retentionFrequency.toString(), Long.class));
                    if (feedStartTime.before(retention)) {
                        feedStartTime = retention;
                    }
                    nextInstanceTime = feedStartTime;
                }
                Set<Date> exists = new HashSet<>(instances);
                org.apache.falcon.entity.v0.cluster.Cluster currentCluster = EntityUtil
                        .getEntity(EntityType.CLUSTER, feedCluster.getName());
                nextInstanceTime = EntityUtil.getNextStartTime(feed, currentCluster, nextInstanceTime);
                while (nextInstanceTime.before(to)) {
                    if (instances.size() >= queueSize) { // if no space, first make some space
                        LOG.debug("Removing instance={} for <feed,cluster>={}", instances.peek(), key);
                        exists.remove(instances.peek());
                        instances.remove();
                    }
                    LOG.debug("Adding instance={} for <feed,cluster>={}", nextInstanceTime, key);
                    if (exists.add(nextInstanceTime)) {
                        instances.add(nextInstanceTime);
                    }
                    nextInstanceTime = new Date(nextInstanceTime.getTime() + ONE_MS);
                    nextInstanceTime = EntityUtil.getNextStartTime(feed, currentCluster, nextInstanceTime);
                }
                pendingInstances.put(key, instances);
            }
        }
    }
}
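The evict-then-add idiom above keeps the bounded queue acting as a sliding window over the most recent pending instances: whenever size() reaches queueSize, the head is removed before add is called, so the add cannot throw as long as this code is the only writer. A generic sketch of the idiom (names hypothetical, not from the Falcon sources):

import java.util.concurrent.*;

public class SlidingWindowQueue {
    public static void main(String[] args) {
        final int capacity = 5;
        BlockingQueue<Integer> window = new LinkedBlockingQueue<>(capacity);
        for (int i = 0; i < 12; i++) {
            if (window.size() >= capacity) {
                window.remove(); // evict the oldest element to guarantee room
            }
            window.add(i);       // safe: we just freed a slot (single-threaded access)
        }
        System.out.println(window); // prints [7, 8, 9, 10, 11]
    }
}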
From source file:org.apache.reef.io.network.NetworkConnectionServiceTest.java
/**
 * NetworkService messaging rate benchmark.
 */
@Test
public void testMessagingNetworkConnServiceRateDisjoint() throws Exception {
    Assume.assumeFalse("Use log level INFO to run benchmarking", LOG.isLoggable(Level.FINEST));
    LOG.log(Level.FINEST, name.getMethodName());

    final BlockingQueue<Object> barrier = new LinkedBlockingQueue<>();

    final int numThreads = 4;
    final int size = 2000;
    final int numMessages = 300000 / (Math.max(1, size / 512));
    final int totalNumMessages = numMessages * numThreads;
    final String message = StringUtils.repeat('1', size);

    final ExecutorService e = Executors.newCachedThreadPool();
    for (int t = 0; t < numThreads; t++) {
        final int tt = t;
        e.submit(new Runnable() {
            public void run() {
                try (final NetworkMessagingTestService messagingTestService =
                        new NetworkMessagingTestService(localAddress)) {
                    final Monitor monitor = new Monitor();
                    final Codec<String> codec = new StringCodec();
                    messagingTestService.registerTestConnectionFactory(groupCommClientId, numMessages, monitor,
                            codec);
                    try (final Connection<String> conn = messagingTestService
                            .getConnectionFromSenderToReceiver(groupCommClientId)) {
                        try {
                            conn.open();
                            // send messages to the receiver
                            for (int count = 0; count < numMessages; ++count) {
                                conn.write(message);
                            }
                            monitor.mwait();
                        } catch (final NetworkException e) {
                            e.printStackTrace();
                            throw new RuntimeException(e);
                        }
                    }
                } catch (final Exception e) {
                    throw new RuntimeException(e);
                }
            }
        });
    }

    // start and time
    final long start = System.currentTimeMillis();
    final Object ignore = new Object();
    for (int i = 0; i < numThreads; i++) {
        barrier.add(ignore);
    }
    e.shutdown();
    e.awaitTermination(100, TimeUnit.SECONDS);
    final long end = System.currentTimeMillis();

    final double runtime = ((double) end - start) / 1000;
    LOG.log(Level.INFO, "size: " + size + "; messages/s: " + totalNumMessages / runtime
            + " bandwidth(bytes/s): " + ((double) totalNumMessages * 2 * size) / runtime); // x2 for unicode chars
}
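A note on the add usage here: the unbounded LinkedBlockingQueue named barrier is only ever filled, one marker object per worker thread, and nothing in the snippet shown takes from it; the timing instead relies on awaitTermination. On an unbounded queue, add always succeeds and is effectively interchangeable with offer.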
From source file:coral.service.ExpServable.java
public void process(Message cmd, BlockingQueue<Message> outQueue) {
    Integer id = getClientId(outQueue);
    String c = cmd.getFullContent();

    // remove leading slash (String is immutable, so the result must be reassigned)
    c = c.replaceAll("^/", "");

    logger.info("run cmd on server: ####" + c + "#### to " + id);

    if (c.startsWith("__RES")) {
        logger.info("send resources");
        sendResources(outQueue);
    } else if (c.startsWith("__FILE/")) {
        logger.info("client " + id + " requests file " + c);
        int pos = c.indexOf("__FILE/");
        String filename = c.substring(pos + 7);
        service.evalTemplate(id, filename);
    } else if (c.startsWith(serverMarker)) {
        logger.info("client " + id + " requests server " + c);
        server.process(id, c, outQueue);
    } else if (c.startsWith(refreshMarker)) {
        id = Integer.parseInt(c.replaceAll("[^\\d]", ""));
        logger.info("refresh client " + id + " " + c);
        BlockingQueue<Message> oldq = clients.get(id);
        if (oldq != null) {
            clients.remove(oldq);
            clients.put(id, outQueue);
            service.process(id, "?refreshid=" + id);
        } else {
            logger.info("START new client with this id");
            clients.put(id, outQueue);
            service.addClient(id);
            if (useScreenwriter) {
                try {
                    FileWriter fw = new FileWriter(new File("client_screens" + id + ".html"));
                    screenwriter.put(id, fw);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            service.process(id, "?refreshid=" + id);
        }
    } else if (id == null) {
        logger.info("START new client with no id");
        clientCount = dataService.getNewId(clientCount + 1);
        clients.put(clientCount, outQueue);
        service.addClient(clientCount);
        id = clientCount;
        if (useScreenwriter) {
            try {
                FileWriter fw = new FileWriter(new File("client_screens" + id + ".html"));
                screenwriter.put(id, fw);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        service.process(id, c);
    } else if (c.startsWith(startMarker)) {
        logger.info("START new client (discard old one)");
        clients.remove(outQueue);
        clientCount = dataService.getNewId(clientCount + 1);
        clients.put(clientCount, outQueue);
        service.addClient(clientCount);
        id = clientCount;
        service.process(id, c);
    } else if (c.startsWith(CoralUtils.KILL_KEY)) {
        // kills the client with the given id (any number in the command)
        id = Integer.parseInt(cmd.getQuery().get("id"));
        logger.info("KILL client " + id + " " + c);
        BlockingQueue<Message> oldq = clients.get(id);
        if (oldq != null) {
            oldq.add(new Message("vset:_error?content=client has been removed", new byte[] {}));
            clients.remove(oldq);
            service.removeClient(id);
            outQueue.add(new Message("vset:_error?content=client " + id + " has been removed "));
        } else {
            logger.info("client id " + id + " did not exist");
            outQueue.add(new Message("vset:_error?content=client " + id + " does not exist "));
        }
        // clients.remove(outQueue);
        logger.info("client discarded");
    } else {
        service.process(id, cmd.getFullContent());
    }
}
From source file:org.opencastproject.videosegmenter.impl.VideoSegmenterServiceImpl.java
/**
 * Returns the segments for the movie accessible through the frame grabbing control.
 *
 * @param video
 *          the mpeg-7 video representation
 * @param dsh
 *          the data source handler
 * @return the list of segments
 * @throws IOException
 *           if accessing a frame fails
 * @throws VideoSegmenterException
 *           if segmentation of the video fails
 */
protected List<Segment> segment(Video video, FrameGrabber dsh) throws IOException, VideoSegmenterException {
    List<Segment> segments = new ArrayList<Segment>();

    int t = 1;
    int lastStableImageTime = 0;
    long startOfSegment = 0;
    int currentSceneStabilityCount = 1;
    boolean sceneChangeImminent = true;
    boolean luckyPunchRecovery = false;
    int segmentCount = 1;
    BufferedImage previousImage = null;
    BufferedImage lastStableImage = null;
    BlockingQueue<Buffer> bufferQueue = new ArrayBlockingQueue<Buffer>(stabilityThreshold + 1);
    long durationInSeconds = video.getMediaTime().getMediaDuration().getDurationInMilliseconds() / 1000;
    Segment contentSegment = video.getTemporalDecomposition().createSegment("segment-" + segmentCount);
    ImageComparator icomp = new ImageComparator(changesThreshold);

    // icomp.setStatistics(true);
    // String imagesPath = PathSupport.concat(new String[] {
    //     System.getProperty("java.io.tmpdir"),
    //     "videosegments",
    //     video.getMediaLocator().getMediaURI().toString().replaceAll("\\W", "-")
    // });
    // icomp.saveImagesTo(new File(imagesPath));

    Buffer buf = dsh.getBuffer();
    while (t < durationInSeconds && buf != null && !buf.isEOM()) {
        BufferedImage bufferedImage = ImageUtils.createImage(buf);
        if (bufferedImage == null)
            throw new VideoSegmenterException("Unable to extract image at time " + t);

        logger.trace("Analyzing video at {} s", t);

        // Compare the new image with our previous sample
        boolean differsFromPreviousImage = icomp.isDifferent(previousImage, bufferedImage, t);

        // We found an image that is different compared to the previous one. Let's see if this image remains stable
        // for some time (STABILITY_THRESHOLD) so we can declare a new scene
        if (differsFromPreviousImage) {
            logger.debug("Found differing image at {} seconds", t);

            // If this is the result of a lucky punch (looking ahead STABILITY_THRESHOLD seconds), then we should
            // really start over and make sure we get the correct beginning of the new scene
            if (!sceneChangeImminent && t - lastStableImageTime > 1) {
                luckyPunchRecovery = true;
                previousImage = lastStableImage;
                bufferQueue.add(buf);
                t = lastStableImageTime;
            } else {
                lastStableImageTime = t - 1;
                lastStableImage = previousImage;
                previousImage = bufferedImage;
                currentSceneStabilityCount = 1;
                t++;
            }
            sceneChangeImminent = true;
        }

        // We are looking ahead and everything seems to be fine.
        else if (!sceneChangeImminent) {
            fillLookAheadBuffer(bufferQueue, buf, dsh);
            lastStableImageTime = t;
            t += stabilityThreshold;
            previousImage = bufferedImage;
            lastStableImage = bufferedImage;
        }

        // Seems to be the same image. If we have just recently detected a new scene, let's see if we are able to
        // confirm that this scene is stable (>= STABILITY_THRESHOLD)
        else if (currentSceneStabilityCount < stabilityThreshold) {
            currentSceneStabilityCount++;
            previousImage = bufferedImage;
            t++;
        }

        // Did we find a new scene?
        else if (currentSceneStabilityCount == stabilityThreshold) {
            lastStableImageTime = t;

            long endOfSegment = t - stabilityThreshold - 1;
            long durationms = (endOfSegment - startOfSegment) * 1000L;

            // Create a new segment if this wasn't the first one
            if (endOfSegment > stabilityThreshold) {
                contentSegment.setMediaTime(new MediaRelTimeImpl(startOfSegment * 1000L, durationms));
                contentSegment = video.getTemporalDecomposition().createSegment("segment-" + ++segmentCount);
                segments.add(contentSegment);
                startOfSegment = endOfSegment;
            }

            // After finding a new segment, likelihood of a stable image is good, let's take a look ahead. Since
            // a processor can't seek, we need to store the buffers in between, in case we need to come back.
            fillLookAheadBuffer(bufferQueue, buf, dsh);
            t += stabilityThreshold;
            previousImage = bufferedImage;
            lastStableImage = bufferedImage;
            currentSceneStabilityCount++;
            sceneChangeImminent = false;
            logger.info("Found new scene at {} s", startOfSegment);
        }

        // Did we find a new scene by looking ahead?
        else if (sceneChangeImminent) {
            // We found a scene change by looking ahead. Now we want to get to the exact position
            lastStableImageTime = t;
            previousImage = bufferedImage;
            lastStableImage = bufferedImage;
            currentSceneStabilityCount++;
            t++;
        }

        // Nothing special, business as usual
        else {
            // If things look stable, then let's look ahead as much as possible without losing information (which is
            // equal to looking ahead STABILITY_THRESHOLD seconds).
            lastStableImageTime = t;
            fillLookAheadBuffer(bufferQueue, buf, dsh);
            t += stabilityThreshold;
            lastStableImage = bufferedImage;
            previousImage = bufferedImage;
        }

        if (luckyPunchRecovery) {
            buf = bufferQueue.poll();
            luckyPunchRecovery = !bufferQueue.isEmpty();
        } else
            buf = dsh.getBuffer();
    }

    // Finish off the last segment
    long startOfSegmentms = startOfSegment * 1000L;
    long durationms = ((long) durationInSeconds - startOfSegment) * 1000;
    contentSegment.setMediaTime(new MediaRelTimeImpl(startOfSegmentms, durationms));
    segments.add(contentSegment);

    // Print summary
    if (icomp.hasStatistics()) {
        NumberFormat nf = NumberFormat.getNumberInstance();
        nf.setMaximumFractionDigits(2);
        logger.info("Image comparison finished with an average change of {}% in {} comparisons",
                nf.format(icomp.getAvgChange()), icomp.getComparisons());
    }

    // Cleanup
    if (icomp.getSavedImagesDirectory() != null) {
        FileUtils.deleteQuietly(icomp.getSavedImagesDirectory());
    }

    return segments;
}
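In this last example the ArrayBlockingQueue is not used for inter-thread handoff at all: it serves as a single-threaded look-ahead buffer sized stabilityThreshold + 1. When the algorithm looks ahead and discovers it overshot a scene boundary, add stashes the current buffer (and fillLookAheadBuffer stashes the frames in between), and the recovery branch replays them with poll until the buffer is empty, since the underlying processor cannot seek backwards. The fixed capacity also acts as a sanity bound: any attempt to buffer more than stabilityThreshold + 1 frames would fail fast with IllegalStateException from add.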