List of usage examples for java.util Deque poll
E poll();
From source file:edu.stanford.cfuller.colocalization3d.correction.PositionCorrector.java
/** * Determines the target registration error for a correction by successively leaving out each ImageObject in a set used to make a correction, * calculating a correction from the remaining objects, and assessing the error in correcting the object left out. * /* w ww. j a v a 2 s . co m*/ * @param imageObjects A Vector containing all the ImageObjects to be used for the correction * or in the order it appears in a multiwavelength image file. * @return The average value of the error over all objects. */ public double determineTRE(java.util.List<ImageObject> imageObjects) { int referenceChannel = this.parameters.getIntValueForKey(REF_CH_PARAM); int channelToCorrect = this.parameters.getIntValueForKey(CORR_CH_PARAM); RealVector treVector = new ArrayRealVector(imageObjects.size(), 0.0); RealVector treXYVector = new ArrayRealVector(imageObjects.size(), 0.0); java.util.Deque<TREThread> startedThreads = new java.util.LinkedList<TREThread>(); int maxThreads = 1; if (this.parameters.hasKey(THREAD_COUNT_PARAM)) { maxThreads = this.parameters.getIntValueForKey(THREAD_COUNT_PARAM); } final int threadWaitTime_ms = 1000; for (int removeIndex = 0; removeIndex < imageObjects.size(); removeIndex++) { if (removeIndex % 10 == 0) { java.util.logging.Logger .getLogger(edu.stanford.cfuller.colocalization3d.Colocalization3DMain.LOGGER_NAME) .finer("calulating TRE: point " + (removeIndex + 1) + " of " + imageObjects.size()); } TREThread nextFit = new TREThread(imageObjects, referenceChannel, channelToCorrect, removeIndex, this); if (startedThreads.size() < maxThreads) { startedThreads.add(nextFit); nextFit.start(); } else { TREThread next = startedThreads.poll(); try { next.join(threadWaitTime_ms); } catch (InterruptedException e) { e.printStackTrace(); } while (next.isAlive()) { startedThreads.add(next); next = startedThreads.poll(); try { next.join(threadWaitTime_ms); } catch (InterruptedException e) { e.printStackTrace(); } } treVector.setEntry(next.getRemoveIndex(), next.getTre()); 
treXYVector.setEntry(next.getRemoveIndex(), next.getTreXY()); startedThreads.add(nextFit); nextFit.start(); } } java.util.List<Integer> unsuccessful_TRE = new java.util.ArrayList<Integer>(); while (startedThreads.size() > 0) { TREThread next = startedThreads.poll(); try { next.join(); if (next.getSuccess()) { treVector.setEntry(next.getRemoveIndex(), next.getTre()); } else { unsuccessful_TRE.add(next.getRemoveIndex()); } } catch (InterruptedException e) { e.printStackTrace(); } } RealVector treVector_mod = new ArrayRealVector(treVector.getDimension() - unsuccessful_TRE.size()); RealVector treXYVector_mod = new ArrayRealVector(treVector_mod.getDimension()); int c = 0; //unsuccessful TRE calculation results when there is incomplete coverage in the correction dataset for (int i = 0; i < treVector.getDimension(); ++i) { if (!unsuccessful_TRE.contains(i)) { treVector_mod.setEntry(c, treVector.getEntry(i)); treXYVector_mod.setEntry(c, treXYVector.getEntry(i)); ++c; } } treVector = treVector_mod; treXYVector = treXYVector_mod; double tre = treVector.getL1Norm() / treVector.getDimension(); double xy_tre = (treXYVector.getL1Norm() / treXYVector.getDimension()); java.util.logging.Logger.getLogger(edu.stanford.cfuller.colocalization3d.Colocalization3DMain.LOGGER_NAME) .info("TRE: " + tre); java.util.logging.Logger.getLogger(edu.stanford.cfuller.colocalization3d.Colocalization3DMain.LOGGER_NAME) .info("x-y TRE: " + xy_tre); return tre; }
From source file:io.undertow.server.handlers.proxy.LoadBalancingProxyHTTP2TestCase.java
/**
 * Stress-tests thread safety of a single HTTP/2 client connection: 100 requests
 * are submitted concurrently from a 10-thread pool over ONE shared connection,
 * and every response body is asserted to be the request path.
 */
@Test
public void testHttp2ClientMultipleStreamsThreadSafety()
        throws IOException, URISyntaxException, ExecutionException, InterruptedException, TimeoutException {
    // Not actually a proxy test, but convenient to put it here.
    UndertowXnioSsl ssl = new UndertowXnioSsl(DefaultServer.getWorker().getXnio(), OptionMap.EMPTY,
            DefaultServer.SSL_BUFFER_POOL, DefaultServer.createClientSslContext());
    final UndertowClient client = UndertowClient.getInstance();
    // One HTTPS connection with HTTP/2 enabled, shared by all tasks below.
    final ClientConnection connection = client.connect(
            new URI("https", null, DefaultServer.getHostAddress(), DefaultServer.getHostPort() + 1, "/", null,
                    null),
            DefaultServer.getWorker(), ssl, DefaultServer.getBufferPool(),
            OptionMap.create(UndertowOptions.ENABLE_HTTP2, true)).get();
    final ExecutorService service = Executors.newFixedThreadPool(10);
    try {
        // Futures are completed asynchronously by the response callbacks.
        Deque<FutureResult<String>> futures = new ArrayDeque<>();
        for (int i = 0; i < 100; ++i) {
            final FutureResult<String> future = new FutureResult<>();
            futures.add(future);
            service.submit(new Callable<String>() {
                @Override
                public String call() throws Exception {
                    ClientRequest cr = new ClientRequest().setMethod(Methods.GET).setPath("/path")
                            .setProtocol(Protocols.HTTP_1_1);
                    connection.sendRequest(cr, new ClientCallback<ClientExchange>() {
                        @Override
                        public void completed(ClientExchange result) {
                            result.setResponseListener(new ClientCallback<ClientExchange>() {
                                @Override
                                public void completed(ClientExchange result) {
                                    // Read the full response body, then complete the future with it.
                                    new StringReadChannelListener(DefaultServer.getBufferPool()) {
                                        @Override
                                        protected void stringDone(String string) {
                                            future.setResult(string);
                                        }

                                        @Override
                                        protected void error(IOException e) {
                                            future.setException(e);
                                        }
                                    }.setup(result.getResponseChannel());
                                }

                                @Override
                                public void failed(IOException e) {
                                    future.setException(e);
                                }
                            });
                        }

                        @Override
                        public void failed(IOException e) {
                            future.setException(e);
                        }
                    });
                    return null;
                }
            });
        }
        // Await every response (10s timeout each); the server is expected to
        // echo the request path as the body.
        while (!futures.isEmpty()) {
            FutureResult<String> future = futures.poll();
            Assert.assertNotEquals(IoFuture.Status.WAITING,
                    future.getIoFuture().awaitInterruptibly(10, TimeUnit.SECONDS));
            Assert.assertEquals("/path", future.getIoFuture().get());
        }
    } finally {
        service.shutdownNow();
    }
}
From source file:org.apache.pig.newplan.BaseOperatorPlan.java
/** * Move everything below a given operator to the new operator plan. The specified operator will * be moved and will be the root of the new operator plan * @param root Operator to move everything after * @param newPlan new operator plan to move things into * @throws PlanException //w w w. ja v a 2 s .co m */ public void moveTree(Operator root, BaseOperatorPlan newPlan) throws FrontendException { Deque<Operator> queue = new ArrayDeque<Operator>(); newPlan.add(root); root.setPlan(newPlan); queue.addLast(root); while (!queue.isEmpty()) { Operator node = queue.poll(); if (getSuccessors(node) != null) { for (Operator succ : getSuccessors(node)) { if (!queue.contains(succ)) { queue.addLast(succ); newPlan.add(succ); succ.setPlan(newPlan); newPlan.connect(node, succ); } } } } trimBelow(root); }
From source file:org.apache.solr.handler.component.PivotFacetHelper.java
/**
 * Recursive function to do all the pivots.
 *
 * Builds one level of the pivot facet tree for {@code field}, recursing into
 * {@code subField} (and then the remaining fields on {@code fnames}) for each
 * parent value whose count reaches {@code minMatch}. The head of
 * {@code fnames} is polled on entry and pushed back before returning, so the
 * deque is restored for sibling calls.
 *
 * @param superFacets term counts of the parent field to pivot on
 * @param field       the field at this pivot level
 * @param subField    the next field to pivot into, or null at the deepest level
 * @param fnames      remaining pivot field names (mutated: poll on entry, push on exit)
 * @param rb          the current response builder (request, query, searcher)
 * @param docs        the doc set to restrict sub-facets to
 * @param minMatch    minimum parent count required to descend
 * @param distinct    if true, report distinct sub-value counts instead of plain pivots
 * @param maxDepth    total requested pivot depth
 * @param depth       remaining depth at this level (counts down)
 * @return one NamedList per qualifying value of {@code field}
 * @throws IOException on index access errors
 */
protected List<NamedList<Object>> doPivots(NamedList<Integer> superFacets, String field, String subField,
        Deque<String> fnames, ResponseBuilder rb, DocSet docs, int minMatch, boolean distinct, int maxDepth,
        int depth) throws IOException {
    SolrIndexSearcher searcher = rb.req.getSearcher();
    // TODO: optimize to avoid converting to an external string and then
    // having to convert back to internal below
    SchemaField sfield = searcher.getSchema().getField(field);
    FieldType ftype = sfield.getType();

    String nextField = fnames.poll();

    // when distinct and no subs, dont bother
    if (subField == null && distinct == true) {
        return new ArrayList<NamedList<Object>>();
    }
    // NOTE(review): baseQuery is never used below — confirm whether it can be removed.
    Query baseQuery = rb.getQuery();
    List<NamedList<Object>> values = new ArrayList<NamedList<Object>>(superFacets.size());
    for (Map.Entry<String, Integer> kv : superFacets) {
        // Only sub-facet if parent facet has positive count - still may not
        // be any values for the sub-field though
        if (kv.getValue() >= minMatch) {
            // don't reuse the same BytesRef each time since we will be
            // constructing Term objects that will most likely be cached.
            BytesRef termval = new BytesRef();
            ftype.readableToIndexed(kv.getKey(), termval);
            SimpleOrderedMap<Object> pivot = new SimpleOrderedMap<Object>();
            pivot.add("field", field);
            pivot.add("value", ftype.toObject(sfield, termval));
            pivot.add("count", kv.getValue());

            // only due stats
            DocSet subset = null;
            SimpleFacets sf = null;
            if (maxDepth != depth) {
                // Not the top level: restrict docs to this term and attach statistics.
                Query query = new TermQuery(new Term(field, termval));
                subset = searcher.getDocSet(query, docs);
                sf = getFacetImplementation(rb.req, subset, rb.req.getParams());
                NamedList<Object> subFieldStats = sf.getFacetPercentileCounts();
                pivot.add("statistics", subFieldStats);
            }

            if (subField == null) {
                // Leaf level: in distinct mode leaves are not emitted.
                if (distinct == false) {
                    values.add(pivot);
                }
            } else {
                if (sf == null) {
                    // Top level did not compute the subset/stats above — do it now.
                    Query query = new TermQuery(new Term(field, termval));
                    subset = searcher.getDocSet(query, docs);
                    sf = getFacetImplementation(rb.req, subset, rb.req.getParams());
                    NamedList<Object> subFieldStats = sf.getFacetPercentileCounts();
                    pivot.add("statistics", subFieldStats);
                }
                NamedList<Integer> nl = sf.getTermCounts(subField);
                if (distinct) {
                    pivot.add("distinct", nl.size());
                    // NOTE(review): in distinct mode the pivot is only added when
                    // depth > 1 — confirm whether depth == 1 entries are meant to
                    // be dropped.
                    if (depth > 1) {
                        List<NamedList<Object>> list = doPivots(nl, subField, nextField, fnames, rb, subset,
                                minMatch, distinct, maxDepth, depth - 1);
                        pivot.add("pivot", list);
                        values.add(pivot);
                    }
                } else {
                    if (nl.size() >= minMatch) {
                        List<NamedList<Object>> list = doPivots(nl, subField, nextField, fnames, rb, subset,
                                minMatch, distinct, maxDepth, depth - 1);
                        pivot.add("pivot", list);
                        values.add(pivot);
                    }
                }
            }
        }
    }
    // put the field back on the list
    // NOTE(review): if fnames was exhausted, nextField is null and push(null)
    // throws NPE on ArrayDeque — verify the deque implementation used by callers.
    fnames.push(nextField);
    return values;
}
From source file:org.diorite.impl.world.chunk.PlayerChunksImpl.java
private void continueUpdate() { final byte render = this.getRenderDistance(); final byte view = this.getViewDistance(); if ((this.lastUpdateR >= view) || (this.lastUpdateR >= render)) { return;// w ww .j a va2 s.c o m } final LongCollection oldChunks = new LongOpenHashSet(this.visibleChunks); final Collection<ChunkImpl> chunksToSent = new ConcurrentSet<>(); final int r = this.lastUpdateR++; final ChunkManagerImpl impl = this.player.getWorld().getChunkManager(); final int cx = this.lastUpdate.getX(); final int cz = this.lastUpdate.getZ(); { final Deque<ChunkImpl> toProcess = this.toProcess; ChunkImpl chunk; while ((chunk = toProcess.poll()) != null) { this.processChunk(chunk, cx, cz, view, impl, oldChunks, chunksToSent); } } if (r == 0) { this.processChunk(cx, cz, cx, cz, view, impl, oldChunks, chunksToSent); } else { for (int x = -r; x <= r; x++) { if ((x == r) || (x == -r)) { for (int z = -r; z <= r; z++) { this.processChunk(cx + x, cz + z, cx, cz, view, impl, oldChunks, chunksToSent); } } this.processChunk(cx + x, cz + r, cx, cz, view, impl, oldChunks, chunksToSent); this.processChunk(cx + x, cz - r, cx, cz, view, impl, oldChunks, chunksToSent); } } if (chunksToSent.isEmpty() /*&& oldChunks.isEmpty()*/) { return; } final PacketPlayClientboundMapChunk[] packets = new PacketPlayClientboundMapChunk[chunksToSent.size()]; int i = 0; for (final ChunkImpl chunk : chunksToSent) { packets[i++] = new PacketPlayClientboundMapChunk(true, chunk); } this.player.getNetworkManager().sendPackets(packets); }
From source file:org.roche.antibody.services.graphsynchronizer.GraphSynchronizer.java
/**
 * Builds a HELM code representation for the given domain and everything
 * reachable from it through connections.
 *
 * Performs a worklist traversal starting at {@code activeDomain}: each
 * sequence is converted to a HELM element, its connections are converted to
 * HELM connections, and any sequence discovered through a connection is queued
 * for processing. The instance-level "handled" sets are reset on entry and
 * track what has already been emitted.
 *
 * @param activeDomain the domain to start from (reassigned as further domains
 *                     are dequeued)
 * @return the assembled HELM code
 */
private HELMCode buildHelm(Domain activeDomain) {
    HELMCode code = new HELMCode();
    Deque<Sequence> sequencesToHandle = new ArrayDeque<Sequence>();
    // Reset traversal state from any previous build.
    handledConnections.clear();
    handledInterDomainConnections.clear();
    handledSequences.clear();
    sequencesToHandle.offer(activeDomain);
    // Maps each handled sequence to the HELM element created for it, so
    // connections can reference both endpoints.
    Map<Sequence, HELMElement> helmElemMap = new HashMap<Sequence, HELMElement>();
    while (sequencesToHandle.isEmpty() == false) {
        Sequence seqToHandle = sequencesToHandle.poll();
        // For domains, connections are attached to the enclosing peptide, not
        // the domain itself; this variable holds whichever applies.
        Sequence seqForConnectionCheck = seqToHandle;
        if (handledSequences.contains(seqToHandle)) {
            continue;
        } else {
            handledSequences.add(seqToHandle);
        }
        if (seqToHandle instanceof Domain) {
            activeDomain = (Domain) seqToHandle;
            HELMElement pep = seqService.toHELM(activeDomain);
            code.addHELMElement(pep);
            // Keyed by the peptide: connections reference the peptide sequence.
            helmElemMap.put(activeDomain.getPeptide(), pep);
            seqForConnectionCheck = activeDomain.getPeptide();
        }
        for (Connection con : seqToHandle.getConnections()) {
            if (handledConnections.contains(con)) {
                continue;
            }
            if (con instanceof GeneralConnection) {
                HELMConnection helmCon = null;
                if (con.getSource() == seqForConnectionCheck && con.getTarget() == seqForConnectionCheck) {
                    // Self-connection: one element connected to itself.
                    HELMElement element = seqService.toHELM(seqToHandle);
                    code.addHELMElement(element);
                    helmCon = connectionService.createConnection(con, element, element);
                } else {
                    // Lazily create HELM elements for both endpoints and queue
                    // newly discovered sequences for traversal.
                    HELMElement source = helmElemMap.get(con.getSource());
                    if (source == null) {
                        source = seqService.toHELM(con.getSource());
                        helmElemMap.put(con.getSource(), source);
                        code.addHELMElement(source);
                        sequencesToHandle.push(con.getSource());
                    }
                    HELMElement target = helmElemMap.get(con.getTarget());
                    if (target == null) {
                        target = seqService.toHELM(con.getTarget());
                        helmElemMap.put(con.getTarget(), target);
                        code.addHELMElement(target);
                        sequencesToHandle.push(con.getTarget());
                    }
                    helmCon = connectionService.createConnection(con, source, target);
                }
                code.addHELMConnection(helmCon);
                handledConnections.add(con);
            }
            if (con instanceof CysteinConnection && connectionService.isIntraDomainConnection(con)) {
                // Disulfide bridge within the current domain: both endpoints map
                // to the active domain's peptide element.
                HELMConnection helmCon = connectionService.createConnection(con,
                        helmElemMap.get(activeDomain.getPeptide()), helmElemMap.get(activeDomain.getPeptide()));
                handledConnections.add(con);
                code.addHELMConnection(helmCon);
            }
            if (con instanceof CysteinConnection && !connectionService.isIntraDomainConnection(con)) {
                // Inter-domain disulfide bridges are recorded for later handling,
                // not emitted into this HELM code.
                handledInterDomainConnections.add(con);
            }
        }
    }
    return code;
}
From source file:org.shaman.terrain.polygonal.PolygonalMapGenerator.java
/** * Step 4: assign elevation/* w w w .j a v a 2 s . c om*/ */ private void assignElevation() { if (graph == null) { return; } Random rand = new Random(seed * 2); //initialize border corners with zero elevation Deque<Graph.Corner> q = new ArrayDeque<>(); for (Graph.Corner c : graph.corners) { if (c.border) { c.elevation = 0; q.add(c); } else { c.elevation = Float.POSITIVE_INFINITY; } } // Traverse the graph and assign elevations to each point. As we // move away from the map border, increase the elevations. This // guarantees that rivers always have a way down to the coast by // going downhill (no local minima). while (!q.isEmpty()) { Graph.Corner c = q.poll(); for (Graph.Corner a : c.adjacent) { if (c.ocean && a.ocean && a.elevation > 0) { a.elevation = 0; q.addFirst(a); continue; } float elevation = c.elevation + (a.ocean ? 0 : 0.01f); if (!c.water && !a.water) { elevation += 1; } //add some more randomness //elevation += rand.nextDouble()/4; if (elevation < a.elevation) { a.elevation = elevation; q.add(a); } } } //redistribute elevation float SCALE_FACTOR = 1.1f; ArrayList<Graph.Corner> corners = new ArrayList<>(); for (Graph.Corner c : graph.corners) { if (!c.ocean) { corners.add(c); } } Collections.sort(corners, new Comparator<Graph.Corner>() { @Override public int compare(Graph.Corner o1, Graph.Corner o2) { return Float.compare(o1.elevation, o2.elevation); } }); for (int i = 0; i < corners.size(); i++) { // Let y(x) be the total area that we want at elevation <= x. // We want the higher elevations to occur less than lower // ones, and set the area to be y(x) = 1 - (1-x)^2. float y = (float) i / (float) (corners.size() - 1); float x = (float) (Math.sqrt(SCALE_FACTOR) - Math.sqrt(SCALE_FACTOR * (1 - y))); if (x > 1.0) x = 1; // TODO: does this break downslopes? corners.get(i).elevation = x; } assignCenterElevations(); //update mesh updateElevationGeometry(); }