List of usage examples for java.util.LinkedList.poll()
public E poll()
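Before the examples taken from real projects, here is a minimal sketch (not from any of the sources below) showing the basic contract of poll(): it removes and returns the head of the list, or returns null when the list is empty, unlike remove(), which throws NoSuchElementException.

import java.util.LinkedList;

public class PollBasics {
    public static void main(String[] args) {
        LinkedList<String> queue = new LinkedList<>();
        queue.add("first");
        queue.add("second");

        // poll() removes and returns the head (FIFO order)
        System.out.println(queue.poll()); // first
        System.out.println(queue.poll()); // second

        // On an empty list poll() returns null instead of throwing
        System.out.println(queue.poll()); // null
    }
}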
From source file:org.deegree.framework.xml.XMLFragment.java
/**
 * Reads the encoding of an XML document from its header. If no header is available,
 * <code>CharsetUtils.getSystemCharset()</code> will be returned.
 *
 * @param pbis
 * @return encoding of an XML document
 * @throws IOException
 */
private String readEncoding(PushbackInputStream pbis) throws IOException {
    byte[] b = new byte[80];
    String s = "";
    int rd = 0;

    LinkedList<byte[]> bs = new LinkedList<byte[]>();
    LinkedList<Integer> rds = new LinkedList<Integer>();
    while (rd < 80) {
        rds.addFirst(pbis.read(b));
        if (rds.peek() == -1) {
            rds.poll();
            break;
        }
        rd += rds.peek();
        s += new String(b, 0, rds.peek()).toLowerCase();
        bs.addFirst(b);
        b = new byte[80];
    }

    String encoding = CharsetUtils.getSystemCharset();
    if (s.indexOf("?>") > -1) {
        int p = s.indexOf("encoding=");
        if (p > -1) {
            StringBuffer sb = new StringBuffer();
            int k = p + 1 + "encoding=".length();
            while (s.charAt(k) != '"' && s.charAt(k) != '\'') {
                sb.append(s.charAt(k++));
            }
            encoding = sb.toString();
        }
    }

    while (!bs.isEmpty()) {
        pbis.unread(bs.poll(), 0, rds.poll());
    }
    return encoding;
}
From source file:no.asgari.civilization.server.excel.ItemReader.java
private void extractShuffledWondersFromExcel(Workbook wb) {
    Sheet wonderSheet = wb.getSheet(SheetName.WONDERS.getName());
    List<Cell> unfilteredCells = new ArrayList<>();
    wonderSheet.forEach(row -> row.forEach(unfilteredCells::add));

    // Categorize the wonders
    List<String> wonderName = unfilteredCells.stream().filter(p -> !p.toString().trim().isEmpty())
            .filter(notRandomPredicate).filter(rowNotZeroPredicate).filter(columnIndexZeroPredicate)
            .map(Object::toString).collect(Collectors.toList());
    List<String> description = unfilteredCells.stream().filter(p -> !p.toString().trim().isEmpty())
            .filter(notRandomPredicate).filter(rowNotZeroPredicate).filter(cell -> cell.getColumnIndex() == 1)
            .map(Object::toString).collect(Collectors.toList());

    LinkedList<String> wondersName = new LinkedList<>(wonderName);
    LinkedList<String> descriptions = new LinkedList<>(description);

    // Ancient only
    ancientWonders = new LinkedList<>();
    // There is no break in a Java 8 forEach, so we use a classic for loop
    for (int i = 0; i < wondersName.size(); i++) {
        String wonder = wondersName.poll();
        String desc = descriptions.poll();
        if (wonder.toLowerCase().contains(SheetName.WONDERS.getName().toLowerCase())) {
            break;
        }
        ancientWonders.add(new Wonder(wonder, desc, Wonder.ANCIENT, SheetName.ANCIENT_WONDERS));
    }
    Collections.shuffle(ancientWonders);

    // Medieval only
    medievalWonders = new LinkedList<>();
    for (int i = 0; i < wondersName.size(); i++) {
        String wonder = wondersName.poll();
        String desc = descriptions.poll();
        if (wonder.toLowerCase().contains(SheetName.WONDERS.getName().toLowerCase())) {
            break;
        }
        medievalWonders.add(new Wonder(wonder, desc, Wonder.MEDIEVAL, SheetName.MEDIEVAL_WONDERS));
    }
    Collections.shuffle(medievalWonders);

    // Only modern left
    modernWonders = new LinkedList<>();
    int remainingSize = wondersName.size();
    for (int i = 0; i < remainingSize; i++) {
        String wonder = wondersName.poll();
        String desc = descriptions.poll();
        modernWonders.add(new Wonder(wonder, desc, Wonder.MODERN, SheetName.MODERN_WONDERS));
    }
    Collections.shuffle(modernWonders);
}
From source file:es.caib.seycon.ng.servei.AutoritzacioServiceImpl.java
private Collection getCodiGrupsFillsGrup(String codiGrup) {
    LinkedList l_grupsUsuari = new LinkedList();
    l_grupsUsuari.add(codiGrup);
    HashSet grupsFills = new HashSet();
    String codiGrupAnalitzat = null;
    while ((codiGrupAnalitzat = (String) l_grupsUsuari.poll()) != null) {
        if (!grupsFills.contains(codiGrupAnalitzat)) { // only if we have not analyzed it yet
            grupsFills.add(codiGrupAnalitzat);
            Collection fills = getGrupEntityDao().findSubGrupsByCodi(codiGrupAnalitzat);
            if (fills != null)
                for (Iterator git = fills.iterator(); git.hasNext();) {
                    GrupEntity fg = (GrupEntity) git.next();
                    if (!grupsFills.contains(fg.getCodi())) // if not already analyzed
                        l_grupsUsuari.add(fg.getCodi());
                }
        }
    }
    return grupsFills;
}
From source file:io.hops.transaction.lock.INodeLock.java
private List<INode> findChildrenRecursively(INode lastINode)
        throws StorageException, TransactionContextException {
    LinkedList<INode> children = new LinkedList<>();
    LinkedList<INode> unCheckedDirs = new LinkedList<>();
    if (lastINode != null) {
        if (lastINode instanceof INodeDirectory) {
            unCheckedDirs.add(lastINode);
        }
    }

    // Find all the children in the sub-directories.
    while (!unCheckedDirs.isEmpty()) {
        INode next = unCheckedDirs.poll();
        if (next instanceof INodeDirectory) {
            setINodeLockType(TransactionLockTypes.INodeLockType.READ_COMMITTED); // locking the parent is sufficient
            List<INode> clist = ((INodeDirectory) next).getChildrenList();
            unCheckedDirs.addAll(clist);
            children.addAll(clist);
        }
    }
    LOG.debug("Added " + children.size() + " children.");
    return children;
}
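Both this example and the previous one use the same pattern: a LinkedList serves as a FIFO work queue for a breadth-first traversal, where poll() dequeues the next node and newly discovered children are appended. A stripped-down sketch of that pattern follows; the Node type and its getChildren() method are hypothetical and not taken from either project.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

class Node {
    final String id;
    final List<Node> children = new ArrayList<>();
    Node(String id) { this.id = id; }
    List<Node> getChildren() { return children; }
}

public class BfsWithPoll {
    // Collects every descendant of root in breadth-first order.
    static List<Node> collectDescendants(Node root) {
        List<Node> result = new ArrayList<>();
        Set<Node> visited = new HashSet<>();
        LinkedList<Node> queue = new LinkedList<>();
        queue.add(root);
        while (!queue.isEmpty()) {
            Node next = queue.poll();      // dequeue the next node to expand
            if (visited.add(next)) {       // skip nodes we have already expanded
                result.addAll(next.getChildren());
                queue.addAll(next.getChildren());
            }
        }
        return result;
    }
}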
From source file:org.apache.storm.scheduler.IsolationScheduler.java
@Override
public void schedule(Topologies topologies, Cluster cluster) {
    Set<String> origBlacklist = cluster.getBlacklistedHosts();
    List<TopologyDetails> isoTopologies = isolatedTopologies(topologies.getTopologies());
    Set<String> isoIds = extractTopologyIds(isoTopologies);
    Map<String, Set<Set<ExecutorDetails>>> topologyWorkerSpecs = topologyWorkerSpecs(isoTopologies);
    Map<String, Map<Integer, Integer>> topologyMachineDistributions = topologyMachineDistributions(isoTopologies);
    Map<String, List<AssignmentInfo>> hostAssignments = hostAssignments(cluster);

    for (Map.Entry<String, List<AssignmentInfo>> entry : hostAssignments.entrySet()) {
        List<AssignmentInfo> assignments = entry.getValue();
        String topologyId = assignments.get(0).getTopologyId();
        Map<Integer, Integer> distribution = topologyMachineDistributions.get(topologyId);
        Set<Set<ExecutorDetails>> workerSpecs = topologyWorkerSpecs.get(topologyId);
        int numWorkers = assignments.size();

        if (isoIds.contains(topologyId) && checkAssignmentTopology(assignments, topologyId)
                && distribution.containsKey(numWorkers) && checkAssignmentWorkerSpecs(assignments, workerSpecs)) {
            decrementDistribution(distribution, numWorkers);
            for (AssignmentInfo ass : assignments) {
                workerSpecs.remove(ass.getExecutors());
            }
            cluster.blacklistHost(entry.getKey());
        } else {
            for (AssignmentInfo ass : assignments) {
                if (isoIds.contains(ass.getTopologyId())) {
                    cluster.freeSlot(ass.getWorkerSlot());
                }
            }
        }
    }

    Map<String, Set<WorkerSlot>> hostUsedSlots = hostToUsedSlots(cluster);
    LinkedList<HostAssignableSlots> hss = hostAssignableSlots(cluster);
    for (Map.Entry<String, Set<Set<ExecutorDetails>>> entry : topologyWorkerSpecs.entrySet()) {
        String topologyId = entry.getKey();
        Set<Set<ExecutorDetails>> executorSet = entry.getValue();
        List<Integer> workerNum = distributionToSortedAmounts(topologyMachineDistributions.get(topologyId));

        for (Integer num : workerNum) {
            HostAssignableSlots hostSlots = hss.peek();
            List<WorkerSlot> slot = hostSlots != null ? hostSlots.getWorkerSlots() : null;

            if (slot != null && slot.size() >= num) {
                hss.poll();
                cluster.freeSlots(hostUsedSlots.get(hostSlots.getHostName()));
                for (WorkerSlot tmpSlot : slot.subList(0, num)) {
                    Set<ExecutorDetails> executor = removeElemFromExecutorsSet(executorSet);
                    cluster.assign(tmpSlot, topologyId, executor);
                }
                cluster.blacklistHost(hostSlots.getHostName());
            }
        }
    }

    List<String> failedTopologyIds = extractFailedTopologyIds(topologyWorkerSpecs);
    if (failedTopologyIds.size() > 0) {
        LOG.warn("Unable to isolate topologies " + failedTopologyIds
                + ". No machine had enough worker slots to run the remaining workers for these topologies. "
                + "Clearing all other resources and will wait for enough resources for "
                + "isolated topologies before allocating any other resources.");
        // clear workers off all hosts that are not blacklisted
        Map<String, Set<WorkerSlot>> usedSlots = hostToUsedSlots(cluster);
        Set<Map.Entry<String, Set<WorkerSlot>>> entries = usedSlots.entrySet();
        for (Map.Entry<String, Set<WorkerSlot>> entry : entries) {
            if (!cluster.isBlacklistedHost(entry.getKey())) {
                cluster.freeSlots(entry.getValue());
            }
        }
    } else {
        // run default scheduler on non-isolated topologies
        Set<String> allocatedTopologies = allocatedTopologies(topologyWorkerSpecs);
        Topologies leftOverTopologies = leftoverTopologies(topologies, allocatedTopologies);
        DefaultScheduler.defaultSchedule(leftOverTopologies, cluster);
    }
    cluster.setBlacklistedHosts(origBlacklist);
}
From source file:com.oltpbenchmark.benchmarks.seats.SEATSWorker.java
private boolean executeUpdateReservation(UpdateReservation proc) throws SQLException {
    LinkedList<Reservation> cache = CACHE_RESERVATIONS.get(CacheType.PENDING_UPDATES);
    assert (cache != null) : "Unexpected " + CacheType.PENDING_UPDATES;

    if (LOG.isTraceEnabled())
        LOG.trace("Let's look for a Reservation that we can update");

    // Pull off the first pending seat change and send it to the server
    Reservation r = null;
    try {
        r = cache.poll();
    } catch (Throwable ex) {
        // Nothing
    }
    if (r == null) {
        if (LOG.isDebugEnabled())
            LOG.warn(String.format("Failed to find Reservation to update [cache=%d]", cache.size()));
        return (false);
    }
    if (LOG.isTraceEnabled())
        LOG.trace("Ok let's try to update " + r);

    long value = rng.number(1, 1 << 20);
    long attribute_idx = rng.nextInt(UpdateReservation.NUM_UPDATES);
    long seatnum = rng.number(0, SEATSConstants.FLIGHTS_NUM_SEATS - 1);

    if (LOG.isTraceEnabled())
        LOG.trace("Calling " + proc);
    proc.run(conn, r.id, r.flight_id.encode(), r.customer_id.encode(), seatnum, attribute_idx, value);
    conn.commit();

    SEATSWorker.this.requeueReservation(r);
    return (true);
}
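The example above treats the pending-updates list as a simple work queue: poll() takes the oldest reservation (returning null when nothing is pending, so the try/catch is purely defensive), the transaction runs, and requeueReservation() puts the item back for later reuse. A reduced sketch of that poll-then-requeue cycle follows; the Reservation class and processNext() method here are hypothetical stand-ins, not the benchmark's own types.

import java.util.LinkedList;

public class PollAndRequeue {
    static class Reservation {
        final long id;
        Reservation(long id) { this.id = id; }
    }

    private final LinkedList<Reservation> pending = new LinkedList<>();

    boolean processNext() {
        Reservation r = pending.poll();  // returns null when the queue is empty; never throws
        if (r == null) {
            return false;                // nothing to do
        }
        // ... run the update for r here ...
        pending.addLast(r);              // requeue so the same item can be picked up again later
        return true;
    }
}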
From source file:com.oltpbenchmark.benchmarks.seats.SEATSWorker.java
private boolean executeNewReservation(NewReservation proc) throws SQLException {
    Reservation reservation = null;
    BitSet seats = null;
    LinkedList<Reservation> cache = CACHE_RESERVATIONS.get(CacheType.PENDING_INSERTS);
    assert (cache != null) : "Unexpected " + CacheType.PENDING_INSERTS;

    if (LOG.isDebugEnabled())
        LOG.debug(String.format("Attempting to get a new pending insert Reservation [totalPendingInserts=%d]",
                cache.size()));
    while (reservation == null) {
        Reservation r = cache.poll();
        if (r == null) {
            if (LOG.isDebugEnabled())
                LOG.warn("Unable to execute " + proc + " - No available reservations to insert");
            break;
        }

        seats = getSeatsBitSet(r.flight_id);

        if (isFlightFull(seats)) {
            if (LOG.isDebugEnabled())
                LOG.debug(String.format("%s is full", r.flight_id));
            continue;
        }
        // PAVLO: Not sure why this is always coming back as reserved?
        // else if (seats.get(r.seatnum)) {
        //     if (LOG.isDebugEnabled())
        //         LOG.debug(String.format("Seat #%d on %s is already booked", r.seatnum, r.flight_id));
        //     continue;
        // }
        else if (isCustomerBookedOnFlight(r.customer_id, r.flight_id)) {
            if (LOG.isDebugEnabled())
                LOG.debug(String.format("%s is already booked on %s", r.customer_id, r.flight_id));
            continue;
        }
        reservation = r;
    } // WHILE

    if (reservation == null) {
        if (LOG.isDebugEnabled())
            LOG.warn("Failed to find a valid pending insert Reservation\n" + this.toString());
        return (false);
    }

    // Generate a random price for now
    double price = 2.0 * rng.number(SEATSConstants.RESERVATION_PRICE_MIN, SEATSConstants.RESERVATION_PRICE_MAX);

    // Generate random attributes
    long attributes[] = new long[9];
    for (int i = 0; i < attributes.length; i++) {
        attributes[i] = rng.nextLong();
    } // FOR

    if (LOG.isTraceEnabled())
        LOG.trace("Calling " + proc);
    proc.run(conn, reservation.id, reservation.customer_id.encode(), reservation.flight_id.encode(),
            reservation.seatnum, price, attributes);
    conn.commit();

    // Mark this seat as successfully reserved
    seats.set(reservation.seatnum);

    // Set it up so we can play with it later
    this.requeueReservation(reservation);
    return (true);
}
From source file:com.jayway.jsonpath.JsonModel.java
private <T> T getTargetObject(JsonPath jsonPath, Class<T> clazz) {
    notNull(jsonPath, "jsonPath can not be null");

    if (!jsonPath.isPathDefinite()) {
        throw new IndefinitePathException(jsonPath.getPath());
    }

    JsonProvider jsonProvider = JsonProviderFactory.createProvider();
    Object modelRef = jsonObject;

    if (jsonPath.getTokenizer().size() == 1) {
        PathToken onlyToken = jsonPath.getTokenizer().iterator().next();
        if ("$".equals(onlyToken.getFragment())) {
            return clazz.cast(modelRef);
        }
    } else {
        LinkedList<PathToken> tokens = jsonPath.getTokenizer().getPathTokens();

        PathToken currentToken;
        do {
            currentToken = tokens.poll();
            modelRef = currentToken.apply(modelRef, jsonProvider);
        } while (!tokens.isEmpty());

        if (modelRef.getClass().isAssignableFrom(clazz)) {
            throw new InvalidModelException(
                    jsonPath + " does not refer to a Map but " + currentToken.getClass().getName());
        }
        return clazz.cast(modelRef);
    }
    throw new InvalidModelException();
}
From source file:org.vast.stt.renderer.opengl.TextureManager.java
/**
 * Creates a new texture by transferring data from styler to GL memory
 * @param styler
 * @param tex
 * @param texInfo
 */
protected void createTexture(TextureStyler styler, RasterTileGraphic tex, GLTexture texInfo) {
    // fetch texture data from styler
    fillTexData(styler, tex, texInfo);
    Symbolizer sym = styler.getSymbolizer();

    // if texture was successfully constructed, bind it with GL
    if (tex.hasRasterData) {
        // size of texture pool
        int texPoolSize = styler.getSymbolizer().getTexPoolSize();

        // create new texture name and bind it
        int[] id = new int[1];
        boolean lastNewTexture = false;

        // ACCORDING TO WHETHER THE TEXTURE POOL SIZE HAS BEEN REACHED
        // A NEW TEXTURE IS GENERATED OR THE FIRST OF THE STACK IS REUSED
        // FOR THE MOST RECENT RASTER DATA
        if ((symTexturePoolSizeReachedTable == null) || !symTexturePoolSizeReachedTable.containsKey(sym)) {
            // create new texture name
            gl.glGenTextures(1, id, 0);

            if (texPoolSize > 0) {
                if (id[0] < (texPoolSize + 1)) {
                    if (!symTextureStackTable.containsKey(sym)) {
                        LinkedList<Integer> TexIdStack = new LinkedList<Integer>();
                        TexIdStack.addLast(id[0]);
                        symTextureStackTable.put(sym, TexIdStack);
                    } else {
                        LinkedList<Integer> stack = symTextureStackTable.get(sym);
                        stack.addLast(id[0]);
                        symTextureStackTable.put(sym, stack);
                        if (id[0] == texPoolSize) {
                            symTexturePoolSizeReachedTable.put(sym, true);
                            lastNewTexture = true;
                        }
                    }
                }
            }
        } else if ((symTexturePoolSizeReachedTable != null) || symTexturePoolSizeReachedTable.containsKey(sym)) {
            LinkedList<Integer> stack = symTextureStackTable.get(sym);
            id[0] = stack.poll();
            stack.addLast(id[0]);
        }

        // Bind the texture to the texture Id
        gl.glBindTexture(OpenGLCaps.TEXTURE_2D_TARGET, id[0]);

        // set texture parameters
        // TODO: Allow user to select between Linear (smoothed) and nearest-neighbor interp
        // gl.glTexParameteri(OpenGLCaps.TEXTURE_2D_TARGET, GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST);
        // gl.glTexParameteri(OpenGLCaps.TEXTURE_2D_TARGET, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST);
        gl.glTexParameteri(OpenGLCaps.TEXTURE_2D_TARGET, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR);
        gl.glTexParameteri(OpenGLCaps.TEXTURE_2D_TARGET, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
        gl.glTexParameteri(OpenGLCaps.TEXTURE_2D_TARGET, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_EDGE);
        gl.glTexParameteri(OpenGLCaps.TEXTURE_2D_TARGET, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_EDGE);
        // gl.glTexParameteri(OpenGLCaps.TEXTURE_2D_TARGET, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_BORDER);
        // gl.glTexParameteri(OpenGLCaps.TEXTURE_2D_TARGET, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_BORDER);
        // gl.glTexParameterfv(OpenGLCaps.TEXTURE_2D_TARGET, GL.GL_TEXTURE_BORDER_COLOR, new float[] {0.0f,0.0f,0.0f,0.0f}, 0);

        // figure out image format
        int format = 0;
        switch (tex.bands) {
        case 1:
            format = GL.GL_LUMINANCE;
            break;
        case 2:
            format = GL.GL_LUMINANCE_ALPHA;
            break;
        case 3:
            format = GL.GL_RGB;
            break;
        case 4:
            format = GL.GL_RGBA;
            break;
        }

        gl.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1);

        // create texture in GL memory
        if (!symTexturePoolSizeReachedTable.containsKey(sym) || lastNewTexture) {
            gl.glTexImage2D(OpenGLCaps.TEXTURE_2D_TARGET, 0, tex.bands, tex.width + texInfo.widthPadding,
                    tex.height + texInfo.heightPadding, 0, format, GL.GL_UNSIGNED_BYTE, tex.rasterData);
        } else {
            gl.glTexImage2D(OpenGLCaps.TEXTURE_2D_TARGET, 0, tex.bands, tex.width + texInfo.widthPadding,
                    tex.height + texInfo.heightPadding, 0, format, GL.GL_UNSIGNED_BYTE, tex.rasterData);
            // gl.glTexSubImage2D(OpenGLCaps.TEXTURE_2D_TARGET, 0, 0, 0,
            //         tex.width + texInfo.widthPadding, tex.height + texInfo.heightPadding,
            //         format, GL.GL_UNSIGNED_BYTE, tex.rasterData);
        }

        // erase temp buffer
        tex.rasterData = null;

        // set new id and reset needsUpdate flag
        int oldID = texInfo.id;
        texInfo.id = id[0];

        // delete previous texture if needed
        if (oldID > 0) {
            gl.glDeleteTextures(1, new int[] { oldID }, 0);
            if (log.isDebugEnabled())
                log.debug("Tex #" + oldID + " deleted and replaced by " + texInfo.id);
        }
    }
}
From source file:net.sourceforge.seqware.pipeline.plugins.MetadataTest.java
@Test
public void testListAllTables() {
    systemErr.println("Test List all Tables\n");
    launchPlugin("--list-tables");
    String output = getOut();
    // fix up test to support basic workflow/run creation tools, see git commit 4862eaba7f3d7c7495155dc913ead745b544f358
    String[] tables = new String[] { "TableName", "study", "experiment", "sample", "ius", "lane",
            "sequencer_run", "workflow", "workflow_run" };
    LinkedList<String> stuff = new LinkedList(Arrays.asList(output.split("\n")));
    for (String table : tables) {
        int index = stuff.indexOf(table);
        if (index >= 0) {
            stuff.remove(index);
        } else {
            Assert.fail("Missing a table:" + table);
        }
    }
    while (!stuff.isEmpty()) {
        String s = stuff.poll();
        Assert.fail("There are extra tables listed: " + s);
    }
}