List of usage examples for java.util.Queue.poll()
E poll();
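poll() retrieves and removes the head of the queue, returning null if the queue is empty (unlike remove(), which throws NoSuchElementException on an empty queue). Before the real-world examples below, here is a minimal sketch of the common drain-loop idiom; the queue contents are illustrative only:

    import java.util.ArrayDeque;
    import java.util.Queue;

    public class PollExample {
        public static void main(String[] args) {
            Queue<String> queue = new ArrayDeque<>();
            queue.add("first");
            queue.add("second");

            // poll() returns and removes the head, or null when the queue is empty,
            // so it is commonly used as the loop condition when draining a queue.
            String element;
            while ((element = queue.poll()) != null) {
                System.out.println(element);
            }

            System.out.println(queue.poll()); // prints "null": the queue is now empty
        }
    }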
From source file:org.jboss.errai.ioc.rebind.ioc.graph.impl.DependencyGraphBuilderImpl.java
private void processResolutionQueue(final Queue<AbstractInjectable> resolutionQueue,
        final Multimap<ResolutionPriority, ConcreteInjectable> resolvedByPriority) {
    do {
        final AbstractInjectable cur = resolutionQueue.poll();
        for (final BaseInjectable link : cur.linked) {
            if (link instanceof AbstractInjectable) {
                resolutionQueue.add((AbstractInjectable) link);
            } else if (link instanceof ConcreteInjectable) {
                resolvedByPriority.put(getMatchingPriority(link), (ConcreteInjectable) link);
            }
        }
    } while (resolutionQueue.size() > 0);
}
From source file:org.apache.storm.scheduler.resource.strategies.scheduling.DefaultResourceAwareStrategy.java
/**
 * Order executors based on how many in and out connections they will potentially need to make.
 * First order components by the number of in and out connections they will have. Then iterate through the
 * sorted list of components. For each component, sort its neighbors by how many connections they will have
 * to make with that component. Add an executor from this component and then from each neighboring component
 * in sorted order. Do this until there is nothing left to schedule.
 *
 * @param td the topology the executors belong to
 * @param unassignedExecutors a collection of unassigned executors that need to be scheduled; only executors
 *        from this list should be assigned
 * @return a list of executors in sorted order
 */
private List<ExecutorDetails> orderExecutors(TopologyDetails td, Collection<ExecutorDetails> unassignedExecutors) {
    Map<String, Component> componentMap = td.getComponents();
    List<ExecutorDetails> execsScheduled = new LinkedList<>();

    Map<String, Queue<ExecutorDetails>> compToExecsToSchedule = new HashMap<>();
    for (Component component : componentMap.values()) {
        compToExecsToSchedule.put(component.id, new LinkedList<ExecutorDetails>());
        for (ExecutorDetails exec : component.execs) {
            if (unassignedExecutors.contains(exec)) {
                compToExecsToSchedule.get(component.id).add(exec);
            }
        }
    }

    Set<Component> sortedComponents = sortComponents(componentMap);
    sortedComponents.addAll(componentMap.values());

    for (Component currComp : sortedComponents) {
        Map<String, Component> neighbors = new HashMap<String, Component>();
        for (String compId : (List<String>) ListUtils.union(currComp.children, currComp.parents)) {
            neighbors.put(compId, componentMap.get(compId));
        }
        Set<Component> sortedNeighbors = sortNeighbors(currComp, neighbors);
        Queue<ExecutorDetails> currCompExesToSched = compToExecsToSchedule.get(currComp.id);

        boolean flag = false;
        do {
            flag = false;
            if (!currCompExesToSched.isEmpty()) {
                execsScheduled.add(currCompExesToSched.poll());
                flag = true;
            }
            for (Component neighborComp : sortedNeighbors) {
                Queue<ExecutorDetails> neighborCompExesToSched = compToExecsToSchedule.get(neighborComp.id);
                if (!neighborCompExesToSched.isEmpty()) {
                    execsScheduled.add(neighborCompExesToSched.poll());
                    flag = true;
                }
            }
        } while (flag);
    }
    return execsScheduled;
}
From source file:org.onebusaway.uk.network_rail.gtfs_realtime.graph.PositionBerthToStanoxGraphMain.java
private void explore(Set<RawBerthNode> connections, RawStanoxNode stanoxNode) {
    Queue<OrderedRawBerthNode> queue = new PriorityQueue<OrderedRawBerthNode>();
    Set<RawBerthNode> visited = new HashSet<RawBerthNode>();
    int openCount = 0;
    for (RawBerthNode connection : connections) {
        queue.add(new OrderedRawBerthNode(connection, null, 0.0));
        openCount++;
    }
    Map<RawBerthNode, RawBerthNode> parents = new HashMap<RawBerthNode, RawBerthNode>();
    while (!queue.isEmpty()) {
        OrderedRawBerthNode currentNode = queue.poll();
        RawBerthNode node = currentNode.getNode();
        boolean isOpen = currentNode.isOpen();
        if (isOpen) {
            openCount--;
        } else if (openCount == 0) {
            return;
        }
        if (visited.contains(node)) {
            continue;
        }
        visited.add(node);
        parents.put(node, currentNode.getParent());
        Set<RawStanoxNode> stanoxes = node.getStanox();
        if (stanoxes.size() > 0 && !stanoxes.contains(stanoxNode)) {
            _log.info(node + " stanoxes=" + stanoxes + " " + currentNode.getDistance() + " open=" + openCount);
            RawBerthNode c = node;
            while (c != null) {
                _log.info(" " + c);
                c = parents.get(c);
            }
            isOpen = false;
        }
        for (Map.Entry<RawBerthNode, List<Integer>> entry : node.getOutgoing().entrySet()) {
            RawBerthNode outgoing = entry.getKey();
            int avgDuration = RawNode.average(entry.getValue());
            queue.add(new OrderedRawBerthNode(outgoing, node, currentNode.getDistance() + avgDuration, isOpen));
            if (isOpen) {
                openCount++;
            }
        }
    }
}
From source file:com.github.fauu.natrank.service.RankingServiceImpl.java
@Override
public void createRankings() throws DataAccessException {
    List<Match> matches = (List<Match>) matchRepository.findAll();

    calculateTeamRatingsAndRankChanges(matches);
    calculateTeamExtremes();

    List<Ranking> rankings = new LinkedList<>();
    Map<Integer, RankingEntry> entryMap = new HashMap<>();

    rankingRepository.deleteAll();

    Queue<LocalDate> rankingDateQueue = new LinkedList<>();
    rankingDateQueue.add(matches.get(matches.size() - 1).getDate());

    for (Match match : matches) {
        LocalDate nextRankingDate = rankingDateQueue.peek();
        if (nextRankingDate == null) {
            break;
        }
        if (match.getDate().isAfter(nextRankingDate)) {
            rankings.add(createRankingForDate(rankingDateQueue.poll(), entryMap));
        }

        List<Team> matchTeams = new ArrayList<>();
        matchTeams.add(match.getTeam1());
        matchTeams.add(match.getTeam2());
        for (Team team : matchTeams) {
            if (!entryMap.containsKey(team.getId())) {
                RankingEntry newEntry = new RankingEntry();
                newEntry.setTeam(team);
                entryMap.put(team.getId(), newEntry);
            }
        }

        List<RankingEntry> matchTeamEntries = new ArrayList<>();
        matchTeamEntries.add(entryMap.get(matchTeams.get(0).getId()));
        matchTeamEntries.add(entryMap.get(matchTeams.get(1).getId()));

        matchTeamEntries.get(0).incrementMatchesTotal();
        matchTeamEntries.get(1).incrementMatchesTotal();
        if (match.getHomeTeam() == null) {
            matchTeamEntries.get(0).incrementMatchesOnNeutralGround();
            matchTeamEntries.get(1).incrementMatchesOnNeutralGround();
        } else if (match.getHomeTeam() == matchTeams.get(0)) {
            matchTeamEntries.get(0).incrementMatchesHome();
            matchTeamEntries.get(1).incrementMatchesAway();
        } else if (match.getHomeTeam() == matchTeams.get(1)) {
            matchTeamEntries.get(0).incrementMatchesAway();
            matchTeamEntries.get(1).incrementMatchesHome();
        }

        if (match.getWinnerTeam() == null) {
            matchTeamEntries.get(0).incrementDraws();
            matchTeamEntries.get(1).incrementDraws();
        } else if (match.getWinnerTeam() == matchTeams.get(0)) {
            matchTeamEntries.get(0).incrementWins();
            matchTeamEntries.get(1).incrementLosses();
        } else if (match.getWinnerTeam() == matchTeams.get(1)) {
            matchTeamEntries.get(0).incrementLosses();
            matchTeamEntries.get(1).incrementWins();
        }

        matchTeamEntries.get(0).addGoalsFor(match.getTeam1Goals());
        matchTeamEntries.get(0).addGoalsAgainst(match.getTeam2Goals());
        matchTeamEntries.get(1).addGoalsFor(match.getTeam2Goals());
        matchTeamEntries.get(1).addGoalsAgainst(match.getTeam1Goals());
    }

    LocalDate nextRankingDate;
    while ((nextRankingDate = rankingDateQueue.poll()) != null) {
        rankings.add(createRankingForDate(nextRankingDate, entryMap));
    }

    rankingRepository.save(rankings);
}
From source file:com.datumbox.common.persistentstorage.factories.MongoDBStructureFactory.java
@Override
public void postLoad(BigDataStructureContainer learnedParameters, MemoryConfiguration memoryConfiguration) {
    //InMemory DataStructureTypes can be used to speed up training. This point of load()
    //method is reached when the classifier calls a method that requires data and the
    //data are not already loaded.
    //
    //Loading the data from the DB to a map during methods like test() or predict() is
    //slow and a waste of resources. Usually LRU caching is more appropriate.
    //Thus using hashmaps during test() or predict() is disallowed if the
    //data are not already in there.
    //
    //Below if the MapType is set to HashMap we switch it to the default map.
    //We do the same with the other DataStructures
    if (memoryConfiguration.getMapType().isInMemory()) {
        memoryConfiguration.setMapType(getDefaultMapType());
        memoryConfiguration.setLRUsize(getDefaultLRUsize());
    }
    if (memoryConfiguration.getCollectionType().isInMemory()) {
        memoryConfiguration.setCollectionType(getDefaultCollectionType());
    }
    if (memoryConfiguration.getSetType().isInMemory()) {
        memoryConfiguration.setSetType(getDefaultSetType());
    }
    if (memoryConfiguration.getQueueType().isInMemory()) {
        memoryConfiguration.setQueueType(getDefaultQueueType());
    }

    Queue<BigDataStructureContainer> learnableObjects = new LinkedList<>();
    //This set uses the default equals() which means that it compares memory addresses. This behavior is desired
    Set<BigDataStructureContainer> alreadyChecked = new HashSet<>();

    learnableObjects.add(learnedParameters);

    while (learnableObjects.size() > 0) {
        //get the next object from the queue
        BigDataStructureContainer obj = learnableObjects.poll();

        //mark it as examined
        alreadyChecked.add(obj);

        //reinitialize the big data structures to load the data from the mongodb collections
        obj.bigDataStructureInitializer(this, memoryConfiguration);

        //get all the fields from all the inherited classes
        for (Field field : getAllFields(new LinkedList<>(), obj.getClass())) {
            Class<?> fieldClass = field.getType();

            //if this object can be learned and is not already checked add it in the Queue
            if (BigDataStructureContainer.class.isAssignableFrom(fieldClass)) {
                field.setAccessible(true);
                BigDataStructureContainer fieldValue;
                try {
                    fieldValue = (BigDataStructureContainer) field.get(obj);
                } catch (IllegalArgumentException | IllegalAccessException ex) {
                    throw new RuntimeException(ex);
                }
                if (fieldValue != null && !alreadyChecked.contains(fieldValue)) {
                    learnableObjects.add(fieldValue);
                }
            }
        }
    }
}
From source file:password.pwm.svc.report.ReportService.java
private void updateCacheFromLdap()
        throws ChaiUnavailableException, ChaiOperationException, PwmOperationalException, PwmUnrecoverableException {
    LOGGER.debug(PwmConstants.REPORTING_SESSION_LABEL,
            "beginning process to updating user cache records from ldap");
    if (status != STATUS.OPEN) {
        return;
    }
    cancelFlag = false;
    reportStatus = new ReportStatusInfo(settings.getSettingsHash());
    reportStatus.setInProgress(true);
    reportStatus.setStartDate(new Date());
    try {
        final Queue<UserIdentity> allUsers = new LinkedList<>(getListOfUsers());
        reportStatus.setTotal(allUsers.size());
        while (status == STATUS.OPEN && !allUsers.isEmpty() && !cancelFlag) {
            final Date startUpdateTime = new Date();
            final UserIdentity userIdentity = allUsers.poll();
            try {
                if (updateCachedRecordFromLdap(userIdentity)) {
                    reportStatus.setUpdated(reportStatus.getUpdated() + 1);
                }
            } catch (Exception e) {
                String errorMsg = "error while updating report cache for " + userIdentity.toString() + ", cause: ";
                errorMsg += e instanceof PwmException
                        ? ((PwmException) e).getErrorInformation().toDebugStr()
                        : e.getMessage();
                final ErrorInformation errorInformation;
                errorInformation = new ErrorInformation(PwmError.ERROR_REPORTING_ERROR, errorMsg);
                LOGGER.error(PwmConstants.REPORTING_SESSION_LABEL, errorInformation.toDebugStr());
                reportStatus.setLastError(errorInformation);
                reportStatus.setErrors(reportStatus.getErrors() + 1);
            }
            reportStatus.setCount(reportStatus.getCount() + 1);
            reportStatus.getEventRateMeter().markEvents(1);
            final TimeDuration totalUpdateTime = TimeDuration.fromCurrent(startUpdateTime);
            if (settings.isAutoCalcRest()) {
                avgTracker.addSample(totalUpdateTime.getTotalMilliseconds());
                Helper.pause(avgTracker.avgAsLong());
            } else {
                Helper.pause(settings.getRestTime().getTotalMilliseconds());
            }
        }
        if (cancelFlag) {
            reportStatus.setLastError(
                    new ErrorInformation(PwmError.ERROR_SERVICE_NOT_AVAILABLE, "report cancelled by operator"));
        }
    } finally {
        reportStatus.setFinishDate(new Date());
        reportStatus.setInProgress(false);
    }
    LOGGER.debug(PwmConstants.REPORTING_SESSION_LABEL,
            "update user cache process completed: " + JsonUtil.serialize(reportStatus));
}
From source file:com.datumbox.common.persistentstorage.factories.MongoDBStructureFactory.java
@Override
public void preSave(BigDataStructureContainer learnedParameters, MemoryConfiguration memoryConfiguration) {
    boolean usesInMemoryStructures = memoryConfiguration.getMapType().isInMemory()
            || memoryConfiguration.getSetType().isInMemory()
            || memoryConfiguration.getQueueType().isInMemory()
            || memoryConfiguration.getCollectionType().isInMemory();

    //If in-memory structures are used to speed up the execution of the algorithm, then
    //those fields are marked as Transient and thus they will not be stored
    //by Morphia. To avoid losing this information, we check to find the fields
    //of the ModelParameter object and we try to spot the fields that are marked
    //as Transient and BigDataStructureMarker (custom annotation). If such a field is found
    //we add its contents in the database in a collection named as the name
    //of the field.
    if (usesInMemoryStructures) {
        Queue<BigDataStructureContainer> learnableObjects = new LinkedList<>();
        //This set uses the default equals() which means that it compares memory addresses. This behavior is desired
        Set<BigDataStructureContainer> alreadyChecked = new HashSet<>();

        learnableObjects.add(learnedParameters);

        while (learnableObjects.size() > 0) {
            //get the next object from the queue
            BigDataStructureContainer obj = learnableObjects.poll();

            //mark it as examined
            alreadyChecked.add(obj);

            //get all the fields from all the inherited classes
            for (Field field : getAllFields(new LinkedList<>(), obj.getClass())) {
                handleBigDataStructureField(field, obj, memoryConfiguration);

                Class<?> fieldClass = field.getType();

                //if this object can be learned and is not already checked add it in the Queue
                if (BigDataStructureContainer.class.isAssignableFrom(fieldClass)) {
                    field.setAccessible(true);
                    BigDataStructureContainer fieldValue;
                    try {
                        fieldValue = (BigDataStructureContainer) field.get(obj);
                    } catch (IllegalArgumentException | IllegalAccessException ex) {
                        throw new RuntimeException(ex);
                    }
                    //guard against null field values before enqueuing them, as in postLoad()
                    if (fieldValue != null && !alreadyChecked.contains(fieldValue)) {
                        learnableObjects.add(fieldValue);
                    }
                }
            }
        }
    }
}
From source file:org.kuali.kra.award.awardhierarchy.AwardHierarchyServiceImpl.java
public Map<String, AwardHierarchy> getAwardHierarchy(AwardHierarchy anyNode, List<String> order) {
    Map<String, AwardHierarchy> result = new HashMap<String, AwardHierarchy>();
    if (anyNode == null) {
        return result;
    }

    Map<String, Object> values = new HashMap<String, Object>();
    //find all hierarchy BOs for the root award number. If anyNode is the root, its root award number
    //will be 'DEFAULT_AWARD_NUMBER', so we use its award number instead; otherwise we use the root award number
    String rootAwardNumber = StringUtils.equals(Award.DEFAULT_AWARD_NUMBER, anyNode.getRootAwardNumber())
            ? anyNode.getAwardNumber()
            : anyNode.getRootAwardNumber();
    values.put("rootAwardNumber", rootAwardNumber);
    values.put("active", true);
    List<AwardHierarchy> hierarchyList = (List<AwardHierarchy>) legacyDataAdapter
            .findMatchingOrderBy(AwardHierarchy.class, values, "awardNumber", true);

    if (!hierarchyList.isEmpty()) {
        for (AwardHierarchy hierarchy : hierarchyList) {
            result.put(hierarchy.getAwardNumber(), hierarchy);
            //clear children in case this was already called and cached BOs were returned from OJB.
            hierarchy.getChildren().clear();
        }
        AwardHierarchy rootNode = result.get(rootAwardNumber);
        for (AwardHierarchy hierarchy : result.values()) {
            hierarchy.setRoot(rootNode);
            AwardHierarchy parent = result.get(hierarchy.getParentAwardNumber());
            if (parent != null) {
                parent.getChildren().add(hierarchy);
                hierarchy.setParent(parent);
            }
        }
        for (AwardHierarchy hierarchy : result.values()) {
            Collections.sort(hierarchy.getChildren(), new Comparator<AwardHierarchy>() {
                public int compare(AwardHierarchy arg0, AwardHierarchy arg1) {
                    return arg0.getAwardNumber().compareTo(arg1.getAwardNumber());
                }
            });
        }
        Queue<AwardHierarchy> queue = new LinkedList<AwardHierarchy>();
        queue.add(rootNode);
        while (!queue.isEmpty()) {
            AwardHierarchy node = queue.poll();
            order.add(node.getAwardNumber());
            queue.addAll(node.getChildren());
        }
    }
    return result;
}
From source file:org.apache.kylin.metadata.model.DataModelDesc.java
private void reorderJoins(Map<String, TableDesc> tables) {
    if (joinTables.length == 0) {
        return;
    }

    Map<String, List<JoinTableDesc>> fkMap = Maps.newHashMap();
    for (JoinTableDesc joinTable : joinTables) {
        JoinDesc join = joinTable.getJoin();
        String fkSideName = join.getFKSide().getAlias();
        if (fkMap.containsKey(fkSideName)) {
            fkMap.get(fkSideName).add(joinTable);
        } else {
            List<JoinTableDesc> joinTableList = Lists.newArrayList();
            joinTableList.add(joinTable);
            fkMap.put(fkSideName, joinTableList);
        }
    }

    JoinTableDesc[] orderedJoinTables = new JoinTableDesc[joinTables.length];
    int orderedIndex = 0;

    Queue<JoinTableDesc> joinTableBuff = new ArrayDeque<JoinTableDesc>();
    TableDesc rootDesc = tables.get(rootFactTable);
    joinTableBuff.addAll(fkMap.get(rootDesc.getName()));
    while (!joinTableBuff.isEmpty()) {
        JoinTableDesc head = joinTableBuff.poll();
        orderedJoinTables[orderedIndex++] = head;
        String headAlias = head.getJoin().getPKSide().getAlias();
        if (fkMap.containsKey(headAlias)) {
            joinTableBuff.addAll(fkMap.get(headAlias));
        }
    }

    joinTables = orderedJoinTables;
}
From source file:org.shaman.terrain.polygonal.GraphToHeightmap.java
private void calculateBaseElevation() {
    //assign elevation to oceans
    for (Graph.Corner c : graph.corners) {
        if (c.ocean) {
            c.elevation = -1;
        }
    }
    Queue<Graph.Corner> q = new ArrayDeque<>();
    for (Graph.Corner c : graph.corners) {
        if (c.coast) {
            q.add(c);
        }
    }
    while (!q.isEmpty()) {
        Graph.Corner c = q.poll();
        for (Graph.Corner r : c.adjacent) {
            float h = Math.max(-1, c.elevation - 0.2f);
            if (r.ocean && r.elevation < h) {
                r.elevation = h;
                q.add(r);
            }
        }
    }
    assignCenterElevations();
    //render
    Geometry geom = createElevationGeometry();
    Heightmap tmp = new Heightmap(size);
    render(tmp.getRawData(), geom, ColorRGBA.Black, -1, 1);
    //scale
    for (int x = 0; x < size; ++x) {
        for (int y = 0; y < size; ++y) {
            float h = tmp.getHeightAt(x, y);
            h = (float) (Math.signum(h) * Math.pow(Math.abs(h), HEIGHT_SCALING));
            tmp.setHeightAt(x, y, h);
        }
    }
    //distort
    Noise distortionNoise = new Noise(rand.nextLong());
    for (int x = 0; x < size; ++x) {
        for (int y = 0; y < size; ++y) {
            float s = x / (float) size;
            float t = y / (float) size;
            float ss = (float) (s + DISTORTION_AMPLITUDE * 2
                    * distortionNoise.noise(s * DISTORTION_FREQUENCY, t * DISTORTION_FREQUENCY, 0));
            float tt = (float) (t + DISTORTION_AMPLITUDE * 2
                    * distortionNoise.noise(s * DISTORTION_FREQUENCY, t * DISTORTION_FREQUENCY, 3.4));
            float v = tmp.getHeightInterpolating(ss * size, tt * size);
            heightmap.setHeightAt(x, y, v);
        }
    }
    //smooth
    for (int i = 0; i < SMOOTHING_STEPS; ++i) {
        smooth(heightmap);
    }
    //reset height
    for (Graph.Corner c : graph.corners) {
        if (c.ocean) {
            c.elevation = 0;
        }
    }
    assignCenterElevations();
    LOG.info("base elevation assigned");
}