List of usage examples for the java.util.ArrayDeque constructor
public ArrayDeque(Collection<? extends E> c)
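Before the project examples, here is a minimal, self-contained sketch of what this constructor does: it copies the elements of the given collection into a new deque, in the collection's iteration order, with the collection's first element at the head. The class and variable names below are illustrative only and do not come from any of the projects listed on this page.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

public class ArrayDequeCopyExample {
    public static void main(String[] args) {
        List<String> pending = List.of("first", "second", "third");

        // Copies the list; the deque is independent of the original collection.
        Deque<String> queue = new ArrayDeque<>(pending);

        System.out.println(queue.peekFirst()); // "first"
        System.out.println(queue.peekLast());  // "third"

        queue.push("urgent");                  // adds at the head
        System.out.println(queue);             // [urgent, first, second, third]
    }
}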
From source file:com.solera.defrag.ViewStack.java
/**
 * Replace the current stack with the given views.
 *
 * @param views the list of views to replace the stack with. The list consists of pairs of
 *              Integer (layoutId) to Bundle (parameters) for the view. If the Bundle component
 *              is the USE_EXISTING_SAVED_STATE tag, then we will use that saved state for that
 *              view (if it exists, and is at the right location in the stack), otherwise this will be null.
 */
public void replaceStack(@NonNull final List<Pair<Integer, Bundle>> views) {
    if (views.isEmpty()) {
        throw new IllegalArgumentException("Cannot replace stack with an empty views stack");
    }
    ViewStackEntry fromEntry = null;
    Iterator<ViewStackEntry> iterator = null;
    setTraversingState(TraversingState.REPLACING);
    if (!viewStack.isEmpty()) {
        fromEntry = viewStack.peek();
        // take a copy of the view stack:
        Deque<ViewStackEntry> copy = new ArrayDeque<>(viewStack);
        viewStack.clear();
        viewStack.push(fromEntry);
        iterator = copy.iterator();
    }
    for (Pair<Integer, Bundle> view : views) {
        Bundle savedParameter = view.second;
        SparseArray<Parcelable> viewState = null;
        if (view.second == USE_EXISTING_SAVED_STATE) {
            savedParameter = null;
            if (iterator != null && iterator.hasNext()) {
                final ViewStackEntry next = iterator.next();
                if (next.mLayout == view.first) {
                    savedParameter = next.mParameters;
                    viewState = next.mViewState;
                } else {
                    iterator = null;
                }
            }
        }
        viewStack.push(new ViewStackEntry(view.first, savedParameter, viewState));
    }
    final ViewStackEntry toEntry = viewStack.peek();
    final View toView = toEntry.getView();
    if (fromEntry == null || fromEntry.mLayout == toEntry.mLayout) {
        // if the current topEntry layout is null or equal to the next proposed topEntry layout
        // we cannot do a transition animation
        viewStack.remove(fromEntry);
        removeAllViews();
        addView(toView);
        ViewUtils.waitForMeasure(toView, new ViewUtils.OnMeasuredCallback() {
            @Override
            public void onMeasured(View view, int width, int height) {
                setTraversingState(TraversingState.IDLE);
            }
        });
    } else {
        final View fromView = fromEntry.getView();
        addView(toView);
        final ViewStackEntry finalFromEntry = fromEntry;
        ViewUtils.waitForMeasure(toView, new ViewUtils.OnMeasuredCallback() {
            @Override
            public void onMeasured(View view, int width, int height) {
                ViewStack.this.runAnimation(fromView, toView, TraversingOperation.REPLACE);
                viewStack.remove(finalFromEntry);
            }
        });
    }
}
From source file:org.datacleaner.configuration.JaxbConfigurationReader.java
public JaxbConfigurationReader(ConfigurationReaderInterceptor configurationReaderCallback) {
    if (configurationReaderCallback == null) {
        configurationReaderCallback = new DefaultConfigurationReaderInterceptor();
    }
    _interceptor = configurationReaderCallback;
    _variablePathBuilder = new ArrayDeque<String>(4);
    try {
        _jaxbContext = JAXBContext.newInstance(ObjectFactory.class.getPackage().getName(),
                ObjectFactory.class.getClassLoader());
    } catch (JAXBException e) {
        throw new IllegalStateException(e);
    }
}
From source file:org.apache.hadoop.hbase.regionserver.wal.AsyncFSWAL.java
public AsyncFSWAL(FileSystem fs, Path rootDir, String logDir, String archiveDir, Configuration conf,
        List<WALActionsListener> listeners, boolean failIfWALExists, String prefix, String suffix,
        EventLoop eventLoop) throws FailedLogCloseException, IOException {
    super(fs, rootDir, logDir, archiveDir, conf, listeners, failIfWALExists, prefix, suffix);
    this.eventLoop = eventLoop;
    int maxHandlersCount = conf.getInt(REGION_SERVER_HANDLER_COUNT, 200);
    waitingConsumePayloads = new ArrayDeque<Payload>(maxHandlersCount * 3);
    batchSize = conf.getLong(WAL_BATCH_SIZE, DEFAULT_WAL_BATCH_SIZE);
    createMaxRetries = conf.getInt(ASYNC_WAL_CREATE_MAX_RETRIES, DEFAULT_ASYNC_WAL_CREATE_MAX_RETRIES);
    logRollerExitedCheckIntervalMs = conf.getLong(ASYNC_WAL_LOG_ROLLER_EXITED_CHECK_INTERVAL_MS,
            DEFAULT_ASYNC_WAL_LOG_ROLLER_EXITED_CHECK_INTERVAL_MS);
    rollWriter();
}
From source file:com.espertech.esper.view.std.GroupByViewImpl.java
protected static Object addUpgradeToDequeIfPopulated(Object holder, EventBean theEvent) {
    if (holder == null) {
        return theEvent;
    } else if (holder instanceof Deque) {
        Deque<EventBean> deque = (Deque<EventBean>) holder;
        deque.add(theEvent);
        return deque;
    } else {
        ArrayDeque<EventBean> deque = new ArrayDeque<EventBean>(4);
        deque.add((EventBean) holder);
        deque.add(theEvent);
        return deque;
    }
}
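The method above stores a single event directly and only allocates an ArrayDeque once a second event arrives for the same group. A generic sketch of the same holder-upgrade idea, using plain Strings instead of Esper's EventBean type (names here are illustrative, not from the Esper codebase):

import java.util.ArrayDeque;
import java.util.Deque;

public class HolderUpgradeExample {
    // Returns either the single value itself or a Deque of values,
    // allocating the deque only when a second value shows up.
    @SuppressWarnings("unchecked")
    static Object addUpgradeToDequeIfPopulated(Object holder, String value) {
        if (holder == null) {
            return value;
        } else if (holder instanceof Deque) {
            Deque<String> deque = (Deque<String>) holder;
            deque.add(value);
            return deque;
        } else {
            Deque<String> deque = new ArrayDeque<>(4);
            deque.add((String) holder);
            deque.add(value);
            return deque;
        }
    }

    public static void main(String[] args) {
        Object holder = null;
        holder = addUpgradeToDequeIfPopulated(holder, "a"); // holder is now the String "a"
        holder = addUpgradeToDequeIfPopulated(holder, "b"); // upgraded to an ArrayDeque [a, b]
        holder = addUpgradeToDequeIfPopulated(holder, "c"); // reuses the same deque: [a, b, c]
        System.out.println(holder);                         // prints [a, b, c]
    }
}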
From source file:hudson.plugins.nested_view.NestedView.java
/**
 * Returns the health of this nested view.
 * <p>Notice that, if a job is contained in several sub-views of the current
 * view, then it is taken into account only once to get accurate stats.</p>
 * <p>This algorithm has been derecursified, hence the stack stuff.</p>
 */
public HealthReportContainer getHealth() {
    // we use a set to avoid taking into account several times the same job
    // when computing the health
    Set<TopLevelItem> items = new LinkedHashSet<TopLevelItem>(100);

    // retrieve all jobs to analyze (using DFS)
    Deque<View> viewsStack = new ArrayDeque<View>(20);
    viewsStack.push(this);
    do {
        View currentView = viewsStack.pop();
        if (currentView instanceof NestedView) {
            for (View v : ((NestedView) currentView).views) {
                viewsStack.push(v);
            }
        } else {
            items.addAll(currentView.getItems());
        }
    } while (!viewsStack.isEmpty());

    HealthReportContainer hrc = new HealthReportContainer();
    for (TopLevelItem item : items) {
        if (item instanceof Job) {
            hrc.sum += ((Job) item).getBuildHealth().getScore();
            hrc.count++;
        }
    }
    hrc.report = hrc.count > 0 ? new HealthReport(hrc.sum / hrc.count, Messages._ViewHealth(hrc.count))
            : new HealthReport(100, Messages._NoJobs());
    return hrc;
}
From source file:de.themoep.simpleteampvp.games.SimpleTeamPvPGame.java
/**
 * Balance the teams
 *
 * @return <tt>true</tt> if the game is in GameState.JOINING and players can be balanced
 */
public boolean balance() {
    if (getState() != GameState.JOINING)
        return false;
    plugin.getServer()
            .broadcastMessage(ChatColor.GREEN + "Ausbalancieren und Auffüllen der Teams gestartet...");

    Map<Player, String> beforeBalance = new HashMap<>();
    List<Player> playersToJoin = new ArrayList<>();
    for (Player player : plugin.getServer().getOnlinePlayers()) {
        if (player.hasPermission(SimpleTeamPvP.BYPASS_PERM) || player.getGameMode() == GameMode.CREATIVE
                || player.getGameMode() == GameMode.SPECTATOR)
            continue;
        TeamInfo team = getTeam(player);
        if (team == null) {
            if (config.getRandomRegion() == null || config.getRandomRegion().contains(player.getLocation()))
                playersToJoin.add(player);
            beforeBalance.put(player, "");
        } else {
            beforeBalance.put(player, team.getName());
        }
    }
    plugin.getLogger().log(Level.INFO, "Players to join: " + playersToJoin.size());

    int totalPlayers = playersToJoin.size();
    for (TeamInfo team : config.getTeams().values()) {
        totalPlayers += team.getSize();
    }
    plugin.getLogger().log(Level.INFO, "Number of teams: " + config.getTeams().size());

    double perfectSize = (double) totalPlayers / (double) config.getTeams().size();
    plugin.getLogger().log(Level.INFO, "perfectSize: " + perfectSize);

    if (plugin.getServerTags() != null) {
        // Team key -> Tag
        Map<String, String> teamTags = new HashMap<>();
        for (TeamInfo team : config.getTeams().values()) {
            Map<String, Integer> tags = new HashMap<>();
            for (String playerName : team.getScoreboardTeam().getEntries()) {
                Player player = plugin.getServer().getPlayer(playerName);
                if (player == null)
                    continue;
                String tag = "no server";
                ServerInfo serverInfo = plugin.getServerTags().getPlayerServer(player);
                if (serverInfo != null) {
                    tag = serverInfo.getTag();
                }
                if (!tags.containsKey(tag)) {
                    tags.put(tag, 0);
                }
                tags.put(tag, tags.get(tag) + 1);
            }
            String teamTag = "no server";
            int tagCount = 0;
            for (Map.Entry<String, Integer> entry : tags.entrySet()) {
                if (entry.getValue() > tagCount) {
                    tagCount = entry.getValue();
                    teamTag = entry.getKey();
                }
            }
            teamTags.put(team.getName(), teamTag);
        }

        for (TeamInfo team : config.getTeams().values()) {
            // Filter out players that come from another server than the majority of the team
            // and remove them as long as the team is larger than the perfect size
            for (String playerName : team.getScoreboardTeam().getEntries()) {
                if (team.getSize() <= perfectSize + 0.5)
                    break;
                Player player = plugin.getServer().getPlayer(playerName);
                if (player == null)
                    continue;
                String tag = "no server";
                ServerInfo serverInfo = plugin.getServerTags().getPlayerServer(player);
                if (serverInfo != null) {
                    tag = serverInfo.getTag();
                }
                if (tag.equals(teamTags.get(team.getName())))
                    continue;
                plugin.getLogger().log(Level.INFO,
                        "[ST] Removed " + player.getName() + " from " + team.getName() + " (Step 1)");
                team.removePlayer(player);
                playersToJoin.add(player);
            }

            // Team still larger than the perfect size? Remove last joined player
            Deque<String> teamMates = new ArrayDeque<>(team.getScoreboardTeam().getEntries());
            while (team.getSize() > perfectSize + 0.5) {
                String name = teamMates.peekLast();
                Player player = plugin.getServer().getPlayer(name);
                if (player == null)
                    continue;
                team.removePlayer(player);
                plugin.getLogger().log(Level.INFO,
                        "[ST] Removed " + player.getName() + " from " + team.getName() + " (Step 2)");
                teamMates.pollLast();
                playersToJoin.add(player);
            }
        }

        // Add rest of players to teams from their server
        Iterator<Player> playerIterator = playersToJoin.iterator();
        while (playerIterator.hasNext()) {
            Player player = playerIterator.next();
            ServerInfo serverInfo = plugin.getServerTags().getPlayerServer(player);
            if (serverInfo != null && teamTags.containsValue(serverInfo.getTag())) {
                for (TeamInfo team : config.getTeams().values()) {
                    if (team.getSize() < perfectSize - 0.5 && teamTags.containsKey(team.getName())
                            && teamTags.get(team.getName()).equals(serverInfo.getTag())) {
                        team.addPlayer(player);
                        plugin.getLogger().log(Level.INFO,
                                "[ST] Added " + player.getName() + " to " + team.getName());
                        playerIterator.remove();
                        break;
                    }
                }
            }
        }
        plugin.getLogger().log(Level.INFO, "Players to join after servertags: " + playersToJoin.size());
    }

    // Remove players from teams that have more than the perfect size
    for (TeamInfo team : config.getTeams().values()) {
        for (String playerName : team.getScoreboardTeam().getEntries()) {
            if (team.getSize() <= perfectSize + 0.5)
                break;
            Player player = plugin.getServer().getPlayer(playerName);
            if (player == null)
                continue;
            plugin.getLogger().log(Level.INFO, "Removed " + player.getName() + " from " + team.getName());
            team.removePlayer(player);
            playersToJoin.add(player);
        }
    }

    Iterator<Player> playerIterator = playersToJoin.iterator();
    for (TeamInfo team : config.getTeams().values()) {
        while (playerIterator.hasNext()) {
            if (team.getSize() >= perfectSize - 0.5)
                break;
            Player player = playerIterator.next();
            team.addPlayer(player);
            plugin.getLogger().log(Level.INFO, "Added " + player.getName() + " to " + team.getName());
            playerIterator.remove();
        }
    }

    if (playerIterator.hasNext()) {
        plugin.getLogger().log(Level.INFO, "Adding " + playersToJoin.size()
                + " remaining players to teams according to their player count:");
        List<TeamInfo> teams = new ArrayList<>(config.getTeams().values());
        teams.sort((t1, t2) -> Integer.compare(t2.getSize(), t1.getSize()));
        for (TeamInfo team : teams) {
            while (playerIterator.hasNext()) {
                if (team.getSize() > perfectSize)
                    break;
                Player player = playerIterator.next();
                team.addPlayer(player);
                plugin.getLogger().log(Level.INFO,
                        "Added remaining player " + player.getName() + " to " + team.getName());
                playerIterator.remove();
            }
        }
    }

    if (playerIterator.hasNext()) {
        plugin.getLogger().log(Level.INFO,
                "Adding " + playersToJoin.size() + " remaining players to totally random teams:");
        Random r = new Random();
        List<TeamInfo> teams = new ArrayList<>(config.getTeams().values());
        while (playerIterator.hasNext()) {
            Player player = playerIterator.next();
            TeamInfo team = teams.get(r.nextInt(teams.size()));
            team.addPlayer(player);
            plugin.getLogger().log(Level.INFO,
                    "Added player " + player.getName() + " to " + team.getName() + " by random");
            playerIterator.remove();
        }
    }
    plugin.getLogger().log(Level.INFO, "All players joined! (" + playersToJoin.size() + ")");

    for (Map.Entry<Player, String> entry : beforeBalance.entrySet()) {
        TeamInfo team = getTeam(entry.getKey());
        if (team != null && !team.getName().equals(entry.getValue())) {
            Player player = null;
            for (Iterator<String> it = team.getScoreboardTeam().getEntries().iterator(); player == null
                    && it.hasNext();) {
                player = plugin.getServer().getPlayer(it.next());
            }
            if (player != null && team.getJoinRegion().contains(player.getLocation())) {
                entry.getKey().teleport(player);
            } else {
                entry.getKey().teleport(team.getJoinRegion().calculateMiddle().getLocation());
            }
        }
    }

    plugin.getServer().broadcastMessage(ChatColor.GREEN + "Teams ausbalanciert und aufgefüllt!");
    state = GameState.WAITING;
    return true;
}
From source file:com.espertech.esper.core.context.util.StatementAgentInstanceUtil.java
private static void evaluateEventForStatementInternal(EPServicesContext servicesContext, EventBean theEvent,
        List<AgentInstance> agentInstances) {
    // context was created - reevaluate for the given event
    ArrayDeque<FilterHandle> callbacks = new ArrayDeque<FilterHandle>(2);
    servicesContext.getFilterService().evaluate(theEvent, callbacks); // evaluates for ALL statements
    if (callbacks.isEmpty()) {
        return;
    }

    // there is a single callback and a single context, if they match we are done
    if (agentInstances.size() == 1 && callbacks.size() == 1) {
        AgentInstance agentInstance = agentInstances.get(0);
        if (agentInstance.getAgentInstanceContext().getStatementId()
                .equals(callbacks.getFirst().getStatementId())) {
            process(agentInstance, servicesContext, callbacks, theEvent);
        }
        return;
    }

    // use the right sorted/unsorted Map keyed by AgentInstance to sort
    boolean isPrioritized = servicesContext.getConfigSnapshot().getEngineDefaults().getExecution()
            .isPrioritized();
    Map<AgentInstance, Object> stmtCallbacks;
    if (!isPrioritized) {
        stmtCallbacks = new HashMap<AgentInstance, Object>();
    } else {
        stmtCallbacks = new TreeMap<AgentInstance, Object>(AgentInstanceComparator.INSTANCE);
    }

    // process all callbacks
    for (FilterHandle filterHandle : callbacks) {
        // determine if this filter entry applies to any of the affected agent instances
        String statementId = filterHandle.getStatementId();
        AgentInstance agentInstanceFound = null;
        for (AgentInstance agentInstance : agentInstances) {
            if (agentInstance.getAgentInstanceContext().getStatementId().equals(statementId)) {
                agentInstanceFound = agentInstance;
                break;
            }
        }
        if (agentInstanceFound == null) {
            // when the callback is for some other stmt
            continue;
        }

        EPStatementHandleCallback handleCallback = (EPStatementHandleCallback) filterHandle;
        EPStatementAgentInstanceHandle handle = handleCallback.getAgentInstanceHandle();

        // Self-joins require that the internal dispatch happens after all streams are evaluated.
        // Priority or preemptive settings also require special ordering.
        if (handle.isCanSelfJoin() || isPrioritized) {
            Object stmtCallback = stmtCallbacks.get(agentInstanceFound);
            if (stmtCallback == null) {
                stmtCallbacks.put(agentInstanceFound, handleCallback);
            } else if (stmtCallback instanceof ArrayDeque) {
                ArrayDeque<EPStatementHandleCallback> q = (ArrayDeque<EPStatementHandleCallback>) stmtCallback;
                q.add(handleCallback);
            } else {
                ArrayDeque<EPStatementHandleCallback> q = new ArrayDeque<EPStatementHandleCallback>(4);
                q.add((EPStatementHandleCallback) stmtCallback);
                q.add(handleCallback);
                stmtCallbacks.put(agentInstanceFound, q);
            }
            continue;
        }

        // no need to be sorted, process
        process(agentInstanceFound, servicesContext, Collections.<FilterHandle>singletonList(handleCallback),
                theEvent);
    }

    if (stmtCallbacks.isEmpty()) {
        return;
    }

    // Process self-join or sorted prioritized callbacks
    for (Map.Entry<AgentInstance, Object> entry : stmtCallbacks.entrySet()) {
        AgentInstance agentInstance = entry.getKey();
        Object callbackList = entry.getValue();
        if (callbackList instanceof ArrayDeque) {
            process(agentInstance, servicesContext, (Collection<FilterHandle>) callbackList, theEvent);
        } else {
            process(agentInstance, servicesContext,
                    Collections.<FilterHandle>singletonList((FilterHandle) callbackList), theEvent);
        }
        if (agentInstance.getAgentInstanceContext().getEpStatementAgentInstanceHandle().isPreemptive()) {
            return;
        }
    }
}
From source file:eu.itesla_project.modules.validation.OfflineValidationTool.java
@Override
public void run(CommandLine line) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    String rulesDbName = line.hasOption("rules-db-name") ? line.getOptionValue("rules-db-name")
            : OfflineConfig.DEFAULT_RULES_DB_NAME;
    String workflowId = line.getOptionValue("workflow");
    Path outputDir = Paths.get(line.getOptionValue("output-dir"));
    double purityThreshold = line.hasOption("purity-threshold")
            ? Double.parseDouble(line.getOptionValue("purity-threshold")) : DEFAULT_PURITY_THRESHOLD;
    Set<Country> countries = Arrays.stream(line.getOptionValue("base-case-countries").split(","))
            .map(Country::valueOf).collect(Collectors.toSet());
    Interval histoInterval = Interval.parse(line.getOptionValue("history-interval"));
    boolean mergeOptimized = line.hasOption("merge-optimized");
    CaseType caseType = CaseType.valueOf(line.getOptionValue("case-type"));

    CaseRepositoryFactory caseRepositoryFactory = config.getCaseRepositoryFactoryClass().newInstance();
    RulesDbClientFactory rulesDbClientFactory = config.getRulesDbClientFactoryClass().newInstance();
    ContingenciesAndActionsDatabaseClient contingencyDb = config.getContingencyDbClientFactoryClass()
            .newInstance().create();
    SimulatorFactory simulatorFactory = config.getSimulatorFactoryClass().newInstance();
    LoadFlowFactory loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();
    MergeOptimizerFactory mergeOptimizerFactory = config.getMergeOptimizerFactoryClass().newInstance();
    SimulationParameters simulationParameters = SimulationParameters.load();

    try (ComputationManager computationManager = new LocalComputationManager();
            RulesDbClient rulesDb = rulesDbClientFactory.create(rulesDbName);
            CsvMetricsDb metricsDb = new CsvMetricsDb(outputDir, true, "metrics")) {

        CaseRepository caseRepository = caseRepositoryFactory.create(computationManager);
        Queue<DateTime> dates = Queues.synchronizedDeque(
                new ArrayDeque<>(caseRepository.dataAvailable(caseType, countries, histoInterval)));

        Map<String, Map<RuleId, ValidationStatus>> statusPerRulePerCase = Collections
                .synchronizedMap(new TreeMap<>());
        Map<String, Map<RuleId, Map<HistoDbAttributeId, Object>>> valuesPerRulePerCase = Collections
                .synchronizedMap(new TreeMap<>());

        int cores = Runtime.getRuntime().availableProcessors();
        ExecutorService executorService = Executors.newFixedThreadPool(cores);
        try {
            List<Future<?>> tasks = new ArrayList<>(cores);
            for (int i = 0; i < cores; i++) {
                tasks.add(executorService.submit((Runnable) () -> {
                    while (dates.size() > 0) {
                        DateTime date = dates.poll();
                        try {
                            Network network = MergeUtil.merge(caseRepository, date, caseType, countries,
                                    loadFlowFactory, 0, mergeOptimizerFactory, computationManager,
                                    mergeOptimized);
                            System.out.println("case " + network.getId() + " loaded");
                            System.out.println("running simulation on " + network.getId() + "...");

                            network.getStateManager().allowStateMultiThreadAccess(true);
                            String baseStateId = network.getId();
                            network.getStateManager().cloneState(StateManager.INITIAL_STATE_ID, baseStateId);
                            network.getStateManager().setWorkingState(baseStateId);

                            Map<RuleId, ValidationStatus> statusPerRule = new HashMap<>();
                            Map<RuleId, Map<HistoDbAttributeId, Object>> valuesPerRule = new HashMap<>();

                            LoadFlow loadFlow = loadFlowFactory.create(network, computationManager, 0);
                            LoadFlowResult loadFlowResult = loadFlow.run();

                            System.err.println("load flow terminated (" + loadFlowResult.isOk() + ") on "
                                    + network.getId());

                            if (loadFlowResult.isOk()) {
                                Stabilization stabilization = simulatorFactory.createStabilization(network,
                                        computationManager, 0);
                                ImpactAnalysis impactAnalysis = simulatorFactory.createImpactAnalysis(network,
                                        computationManager, 0, contingencyDb);
                                Map<String, Object> context = new HashMap<>();
                                stabilization.init(simulationParameters, context);
                                impactAnalysis.init(simulationParameters, context);
                                StabilizationResult stabilizationResult = stabilization.run();

                                System.err.println("stabilization terminated (" + stabilizationResult.getStatus()
                                        + ") on " + network.getId());

                                metricsDb.store(workflowId, network.getId(), "STABILIZATION",
                                        stabilizationResult.getMetrics());

                                if (stabilizationResult.getStatus() == StabilizationStatus.COMPLETED) {
                                    ImpactAnalysisResult impactAnalysisResult = impactAnalysis
                                            .run(stabilizationResult.getState());

                                    System.err.println("impact analysis terminated on " + network.getId());

                                    metricsDb.store(workflowId, network.getId(), "IMPACT_ANALYSIS",
                                            impactAnalysisResult.getMetrics());

                                    System.out.println("checking rules on " + network.getId() + "...");

                                    for (SecurityIndex securityIndex : impactAnalysisResult
                                            .getSecurityIndexes()) {
                                        for (RuleAttributeSet attributeSet : RuleAttributeSet.values()) {
                                            statusPerRule.put(new RuleId(attributeSet, securityIndex.getId()),
                                                    new ValidationStatus(null, securityIndex.isOk()));
                                        }
                                    }
                                }
                            }

                            Map<HistoDbAttributeId, Object> values = IIDM2DB
                                    .extractCimValues(network, new IIDM2DB.Config(null, false))
                                    .getSingleValueMap();
                            for (RuleAttributeSet attributeSet : RuleAttributeSet.values()) {
                                for (Contingency contingency : contingencyDb.getContingencies(network)) {
                                    List<SecurityRule> securityRules = rulesDb.getRules(workflowId,
                                            attributeSet, contingency.getId(), null);
                                    for (SecurityRule securityRule : securityRules) {
                                        SecurityRuleExpression securityRuleExpression = securityRule
                                                .toExpression(purityThreshold);
                                        SecurityRuleCheckReport checkReport = securityRuleExpression
                                                .check(values);

                                        valuesPerRule.put(securityRule.getId(), ExpressionAttributeList
                                                .list(securityRuleExpression.getCondition()).stream()
                                                .collect(Collectors.toMap(attributeId -> attributeId,
                                                        new Function<HistoDbAttributeId, Object>() {
                                                            @Override
                                                            public Object apply(HistoDbAttributeId attributeId) {
                                                                Object value = values.get(attributeId);
                                                                return value != null ? value : Float.NaN;
                                                            }
                                                        })));

                                        ValidationStatus status = statusPerRule.get(securityRule.getId());
                                        if (status == null) {
                                            status = new ValidationStatus(null, null);
                                            statusPerRule.put(securityRule.getId(), status);
                                        }
                                        if (checkReport.getMissingAttributes().isEmpty()) {
                                            status.setRuleOk(checkReport.isSafe());
                                        }
                                    }
                                }
                            }

                            statusPerRulePerCase.put(network.getId(), statusPerRule);
                            valuesPerRulePerCase.put(network.getId(), valuesPerRule);
                        } catch (Exception e) {
                            LOGGER.error(e.toString(), e);
                        }
                    }
                }));
            }
            for (Future<?> task : tasks) {
                task.get();
            }
        } finally {
            executorService.shutdown();
            executorService.awaitTermination(1, TimeUnit.MINUTES);
        }

        writeCsv(statusPerRulePerCase, valuesPerRulePerCase, outputDir);
    }
}
From source file:com.espertech.esper.core.start.EPPreparedExecuteMethodQuery.java
private Collection<EventBean> getFiltered(Collection<EventBean> snapshot, List<ExprNode> filterExpressions) {
    ArrayDeque<EventBean> deque = new ArrayDeque<EventBean>(Math.min(snapshot.size(), 16));
    ExprNodeUtility.applyFilterExpressionsIterable(snapshot, filterExpressions, agentInstanceContext, deque);
    return deque;
}
From source file:com.sonymobile.android.media.internal.ISOBMFFParser.java
protected void initParsing() {
    mCurrentOffset = 0;
    mCurrentBoxSequence = new ArrayDeque<BoxHeader>(10);
}