List of usage examples for java.util.TreeSet.first()
public E first()
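Returns the first (lowest) element currently in this set, as determined by natural ordering or the set's comparator. The element is not removed; calling first() on an empty set throws NoSuchElementException. A minimal standalone sketch (class name and values are illustrative):

import java.util.TreeSet;

public class FirstDemo {
    public static void main(String[] args) {
        TreeSet<String> set = new TreeSet<String>();
        set.add("orange");
        set.add("apple");
        set.add("pear");
        System.out.println(set.first()); // apple -- lowest element; the set is unchanged
        // set.pollFirst() would return and remove "apple" instead
        // calling first() on an empty TreeSet throws NoSuchElementException
    }
}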
From source file: bes.injector.InjectorBurnTest.java

private void testPromptnessOfExecution(long intervalNanos, float loadIncrement)
        throws InterruptedException, ExecutionException, TimeoutException {
    final int executorCount = 4;
    int threadCount = 8;
    int maxQueued = 1024;
    final WeibullDistribution workTime = new WeibullDistribution(3, 200000);
    final long minWorkTime = TimeUnit.MICROSECONDS.toNanos(1);
    final long maxWorkTime = TimeUnit.MILLISECONDS.toNanos(1);
    final int[] threadCounts = new int[executorCount];
    final WeibullDistribution[] workCount = new WeibullDistribution[executorCount];
    final ExecutorService[] executors = new ExecutorService[executorCount];
    final Injector injector = new Injector("");
    for (int i = 0; i < executors.length; i++) {
        executors[i] = injector.newExecutor(threadCount, maxQueued);
        threadCounts[i] = threadCount;
        workCount[i] = new WeibullDistribution(2, maxQueued);
        threadCount *= 2;
        maxQueued *= 2;
    }
    long runs = 0;
    long events = 0;
    final TreeSet<Batch> pending = new TreeSet<Batch>();
    final BitSet executorsWithWork = new BitSet(executorCount);
    long until = 0;
    // basic idea is to go through different levels of load on the executor service; initially is all small batches
    // (mostly within max queue size) of very short operations, moving to progressively larger batches
    // (beyond max queued size), and longer operations
    for (float multiplier = 0f; multiplier < 2.01f;) {
        if (System.nanoTime() > until) {
            System.out.println(String.format("Completed %.0fK batches with %.1fM events",
                    runs * 0.001f, events * 0.000001f));
            events = 0;
            until = System.nanoTime() + intervalNanos;
            multiplier += loadIncrement;
            System.out.println(String.format("Running for %ds with load multiplier %.1f",
                    TimeUnit.NANOSECONDS.toSeconds(intervalNanos), multiplier));
        }

        // wait a random amount of time so we submit new tasks in various stages of
        long timeout;
        if (pending.isEmpty())
            timeout = 0;
        else if (Math.random() > 0.98)
            timeout = Long.MAX_VALUE;
        else if (pending.size() == executorCount)
            timeout = pending.first().timeout;
        else
            timeout = (long) (Math.random() * pending.last().timeout);

        while (!pending.isEmpty() && timeout > System.nanoTime()) {
            Batch first = pending.first(); // peek at the head batch without removing it
            boolean complete = false;
            try {
                for (Result result : first.results.descendingSet())
                    result.future.get(timeout - System.nanoTime(), TimeUnit.NANOSECONDS);
                complete = true;
            } catch (TimeoutException e) {
            }
            if (!complete && System.nanoTime() > first.timeout) {
                for (Result result : first.results)
                    if (!result.future.isDone())
                        throw new AssertionError();
                complete = true;
            }
            if (complete) {
                pending.pollFirst();
                executorsWithWork.clear(first.executorIndex);
            }
        }

        // if we've emptied the executors, give all our threads an opportunity to spin down
        if (timeout == Long.MAX_VALUE) {
            try {
                Thread.sleep(10);
            } catch (InterruptedException e) {
            }
        }

        // submit a random batch to the first free executor service
        int executorIndex = executorsWithWork.nextClearBit(0);
        if (executorIndex >= executorCount)
            continue;
        executorsWithWork.set(executorIndex);
        ExecutorService executor = executors[executorIndex];
        TreeSet<Result> results = new TreeSet<Result>();
        int count = (int) (workCount[executorIndex].sample() * multiplier);
        long targetTotalElapsed = 0;
        long start = System.nanoTime();
        long baseTime;
        if (Math.random() > 0.5)
            baseTime = 2 * (long) (workTime.sample() * multiplier);
        else
            baseTime = 0;
        for (int j = 0; j < count; j++) {
            long time;
            if (baseTime == 0)
                time = (long) (workTime.sample() * multiplier);
            else
                time = (long) (baseTime * Math.random());
            if (time < minWorkTime)
                time = minWorkTime;
            if (time > maxWorkTime)
                time = maxWorkTime;
            targetTotalElapsed += time;
            Future<?> future = executor.submit(new WaitTask(time));
            results.add(new Result(future, System.nanoTime() + time));
        }
        long end = start + (long) Math.ceil(targetTotalElapsed / (double) threadCounts[executorIndex])
                + TimeUnit.MILLISECONDS.toNanos(100L);
        long now = System.nanoTime();
        if (runs++ > executorCount && now > end)
            throw new AssertionError();
        events += results.size();
        pending.add(new Batch(results, end, executorIndex));
        // System.out.println(String.format("Submitted batch to executor %d with %d items and %d permitted millis",
        //         executorIndex, count, TimeUnit.NANOSECONDS.toMillis(end - start)));
    }
}
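The test above keeps pending batches in a TreeSet ordered by timeout: first() peeks at the batch with the earliest deadline without removing it, and pollFirst() removes it only once it is known to be complete. A stripped-down sketch of that peek-then-poll pattern (the Task class, its id tie-breaker, and the deadlines are illustrative assumptions, not types from the test):

import java.util.TreeSet;

class Task implements Comparable<Task> {
    final int id; // tie-breaker: a TreeSet silently drops elements that compare equal
    final long deadlineNanos;
    Task(int id, long deadlineNanos) { this.id = id; this.deadlineNanos = deadlineNanos; }
    @Override
    public int compareTo(Task o) {
        int cmp = Long.compare(deadlineNanos, o.deadlineNanos);
        return cmp != 0 ? cmp : Integer.compare(id, o.id);
    }
}

public class DeadlineQueueDemo {
    public static void main(String[] args) throws InterruptedException {
        TreeSet<Task> pending = new TreeSet<Task>();
        pending.add(new Task(1, System.nanoTime() + 5_000_000L));
        pending.add(new Task(2, System.nanoTime() + 1_000_000L));
        Thread.sleep(10); // let both deadlines pass
        // first() peeks at the earliest deadline; pollFirst() removes it once expired
        while (!pending.isEmpty() && pending.first().deadlineNanos <= System.nanoTime()) {
            Task expired = pending.pollFirst();
            System.out.println("task " + expired.id + " expired");
        }
    }
}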
From source file: com.ichi2.libanki.test.SchedTestCase.java

@MediumTest
public void test_deckDue() throws JSONException {
    Collection d = Shared.getEmptyDeck(getInstrumentation().getContext());
    // add a note with default deck
    Note f = d.newNote();
    f.setitem("Front", "one");
    d.addNote(f);
    // and one that's a child
    f = d.newNote();
    f.setitem("Front", "two");
    long default1 = 0;
    f.model().put("did", d.getDecks().id("Default::1"));
    default1 = f.model().getLong("did");
    d.addNote(f);
    // make it a review card
    Card c = f.cards().get(0);
    c.setQueue(2);
    c.setDue(0);
    c.flush();
    // add one more with a new deck
    f = d.newNote();
    f.setitem("Front", "two");
    long foobar = 0;
    f.model().put("did", d.getDecks().id("foo::bar"));
    foobar = f.model().getLong("did");
    d.addNote(f);
    // and one that's a sibling
    f = d.newNote();
    f.setitem("Front", "three");
    long foobaz = 0;
    f.model().put("did", d.getDecks().id("foo::baz"));
    foobaz = f.model().getLong("did");
    d.addNote(f);
    d.reset();
    assertEquals(d.getDecks().getDecks().size(), 5);
    List<Object[]> cnts = d.getSched().deckDueList(Sched.DECK_INFORMATION_SIMPLE_COUNTS);
    // DIFFERENT THAN LIBANKI: deckDueList in AnkiDroid returns [deckname, did, new, lrn, rev]
    assertEquals(cnts.get(0)[0], "Default");
    assertEquals(((Long) cnts.get(0)[1]).longValue(), 1);
    MoreAsserts.assertEquals(
            Shared.toPrimitiveInt(Arrays.copyOfRange(cnts.get(0), 2, 5, Integer[].class)),
            new int[] { 1, 0, 0 });
    assertEquals(cnts.get(1)[0], "Default::1");
    assertEquals(((Long) cnts.get(1)[1]).longValue(), default1);
    MoreAsserts.assertEquals(
            Shared.toPrimitiveInt(Arrays.copyOfRange(cnts.get(1), 2, 5, Integer[].class)),
            new int[] { 0, 0, 1 });
    assertEquals(cnts.get(2)[0], "foo");
    assertEquals(((Long) cnts.get(2)[1]).longValue(), d.getDecks().id("foo"));
    MoreAsserts.assertEquals(
            Shared.toPrimitiveInt(Arrays.copyOfRange(cnts.get(2), 2, 5, Integer[].class)),
            new int[] { 0, 0, 0 });
    assertEquals(cnts.get(3)[0], "foo::bar");
    assertEquals(((Long) cnts.get(3)[1]).longValue(), foobar);
    MoreAsserts.assertEquals(
            Shared.toPrimitiveInt(Arrays.copyOfRange(cnts.get(3), 2, 5, Integer[].class)),
            new int[] { 1, 0, 0 });
    assertEquals(cnts.get(4)[0], "foo::baz");
    assertEquals(((Long) cnts.get(4)[1]).longValue(), foobaz);
    MoreAsserts.assertEquals(
            Shared.toPrimitiveInt(Arrays.copyOfRange(cnts.get(4), 2, 5, Integer[].class)),
            new int[] { 1, 0, 0 });
    TreeSet<Object[]> tree = d.getSched().deckDueTree(Sched.DECK_INFORMATION_SIMPLE_COUNTS);
    assertEquals(((String[]) tree.first()[0])[0], "Default");
    // sum of child and parent
    assertEquals(((Long) tree.first()[1]).longValue(), 1);
    assertEquals(((Integer) tree.first()[2]).intValue(), 1);
    assertEquals(((Integer) tree.first()[4]).intValue(), 1);
    // child count is just review
    // DIFFERENT THAN LIBANKI
    assertEquals(((String[]) tree.higher(tree.first())[0])[0], "Default");
    assertEquals(((String[]) tree.higher(tree.first())[0])[1], "1");
    assertEquals(((Long) tree.higher(tree.first())[1]).longValue(), default1);
    assertEquals(((Integer) tree.higher(tree.first())[2]).intValue(), 0);
    assertEquals(tree.higher(tree.first())[4], 1);
    // code should not fail if a card has an invalid deck
    c.setDid(12345);
    c.flush();
    d.getSched().deckDueList(Sched.DECK_INFORMATION_SIMPLE_COUNTS);
    d.getSched().deckDueTree(Sched.DECK_INFORMATION_SIMPLE_COUNTS);
}
From source file: com.joliciel.talismane.parser.TransitionBasedGlobalLearningParser.java

public List<ParseConfiguration> parseSentence(List<PosTagSequence> posTagSequences,
        FeatureWeightVector weightVector, RankingSolution correctSolution) {
    MONITOR.startTask("parseSentence");
    try {
        long startTime = (new Date()).getTime();
        int maxAnalysisTimeMilliseconds = maxAnalysisTimePerSentence * 1000;
        int minFreeMemoryBytes = minFreeMemory * KILOBYTE;

        TokenSequence tokenSequence = posTagSequences.get(0).getTokenSequence();
        TreeMap<Integer, TreeSet<ParseConfiguration>> heaps = new TreeMap<Integer, TreeSet<ParseConfiguration>>();
        TreeSet<ParseConfiguration> heap0 = new TreeSet<ParseConfiguration>();
        for (PosTagSequence posTagSequence : posTagSequences) {
            // add an initial ParseConfiguration for each postag sequence
            ParseConfiguration initialConfiguration = this.getParserServiceInternal()
                    .getInitialConfiguration(posTagSequence);
            initialConfiguration.setScoringStrategy(new SimpleRankingScoringStrategy());
            initialConfiguration.setRankingScore(0.0);
            heap0.add(initialConfiguration);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Adding initial posTagSequence: " + posTagSequence);
            }
        }
        heaps.put(0, heap0);
        TreeSet<ParseConfiguration> backupHeap = null;
        TreeSet<ParseConfiguration> finalHeap = null;
        while (heaps.size() > 0) {
            Entry<Integer, TreeSet<ParseConfiguration>> heapEntry = heaps.firstEntry();
            TreeSet<ParseConfiguration> currentHeap = heapEntry.getValue();
            int currentHeapIndex = heapEntry.getKey();
            if (LOG.isTraceEnabled()) {
                LOG.trace("##### Polling next heap: " + heapEntry.getKey() + ", size: "
                        + heapEntry.getValue().size());
            }

            boolean finished = false;
            // systematically set the final heap here, just in case we exit "naturally" with no more heaps
            finalHeap = heapEntry.getValue();
            backupHeap = new TreeSet<ParseConfiguration>();

            // we jump out when either (a) all tokens have been attached or (b) we go over the max alloted time
            ParseConfiguration topConf = currentHeap.first(); // peek at the best-scoring configuration
            if (topConf.isTerminal()) {
                LOG.trace("Exiting with terminal heap: " + heapEntry.getKey() + ", size: "
                        + heapEntry.getValue().size());
                finished = true;
            }

            // check if we've gone over alloted time for this sentence
            long analysisTime = (new Date()).getTime() - startTime;
            if (maxAnalysisTimePerSentence > 0 && analysisTime > maxAnalysisTimeMilliseconds) {
                LOG.info("Parse tree analysis took too long for sentence: " + tokenSequence.getText());
                LOG.info("Breaking out after " + maxAnalysisTimePerSentence + " seconds.");
                finished = true;
            }

            // check if we've enough memory to process this sentence
            if (minFreeMemory > 0) {
                long freeMemory = Runtime.getRuntime().freeMemory();
                if (freeMemory < minFreeMemoryBytes) {
                    LOG.info("Not enough memory left to parse sentence: " + tokenSequence.getText());
                    LOG.info("Min free memory (bytes):" + minFreeMemoryBytes);
                    LOG.info("Current free memory (bytes): " + freeMemory);
                    finished = true;
                }
            }

            // check if any of the remaining top-N solutions on any heap can lead to the correct solution
            if (correctSolution != null) {
                boolean canReachCorrectSolution = false;
                for (TreeSet<ParseConfiguration> heap : heaps.values()) {
                    int j = 1;
                    for (ParseConfiguration solution : heap) {
                        if (j > beamWidth)
                            break;
                        if (solution.canReach(correctSolution)) {
                            canReachCorrectSolution = true;
                            break;
                        }
                        j++;
                    }
                    if (canReachCorrectSolution)
                        break;
                }
                if (!canReachCorrectSolution) {
                    LOG.debug("None of the solutions on the heap can reach the gold solution. Exiting.");
                    finished = true;
                }
            }

            if (finished) {
                // combine any remaining heaps
                for (TreeSet<ParseConfiguration> heap : heaps.values()) {
                    if (finalHeap != heap) {
                        finalHeap.addAll(heap);
                    }
                }
                break;
            }

            // remove heap from set of heaps
            heapEntry = heaps.pollFirstEntry();

            // limit the breadth to K
            int maxSolutions = currentHeap.size() > this.beamWidth ? this.beamWidth : currentHeap.size();

            int j = 0;
            while (currentHeap.size() > 0) {
                ParseConfiguration history = currentHeap.pollFirst();
                backupHeap.add(history);
                if (LOG.isTraceEnabled()) {
                    LOG.trace("### Next configuration on heap " + heapEntry.getKey() + ":");
                    LOG.trace(history.toString());
                    LOG.trace("Score: " + df.format(history.getScore()));
                    LOG.trace(history.getPosTagSequence());
                }

                Set<Transition> transitions = new HashSet<Transition>();

                // test the positive rules on the current configuration
                boolean ruleApplied = false;
                if (parserPositiveRules != null) {
                    MONITOR.startTask("check rules");
                    try {
                        for (ParserRule rule : parserPositiveRules) {
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Checking rule: " + rule.getCondition().getName());
                            }
                            RuntimeEnvironment env = this.featureService.getRuntimeEnvironment();
                            FeatureResult<Boolean> ruleResult = rule.getCondition().check(history, env);
                            if (ruleResult != null && ruleResult.getOutcome()) {
                                transitions.add(rule.getTransition());
                                ruleApplied = true;
                                if (LOG.isTraceEnabled()) {
                                    LOG.trace("Rule applies. Setting transition to: "
                                            + rule.getTransition().getCode());
                                }
                                if (!rule.getTransition().checkPreconditions(history)) {
                                    LOG.error("Cannot apply rule, preconditions not met.");
                                    ruleApplied = false;
                                }
                                break;
                            }
                        }
                    } finally {
                        MONITOR.endTask("check rules");
                    }
                }

                if (!ruleApplied) {
                    transitions = parsingConstrainer.getPossibleTransitions(history);
                    Set<Transition> eliminatedTransitions = new HashSet<Transition>();
                    for (Transition transition : transitions) {
                        if (!transition.checkPreconditions(history)) {
                            eliminatedTransitions.add(transition);
                        }
                    }
                    transitions.removeAll(eliminatedTransitions);

                    // apply the negative rules
                    eliminatedTransitions = new HashSet<Transition>();
                    if (parserNegativeRules != null) {
                        MONITOR.startTask("check negative rules");
                        try {
                            for (ParserRule rule : parserNegativeRules) {
                                if (LOG.isTraceEnabled()) {
                                    LOG.trace("Checking negative rule: " + rule.getCondition().getName());
                                }
                                RuntimeEnvironment env = this.featureService.getRuntimeEnvironment();
                                FeatureResult<Boolean> ruleResult = rule.getCondition().check(history, env);
                                if (ruleResult != null && ruleResult.getOutcome()) {
                                    eliminatedTransitions.add(rule.getTransition());
                                    if (LOG.isTraceEnabled()) {
                                        LOG.debug("Rule applies. Eliminating transition: "
                                                + rule.getTransition().getCode());
                                    }
                                }
                            }
                            if (eliminatedTransitions.size() == transitions.size()) {
                                LOG.debug("All transitions eliminated! Restoring original transitions.");
                            } else {
                                transitions.removeAll(eliminatedTransitions);
                            }
                        } finally {
                            MONITOR.endTask("check negative rules");
                        }
                    }
                } // has a positive rule been applied?

                if (transitions.size() == 0) {
                    // just in case we run out of both heaps and analyses, we build this backup heap
                    backupHeap.add(history);
                    if (LOG.isTraceEnabled())
                        LOG.trace("No transitions could be applied: not counting this solution as part of the beam");
                } else {
                    // up the counter, since we will count this solution towards the heap
                    j++;
                    // add solutions to the heap, one per valid transition
                    MONITOR.startTask("heap sort");
                    try {
                        Map<Transition, Double> deltaScorePerTransition = new HashMap<Transition, Double>();
                        double absoluteMax = 1;
                        for (Transition transition : transitions) {
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Applying transition: " + transition.getCode());
                            }
                            ParseConfiguration configuration = this.parserServiceInternal
                                    .getConfiguration(history);
                            transition.apply(configuration);
                            configuration.setRankingScore(history.getRankingScore());
                            configuration.getIncrementalFeatureResults()
                                    .addAll(history.getIncrementalFeatureResults());

                            // test the features on the new configuration
                            double scoreDelta = 0.0;
                            MONITOR.startTask("feature analyse");
                            List<FeatureResult<?>> featureResults = new ArrayList<FeatureResult<?>>();
                            try {
                                for (ParseConfigurationFeature<?> feature : this.parseFeatures) {
                                    MONITOR.startTask(feature.getName());
                                    try {
                                        RuntimeEnvironment env = this.featureService.getRuntimeEnvironment();
                                        FeatureResult<?> featureResult = feature.check(configuration, env);
                                        if (featureResult != null) {
                                            featureResults.add(featureResult);
                                            double weight = weightVector.getWeight(featureResult);
                                            scoreDelta += weight;
                                            if (LOG.isTraceEnabled()) {
                                                LOG.trace(featureResult.toString() + " = " + weight);
                                            }
                                        }
                                    } finally {
                                        MONITOR.endTask(feature.getName());
                                    }
                                }
                                configuration.getIncrementalFeatureResults().add(featureResults);
                                if (LOG.isTraceEnabled()) {
                                    LOG.trace("Score = " + configuration.getRankingScore() + " + " + scoreDelta
                                            + " = " + (configuration.getRankingScore() + scoreDelta));
                                }
                                configuration.setRankingScore(configuration.getRankingScore() + scoreDelta);
                                deltaScorePerTransition.put(transition, scoreDelta);
                                if (Math.abs(scoreDelta) > absoluteMax)
                                    absoluteMax = Math.abs(scoreDelta);
                            } finally {
                                MONITOR.endTask("feature analyse");
                            }

                            int nextHeapIndex = parseComparisonStrategy.getComparisonIndex(configuration) * 1000;
                            while (nextHeapIndex <= currentHeapIndex)
                                nextHeapIndex++;

                            TreeSet<ParseConfiguration> nextHeap = heaps.get(nextHeapIndex);
                            if (nextHeap == null) {
                                nextHeap = new TreeSet<ParseConfiguration>();
                                heaps.put(nextHeapIndex, nextHeap);
                                if (LOG.isTraceEnabled())
                                    LOG.trace("Created heap with index: " + nextHeapIndex);
                            }
                            nextHeap.add(configuration);
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Added configuration with score " + configuration.getScore()
                                        + " to heap: " + nextHeapIndex + ", total size: " + nextHeap.size());
                            }

                            configuration.clearMemory();
                        } // next transition

                        // Create a probability distribution of transitions
                        // normalise probabilities for each transition via normalised exponential
                        // e^(x/absmax)/sum(e^(x/absmax))
                        // where x/absmax is in [-1,1]
                        // e^(x/absmax) is in [1/e,e]
                        double total = 0.0;
                        for (Transition transition : deltaScorePerTransition.keySet()) {
                            double deltaScore = deltaScorePerTransition.get(transition);
                            deltaScore = Math.exp(deltaScore / absoluteMax);
                            deltaScorePerTransition.put(transition, deltaScore);
                            total += deltaScore;
                        }
                        for (Transition transition : deltaScorePerTransition.keySet()) {
                            double probability = deltaScorePerTransition.get(transition);
                            probability /= total;
                            Decision<Transition> decision = machineLearningService.createDecision(transition,
                                    probability);
                            transition.setDecision(decision);
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Transition: " + transition.getCode() + ", Prob: " + probability);
                            }
                        }
                    } finally {
                        MONITOR.endTask("heap sort");
                    }
                } // have we any transitions?

                // beam width test
                if (j == maxSolutions)
                    break;
            } // next history
        } // next atomic index

        // return the best sequences on the heap
        List<ParseConfiguration> bestConfigurations = new ArrayList<ParseConfiguration>();
        int i = 0;
        if (finalHeap.isEmpty())
            finalHeap = backupHeap;
        while (!finalHeap.isEmpty()) {
            bestConfigurations.add(finalHeap.pollFirst());
            i++;
            if (i >= this.getBeamWidth())
                break;
        }
        if (LOG.isDebugEnabled()) {
            if (correctSolution != null) {
                LOG.debug("Gold transitions: " + correctSolution.getIncrementalOutcomes());
            }
            for (ParseConfiguration finalConfiguration : bestConfigurations) {
                LOG.debug(df.format(finalConfiguration.getScore()) + ": " + finalConfiguration.toString());
                LOG.debug("Pos tag sequence: " + finalConfiguration.getPosTagSequence());
                LOG.debug("Transitions: " + finalConfiguration.getTransitions());
                if (LOG.isTraceEnabled()) {
                    StringBuilder sb = new StringBuilder();
                    sb.append(" * PosTag sequence score ");
                    sb.append(df.format(finalConfiguration.getPosTagSequence().getScore()));
                    sb.append(" = ");
                    for (PosTaggedToken posTaggedToken : finalConfiguration.getPosTagSequence()) {
                        sb.append(" * ");
                        sb.append(df.format(posTaggedToken.getDecision().getProbability()));
                    }
                    sb.append(" root ");
                    sb.append(finalConfiguration.getPosTagSequence().size());
                    LOG.trace(sb.toString());

                    sb = new StringBuilder();
                    sb.append(" * Token sequence score = ");
                    sb.append(df.format(finalConfiguration.getPosTagSequence().getTokenSequence().getScore()));
                    LOG.trace(sb.toString());
                }
            }
        }
        return bestConfigurations;
    } finally {
        MONITOR.endTask("parseSentence");
    }
}
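The parser above keeps each beam as a TreeSet<ParseConfiguration> whose ordering puts the best-scoring configuration first, so currentHeap.first() peeks the top candidate and pollFirst() pops it. A minimal sketch of such a bounded beam (the Scored class and its comparator are illustrative assumptions; note the id tie-break, since a TreeSet drops elements that compare equal):

import java.util.Comparator;
import java.util.TreeSet;

public class BeamDemo {
    static class Scored {
        final int id;
        final double score;
        Scored(int id, double score) { this.id = id; this.score = score; }
    }

    public static void main(String[] args) {
        int beamWidth = 2;
        // order by descending score, breaking ties on id so equal scores are not dropped
        Comparator<Scored> byScoreDesc = new Comparator<Scored>() {
            public int compare(Scored a, Scored b) {
                int cmp = Double.compare(b.score, a.score);
                return cmp != 0 ? cmp : Integer.compare(a.id, b.id);
            }
        };
        TreeSet<Scored> beam = new TreeSet<Scored>(byScoreDesc);
        beam.add(new Scored(1, 0.3));
        beam.add(new Scored(2, 0.9));
        beam.add(new Scored(3, 0.5));
        while (beam.size() > beamWidth)
            beam.pollLast(); // evict the worst-scoring candidate
        System.out.println(beam.first().id); // 2 -- first() peeks the best without removing it
    }
}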
From source file: org.unitime.timetable.onlinesectioning.OnlineSectioningServerImpl.java

@Override
public CourseInfo getCourseInfo(String course) {
    iLock.readLock().lock();
    try {
        if (course.indexOf('-') >= 0) {
            String courseName = course.substring(0, course.indexOf('-')).trim();
            String title = course.substring(course.indexOf('-') + 1).trim();
            TreeSet<CourseInfo> infos = iCourseForName.get(courseName.toLowerCase());
            if (infos != null && !infos.isEmpty())
                for (CourseInfo info : infos)
                    if (title.equalsIgnoreCase(info.getTitle()))
                        return info;
            return null;
        } else {
            TreeSet<CourseInfo> infos = iCourseForName.get(course.toLowerCase());
            if (infos != null && !infos.isEmpty())
                return infos.first(); // no title given: return the lowest course by the set's ordering
            return null;
        }
    } finally {
        iLock.readLock().unlock();
    }
}
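When no title is given, infos.first() above returns one deterministic course from the set of courses sharing a name, as defined by CourseInfo's ordering. A toy sketch of the same keyed-TreeSet lookup (the map contents and course names are illustrative):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;

public class CourseLookupDemo {
    public static void main(String[] args) {
        Map<String, TreeSet<String>> coursesByName = new HashMap<String, TreeSet<String>>();
        coursesByName.computeIfAbsent("math 101", k -> new TreeSet<String>())
                .add("MATH 101 - Calculus II");
        coursesByName.get("math 101").add("MATH 101 - Calculus I");

        // case-insensitive lookup; first() picks the lowest title alphabetically
        TreeSet<String> matches = coursesByName.get("MATH 101".toLowerCase());
        if (matches != null && !matches.isEmpty()) {
            System.out.println(matches.first()); // MATH 101 - Calculus I
        }
    }
}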
From source file: org.geotools.styling.css.CssTranslator.java

/**
 * Organizes the rules by ascending z-index.
 *
 * @param rules
 * @return
 */
private Map<Integer, List<CssRule>> organizeByZIndex(List<CssRule> rules) {
    TreeSet<Integer> indexes = getZIndexesForRules(rules);
    Map<Integer, List<CssRule>> result = new HashMap<>();
    if (indexes.size() == 1) {
        result.put(indexes.first(), rules); // single z-index: first() is the only entry
    } else {
        // now for each level extract the sub-rules attached to that level,
        // considering that properties not associated to a level, bind to all levels
        int symbolizerPropertyCount = 0;
        for (Integer index : indexes) {
            List<CssRule> rulesByIndex = new ArrayList<>();
            for (CssRule rule : rules) {
                CssRule subRule = rule.getSubRuleByZIndex(index);
                if (subRule != null) {
                    if (subRule.hasSymbolizerProperty()) {
                        symbolizerPropertyCount++;
                    }
                    rulesByIndex.add(subRule);
                }
            }
            // do we have at least one property that will trigger the generation
            // of a symbolizer in here?
            if (symbolizerPropertyCount > 0) {
                result.put(index, rulesByIndex);
            }
        }
    }
    return result;
}
From source file: org.sleuthkit.autopsy.keywordsearch.AccountsText.java

/**
 * Initialize this object with information about which pages/chunks have
 * hits. Multiple calls will not change the initial results.
 */
synchronized private void loadPageInfo() {
    if (isPageInfoLoaded) {
        return;
    }
    if (chunkId != null) { //if a chunk is specified, only show that chunk/page
        this.numberPagesForFile = 1;
        this.currentPage = chunkId;
        this.numberOfHitsPerPage.put(chunkId, 0);
        this.pages.add(chunkId);
        this.currentHitPerPage.put(chunkId, 0);
    } else {
        try {
            this.numberPagesForFile = solrServer.queryNumFileChunks(this.solrObjectId);
        } catch (KeywordSearchModuleException | NoOpenCoreException ex) {
            LOGGER.log(Level.WARNING, "Could not get number pages for content " + this.solrDocumentId, ex); //NON-NLS
            return;
        }

        //if has chunks, get pages with hits
        TreeSet<Integer> sortedPagesWithHits = new TreeSet<>();
        SolrQuery q = new SolrQuery();
        q.setShowDebugInfo(DEBUG); //debug
        q.setQuery(queryString);
        q.setFields(Server.Schema.ID.toString()); //for this case we only need the document ids
        q.addFilterQuery(
                Server.Schema.ID.toString() + ":" + this.solrObjectId + Server.CHUNK_ID_SEPARATOR + "*");

        try {
            QueryResponse response = solrServer.query(q, METHOD.POST);
            for (SolrDocument resultDoc : response.getResults()) {
                final String resultDocumentId = resultDoc.getFieldValue(Server.Schema.ID.toString()).toString();
                // Put the solr chunk id in the map
                String resultChunkID = StringUtils.substringAfter(resultDocumentId, Server.CHUNK_ID_SEPARATOR);
                if (StringUtils.isNotBlank(resultChunkID)) {
                    sortedPagesWithHits.add(Integer.parseInt(resultChunkID));
                } else {
                    sortedPagesWithHits.add(0);
                }
            }
        } catch (KeywordSearchModuleException | NoOpenCoreException | NumberFormatException ex) {
            LOGGER.log(Level.WARNING, "Error executing Solr highlighting query: " + keywords, ex); //NON-NLS
        }

        //set page to first page having highlights
        if (sortedPagesWithHits.isEmpty()) {
            this.currentPage = 0;
        } else {
            this.currentPage = sortedPagesWithHits.first(); // lowest page number with a hit
        }

        for (Integer page : sortedPagesWithHits) {
            numberOfHitsPerPage.put(page, 0); //unknown number of matches in the page
            pages.add(page);
            currentHitPerPage.put(page, 0); //set current hit to 0th
        }
    }

    isPageInfoLoaded = true;
}
From source file: org.unitime.timetable.action.PersonalizedExamReportAction.java

protected long getMeetingComparable(Class_ clazz) {
    Assignment assignment = clazz.getCommittedAssignment();
    TreeSet meetings = (clazz.getEvent() == null ? null : new TreeSet(clazz.getEvent().getMeetings()));
    if (meetings != null && !meetings.isEmpty()) {
        // first() is the earliest meeting in the sorted set
        return ((Meeting) meetings.first()).getMeetingDate().getTime();
    } else if (assignment != null) {
        return assignment.getTimeLocation().getStartSlot();
    }
    return -1;
}
From source file: org.apache.bookkeeper.stream.storage.impl.sc.DefaultStorageContainerController.java

@Override
public ClusterAssignmentData computeIdealState(ClusterMetadata clusterMetadata,
        ClusterAssignmentData currentState, Set<BookieSocketAddress> currentCluster) {

    if (currentCluster.isEmpty()) {
        log.info("Current cluster is empty. No alive server is found.");
        return currentState;
    }

    // 1. get current server assignments
    Map<BookieSocketAddress, Set<Long>> currentServerAssignments;
    try {
        currentServerAssignments = currentState.getServersMap().entrySet().stream()
                .collect(Collectors.toMap(e1 -> {
                    try {
                        return new BookieSocketAddress(e1.getKey());
                    } catch (UnknownHostException uhe) {
                        log.error("Invalid cluster ");
                        throw new UncheckedExecutionException(
                                "Invalid server found in current assignment map" + e1.getKey(), uhe);
                    }
                }, e2 -> e2.getValue().getContainersList().stream().collect(Collectors.toSet())));
    } catch (UncheckedExecutionException uee) {
        log.warn("Invalid cluster assignment data is found : {} - {}. Recompute assignment from empty state",
                currentState, uee.getCause().getMessage());
        currentServerAssignments = Maps.newHashMap();
    }
    Set<BookieSocketAddress> currentServersAssigned = currentServerAssignments.keySet();

    // 2. if no servers is assigned, initialize the ideal state
    if (currentServersAssigned.isEmpty()) {
        return initializeIdealState(clusterMetadata, currentCluster);
    }

    // 3. get the cluster diffs
    Set<BookieSocketAddress> serversAdded = Sets.difference(currentCluster, currentServersAssigned)
            .immutableCopy();
    Set<BookieSocketAddress> serversRemoved = Sets.difference(currentServersAssigned, currentCluster)
            .immutableCopy();

    if (serversAdded.isEmpty() && serversRemoved.isEmpty()) {
        // cluster is unchanged, assuming the current state is ideal, no re-assignment is required.
        return currentState;
    }

    log.info("Storage container controller detects cluster changed:\n"
            + "\t {} servers added: {}\n\t {} servers removed: {}",
            serversAdded.size(), serversAdded, serversRemoved.size(), serversRemoved);

    // 4. compute the containers that owned by servers removed. these containers are needed to be reassigned.
    Set<Long> containersToReassign = currentServerAssignments.entrySet().stream()
            .filter(serverEntry -> !currentCluster.contains(serverEntry.getKey()))
            .flatMap(serverEntry -> serverEntry.getValue().stream()).collect(Collectors.toSet());

    // 5. use an ordered set as priority deque to sort the servers by the number of assigned containers
    TreeSet<Pair<BookieSocketAddress, LinkedList<Long>>> assignmentQueue =
            new TreeSet<>(new ServerAssignmentDataComparator());
    for (Map.Entry<BookieSocketAddress, Set<Long>> entry : currentServerAssignments.entrySet()) {
        BookieSocketAddress host = entry.getKey();

        if (!currentCluster.contains(host)) {
            if (log.isTraceEnabled()) {
                log.trace("Host {} is not in current cluster anymore", host);
            }
            continue;
        } else {
            if (log.isTraceEnabled()) {
                log.trace("Adding host {} to assignment queue", host);
            }
            assignmentQueue.add(Pair.of(host, Lists.newLinkedList(entry.getValue())));
        }
    }

    // 6. add new servers
    for (BookieSocketAddress server : serversAdded) {
        assignmentQueue.add(Pair.of(server, Lists.newLinkedList()));
    }

    // 7. assign the containers that are needed to be reassigned.
    for (Long containerId : containersToReassign) {
        Pair<BookieSocketAddress, LinkedList<Long>> leastLoadedServer = assignmentQueue.pollFirst();
        leastLoadedServer.getValue().add(containerId);
        assignmentQueue.add(leastLoadedServer);
    }

    // 8. rebalance the containers if needed
    int diffAllowed;
    if (assignmentQueue.size() > clusterMetadata.getNumStorageContainers()) {
        diffAllowed = 1;
    } else {
        diffAllowed = clusterMetadata.getNumStorageContainers() % assignmentQueue.size() == 0 ? 0 : 1;
    }

    Pair<BookieSocketAddress, LinkedList<Long>> leastLoaded = assignmentQueue.first();
    Pair<BookieSocketAddress, LinkedList<Long>> mostLoaded = assignmentQueue.last();
    while (mostLoaded.getValue().size() - leastLoaded.getValue().size() > diffAllowed) {
        leastLoaded = assignmentQueue.pollFirst();
        mostLoaded = assignmentQueue.pollLast();

        // move container from mostLoaded to leastLoaded
        Long containerId = mostLoaded.getValue().removeFirst();
        // add the container to the end to avoid balancing this container again.
        leastLoaded.getValue().addLast(containerId);

        assignmentQueue.add(leastLoaded);
        assignmentQueue.add(mostLoaded);

        leastLoaded = assignmentQueue.first();
        mostLoaded = assignmentQueue.last();
    }

    // 9. the new ideal state is computed, finalize it
    Map<String, ServerAssignmentData> newAssignmentMap = Maps.newHashMap();
    assignmentQueue.forEach(assignment -> newAssignmentMap.put(assignment.getKey().toString(),
            ServerAssignmentData.newBuilder().addAllContainers(assignment.getValue()).build()));
    return ClusterAssignmentData.newBuilder().putAllServers(newAssignmentMap).build();
}
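The controller above uses a TreeSet with a custom comparator as a double-ended priority queue: first() is the least-loaded server, last() the most-loaded, and entries are polled and re-added around each mutation because a TreeSet does not re-sort elements whose sort keys change in place. A self-contained sketch of that rebalancing loop (the Server class is an illustrative stand-in for the Pair used above):

import java.util.Comparator;
import java.util.LinkedList;
import java.util.TreeSet;

public class RebalanceDemo {
    static class Server {
        final String name;
        final LinkedList<Long> containers = new LinkedList<Long>();
        Server(String name) { this.name = name; }
    }

    public static void main(String[] args) {
        // least-loaded first; tie-break on name, since a TreeSet drops "equal" entries
        Comparator<Server> byLoad = new Comparator<Server>() {
            public int compare(Server a, Server b) {
                int cmp = Integer.compare(a.containers.size(), b.containers.size());
                return cmp != 0 ? cmp : a.name.compareTo(b.name);
            }
        };
        TreeSet<Server> queue = new TreeSet<Server>(byLoad);
        Server a = new Server("a");
        a.containers.add(1L); a.containers.add(2L); a.containers.add(3L);
        Server b = new Server("b");
        b.containers.add(4L);
        queue.add(a);
        queue.add(b);

        while (queue.last().containers.size() - queue.first().containers.size() > 1) {
            // poll before mutating, then re-add, so ordering stays correct
            Server least = queue.pollFirst();
            Server most = queue.pollLast();
            least.containers.addLast(most.containers.removeFirst());
            queue.add(least);
            queue.add(most);
        }
        System.out.println(queue.first().name); // the least-loaded server after rebalancing
    }
}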
From source file: org.unitime.timetable.action.PersonalizedExamReportAction.java

protected long getMeetingComparable(ExamSectionInfo section) {
    if (section.getOwner().getOwnerObject() instanceof Class_) {
        Class_ clazz = (Class_) section.getOwner().getOwnerObject();
        Assignment assignment = clazz.getCommittedAssignment();
        TreeSet meetings = (clazz.getEvent() == null ? null : new TreeSet(clazz.getEvent().getMeetings()));
        if (meetings != null && !meetings.isEmpty()) {
            // first() is the earliest meeting in the sorted set
            return ((Meeting) meetings.first()).getMeetingDate().getTime();
        } else if (assignment != null) {
            return assignment.getTimeLocation().getStartSlot();
        }
    }
    return -1;
}
From source file: org.unitime.timetable.action.PersonalizedExamReportAction.java

protected String getMeetingTime(ExamSectionInfo section) {
    String meetingTime = "";
    if (section.getOwner().getOwnerObject() instanceof Class_) {
        Formats.Format<Date> dpf = Formats.getDateFormat(Formats.Pattern.DATE_EVENT_SHORT);
        Class_ clazz = (Class_) section.getOwner().getOwnerObject();
        Assignment assignment = clazz.getCommittedAssignment();
        TreeSet meetings = (clazz.getEvent() == null ? null : new TreeSet(clazz.getEvent().getMeetings()));
        if (meetings != null && !meetings.isEmpty()) {
            // first()/last() bound the date range of the sorted meetings
            Date first = ((Meeting) meetings.first()).getMeetingDate();
            Date last = ((Meeting) meetings.last()).getMeetingDate();
            meetingTime += dpf.format(first) + " - " + dpf.format(last);
        } else if (assignment != null && assignment.getDatePattern() != null) {
            DatePattern dp = assignment.getDatePattern();
            if (dp != null && !dp.isDefault()) {
                if (dp.getType().intValue() == DatePattern.sTypeAlternate)
                    meetingTime += dp.getName();
                else {
                    meetingTime += dpf.format(dp.getStartDate()) + " - " + dpf.format(dp.getEndDate());
                }
            }
        }
        if (meetings != null && !meetings.isEmpty()) {
            int dayCode = getDaysCode(meetings);
            String days = "";
            for (int i = 0; i < Constants.DAY_CODES.length; i++)
                if ((dayCode & Constants.DAY_CODES[i]) != 0)
                    days += CONSTANTS.shortDays()[i];
            meetingTime += " " + days;
            Meeting first = (Meeting) meetings.first();
            meetingTime += " " + first.startTime() + " - " + first.stopTime();
        } else if (assignment != null) {
            TimeLocation t = assignment.getTimeLocation();
            meetingTime += " " + t.getDayHeader() + " " + t.getStartTimeHeader(CONSTANTS.useAmPm()) + " - "
                    + t.getEndTimeHeader(CONSTANTS.useAmPm());
        }
    }
    return meetingTime;
}
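Because the meetings are held in a sorted set, first() and last() above yield the start and end of the date range directly. A minimal sketch of the same range pattern (the dates are illustrative):

import java.time.LocalDate;
import java.util.TreeSet;

public class DateRangeDemo {
    public static void main(String[] args) {
        TreeSet<LocalDate> meetings = new TreeSet<LocalDate>();
        meetings.add(LocalDate.of(2024, 3, 18));
        meetings.add(LocalDate.of(2024, 1, 8));
        meetings.add(LocalDate.of(2024, 5, 2));
        // first() is the earliest date, last() the latest
        System.out.println(meetings.first() + " - " + meetings.last()); // 2024-01-08 - 2024-05-02
    }
}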