List of usage examples for java.util.SortedSet.clear()
void clear();
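Removes all of the elements from the set (the method is inherited from java.util.Collection and is an optional operation, so unmodifiable sets throw UnsupportedOperationException). Before the real-world excerpts below, here is a minimal self-contained sketch of typical calls; the class and variable names are illustrative:

import java.util.SortedSet;
import java.util.TreeSet;

public class SortedSetClearDemo {
    public static void main(String[] args) {
        // TreeSet is the most common SortedSet implementation.
        SortedSet<String> names = new TreeSet<>();
        names.add("alice");
        names.add("bob");
        names.add("carol");

        // Clearing a range view removes only that range from the backing set.
        names.subSet("alice", "carol").clear(); // removes "alice" and "bob"
        System.out.println(names); // [carol]

        // clear() removes every element; the now-empty set remains usable.
        names.clear();
        System.out.println(names.isEmpty()); // true
    }
}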
From source file: org.commoncrawl.util.ArcFileWriter.java

private void preWriteRecordTasks(int headerBytesLength, int contentBytesLength, String contentType)
        throws IOException {
    checkSize(headerBytesLength, contentBytesLength);

    // update stats
    getActiveFile()._totalHeaderBytesWritten += headerBytesLength;
    getActiveFile()._totalContentBytesWritten += contentBytesLength;
    getActiveFile()._itemsWritten++;

    // keep a single running count per mime type: replace the old count with count + 1
    SortedSet<Integer> counts = _mimeTypeCounts.get(contentType);
    if (counts.size() == 0) {
        counts.add(1);
    } else {
        int count = counts.first() + 1;
        counts.clear();
        counts.add(count);
    }

    // record start position of this item
    _lastItemPos = getActiveFile().getFileSize();

    // Wrap stream in GZIP Writer.
    // The below construction immediately writes the GZIP 'default'
    // header out on the underlying stream.
    _out = new CompressedStream(_out);
}
From source file: org.orcid.core.adapter.impl.Jaxb2JpaAdapterImpl.java

private SortedSet<WorkEntity> getWorkEntities(ProfileEntity profileEntity, OrcidWorks orcidWorks) {
    SortedSet<WorkEntity> existingWorkEntities = profileEntity.getWorks();
    Map<String, WorkEntity> existingWorkEntitiesMap = createWorkEntitiesMap(existingWorkEntities);
    SortedSet<WorkEntity> workEntities = null;
    if (existingWorkEntities == null) {
        workEntities = new TreeSet<WorkEntity>();
    } else {
        // To allow for orphan deletion
        existingWorkEntities.clear();
        workEntities = existingWorkEntities;
    }
    if (orcidWorks != null && orcidWorks.getOrcidWork() != null && !orcidWorks.getOrcidWork().isEmpty()) {
        List<OrcidWork> orcidWorkList = orcidWorks.getOrcidWork();
        for (OrcidWork orcidWork : orcidWorkList) {
            WorkEntity workEntity = getWorkEntity(orcidWork, existingWorkEntitiesMap.get(orcidWork.getPutCode()));
            if (workEntity != null) {
                workEntity.setProfile(profileEntity);
                workEntities.add(workEntity);
            }
        }
    }
    return workEntities;
}
From source file: org.orcid.core.adapter.impl.Jaxb2JpaAdapterImpl.java

private void setKeywords(ProfileEntity profileEntity, Keywords keywords) {
    SortedSet<ProfileKeywordEntity> profileKeywordEntities = null;
    SortedSet<ProfileKeywordEntity> existingProfileKeywordEntities = profileEntity.getKeywords();
    Map<String, ProfileKeywordEntity> existingProfileKeywordEntitiesMap = createProfileKeyworkEntitiesMap(
            existingProfileKeywordEntities);
    if (existingProfileKeywordEntities == null) {
        profileKeywordEntities = new TreeSet<>();
    } else {
        // To allow for orphan deletion
        existingProfileKeywordEntities.clear();
        profileKeywordEntities = existingProfileKeywordEntities;
    }
    if (keywords != null) {
        profileEntity.setKeywordsVisibility(keywords.getVisibility());
        List<Keyword> keywordList = keywords.getKeyword();
        if (keywordList != null && !keywordList.isEmpty()) {
            for (Keyword keyword : keywordList) {
                if (StringUtils.isNotBlank(keyword.getContent())) {
                    profileKeywordEntities.add(
                            getProfileKeywordEntity(keyword, profileEntity, existingProfileKeywordEntitiesMap));
                }
            }
        }
    }
    profileEntity.setKeywords(profileKeywordEntities);
}
From source file: org.ngrinder.perftest.service.PerfTestService.java

/**
 * Delete test {@link PerfTest} by user and test id.
 *
 * @param user user
 * @param id   test id
 */
@Transactional
public void delete(User user, long id) {
    PerfTest perfTest = getOne(id);
    // If the deletion was not requested by the user who started the job, it's a wrong request.
    if (!hasPermission(perfTest, user, Permission.DELETE_TEST_OF_OTHER)) {
        return;
    }
    // Clear the tag associations before deleting the test itself.
    SortedSet<Tag> tags = perfTest.getTags();
    if (tags != null) {
        tags.clear();
    }
    perfTestRepository.save(perfTest);
    perfTestRepository.delete(perfTest);
    deletePerfTestDirectory(perfTest);
}
From source file: org.apache.accumulo.test.functional.RegexGroupBalanceIT.java

@Test(timeout = 120000)
public void testBalancing() throws Exception {
    Connector conn = getConnector();

    String tablename = getUniqueNames(1)[0];
    conn.tableOperations().create(tablename);

    SortedSet<Text> splits = new TreeSet<>();
    splits.add(new Text("01a"));
    splits.add(new Text("01m"));
    splits.add(new Text("01z"));
    splits.add(new Text("02a"));
    splits.add(new Text("02f"));
    splits.add(new Text("02r"));
    splits.add(new Text("02z"));
    splits.add(new Text("03a"));
    splits.add(new Text("03f"));
    splits.add(new Text("03m"));
    splits.add(new Text("03r"));

    conn.tableOperations().setProperty(tablename, RegexGroupBalancer.REGEX_PROPERTY, "(\\d\\d).*");
    conn.tableOperations().setProperty(tablename, RegexGroupBalancer.DEFAUT_GROUP_PROPERTY, "03");
    conn.tableOperations().setProperty(tablename, RegexGroupBalancer.WAIT_TIME_PROPERTY, "50ms");
    conn.tableOperations().setProperty(tablename, Property.TABLE_LOAD_BALANCER.getKey(),
            RegexGroupBalancer.class.getName());

    conn.tableOperations().addSplits(tablename, splits);

    while (true) {
        Thread.sleep(250);
        Table<String, String, MutableInt> groupLocationCounts = getCounts(conn, tablename);
        boolean allGood = true;
        allGood &= checkGroup(groupLocationCounts, "01", 1, 1, 3);
        allGood &= checkGroup(groupLocationCounts, "02", 1, 1, 4);
        allGood &= checkGroup(groupLocationCounts, "03", 1, 2, 4);
        allGood &= checkTabletsPerTserver(groupLocationCounts, 3, 3, 4);
        if (allGood) {
            break;
        }
    }

    // reuse the same set for a second batch of splits
    splits.clear();
    splits.add(new Text("01b"));
    splits.add(new Text("01f"));
    splits.add(new Text("01l"));
    splits.add(new Text("01r"));
    conn.tableOperations().addSplits(tablename, splits);

    while (true) {
        Thread.sleep(250);
        Table<String, String, MutableInt> groupLocationCounts = getCounts(conn, tablename);
        boolean allGood = true;
        allGood &= checkGroup(groupLocationCounts, "01", 1, 2, 4);
        allGood &= checkGroup(groupLocationCounts, "02", 1, 1, 4);
        allGood &= checkGroup(groupLocationCounts, "03", 1, 2, 4);
        allGood &= checkTabletsPerTserver(groupLocationCounts, 4, 4, 4);
        if (allGood) {
            break;
        }
    }

    // merge group 01 down to one tablet
    conn.tableOperations().merge(tablename, null, new Text("01z"));

    while (true) {
        Thread.sleep(250);
        Table<String, String, MutableInt> groupLocationCounts = getCounts(conn, tablename);
        boolean allGood = true;
        allGood &= checkGroup(groupLocationCounts, "01", 1, 1, 1);
        allGood &= checkGroup(groupLocationCounts, "02", 1, 1, 4);
        allGood &= checkGroup(groupLocationCounts, "03", 1, 2, 4);
        allGood &= checkTabletsPerTserver(groupLocationCounts, 2, 3, 4);
        if (allGood) {
            break;
        }
    }
}
From source file: org.apache.geode.management.internal.beans.DistributedSystemBridge.java

/**
 * @return a list of region names hosted on the system
 */
public String[] listAllRegionPaths() {
    if (distrRegionMap.values().size() == 0) {
        return ManagementConstants.NO_DATA_STRING;
    }
    // Sort region paths
    SortedSet<String> regionPathsSet = new TreeSet<>();
    for (DistributedRegionBridge bridge : distrRegionMap.values()) {
        regionPathsSet.add(bridge.getFullPath());
    }
    String[] regionPaths = new String[regionPathsSet.size()];
    regionPaths = regionPathsSet.toArray(regionPaths);
    regionPathsSet.clear();
    return regionPaths;
}
From source file: cerrla.LocalCrossEntropyDistribution.java

/**
 * Updates the distributions based on the current state of the elites.
 *
 * @param elites
 *            The elite values (+ 1 more sample).
 * @param population
 *            The population size.
 * @param numElites
 *            The minimum number of elites.
 */
private void updateDistributions(SortedSet<PolicyValue> elites, int population, int numElites) {
    if (population == 0)
        return;

    double minReward = performance_.getMinimumReward();

    // Clean up the policy values
    SortedSet<PolicyValue> removed = preUpdateModification(elites, numElites, population, minReward);

    ElitesData ed = policyGenerator_.updateDistributions(elites, ProgramArgument.ALPHA.doubleValue(), numElites,
            population, minReward);
    if (ed != null)
        performance_.noteElitesReward(currentEpisode_, ed.getMeanEliteValue(), ed.getMaxEliteValue());

    // Negative updates:
    if (ProgramArgument.NEGATIVE_UPDATES.booleanValue())
        policyGenerator_.updateNegative(elites, ProgramArgument.ALPHA.doubleValue(), population, numElites,
                removed);

    // Run the post update operations
    boolean newSlotCreated = policyGenerator_.postUpdateOperations(numElites);

    if (ProgramArgument.RESET_ELITES.booleanValue() && newSlotCreated)
        elites.clear();
}
From source file: com.android.mms.transaction.MessagingNotification.java

/**
 * Checks to see if there are any "unseen" messages or delivery
 * reports and builds a sorted (by delivery date) list of unread notifications.
 *
 * @param context the context to use
 * @param newMsgThreadId The thread ID of a new message that we're to notify about; if there's
 *            no new message, use THREAD_NONE. If we should notify about multiple or unknown
 *            thread IDs, use THREAD_ALL.
 * @param isStatusMessage
 */
public static void blockingUpdateNewMessageIndicator(Context context, long newMsgThreadId,
        boolean isStatusMessage) {
    if (DEBUG) {
        Contact.logWithTrace(TAG, "blockingUpdateNewMessageIndicator: newMsgThreadId: " + newMsgThreadId);
    }
    final boolean isDefaultSmsApp = MmsConfig.isSmsEnabled(context);
    if (!isDefaultSmsApp) {
        cancelNotification(context, NOTIFICATION_ID);
        if (DEBUG || Log.isLoggable(LogTag.APP, Log.VERBOSE)) {
            Log.d(TAG, "blockingUpdateNewMessageIndicator: not the default sms app - skipping "
                    + "notification");
        }
        return;
    }

    // notificationSet is kept sorted by the incoming message delivery time, with the
    // most recent message first.
    SortedSet<NotificationInfo> notificationSet = new TreeSet<NotificationInfo>(INFO_COMPARATOR);

    Set<Long> threads = new HashSet<Long>(4);

    addMmsNotificationInfos(context, threads, notificationSet);
    addSmsNotificationInfos(context, threads, notificationSet);

    if (notificationSet.isEmpty()) {
        if (DEBUG) {
            Log.d(TAG, "blockingUpdateNewMessageIndicator: notificationSet is empty, "
                    + "canceling existing notifications");
        }
        cancelNotification(context, NOTIFICATION_ID);
    } else {
        if (DEBUG || Log.isLoggable(LogTag.APP, Log.VERBOSE)) {
            Log.d(TAG, "blockingUpdateNewMessageIndicator: count=" + notificationSet.size()
                    + ", newMsgThreadId=" + newMsgThreadId);
        }

        if (isInCurrentConversation(newMsgThreadId, threads)) {
            if (DEBUG) {
                Log.d(TAG, "blockingUpdateNewMessageIndicator: newMsgThreadId == "
                        + "sCurrentlyDisplayedThreadId so NOT showing notification,"
                        + " but playing soft sound. threadId: " + newMsgThreadId);
            }
            playInConversationNotificationSound(context, newMsgThreadId);
            return;
        }
        updateNotification(context, newMsgThreadId, threads.size(), notificationSet);
    }

    // And deals with delivery reports (which use Toasts). It's safe to call in a worker
    // thread because the toast will eventually get posted to a handler.
    MmsSmsDeliveryInfo delivery = getSmsNewDeliveryInfo(context);
    if (delivery != null) {
        delivery.deliver(context, isStatusMessage);
    }

    notificationSet.clear();
    threads.clear();
}
From source file: de.uni_potsdam.hpi.asg.logictool.mapping.SequenceBasedAndGateDecomposer.java

public boolean decomposeAND(NetlistTerm term) {
    logger.info("Decomposition of " + term.toString());

    Set<Signal> signals = netlist.getDrivenSignalsTransitive(term);
    if (signals.isEmpty()) {
        logger.warn("No signal(s) for term " + term + " found");
        return false;
    } else if (signals.size() > 1) {
        logger.warn("Term " + term + " drives more than one signal. This is not supported yet");
        return false;
    }
    Signal origsig = signals.iterator().next();
    if (!isAOC(term, origsig)) {
        logger.warn("Algorithm not applicable for non-AOC architectures");
        return false;
    }

    int startgatesize = BDDHelper.numberOfVars(term.getBdd());

    BDD bdd = term.getBdd();
    Set<Signal> origrelevant = findRelevantSigs(bdd);
    if (origrelevant == null) {
        return false;
    }

    StateGraph sg2 = sghelper.getNewStateGraph(origrelevant, origsig);
    if (sg2 == null) {
        logger.warn("Failed to generate new SG. Using the original one.");
        sg2 = origsg;
    }

    BiMap<Signal, Signal> sigmap = HashBiMap.create();
    Set<Signal> relevant = new HashSet<>();
    boolean found;
    for (Signal oldSig : origrelevant) {
        found = false;
        for (Signal newSig : sg2.getAllSignals()) {
            if (oldSig.getName().equals(newSig.getName())) {
                sigmap.put(oldSig, newSig);
                found = true;
                break;
            }
        }
        if (!found) {
            logger.error("Signal " + oldSig.getName() + " not found");
            return false;
        }
        relevant.add(sigmap.get(oldSig));
    }
    found = false;
    for (Signal newSig : sg2.getAllSignals()) {
        if (origsig.getName().equals(newSig.getName())) {
            sigmap.put(origsig, newSig);
            found = true;
            break;
        }
    }
    if (!found) {
        logger.error("Signal " + origsig.getName() + " not found");
        return false;
    }
    Signal sig = sigmap.get(origsig);

    Map<Signal, Boolean> posnegmap = getInputsPosOrNeg(term, sigmap);
    BDD newbdd = factory.one();
    for (Entry<Signal, Boolean> entry : posnegmap.entrySet()) {
        if (entry.getValue()) {
            newbdd = newbdd.andWith(getPosBDD(entry.getKey()));
        } else {
            newbdd = newbdd.andWith(getNegBDD(entry.getKey()));
        }
        if (entry.getKey() instanceof QuasiSignal) {
            relevant.add(entry.getKey());
        }
    }

    Set<State> startStates = new HashSet<>();
    for (State s : sg2.getStates()) {
        for (Entry<Transition, State> entry2 : s.getNextStates().entrySet()) {
            if (entry2.getKey().getSignal() == sig) {
                startStates.add(entry2.getValue());
            }
        }
    }

    List<List<Signal>> fallingPartitions = new ArrayList<>();
    for (Signal sig2 : relevant) {
        List<Signal> tmp = new ArrayList<>();
        tmp.add(sig2);
        fallingPartitions.add(tmp);
    }

    SortedSet<IOBehaviour> sequencesFront = new TreeSet<>(new SequenceFrontCmp());
    SortedSet<IOBehaviour> sequencesBack = new TreeSet<>(new SequenceBackCmp());
    Set<IOBehaviour> newSequences = new HashSet<>();
    Set<IOBehaviour> rmSequences = new HashSet<>();
    Deque<IOBehaviourSimulationStep> steps = new ArrayDeque<>();

    pool = new IOBehaviourSimulationStepPool(new IOBehaviourSimulationStepFactory());
    pool.setMaxTotal(-1);

    try {
        root = pool.borrowObject();
    } catch (Exception e) {
        e.printStackTrace();
        logger.error("Could not borrow object");
        return false;
    }

    IOBehaviourSimulationStep newStep;
    for (State s : startStates) {
        try {
            newStep = pool.borrowObject();
        } catch (Exception e) {
            e.printStackTrace();
            logger.error("Could not borrow object");
            return false;
        }
        root.getNextSteps().add(newStep);
        newStep.setPrevStep(root);
        newStep.setStart(s);
        newStep.setNextState(s);
        steps.add(newStep);
    }

    if (steps.isEmpty()) {
        return false;
    }

    final long checkThreshold = 100;
    long stepsEvaledTotal = 0;
    IOBehaviourSimulationStep step = null;
    while (!steps.isEmpty()) {
        step = steps.removeLast();
        // System.out.println("#Step: " + step.toString());
        getNewSteps(step, sig, newSequences, steps, relevant);
        stepsEvaledTotal++;
        if (newSequences.size() >= checkThreshold) {
            removeCandidates(sequencesFront, sequencesBack, newSequences, rmSequences);
        }
    }
    removeCandidates(sequencesFront, sequencesBack, newSequences, rmSequences);
    logger.debug("Sequences: " + sequencesFront.size() + " - Tmp Sequences: " + newSequences.size()
            + " - Steps to evaluate: " + steps.size() + " - Steps evaluated: " + stepsEvaledTotal);
    logger.debug("Pool: " + "Created: " + pool.getCreatedCount() + ", Borrowed: " + pool.getBorrowedCount()
            + ", Returned: " + pool.getReturnedCount() + ", Active: " + pool.getNumActive() + ", Idle: "
            + pool.getNumIdle());
    logger.debug("RmSub: " + rmSub + " // RmFall: " + rmFall);

    // the two helper orderings are no longer needed once the final sorted set is built
    SortedSet<IOBehaviour> sequences = new TreeSet<>(sequencesFront);
    sequencesFront.clear();
    sequencesBack.clear();
    // System.out.println(sequences.toString());

    List<IOBehaviour> falling = new ArrayList<>();
    List<IOBehaviour> rising = new ArrayList<>();
    List<IOBehaviour> constant = new ArrayList<>();
    if (!categoriseSequences(newbdd, sequences, falling, rising, constant)) {
        return false;
    }
    // System.out.println("Falling:");
    // for (IOBehaviour beh : falling) {
    //     System.out.println(beh.toString());
    // }
    // System.out.println("Rising:");
    // for (IOBehaviour beh : rising) {
    //     System.out.println(beh.toString());
    // }
    // System.out.println("Constant:");
    // for (IOBehaviour beh : constant) {
    //     System.out.println(beh.toString());
    // }

    fallingPartitions = getPossiblePartitionsFromFalling(falling, relevant);
    // System.out.println("FallingPartitions: " + fallingPartitions.toString());

    Map<Integer, List<Partition>> partitions = getPartitions(relevant, startgatesize);
    if (partitions == null) {
        logger.error("There was a problem while creating partitions for signal " + sig.getName());
        return false;
    }
    // System.out.println("Init:");
    // for (Entry<Integer, List<Partition>> entry : partitions.entrySet()) {
    //     System.out.println(entry.getKey());
    //     for (Partition p : entry.getValue()) {
    //         System.out.println("\t" + p.toString());
    //     }
    // }

    filterPartitions(partitions, fallingPartitions);
    if (partitions.isEmpty()) {
        logger.error("No suitable partitions found");
        return false;
    }
    // System.out.println("After filter Falling:");
    // for (Entry<Integer, List<Partition>> entry : partitions.entrySet()) {
    //     System.out.println(entry.getKey());
    //     for (Partition p : entry.getValue()) {
    //         System.out.println("\t" + p.toString());
    //     }
    // }
    // System.out.println("posneg: " + posnegmap.toString());

    setPartitionBDDs(partitions, posnegmap);

    if (!checkRising(rising, partitions)) {
        logger.error("Check rising failed");
        return false;
    }
    if (partitions.isEmpty()) {
        logger.error("No suitable partitions found");
        return false;
    }
    // System.out.println("After filter Rising:");
    // for (Entry<Integer, List<Partition>> entry : partitions.entrySet()) {
    //     System.out.println(entry.getKey());
    //     for (Partition p : entry.getValue()) {
    //         System.out.println("\t" + p.toString());
    //     }
    // }

    if (!checkConstant(constant, partitions)) {
        logger.error("Check constant failed");
        return false;
    }
    if (partitions.isEmpty()) {
        logger.error("No suitable partitions found");
        return false;
    }
    // System.out.println("After filter Constant:");
    // for (Entry<Integer, List<Partition>> entry : partitions.entrySet()) {
    //     System.out.println(entry.getKey());
    //     for (Partition p : entry.getValue()) {
    //         System.out.println("\t" + p.toString());
    //     }
    // }

    applyDecoResult(term, partitions, posnegmap, sigmap);
    return true;
}
From source file: org.sakaiproject.content.tool.ListItem.java

protected void captureAccess(ParameterParser params, String index) {
    String access_mode = params.getString("access_mode" + index);

    if (access_mode == null || AccessMode.GROUPED.toString().equals(access_mode)) {
        // we inherit more than one group and must check whether group access changes at this item
        String[] access_groups = params.getStrings("access_groups" + index);

        SortedSet<String> new_groups = new TreeSet<String>();
        if (access_groups != null) {
            new_groups.addAll(Arrays.asList(access_groups));
        }

        SortedSet<String> new_group_refs = convertToRefs(new_groups);

        Collection inh_grps = getInheritedGroupRefs();
        boolean groups_are_inherited = (new_group_refs.size() == inh_grps.size())
                && inh_grps.containsAll(new_group_refs);

        if (groups_are_inherited) {
            new_groups.clear();
            setGroupsById(new_groups);
            setAccessMode(AccessMode.INHERITED);
        } else {
            setGroupsById(new_groups);
            setAccessMode(AccessMode.GROUPED);
        }

        setPubview(false);
    } else if (ResourcesAction.PUBLIC_ACCESS.equals(access_mode)) {
        if (!isPubviewInherited()) {
            setPubview(true);
            setAccessMode(AccessMode.INHERITED);
        }
    } else if (AccessMode.INHERITED.toString().equals(access_mode)) {
        captureAccessRoles(params, index);
        setAccessMode(AccessMode.INHERITED);
        this.groups.clear();
    }
}