List of usage examples for java.util.Set.clear()
void clear();
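Set.clear() removes all of the elements from the set; the set is empty once the call returns. It is an optional operation, so unmodifiable sets throw UnsupportedOperationException. A common thread through the examples below is reusing one set as a scratch buffer instead of allocating a new one. A minimal sketch of that pattern (names are illustrative):

import java.util.HashSet;
import java.util.Set;

public class SetClearDemo {
    public static void main(String[] args) {
        Set<String> buffer = new HashSet<String>();
        String[][] batches = { { "a", "b" }, { "b", "c", "d" } };
        for (String[] batch : batches) {
            for (String token : batch) {
                buffer.add(token);
            }
            System.out.println("unique in batch: " + buffer.size()); // 2, then 3
            buffer.clear(); // empty the set so it can be reused for the next batch
        }
        System.out.println(buffer.isEmpty()); // true
    }
}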
From source file: com.jaeksoft.searchlib.crawler.web.spider.HtmlArchiver.java

final public void archive(BrowserDriver<?> browserDriver, Set<String> xPathDisableScriptSet)
        throws IOException, ParserConfigurationException, SAXException, IllegalStateException,
        SearchLibException, URISyntaxException, XPatherException {
    String pageSource = browserDriver.getSourceCode();
    HtmlCleanerParser htmlCleanerParser = new HtmlCleanerParser();
    htmlCleanerParser.init(pageSource);
    // Find iframes
    Set<WebElement> iframeWebElementSet = new HashSet<WebElement>();
    browserDriver.locateBy(By.tagName("iframe"), iframeWebElementSet, true);
    Map<TagNode, WebElement> iframeNodeMap = null;
    if (iframeWebElementSet != null && iframeWebElementSet.size() > 0) {
        iframeNodeMap = new HashMap<TagNode, WebElement>();
        Set<TagNode> tagNodeSet = new HashSet<TagNode>();
        for (WebElement webElement : iframeWebElementSet) {
            String xPath = browserDriver.getXPath(webElement, true);
            if (xPath == null)
                continue;
            if (htmlCleanerParser.xpath(xPath, tagNodeSet) == 0) {
                Logging.warn("DisableScript not found using XPath: " + xPath);
                continue;
            }
            for (TagNode tagNode : tagNodeSet)
                iframeNodeMap.put(tagNode, webElement);
            tagNodeSet.clear();
        }
    }
    // Find nodes that need to be disabled
    Set<TagNode> disableScriptNodeSet = null;
    if (xPathDisableScriptSet != null && xPathDisableScriptSet.size() > 0) {
        disableScriptNodeSet = new HashSet<TagNode>();
        for (String xPath : xPathDisableScriptSet)
            if (htmlCleanerParser.xpath(xPath, disableScriptNodeSet) == 0)
                Logging.warn("DisableScript not found using XPath: " + xPath);
    }
    recursiveArchive(htmlCleanerParser.getTagNode(), disableScriptNodeSet, iframeNodeMap);
    htmlCleanerParser.writeHtmlToFile(indexFile);
    String charset = htmlCleanerParser.findCharset();
    if (charset == null)
        FileUtils.write(sourceFile, pageSource);
    else
        FileUtils.write(sourceFile, pageSource, charset);
}
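In this example tagNodeSet is a scratch buffer reused across iframes: after the matched TagNodes are copied into iframeNodeMap, tagNodeSet.clear() empties the set so the next XPath lookup starts from a clean buffer without allocating a new one.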
From source file: com.dragome.compiler.units.ClassUnit.java

private Set<MemberUnit> getNotImplementedMethods() {
    Set<MemberUnit> interfacesMembers = new HashSet<MemberUnit>();
    getDeclaredMembersInInterfaces(this, interfacesMembers);
    Set<MemberUnit> implementedMembers = new HashSet<MemberUnit>();
    getImplementedMembersInHierarchy(this, implementedMembers);
    interfacesMembers.removeAll(implementedMembers);
    if (isImplementing(InvocationHandler.class) || isAbstract || isInterface || interfacesMembers.isEmpty())
        interfacesMembers.clear();
    return interfacesMembers;
}
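Here clear() forces the result to be empty when the class cannot be required to implement anything itself (abstract classes, interfaces, InvocationHandler proxies), while still returning the same set instance to the caller.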
From source file: com.npower.dm.hibernate.management.ProfileConfigManagementBeanImp.java

/**
 * Add or update the AttributeValue specified by the name. This is the
 * modifier of AttributeValue in multiple-value mode.
 *
 * Caution: Assigning null to value is permitted; this sets the value to
 * null, the AttributeValue will not be deleted!
 *
 * Caution: The order of the AttributeValue is increased automatically! The
 * AttributeValue added most recently will be at the bottom.
 *
 * @param name
 *            Attribute's name
 * @param value
 *            String[] array of multi-value
 * @throws DMException
 */
public void setAttributeValue(ProfileConfig config, String name, String value[]) throws DMException {
    // Update this profile config first, to make sure the profileID is
    // generated by Hibernate.
    Session hsession = this.getHibernateSession();
    hsession.saveOrUpdate(config);
    Clob[] clobValues = null;
    if (value != null) {
        clobValues = new Clob[value.length];
        for (int i = 0; i < value.length; i++) {
            clobValues[i] = (value[i] == null) ? null : Hibernate.createClob(value[i]);
        }
    }
    // Check exists?
    Set<ProfileValueMap> vMaps = ((ProfileConfigEntity) config).getProfileValueMaps();
    for (Iterator<ProfileValueMap> i = vMaps.iterator(); i.hasNext();) {
        ProfileValueMap vMap = i.next();
        ProfileAttributeValueEntity v = (ProfileAttributeValueEntity) vMap.getProfileAttribValue();
        if (name.equals(v.getProfileAttribute().getName())) {
            // In multi-value mode, clear the single value.
            v.setRawData(null);
            // Set to multi-value mode.
            v.setIsMultiValued(true);
            v.setItemDataKind(ProfileAttributeValue.ITEM_DATA_KIND_TEXT);
            v.setMFormat(DDFNode.DDF_FORMAT_CHR);
            // Delete all of the old values from the session.
            Set<ProfileValueItem> items = v.getProfileValueItems();
            for (Iterator<ProfileValueItem> item = items.iterator(); item.hasNext();) {
                hsession.delete(item.next());
            }
            // Clear the in-memory set of items.
            items.clear();
            for (int j = 0; clobValues != null && j < clobValues.length; j++) {
                // Create a ProfileValueItem.
                ProfileValueItem item = new ProfileValueItem(v);
                // Inherit properties from the AttributeValue.
                item.setItemDataKind(v.getItemDataKind());
                item.setMFormat(v.getMFormat());
                item.setMType(v.getMType());
                item.setUpdateId(v.getUpdateId());
                // Assign the value.
                item.setRawData(clobValues[j]);
                // Save into the DM inventory.
                hsession.saveOrUpdate(item);
                items.add(item);
            }
            return;
        }
    }
    // Create a new AttributeValue.
    ProfileTemplate template = config.getProfileTemplate();
    ManagementBeanFactory factory = this.getManagementBeanFactory();
    ProfileAttribute attr = factory.createProfileTemplateBean().getProfileAttributeByName(template.getName(), name);
    if (attr == null) {
        throw new DMException(
                "Could not find attribute by name: " + name + " from the template: " + template.getName());
    }
    // Create a ProfileAttributeValueEntity.
    ProfileAttributeValueEntity av = new ProfileAttributeValueEntity();
    av.setProfileAttribute(attr);
    // In multi-value mode, clear the value of single-value mode.
    av.setRawData(null);
    // Set to multi-value mode.
    av.setIsMultiValued(true);
    av.setItemDataKind(ProfileAttributeValue.ITEM_DATA_KIND_TEXT);
    av.setMFormat(DDFNode.DDF_FORMAT_CHR);
    hsession.saveOrUpdate(av);
    // Create all of the items from clobValues.
    for (int j = 0; clobValues != null && j < clobValues.length; j++) {
        // Create a ProfileValueItem.
        ProfileValueItem item = new ProfileValueItem(av);
        // Inherit the property values from the ProfileAttributeValueEntity.
        item.setItemDataKind(av.getItemDataKind());
        item.setMFormat(av.getMFormat());
        item.setMType(av.getMType());
        item.setUpdateId(av.getUpdateId());
        // Set the Clob value.
        item.setRawData(clobValues[j]);
        // Add into the DM inventory.
        hsession.saveOrUpdate(item);
        // Link the item to the ProfileAttributeValueEntity.
        av.getProfileValueItems().add(item);
    }
    // Create a new ProfileValueMapID.
    ProfileValueMapID mapID = new ProfileValueMapID();
    mapID.setAttributeValueId(av.getID());
    mapID.setProfileId(config.getID());
    // Create a new ProfileValueMap.
    long index = ((ProfileConfigEntity) config).getProfileValueMaps().size() + 1;
    ProfileValueMap map = new ProfileValueMap(mapID, av, config, index);
    // Link to the ProfileConfigEntity.
    Set<ProfileValueMap> maps = ((ProfileConfigEntity) config).getProfileValueMaps();
    maps.add(map);
    // Add the ProfileValueMap into the DM inventory.
    hsession.saveOrUpdate(map);
}
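items.clear() keeps the in-memory collection consistent with the database: every old ProfileValueItem has just been deleted from the Hibernate session, so the mapped set is emptied before the replacement items are added.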
From source file: eu.stratosphere.nephele.executiongraph.ExecutionStage.java

/**
 * Reconstructs the execution pipelines for this execution stage.
 */
void reconstructExecutionPipelines() {
    Iterator<ExecutionGroupVertex> it = this.stageMembers.iterator();
    final Set<ExecutionVertex> alreadyVisited = new HashSet<ExecutionVertex>();
    while (it.hasNext()) {
        final ExecutionGroupVertex groupVertex = it.next();
        // We only look at input vertices first
        if (!groupVertex.isInputVertex()) {
            continue;
        }
        final Iterator<ExecutionVertex> vertexIt = groupVertex.iterator();
        while (vertexIt.hasNext()) {
            final ExecutionVertex vertex = vertexIt.next();
            reconstructExecutionPipeline(vertex, true, alreadyVisited);
        }
    }
    it = this.stageMembers.iterator();
    alreadyVisited.clear();
    while (it.hasNext()) {
        final ExecutionGroupVertex groupVertex = it.next();
        // Then we only look at output vertices
        if (!groupVertex.isOutputVertex()) {
            continue;
        }
        final Iterator<ExecutionVertex> vertexIt = groupVertex.iterator();
        while (vertexIt.hasNext()) {
            final ExecutionVertex vertex = vertexIt.next();
            reconstructExecutionPipeline(vertex, false, alreadyVisited);
        }
    }
}
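alreadyVisited.clear() resets the shared visited set between the forward pass over input vertices and the backward pass over output vertices, so both traversals reuse one allocation. A minimal sketch of this one-set, two-pass pattern (the graph representation is an illustrative assumption):

import java.util.HashSet;
import java.util.Set;

public class TwoPassTraversal {
    static void visit(int node, int[][] edges, Set<Integer> seen) {
        if (!seen.add(node))
            return; // already visited in this pass
        for (int next : edges[node])
            visit(next, edges, seen);
    }

    public static void main(String[] args) {
        int[][] forward = { { 1 }, { 2 }, {} };  // 0 -> 1 -> 2
        int[][] backward = { {}, { 0 }, { 1 } }; // 2 -> 1 -> 0
        Set<Integer> seen = new HashSet<Integer>();
        visit(0, forward, seen);  // first pass from the input side
        seen.clear();             // reuse the same set
        visit(2, backward, seen); // second pass from the output side
    }
}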
From source file: com.npower.dm.hibernate.management.ProfileConfigManagementBeanImp.java

/**
 * Add or update the value of the AttributeValue specified by the name. This
 * is the modifier of AttributeValue in multiple-value, binary mode.
 *
 * Caution: Assigning null to value is permitted; this sets the value to
 * null, the AttributeValue will not be deleted!
 *
 * Caution: The order of the AttributeValue is increased automatically! The
 * AttributeValue added most recently will be at the bottom.
 *
 * @param name
 *            Attribute's name
 * @param value
 *            InputStream[] array of multi-value
 * @throws DMException
 * @throws IOException
 */
public void setAttributeValue(ProfileConfig config, String name, InputStream value[])
        throws DMException, IOException {
    // Update this profile config first, to make sure the profileID is
    // generated by Hibernate.
    Session hsession = this.getHibernateSession();
    hsession.saveOrUpdate(config);
    Blob[] blobValues = null;
    try {
        if (value != null) {
            blobValues = new Blob[value.length];
            for (int i = 0; i < value.length; i++) {
                blobValues[i] = (value[i] == null) ? null : Hibernate.createBlob(value[i]);
            }
        }
    } catch (IOException e) {
        throw e;
    }
    // Check exists?
    Set<ProfileValueMap> vMaps = ((ProfileConfigEntity) config).getProfileValueMaps();
    for (Iterator<ProfileValueMap> i = vMaps.iterator(); i.hasNext();) {
        ProfileValueMap vMap = i.next();
        ProfileAttributeValueEntity v = (ProfileAttributeValueEntity) vMap.getProfileAttribValue();
        if (name.equals(v.getProfileAttribute().getName())) {
            // In multi-value mode, clear the single value.
            v.setRawData(null);
            // Set to multi-value mode.
            v.setIsMultiValued(true);
            v.setMFormat(DDFNode.DDF_FORMAT_BIN);
            // Delete all of the old values from the session.
            Set<ProfileValueItem> items = v.getProfileValueItems();
            for (Iterator<ProfileValueItem> item = items.iterator(); item.hasNext();) {
                hsession.delete(item.next());
            }
            // Clear the in-memory set of items.
            items.clear();
            for (int j = 0; blobValues != null && j < blobValues.length; j++) {
                // Create a ProfileValueItem.
                ProfileValueItem item = new ProfileValueItem(v);
                // Inherit properties from the AttributeValue.
                item.setItemDataKind(v.getItemDataKind());
                item.setMFormat(v.getMFormat());
                item.setMType(v.getMType());
                item.setUpdateId(v.getUpdateId());
                // Assign the value.
                item.setBinaryData(blobValues[j]);
                // Save into the DM inventory.
                hsession.saveOrUpdate(item);
                items.add(item);
            }
            return;
        }
    }
    // Create a new AttributeValue.
    ProfileTemplate template = config.getProfileTemplate();
    ManagementBeanFactory factory = this.getManagementBeanFactory();
    ProfileAttribute attr = factory.createProfileTemplateBean().getProfileAttributeByName(template.getName(), name);
    if (attr == null) {
        throw new DMException(
                "Could not find attribute by name: " + name + " from the template: " + template.getName());
    }
    // Create a ProfileAttributeValueEntity.
    ProfileAttributeValueEntity av = new ProfileAttributeValueEntity();
    av.setProfileAttribute(attr);
    // In multi-value mode, clear the value of single-value mode.
    av.setRawData(null);
    // Set to multi-value mode.
    av.setIsMultiValued(true);
    av.setMFormat(DDFNode.DDF_FORMAT_BIN);
    hsession.saveOrUpdate(av);
    // Create all of the items from blobValues.
    for (int j = 0; blobValues != null && j < blobValues.length; j++) {
        // Create a ProfileValueItem.
        ProfileValueItem item = new ProfileValueItem(av);
        // Inherit the property values from the ProfileAttributeValueEntity.
        item.setItemDataKind(av.getItemDataKind());
        item.setMFormat(av.getMFormat());
        item.setMType(av.getMType());
        item.setUpdateId(av.getUpdateId());
        // Set the Blob value.
        item.setBinaryData(blobValues[j]);
        // Add into the DM inventory.
        hsession.saveOrUpdate(item);
        // Link the item to the ProfileAttributeValueEntity.
        av.getProfileValueItems().add(item);
    }
    // Create a new ProfileValueMapID.
    ProfileValueMapID mapID = new ProfileValueMapID();
    mapID.setAttributeValueId(av.getID());
    mapID.setProfileId(config.getID());
    // Create a new ProfileValueMap.
    long index = ((ProfileConfigEntity) config).getProfileValueMaps().size() + 1;
    ProfileValueMap map = new ProfileValueMap(mapID, av, config, index);
    // Link to the ProfileConfigEntity.
    Set<ProfileValueMap> maps = ((ProfileConfigEntity) config).getProfileValueMaps();
    maps.add(map);
    // Add the ProfileValueMap into the DM inventory.
    hsession.saveOrUpdate(map);
}
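This is the binary (Blob) counterpart of the previous example: again the old ProfileValueItems are deleted from the session and items.clear() empties the mapped collection before the new Blob-backed items are attached.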
From source file: fr.landel.utils.assertor.AssertorIterableTest.java

/**
 * Test method for {@link AssertorIterable#isNotEmpty}.
 *
 * @throws IOException
 *             On empty iterable
 */
@Test
public void testIsNotEmpty() throws IOException {
    final String el = "element";
    final Set<String> set = new HashSet<>();
    set.add(el);

    Assertor.that(set).isNotEmpty().orElseThrow();
    assertException(() -> {
        Assertor.that(set).not().isNotEmpty().orElseThrow("iterable is not empty");
        fail(ERROR);
    }, IllegalArgumentException.class, "iterable is not empty");

    set.clear();

    assertException(() -> {
        Assertor.that(set).isNotEmpty().orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class);
    assertException(() -> {
        Assertor.that(set).isNotEmpty().orElseThrow("iterable is empty");
        fail(ERROR);
    }, IllegalArgumentException.class, "iterable is empty");
    assertException(() -> {
        Assertor.that(set).isNotEmpty().orElseThrow(new IOException(), true);
        fail(ERROR);
    }, IOException.class);
    assertException(() -> {
        Assertor.that((Iterable<String>) null).isNotEmpty().orElseThrow();
        fail();
    }, IllegalArgumentException.class, "the iterable 'null' should be NOT empty and NOT null");
}
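The test uses set.clear() to flip the same fixture from non-empty to empty in place, so one Set instance exercises both outcomes of the isNotEmpty() assertion.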
From source file: com.netbase.insightapi.bestpractice.TopicDownloader.java

/**
 * Download all the documents of a topic, in sequential order, by
 * publication date.
 *
 * @param masterQuery
 * @param user
 * @param startTimestamp
 *            inclusive
 * @param endTimestamp
 *            exclusive
 * @param handler
 * @throws InterruptedException
 * @throws InsightAPIQueryException
 */
public static void downloadHistory(InsightAPIQuery masterQuery, UserChannel user, int startTimestamp,
        int endTimestamp, ResultHandler handler) throws InterruptedException, InsightAPIQueryException {
    // Clone the original query so we don't change it.
    InsightAPIQuery query = new InsightAPIQuery(masterQuery);
    // Force the parameters we rely on, leaving the others set by the caller.
    query.setParameter("sort", "timestamp");
    query.setOp("retrieveDocuments");
    // The caller can set "sizeNeeded" to any legal value, particularly for
    // testing purposes. In production, bigger is better unless we start
    // experiencing timeout or communication reliability issues.
    if (query.getParameters("sizeNeeded") == null)
        query.setParameter("sizeNeeded", 2000);
    /*
     * Each call to the Insight API will return a (typically small) number
     * of documents that we already received in the prior call. This is
     * because we start the time range for call "n+1" with the highest
     * timestamp received during call "n". We do this because we are not
     * guaranteed to have received *all* of the documents containing the
     * highest timestamp.
     *
     * Timestamp resolution is 1/10 second; so, typically, we'll receive
     * exactly one document at the end of call "n" and the beginning of
     * "n+1".
     *
     * This set arranges for us to ignore the overlapped documents.
     */
    Set<String> docIdsAlreadySeen = new HashSet<String>();
    /*
     * The query for the first request covers the entire span for the
     * download. Since we're sorting and filtering by timestamp, we'll get
     * the earliest documents in the range.
     */
    query.setPublishedTimestampRange(startTimestamp, endTimestamp);
    while (true) {
        InsightAPIQuery q = new InsightAPIQuery(query);
        // Run the query and toss an exception if it didn't work.
        user.run(q);
        q.checkSuccess();
        // Get the parsed JSON result.
        JSONObject jsonResult = (JSONObject) q.getParsedContent();
        // Get the array of documents.
        JSONArray docs = (JSONArray) jsonResult.get("documents");
        // No documents at all? We're done.
        if (docs == null || docs.size() == 0)
            break;
        // Traverse the beginning of the list, counting up the duplicates.
        int first = 0;
        while (first < docs.size()) {
            JSONObject doc = (JSONObject) docs.get(first);
            String docID = (String) getDocProperty(doc, "docID");
            if (!docIdsAlreadySeen.contains(docID))
                break;
            first++;
        }
        // All duplicates? We're done.
        if (first >= docs.size())
            break;
        // Call the ResultHandler to process the documents, beginning
        // with the first unique one.
        handler.handleResult(docs, first);
        int last = docs.size() - 1;
        docIdsAlreadySeen.clear();
        // Get the timestamp of the last document received.
        int lastTimestamp = ((Number) getDocProperty(docs.get(last), "timestamp")).intValue();
        // If it's later than (shouldn't be) or equal to (could be) the
        // end of the requested range, we're done.
        if (lastTimestamp >= endTimestamp)
            break;
        /*
         * Traverse backwards through the list from the end, looking for
         * the next-lower timestamp. Write down all the docIDs of these
         * documents, because we're going to see them again at the
         * beginning of the next query.
         */
        while (last >= 0 && ((Number) getDocProperty(docs.get(last), "timestamp")).intValue() == lastTimestamp) {
            docIdsAlreadySeen.add((String) getDocProperty(docs.get(last), "docID"));
            last--;
        }
        /*
         * If we get through this loop with last < 0, it means that the
         * entire block of documents we received had the same timestamp.
         * This is a failure of this algorithm.
         *
         * For this to happen, it means that the topic contains more than
         * query.sizeNeeded (current max: 2000) documents with publication
         * timestamps in the same 1/10 second.
         *
         * We have no choice but to increment the timestamp by 1/10 of a
         * second and move on. If we don't, we'll keep getting the same
         * result in an infinite loop.
         */
        if (last < 0) {
            user.logWarning(query.getSerial() + " too many docs with same timestamp=" + lastTimestamp
                    + ", num of docs=" + docs.size());
            docIdsAlreadySeen.clear();
            lastTimestamp++;
        }
        // Set the query's timestamp range to start with the last timestamp
        // we received, and rinse and repeat.
        query.setPublishedTimestampRange(lastTimestamp, endTimestamp);
    }
}
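The comments above describe a general pagination idiom: restart each request at the last timestamp seen, and keep a set of boundary document IDs, cleared once per page, to skip the overlap. A minimal self-contained sketch of that idiom (the Doc type and fetchPage are illustrative assumptions, not the Insight API):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class OverlapPagination {

    static class Doc {
        final String id;
        final int timestamp;
        Doc(String id, int timestamp) { this.id = id; this.timestamp = timestamp; }
    }

    // Hypothetical page fetch: docs with timestamp >= from, sorted by timestamp.
    static List<Doc> fetchPage(List<Doc> all, int from, int pageSize) {
        List<Doc> page = new ArrayList<Doc>();
        for (Doc d : all)
            if (d.timestamp >= from && page.size() < pageSize)
                page.add(d);
        return page;
    }

    public static void main(String[] args) {
        List<Doc> all = Arrays.asList(new Doc("a", 1), new Doc("b", 2), new Doc("c", 2), new Doc("d", 3));
        Set<String> seenAtBoundary = new HashSet<String>();
        int from = 0;
        while (true) {
            List<Doc> page = fetchPage(all, from, 2);
            if (page.isEmpty())
                break;
            boolean anyNew = false;
            for (Doc d : page) {
                if (seenAtBoundary.contains(d.id))
                    continue; // overlap carried over from the previous page
                System.out.println("process " + d.id);
                anyNew = true;
            }
            if (!anyNew)
                break; // the whole page was overlap: no more data
            int last = page.get(page.size() - 1).timestamp;
            seenAtBoundary.clear(); // forget the previous boundary
            for (Doc d : page)
                if (d.timestamp == last)
                    seenAtBoundary.add(d.id); // these may reappear on the next page
            if (seenAtBoundary.size() == page.size()) {
                // Every doc on the page shares one timestamp: step past it,
                // mirroring the lastTimestamp++ escape hatch above.
                seenAtBoundary.clear();
                from = last + 1;
            } else {
                from = last;
            }
        }
    }
}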
From source file: org.apache.juddi.query.FindEntityByCategoryGroupQuery.java

@SuppressWarnings("unchecked")
public List<?> select(EntityManager em, FindQualifiers fq, CategoryBag categoryBag, List<?> keysIn,
        DynamicQuery.Parameter... restrictions) {
    // If keysIn is not null and empty, then the search is over.
    if ((keysIn != null) && (keysIn.size() == 0))
        return keysIn;
    if (categoryBag == null)
        return keysIn;
    List<KeyedReferenceGroup> categories = categoryBag.getKeyedReferenceGroup();
    if (categories == null || categories.size() == 0)
        return keysIn;
    List<KeyedReferenceGroup> keyedRefGroups = new ArrayList<KeyedReferenceGroup>(0);
    for (KeyedReferenceGroup elem : categories) {
        if (elem instanceof KeyedReferenceGroup)
            keyedRefGroups.add((KeyedReferenceGroup) elem);
    }
    if (keyedRefGroups.size() == 0)
        return keysIn;
    Collections.sort(keyedRefGroups, new KeyedRefGroupTModelComparator());
    int count = 0;
    String prevTModelKey = null;
    Set<String> orResults = new HashSet<String>(0);
    List<?> restrictionList = keysIn;
    List<?> curResult = null;
    for (KeyedReferenceGroup keyedRefGroup : keyedRefGroups) {
        String curTModelKey = keyedRefGroup.getTModelKey();
        DynamicQuery dynamicQry = new DynamicQuery(selectSQL);
        appendConditions(dynamicQry, fq, keyedRefGroup);
        if (restrictions != null && restrictions.length > 0)
            dynamicQry.AND().pad().appendGroupedAnd(restrictions);
        if (fq.isOrLikeKeys()) {
            if (!curTModelKey.equals(prevTModelKey)) {
                if (count != 0) {
                    restrictionList = new ArrayList<String>(orResults);
                    orResults.clear();
                }
            }
        } else if (!fq.isOrAllKeys()) {
            if (count != 0)
                restrictionList = curResult;
        }
        if (restrictionList != null && restrictionList.size() == 0)
            break;
        curResult = getQueryResult(em, dynamicQry, restrictionList, entityAlias + "." + keyName);
        if (fq.isOrAllKeys() || fq.isOrLikeKeys()) {
            orResults.addAll((List<String>) curResult);
        }
        prevTModelKey = curTModelKey;
        count++;
    }
    List<String> result = null;
    if (fq.isOrAllKeys() || fq.isOrLikeKeys()) {
        result = new ArrayList<String>(0);
        result.addAll(orResults);
    } else {
        result = (List<String>) curResult;
    }
    return result;
}
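orResults.clear() resets the accumulator in orLikeKeys mode: when the tModel key changes, the keys collected so far become the restriction list for the next group of keyed references, and the set is emptied for the next accumulation.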
From source file: org.apache.tajo.master.rm.TajoWorkerResourceManager.java

private List<AllocatedWorkerResource> chooseWorkers(WorkerResourceRequest resourceRequest) {
    List<AllocatedWorkerResource> selectedWorkers = new ArrayList<AllocatedWorkerResource>();
    int allocatedResources = 0;
    ResourceRequestPriority resourceRequestPriority = resourceRequest.request.getResourceRequestPriority();
    List<Worker> randomWorkers = new ArrayList<Worker>(rmContext.getWorkers().values());
    Collections.shuffle(randomWorkers);
    if (resourceRequestPriority == ResourceRequestPriority.MEMORY) {
        int numContainers = resourceRequest.request.getNumContainers();
        int minMemoryMB = resourceRequest.request.getMinMemoryMBPerContainer();
        int maxMemoryMB = resourceRequest.request.getMaxMemoryMBPerContainer();
        float diskSlot = Math.max(resourceRequest.request.getMaxDiskSlotPerContainer(),
                resourceRequest.request.getMinDiskSlotPerContainer());
        int liveWorkerSize = randomWorkers.size();
        Set<Integer> insufficientWorkers = new HashSet<Integer>();
        boolean stop = false;
        boolean checkMax = true;
        while (!stop) {
            if (allocatedResources >= numContainers) {
                break;
            }
            if (insufficientWorkers.size() >= liveWorkerSize) {
                if (!checkMax) {
                    break;
                }
                insufficientWorkers.clear();
                checkMax = false;
            }
            int compareAvailableMemory = checkMax ? maxMemoryMB : minMemoryMB;
            for (Worker worker : randomWorkers) {
                if (allocatedResources >= numContainers) {
                    stop = true;
                    break;
                }
                if (insufficientWorkers.size() >= liveWorkerSize) {
                    break;
                }
                WorkerResource workerResource = worker.getResource();
                if (workerResource.getAvailableMemoryMB() >= compareAvailableMemory) {
                    int workerMemory;
                    if (workerResource.getAvailableMemoryMB() >= maxMemoryMB) {
                        workerMemory = maxMemoryMB;
                    } else {
                        workerMemory = workerResource.getAvailableMemoryMB();
                    }
                    AllocatedWorkerResource allocatedWorkerResource = new AllocatedWorkerResource();
                    allocatedWorkerResource.worker = worker;
                    allocatedWorkerResource.allocatedMemoryMB = workerMemory;
                    if (workerResource.getAvailableDiskSlots() >= diskSlot) {
                        allocatedWorkerResource.allocatedDiskSlots = diskSlot;
                    } else {
                        allocatedWorkerResource.allocatedDiskSlots = workerResource.getAvailableDiskSlots();
                    }
                    workerResource.allocateResource(allocatedWorkerResource.allocatedDiskSlots,
                            allocatedWorkerResource.allocatedMemoryMB);
                    selectedWorkers.add(allocatedWorkerResource);
                    allocatedResources++;
                } else {
                    insufficientWorkers.add(worker.getWorkerId());
                }
            }
        }
    } else {
        int numContainers = resourceRequest.request.getNumContainers();
        float minDiskSlots = resourceRequest.request.getMinDiskSlotPerContainer();
        float maxDiskSlots = resourceRequest.request.getMaxDiskSlotPerContainer();
        int memoryMB = Math.max(resourceRequest.request.getMaxMemoryMBPerContainer(),
                resourceRequest.request.getMinMemoryMBPerContainer());
        int liveWorkerSize = randomWorkers.size();
        Set<Integer> insufficientWorkers = new HashSet<Integer>();
        boolean stop = false;
        boolean checkMax = true;
        while (!stop) {
            if (allocatedResources >= numContainers) {
                break;
            }
            if (insufficientWorkers.size() >= liveWorkerSize) {
                if (!checkMax) {
                    break;
                }
                insufficientWorkers.clear();
                checkMax = false;
            }
            float compareAvailableDisk = checkMax ? maxDiskSlots : minDiskSlots;
            for (Worker worker : randomWorkers) {
                if (allocatedResources >= numContainers) {
                    stop = true;
                    break;
                }
                if (insufficientWorkers.size() >= liveWorkerSize) {
                    break;
                }
                WorkerResource workerResource = worker.getResource();
                if (workerResource.getAvailableDiskSlots() >= compareAvailableDisk) {
                    float workerDiskSlots;
                    if (workerResource.getAvailableDiskSlots() >= maxDiskSlots) {
                        workerDiskSlots = maxDiskSlots;
                    } else {
                        workerDiskSlots = workerResource.getAvailableDiskSlots();
                    }
                    AllocatedWorkerResource allocatedWorkerResource = new AllocatedWorkerResource();
                    allocatedWorkerResource.worker = worker;
                    allocatedWorkerResource.allocatedDiskSlots = workerDiskSlots;
                    if (workerResource.getAvailableMemoryMB() >= memoryMB) {
                        allocatedWorkerResource.allocatedMemoryMB = memoryMB;
                    } else {
                        allocatedWorkerResource.allocatedMemoryMB = workerResource.getAvailableMemoryMB();
                    }
                    workerResource.allocateResource(allocatedWorkerResource.allocatedDiskSlots,
                            allocatedWorkerResource.allocatedMemoryMB);
                    selectedWorkers.add(allocatedWorkerResource);
                    allocatedResources++;
                } else {
                    insufficientWorkers.add(worker.getWorkerId());
                }
            }
        }
    }
    return selectedWorkers;
}
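insufficientWorkers.clear() drives a two-round relaxation: once every live worker has failed the max resource requirement, the failure set is reset and the scan repeats against the min requirement before the loop gives up.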
From source file: de.uni_potsdam.hpi.asg.logictool.mapping.SequenceBasedAndGateDecomposer.java

private void removeSubSequences(SortedSet<IOBehaviour> sequencesFront, SortedSet<IOBehaviour> sequencesBack,
        Set<IOBehaviour> newSequences, Set<IOBehaviour> rmSequences) {
    rmSequences.clear();
    sequencesFront.addAll(newSequences);
    Iterator<IOBehaviour> it = sequencesFront.iterator();
    if (!it.hasNext()) { // TODO: why?
        return;
    }
    IOBehaviour curr = it.next();
    while (it.hasNext()) {
        IOBehaviour next = it.next();
        if (newSequences.contains(curr)) {
            if (curr.getStart().compareTo(next.getStart()) == 0) {
                int i = 0;
                while (true) {
                    if (curr.getSequence().size() == i) {
                        rmSequences.add(curr);
                        break;
                    }
                    //System.out.println(curr.toString() + " vs " + next.toString());
                    int cmpT = curr.getSequence().get(i).compareTo(next.getSequence().get(i));
                    if (cmpT != 0) {
                        break;
                    }
                    // Equal so far, check the next transition.
                    i++;
                }
            }
        }
        curr = next;
    }
    newSequences.removeAll(rmSequences);
    sequencesBack.addAll(newSequences);
    it = sequencesBack.iterator();
    curr = it.next();
    while (it.hasNext()) {
        IOBehaviour next = it.next();
        if (newSequences.contains(curr)) {
            if (curr.getEnd().compareTo(next.getEnd()) == 0) {
                int i = 0;
                while (true) {
                    if (curr.getSequence().size() == i) {
                        rmSequences.add(curr);
                        break;
                    }
                    int cmpT = curr.getSequence().get(curr.getSequence().size() - i - 1)
                            .compareTo(next.getSequence().get(next.getSequence().size() - i - 1));
                    if (cmpT != 0) {
                        break;
                    }
                    // Equal so far, check the next transition.
                    i++;
                }
            }
        }
        curr = next;
    }
}
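rmSequences acts as an out-parameter owned by the caller, so rmSequences.clear() on entry guarantees the method reports only the subsequences removed by this invocation.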