Usage examples for java.util.LinkedHashSet#add
boolean add(E e);
From source file:org.apache.tajo.plan.LogicalPlanner.java
private void setTargetOfTableSubQuery(PlanContext context, QueryBlock block, TableSubQueryNode subQueryNode) throws TajoException { // Add additional expressions required in upper nodes. Set<String> newlyEvaluatedExprs = TUtil.newHashSet(); for (NamedExpr rawTarget : block.namedExprsMgr.getAllNamedExprs()) { try {//from www . j a v a2 s. c om EvalNode evalNode = exprAnnotator.createEvalNode(context, rawTarget.getExpr(), NameResolvingMode.RELS_ONLY); if (checkIfBeEvaluatedAtRelation(block, evalNode, subQueryNode)) { block.namedExprsMgr.markAsEvaluated(rawTarget.getAlias(), evalNode); newlyEvaluatedExprs.add(rawTarget.getAlias()); // newly added exr } } catch (UndefinedColumnException ve) { } } // Assume that each unique expr is evaluated once. LinkedHashSet<Target> targets = createFieldTargetsFromRelation(block, subQueryNode, newlyEvaluatedExprs); for (String newAddedExpr : newlyEvaluatedExprs) { targets.add(block.namedExprsMgr.getTarget(newAddedExpr, true)); } subQueryNode.setTargets(targets.toArray(new Target[targets.size()])); }
From source file:org.osaf.cosmo.mc.StandardMorseCodeController.java
/**
 * Translates a stream of EIM record sets into the set of content items they describe,
 * creating child items under {@code collection} as needed.
 * <p>
 * Insertion order of the returned set follows the order of the incoming record sets.
 *
 * @param i          iterator over incoming EIM record sets
 * @param collection the collection the records belong to
 * @return the (ordered) set of items the record sets were applied to
 * @throws ValidationException if a record set targets a non-content item, deletes a
 *                             nonexistent child, or carries invalid EIM data
 * @throws MorseCodeException  on any other EIM translation failure
 */
private Set<Item> recordsToItems(EimRecordSetIterator i, CollectionItem collection) {
    // All child items of the collection (both current and new), indexed by UID.
    HashMap<String, Item> allChildrenByUid = new HashMap<String, Item>();
    LinkedHashSet<Item> children = new LinkedHashSet<Item>();

    // Index all existing children.
    for (Item child : collection.getChildren())
        allChildrenByUid.put(child.getUid(), child);
    try {
        while (i.hasNext()) {
            EimRecordSet recordset = i.next();
            try {
                Item item = contentService.findItemByUid(recordset.getUuid());
                // Records may only ever target content items.
                if (item != null && !(item instanceof ContentItem))
                    throw new ValidationException(recordset.getUuid(),
                            "Child item " + recordset.getUuid() + " is not a content item");
                ContentItem child = (ContentItem) item;
                // Handle the case where the item is a NoteOccurrence, in which case
                // a new modification NoteItem needs to be created (unless the record
                // set is a deletion, which is meaningless for an occurrence).
                if (child instanceof NoteOccurrence) {
                    if (recordset.isDeleted() == false)
                        child = createChildItem((NoteOccurrence) child, collection, recordset, allChildrenByUid);
                    else
                        child = null;
                }
                // Handle the case where the record set is a deletion but the target
                // item does not exist.
                if (child == null && recordset.isDeleted() == true)
                    throw new ValidationException(recordset.getUuid(), "Tried to delete child item "
                            + recordset.getUuid() + " , but it does not exist");
                // Handle the case where the item doesn't exist yet: create a new one.
                if (child == null)
                    child = createChildItem(collection, recordset, allChildrenByUid);
                children.add(child);
                // Apply the record set's data to the (possibly new) child.
                new ItemTranslator(child).applyRecords(recordset);
            } catch (EimValidationException e) {
                // Surface invalid EIM data as a per-record validation failure.
                throw new ValidationException(recordset.getUuid(), "could not apply EIM recordset "
                        + recordset.getUuid() + " due to invalid data", e);
            }
        }
    } catch (EimException e) {
        throw new MorseCodeException("unknown EIM translation problem", e);
    }
    return children;
}
From source file:org.alfresco.repo.security.permissions.impl.IntersectPermissionServiceImpl.java
/** * Key for a cache object is built from all the known Authorities (which can change dynamically so they must all be * used) the NodeRef ID and the permission reference itself. This gives a unique key for each permission test. *///from w w w . j a v a 2 s. co m Serializable generateKey(Set<String> auths, NodeRef nodeRef, PermissionReference perm, CacheType type) { LinkedHashSet<Serializable> key = new LinkedHashSet<Serializable>(); key.add(perm.toString()); // We will just have to key our dynamic sets by username. We wrap it so as not to be confused with a static set if (auths instanceof AuthorityServiceImpl.UserAuthoritySet) { key.add((Serializable) Collections .singleton(((AuthorityServiceImpl.UserAuthoritySet) auths).getUsername())); } else { key.addAll(auths); } key.add(nodeRef); // Ensure some concept of node version or transaction is included in the key so we can track without cache replication NodeRef.Status nodeStatus = nodeService.getNodeStatus(nodeRef); key.add(nodeStatus == null ? "null" : nodeStatus.getChangeTxnId()); key.add(type); return key; }
From source file:org.apache.olio.workload.driver.UIDriver.java
public Set<String> parseImages(StringBuilder buffer) { LinkedHashSet<String> urlSet = new LinkedHashSet<String>(); //String elStart = "<img "; String elStart = "background: "; String attrStart = " url("; int elStartLen = elStart.length() - 1; // Don't include the trailing space int attrStartLen = attrStart.length(); int idx = 0;/* w ww. j a va 2s . c o m*/ logger.finest("Parsing images from buffer"); for (;;) { // Find and copy out the element. idx = buffer.indexOf(elStart, idx); if (idx == -1) { break; } idx += elStartLen; int endIdx = buffer.indexOf(")", idx) + 1; // +1 to include the '(' if (endIdx == -1) { break; } String elText = buffer.substring(idx, endIdx); logger.finest(elText); idx = endIdx + 1; // Find the attribute int idx2 = elText.indexOf(attrStart); if (idx2 == -1) { logger.finer("No img src attribute. Weird! " + elText); continue; } endIdx = elText.indexOf(")", idx2 + attrStartLen); if (endIdx == -1) { logger.warning("No ) attribute ending. Weird! " + elText); continue; } String link = elText.substring(idx2 + attrStartLen, endIdx); if (link.startsWith("/uploaded_files")) { String url = baseURL + link; logger.finer("Adding " + url + " from idx " + idx); urlSet.add(url); } } return urlSet; }
From source file:org.wrml.runtime.schema.generator.SchemaGenerator.java
public Choices generateChoices(final Class<?> nativeChoicesEnumClass) { if (nativeChoicesEnumClass == null || !nativeChoicesEnumClass.isEnum()) { return null; }// w w w.j a v a2s. c o m final SchemaLoader schemaLoader = getSchemaLoader(); final Choices choices = getContext().newModel(schemaLoader.getChoicesSchemaUri()); final URI choicesUri = schemaLoader.getTypeUri(nativeChoicesEnumClass); choices.setUri(choicesUri); final UniqueName choicesUniqueName = schemaLoader.getTypeUniqueName(choicesUri); choices.setUniqueName(choicesUniqueName); final Object[] enumConstants = nativeChoicesEnumClass.getEnumConstants(); if (enumConstants != null && enumConstants.length > 0) { final LinkedHashSet choiceSet = new LinkedHashSet(enumConstants.length); for (final Object enumConstant : enumConstants) { final String choice = String.valueOf(enumConstant); choiceSet.add(enumConstant); } choices.getList().addAll(choiceSet); } return choices; }
From source file:org.apache.tajo.engine.planner.LogicalPlanner.java
/**
 * Plans a relation (table scan) node.
 * <p>
 * Besides plain column references, additional expressions used in the select list,
 * where clause, and order-by clauses may be evaluable directly at this relation; such
 * expressions are marked evaluated and their reference names collected, then turned
 * into scan-node targets.
 *
 * @param context the planning context
 * @param stack   the expression stack of the current traversal
 * @param expr    the relation expression being visited
 * @return the planned scan node with its targets set
 * @throws PlanningException if planning or projection verification fails
 */
@Override
public ScanNode visitRelation(PlanContext context, Stack<Expr> stack, Relation expr) throws PlanningException {
    QueryBlock block = context.queryBlock;

    ScanNode scanNode = block.getNodeFromExpr(expr);
    updatePhysicalInfo(scanNode.getTableDesc());

    // Find expressions which can be evaluated at this relation node. Except for
    // column references, additional expressions used in the select list, where
    // clause, and order-by clauses can be evaluated here. Their reference names
    // are kept in newlyEvaluatedExprsReferences.
    Set<String> newlyEvaluatedExprsReferences = new LinkedHashSet<String>();
    for (Iterator<NamedExpr> iterator = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); iterator
            .hasNext();) {
        NamedExpr rawTarget = iterator.next();
        try {
            EvalNode evalNode = exprAnnotator.createEvalNode(context, rawTarget.getExpr(),
                    NameResolvingMode.RELS_ONLY);
            if (checkIfBeEvaluatedAtRelation(block, evalNode, scanNode)) {
                block.namedExprsMgr.markAsEvaluated(rawTarget.getAlias(), evalNode);
                newlyEvaluatedExprsReferences.add(rawTarget.getAlias()); // newly added expression
            }
        } catch (VerifyException ve) {
            // Not resolvable against this relation only; an upper node will handle it.
        }
    }

    // Assume that each unique expression is evaluated once.
    LinkedHashSet<Target> targets = createFieldTargetsFromRelation(block, scanNode,
            newlyEvaluatedExprsReferences);

    // The fact that some expression is included in newlyEvaluatedExprsReferences means
    // it is already evaluated. So we fetch the raw expression and create a target.
    for (String reference : newlyEvaluatedExprsReferences) {
        NamedExpr refrer = block.namedExprsMgr.getNamedExpr(reference);
        EvalNode evalNode = exprAnnotator.createEvalNode(context, refrer.getExpr(), NameResolvingMode.RELS_ONLY);
        targets.add(new Target(evalNode, reference));
    }

    scanNode.setTargets(targets.toArray(new Target[targets.size()]));

    verifyProjectedFields(block, scanNode);
    return scanNode;
}
From source file:net.sf.taverna.t2.security.credentialmanager.impl.CredentialManagerImpl.java
/**
 * Produces the ordered list of candidate URIs under which credentials for
 * {@code serviceURI} may have been stored.
 * <p>
 * The URI is normalized and stripped of user-info first. With path recursion enabled,
 * each ancestor path (query removed) up to the root is added, each both with and — at
 * the end of the list — without the fragment (the fragment conventionally carries the
 * realm).
 *
 * @param serviceURI       the service URI to look up
 * @param usePathRecursion whether to also try ancestor paths of the URI
 * @return an insertion-ordered set of URIs to try, most specific first
 */
protected LinkedHashSet<URI> getPossibleServiceURIsToLookup(URI serviceURI, boolean usePathRecursion) {
    try {
        serviceURI = serviceURI.normalize();
        // User-info must not participate in lookups.
        serviceURI = dnParser.setUserInfoForURI(serviceURI, null);
    } catch (URISyntaxException ex) {
        logger.warn("Could not strip userinfo from " + serviceURI, ex);
    }

    /*
     * We'll use a LinkedHashSet to avoid checking for duplicates, like if
     * serviceURI.equals(withoutQuery). Only the first hit should be added to
     * the set.
     */
    LinkedHashSet<URI> possibles = new LinkedHashSet<URI>();

    possibles.add(serviceURI);
    if (!usePathRecursion || !serviceURI.isAbsolute())
        return possibles;

    /*
     * We'll preserve the fragment, as it is used to indicate the realm.
     */
    String rawFragment = serviceURI.getRawFragment();
    if (rawFragment == null)
        rawFragment = "";
    URI withoutQuery = serviceURI.resolve(serviceURI.getRawPath());
    addFragmentedURI(possibles, withoutQuery, rawFragment);

    // Immediate parent
    URI parent = withoutQuery.resolve(".");
    addFragmentedURI(possibles, parent, rawFragment);
    URI oldParent = null;
    // Top parent (to be added later)
    URI root = parent.resolve("/");
    while (!parent.equals(oldParent) && !parent.equals(root) && parent.getPath().length() > 0) {
        // Intermediate parents, but not for "http://bla.org" as we would
        // find "http://bla.org.."
        oldParent = parent;
        parent = parent.resolve("..");
        addFragmentedURI(possibles, parent, rawFragment);
    }
    // In case the while-loop did not do so, also include the root.
    addFragmentedURI(possibles, root, rawFragment);
    if (rawFragment.length() > 0)
        // Add the non-fragment versions at the bottom of the list.
        for (URI withFragment : new ArrayList<>(possibles))
            try {
                possibles.add(dnParser.setFragmentForURI(withFragment, null));
            } catch (URISyntaxException e) {
                logger.warn("Could not non-fragment URI " + withFragment);
            }
    return possibles;
}
From source file:ca.uhn.fhir.jpa.dao.SearchBuilder.java
private void doSetPids(Collection<Long> thePids) { if (myParams.isPersistResults()) { if (mySearchEntity.getTotalCount() != null) { reinitializeSearch();//w ww. ja v a2s. c o m } LinkedHashSet<SearchResult> results = new LinkedHashSet<SearchResult>(); int index = 0; for (Long next : thePids) { SearchResult nextResult = new SearchResult(mySearchEntity); nextResult.setResourcePid(next); nextResult.setOrder(index); results.add(nextResult); index++; } mySearchResultDao.save(results); mySearchEntity.setTotalCount(results.size()); mySearchEntity = myEntityManager.merge(mySearchEntity); myEntityManager.flush(); } else { myPids = thePids; } }
From source file:com.redhat.rcm.version.mgr.VersionManager.java
/**
 * Loads a POM and all of its modules into {@code Project} instances.
 * <p>
 * The hierarchy under {@code topPom} is peeked first; each POM is then read as a raw
 * model, and — if the session requests effective POMs — additionally built into an
 * effective model. POMs that fail to parse or build are reported to the session as
 * errors and skipped rather than aborting the whole load.
 *
 * @param topPom  the top-level POM file
 * @param session the version-manager session (collects errors, supplies options)
 * @return the insertion-ordered set of successfully loaded projects
 * @throws ProjectToolsException if peeking at the POM hierarchy fails
 * @throws IOException           on I/O failure while walking the hierarchy
 */
protected LinkedHashSet<Project> loadProjectWithModules(final File topPom, final VersionManagerSession session)
        throws ProjectToolsException, IOException {
    final List<PomPeek> peeked = peekAtPomHierarchy(topPom, session);

    final LinkedHashSet<Project> projects = new LinkedHashSet<Project>();
    for (final PomPeek peek : peeked) {
        final File pom = peek.getPom();

        // We have to brute-force read the raw model here. The effective-model
        // building below has a tantalizing getRawModel() method on its result,
        // BUT that seems to return models that have plugin versions set inside
        // profiles... so they are not entirely raw.
        Model raw = null;
        InputStream in = null;
        try {
            in = new FileInputStream(pom);
            raw = new MavenXpp3Reader().read(in);
        } catch (final IOException e) {
            session.addError(
                    new VManException("Failed to build model for POM: %s.\n--> %s", e, pom, e.getMessage()));
        } catch (final XmlPullParserException e) {
            session.addError(
                    new VManException("Failed to build model for POM: %s.\n--> %s", e, pom, e.getMessage()));
        } finally {
            closeQuietly(in);
        }
        if (raw == null) {
            // Parsing failed (already reported); skip this POM.
            continue;
        }

        final Project project;
        if (session.isUseEffectivePoms()) {
            // FIXME: Need an option to disable this for self-contained use cases...
            // Is this the same as 'non-strict' mode??
            final ModelBuildingRequest req = newModelBuildingRequest(pom, session);
            ModelBuildingResult mbResult = null;
            try {
                mbResult = modelBuilder.build(req);
            } catch (final ModelBuildingException e) {
                session.addError(new VManException("Failed to build model for POM: %s.\n--> %s", e, pom,
                        e.getMessage()));
            }
            if (mbResult == null) {
                // Effective-model build failed (already reported); skip this POM.
                continue;
            }
            project = new Project(raw, mbResult, pom);
        } else {
            project = new Project(pom, raw);
        }
        projects.add(project);
    }
    return projects;
}
From source file:net.sf.taverna.t2.security.credentialmanager.impl.CredentialManagerImpl.java
/**
 * Adds {@code uri} to the candidate set, re-attaching {@code rawFragment} (the realm
 * marker) when one is present.
 *
 * @param possibles   the ordered set of candidate URIs being accumulated
 * @param uri         the URI to add
 * @param rawFragment the raw fragment to attach; ignored when null or empty
 */
public void addFragmentedURI(LinkedHashSet<URI> possibles, URI uri, String rawFragment) {
    URI candidate = uri;
    boolean hasFragment = rawFragment != null && !rawFragment.isEmpty();
    if (hasFragment) {
        candidate = uri.resolve("#" + rawFragment);
    }
    possibles.add(candidate);
}