List of usage examples for java.util LinkedHashSet add
boolean add(E e);
From source file:ca.uhn.fhir.jpa.dao.SearchBuilder.java
private void processSort(final SearchParameterMap theParams) { // Set<Long> loadPids = theLoadPids; if (theParams.getSort() != null && isNotBlank(theParams.getSort().getParamName())) { List<Order> orders = new ArrayList<Order>(); List<Predicate> predicates = new ArrayList<Predicate>(); CriteriaBuilder builder = myEntityManager.getCriteriaBuilder(); CriteriaQuery<Tuple> cq = builder.createTupleQuery(); Root<ResourceTable> from = cq.from(ResourceTable.class); createPredicateResourceId(builder, cq, predicates, from.get("myId").as(Long.class)); createSort(builder, from, theParams.getSort(), orders, predicates); if (orders.size() > 0) { // TODO: why do we need the existing list for this join to work? Collection<Long> originalPids = doGetPids(); LinkedHashSet<Long> loadPids = new LinkedHashSet<Long>(); cq.multiselect(from.get("myId").as(Long.class)); cq.where(toArray(predicates)); cq.orderBy(orders);// w w w. j a v a 2 s . c o m TypedQuery<Tuple> query = myEntityManager.createQuery(cq); for (Tuple next : query.getResultList()) { loadPids.add(next.get(0, Long.class)); } ourLog.debug("Sort PID order is now: {}", loadPids); ArrayList<Long> pids = new ArrayList<Long>(loadPids); // Any ressources which weren't matched by the sort get added to the bottom for (Long next : originalPids) { if (loadPids.contains(next) == false) { pids.add(next); } } doSetPids(pids); } } }
From source file:com.sonicle.webtop.core.app.WebTopManager.java
public Set<RoleWithSource> getComputedRolesByUser(UserProfileId pid, boolean self, boolean transitive) throws WTException { WebTopManager usrm = wta.getWebTopManager(); Connection con = null;//from w ww . j av a 2s .co m HashSet<String> roleMap = new HashSet<>(); LinkedHashSet<RoleWithSource> roles = new LinkedHashSet<>(); try { con = WT.getConnection(CoreManifest.ID); String userUid = usrm.userToUid(pid); if (self) { UserDAO usedao = UserDAO.getInstance(); OUser user = usedao.selectByUid(con, userUid); roles.add(new RoleWithSource(RoleWithSource.SOURCE_USER, userUid, user.getDomainId(), pid.getUserId(), user.getDisplayName())); } RoleDAO roldao = RoleDAO.getInstance(); // Gets by group List<ORole> groles = roldao.selectFromGroupsByUser(con, userUid); for (ORole role : groles) { if (roleMap.contains(role.getRoleUid())) continue; // Skip duplicates roleMap.add(role.getRoleUid()); roles.add(new RoleWithSource(RoleWithSource.SOURCE_GROUP, role.getRoleUid(), role.getDomainId(), role.getName(), role.getDescription())); } // Gets direct assigned roles List<ORole> droles = roldao.selectDirectByUser(con, userUid); for (ORole role : droles) { if (roleMap.contains(role.getRoleUid())) continue; // Skip duplicates roleMap.add(role.getRoleUid()); roles.add(new RoleWithSource(RoleWithSource.SOURCE_ROLE, role.getRoleUid(), role.getDomainId(), role.getName(), role.getDescription())); } // Get transivite roles (belonging to groups) if (transitive) { List<ORole> troles = roldao.selectTransitiveFromGroupsByUser(con, userUid); for (ORole role : troles) { if (roleMap.contains(role.getRoleUid())) continue; // Skip duplicates roleMap.add(role.getRoleUid()); roles.add(new RoleWithSource(RoleWithSource.SOURCE_TRANSITIVE, role.getRoleUid(), role.getDomainId(), role.getName(), role.getDescription())); } } } catch (SQLException | DAOException ex) { throw new WTException(ex, "DB error"); } finally { DbUtils.closeQuietly(con); } return roles; }
From source file:edu.internet2.middleware.psp.Psp.java
/** * This method returns all source object identifiers. The map keys are the identifiers, and the map values are the * {@link SchemaEntityRef}s applicable for each identifier. * //from w w w. j a v a 2 s .c om * The identifiers are returned by the attribute resolver via a {@link BulkCalcRequest} whose return data is * "identifier". * * @param bulkProvisioningRequest the bulk provisioning request * @return a possibly empty map consisting of all source identifiers and their corresponding provisioned objects * @throws PspException * @throws AttributeRequestException */ public Map<String, List<SchemaEntityRef>> getAllSourceIdentifiers( BulkProvisioningRequest bulkProvisioningRequest) throws PspException, AttributeRequestException { BulkCalcRequest bulkCalcRequest = new BulkCalcRequest(); bulkCalcRequest.setSchemaEntities(bulkProvisioningRequest.getSchemaEntities()); bulkCalcRequest.setReturnData(ReturnData.IDENTIFIER); bulkCalcRequest.setId(BulkProvisioningRequest.BULK_REQUEST_ID); // provisioning context PspContext pspContext = new PspContext(); pspContext.setProvisioningServiceProvider(this); pspContext.setProvisioningRequest(bulkCalcRequest); // attribute request context BaseSAMLProfileRequestContext attributeRequestContext = new BaseSAMLProfileRequestContext(); attributeRequestContext.setPrincipalName(bulkCalcRequest.getId()); // get targets specified in request before building the context Map<String, List<Pso>> map = getTargetAndObjectDefinitions(bulkCalcRequest); // determine attribute resolver requested attributes LinkedHashSet<String> attributeIds = new LinkedHashSet<String>(); for (String psoTargetDefinition : map.keySet()) { for (Pso psoDefinition : map.get(psoTargetDefinition)) { if (!DatatypeHelper.isEmpty(psoDefinition.getAllSourceIdentifiersRef())) { attributeIds.add(DatatypeHelper.safeTrim(psoDefinition.getAllSourceIdentifiersRef())); } } } attributeRequestContext.setRequestedAttributes(attributeIds); // return null if there are no attribute ids to 
resovle if (attributeIds.isEmpty()) { LOG.debug("PSP '{}' - No source identifier refs are configured."); return null; } // resolve attributes LOG.debug("PSP '{}' - Calc {} Resolving attributes '{}'.", new Object[] { getId(), bulkCalcRequest, attributeIds }); Map<String, BaseAttribute<?>> attributes = getAttributeAuthority().getAttributes(attributeRequestContext); LOG.debug("PSP '{}' - Calc {} Resolved attributes '{}'.", new Object[] { getId(), bulkCalcRequest, attributeIds }); pspContext.setAttributes(attributes); Map<String, List<SchemaEntityRef>> identifierMap = new LinkedHashMap<String, List<SchemaEntityRef>>(); for (String targetId : map.keySet()) { for (Pso psoDefinition : map.get(targetId)) { String allSourceIdentifiersRef = psoDefinition.getAllSourceIdentifiersRef(); if (DatatypeHelper.isEmpty(allSourceIdentifiersRef)) { continue; } BaseAttribute attribute = attributes.get(allSourceIdentifiersRef); if (attribute == null) { LOG.warn("PSP '{}' - Unable to resolve attribute '{}'", getId(), allSourceIdentifiersRef); continue; } for (Object value : attribute.getValues()) { if (value == null) { throw new PspException("TODO null value"); } String id = null; if (value instanceof PSOIdentifier) { id = ((PSOIdentifier) value).getID(); } else { id = value.toString(); } if (!identifierMap.containsKey(id)) { identifierMap.put(id, new ArrayList<SchemaEntityRef>()); } SchemaEntityRef entity = new SchemaEntityRef(); entity.setEntityName(psoDefinition.getId()); entity.setTargetID(targetId); identifierMap.get(id).add(entity); } } } return identifierMap; }
From source file:org.apache.tajo.plan.LogicalPlanner.java
@Override public ScanNode visitRelation(PlanContext context, Stack<Expr> stack, Relation expr) throws TajoException { QueryBlock block = context.queryBlock; ScanNode scanNode = block.getNodeFromExpr(expr); updatePhysicalInfo(context, scanNode.getTableDesc()); // Find expression which can be evaluated at this relation node. // Except for column references, additional expressions used in select list, where clause, order-by clauses // can be evaluated here. Their reference names are kept in newlyEvaluatedExprsRef. Set<String> newlyEvaluatedExprsReferences = new LinkedHashSet<String>(); for (Iterator<NamedExpr> iterator = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); iterator .hasNext();) {//from ww w . j ava 2s . co m NamedExpr rawTarget = iterator.next(); try { EvalNode evalNode = exprAnnotator.createEvalNode(context, rawTarget.getExpr(), NameResolvingMode.RELS_ONLY); if (checkIfBeEvaluatedAtRelation(block, evalNode, scanNode)) { block.namedExprsMgr.markAsEvaluated(rawTarget.getAlias(), evalNode); newlyEvaluatedExprsReferences.add(rawTarget.getAlias()); // newly added exr } } catch (UndefinedColumnException ve) { } } // Assume that each unique expr is evaluated once. LinkedHashSet<Target> targets = createFieldTargetsFromRelation(block, scanNode, newlyEvaluatedExprsReferences); // The fact the some expr is included in newlyEvaluatedExprsReferences means that it is already evaluated. // So, we get a raw expression and then creates a target. for (String reference : newlyEvaluatedExprsReferences) { NamedExpr refrer = block.namedExprsMgr.getNamedExpr(reference); EvalNode evalNode = exprAnnotator.createEvalNode(context, refrer.getExpr(), NameResolvingMode.RELS_ONLY); targets.add(new Target(evalNode, reference)); } scanNode.setTargets(targets.toArray(new Target[targets.size()])); verifyProjectedFields(block, scanNode); return scanNode; }
From source file:com.sonicle.webtop.core.CoreManager.java
public Set<String> listAllowedServices() { LinkedHashSet<String> ids = new LinkedHashSet<>(); UserProfileId targetPid = getTargetProfileId(); ServiceManager svcm = wta.getServiceManager(); for (String id : svcm.listRegisteredServices()) { if (RunContext.isPermitted(true, targetPid, SERVICE_ID, "SERVICE", "ACCESS", id)) ids.add(id); }// ww w .j ava 2 s . c o m return ids; }
From source file:org.dozer.MappingProcessor.java
private Set<?> addToSet(Object srcObj, FieldMap fieldMap, Collection<?> srcCollectionValue, Object destObj) { // create a list here so we can keep track of which elements we have mapped, and remove all others if removeOrphans = true Set<Object> mappedElements = new HashSet<Object>(); LinkedHashSet<Object> result = new LinkedHashSet<Object>(); // don't want to create the set if it already exists. Object field = fieldMap.getDestValue(destObj); if (field != null) { result.addAll((Collection<?>) field); }/* w w w . ja v a 2 s .c om*/ Object destValue; Class<?> destEntryType = null; Class<?> prevDestEntryType = null; for (Object srcValue : srcCollectionValue) { if (destEntryType == null || (fieldMap.getDestHintContainer() != null && fieldMap.getDestHintContainer().hasMoreThanOneHint())) { destEntryType = determineCollectionItemType(fieldMap, destObj, srcValue, prevDestEntryType); } CopyByReferenceContainer copyByReferences = globalConfiguration.getCopyByReferences(); if (srcValue != null && copyByReferences.contains(srcValue.getClass())) { destValue = srcValue; } else { destValue = mapOrRecurseObject(srcObj, srcValue, destEntryType, fieldMap, destObj); } prevDestEntryType = destEntryType; if (RelationshipType.NON_CUMULATIVE.equals(fieldMap.getRelationshipType()) && result.contains(destValue)) { List<Object> resultAsList = new ArrayList<Object>(result); int index = resultAsList.indexOf(destValue); // perform an update if complex type - can't map strings Object obj = resultAsList.get(index); // make sure it is not a String if (!obj.getClass().isAssignableFrom(String.class)) { mapToDestObject(null, srcValue, obj, false, null); mappedElements.add(obj); } } else { if (destValue != null || fieldMap.isDestMapNull()) { result.add(destValue); } mappedElements.add(destValue); } } // If remove orphans - we only want to keep the objects we've mapped from the src collection // so we'll clear result and replace all entries with the ones in mappedElements if 
(fieldMap.isRemoveOrphans()) { result.clear(); result.addAll(mappedElements); } if (field == null) { Class<? extends Set<?>> destSetType = (Class<? extends Set<?>>) fieldMap .getDestFieldType(destObj.getClass()); return CollectionUtils.createNewSet(destSetType, result); } else { // Bug #1822421 - Clear first so we don't end up with the removed orphans again ((Set) field).clear(); ((Set) field).addAll(result); return (Set<?>) field; } }
From source file:org.tdar.core.service.ReflectionService.java
/** * Find @link BulkImportField on a class. * //from w w w . j a va 2s. c o m * @param class2 * @param stack * @param annotationToFind * @param runAs * @param runAsField * @param prefix * @return */ private LinkedHashSet<CellMetadata> handleClassAnnotations(Class<?> class2, Stack<List<Class<?>>> stack, Class<BulkImportField> annotationToFind, Class<?> runAs, Field runAsField, String prefix) { LinkedHashSet<CellMetadata> set = new LinkedHashSet<>(); for (Field field : class2.getDeclaredFields()) { BulkImportField annotation = field.getAnnotation(annotationToFind); if (prefix == null) { prefix = ""; } if (annotation != null) { String fieldPrefix = prefix; if (StringUtils.isNotBlank(annotation.key())) { fieldPrefix = CellMetadata.getDisplayLabel(MessageHelper.getInstance(), annotation.key()); } Class<?> type = field.getType(); if (Objects.equals(field, runAsField)) { type = runAs; logger.trace(" ** overriding type with {}", type.getSimpleName()); } if (Collection.class.isAssignableFrom(type)) // handle Collection private List<ResourceCreator> ... { ParameterizedType stringListType = (ParameterizedType) field.getGenericType(); Class<?> cls = (Class<?>) stringListType.getActualTypeArguments()[0]; set.addAll(findBulkAnnotationsOnClass(cls, stack, fieldPrefix)); } // handle Singleton private Person owner ... else if (Persistable.class.isAssignableFrom(type)) { set.addAll(findBulkAnnotationsOnClass(type, stack, fieldPrefix)); } // handle more primative fields private String ... else { logger.trace("adding {} ({})", field, stack); if (!TdarConfiguration.getInstance().getCopyrightMandatory() && Objects.equals(annotation.key(), InformationResource.COPYRIGHT_HOLDER)) { continue; } if ((TdarConfiguration.getInstance().getLicenseEnabled() == false) && (Objects.equals(field.getName(), "licenseType") || Objects.equals(field.getName(), "licenseText"))) { continue; } set.add(new CellMetadata(field, annotation, class2, stack, prefix)); // set.add(field); } } } return set; }
From source file:net.rim.ejde.internal.packaging.PackagingJob.java
/**
 * Builds, packages, post-processes and (depending on the signing flag) code-signs each
 * BlackBerry project in build order, reporting progress on the supplied monitor.
 * Packaging failures are recorded as problem markers rather than aborting the job.
 *
 * @param monitor progress monitor; cancellation is honored between projects
 * @throws CoreException on marker-cleanup failure
 */
@Override
public void run(IProgressMonitor monitor) throws CoreException {
    // remove the code signing error
    ResourceBuilderUtils.cleanProblemMarkers(ResourcesPlugin.getWorkspace().getRoot(),
            new String[] { IRIMMarker.SIGNATURE_TOOL_PROBLEM_MARKER }, IResource.DEPTH_ONE);
    // open the packaging console
    PackagingConsole.getInstance().activate();
    LinkedHashSet<BlackBerryProject> projectSet = ProjectUtils.getProjectsByBuildOrder(_projects);
    monitor.beginTask(IConstants.EMPTY_STRING, projectSet.size() * 10);
    monitor.subTask(Messages.PackagingJob_Name);
    boolean needSign = false;
    // collect projects which need to be signed
    LinkedHashSet<BlackBerryProject> projectsNeedSigning = new LinkedHashSet<BlackBerryProject>();
    // collect projects whose dependent projects need to be signed
    LinkedHashSet<BlackBerryProject> projectsDependencyNeedSigning = new LinkedHashSet<BlackBerryProject>();
    // collect projects which are packaged successfully
    LinkedHashSet<BlackBerryProject> succesfullyPackagedProjects = new LinkedHashSet<BlackBerryProject>();
    for (BlackBerryProject bbProject : projectSet) {
        // 1. run java build on the project (only needed when auto-build is off)
        if (!isBuildAutomaticallyOn()) {
            try {
                bbProject.getProject().build(IncrementalProjectBuilder.AUTO_BUILD,
                        new SubProgressMonitor(monitor, 1));
            } catch (CoreException e) {
                // build failures are logged but do not stop the packaging pass
                _log.error(e);
            }
        }
        monitor.worked(3);
        // 2. package the project
        if (!needPackaging(bbProject)) {
            if (needGenerateALXFile(bbProject)) {
                PackagingManager.generateALXForProject(bbProject);
            }
        } else {
            // remove the package problems
            ResourceBuilderUtils.cleanProblemMarkers(bbProject.getProject(),
                    new String[] { IRIMMarker.PACKAGING_PROBLEM }, IResource.DEPTH_INFINITE);
            try {
                PackagingManager.packageProject(bbProject);
                if (!needSign) {
                    needSign = true;
                }
            } catch (CoreException e) {
                _log.error(e.getMessage());
                try {
                    // attach the marker to the metadata file for folder-creation errors,
                    // otherwise to the project itself
                    ResourceBuilderUtils.createProblemMarker(
                            e.getStatus().getCode() == DiagnosticFactory.CREATE_FOLDER_ERR_ID
                                    ? bbProject.getMetaFileHandler()
                                    : bbProject.getProject(),
                            IRIMMarker.PACKAGING_PROBLEM, e.getMessage(), -1, IMarker.SEVERITY_ERROR);
                } catch (Exception e1) {
                    _log.error(e1.getMessage());
                }
            }
            PackagingJob.setBuiltByJavaBuilders(bbProject.getProject(), false);
        }
        monitor.worked(4);
        // 3. run post-build command
        runPostBuild(bbProject);
        monitor.worked(1);
        // 4. check if the project needs to be signed or not
        if (!hasPackagingProblems(bbProject.getProject())) {
            succesfullyPackagedProjects.add(bbProject);
            if (PackagingUtils.isSigningNeeded(bbProject)) {
                projectsNeedSigning.add(bbProject);
            } else {
                if (PackagingUtils.isSigningNeededForDependency(bbProject)) {
                    projectsDependencyNeedSigning.add(bbProject);
                } else {
                    // if a project and its dependent projects do not need to be signed,
                    // copy the cod files to the web folder:
                    // copy the cod files of dependency projects to the deployment folders
                    copyDependencyDeploymentFiles(bbProject);
                    // copy files from "Standard" to "Web"
                    copyToWebDeploymentFolder(bbProject);
                }
            }
        }
        monitor.worked(2);
        if (monitor.isCanceled()) {
            monitor.done();
            return;
        }
    }
    // Code signing
    switch (_signingFlag) {
    case SIGN_FORCE: {
        // sign everything that packaged successfully, regardless of API usage
        if (!succesfullyPackagedProjects.isEmpty()) {
            signCodFile(succesfullyPackagedProjects, monitor);
        }
        break;
    }
    case SIGN_IF_PROTECTED_API_USED: {
        if (!projectsNeedSigning.isEmpty()) {
            signCodFile(projectsNeedSigning, monitor);
            for (BlackBerryProject project : projectsDependencyNeedSigning) {
                // copy the cod files of dependency projects to the deployment folders
                copyDependencyDeploymentFiles(project);
                // copy files from "Standard" to "Web"
                copyToWebDeploymentFolder(project);
            }
        }
        break;
    }
    case SIGN_IF_NECESSARY: {
        // only sign when at least one project was actually (re)packaged
        if (needSign) {
            if (!projectsNeedSigning.isEmpty()) {
                signCodFile(projectsNeedSigning, monitor);
                for (BlackBerryProject project : projectsDependencyNeedSigning) {
                    // copy the cod files of dependency projects to the deployment folders
                    copyDependencyDeploymentFiles(project);
                    // copy files from "Standard" to "Web"
                    copyToWebDeploymentFolder(project);
                }
            }
        }
        break;
    }
    }
    monitor.done();
    return;
}
From source file:org.ala.dao.FulltextSearchDaoImplSolr.java
/**
 * if word highlight enabled then do the exact match, otherwise do the concat match
 *
 * @param names the candidate names to highlight (may be null)
 * @param term the search term to match within each name
 * @param prefix text inserted before each match (e.g. an opening tag)
 * @param suffix text inserted after each match (e.g. a closing tag)
 * @return the sorted, highlighted names; empty when nothing matched; null when names is null
 */
private List<String> getHighlightedNames(List<String> names, String term, String prefix, String suffix) {
    LinkedHashSet<String> hlnames = null;
    List<String> lnames = null;
    String value = null;
    boolean isHighlight = false;
    // have word highlight: only when both prefix and suffix are non-blank and a term is given
    if (prefix != null && suffix != null && prefix.trim().length() > 0 && suffix.trim().length() > 0
            && term != null) {
        value = SolrUtils.cleanName(term.trim());
        isHighlight = true;
    } else {
        value = SolrUtils.concatName(term);
    }
    // NOTE(review): 'value' comes from user input and is compiled as a regex — if
    // cleanName/concatName do not strip regex metacharacters this can throw
    // PatternSyntaxException; confirm against SolrUtils.
    Pattern p = Pattern.compile(value, Pattern.CASE_INSENSITIVE);
    // matcher is created against a placeholder; it is always reset(name1) before use below
    java.util.regex.Matcher m = p.matcher(value);
    if (names != null) {
        hlnames = new LinkedHashSet<String>();
        for (String name : names) {
            String name1 = null;
            name = name.trim();
            if (isHighlight) {
                name1 = name;
            } else {
                name1 = SolrUtils.concatName(name);
            }
            m.reset(name1);
            if (m.find()) {
                // insert prefix and suffix at the match's start and end index
                // NOTE(review): in the non-highlight branch the match offsets come from
                // name1 (the concat form) but are applied to name — assumes concatName
                // preserves character positions; verify against SolrUtils.concatName.
                name = name.substring(0, m.start()) + prefix + name.substring(m.start(), m.end()) + suffix
                        + name.substring(m.end(), name.length());
                hlnames.add(name);
            }
        }
        if (!hlnames.isEmpty()) {
            lnames = new ArrayList<String>(hlnames);
            Collections.sort(lnames);
        } else {
            lnames = new ArrayList<String>();
        }
    }
    return lnames;
}
From source file:com.github.dozermapper.core.MappingProcessor.java
/**
 * Maps a source collection into a destination Set field, honoring hints,
 * copy-by-reference configuration, NON_CUMULATIVE relationship updates and orphan
 * removal. When the destination field already holds a Set it is updated in place;
 * otherwise a new Set of the declared destination type is created.
 *
 * @param srcObj the source object owning the collection
 * @param fieldMap the field mapping being applied
 * @param srcCollectionValue the source collection values to map
 * @param destObj the destination object
 * @return the populated destination set (the existing field instance when present)
 */
private Set<?> addToSet(Object srcObj, FieldMap fieldMap, Collection<?> srcCollectionValue, Object destObj) {
    // create a list here so we can keep track of which elements we have mapped, and remove all others if removeOrphans = true
    Set<Object> mappedElements = new HashSet<>();
    LinkedHashSet<Object> result = new LinkedHashSet<>();
    // don't want to create the set if it already exists.
    Object field = fieldMap.getDestValue(destObj);
    if (field != null) {
        result.addAll((Collection<?>) field);
    }
    Object destValue;
    Class<?> destEntryType = null;
    Class<?> prevDestEntryType = null;
    for (Object srcValue : srcCollectionValue) {
        // re-determine the item type when hints allow more than one destination type
        if (destEntryType == null
                || (fieldMap.getDestHintContainer() != null && fieldMap.getDestHintContainer().hasMoreThanOneHint())) {
            destEntryType = determineCollectionItemType(fieldMap, destObj, srcValue, prevDestEntryType);
        }
        CopyByReferenceContainer copyByReferences = globalConfiguration.getCopyByReferences();
        if (srcValue != null && copyByReferences.contains(srcValue.getClass())) {
            // configured copy-by-reference types are passed through unmapped
            destValue = srcValue;
        } else {
            destValue = mapOrRecurseObject(srcObj, srcValue, destEntryType, fieldMap, destObj);
        }
        prevDestEntryType = destEntryType;
        if (RelationshipType.NON_CUMULATIVE.equals(fieldMap.getRelationshipType())
                && result.contains(destValue)) {
            List<Object> resultAsList = new ArrayList<>(result);
            int index = resultAsList.indexOf(destValue);
            // perform an update if complex type - can't map strings
            Object obj = resultAsList.get(index);
            // make sure it is not a String
            if (!obj.getClass().isAssignableFrom(String.class)) {
                mapToDestObject(null, srcValue, obj, false, fieldMap.getMapId());
                mappedElements.add(obj);
            }
        } else {
            if (destValue != null || fieldMap.isDestMapNull()) {
                result.add(destValue);
            }
            mappedElements.add(destValue);
        }
    }
    // If remove orphans - we only want to keep the objects we've mapped from the src collection
    // so we'll clear result and replace all entries with the ones in mappedElements
    if (fieldMap.isRemoveOrphans()) {
        result.clear();
        result.addAll(mappedElements);
    }
    if (field == null) {
        Class<? extends Set<?>> destSetType = (Class<? extends Set<?>>) fieldMap
                .getDestFieldType(destObj.getClass());
        return CollectionUtils.createNewSet(destSetType, result);
    } else {
        // Bug #1822421 - Clear first so we don't end up with the removed orphans again
        ((Set) field).clear();
        ((Set) field).addAll(result);
        return (Set<?>) field;
    }
}