List of usage examples for java.util.LinkedHashSet.addAll
boolean addAll(Collection<? extends E> c);
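Before the real-world examples below, a minimal self-contained sketch of the basic contract (class and variable names are illustrative): addAll appends only elements not already present, preserves encounter order, and returns true if the set changed.

import java.util.Arrays;
import java.util.LinkedHashSet;

public class LinkedHashSetAddAllDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> set = new LinkedHashSet<>(Arrays.asList("a", "b"));
        // "b" is a duplicate and is ignored; "c" and "d" keep their encounter order
        boolean changed = set.addAll(Arrays.asList("b", "c", "d"));
        System.out.println(changed); // true, because "c" and "d" were added
        System.out.println(set);     // [a, b, c, d]
    }
}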
From source file:org.apache.ws.scout.registry.BusinessLifeCycleManagerV3Impl.java
protected BulkResponse deleteOperation(Collection<Key> keys, String op) throws JAXRException {
    if (keys == null)
        throw new JAXRException("Keys provided to " + op + " are null");

    // Now we need to convert the collection into a vector for juddi
    BulkResponseImpl bulk = new BulkResponseImpl();
    String[] keyarr = new String[keys.size()];
    Result[] keyResultArr;
    LinkedHashSet<Key> coll = new LinkedHashSet<Key>();
    Collection<Exception> exceptions = new ArrayList<Exception>();
    try {
        Iterator iter = keys.iterator();
        int currLoc = 0;
        while (iter.hasNext()) {
            Key key = (Key) iter.next();
            keyarr[currLoc] = key.getId();
            currLoc++;
        }
        // Delete operation
        DispositionReport bd = (DispositionReport) executeOperation(keyarr, op);
        List<Result> resultList = bd.getResult();
        keyResultArr = new Result[resultList.size()];
        resultList.toArray(keyResultArr);
        // Parenthesize the ternary: without the parentheses, string concatenation
        // binds first and the length is never logged.
        log.debug("After deleting Business. Obtained vector size:"
                + (keyResultArr != null ? keyResultArr.length : 0));
        for (int i = 0; keyResultArr != null && i < keyResultArr.length; i++) {
            Result result = keyResultArr[i];
            int errno = result.getErrno();
            if (errno == 0) {
                coll.addAll(keys);
            } else {
                ErrInfo errinfo = result.getErrInfo();
                DeleteException de = new DeleteException(errinfo.getErrCode() + ":" + errinfo.getValue());
                bulk.setStatus(JAXRResponse.STATUS_FAILURE);
                exceptions.add(de);
            }
        }
    } catch (RegistryV3Exception regExcept) {
        /*
         * jUDDI (and probably others) throw an exception on any fault in
         * the transaction with the registry, so we don't get any partial
         * success
         */
        DeleteException de = new DeleteException(
                regExcept.getFaultCode() + ":" + regExcept.getFaultString(), regExcept);
        bulk.setStatus(JAXRResponse.STATUS_FAILURE);
        exceptions.add(de);
    } catch (JAXRException tran) {
        exceptions.add(new JAXRException("Apache JAXR Impl:", tran));
        bulk.setStatus(JAXRResponse.STATUS_FAILURE);
    }
    bulk.setCollection(coll);
    bulk.setExceptions(exceptions);
    return bulk;
}
From source file:org.tdar.core.service.ReflectionService.java
/**
 * Finds {@link BulkImportField} annotations on a class.
 *
 * @param class2
 * @param stack
 * @param annotationToFind
 * @param runAs
 * @param runAsField
 * @param prefix
 * @return
 */
private LinkedHashSet<CellMetadata> handleClassAnnotations(Class<?> class2, Stack<List<Class<?>>> stack,
        Class<BulkImportField> annotationToFind, Class<?> runAs, Field runAsField, String prefix) {
    LinkedHashSet<CellMetadata> set = new LinkedHashSet<>();
    for (Field field : class2.getDeclaredFields()) {
        BulkImportField annotation = field.getAnnotation(annotationToFind);
        if (prefix == null) {
            prefix = "";
        }
        if (annotation != null) {
            String fieldPrefix = prefix;
            if (StringUtils.isNotBlank(annotation.key())) {
                fieldPrefix = CellMetadata.getDisplayLabel(MessageHelper.getInstance(), annotation.key());
            }
            Class<?> type = field.getType();
            if (Objects.equals(field, runAsField)) {
                type = runAs;
                logger.trace(" ** overriding type with {}", type.getSimpleName());
            }
            if (Collection.class.isAssignableFrom(type)) {
                // handle collection fields, e.g. private List<ResourceCreator> ...
                ParameterizedType stringListType = (ParameterizedType) field.getGenericType();
                Class<?> cls = (Class<?>) stringListType.getActualTypeArguments()[0];
                set.addAll(findBulkAnnotationsOnClass(cls, stack, fieldPrefix));
            } else if (Persistable.class.isAssignableFrom(type)) {
                // handle singleton fields, e.g. private Person owner ...
                set.addAll(findBulkAnnotationsOnClass(type, stack, fieldPrefix));
            } else {
                // handle more primitive fields, e.g. private String ...
                logger.trace("adding {} ({})", field, stack);
                if (!TdarConfiguration.getInstance().getCopyrightMandatory()
                        && Objects.equals(annotation.key(), InformationResource.COPYRIGHT_HOLDER)) {
                    continue;
                }
                if (!TdarConfiguration.getInstance().getLicenseEnabled()
                        && (Objects.equals(field.getName(), "licenseType")
                                || Objects.equals(field.getName(), "licenseText"))) {
                    continue;
                }
                set.add(new CellMetadata(field, annotation, class2, stack, prefix));
            }
        }
    }
    return set;
}
From source file:org.apache.tajo.engine.planner.global.GlobalPlanner.java
/**
 * If there is at least one distinct aggregation function, the query works as if it were rewritten.
 *
 * <h2>Original query</h2>
 * <pre>
 * SELECT
 *   grp1, grp2, count(*) as total, count(distinct grp3) as distinct_col
 * from
 *   rel1
 * group by
 *   grp1, grp2;
 * </pre>
 *
 * The query will work as if it were rewritten into two queries as follows:
 *
 * <h2>Rewritten query</h2>
 * <pre>
 * SELECT
 *   grp1, grp2, sum(cnt) as total, count(grp3) as distinct_col
 * from (
 *   SELECT
 *     grp1, grp2, grp3, count(*) as cnt
 *   from
 *     rel1
 *   group by
 *     grp1, grp2, grp3) tmp1
 * group by
 *   grp1, grp2;
 * </pre>
 *
 * In more detail, the first aggregation aggregates not only the original grouping fields but also the
 * distinct columns, and non-distinct aggregation functions are transformed into appropriate intermediate
 * functions. The second aggregation then aggregates only the original grouping fields, using the distinct
 * aggregation functions and the transformed non-distinct aggregation functions.
 *
 * As a result, while a non-distinct aggregation requires two execution blocks, a distinct aggregation
 * requires three.
 */
private ExecutionBlock buildGroupByIncludingDistinctFunctionsMultiStage(GlobalPlanContext context,
        ExecutionBlock latestExecBlock, GroupbyNode groupbyNode) throws TajoException {

    Column[] originalGroupingColumns = groupbyNode.getGroupingColumns();
    LinkedHashSet<Column> firstStageGroupingColumns =
            Sets.newLinkedHashSet(Arrays.asList(groupbyNode.getGroupingColumns()));
    List<AggregationFunctionCallEval> firstStageAggFunctions = Lists.newArrayList();
    List<AggregationFunctionCallEval> secondPhaseEvalNodes = Lists.newArrayList();
    List<Target> firstPhaseEvalNodeTargets = Lists.newArrayList();

    for (AggregationFunctionCallEval aggFunction : groupbyNode.getAggFunctions()) {
        if (aggFunction.isDistinct()) {
            // add distinct columns to the first stage's grouping columns
            firstStageGroupingColumns.addAll(EvalTreeUtil.findUniqueColumns(aggFunction));
            // keep distinct aggregation functions for the second stage
            secondPhaseEvalNodes.add(aggFunction);
        } else {
            // rewrite non-distinct aggregation functions
            RewrittenFunctions rewritten = rewriteAggFunctionsForDistinctAggregation(context, aggFunction);
            firstStageAggFunctions.addAll(Lists.newArrayList(rewritten.firstStageEvals));
            firstPhaseEvalNodeTargets.addAll(Lists.newArrayList(rewritten.firstStageTargets));
            // keep rewritten non-distinct aggregation functions for the second stage
            secondPhaseEvalNodes.add(rewritten.secondStageEvals);
        }
    }

    List<Target> firstStageTargets = new ArrayList<>();
    for (Column column : firstStageGroupingColumns) {
        firstStageTargets.add(new Target(new FieldEval(column)));
    }
    for (Target target : firstPhaseEvalNodeTargets) {
        firstStageTargets.add(target);
    }

    // Create the groupby node for the first stage and set all necessary descriptions
    GroupbyNode firstStageGroupby = new GroupbyNode(context.plan.getLogicalPlan().newPID());
    firstStageGroupby.setGroupingColumns(TUtil.toArray(firstStageGroupingColumns, Column.class));
    firstStageGroupby.setAggFunctions(firstStageAggFunctions);
    firstStageGroupby.setTargets(firstStageTargets);
    firstStageGroupby.setChild(groupbyNode.getChild());
    firstStageGroupby.setInSchema(groupbyNode.getInSchema());

    // Makes two execution blocks for the first stage
    ExecutionBlock firstStage = buildGroupBy(context, latestExecBlock, firstStageGroupby);

    // Create the groupby node for the second stage
    GroupbyNode secondPhaseGroupby = new GroupbyNode(context.plan.getLogicalPlan().newPID());
    secondPhaseGroupby.setGroupingColumns(originalGroupingColumns);
    secondPhaseGroupby.setAggFunctions(secondPhaseEvalNodes);
    secondPhaseGroupby.setTargets(groupbyNode.getTargets());

    ExecutionBlock secondStage = context.plan.newExecutionBlock();
    secondStage.setPlan(secondPhaseGroupby);
    SortSpec[] sortSpecs = PlannerUtil.columnsToSortSpecs(firstStageGroupingColumns);
    secondStage.getEnforcer().enforceSortAggregation(secondPhaseGroupby.getPID(), sortSpecs);

    // Create a data channel between the first and second stages
    DataChannel channel = new DataChannel(firstStage, secondStage, HASH_SHUFFLE, 32);
    channel.setShuffleKeys(secondPhaseGroupby.getGroupingColumns().clone());
    channel.setSchema(firstStage.getPlan().getOutSchema());
    channel.setDataFormat(dataFormat);

    // Setting for the second phase's logical plan
    ScanNode scanNode = buildInputExecutor(context.plan.getLogicalPlan(), channel);
    secondPhaseGroupby.setChild(scanNode);
    secondPhaseGroupby.setInSchema(scanNode.getOutSchema());
    secondStage.setPlan(secondPhaseGroupby);

    context.plan.addConnect(channel);

    return secondStage;
}
From source file:com.odoko.solrcli.actions.CrawlPostAction.java
/**
 * A very simple crawler, pulling URLs to fetch from a backlog and
 * recursing N levels deep if recursive > 0. Links are parsed from HTML
 * by first getting an XHTML version using SolrCell with extractOnly,
 * and are followed if they are local. The crawler pauses for a default delay
 * of 10 seconds between fetches; this can be configured in the delay
 * variable. This is only meant for test purposes, as it does not respect
 * robots or anything else fancy :)
 * @param level which level to crawl
 * @param out output stream to write to
 * @return number of pages crawled on this level and below
 */
protected int webCrawl(int level, OutputStream out) {
    int numPages = 0;
    LinkedHashSet<URL> stack = backlog.get(level);
    int rawStackSize = stack.size();
    stack.removeAll(visited);
    int stackSize = stack.size();
    LinkedHashSet<URL> subStack = new LinkedHashSet<URL>();
    info("Entering crawl at level " + level + " (" + rawStackSize + " links total, " + stackSize + " new)");
    for (URL u : stack) {
        try {
            visited.add(u);
            PageFetcherResult result = pageFetcher.readPageFromUrl(u);
            if (result.httpStatus == 200) {
                u = (result.redirectUrl != null) ? result.redirectUrl : u;
                URL postUrl = new URL(appendParam(solrUrl.toString(),
                        "literal.id=" + URLEncoder.encode(u.toString(), "UTF-8")
                                + "&literal.url=" + URLEncoder.encode(u.toString(), "UTF-8")));
                boolean success = postData(new ByteArrayInputStream(result.content), null, out,
                        result.contentType, postUrl);
                if (success) {
                    info("POSTed web resource " + u + " (depth: " + level + ")");
                    Thread.sleep(delay * 1000);
                    numPages++;
                    // Pull links from HTML pages only
                    if (recursive > level && result.contentType.equals("text/html")) {
                        Set<URL> children = pageFetcher.getLinksFromWebPage(u,
                                new ByteArrayInputStream(result.content), result.contentType, postUrl);
                        subStack.addAll(children);
                    }
                } else {
                    warn("An error occurred while posting " + u);
                }
            } else {
                warn("The URL " + u + " returned an HTTP result status of " + result.httpStatus);
            }
        } catch (IOException e) {
            warn("Caught exception when trying to open connection to " + u + ": " + e.getMessage());
        } catch (InterruptedException e) {
            // Preserve the interrupt status and the cause instead of discarding them
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }
    if (!subStack.isEmpty()) {
        backlog.add(subStack);
        numPages += webCrawl(level + 1, out);
    }
    return numPages;
}
From source file:org.intermine.bio.dataconversion.SequenceProcessor.java
/**
 * Search ClassDescriptor cd for references/collections with the right name for the objectType,
 * e.g. find the CDSs collection for objectType = CDS and the gene reference for objectType = Gene.
 */
private List<FieldDescriptor> getReferenceForRelationship(String objectType, ClassDescriptor cd) {
    List<FieldDescriptor> fds = new ArrayList<FieldDescriptor>();
    LinkedHashSet<String> allClasses = new LinkedHashSet<String>();
    allClasses.add(objectType);
    try {
        Set<String> parentClasses = ClassDescriptor.findSuperClassNames(getModel(), objectType);
        allClasses.addAll(parentClasses);
    } catch (MetaDataException e) {
        throw new RuntimeException("class not found in the model", e);
    }
    for (String clsName : allClasses) {
        List<String> possibleRefNames = new ArrayList<String>();
        String unqualifiedClsName = TypeUtil.unqualifiedName(clsName);
        possibleRefNames.add(unqualifiedClsName);
        possibleRefNames.add(unqualifiedClsName + 's');
        possibleRefNames.add(StringUtil.decapitalise(unqualifiedClsName));
        possibleRefNames.add(StringUtil.decapitalise(unqualifiedClsName) + 's');
        for (String possibleRefName : possibleRefNames) {
            FieldDescriptor fd = cd.getFieldDescriptorByName(possibleRefName);
            if (fd != null) {
                fds.add(fd);
            }
        }
    }
    return fds;
}
From source file:org.apache.ws.scout.registry.BusinessLifeCycleManagerImpl.java
/**
 * Saves one or more objects to the registry. An object may be a
 * RegistryObject subclass instance. If an object is not in the registry,
 * it is created in the registry. If it already exists in the registry
 * and has been modified, its state is updated (replaced) in the registry.
 * <p/>
 * TODO: Check whether jUDDI can provide a facility to store a collection of
 * heterogeneous objects
 * <p/>
 * TODO - does this belong here? It's really an overload of
 * LifecycleManager.saveObjects, but all the help we need,
 * like saveOrganization(), is up here...
 *
 * @param col
 * @return a BulkResponse containing the Collection of keys for those objects
 *         that were saved successfully and any SaveException that was encountered
 *         in case of partial commit
 * @throws JAXRException
 */
public BulkResponse saveObjects(Collection col) throws JAXRException {
    Iterator iter = col.iterator();
    LinkedHashSet<Object> suc = new LinkedHashSet<Object>();
    Collection<Exception> exc = new ArrayList<Exception>();
    while (iter.hasNext()) {
        RegistryObject reg = (RegistryObject) iter.next();
        BulkResponse br = null;
        Collection<RegistryObject> c = new ArrayList<RegistryObject>();
        c.add(reg);
        if (reg instanceof javax.xml.registry.infomodel.Association) {
            br = saveAssociations(c, true);
        } else if (reg instanceof javax.xml.registry.infomodel.ClassificationScheme) {
            br = saveClassificationSchemes(c);
        } else if (reg instanceof javax.xml.registry.infomodel.Concept) {
            br = saveConcepts(c);
        } else if (reg instanceof javax.xml.registry.infomodel.Organization) {
            br = saveOrganizations(c);
        } else if (reg instanceof javax.xml.registry.infomodel.Service) {
            br = saveServices(c);
        } else if (reg instanceof javax.xml.registry.infomodel.ServiceBinding) {
            br = saveServiceBindings(c);
        } else {
            // the original message said "Delete Operation", a copy-paste slip in a save method
            throw new JAXRException("Save operation for " + reg.getClass() + " not implemented by Scout");
        }
        if (br.getCollection() != null) {
            suc.addAll(br.getCollection());
        }
        if (br.getExceptions() != null) {
            exc.addAll(br.getExceptions());
        }
    }
    BulkResponseImpl bulk = new BulkResponseImpl();
    // TODO - what is the right status?
    bulk.setStatus(JAXRResponse.STATUS_SUCCESS);
    bulk.setCollection(suc);
    bulk.setExceptions(exc);
    return bulk;
}
From source file:com.ehsy.solr.util.SimplePostTool.java
/**
 * A very simple crawler, pulling URLs to fetch from a backlog and
 * recursing N levels deep if recursive > 0. Links are parsed from HTML
 * by first getting an XHTML version using SolrCell with extractOnly,
 * and are followed if they are local. The crawler pauses for a default delay
 * of 10 seconds between fetches; this can be configured in the delay
 * variable. This is only meant for test purposes, as it does not respect
 * robots or anything else fancy :)
 * @param level which level to crawl
 * @param out output stream to write to
 * @return number of pages crawled on this level and below
 */
protected int webCrawl(int level, OutputStream out) {
    int numPages = 0;
    LinkedHashSet<URL> stack = backlog.get(level);
    int rawStackSize = stack.size();
    stack.removeAll(visited);
    int stackSize = stack.size();
    LinkedHashSet<URL> subStack = new LinkedHashSet<>();
    info("Entering crawl at level " + level + " (" + rawStackSize + " links total, " + stackSize + " new)");
    for (URL u : stack) {
        try {
            visited.add(u);
            PageFetcherResult result = pageFetcher.readPageFromUrl(u);
            if (result.httpStatus == 200) {
                u = (result.redirectUrl != null) ? result.redirectUrl : u;
                URL postUrl = new URL(appendParam(solrUrl.toString(),
                        "literal.id=" + URLEncoder.encode(u.toString(), "UTF-8")
                                + "&literal.url=" + URLEncoder.encode(u.toString(), "UTF-8")));
                boolean success = postData(new ByteArrayInputStream(result.content), null, out,
                        result.contentType, postUrl);
                if (success) {
                    info("POSTed web resource " + u + " (depth: " + level + ")");
                    Thread.sleep(delay * 1000);
                    numPages++;
                    // Pull links from HTML pages only
                    if (recursive > level && result.contentType.equals("text/html")) {
                        Set<URL> children = pageFetcher.getLinksFromWebPage(u,
                                new ByteArrayInputStream(result.content), result.contentType, postUrl);
                        subStack.addAll(children);
                    }
                } else {
                    warn("An error occurred while posting " + u);
                }
            } else {
                warn("The URL " + u + " returned an HTTP result status of " + result.httpStatus);
            }
        } catch (IOException e) {
            warn("Caught exception when trying to open connection to " + u + ": " + e.getMessage());
        } catch (InterruptedException e) {
            // Preserve the interrupt status and the cause instead of discarding them
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }
    if (!subStack.isEmpty()) {
        backlog.add(subStack);
        numPages += webCrawl(level + 1, out);
    }
    return numPages;
}
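Both crawler examples above keep one LinkedHashSet per depth level: addAll on the next level's frontier silently drops links already queued at that level, and removeAll(visited) prevents refetching. A stripped-down sketch of that bookkeeping, with hypothetical URLs and names:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class LevelFrontierSketch {
    public static void main(String[] args) {
        List<LinkedHashSet<String>> backlog = new ArrayList<>();
        Set<String> visited = new LinkedHashSet<>();
        backlog.add(new LinkedHashSet<>(Arrays.asList("http://example.com/")));

        // One crawl step: visit level 0 and gather child links for level 1.
        LinkedHashSet<String> nextLevel = new LinkedHashSet<>();
        for (String url : backlog.get(0)) {
            visited.add(url);
            // A page may link to the same child twice; the set keeps one copy.
            nextLevel.addAll(Arrays.asList("http://example.com/a", "http://example.com/a"));
        }
        nextLevel.removeAll(visited); // never refetch a visited URL
        backlog.add(nextLevel);
        System.out.println(backlog); // [[http://example.com/], [http://example.com/a]]
    }
}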
From source file:org.egov.egf.web.actions.payment.ChequeAssignmentAction.java
@SkipValidation
@ValidationErrorPage(value = SURRENDERSEARCH)
@Action(value = "/payment/chequeAssignment-searchChequesForSurrender")
public String searchChequesForSurrender() {
    validateForSuurenderSearch();
    if (!getFieldErrors().isEmpty()) {
        if (bank_branch != null && !bank_branch.equals("-1"))
            addDropdownData(BANKACCOUNT_LIST,
                    persistenceService.findAllBy(" from Bankaccount where bankbranch.id=? and isactive=true ",
                            Integer.valueOf(bank_branch.split("-")[1])));
        loadReasonsForSurrendaring();
        return beforeSearchForSurrender();
    }
    final StringBuilder sql = new StringBuilder();
    try {
        List<Object> params = new LinkedList<>();
        if (isNotBlank(fromDate)) {
            sql.append(" and iv.voucherHeaderId.voucherDate>=? ");
            params.add(sdf.format(formatter.parse(fromDate)));
        }
        if (isNotBlank(toDate)) {
            sql.append(" and iv.voucherHeaderId.voucherDate<=? ");
            params.add(sdf.format(formatter.parse(toDate)));
        }
        if (bankaccount != null && bankaccount != -1) {
            sql.append(" and ih.bankAccountId.id=? ");
            params.add(bankaccount);
        }
        if (isNotBlank(instrumentNumber)) {
            sql.append(" and ih.instrumentNumber=? ");
            params.add(instrumentNumber);
        }
        if (department != null && department != -1) {
            sql.append(" and iv.voucherHeaderId.vouchermis.departmentid.id=? ");
            params.add(department);
        }
        if (isNotBlank(voucherHeader.getVoucherNumber())) {
            sql.append(" and iv.voucherHeaderId.voucherNumber=? ");
            params.add(voucherHeader.getVoucherNumber());
        }
        final String mainQuery = new StringBuilder(500)
                .append("select ih from InstrumentVoucher iv ,InstrumentHeader ih ,InstrumentType it ")
                .append("where iv.instrumentHeaderId.id =ih.id and ih.instrumentNumber is not null ")
                .append("and ih.instrumentType=it.id and ( it.type = 'cheque' or it.type = 'cash' ) and ")
                .append("iv.voucherHeaderId.status=0 and iv.voucherHeaderId.type='")
                .append(FinancialConstants.STANDARD_VOUCHER_TYPE_PAYMENT).append("' ").append(sql)
                .append(" and ih.statusId.id in (?) order by iv.voucherHeaderId.voucherDate").toString();
        final EgwStatus created = instrumentService.getStatusId(FinancialConstants.INSTRUMENT_CREATED_STATUS);
        params.add(created.getId());
        instrumentHeaderList = persistenceService.findAllBy(mainQuery, params.toArray());
        // De-duplicate the result list while preserving its order
        final LinkedHashSet<InstrumentHeader> lhs = new LinkedHashSet<>();
        lhs.addAll(instrumentHeaderList);
        instrumentHeaderList.clear();
        instrumentHeaderList.addAll(lhs);
        instrumentVoucherList = new ArrayList<>();
        for (final InstrumentHeader ih : instrumentHeaderList)
            instrumentVoucherList.addAll(ih.getInstrumentVouchers());
        getSession().put(INSTRUMENT_VOUCHER_LIST, instrumentVoucherList);
        getSession().put(INSTRUMENT_HEADER_LIST, instrumentHeaderList);
        if (!instrumentVoucherList.isEmpty()) {
            loadReasonsForSurrendaring();
            loadChequeSerialNo();
        }
    } catch (final ParseException e) {
        LOGGER.error("Error occurred while parsing date", e);
        throw new ValidationException(Arrays.asList(new ValidationError(UNPARSABLE_DATE, UNPARSABLE_DATE)));
    }
    getheader();
    if (LOGGER.isDebugEnabled())
        LOGGER.debug("Completed searchChequesForSurrender.");
    containsRTGS = false;
    return "surrendercheques";
}
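The copy-clear-copy sequence above is a common idiom for de-duplicating a List in place while keeping first-seen order, which is exactly what LinkedHashSet.addAll provides. A standalone sketch with illustrative names:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;

public class DedupPreservingOrder {
    public static void main(String[] args) {
        List<String> results = new ArrayList<>(Arrays.asList("x", "y", "x", "z", "y"));
        // LinkedHashSet drops duplicates but keeps each element's first position
        LinkedHashSet<String> unique = new LinkedHashSet<>();
        unique.addAll(results);
        results.clear();
        results.addAll(unique);
        System.out.println(results); // [x, y, z]
    }
}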
From source file:org.egov.egf.web.actions.payment.ChequeAssignmentAction.java
@SkipValidation
@ValidationErrorPage(value = SURRENDERRTGSSEARCH)
@Action(value = "/payment/chequeAssignment-searchForRTGSSurrender")
public String searchForRTGSSurrender() {
    if (LOGGER.isDebugEnabled())
        LOGGER.debug("Starting searchRTGSForSurrender...");
    validateForSuurenderSearch();
    if (!getFieldErrors().isEmpty()) {
        if (bank_branch != null && !bank_branch.equals("-1"))
            addDropdownData(BANKACCOUNT_LIST,
                    persistenceService.findAllBy(" from Bankaccount where bankbranch.id=? and isactive=true ",
                            Integer.valueOf(bank_branch.split("-")[1])));
        loadReasonsForSurrendaring();
        return beforeSearchForRTGSSurrender();
    }
    final StringBuilder sql = new StringBuilder();
    try {
        List<Object> params = new LinkedList<>();
        if (isNotBlank(fromDate)) {
            sql.append(" and iv.voucherHeaderId.voucherDate>=? ");
            params.add(sdf.format(formatter.parse(fromDate)));
        }
        if (isNotBlank(toDate)) {
            sql.append(" and iv.voucherHeaderId.voucherDate<=? ");
            params.add(sdf.format(formatter.parse(toDate)));
        }
        if (bankaccount != null && bankaccount != -1) {
            sql.append(" and ih.bankAccountId.id=? ");
            params.add(bankaccount);
        }
        if (isNotBlank(instrumentNumber)) {
            sql.append(" and ih.transactionNumber=? ");
            params.add(instrumentNumber);
        }
        if (department != null && department != -1) {
            sql.append(" and iv.voucherHeaderId.vouchermis.departmentid.id=? ");
            params.add(department);
        }
        if (isNotBlank(voucherHeader.getVoucherNumber())) {
            sql.append(" and iv.voucherHeaderId.voucherNumber=? ");
            params.add(voucherHeader.getVoucherNumber());
        }
        final String mainQuery = new StringBuilder(500)
                .append("select ih from InstrumentVoucher iv,InstrumentHeader ih ,InstrumentType it ")
                .append("where iv.instrumentHeaderId.id =ih.id and ih.transactionNumber is not null and ih.instrumentType=it.id ")
                .append("and it.type = 'advice' and iv.voucherHeaderId.status=0 and iv.voucherHeaderId.type='")
                .append(FinancialConstants.STANDARD_VOUCHER_TYPE_PAYMENT).append("' ").append(sql)
                .append(" and ih.statusId.id in (?) order by iv.voucherHeaderId.voucherDate").toString();
        final EgwStatus created = instrumentService.getStatusId(FinancialConstants.INSTRUMENT_CREATED_STATUS);
        params.add(created.getId());
        instrumentHeaderList = persistenceService.findAllBy(mainQuery, params.toArray());
        // De-duplicate the result list while preserving its order
        final LinkedHashSet<InstrumentHeader> lhs = new LinkedHashSet<>();
        lhs.addAll(instrumentHeaderList);
        instrumentHeaderList.clear();
        instrumentHeaderList.addAll(lhs);
        instrumentVoucherList = new ArrayList<>();
        for (final InstrumentHeader ih : instrumentHeaderList)
            instrumentVoucherList.addAll(ih.getInstrumentVouchers());
        getSession().put(INSTRUMENT_VOUCHER_LIST, instrumentVoucherList);
        getSession().put(INSTRUMENT_HEADER_LIST, instrumentHeaderList);
        if (!instrumentVoucherList.isEmpty())
            loadReasonsForSurrendaring();
        // Note: unlike searchChequesForSurrender(), this call is unconditional here
        loadChequeSerialNo();
    } catch (final ParseException e) {
        LOGGER.error("Error occurred while parsing date", e);
        throw new ValidationException(Arrays.asList(new ValidationError(UNPARSABLE_DATE, UNPARSABLE_DATE)));
    }
    getheader();
    if (LOGGER.isDebugEnabled())
        LOGGER.debug("Completed searchRTGSForSurrender.");
    return "surrenderRTGS";
}
From source file:com.sonicle.webtop.core.CoreManager.java
public Sharing getSharing(String serviceId, String groupName, String shareId) throws WTException {
    WebTopManager usrm = wta.getWebTopManager();
    ShareDAO shadao = ShareDAO.getInstance();
    RolePermissionDAO rpedao = RolePermissionDAO.getInstance();
    Connection con = null;

    String rootShareKey = OShare.buildRootKey(groupName);
    String folderShareKey = OShare.buildFolderKey(groupName);
    String rootPermissionKey = ServiceSharePermission.buildRootPermissionKey(groupName);
    String folderPermissionKey = ServiceSharePermission.buildFolderPermissionKey(groupName);
    String elementsPermissionKey = ServiceSharePermission.buildElementsPermissionKey(groupName);

    try {
        CompositeId cid = new CompositeId().parse(shareId);
        int level = cid.getSize() - 1;
        String rootId = cid.getToken(0);

        con = WT.getCoreConnection();

        // Retrieves the root share
        OShare rootShare = null;
        if (rootId.equals("0")) {
            String puid = usrm.userToUid(getTargetProfileId());
            rootShare = shadao.selectByUserServiceKeyInstance(con, puid, serviceId, rootShareKey,
                    OShare.INSTANCE_ROOT);
        } else {
            rootShare = shadao.selectById(con, Integer.valueOf(rootId));
        }

        Sharing outshare = new Sharing();
        outshare.setId(shareId);
        outshare.setLevel(level);

        if (rootShare != null) { // A rootShare must be defined in order to continue...
            if (level == 0) {
                LinkedHashSet<String> roleUids = new LinkedHashSet<>();
                roleUids.addAll(listRoles(serviceId, rootPermissionKey, rootShare.getShareId().toString()));
                OShare folderShare = shadao.selectByUserServiceKeyInstance(con, rootShare.getUserUid(),
                        serviceId, folderShareKey, OShare.INSTANCE_WILDCARD);
                if (folderShare != null)
                    roleUids.addAll(listRoles(serviceId, folderPermissionKey,
                            folderShare.getShareId().toString()));

                for (String roleUid : roleUids) {
                    // Root...
                    SharePermsRoot rperms = new SharePermsRoot();
                    for (ORolePermission perm : rpedao.selectByRoleServiceKeyInstance(con, roleUid, serviceId,
                            rootPermissionKey, rootShare.getShareId().toString())) {
                        rperms.parse(perm.getAction());
                    }
                    // Folder...
                    SharePermsFolder fperms = new SharePermsFolder();
                    if (folderShare != null) {
                        for (ORolePermission perm : rpedao.selectByRoleServiceKeyInstance(con, roleUid,
                                serviceId, folderPermissionKey, folderShare.getShareId().toString())) {
                            fperms.parse(perm.getAction());
                        }
                    }
                    // Elements...
                    SharePermsElements eperms = new SharePermsElements();
                    if (folderShare != null) {
                        for (ORolePermission perm : rpedao.selectByRoleServiceKeyInstance(con, roleUid,
                                serviceId, elementsPermissionKey, folderShare.getShareId().toString())) {
                            eperms.parse(perm.getAction());
                        }
                    }
                    outshare.getRights().add(new Sharing.RoleRights(roleUid, rperms, fperms, eperms));
                }
            } else if (level == 1) {
                String folderId = cid.getToken(1);
                OShare folderShare = shadao.selectByUserServiceKeyInstance(con, rootShare.getUserUid(),
                        serviceId, folderShareKey, folderId);
                if (folderShare != null) {
                    List<String> roleUids = listRoles(serviceId, folderPermissionKey,
                            folderShare.getShareId().toString());
                    for (String roleUid : roleUids) {
                        // Folder...
                        SharePermsFolder fperms = new SharePermsFolder();
                        for (ORolePermission perm : rpedao.selectByRoleServiceKeyInstance(con, roleUid,
                                serviceId, folderPermissionKey, folderShare.getShareId().toString())) {
                            fperms.parse(perm.getAction());
                        }
                        // Elements...
                        SharePermsElements eperms = new SharePermsElements();
                        for (ORolePermission perm : rpedao.selectByRoleServiceKeyInstance(con, roleUid,
                                serviceId, elementsPermissionKey, folderShare.getShareId().toString())) {
                            eperms.parse(perm.getAction());
                        }
                        outshare.getRights().add(new Sharing.RoleRights(roleUid, null, fperms, eperms));
                    }
                }
            }
        }
        return outshare;

    } catch (SQLException | DAOException ex) {
        throw new WTException(ex, "DB error");
    } finally {
        DbUtils.closeQuietly(con);
    }
}