Usage examples for java.util.HashSet#iterator() collected from open-source projects
public Iterator<E> iterator()
From source file:org.unitime.timetable.model.Solution.java
/**
 * Assigns division-section numbers (class suffixes) to every class covered by this
 * solution and writes the updated classes back through the given Hibernate session.
 * Numbering is done per scheduling subpart in up to three passes: a strict pass
 * (reusing division numbers only for large itypes sharing a time location), and two
 * progressively looser fallback passes for subparts whose division number overflows
 * the two-digit range (&gt; 99).
 *
 * @param hibSession active Hibernate session used to persist the updated classes
 * @param messages collector for user-visible warning strings (only written to when
 *                 even the final fallback pass overflows)
 */
public void createDivSecNumbers(org.hibernate.Session hibSession, Vector messages) {
    // Work list: all assignments of this solution, plus classes managed by the
    // owning departments that have no assignment in this solution.
    Vector assignments = new Vector(getAssignments());
    assignments.addAll(new SolutionDAO().getSession()
            .createQuery("select distinct c from Class_ c, Solution s inner join s.owner.departments d "
                    + "where s.uniqueId = :solutionId and c.managingDept=d and "
                    + "c.uniqueId not in (select a.clazz.uniqueId from s.assignments a)")
            .setLong("solutionId", getUniqueId().longValue()).list());
    // Collect every instructional offering touched by the work list.
    HashSet relatedOfferings = new HashSet();
    for (Enumeration e = assignments.elements(); e.hasMoreElements();) {
        Object o = e.nextElement();
        // Work-list entries are either Assignments or bare Class_ objects.
        Assignment assignment = (o instanceof Assignment ? (Assignment) o : null);
        Class_ clazz = (assignment == null ? (Class_) o : assignment.getClazz());
        relatedOfferings.add(clazz.getSchedulingSubpart().getInstrOfferingConfig().getInstructionalOffering());
    }
    // Pull in classes of related offerings that already carry a suffix but are NOT
    // managed by the owner's departments, and clear their suffix so they are
    // renumbered consistently with ours.
    for (Iterator i = relatedOfferings.iterator(); i.hasNext();) {
        InstructionalOffering io = (InstructionalOffering) i.next();
        for (Iterator j = io.getInstrOfferingConfigs().iterator(); j.hasNext();) {
            InstrOfferingConfig ioc = (InstrOfferingConfig) j.next();
            for (Iterator k = ioc.getSchedulingSubparts().iterator(); k.hasNext();) {
                SchedulingSubpart subpart = (SchedulingSubpart) k.next();
                for (Iterator l = subpart.getClasses().iterator(); l.hasNext();) {
                    Class_ clazz = (Class_) l.next();
                    if (clazz.getClassSuffix() != null
                            && !getOwner().getDepartments().contains(clazz.getManagingDept())) {
                        Assignment assignment = clazz.getCommittedAssignment();
                        assignments.add(assignment == null ? (Object) clazz : (Object) assignment);
                        clazz.setClassSuffix(null);
                    }
                }
            }
        }
    }
    // --- Pass 1: strict ordering ------------------------------------------------
    DivSecAssignmentComparator cmp = new DivSecAssignmentComparator(this, true, false);
    Collections.sort(assignments, cmp);
    Assignment lastAssignment = null;
    SchedulingSubpart lastSubpart = null;
    Class_ lastClazz = null;
    int divNum = 1, secNum = 0;
    HashSet takenDivNums = null;
    // Subparts whose division numbers overflowed in this pass; renumbered below.
    HashSet recompute = new HashSet();
    for (Enumeration e = assignments.elements(); e.hasMoreElements();) {
        Object o = e.nextElement();
        Assignment assignment = (o instanceof Assignment ? (Assignment) o : null);
        Class_ clazz = (assignment == null ? (Class_) o : assignment.getClazz());
        // Skip classes that share their itype with the parent class.
        if (clazz.getParentClass() != null && clazz.getSchedulingSubpart().getItype()
                .equals(clazz.getParentClass().getSchedulingSubpart().getItype()))
            continue;
        // Entering a new subpart: reload division numbers already in use, reset state.
        if (lastSubpart == null || !lastSubpart.equals(clazz.getSchedulingSubpart())) {
            takenDivNums = takenDivisionNumbers(clazz.getSchedulingSubpart());
            lastAssignment = null;
            lastSubpart = null;
            lastClazz = null;
        }
        int nrClasses = clazz.getSchedulingSubpart().getInstrOfferingConfig().getInstructionalOffering()
                .getNrClasses(clazz.getSchedulingSubpart().getItype());
        if (lastAssignment != null && assignment != null) {
            // Large itypes (>= 100 classes) sharing a time location reuse the division
            // number and only advance the section number.
            if (nrClasses >= 100 && cmp.compareTimeLocations(lastAssignment.getClazz(), assignment.getClazz(),
                    lastAssignment.getTimeLocation(), assignment.getTimeLocation()) == 0) {
                // NOTE(review): only clazz.getParentClass() is null-checked below;
                // lastClazz.getParentClass() may still be null when
                // getDivSecNumber() is called on it -- potential NPE, verify.
                if (lastClazz != null && clazz.getParentClass() != null
                        && !clazz.getParentClass().equals(lastClazz.getParentClass())
                        && clazz.getParentClass().getDivSecNumber() != null
                        && lastClazz.getParentClass().getDivSecNumber() != null) {
                    // Different parents: keep the same division only if the parents'
                    // first three suffix digits (the division part) agree.
                    if (cmp.compareTimeLocations(lastAssignment.getClazz(), assignment.getClazz(),
                            lastAssignment.getTimeLocation(), assignment.getTimeLocation()) == 0
                            && clazz.getParentClass().getDivSecNumber().substring(0, 3)
                                    .equals(lastClazz.getParentClass().getDivSecNumber().substring(0, 3))) {
                        secNum++;
                    } else {
                        divNum++;
                        secNum = 1;
                        while (takenDivNums.contains(new Integer(divNum)))
                            divNum++;
                    }
                } else {
                    secNum++;
                }
            } else {
                divNum++;
                secNum = 1;
                while (takenDivNums.contains(new Integer(divNum)))
                    divNum++;
            }
        } else if (lastClazz != null) {
            divNum++;
            secNum = 1;
            while (takenDivNums.contains(new Integer(divNum)))
                divNum++;
        } else {
            // First class of a subpart: start at division 1, section 1 (skipping
            // divisions already taken).
            divNum = 1;
            secNum = 1;
            while (takenDivNums.contains(new Integer(divNum)))
                divNum++;
        }
        if (divNum == 100 && secNum == 1) {
            // Overflowed the two-digit division range: mark all subparts of this
            // itype (across all configs of the offering) for the fallback pass.
            sLog.warn("Division number exceeded 99 for scheduling subpart "
                    + clazz.getSchedulingSubpart().getSchedulingSubpartLabel() + ".");
            for (Iterator i = clazz.getSchedulingSubpart().getInstrOfferingConfig().getInstructionalOffering()
                    .getInstrOfferingConfigs().iterator(); i.hasNext();) {
                InstrOfferingConfig cfg = (InstrOfferingConfig) i.next();
                for (Iterator j = cfg.getSchedulingSubparts().iterator(); j.hasNext();) {
                    SchedulingSubpart subpart = (SchedulingSubpart) j.next();
                    if (subpart.getItype().equals(clazz.getSchedulingSubpart().getItype()))
                        recompute.add(subpart);
                }
            }
        }
        // Suffix = division number followed by section number (both formatted).
        clazz.setClassSuffix(sSufixFormat.format(divNum) + sSufixFormat.format(secNum));
        hibSession.update(clazz);
        lastAssignment = assignment;
        lastSubpart = clazz.getSchedulingSubpart();
        lastClazz = clazz;
    }
    // --- Pass 2 (fallback): renumber overflowed subparts without the nrClasses rule.
    if (!recompute.isEmpty()) {
        HashSet recompute2 = new HashSet();
        // Keep only classes of the overflowed subparts; clear their suffixes first.
        for (Iterator i = assignments.iterator(); i.hasNext();) {
            Object o = i.next();
            Assignment assignment = (o instanceof Assignment ? (Assignment) o : null);
            Class_ clazz = (assignment == null ? (Class_) o : assignment.getClazz());
            if (recompute.contains(clazz.getSchedulingSubpart())) {
                clazz.setClassSuffix(null);
                hibSession.update(clazz);
            } else {
                i.remove();
            }
        }
        cmp = new DivSecAssignmentComparator(this, false, false);
        Collections.sort(assignments, cmp);
        lastAssignment = null;
        lastSubpart = null;
        lastClazz = null;
        for (Enumeration e = assignments.elements(); e.hasMoreElements();) {
            Object o = e.nextElement();
            Assignment assignment = (o instanceof Assignment ? (Assignment) o : null);
            Class_ clazz = (assignment == null ? (Class_) o : assignment.getClazz());
            if (lastSubpart == null || !lastSubpart.equals(clazz.getSchedulingSubpart())) {
                takenDivNums = takenDivisionNumbers(clazz.getSchedulingSubpart());
                lastAssignment = null;
                lastSubpart = null;
                lastClazz = null;
            }
            if (lastAssignment != null && assignment != null) {
                if (cmp.compareTimeLocations(lastAssignment.getClazz(), assignment.getClazz(),
                        lastAssignment.getTimeLocation(), assignment.getTimeLocation()) == 0) {
                    secNum++;
                } else {
                    divNum++;
                    secNum = 1;
                    while (takenDivNums.contains(new Integer(divNum)))
                        divNum++;
                }
            } else if (lastClazz != null) {
                divNum++;
                secNum = 1;
                while (takenDivNums.contains(new Integer(divNum)))
                    divNum++;
            } else {
                divNum = 1;
                secNum = 1;
                while (takenDivNums.contains(new Integer(divNum)))
                    divNum++;
            }
            if (divNum == 100 && secNum == 1) {
                // Still overflowing: schedule the second, even looser fallback pass.
                sLog.warn("Division number still (fallback) exceeded 99 for scheduling subpart "
                        + clazz.getSchedulingSubpart().getSchedulingSubpartLabel() + ".");
                for (Iterator i = clazz.getSchedulingSubpart().getInstrOfferingConfig()
                        .getInstructionalOffering().getInstrOfferingConfigs().iterator(); i.hasNext();) {
                    InstrOfferingConfig cfg = (InstrOfferingConfig) i.next();
                    for (Iterator j = cfg.getSchedulingSubparts().iterator(); j.hasNext();) {
                        SchedulingSubpart subpart = (SchedulingSubpart) j.next();
                        if (subpart.getItype().equals(clazz.getSchedulingSubpart().getItype()))
                            recompute2.add(subpart);
                    }
                }
            }
            clazz.setClassSuffix(sSufixFormat.format(divNum) + sSufixFormat.format(secNum));
            hibSession.update(clazz);
            lastAssignment = assignment;
            lastSubpart = clazz.getSchedulingSubpart();
            lastClazz = clazz;
        }
        // --- Pass 3 (last resort): compare subparts via the comparator and only
        // report (to messages) when the division range still overflows.
        if (!recompute2.isEmpty()) {
            for (Iterator i = assignments.iterator(); i.hasNext();) {
                Object o = i.next();
                Assignment assignment = (o instanceof Assignment ? (Assignment) o : null);
                Class_ clazz = (assignment == null ? (Class_) o : assignment.getClazz());
                if (recompute2.contains(clazz.getSchedulingSubpart())) {
                    clazz.setClassSuffix(null);
                    hibSession.update(clazz);
                } else {
                    i.remove();
                }
            }
            cmp = new DivSecAssignmentComparator(this, false, true);
            Collections.sort(assignments, cmp);
            lastAssignment = null;
            lastSubpart = null;
            lastClazz = null;
            for (Enumeration e = assignments.elements(); e.hasMoreElements();) {
                Object o = e.nextElement();
                Assignment assignment = (o instanceof Assignment ? (Assignment) o : null);
                Class_ clazz = (assignment == null ? (Class_) o : assignment.getClazz());
                if (lastSubpart == null
                        || cmp.compareSchedulingSubparts(lastSubpart, clazz.getSchedulingSubpart()) != 0) {
                    takenDivNums = takenDivisionNumbers(clazz.getSchedulingSubpart());
                    lastAssignment = null;
                    lastSubpart = null;
                    lastClazz = null;
                }
                if (lastAssignment != null && assignment != null) {
                    if (cmp.compareTimeLocations(lastAssignment.getClazz(), assignment.getClazz(),
                            lastAssignment.getTimeLocation(), assignment.getTimeLocation()) == 0) {
                        secNum++;
                    } else {
                        divNum++;
                        secNum = 1;
                        while (takenDivNums.contains(new Integer(divNum)))
                            divNum++;
                    }
                } else if (lastClazz != null) {
                    divNum++;
                    secNum = 1;
                    while (takenDivNums.contains(new Integer(divNum)))
                        divNum++;
                } else {
                    divNum = 1;
                    secNum = 1;
                    while (takenDivNums.contains(new Integer(divNum)))
                        divNum++;
                }
                if (divNum == 100 && secNum == 1) {
                    messages.add("Division number exceeded 99 for scheduling subpart "
                            + clazz.getSchedulingSubpart().getSchedulingSubpartLabel() + ".");
                    sLog.warn("Division number still (fallback2) exceeded 99 for scheduling subpart "
                            + clazz.getSchedulingSubpart().getSchedulingSubpartLabel() + ".");
                }
                clazz.setClassSuffix(sSufixFormat.format(divNum) + sSufixFormat.format(secNum));
                hibSession.update(clazz);
                lastAssignment = assignment;
                lastSubpart = clazz.getSchedulingSubpart();
                lastClazz = clazz;
            }
        }
    }
    // (A long commented-out legacy block that numbered the remaining unassigned
    // classes was removed here; recover it from version control if ever needed.)
}
From source file:app.philm.in.controllers.MovieController.java
private <T, F extends Filter<T>> List<ListItem<T>> createSectionedListItemList(final List<T> items, final List<F> sections, List<F> sectionProcessingOrder) { Preconditions.checkNotNull(items, "items cannot be null"); Preconditions.checkNotNull(sections, "sections cannot be null"); if (sectionProcessingOrder != null) { Preconditions.checkArgument(sections.size() == sectionProcessingOrder.size(), "sections and sectionProcessingOrder must be the same size"); } else {//from w ww .j av a2 s .co m sectionProcessingOrder = sections; } final List<ListItem<T>> result = new ArrayList<>(items.size()); final HashSet<T> movies = new HashSet<>(items); Map<F, List<ListItem<T>>> sectionsItemLists = null; for (F filter : sectionProcessingOrder) { List<ListItem<T>> sectionItems = null; for (Iterator<T> i = movies.iterator(); i.hasNext();) { T movie = i.next(); if (movie != null && filter.isFiltered(movie)) { if (sectionItems == null) { sectionItems = new ArrayList<>(); // Now add Title String title = mStringFetcher.getString(filter.getSectionTitle()); sectionItems.add(new ListItem<T>(title)); } sectionItems.add(new ListItem<T>(movie)); i.remove(); } } if (!PhilmCollections.isEmpty(sectionItems)) { if (sectionsItemLists == null) { sectionsItemLists = new ArrayMap<>(); } filter.sortListItems(sectionItems); sectionsItemLists.put(filter, sectionItems); } } if (sectionsItemLists != null) { for (F filter : sections) { if (sectionsItemLists.containsKey(filter)) { result.addAll(sectionsItemLists.get(filter)); } } } return result; }
From source file:com.joyent.manta.http.MantaHttpHeaders.java
/** * Gets the header defining RBAC roles used for this object. * * @return roles associated with object/*from w ww . j a va2 s . c om*/ */ public Set<String> getRoles() { final Object value = get(HTTP_ROLE_TAG); if (value == null) { return Collections.emptySet(); } final HashSet<String> roles = new HashSet<>(); if (value instanceof Iterable<?>) { ((Iterable<?>) value).forEach(o -> { if (o != null) { roles.add(o.toString()); } }); } else if (value.getClass().isArray()) { for (Object o : (Object[]) value) { if (o != null) { roles.add(o.toString()); } } } else { String line = value.toString(); roles.addAll(MantaUtils.fromCsv(line)); } /* The result may come to us as a CSV. In that case we treat each * value separated by a comma as a single role. */ if (roles.size() == 1) { String line = roles.iterator().next(); roles.clear(); roles.addAll(MantaUtils.fromCsv(line)); } return Collections.unmodifiableSet(roles); }
From source file:org.unitime.timetable.solver.TimetableSolver.java
/**
 * Builds a table describing why the given hints conflict with the current solution.
 * For each hint's placement, every hard constraint is asked for its conflicting
 * placements; each conflict (not itself one of the input hints) is mapped to a
 * human-readable name of the constraint that causes it.
 *
 * @param hints collection of Hint objects to analyze
 * @return Hashtable mapping a conflicting placement (as a Hint) to the description
 *         of the violated constraint
 */
public Hashtable conflictInfo(Collection hints) {
    Hashtable conflictTable = new Hashtable();
    // Read the solution under its read lock; released in the finally below.
    Lock lock = currentSolution().getLock().readLock();
    lock.lock();
    try {
        // Hints already processed -- their conflicts are not re-reported.
        HashSet done = new HashSet();
        for (Object element : hints) {
            Hint hint = (Hint) element;
            Placement placement = hint.getPlacement((TimetableModel) currentSolution().getModel());
            if (placement == null)
                continue;
            for (Constraint constraint : placement.variable().hardConstraints()) {
                HashSet conflicts = new HashSet();
                constraint.computeConflicts(currentSolution().getAssignment(), placement, conflicts);
                if (conflicts == null || conflicts.isEmpty())
                    continue;
                for (Object conflictObj : conflicts) {
                    Placement conflict = (Placement) conflictObj;
                    Hint confHint = new Hint(this, conflict);
                    // Keep the first explanation seen for a given conflicting placement.
                    if (done.contains(confHint) || conflictTable.containsKey(confHint))
                        continue;
                    String name = constraint.getName();
                    if (constraint instanceof RoomConstraint) {
                        name = "Room " + constraint.getName();
                    } else if (constraint instanceof InstructorConstraint) {
                        name = "Instructor " + constraint.getName();
                    } else if (constraint instanceof GroupConstraint) {
                        name = "Distribution " + constraint.getName();
                    } else if (constraint instanceof DepartmentSpreadConstraint) {
                        name = "Balancing of department " + constraint.getName();
                    } else if (constraint instanceof SpreadConstraint) {
                        name = "Same subpart spread " + constraint.getName();
                    } else if (constraint instanceof ClassLimitConstraint) {
                        name = "Class limit " + constraint.getName();
                    }
                    conflictTable.put(confHint, name);
                }
            }
            done.add(hint);
        }
    } finally {
        lock.unlock();
    }
    return conflictTable;
}
From source file:org.sakaiproject.component.gradebook.GradebookFrameworkServiceImpl.java
/**
 * Synchronizes persisted GradingScale records with the given definitions:
 * scales absent from the input are flagged unavailable, existing matches are
 * updated in place, and definitions with no existing record are created.
 *
 * @param gradingScaleDefinitions collection of GradingScaleDefinition beans to merge
 * @param session active Hibernate session used for all reads and writes
 * @throws HibernateException on any persistence failure
 */
private void mergeGradeMappings(Collection gradingScaleDefinitions, Session session) throws HibernateException {
    // Index incoming definitions by UID; uidsToSet tracks which UIDs still need a
    // brand-new GradingScale after the in-place update step below.
    Map newMappingDefinitionsMap = new HashMap();
    HashSet uidsToSet = new HashSet();
    for (Iterator iter = gradingScaleDefinitions.iterator(); iter.hasNext();) {
        GradingScaleDefinition bean = (GradingScaleDefinition) iter.next();
        newMappingDefinitionsMap.put(bean.getUid(), bean);
        uidsToSet.add(bean.getUid());
    }

    // Until we move to Hibernate 3 syntax, we need to update one record at a time.
    Query q;
    List gmtList;

    // NOTE(review): setParameterList with an empty uidsToSet is rejected by some
    // Hibernate/database combinations -- confirm callers never pass an empty
    // gradingScaleDefinitions collection.

    // Toggle any scales that are no longer specified.
    q = session.createQuery(
            "from GradingScale as gradingScale where gradingScale.uid not in (:uidList) and gradingScale.unavailable=false");
    q.setParameterList("uidList", uidsToSet);
    gmtList = q.list();
    for (Iterator iter = gmtList.iterator(); iter.hasNext();) {
        GradingScale gradingScale = (GradingScale) iter.next();
        gradingScale.setUnavailable(true);
        session.update(gradingScale);
        if (log.isInfoEnabled())
            log.info("Set Grading Scale " + gradingScale.getUid() + " unavailable");
    }

    // Modify any specified scales that already exist.
    q = session.createQuery("from GradingScale as gradingScale where gradingScale.uid in (:uidList)");
    q.setParameterList("uidList", uidsToSet);
    gmtList = q.list();
    for (Iterator iter = gmtList.iterator(); iter.hasNext();) {
        GradingScale gradingScale = (GradingScale) iter.next();
        // Copy the matching definition onto the persistent scale ...
        copyDefinitionToScale((GradingScaleDefinition) newMappingDefinitionsMap.get(gradingScale.getUid()),
                gradingScale);
        // ... and drop its UID from the "still to create" set.
        uidsToSet.remove(gradingScale.getUid());
        session.update(gradingScale);
        if (log.isInfoEnabled())
            log.info("Updated Grading Scale " + gradingScale.getUid());
    }

    // Add any new scales.
    for (Iterator iter = uidsToSet.iterator(); iter.hasNext();) {
        String uid = (String) iter.next();
        GradingScale gradingScale = new GradingScale();
        gradingScale.setUid(uid);
        GradingScaleDefinition bean = (GradingScaleDefinition) newMappingDefinitionsMap.get(uid);
        copyDefinitionToScale(bean, gradingScale);
        session.save(gradingScale);
        if (log.isInfoEnabled())
            log.info("Added Grading Scale " + gradingScale.getUid());
    }
    session.flush();
}
From source file:edu.ucla.cs.scai.canali.core.index.BuildIndex.java
private HashSet<Integer> findCommonLowestAncestor(int c1, int c2) { HashSet<Integer> res = new HashSet<>(); if (c1 == c2) { res.add(c1);/* ww w. j a v a 2 s . c o m*/ return res; } if (classAncestors[c1].contains(c2)) { res.add(c2); return res; } else if (classAncestors[c2].contains(c1)) { res.add(c1); return res; } //find common ancestors HashSet<Integer> temp = new HashSet<>(); for (int a : classAncestors[c1]) { if (classAncestors[c2].contains(a)) { temp.add(a); } } for (int candidateClass : temp) { boolean add = true; for (Iterator<Integer> it = res.iterator(); it.hasNext();) { int existingClass = it.next(); if (classAncestors[existingClass].contains(candidateClass)) { //candidate class is an ancestor of existing class add = false; break; } else if (classAncestors[candidateClass].contains(existingClass)) { //existing class is an ancestor of candodate class it.remove(); //don't break - candidate class could be the ancestor of other existing classes in res } } if (add) { res.add(candidateClass); } } return res; }
From source file:org.openanzo.jdbc.container.query.FindInferred.java
/**
 * Find all statements in container that match the provided parameters
 *
 * @param container
 *            source of data
 * @param subj
 *            Subject resource to match, or wildcard if null
 * @param prop
 *            Predicate uri to match, or wildcard if null
 * @param obj
 *            Object value to match, or wildcard if null
 * @param contexts
 *            Context values to match, or wildcard if null
 * @return Iterable set of quads containing results of find operation
 * @throws AnzoException
 *             if there was an error finding statements
 */
@SuppressWarnings("unchecked")
public static Iterable<Quad> findStatements(RDBQuadStore container, Resource subj, URI prop, Value obj,
        URI... contexts) throws AnzoException {
    Long namedGraphId = null;
    String graphTable = null;
    // Derived below: -1 = both named and metadata graphs, 0 = named only, 1 = metadata only.
    int metadataGraph = -1;
    if (contexts != null && contexts.length == 1) {
        // Single-context fast path: resolve the graph URI directly to its node id.
        URI context = (URI) StatementUtils.convertUriToAny(contexts[0]);
        if (context != null) {
            namedGraphId = container.getNodeLayout().fetchId(context, container.getConnection());
            if (namedGraphId == null) {
                return Collections.EMPTY_LIST; // required node is not even in db
            }
            metadataGraph = UriGenerator.isMetadataGraphUri(context) ? 1 : 0;
        }
    } else {
        HashSet<Value> set = new HashSet<Value>();
        boolean includeAllNamedGraphs = false, includeAllMetadataGraphs = false;
        Map<Value, Long> graphIds = null;
        if (contexts != null && contexts.length > 0) {
            // Separate the wildcard pseudo-graphs from concrete graph URIs.
            for (Resource context : contexts) {
                if (context.equals(GRAPHS.ALL_GRAPHS))
                    includeAllNamedGraphs = includeAllMetadataGraphs = true;
                else if (context.equals(GRAPHS.ALL_NAMEDGRAPHS))
                    includeAllNamedGraphs = true;
                else if (context.equals(GRAPHS.ALL_METADATAGRAPHS))
                    includeAllMetadataGraphs = true;
                else
                    set.add(context);
            }
            if (set.size() > 0) {
                graphIds = container.getNodeLayout().resolveStoredNodes(set, false, container.getConnection(),
                        -1);
                if (graphIds.size() < set.size()) {
                    // Some requested graphs are unknown: log them, and bail out only
                    // when nothing matched and no wildcard was requested.
                    set.removeAll(graphIds.keySet());
                    log.debug(LogUtils.RDB_MARKER, "findStatementsInferred",
                            new UnknownGraphException(StringUtils.join(set.iterator(), ", ")));
                    if (graphIds.size() == 0 && !includeAllNamedGraphs && !includeAllMetadataGraphs) {
                        return Collections.EMPTY_LIST;// required node is not even in db
                    }
                }
                if (graphIds.size() > 0) {
                    graphTable = SQLQueryConstants.defaultGraphsTempTable;
                    // NOTE(review): the populateValidGraphs calls below are commented
                    // out, so graphTable is named here but never populated in this
                    // method -- confirm the downstream findStatements overload fills
                    // it, otherwise this looks like dead/incomplete code.
                    //container.populateValidGraphs(graphIds, includeAllNamedGraphs, includeAllMetadataGraphs, graphTable);
                    //container.populateValidGraphs(graphIds, includeAllNamedGraphs, graphTable);
                }
            } else if (includeAllNamedGraphs || includeAllMetadataGraphs) {
                metadataGraph = includeAllNamedGraphs ? (includeAllMetadataGraphs ? -1 : 0) : 1;
            }
        }
    }
    return findStatements(container, subj, prop, obj, namedGraphId, metadataGraph, graphTable);
}
From source file:dao.DirectoryAuthorDaoDb.java
/** * getLoginsOfAuthors - returns the list of authors with their logins * @param authorSet - HashSet of authors with logins and member info * @return List -list of authors with logins * @throws BaseDaoException -when error occurs *///from w w w .j ava 2s. c om public List getLoginsOfAuthors(HashSet authorSet) throws BaseDaoException { if (authorSet == null) { return null; } List authorList = new ArrayList(); if (authorList != null) { Iterator iterator = authorSet.iterator(); while (iterator.hasNext()) { Directory dir = (Directory) iterator.next(); if (dir != null) { authorList.add(dir.getValue(DbConstants.LOGIN)); } } return authorList; } return null; }
From source file:org.openanzo.jdbc.container.query.FindInferred.java
/**
 * Find all statements in container that match the provided parameters including inferred statements
 *
 * @param container
 *            source of data
 * @param subj
 *            Subject resource to match, or wildcard if null
 * @param prop
 *            Predicate uri to match, or wildcard if null
 * @param obj
 *            Object value to match, or wildcard if null
 * @param contexts
 *            Context values to match, or wildcard if null
 * @param ontology
 *            URI of ontology that contains inferred property and object definitions
 * @return Iterable set of quads containing results of find operation
 * @throws AnzoException
 *             if there was an error finding statements
 */
public static Iterable<Quad> findStatementsInferred(RDBQuadStore container, Resource subj, URI prop, Value obj,
        Resource[] contexts, Resource ontology) throws AnzoException {
    Long namedGraphId = null;
    String graphTable = null;
    // Derived below: -1 = both named and metadata graphs, 0 = named only, 1 = metadata only.
    int metadataGraph = -1;
    if (contexts != null && contexts.length == 1) {
        // Single-context fast path: resolve the graph directly to its stored node id.
        Resource context = (Resource) StatementUtils.convertUriToAny(contexts[0]);
        if (context != null) {
            namedGraphId = container.getNodeLayout().fetchId(context, container.getConnection());
            if (namedGraphId == null) {
                return Collections.<Quad>emptySet(); // required node is not even in db
            }
            // NOTE(review): the non-inferred findStatements() decides this via
            // UriGenerator.isMetadataGraphUri(context) -- confirm the raw prefix
            // check here is intentionally different.
            metadataGraph = context.toString().startsWith(NAMESPACES.METADATAGRAPH_PREFIX) ? 1 : 0;
        }
    } else {
        HashSet<Value> set = new HashSet<Value>();
        boolean includeAllNamedGraphs = false, includeAllMetadataGraphs = false;
        Map<Value, Long> graphIds = null;
        if (contexts != null && contexts.length > 0) {
            // Separate the wildcard pseudo-graphs from concrete graph URIs.
            for (Resource context : contexts) {
                if (context.equals(GRAPHS.ALL_GRAPHS))
                    includeAllNamedGraphs = includeAllMetadataGraphs = true;
                else if (context.equals(GRAPHS.ALL_NAMEDGRAPHS))
                    includeAllNamedGraphs = true;
                else if (context.equals(GRAPHS.ALL_METADATAGRAPHS))
                    includeAllMetadataGraphs = true;
                else
                    set.add(context);
            }
            if (set.size() > 0) {
                graphIds = container.getNodeLayout().resolveStoredNodes(set, false, container.getConnection(),
                        -1);
                if (graphIds.size() < set.size()) {
                    // Some requested graphs are unknown: log them, and bail out only
                    // when nothing matched and no wildcard was requested.
                    set.removeAll(graphIds.keySet());
                    log.debug(LogUtils.DATASOURCE_MARKER, "findStatementsInferred",
                            new UnknownGraphException(StringUtils.join(set.iterator(), ", ")));
                    if (graphIds.size() == 0 && !includeAllNamedGraphs && !includeAllMetadataGraphs) {
                        return Collections.<Quad>emptyList();// required node is not even in db
                    }
                }
                if (graphIds.size() > 0) {
                    graphTable = SQLQueryConstants.defaultGraphsTempTable;
                    // NOTE(review): the populateValidGraphs calls below are commented
                    // out, so graphTable is named here but never populated in this
                    // method -- confirm the downstream overload fills it.
                    //container.populateValidGraphs(graphIds, includeAllNamedGraphs, includeAllMetadataGraphs, graphTable);
                    //container.populateValidGraphs(graphIds, includeAllNamedGraphs, graphTable);
                }
            } else if (includeAllNamedGraphs || includeAllMetadataGraphs) {
                metadataGraph = includeAllNamedGraphs ? (includeAllMetadataGraphs ? -1 : 0) : 1;
            }
        }
    }
    return findStatementsInferred(container, subj, prop, obj, namedGraphId, metadataGraph, graphTable,
            ontology);
}
From source file:dao.DirectoryAuthorDaoDb.java
/**
 * Lists all users whose names match the given alphabet pattern, excluding the
 * authors of the given directory. Results are served from (and written back to)
 * the tree cache keyed by the alphabet.
 *
 * @param alphabet - alphabet (substring pattern to match; defaults to "A" when empty)
 * @param directoryId - directoryId
 * @param userId - userId
 * @param userLogin - userLogin
 * @param accessFlag the access flag to read slave(0) or master (1)
 * @return List - list of matching users (Hdlogin beans) that are not authors of
 *         the directory, or null when there are no matches or no authors
 *         (callers must handle null)
 * @throws BaseDaoException
 */
public List getUsers(String alphabet, String directoryId, String userId, String userLogin, int accessFlag)
        throws BaseDaoException {
    if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userId) || RegexStrUtil.isNull(userLogin)) {
        throw new BaseDaoException("params are null");
    }
    // alphabet defaults to "A" when not supplied
    if (RegexStrUtil.isNull(alphabet)) {
        alphabet = "A";
    }
    // check user permissions: only the diary admin or an author of the directory may list users
    if (!diaryAdmin.isDiaryAdmin(userLogin) && !isAuthor(directoryId, userId)) {
        throw new BaseDaoException("User does not have permission to list users for this directory, "
                + directoryId + " userId = " + userId);
    }
    Fqn fqn = cacheUtil.fqn(DbConstants.USERS_ALPHABET);
    // Cache hit path: filter the cached user list against the directory's authors.
    if (treeCache.exists(fqn, alphabet)) {
        HashSet dirauthors = new HashSet();
        List result = (List) treeCache.get(fqn, alphabet);
        if (result != null && result.size() > 0) {
            HashSet authorSet = listAuthorsOfDirectory(directoryId, userId, userLogin, accessFlag);
            if (authorSet != null && authorSet.size() > 0) {
                Iterator it = authorSet.iterator();
                while (it.hasNext()) {
                    Directory author = (Directory) it.next();
                    if (author != null) {
                        dirauthors.add(author.getValue(DbConstants.LOGIN));
                    }
                }
            }
        }
        // NOTE(review): when the cache entry exists but dirauthors ends up empty,
        // this falls through to the database query below instead of returning the
        // cached list -- confirm that is intended.
        if (result != null && result.size() > 0 && dirauthors.size() > 0) {
            List newUsers = new ArrayList();
            for (int i = 0; i < result.size(); i++) {
                Hdlogin hdlogin = (Hdlogin) result.get(i);
                if (hdlogin != null) {
                    if (!RegexStrUtil.isNull(hdlogin.getValue(DbConstants.LOGIN))) {
                        // keep only users that are NOT authors of the directory
                        if (!dirauthors.contains(hdlogin.getValue(DbConstants.LOGIN))) {
                            newUsers.add(result.get(i));
                            // hdlogin.setValue(DbConstants.AUTHOR, "0");
                        }
                    }
                }
            }
            treeCache.put(fqn, alphabet, newUsers);
            return newUsers;
        }
    }
    // Cache miss (or unusable cache entry): query the database directly.
    String queryName = null;
    if (accessFlag == 1) {
        queryName = scalabilityManager.getWriteZeroScalability("showusersquery");
    } else {
        queryName = scalabilityManager.getReadZeroScalability("showusersquery");
    }
    // NOTE(review): showUsersQuery appears to be an instance field; concurrent
    // calls with different accessFlag values could race on it -- verify.
    showUsersQuery = getQueryMapper().getQuery(queryName);
    // Build the SQL LIKE pattern: %<alphabet>%
    StringBuffer alphabetString = new StringBuffer("%");
    alphabetString.append(alphabet);
    alphabetString.append("%");
    List result = null;
    try {
        Object[] params = { (Object) alphabetString.toString(), (Object) alphabetString.toString(),
                (Object) alphabetString.toString() };
        result = showUsersQuery.execute(params);
    } catch (Exception e) {
        throw new BaseDaoException("error in getUsers() " + showUsersQuery.getSql(), e);
    }
    // Filter the DB result against the directory's authors, mirroring the cache path.
    HashSet authorSet = listAuthorsOfDirectory(directoryId, userId, userLogin, accessFlag);
    if (authorSet != null && authorSet.size() > 0) {
        HashSet dirauthors = new HashSet();
        Iterator it = authorSet.iterator();
        while (it.hasNext()) {
            Directory author = (Directory) it.next();
            if (author != null) {
                dirauthors.add(author.getValue(DbConstants.LOGIN));
            }
        }
        if (result != null && result.size() > 0 && dirauthors.size() > 0) {
            List newUsers = new ArrayList();
            for (int i = 0; i < result.size(); i++) {
                Hdlogin hdlogin = (Hdlogin) result.get(i);
                if (hdlogin != null) {
                    if (!RegexStrUtil.isNull(hdlogin.getValue(DbConstants.LOGIN))) {
                        if (!dirauthors.contains(hdlogin.getValue(DbConstants.LOGIN))) {
                            newUsers.add(result.get(i));
                            // hdlogin.setValue(DbConstants.AUTHOR, "0");
                        }
                    }
                }
            }
            treeCache.put(fqn, alphabet, newUsers);
            return newUsers;
        }
    }
    // NOTE(review): returns null (not an empty list) when there are no authors or
    // no matching users -- callers must null-check.
    return null;
}