Usage examples for java.util.HashSet#iterator()
public Iterator<E> iterator()
From source file:org.processmining.analysis.petrinet.cpnexport.HLToCPNTranslator.java
private void generateFrequencyDependencySubpage(ColoredTransition transition) { // in the case that the incoming place of the transition is a decision // place,//from w w w . ja v a 2s .c o m // calculate the gcd of the frequency dependencies that are attached to // the // outgoing transitions of that decision place HLActivity act = highLevelPN.findActivity(transition); ArrayList<HLChoice> choices = highLevelPN.getHLProcess().getChoicesForTargetActivity(act.getID()); // TODO: now only take first choice (and assume that the others would be // consistent) // However, it might be that different frequencies are given for this // activity for different choices // The cpn export currently does not deal with this - needs global // normalization if required HLChoice choice = choices.get(0); // calculate the gcd for the frequencydependencies in transitionsFreqDep int gcd = 1; boolean start = true; Iterator<HLCondition> conditions = choice.getConditions().iterator(); while (conditions.hasNext()) { HLCondition cond1 = conditions.next(); if (start && conditions.hasNext()) { HLCondition cond2 = conditions.next(); int freq1 = cond1.getFrequency(); int freq2 = cond2.getFrequency(); gcd = org.apache.commons.math.util.MathUtils.gcd(freq1, freq2); start = false; } else { gcd = org.apache.commons.math.util.MathUtils.gcd(gcd, cond1.getFrequency()); } } SubpageMapping oldMapping = transition.getSubpageMapping(); ColoredPetriNet oldSubpage = transition.getSubpage(); // Generate subpage between the already existing subpage for this // transition ColoredPetriNet subpage = new ColoredPetriNet(); subpage.setIdentifier("Frequency_dependency_" + transition.getIdentifier()); subpage.setCpnID(ManagerID.getNewID()); // Generate an input and an output place for each input and output place // of transition SubpageMapping mappingFromSubpageToTop = new SubpageMapping(); mappingFromSubpageToTop.setSubPageID(subpage.getCpnID()); HashSet inputPlacesSubpage = new HashSet<ColoredPlace>(); HashSet 
outputPlacesSubpage = new HashSet<ColoredPlace>(); Iterator inputPlaces = transition.getVerticesOnlyPredecessor().iterator(); while (inputPlaces.hasNext()) { ColoredPlace top = (ColoredPlace) inputPlaces.next(); ColoredPlace sub = new ColoredPlace(top.getIdentifier(), subpage); // the type of the top and sub place needs to be the same sub.setPlaceType(top.getPlaceType()); subpage.addPlace(sub); inputPlacesSubpage.add(sub); mappingFromSubpageToTop.addMapping(sub, top); } // the same for the output places Iterator outputPlaces = transition.getVerticesOnlySuccessor().iterator(); while (outputPlaces.hasNext()) { ColoredPlace top = (ColoredPlace) outputPlaces.next(); ColoredPlace sub = new ColoredPlace(top.getIdentifier(), subpage); // the type of the top and sub place needs to be the same sub.setPlaceType(top.getPlaceType()); subpage.addPlace(sub); outputPlacesSubpage.add(sub); mappingFromSubpageToTop.addMapping(sub, top); } // the same for the places that are both output and input Iterator inputOutputPlaces = transition.getVerticesPredecessorAndSuccessor().iterator(); while (inputOutputPlaces.hasNext()) { ColoredPlace top = (ColoredPlace) inputOutputPlaces.next(); ColoredPlace sub = new ColoredPlace(top.getIdentifier(), subpage); // the type of the top and sub place needs to be the same sub.setPlaceType(top.getPlaceType()); subpage.addPlace(sub); inputPlacesSubpage.add(sub); outputPlacesSubpage.add(sub); mappingFromSubpageToTop.addMapping(sub, top); } transition.setSubpageMapping(mappingFromSubpageToTop); transition.setSubpage(subpage); // The number of the generated transitions on the subpage equals the // frequency dependency // that has been set for the transition. 
Furthermore, ensure that each // generated transition // is connected correctly with the input and output places HLCondition cond = choice.getCondition(act.getID()); for (int i = 0; i < ((cond.getFrequency()) / gcd); i++) { ColoredTransition generatedTransition = new ColoredTransition(transition.getIdentifier() + (i + 1), subpage); subpage.addTransition(generatedTransition); Iterator<ColoredPlace> subPageInputPlaces = inputPlacesSubpage.iterator(); while (subPageInputPlaces.hasNext()) { ColoredPlace inputSubpage = subPageInputPlaces.next(); // connect with generatedTransition ColoredEdge edge = new ColoredEdge(inputSubpage, generatedTransition); subpage.addEdge(edge); } Iterator<ColoredPlace> subPageOutputPlaces = outputPlacesSubpage.iterator(); while (subPageOutputPlaces.hasNext()) { ColoredPlace outputSubpage = subPageOutputPlaces.next(); // connect with generatedTransition ColoredEdge edge = new ColoredEdge(generatedTransition, outputSubpage); subpage.addEdge(edge); } // ensure that the generatedTransition is pointing to the correct // subpage and that // the mapping is correct generatedTransition.setSubpage(oldSubpage); SubpageMapping newMapping = new SubpageMapping(); generatedTransition.setSubpageMapping(newMapping); newMapping.setSubPageID(oldMapping.getSubPageID()); // fix the mappings Iterator<Place> placesSub = oldSubpage.getPlaces().iterator(); while (placesSub.hasNext()) { ColoredPlace placeSub = (ColoredPlace) placesSub.next(); // Get the mapping that belongs to this place, if existing SubpageMapping.Mapping oldMappingForSubPlace = oldMapping.getMappingForSubPlace(placeSub); if (oldMappingForSubPlace != null) { ColoredPlace topPlace = oldMappingForSubPlace.second(); SubpageMapping.Mapping mappingTopToSubpageForPlace = mappingFromSubpageToTop .getMappingForTopPlace(topPlace); newMapping.addMapping(placeSub, mappingTopToSubpageForPlace.first()); } } } subpage.generateCpnIDs(); // generate the layout of the subpage generateLayoutHierarchicalPN(subpage, 
false); }
From source file:org.sakaiproject.tool.assessment.facade.AssessmentGradingFacadeQueries.java
/**
 * Creates item-grading records for every published item of the assessment that
 * the student has not yet answered.
 * <p>
 * For sections authored as "random draw from question pool", the section's items
 * are shuffled with a deterministic seed (per agent, or per submission when the
 * randomization type is PER_SUBMISSION) and only the first N drawn items are
 * completed — the shuffle must reproduce the order the student originally saw.
 *
 * @param sectionSetMap optional cache of publishedAssessmentId -> section set;
 *                      may be null. Populated as a side effect on cache miss.
 */
public void completeItemGradingData(AssessmentGradingData assessmentGradingData, HashMap sectionSetMap) {
    // collect the ids of items the student already answered
    ArrayList answeredPublishedItemIdList = new ArrayList();
    List publishedItemIds = getPublishedItemIds(assessmentGradingData.getAssessmentGradingId());
    Iterator iter = publishedItemIds.iterator();
    Long answeredPublishedItemId = Long.valueOf(0l);
    while (iter.hasNext()) {
        answeredPublishedItemId = (Long) iter.next();
        log.debug("answeredPublishedItemId = " + answeredPublishedItemId);
        answeredPublishedItemIdList.add(answeredPublishedItemId);
    }
    PublishedAssessmentService publishedAssessmentService = new PublishedAssessmentService();
    Long publishedAssessmentId = assessmentGradingData.getPublishedAssessmentId();
    // resolve the section set, using the caller-supplied cache when possible
    HashSet sectionSet = null;
    if (sectionSetMap == null || !sectionSetMap.containsKey(publishedAssessmentId)) {
        sectionSet = publishedAssessmentService.getSectionSetForAssessment(publishedAssessmentId);
        if (sectionSetMap != null) {
            sectionSetMap.put(publishedAssessmentId, sectionSet);
        }
    } else {
        sectionSet = (HashSet) sectionSetMap.get(publishedAssessmentId);
    }
    if (sectionSet == null) {
        return;
    }
    PublishedSectionData publishedSectionData = null;
    ArrayList itemArrayList = null;
    Long publishedItemId = Long.valueOf(0l);
    PublishedItemData publishedItemData = null;
    iter = sectionSet.iterator();
    while (iter.hasNext()) {
        publishedSectionData = (PublishedSectionData) iter.next();
        log.debug("sectionId = " + publishedSectionData.getSectionId());
        String authorType = publishedSectionData.getSectionMetaDataByLabel(SectionDataIfc.AUTHOR_TYPE);
        if (authorType != null
                && authorType.equals(SectionDataIfc.RANDOM_DRAW_FROM_QUESTIONPOOL.toString())) {
            log.debug("Random draw from questonpool");
            itemArrayList = publishedSectionData.getItemArray();
            // deterministic seed: per agent by default ...
            long seed = (long) AgentFacade.getAgentString().hashCode();
            if (publishedSectionData.getSectionMetaDataByLabel(SectionDataIfc.RANDOMIZATION_TYPE) != null
                    && publishedSectionData.getSectionMetaDataByLabel(SectionDataIfc.RANDOMIZATION_TYPE)
                            .equals(SectionDataIfc.PER_SUBMISSION)) {
                // ... or per submission, so each attempt gets its own draw
                seed = (long) (assessmentGradingData.getAssessmentGradingId().toString() + "_"
                        + publishedSectionData.getSectionId().toString()).hashCode();
            }
            Collections.shuffle(itemArrayList, new Random(seed));
            Integer numberToBeDrawn = Integer.valueOf(0);
            if (publishedSectionData.getSectionMetaDataByLabel(SectionDataIfc.NUM_QUESTIONS_DRAWN) != null) {
                numberToBeDrawn = Integer.valueOf(
                        publishedSectionData.getSectionMetaDataByLabel(SectionDataIfc.NUM_QUESTIONS_DRAWN));
            }
            int samplesize = numberToBeDrawn.intValue();
            // only the first <samplesize> shuffled items belong to this draw
            for (int i = 0; i < samplesize; i++) {
                publishedItemData = (PublishedItemData) itemArrayList.get(i);
                publishedItemId = publishedItemData.getItemId();
                log.debug("publishedItemId = " + publishedItemId);
                if (!answeredPublishedItemIdList.contains(publishedItemId)) {
                    saveItemGradingData(assessmentGradingData, publishedItemId);
                }
            }
        } else {
            log.debug("Not random draw from questonpool");
            // regular section: complete every unanswered item
            itemArrayList = publishedSectionData.getItemArray();
            Iterator itemIter = itemArrayList.iterator();
            while (itemIter.hasNext()) {
                publishedItemData = (PublishedItemData) itemIter.next();
                publishedItemId = publishedItemData.getItemId();
                log.debug("publishedItemId = " + publishedItemId);
                if (!answeredPublishedItemIdList.contains(publishedItemId)) {
                    saveItemGradingData(assessmentGradingData, publishedItemId);
                }
            }
        }
    }
}
From source file:gov.nih.nci.evs.browser.utils.DataUtils.java
/**
 * Builds a map of relationship category -> sorted Vector of relationship rows
 * ("rela|name|code|source") for the given UMLS concept.
 * <p>
 * Relationships are fetched in both directions from the metabrowser extension,
 * bucketed into categories, stripped of redundant RO/CHD duplicates, and sorted
 * either naturally or by the caller-supplied per-category sort options.
 *
 * @param CUI         concept unique identifier to look up
 * @param sort_option per-category sort options (index 4 = Sibling), or null for
 *                    natural quick-sort order
 * @return the category map, or null if the metabrowser extension is unavailable
 *         or retrieval fails
 */
public HashMap getAssociationTargetHashMap(String CUI, Vector sort_option) {
    Debug.println("(*) DataUtils getAssociationTargetHashMap ");
    long ms, delay = 0;
    String action = null;
    ms = System.currentTimeMillis();
    action = "Initializing member variables";
    // associations treated as parent/child-like when querying TARGETOF
    List<String> par_chd_assoc_list = new ArrayList();
    par_chd_assoc_list.add("CHD");
    par_chd_assoc_list.add("RB");
    Vector parent_asso_vec = new Vector(Arrays.asList(_hierAssocToParentNodes));
    Vector child_asso_vec = new Vector(Arrays.asList(_hierAssocToChildNodes));
    Vector sibling_asso_vec = new Vector(Arrays.asList(_assocToSiblingNodes));
    Vector bt_vec = new Vector(Arrays.asList(_assocToBTNodes));
    Vector nt_vec = new Vector(Arrays.asList(_assocToNTNodes));
    Vector category_vec = new Vector(Arrays.asList(_relationshipCategories));
    // start each category with an empty set
    HashMap rel_hmap = new HashMap();
    for (int k = 0; k < category_vec.size(); k++) {
        String category = (String) category_vec.elementAt(k);
        HashSet hset = new HashSet();
        rel_hmap.put(category, hset);
    }
    HashSet w = new HashSet();
    Map<String, List<RelationshipTabResults>> map = null;
    Map<String, List<RelationshipTabResults>> map2 = null;
    LexBIGService lbs = RemoteServerUtil.createLexBIGService();
    MetaBrowserService mbs = null;
    delay = System.currentTimeMillis() - ms;
    Debug.println("Run time (ms) for " + action + " " + delay);
    DBG.debugDetails(delay, action, "getAssociationTargetHashMap");
    try {
        mbs = (MetaBrowserService) lbs.getGenericExtension("metabrowser-extension");
        if (mbs == null) {
            _logger.error("Error! metabrowser-extension is null!");
            return null;
        }
        ms = System.currentTimeMillis();
        action = "Retrieving " + SOURCE_OF;
        ms = System.currentTimeMillis();
        map = mbs.getRelationshipsDisplay(CUI, null, Direction.SOURCEOF);
        delay = System.currentTimeMillis() - ms;
        Debug.println("Run time (ms) for " + action + " " + delay);
        DBG.debugDetails(delay, action, "getAssociationTargetHashMap");
        ms = System.currentTimeMillis();
        action = "Retrieving " + TARGET_OF;
        ms = System.currentTimeMillis();
        map2 = mbs.getRelationshipsDisplay(CUI, par_chd_assoc_list, Direction.TARGETOF);
        delay = System.currentTimeMillis() - ms;
        Debug.println("Run time (ms) for " + action + " " + delay);
        DBG.debugDetails(delay, action, "getAssociationTargetHashMap");
    } catch (Exception ex) {
        ex.printStackTrace();
        return null;
    }
    // Categorize relationships into six categories and find association source data
    ms = System.currentTimeMillis();
    action = "Categorizing relationships into six categories; finding source data for each relationship";
    Iterator rel_it = map.entrySet().iterator();
    while (rel_it.hasNext()) {
        Entry thisEntry = (Entry) rel_it.next();
        String rel = (String) thisEntry.getKey();
        List<RelationshipTabResults> relations = (List<RelationshipTabResults>) thisEntry.getValue();
        if (rel.compareTo(INCOMPLETE) != 0) {
            String category = "Other";
            // NOTE(review): categories are deliberately inverted relative to the
            // association direction (parent assoc -> "Child", BT -> "Narrower"):
            // SOURCEOF results describe the other concept's role — confirm.
            if (parent_asso_vec.contains(rel))
                category = "Child";
            else if (child_asso_vec.contains(rel))
                category = "Parent";
            else if (bt_vec.contains(rel))
                category = "Narrower";
            else if (nt_vec.contains(rel))
                category = "Broader";
            else if (sibling_asso_vec.contains(rel))
                category = "Sibling";
            for (RelationshipTabResults result : relations) {
                String code = result.getCui();
                // skip self-references and internal "@" codes
                if (code.compareTo(CUI) != 0 && code.indexOf("@") == -1) {
                    String rela = result.getRela();
                    String source = result.getSource();
                    String name = result.getName();
                    w = (HashSet) rel_hmap.get(category);
                    if (w == null) {
                        w = new HashSet();
                    }
                    String str = rela + "|" + name + "|" + code + "|" + source;
                    if (!w.contains(str)) {
                        w.add(str);
                        rel_hmap.put(category, w);
                    }
                }
            }
        }
    }
    // same categorization for the TARGETOF results
    Iterator rel_it2 = map2.entrySet().iterator();
    while (rel_it2.hasNext()) {
        Entry thisEntry = (Entry) rel_it2.next();
        String rel = (String) thisEntry.getKey();
        List<RelationshipTabResults> relations = (List<RelationshipTabResults>) thisEntry.getValue();
        if (rel.compareTo(INCOMPLETE) != 0) {
            String category = "Other";
            if (parent_asso_vec.contains(rel))
                category = "Child";
            else if (child_asso_vec.contains(rel))
                category = "Parent";
            else if (bt_vec.contains(rel))
                category = "Narrower";
            else if (nt_vec.contains(rel))
                category = "Broader";
            else if (sibling_asso_vec.contains(rel))
                category = "Sibling";
            for (RelationshipTabResults result : relations) {
                String code = result.getCui();
                if (code.compareTo(CUI) != 0 && code.indexOf("@") == -1) {
                    String rela = result.getRela();
                    String source = result.getSource();
                    String name = result.getName();
                    w = (HashSet) rel_hmap.get(category);
                    if (w == null) {
                        w = new HashSet();
                    }
                    String str = rela + "|" + name + "|" + code + "|" + source;
                    if (!w.contains(str)) {
                        w.add(str);
                        rel_hmap.put(category, w);
                    }
                }
            }
        }
    }
    delay = System.currentTimeMillis() - ms;
    Debug.println("Run time (ms) for " + action + " " + delay);
    DBG.debugDetails(delay, action, "getAssociationTargetHashMap");
    // Remove redundant RO relationships: an "Other" RO row is dropped when the
    // same name|code|source triple already appears under a non-RO relationship.
    ms = System.currentTimeMillis();
    action = "Removing redundant RO and CHD relationships";
    HashSet other_hset = new HashSet();
    HashSet w2 = (HashSet) rel_hmap.get("Other");
    Iterator it = w2.iterator();
    while (it.hasNext()) {
        String s = (String) it.next();
        Vector ret_vec = parseData(s, "|");
        String rel = (String) ret_vec.elementAt(0);
        String name = (String) ret_vec.elementAt(1);
        String target_code = (String) ret_vec.elementAt(2);
        String src = (String) ret_vec.elementAt(3);
        String t = name + "|" + target_code + "|" + src;
        if (rel.compareTo("RO") != 0 && !other_hset.contains(t)) {
            other_hset.add(t);
        }
    }
    HashSet w3 = new HashSet();
    w2 = (HashSet) rel_hmap.get("Other");
    it = w2.iterator();
    while (it.hasNext()) {
        String s = (String) it.next();
        Vector ret_vec = parseData(s, "|");
        String rel = (String) ret_vec.elementAt(0);
        String name = (String) ret_vec.elementAt(1);
        String target_code = (String) ret_vec.elementAt(2);
        String src = (String) ret_vec.elementAt(3);
        if (rel.compareTo("RO") != 0) {
            w3.add(s);
        } else { // RO: keep only if no non-RO twin exists
            String t = name + "|" + target_code + "|" + src;
            if (!other_hset.contains(t)) {
                w3.add(s);
            }
        }
    }
    rel_hmap.put("Other", w3);
    // Same de-duplication for "Child": drop CHD rows shadowed by non-CHD twins.
    other_hset = new HashSet();
    w2 = (HashSet) rel_hmap.get("Child");
    it = w2.iterator();
    while (it.hasNext()) {
        String s = (String) it.next();
        Vector ret_vec = parseData(s, "|");
        String rel = (String) ret_vec.elementAt(0);
        String name = (String) ret_vec.elementAt(1);
        String target_code = (String) ret_vec.elementAt(2);
        String src = (String) ret_vec.elementAt(3);
        String t = name + "|" + target_code + "|" + src;
        if (rel.compareTo("CHD") != 0 && !other_hset.contains(t)) {
            other_hset.add(t);
        }
    }
    w3 = new HashSet();
    w2 = (HashSet) rel_hmap.get("Child");
    it = w2.iterator();
    while (it.hasNext()) {
        String s = (String) it.next();
        Vector ret_vec = parseData(s, "|");
        String rel = (String) ret_vec.elementAt(0);
        String name = (String) ret_vec.elementAt(1);
        String target_code = (String) ret_vec.elementAt(2);
        String src = (String) ret_vec.elementAt(3);
        if (rel.compareTo("CHD") != 0) {
            w3.add(s);
        } else {
            String t = name + "|" + target_code + "|" + src;
            if (!other_hset.contains(t)) {
                w3.add(s);
            }
        }
    }
    rel_hmap.put("Child", w3);
    delay = System.currentTimeMillis() - ms;
    Debug.println("Run time (ms) for " + action + " " + delay);
    DBG.debugDetails(delay, action, "getAssociationTargetHashMap");
    ms = System.currentTimeMillis();
    action = "Sorting relationships by sort options (columns)";
    HashMap new_rel_hmap = new HashMap();
    // Sort relationships by sort options (columns)
    if (sort_option == null) {
        for (int k = 0; k < category_vec.size(); k++) {
            String category = (String) category_vec.elementAt(k);
            w = (HashSet) rel_hmap.get(category);
            Vector rel_v = hashSet2Vector(w);
            SortUtils.quickSort(rel_v);
            new_rel_hmap.put(category, rel_v);
        }
    } else {
        for (int k = 0; k < category_vec.size(); k++) {
            String category = (String) category_vec.elementAt(k);
            w = (HashSet) rel_hmap.get(category);
            Vector rel_v = hashSet2Vector(w);
            String sortOption = (String) sort_option.elementAt(k);
            rel_v = sortRelationshipData(rel_v, sortOption);
            new_rel_hmap.put(category, rel_v);
        }
    }
    delay = System.currentTimeMillis() - ms;
    Debug.println("Run time (ms) for " + action + " " + delay);
    DBG.debugDetails(delay, action, "getAssociationTargetHashMap");
    // Siblings come from a dedicated query, not the categorized maps.
    Vector sibling_vector = getSiblings(CUI);
    if (sort_option != null) {
        sibling_vector = sortRelationshipData(sibling_vector, (String) sort_option.elementAt(4));
    }
    new_rel_hmap.put("Sibling", sibling_vector);
    removeRedundantRecords(new_rel_hmap);
    // NOTE(review): this re-put of the same value is a no-op — possibly leftover code.
    String incomplete = (String) new_rel_hmap.get(INCOMPLETE);
    if (incomplete != null) {
        new_rel_hmap.put(INCOMPLETE, incomplete);
    }
    return new_rel_hmap;
}
From source file:org.apache.axis.wsdl.toJava.JavaDeployWriter.java
/**
 * Write out deployment instructions for the given WSDL binding.
 * <p>
 * Emits the {@code className}, {@code wsdlPortType}, {@code typeMappingVersion},
 * {@code allowedMethods} and (optionally) {@code scope} parameters of a WSDD
 * service element, plus per-operation metadata via {@link #writeOperation}.
 *
 * @param pw     destination writer for the deployment descriptor
 * @param bEntry the binding entry being deployed
 * @throws IOException if writing fails
 */
protected void writeDeployBinding(PrintWriter pw, BindingEntry bEntry) throws IOException {
    Binding binding = bEntry.getBinding();
    String className = bEntry.getName();
    // Skeleton mode appends "Skeleton"; otherwise use the custom impl class or "Impl".
    if (emitter.isSkeletonWanted()) {
        className += "Skeleton";
    } else {
        String customClassName = emitter.getImplementationClassName();
        if (customClassName != null)
            className = customClassName;
        else
            className += "Impl";
    }
    pw.println(" <parameter name=\"className\" value=\"" + className + "\"/>");
    pw.println(" <parameter name=\"wsdlPortType\" value=\""
            + binding.getPortType().getQName().getLocalPart() + "\"/>");
    pw.println(
            " <parameter name=\"typeMappingVersion\" value=\"" + emitter.getTypeMappingVersion() + "\"/>");
    HashSet allowedMethods = new HashSet();
    String namespaceURI = binding.getQName().getNamespaceURI();
    if (!emitter.isSkeletonWanted()) {
        Iterator operationsIterator = binding.getBindingOperations().iterator();
        for (; operationsIterator.hasNext();) {
            BindingOperation bindingOper = (BindingOperation) operationsIterator.next();
            Operation operation = bindingOper.getOperation();
            OperationType type = operation.getStyle();
            // These operation types are not supported. The signature
            // will be a string stating that fact.
            if ((OperationType.NOTIFICATION.equals(type)) || (OperationType.SOLICIT_RESPONSE.equals(type))) {
                continue;
            }
            String javaOperName = null;
            ServiceDesc serviceDesc = emitter.getServiceDesc();
            if (emitter.isDeploy() && serviceDesc != null) {
                // In deploy mode, sync the java operation name with the ServiceDesc.
                OperationDesc[] operDescs = serviceDesc
                        .getOperationsByQName(new QName(namespaceURI, operation.getName()));
                if (operDescs.length == 0) {
                    log.warn("Can't find operation in the Java Class for WSDL binding operation : "
                            + operation.getName());
                    continue;
                }
                OperationDesc operDesc = operDescs[0];
                if (operDesc.getMethod() == null) {
                    log.warn("Can't find Java method for operation descriptor : " + operDesc.getName());
                    continue;
                }
                javaOperName = operDesc.getMethod().getName();
            } else {
                javaOperName = JavaUtils.xmlNameToJava(operation.getName());
            }
            allowedMethods.add(javaOperName);
            // We pass "" as the namespace argument because we're just
            // interested in the return type for now.
            Parameters params = symbolTable.getOperationParameters(operation, "", bEntry);
            if (params != null) {
                // TODO: Should really construct a FaultDesc here and
                // TODO: pass it to writeOperation, but this will take
                // TODO: some refactoring
                // Get the operation QName
                QName elementQName = Utils.getOperationQName(bindingOper, bEntry, symbolTable);
                // Get the operation's return QName and type
                QName returnQName = null;
                QName returnType = null;
                if (params.returnParam != null) {
                    returnQName = params.returnParam.getQName();
                    returnType = Utils.getXSIType(params.returnParam);
                }
                // Get the operations faults
                Map faultMap = bEntry.getFaults();
                ArrayList faults = null;
                if (faultMap != null) {
                    faults = (ArrayList) faultMap.get(bindingOper);
                }
                // Get the operation's SOAPAction
                String SOAPAction = Utils.getOperationSOAPAction(bindingOper);
                // Write the operation metadata
                writeOperation(pw, javaOperName, elementQName, returnQName, returnType, params,
                        binding.getQName(), faults, SOAPAction);
            }
        }
    }
    // "*" means all methods are allowed; otherwise a space-separated list.
    pw.print(" <parameter name=\"allowedMethods\" value=\"");
    if (allowedMethods.isEmpty()) {
        pw.println("*\"/>");
    } else {
        boolean first = true;
        for (Iterator i = allowedMethods.iterator(); i.hasNext();) {
            String method = (String) i.next();
            if (first) {
                pw.print(method);
                first = false;
            } else {
                pw.print(" " + method);
            }
        }
        pw.println("\"/>");
    }
    Scope scope = emitter.getScope();
    if (scope != null) {
        pw.println(" <parameter name=\"scope\" value=\"" + scope.getName() + "\"/>");
    }
}
From source file:gov.noaa.pfel.coastwatch.util.FileVisitorDNLS.java
/**
 * This is a convenience method for using this class.
 * <p>This works with Amazon AWS S3 bucket URLs. Internal /'s in the keys will be
 * treated as folder separators. If there aren't any /'s, all the keys will
 * be in the root directory.
 *
 * @param tDir The starting directory, with \\ or /, with or without trailing slash.
 *    The resulting directoryPA will contain dirs with matching slashes and trailing slash.
 * @param tFileNameRegex a regex that file names must match to be included
 * @param tRecursive if true, subdirectories are visited too
 * @param tPathRegex a regex to constrain which subdirs to include.
 *    This is ignored if recursive is false.
 *    null or "" is treated as .* (i.e., match everything).
 * @param tDirectoriesToo if true, each directory name will get its own rows
 *    in the results.
 * @return a table with columns with DIRECTORY, NAME, LASTMODIFIED, and SIZE columns.
 *    LASTMODIFIED and SIZE are LongArrays -- For directories when the values
 *    are otherwise unknown, the value will be Long.MAX_VALUE.
 *    If directoriesToo=true, the original dir won't be included and any
 *    directory's file NAME will be "".
 * @throws IOException if trouble
 */
public static Table oneStep(String tDir, String tFileNameRegex, boolean tRecursive, String tPathRegex,
        boolean tDirectoriesToo) throws IOException {
    long time = System.currentTimeMillis();
    // is tDir an http URL?
    if (tDir.matches(FileVisitorDNLS.HTTP_REGEX)) {
        // Is it an S3 bucket with "files"?
        // If testing a "dir", url should have a trailing slash.
        Matcher matcher = AWS_S3_PATTERN.matcher(File2.addSlash(tDir)); //force trailing slash
        if (matcher.matches()) {
            // S3 object keys are not real directories — they are keys with internal
            // slashes — so directory structure must be inferred from the key names.
            // Specify prefix in request.
            Table table = makeEmptyTable();
            StringArray directoryPA = (StringArray) table.getColumn(DIRECTORY);
            StringArray namePA = (StringArray) table.getColumn(NAME);
            LongArray lastModifiedPA = (LongArray) table.getColumn(LASTMODIFIED);
            LongArray sizePA = (LongArray) table.getColumn(SIZE);
            String bucketName = matcher.group(1);
            String prefix = matcher.group(2);
            String baseURL = tDir.substring(0, matcher.start(2));
            AmazonS3 s3client = new AmazonS3Client(new ProfileCredentialsProvider());
            try {
                if (verbose)
                    String2.log("FileVisitorDNLS.oneStep getting info from AWS S3 at" + "\nURL=" + tDir);
                // I wanted to generate lastMod for dir based on lastMod of files
                // but it would be inconsistent for different requests (recursive, fileNameRegex).
                // So just a set of dir names.
                HashSet<String> dirHashSet = new HashSet();
                ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucketName)
                        .withPrefix(prefix);
                ObjectListing objectListing;
                do {
                    objectListing = s3client.listObjects(listObjectsRequest);
                    for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
                        String keyFullName = objectSummary.getKey();
                        String keyDir = File2.getDirectory(baseURL + keyFullName);
                        String keyName = File2.getNameAndExtension(keyFullName);
                        if (debugMode)
                            String2.log(
                                    "keyFullName=" + keyFullName + "\nkeyDir=" + keyDir + "\n tDir=" + tDir);
                        if (keyDir.startsWith(tDir) && //it should
                                (tRecursive || keyDir.length() == tDir.length())) {
                            // store this dir
                            if (tDirectoriesToo) {
                                // S3 only returns object keys. I must infer/collect directories.
                                // Store this dir and parents back to tDir.
                                String choppedKeyDir = keyDir;
                                while (choppedKeyDir.length() >= tDir.length()) {
                                    if (!dirHashSet.add(choppedKeyDir))
                                        break; //hash set already had this, so will already have parents
                                    // chop off last subdirectory
                                    choppedKeyDir = File2.getDirectory(
                                            choppedKeyDir.substring(0, choppedKeyDir.length() - 1)); //remove trailing /
                                }
                            }
                            // store this file's information
                            // Sometimes directories appear as files are named "" with size=0.
                            // I don't store those as files.
                            if (debugMode)
                                String2.log("keyName=" + keyFullName + "\n tFileNameRegex=" + tFileNameRegex
                                        + " matches=" + keyName.matches(tFileNameRegex));
                            if (keyName.length() > 0 && keyName.matches(tFileNameRegex)) {
                                directoryPA.add(keyDir);
                                namePA.add(keyName);
                                lastModifiedPA.add(objectSummary.getLastModified().getTime()); //epoch millis
                                sizePA.add(objectSummary.getSize()); //long
                            }
                        }
                    }
                    listObjectsRequest.setMarker(objectListing.getNextMarker());
                } while (objectListing.isTruncated());
                // add directories to the table
                if (tDirectoriesToo) {
                    Iterator<String> it = dirHashSet.iterator();
                    while (it.hasNext()) {
                        directoryPA.add(it.next());
                        namePA.add("");
                        lastModifiedPA.add(Long.MAX_VALUE);
                        sizePA.add(Long.MAX_VALUE);
                    }
                }
                table.leftToRightSortIgnoreCase(2);
                return table;
            } catch (AmazonServiceException ase) {
                throw new IOException("AmazonServiceException: " + ase.getErrorType() + " ERROR, HTTP Code="
                        + ase.getStatusCode() + ": " + ase.getMessage(), ase);
            } catch (AmazonClientException ace) {
                throw new IOException(ace.getMessage(), ace);
            }
        }
        // HYRAX before THREDDS
        // e.g. http://dods.jpl.nasa.gov/opendap/ocean_wind/ccmp/L3.5a/data/flk/1988/
        matcher = HYRAX_PATTERN.matcher(tDir);
        if (matcher.matches()) {
            try {
                if (verbose)
                    String2.log("FileVisitorDNLS.oneStep getting info from Hyrax at" + "\nURL=" + tDir);
                Table table = makeEmptyTable();
                StringArray directoryPA = (StringArray) table.getColumn(DIRECTORY);
                StringArray namePA = (StringArray) table.getColumn(NAME);
                LongArray lastModifiedPA = (LongArray) table.getColumn(LASTMODIFIED);
                LongArray sizePA = (LongArray) table.getColumn(SIZE);
                DoubleArray lastModDA = new DoubleArray();
                addToHyraxUrlList(tDir, tFileNameRegex, tRecursive, tPathRegex, tDirectoriesToo, namePA,
                        lastModDA, sizePA);
                lastModifiedPA.append(lastModDA);
                // namePA initially holds full URLs; split into directory + name columns
                int n = namePA.size();
                for (int i = 0; i < n; i++) {
                    String fn = namePA.get(i);
                    directoryPA.add(File2.getDirectory(fn));
                    namePA.set(i, File2.getNameAndExtension(fn));
                }
                table.leftToRightSortIgnoreCase(2);
                return table;
            } catch (Throwable t) {
                throw new IOException(t.getMessage(), t);
            }
        }
        // THREDDS
        matcher = THREDDS_PATTERN.matcher(tDir);
        if (matcher.matches()) {
            try {
                if (verbose)
                    String2.log("FileVisitorDNLS.oneStep getting info from THREDDS at" + "\nURL=" + tDir);
                Table table = makeEmptyTable();
                StringArray directoryPA = (StringArray) table.getColumn(DIRECTORY);
                StringArray namePA = (StringArray) table.getColumn(NAME);
                LongArray lastModifiedPA = (LongArray) table.getColumn(LASTMODIFIED);
                LongArray sizePA = (LongArray) table.getColumn(SIZE);
                DoubleArray lastModDA = new DoubleArray();
                addToThreddsUrlList(tDir, tFileNameRegex, tRecursive, tPathRegex, tDirectoriesToo, namePA,
                        lastModDA, sizePA);
                lastModifiedPA.append(lastModDA);
                int n = namePA.size();
                for (int i = 0; i < n; i++) {
                    String fn = namePA.get(i);
                    directoryPA.add(File2.getDirectory(fn));
                    namePA.set(i, File2.getNameAndExtension(fn));
                }
                table.leftToRightSortIgnoreCase(2);
                return table;
            } catch (Throwable t) {
                throw new IOException(t.getMessage(), t);
            }
        }
        // default: Apache-style WAF
        try {
            if (verbose)
                String2.log(
                        "FileVisitorDNLS.oneStep getting info from Apache-style WAF at" + "\nURL=" + tDir);
            Table table = makeEmptyTable();
            StringArray directorySA = (StringArray) table.getColumn(DIRECTORY);
            StringArray nameSA = (StringArray) table.getColumn(NAME);
            LongArray lastModLA = (LongArray) table.getColumn(LASTMODIFIED);
            LongArray sizeLA = (LongArray) table.getColumn(SIZE);
            addToWAFUrlList(tDir, tFileNameRegex, tRecursive, tPathRegex, tDirectoriesToo, directorySA,
                    nameSA, lastModLA, sizeLA);
            table.leftToRightSortIgnoreCase(2);
            return table;
        } catch (Throwable t) {
            throw new IOException(t.getMessage(), t);
        }
    }
    // local files
    // follow symbolic links: https://docs.oracle.com/javase/7/docs/api/java/nio/file/FileVisitor.html
    // But this doesn't follow Windows symbolic link .lnk's:
    //   http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4237760
    FileVisitorDNLS fv = new FileVisitorDNLS(tDir, tFileNameRegex, tRecursive, tPathRegex, tDirectoriesToo);
    EnumSet<FileVisitOption> opts = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
    Files.walkFileTree(FileSystems.getDefault().getPath(tDir), opts, //follow symbolic links
            Integer.MAX_VALUE, //maxDepth
            fv);
    fv.table.leftToRightSortIgnoreCase(2);
    if (verbose)
        String2.log("FileVisitorDNLS.oneStep(local) finished successfully. n=" + fv.directoryPA.size()
                + " time=" + (System.currentTimeMillis() - time) + "ms");
    return fv.table;
}
From source file:org.unitime.timetable.solver.TimetableDatabaseLoader.java
private void propagateCommittedAssignment(HashSet students, Assignment assignment) { Class_ clazz = assignment.getClazz(); Lecture parentLecture = null;//w ww . j av a2s .c o m Class_ c = clazz; while ((parentLecture == null || parentLecture.isCommitted()) && c.getParentClass() != null) { c = c.getParentClass(); parentLecture = (Lecture) iLectures.get(c.getUniqueId()); } if (parentLecture != null && !parentLecture.isCommitted()) { for (Lecture lecture : parentLecture.sameSubpartLectures()) { if (!lecture.equals(parentLecture) && !lecture.isCommitted()) { //iProgress.debug("[A] Students "+students+" cannot enroll "+lecture.getName()+" due to the enrollment of "+clazz.getClassLabel()); for (Iterator i = students.iterator(); i.hasNext();) { Student student = (Student) i.next(); student.addCanNotEnroll(lecture); } } } } if (!clazz.getSchedulingSubpart().getChildSubparts().isEmpty()) { for (Iterator i = clazz.getSchedulingSubpart().getChildSubparts().iterator(); i.hasNext();) { SchedulingSubpart subpart = (SchedulingSubpart) i.next(); for (Iterator j = subpart.getClasses().iterator(); j.hasNext();) { Class_ child = (Class_) j.next(); if (!clazz.equals(child.getParentClass())) propagateCommittedAssignment(students, clazz, child); } } } }
From source file:com.clustercontrol.ping.factory.RunMonitorPing.java
/**
 * Parses raw fping output lines and aggregates them into per-host ping results.
 * <p>
 * Expected fping line shapes (one line per host, possibly repeated):
 * <ul>
 *   <li>normal:        {@code 127.0.0.1 : 0.10 1.23}</li>
 *   <li>all lost:      {@code 127.0.0.1 : - -}</li>
 *   <li>aberrant:      {@code 127.0.0.1 : duplicate for [0], xx bytes, x.xx ms}</li>
 *   <li>aberrant:      ICMP error text (e.g. gateway unreachable)</li>
 * </ul>
 * Lines whose first token is not a valid IP address for the given version are ignored.
 * Aberrant lines for the same host are concatenated and appended to the original
 * message of the result.
 *
 * @param messages raw output lines from fping
 * @param count    number of ping packets sent per host
 * @param version  IP version (6 selects IPv6 parsing, anything else IPv4)
 * @return map keyed by host (IP string) to its aggregated {@code PingResult}
 */
public Hashtable<String, PingResult> wrapUpFping(ArrayList<String> messages, int count, int version) {
    Hashtable<String, PingResult> ret = new Hashtable<String, PingResult>();
    HashMap<String, String> normalMap = new HashMap<String, String>();   // host -> normal result line
    HashMap<String, String> aberrantMap = new HashMap<String, String>(); // host -> concatenated aberrant lines
    HashSet<String> hostSet = new HashSet<String>();                      // all hosts seen in the output

    String msg;
    String msgOrg;
    int lost;
    float average = 0;
    float reachRatio;

    // Pattern recognizing the leading IP address token.
    Pattern patternIp;
    if (version == 6) {
        patternIp = Pattern.compile("^([0-9].*)|^(\\:\\:.*)");
    } else {
        patternIp = Pattern.compile("^([0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}.*)");
    }
    // fping field separator (" : " or whitespace runs).
    Pattern patternSp = Pattern.compile("(\\s:\\s|\\s)+");
    // A normal per-packet value: a decimal RTT or "-" for a lost packet.
    Pattern patternNormal = Pattern.compile("([0-9]+\\.[0-9]+|-)");

    Matcher matcherIp;
    Matcher matcherValue;
    String message;

    /*
     * Pass 1: classify each output line as normal or aberrant per host.
     */
    Iterator<String> itr = messages.iterator();
    m_log.debug("wrapUpFping(): start logic: " + messages.size());
    while (itr.hasNext()) {
        message = itr.next();
        m_log.debug("wrapUpFping(): checkpoint");
        // Validate that the line starts with an IP address of the expected version.
        boolean bValidIP = false;
        if (version == 6) {
            m_log.debug("wrapUpFping(): into IPv6 loop");
            String[] strs = message.split(" ");
            try {
                InetAddress.getByName(strs[0]);
                bValidIP = true;
            } catch (Exception e) {
                m_log.warn("wrapUpFping() invalid IPv6 adress: original message: " + message, e);
                m_log.warn("wrapUpFping() stack trace: " + e.getClass().getSimpleName() + ", "
                        + e.getMessage(), e);
            }
        } else {
            matcherIp = patternIp.matcher(message);
            if (matcherIp.matches()) {
                bValidIP = true;
            }
        }
        if (bValidIP) {
            String[] strs = patternSp.split(message);
            // A line is "normal" when every field after the host is an RTT or "-".
            boolean isNormal = true;
            for (int i = 1; i < strs.length; i++) {
                matcherValue = patternNormal.matcher(strs[i]);
                if (!matcherValue.matches()) {
                    isNormal = false;
                }
            }
            if (isNormal) {
                normalMap.put(strs[0], message);
                m_log.debug("wrapUpFping() : normalValue : " + message);
            } else {
                // Accumulate multiple aberrant lines per host, newline-separated.
                if (aberrantMap.get(strs[0]) != null && !(aberrantMap.get(strs[0])).equals("")) {
                    String aberrantMessage = aberrantMap.get(strs[0]);
                    aberrantMessage = aberrantMessage + "\n" + message;
                    aberrantMap.put(strs[0], aberrantMessage);
                } else {
                    aberrantMap.put(strs[0], message);
                }
                m_log.debug("wrapUpFping() : aberrantValue : " + message);
            }
            hostSet.add(strs[0]);
        }
    }

    /*
     * Pass 2: build a PingResult per host from the classified lines.
     */
    itr = hostSet.iterator();
    m_log.debug("wrapUpFping() : before into check result append loop");
    while (itr.hasNext()) {
        m_log.debug("wrapUpFping() : after into check result append loop");
        String host = itr.next();
        String normalMessage = normalMap.get(host);
        String aberrantMessage = aberrantMap.get(host);
        if (normalMessage != null && !normalMessage.equals("")) {
            String[] strs = patternSp.split(normalMessage);
            // Per-host RTT statistics.
            float max = 0;                 // maximum RTT (init 0)
            float min = Float.MAX_VALUE;   // minimum RTT (init Float.MAX_VALUE)
            int num = 0;                   // number of received packets
            for (int i = 1; i <= count; i++) {
                if (strs[i].equals("-")) {
                    // "-" means the packet was lost; skip it.
                } else {
                    num++;
                    if (max < Float.parseFloat(strs[i])) {
                        max = Float.parseFloat(strs[i]);
                    }
                    if (min > Float.parseFloat(strs[i])) {
                        min = Float.parseFloat(strs[i]);
                    }
                    average += Float.parseFloat(strs[i]);
                }
            }
            // FIX: guard against division by zero — previously "average /= num"
            // ran unconditionally, yielding NaN (0.0f / 0) for hosts with no
            // received packets, and that NaN was stored in the PingResult.
            if (num > 0) {
                average /= num;
            }
            /*
             * Compose the human-readable original message.
             */
            StringBuffer buffer = new StringBuffer();
            buffer.append("Pinging " + host + " (" + host + ") .\n\n");
            if (num == 0) {
                // No packet came back: 100% loss, 0% reachability.
                lost = 100;
                reachRatio = 0;
                for (int i = 0; i < count; i++)
                    buffer.append("Reply from " + host + " icmp_seq=" + i
                            + " Destination Host Unreachable\n");
                buffer.append("\nPing statistics for " + host + ":\n");
                buffer.append("Packets: Sent = " + count + ", Received = " + num + ", Lost = "
                        + (count - num) + " (" + lost + "% loss),");
            } else {
                lost = (count - num) * 100 / count;
                reachRatio = (float) num * 100 / count;
                buffer.append("\nPing statistics for " + host + ":\n");
                buffer.append("Packets: Sent = " + count + ", Received = " + num + ", Lost = "
                        + (count - num) + " (" + lost + "% loss),");
                buffer.append("Approximate round trip times in milli-seconds:\n");
                buffer.append("\tMinimum = " + min + "ms, Maximum = " + max + "ms, Average = "
                        + average + "ms\n");
            }
            // Append any aberrant lines to the original message.
            if (aberrantMessage != null && !aberrantMessage.equals("")) {
                buffer.append("\n\n" + aberrantMessage + "\n");
            }
            msgOrg = buffer.toString();
            msg = "Packets: Sent = " + count + ", Received = " + num + ", Lost = " + (count - num)
                    + " (" + lost + "% loss)";
            PingResult res = new PingResult(host, msg, msgOrg, lost, average, reachRatio);
            ret.put(host, res);
            m_log.debug("wrapUpFping() : success msg = " + msg + ", msgOrg = " + msgOrg);
            // Reset accumulators for the next host.
            msg = "";
            msgOrg = "";
            lost = 100;
            average = 0;
            reachRatio = 0;
        } else {
            // No normal line for this host: report failure, attaching any
            // aberrant output we collected.
            msg = "Failed to get a value.";
            msgOrg = "Failed to get a value.";
            if (aberrantMessage != null && !aberrantMessage.equals("")) {
                msgOrg = msgOrg + "\n\n" + aberrantMessage;
            }
            PingResult res = new PingResult(host, msg, msgOrg, -1, -1, -1);
            ret.put(host, res);
            m_log.debug("wrapUpFping() : failure msg = " + msg + ", msgOrg = " + msgOrg);
            msg = "";
            msgOrg = "";
        }
    }
    return ret;
}
From source file:org.mindswap.swoop.renderer.entity.ConciseFormatEntityRenderer.java
/** * /*from ww w .ja v a 2s.co m*/ * Returns a list of changes that are required to fufill the requirements of clicking a * delete link. A delete operation may involve various steps of changes and may also * cause some other changes to happen. The list will contain all the atomic changes * required for this operation * * @param hLink the link clicked on the editor pane * @return A list of OntologyChange objects * @throws OWLException */ public List handleDeleteLink(String hLink) throws OWLException { List changes = new ArrayList(); OWLOntology ontology = reasoner.getOntology(); // parse DELETE hyper-link int pos1 = hLink.indexOf(":"); int pos2 = hLink.indexOf(":", pos1 + 1); String hashCode = hLink.substring(pos1 + 1, pos2); String titleCode = hLink.substring(pos2 + 1, hLink.length()); Object obj = OWLDescHash.get(hashCode); if (titleCode.equals("P-FUN")) { // Remove Functional Attribute // *** Note: Change is effective immediately for property attribute SetFunctional change = new SetFunctional(ontology, (OWLProperty) obj, false, null); swoopModel.addUncommittedChange(change); return new ArrayList(); } if (titleCode.equals("P-IFUN")) { // Remove InverseFunctional Attribute // *** Note: Change is effective immediately for property attribute SetInverseFunctional change = new SetInverseFunctional(ontology, (OWLObjectProperty) obj, false, null); swoopModel.addUncommittedChange(change); return new ArrayList(); } if (titleCode.equals("P-TRA")) { // Remove Transitive Attribute // *** Note: Change is effective immediately for property attribute SetTransitive change = new SetTransitive(ontology, (OWLObjectProperty) obj, false, null); swoopModel.addUncommittedChange(change); return new ArrayList(); } if (titleCode.equals("P-SYM")) { // Remove Symmetric Attribute // *** Note: Change is effective immediately for property attribute SetSymmetric change = new SetSymmetric(ontology, (OWLObjectProperty) obj, false, null); swoopModel.addUncommittedChange(change); return new 
ArrayList(); } if (titleCode.equals("A-ANN")) { // Remove Annotation Instance OWLEntity currEntity = swoopModel.getSelectedEntity(); if (obj instanceof OWLAnnotationInstance) { OWLAnnotationInstance oai = (OWLAnnotationInstance) obj; RemoveAnnotationInstance change = new RemoveAnnotationInstance(ontology, currEntity, oai.getProperty(), oai.getContent(), null); swoopModel.addUncommittedChange(change); } return new ArrayList(); } if (titleCode.equals("I-SAM") || titleCode.equals("I-DIF")) { // delete sameAs axiom Set indSet = new HashSet(); indSet.add((OWLIndividual) displayedEntity); indSet.add((OWLIndividual) obj); OWLIndividualAxiom indAxiom = null; if (titleCode.equals("I-SAM")) indAxiom = ontology.getOWLDataFactory().getOWLSameIndividualsAxiom(indSet); else indAxiom = ontology.getOWLDataFactory().getOWLDifferentIndividualsAxiom(indSet); RemoveIndividualAxiom change = new RemoveIndividualAxiom(ontology, indAxiom, null); changes.add(change); } if (titleCode.equals("C-EQU")) { // delete equivalent class OWLClass displayedClass = (OWLClass) displayedEntity; if (obj instanceof OWLDescription) { OWLDescription desc = (OWLDescription) obj; RemoveEquivalentClass change = new RemoveEquivalentClass(ontology, displayedClass, desc, null); changes.add(change); } else { Iterator descIter = ((Collection) obj).iterator(); while (descIter.hasNext()) { OWLDescription desc = (OWLDescription) descIter.next(); RemoveEquivalentClass change = new RemoveEquivalentClass(ontology, displayedClass, desc, null); changes.add(change); } } } else if (titleCode.equals("C-DIS")) { // delete disjoint class OWLClass displayedClass = (OWLClass) displayedEntity; Set disSet = new HashSet(); if (obj instanceof OWLDescription) { OWLDescription desc = (OWLDescription) obj; disSet.add(desc); disSet.add(displayedClass); } else { disSet.add(displayedClass); disSet.addAll((Collection) obj); } OWLDisjointClassesAxiom disAxiom = ontology.getOWLDataFactory().getOWLDisjointClassesAxiom(disSet); RemoveClassAxiom 
change = new RemoveClassAxiom(ontology, disAxiom, null); changes.add(change); } else if (titleCode.equals("C-SUB")) { // delete super-class OWLClass displayedClass = (OWLClass) displayedEntity; if (obj instanceof OWLDescription) { OWLDescription desc = (OWLDescription) obj; if (displayedClass.getSuperClasses(ontology).contains(obj)) { // add RemoveSuperClass change in this case RemoveSuperClass change = new RemoveSuperClass(ontology, displayedClass, desc, null); changes.add(change); } else { // remove specific axiom in the other case OWLSubClassAxiom axiom = ontology.getOWLDataFactory().getOWLSubClassAxiom(displayedClass, desc); RemoveClassAxiom change2 = new RemoveClassAxiom(ontology, axiom, null); changes.add(change2); } } else { Iterator descIter = ((Collection) obj).iterator(); while (descIter.hasNext()) { OWLDescription desc = (OWLDescription) descIter.next(); if (displayedClass.getSuperClasses(ontology).contains(obj)) { RemoveSuperClass change = new RemoveSuperClass(ontology, displayedClass, desc, null); changes.add(change); } else { // remove specific axiom if present OWLSubClassAxiom axiom = ontology.getOWLDataFactory().getOWLSubClassAxiom(displayedClass, desc); RemoveClassAxiom change2 = new RemoveClassAxiom(ontology, axiom, null); changes.add(change2); } } } } else if (titleCode.equals("C-SUP")) { // delete super-class OWLClass displayedClass = (OWLClass) displayedEntity; if (obj instanceof OWLClass) { // super classes can be defined in two ways // check to see what kind of change needs to be put OWLClass desc = (OWLClass) obj; if (desc.getSuperClasses(ontology).contains(displayedClass)) { RemoveSuperClass change = new RemoveSuperClass(ontology, desc, displayedClass, null); changes.add(change); } else { // remove specific axiom if present OWLSubClassAxiom axiom = ontology.getOWLDataFactory().getOWLSubClassAxiom(desc, displayedClass); RemoveClassAxiom change2 = new RemoveClassAxiom(ontology, axiom, null); changes.add(change2); } } else { OWLDescription 
desc = (OWLDescription) obj; // remove specific axiom if present OWLSubClassAxiom axiom = ontology.getOWLDataFactory().getOWLSubClassAxiom(desc, displayedClass); RemoveClassAxiom change2 = new RemoveClassAxiom(ontology, axiom, null); changes.add(change2); } } else if (titleCode.equals("C-INT")) { // delete intersection element // remove whole intersection and add remaining elements OWLClass displayedClass = (OWLClass) displayedEntity; if (obj instanceof OWLDescription) { OWLDescription desc = (OWLDescription) obj; deleteFromBooleanDesc(ontology, displayedClass, desc, OWLAnd.class, changes); } else { Iterator descIter = ((Collection) obj).iterator(); while (descIter.hasNext()) { OWLClass desc = (OWLClass) descIter.next(); deleteFromBooleanDesc(ontology, displayedClass, desc, OWLAnd.class, changes); } } } else if (titleCode.equals("C-UNI")) { // delete union element // remove whole union and add remaining elements OWLClass displayedClass = (OWLClass) displayedEntity; if (obj instanceof OWLDescription) { OWLDescription desc = (OWLDescription) obj; deleteFromBooleanDesc(ontology, displayedClass, desc, OWLOr.class, changes); } else { Iterator descIter = ((Collection) obj).iterator(); while (descIter.hasNext()) { OWLClass desc = (OWLClass) descIter.next(); deleteFromBooleanDesc(ontology, displayedClass, desc, OWLOr.class, changes); } } } else if (titleCode.equals("C-NOT")) { // delete complement element OWLClass displayedClass = (OWLClass) displayedEntity; if (obj instanceof OWLDescription) { BooleanElementChange change = new BooleanElementChange(OWLNot.class, "Remove", ontology, displayedClass, (OWLDescription) obj, null); changes.add(change); } } else if (titleCode.equals("I-ONE")) { // delete one-of element OWLClass displayedClass = (OWLClass) displayedEntity; if (obj instanceof OWLIndividual) { OWLIndividual desc = (OWLIndividual) obj; updateEnumerations(ontology, displayedClass, desc, changes); } } else if (titleCode.equals("I-INS")) { // delete instance 
RemoveIndividualClass change = new RemoveIndividualClass(ontology, (OWLIndividual) obj, (OWLDescription) displayedEntity, null); changes.add(change); } else if (titleCode.equals("C-TYP")) { // delete type RemoveIndividualClass change = new RemoveIndividualClass(ontology, (OWLIndividual) displayedEntity, (OWLDescription) obj, null); changes.add(change); } else if (titleCode.equals("C-HASDOM")) { // delete property domain OWLProperty prop = (OWLProperty) displayedEntity; if (obj instanceof OWLDescription) { OWLDescription desc = (OWLDescription) obj; RemoveDomain change = new RemoveDomain(ontology, prop, desc, null); changes.add(change); } } else if (titleCode.equals("P-DOM")) { // delete property domain OWLClass cla = (OWLClass) displayedEntity; if (obj instanceof OWLProperty) { RemoveDomain change = new RemoveDomain(ontology, (OWLProperty) obj, cla, null); changes.add(change); } } else if (titleCode.equals("C-HASRAN")) { // delete property range // check if datatype or object property if (displayedEntity instanceof OWLObjectProperty) { OWLObjectProperty prop = (OWLObjectProperty) displayedEntity; if (obj instanceof OWLDescription) { OWLDescription desc = (OWLDescription) obj; RemoveObjectPropertyRange change = new RemoveObjectPropertyRange(ontology, prop, desc, null); changes.add(change); } } else { OWLDataProperty prop = (OWLDataProperty) displayedEntity; if (obj instanceof OWLDataRange) { OWLDataRange dran = (OWLDataRange) obj; RemoveDataPropertyRange change = new RemoveDataPropertyRange(ontology, prop, dran, null); changes.add(change); } } } else if (titleCode.equals("P-RAN")) { // delete property range OWLClass cla = (OWLClass) displayedEntity; if (obj instanceof OWLObjectProperty) { RemoveObjectPropertyRange change = new RemoveObjectPropertyRange(ontology, (OWLObjectProperty) obj, cla, null); changes.add(change); } } else if (titleCode.equals("P-SUB")) { // remove super property if (obj instanceof OWLProperty) { if (((OWLProperty) 
displayedEntity).getSuperProperties(ontology).contains(obj)) { RemoveSuperProperty change = new RemoveSuperProperty(ontology, (OWLProperty) displayedEntity, (OWLProperty) obj, null); changes.add(change); } else { // remove specific axiom if present OWLSubPropertyAxiom axiom = ontology.getOWLDataFactory() .getOWLSubPropertyAxiom((OWLProperty) displayedEntity, (OWLProperty) obj); RemovePropertyAxiom change2 = new RemovePropertyAxiom(ontology, axiom, null); changes.add(change2); } } } else if (titleCode.equals("P-SUP")) { // remove sub property if (obj instanceof OWLProperty) { if (((OWLProperty) obj).getSuperProperties(ontology).contains(displayedEntity)) { RemoveSuperProperty change = new RemoveSuperProperty(ontology, (OWLProperty) obj, (OWLProperty) displayedEntity, null); changes.add(change); } else { // remove specific axiom if present OWLSubPropertyAxiom axiom = ontology.getOWLDataFactory() .getOWLSubPropertyAxiom((OWLProperty) obj, (OWLProperty) displayedEntity); RemovePropertyAxiom change2 = new RemovePropertyAxiom(ontology, axiom, null); changes.add(change2); } } } else if (titleCode.equals("P-EQU")) { // remove equivalent property if (obj instanceof OWLProperty) { Set propSet = new HashSet(); propSet.add((OWLProperty) obj); propSet.add((OWLProperty) displayedEntity); OWLEquivalentPropertiesAxiom axiom = ontology.getOWLDataFactory() .getOWLEquivalentPropertiesAxiom(propSet); RemovePropertyAxiom change = new RemovePropertyAxiom(ontology, axiom, null); changes.add(change); } } else if (titleCode.equals("P-INV")) { // remove inverse property if (obj instanceof OWLObjectProperty) { OWLObjectProperty prop = (OWLObjectProperty) displayedEntity; OWLObjectProperty inverse = (OWLObjectProperty) obj; RemoveInverse change = new RemoveInverse(ontology, prop, inverse, null); changes.add(change); } } else if (titleCode.startsWith(("P-VAL"))) { // remove property value pair // also obtain value from hash table String valueHashKey = 
titleCode.substring(titleCode.lastIndexOf(":") + 1, titleCode.length()); Object value = OWLDescHash.get(valueHashKey); if (obj instanceof OWLObjectProperty) { RemoveObjectPropertyInstance change = new RemoveObjectPropertyInstance(ontology, (OWLIndividual) displayedEntity, (OWLObjectProperty) obj, (OWLIndividual) value, null); changes.add(change); } else if (obj instanceof OWLDataProperty) { RemoveDataPropertyInstance change = new RemoveDataPropertyInstance(ontology, (OWLIndividual) displayedEntity, (OWLDataProperty) obj, (OWLDataValue) value, null); changes.add(change); } } else if (titleCode.startsWith("RULE")) { System.out.println("Deleting Rule"); //String n3 = swoopModel.getRuleExpr().publishRulesToPychinko(); // obj is OWLRule // go through the rulemap and remove this rule OWLRule rule = (OWLRule) obj; //get the rule and its friggin expressivity (exactly why are they bundled together?) OWLRuleAtom consAtom = (OWLRuleAtom) rule.getConsequents().iterator().next(); OWLObject key = null; if (consAtom instanceof OWLRuleClassAtom) { key = ((OWLRuleClassAtom) consAtom).getDescription(); } else { if (consAtom instanceof OWLRuleDataPropertyAtom) { key = ((OWLRuleDataPropertyAtom) consAtom).getProperty(); } else { if (consAtom instanceof OWLRuleObjectPropertyAtom) { key = ((OWLRuleObjectPropertyAtom) consAtom).getProperty(); } } } HashSet rulesSet = (HashSet) swoopModel.getRuleExpr().getRuleMap().get(key); //find the rule we want to delete RuleValue rvDelete = null; Iterator it = rulesSet.iterator(); while (it.hasNext()) { RuleValue rv = (RuleValue) it.next(); if (rv.getRule().equals(obj)) { rvDelete = rv; } } rulesSet.remove(rvDelete); swoopModel.getRuleExpr().getRuleMap().put(key, rulesSet); } return changes; }
From source file:org.sakaiproject.tool.assessment.facade.AssessmentGradingFacadeQueries.java
public List getExportResponsesData(String publishedAssessmentId, boolean anonymous, String audioMessage, String fileUploadMessage, String noSubmissionMessage, boolean showPartAndTotalScoreSpreadsheetColumns, String poolString, String partString, String questionString, String textString, String rationaleString, String itemGradingCommentsString, Map useridMap, String responseCommentString) { ArrayList dataList = new ArrayList(); ArrayList headerList = new ArrayList(); ArrayList finalList = new ArrayList(2); PublishedAssessmentService pubService = new PublishedAssessmentService(); HashSet publishedAssessmentSections = pubService .getSectionSetForAssessment(Long.valueOf(publishedAssessmentId)); Double zeroDouble = new Double(0.0); HashMap publishedAnswerHash = pubService .preparePublishedAnswerHash(pubService.getPublishedAssessment(publishedAssessmentId)); HashMap publishedItemTextHash = pubService .preparePublishedItemTextHash(pubService.getPublishedAssessment(publishedAssessmentId)); HashMap publishedItemHash = pubService .preparePublishedItemHash(pubService.getPublishedAssessment(publishedAssessmentId)); //Get this sorted to add the blank gradings for the questions not answered later. 
Set publishItemSet = new TreeSet(new ItemComparator()); publishItemSet.addAll(publishedItemHash.values()); int numSubmission = 1; String numSubmissionText = noSubmissionMessage; String lastAgentId = ""; String agentEid = ""; String firstName = ""; String lastName = ""; Set useridSet = new HashSet(useridMap.keySet()); ArrayList responseList = null; boolean canBeExported = false; boolean fistItemGradingData = true; List list = getAllOrderedSubmissions(publishedAssessmentId); Iterator assessmentGradingIter = list.iterator(); while (assessmentGradingIter.hasNext()) { // create new section-item-scores structure for this assessmentGrading Iterator sectionsIter = publishedAssessmentSections.iterator(); HashMap sectionItems = new HashMap(); TreeMap sectionScores = new TreeMap(); while (sectionsIter.hasNext()) { PublishedSectionData publishedSection = (PublishedSectionData) sectionsIter.next(); ArrayList itemsArray = publishedSection.getItemArraySortedForGrading(); Iterator itemsIter = itemsArray.iterator(); // Iterate through the assessment questions (items) HashMap itemsForSection = new HashMap(); while (itemsIter.hasNext()) { ItemDataIfc item = (ItemDataIfc) itemsIter.next(); itemsForSection.put(item.getItemId(), item.getItemId()); }//www . 
ja v a2 s .co m sectionItems.put(publishedSection.getSequence(), itemsForSection); sectionScores.put(publishedSection.getSequence(), zeroDouble); } AssessmentGradingData assessmentGradingData = (AssessmentGradingData) assessmentGradingIter.next(); String agentId = assessmentGradingData.getAgentId(); responseList = new ArrayList(); canBeExported = false; if (anonymous) { canBeExported = true; responseList.add(assessmentGradingData.getAssessmentGradingId()); } else { if (useridMap.containsKey(assessmentGradingData.getAgentId())) { useridSet.remove(assessmentGradingData.getAgentId()); canBeExported = true; try { agentEid = userDirectoryService.getUser(assessmentGradingData.getAgentId()).getEid(); firstName = userDirectoryService.getUser(assessmentGradingData.getAgentId()).getFirstName(); lastName = userDirectoryService.getUser(assessmentGradingData.getAgentId()).getLastName(); } catch (Exception e) { log.error("Cannot get user"); } responseList.add(lastName); responseList.add(firstName); responseList.add(agentEid); if (assessmentGradingData.getForGrade()) { if (lastAgentId.equals(agentId)) { numSubmission++; } else { numSubmission = 1; lastAgentId = agentId; } } else { numSubmission = 0; lastAgentId = agentId; } if (numSubmission == 0) { numSubmissionText = noSubmissionMessage; } else { numSubmissionText = String.valueOf(numSubmission); } responseList.add(numSubmissionText); } } if (canBeExported) { int sectionScoreColumnStart = responseList.size(); if (showPartAndTotalScoreSpreadsheetColumns) { Double finalScore = assessmentGradingData.getFinalScore(); if (finalScore != null) { responseList.add((Double) finalScore.doubleValue()); // gopal - cast for spreadsheet numerics } else { log.debug("finalScore is NULL"); responseList.add(0d); } } String assessmentGradingComments = ""; if (assessmentGradingData.getComments() != null) { assessmentGradingComments = assessmentGradingData.getComments().replaceAll("<br\\s*/>", ""); } responseList.add(assessmentGradingComments); Long 
assessmentGradingId = assessmentGradingData.getAssessmentGradingId(); HashMap studentGradingMap = getStudentGradingData( assessmentGradingData.getAssessmentGradingId().toString(), false); ArrayList grades = new ArrayList(); grades.addAll(studentGradingMap.values()); Collections.sort(grades, new QuestionComparator(publishedItemHash)); //Add the blank gradings for the questions not answered in random pools. if (grades.size() < publishItemSet.size()) { int index = -1; for (Object pido : publishItemSet) { index++; PublishedItemData pid = (PublishedItemData) pido; if (index == grades.size() || ((ItemGradingData) ((List) grades.get(index)).get(0)) .getPublishedItemId().longValue() != pid.getItemId().longValue()) { //have to add the placeholder List newList = new ArrayList(); newList.add(new EmptyItemGrading(pid.getSection().getSequence(), pid.getItemId(), pid.getSequence())); grades.add(index, newList); } } } int questionNumber = 0; for (Object oo : grades) { // There can be more than one answer to a question, e.g. for // FIB with more than one blank or matching questions. So sort // by sequence number of answer. (don't bother to sort if just 1) List l = (List) oo; if (l.size() > 1) Collections.sort(l, new AnswerComparator(publishedAnswerHash)); String maintext = ""; String rationale = ""; String responseComment = ""; boolean addRationale = false; boolean addResponseComment = false; boolean matrixChoices = false; TreeMap responsesMap = new TreeMap(); // loop over answers per question int count = 0; ItemGradingData grade = null; //boolean isAudioFileUpload = false; boolean isFinFib = false; double itemScore = 0.0d; //Add the missing sequences! 
//To manage emi answers, could help with others too Map<Long, String> emiAnswerText = new TreeMap<Long, String>(); for (Object ooo : l) { grade = (ItemGradingData) ooo; if (grade == null || EmptyItemGrading.class.isInstance(grade)) { continue; } if (grade != null && grade.getAutoScore() != null) { itemScore += grade.getAutoScore().doubleValue(); } // now print answer data log.debug("<br> " + grade.getPublishedItemId() + " " + grade.getRationale() + " " + grade.getAnswerText() + " " + grade.getComments() + " " + grade.getReview()); Long publishedItemId = grade.getPublishedItemId(); ItemDataIfc publishedItemData = (ItemDataIfc) publishedItemHash.get(publishedItemId); Long typeId = publishedItemData.getTypeId(); questionNumber = publishedItemData.getSequence(); if (typeId.equals(TypeIfc.FILL_IN_BLANK) || typeId.equals(TypeIfc.FILL_IN_NUMERIC) || typeId.equals(TypeIfc.CALCULATED_QUESTION)) { log.debug("FILL_IN_BLANK, FILL_IN_NUMERIC"); isFinFib = true; String thistext = ""; Long answerid = grade.getPublishedAnswerId(); Long sequence = null; if (answerid != null) { AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid); if (answer != null) { sequence = answer.getSequence(); } } String temptext = grade.getAnswerText(); if (temptext == null) { temptext = "No Answer"; } thistext = sequence + ": " + temptext; if (count == 0) maintext = thistext; else maintext = maintext + "|" + thistext; count++; } else if (typeId.equals(TypeIfc.MATCHING)) { log.debug("MATCHING"); String thistext = ""; // for some question types we have another text field Long answerid = grade.getPublishedAnswerId(); String temptext = "No Answer"; Long sequence = null; if (answerid != null) { AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid); if (answer != null) { temptext = answer.getText(); if (temptext == null) { temptext = "No Answer"; } sequence = answer.getItemText().getSequence(); } else if (answerid == -1) { temptext = "None of the Above"; ItemTextIfc itemTextIfc = 
(ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); sequence = itemTextIfc.getSequence(); } } else { ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); sequence = itemTextIfc.getSequence(); } thistext = sequence + ": " + temptext; if (count == 0) maintext = thistext; else maintext = maintext + "|" + thistext; count++; } else if (typeId.equals(TypeIfc.IMAGEMAP_QUESTION)) { log.debug("MATCHING"); ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); Long sequence = itemTextIfc.getSequence(); String temptext = (grade.getIsCorrect()) ? "OK" : "No OK"; String thistext = sequence + ": " + temptext; if (count == 0) maintext = thistext; else maintext = maintext + "|" + thistext; count++; } else if (typeId.equals(TypeIfc.IMAGEMAP_QUESTION)) { log.debug("MATCHING"); ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); Long sequence = itemTextIfc.getSequence(); String temptext = (grade.getIsCorrect()) ? "OK" : "No OK"; String thistext = sequence + ": " + temptext; if (count == 0) maintext = thistext; else maintext = maintext + "|" + thistext; count++; } else if (typeId.equals(TypeIfc.IMAGEMAP_QUESTION)) { log.debug("MATCHING"); ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); Long sequence = itemTextIfc.getSequence(); String temptext = (grade.getIsCorrect()) ? 
"OK" : "No OK"; String thistext = sequence + ": " + temptext; if (count == 0) maintext = thistext; else maintext = maintext + "|" + thistext; count++; } else if (typeId.equals(TypeIfc.EXTENDED_MATCHING_ITEMS)) { log.debug("EXTENDED_MATCHING_ITEMS"); String thistext = ""; // for some question types we have another text field Long answerid = grade.getPublishedAnswerId(); String temptext = "No Answer"; Long sequence = null; if (answerid != null) { AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid); if (answer != null) { temptext = answer.getLabel(); if (temptext == null) { temptext = "No Answer"; } sequence = answer.getItemText().getSequence(); } } if (sequence == null) { ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); if (itemTextIfc != null) { sequence = itemTextIfc.getSequence(); } } if (sequence != null) { thistext = emiAnswerText.get(sequence); if (thistext == null) { thistext = temptext; } else { thistext = thistext + temptext; } emiAnswerText.put(sequence, thistext); } else { // Orphaned answer: the answer item to which it refers was removed after the assessment was taken, // as a result of editing the published assessment. This behaviour should be fixed, i.e. it should // not be possible to get orphaned answer item references in the database. 
sequence = new Long(99); emiAnswerText.put(sequence, "Item Removed"); } } else if (typeId.equals(TypeIfc.MATRIX_CHOICES_SURVEY)) { log.debug("MATRIX_CHOICES_SURVEY"); // for this kind of question a responsesMap is generated matrixChoices = true; Long answerid = grade.getPublishedAnswerId(); String temptext = "No Answer"; Long sequence = null; if (answerid != null) { AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid); temptext = answer.getText(); if (temptext == null) { temptext = "No Answer"; } sequence = answer.getItemText().getSequence(); } else { ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); sequence = itemTextIfc.getSequence(); log.debug( "Answerid null for " + grade.getPublishedItemId() + ". Adding " + sequence); temptext = "No Answer"; } responsesMap.put(sequence, temptext); } else if (typeId.equals(TypeIfc.AUDIO_RECORDING)) { log.debug("AUDIO_RECORDING"); maintext = audioMessage; //isAudioFileUpload = true; } else if (typeId.equals(TypeIfc.FILE_UPLOAD)) { log.debug("FILE_UPLOAD"); maintext = fileUploadMessage; //isAudioFileUpload = true; } else if (typeId.equals(TypeIfc.ESSAY_QUESTION)) { log.debug("ESSAY_QUESTION"); if (grade.getAnswerText() != null) { maintext = grade.getAnswerText(); } } else { log.debug("other type"); String thistext = ""; // for some question types we have another text field Long answerid = grade.getPublishedAnswerId(); if (answerid != null) { AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid); if (answer != null) { String temptext = answer.getText(); if (temptext != null) thistext = temptext; } else { log.warn("Published answer for " + answerid + " is null"); } } if (count == 0) maintext = thistext; else maintext = maintext + "|" + thistext; count++; } // taking care of rationale if (!addRationale && (typeId.equals(TypeIfc.MULTIPLE_CHOICE) || typeId.equals(TypeIfc.MULTIPLE_CORRECT) || typeId.equals(TypeIfc.MULTIPLE_CORRECT_SINGLE_SELECTION) || 
typeId.equals(TypeIfc.TRUE_FALSE))) { log.debug( "MULTIPLE_CHOICE or MULTIPLE_CORRECT or MULTIPLE_CORRECT_SINGLE_SELECTION or TRUE_FALSE"); if (publishedItemData.getHasRationale() != null && publishedItemData.getHasRationale()) { addRationale = true; rationale = grade.getRationale(); if (rationale == null) { rationale = ""; } } } //Survey - Matrix of Choices - Add Comment Field if (typeId.equals(TypeIfc.MATRIX_CHOICES_SURVEY)) { PublishedItemData pid = (PublishedItemData) publishedItemData; if (pid.getAddCommentFlag()) { addResponseComment = true; if (responseComment.equals("") && grade.getAnswerText() != null) { responseComment = grade.getAnswerText(); } } } } // inner for - answers if (!emiAnswerText.isEmpty()) { if (maintext == null) { maintext = ""; } for (Entry<Long, String> entry : emiAnswerText.entrySet()) { maintext = maintext + "|" + entry.getKey().toString() + ":" + entry.getValue(); } if (maintext.startsWith("|")) { maintext = maintext.substring(1); } } Integer sectionSequenceNumber = null; if (grade == null || EmptyItemGrading.class.isInstance(grade)) { sectionSequenceNumber = EmptyItemGrading.class.cast(grade).getSectionSequence(); questionNumber = EmptyItemGrading.class.cast(grade).getItemSequence(); // indicate that the student was not presented with this question maintext = "-"; } else { sectionSequenceNumber = updateSectionScore(sectionItems, sectionScores, grade.getPublishedItemId(), itemScore); } if (isFinFib && maintext.indexOf("No Answer") >= 0 && count == 1) { maintext = "No Answer"; } else if ("".equals(maintext)) { maintext = "No Answer"; } String itemGradingComments = ""; // if question type is not matrix choices apply the original code if (!matrixChoices) { responseList.add(maintext); if (grade.getComments() != null) { itemGradingComments = grade.getComments().replaceAll("<br\\s*/>", ""); } responseList.add(itemGradingComments); } else { // if there are questions not answered, a no answer response is added to the map ItemDataIfc 
correspondingPublishedItemData = (ItemDataIfc) publishedItemHash .get(grade.getPublishedItemId()); List correspondingItemTextArray = correspondingPublishedItemData.getItemTextArray(); log.debug("publishedItem is " + correspondingPublishedItemData.getText() + " and number of rows " + correspondingItemTextArray.size()); if (responsesMap.size() < correspondingItemTextArray.size()) { Iterator itItemTextHash = correspondingItemTextArray.iterator(); while (itItemTextHash.hasNext()) { ItemTextIfc itemTextIfc = (ItemTextIfc) itItemTextHash.next(); if (!responsesMap.containsKey(itemTextIfc.getSequence())) { log.debug("responsesMap does not contain answer to " + itemTextIfc.getText()); responsesMap.put(itemTextIfc.getSequence(), "No Answer"); } } } Iterator it = responsesMap.entrySet().iterator(); while (it.hasNext()) { Map.Entry e = (Map.Entry) it.next(); log.debug("Adding to response list " + e.getKey() + " and " + e.getValue()); responseList.add(e.getValue()); if (grade.getComments() != null) { itemGradingComments = grade.getComments().replaceAll("<br\\s*/>", ""); } responseList.add(itemGradingComments); itemGradingComments = ""; } } if (addRationale) { responseList.add(rationale); } if (addResponseComment) { responseList.add(responseComment); } // Only set header based on the first item grading data if (fistItemGradingData) { //get the pool name String poolName = null; for (Iterator i = publishedAssessmentSections.iterator(); i.hasNext();) { PublishedSectionData psd = (PublishedSectionData) i.next(); if (psd.getSequence().intValue() == sectionSequenceNumber) { poolName = psd.getSectionMetaDataByLabel(SectionDataIfc.POOLNAME_FOR_RANDOM_DRAW); } } if (!matrixChoices) { headerList.add(makeHeader(partString, sectionSequenceNumber, questionString, textString, questionNumber, poolString, poolName)); if (addRationale) { headerList.add(makeHeader(partString, sectionSequenceNumber, questionString, rationaleString, questionNumber, poolString, poolName)); } if (addResponseComment) 
{ headerList.add(makeHeader(partString, sectionSequenceNumber, questionString, responseCommentString, questionNumber, poolString, poolName)); } headerList.add(makeHeader(partString, sectionSequenceNumber, questionString, itemGradingCommentsString, questionNumber, poolString, poolName)); } else { int numberRows = responsesMap.size(); for (int i = 0; i < numberRows; i = i + 1) { headerList.add(makeHeaderMatrix(partString, sectionSequenceNumber, questionString, textString, questionNumber, i + 1, poolString, poolName)); if (addRationale) { headerList .add(makeHeaderMatrix(partString, sectionSequenceNumber, questionString, rationaleString, questionNumber, i + 1, poolString, poolName)); } if (addResponseComment) { headerList.add(makeHeaderMatrix(partString, sectionSequenceNumber, questionString, responseCommentString, questionNumber, i + 1, poolString, poolName)); } headerList.add(makeHeaderMatrix(partString, sectionSequenceNumber, questionString, itemGradingCommentsString, questionNumber, i + 1, poolString, poolName)); } } } } // outer for - questions if (showPartAndTotalScoreSpreadsheetColumns) { if (sectionScores.size() > 1) { Iterator keys = sectionScores.keySet().iterator(); while (keys.hasNext()) { Double partScore = (Double) ((Double) sectionScores.get(keys.next())).doubleValue(); responseList.add(sectionScoreColumnStart++, partScore); } } } dataList.add(responseList); if (fistItemGradingData) { fistItemGradingData = false; } } } // while if (!anonymous && useridSet.size() != 0) { Iterator iter = useridSet.iterator(); while (iter.hasNext()) { String id = (String) iter.next(); try { agentEid = userDirectoryService.getUser(id).getEid(); firstName = userDirectoryService.getUser(id).getFirstName(); lastName = userDirectoryService.getUser(id).getLastName(); } catch (Exception e) { log.error("Cannot get user"); } responseList = new ArrayList(); responseList.add(lastName); responseList.add(firstName); responseList.add(agentEid); responseList.add(noSubmissionMessage); 
dataList.add(responseList); } } Collections.sort(dataList, new ResponsesComparator(anonymous)); finalList.add(dataList); finalList.add(headerList); return finalList; }
From source file:dao.DirectoryDaoDb.java
/**
 * Resolves and attaches the stored photo blob for each directory tag entry.
 *
 * <p>For every non-null {@link Photo} in {@code dirtags}, looks up the photo
 * by its entry id and directory id (via {@code getPhoto}) and attaches the
 * result under the {@code DbConstants.PHOTO} key. The set is modified in
 * place and returned for convenience.
 *
 * @param dirtags set of {@code Photo} tag entries for a directory; may be
 *                {@code null} or empty, in which case it is returned unchanged
 * @return the same {@code dirtags} set with photo blobs attached
 * @throws BaseDaoException if the underlying photo lookup fails
 */
public HashSet getDirBlobsFromTags(HashSet dirtags) throws BaseDaoException {
    if (dirtags == null || dirtags.isEmpty()) {
        return dirtags;
    }
    Iterator it1 = dirtags.iterator();
    while (it1.hasNext()) {
        Photo blob = (Photo) it1.next();
        if (blob == null) {
            continue;
        }
        // Fetch the photo for this entry/directory pair and attach it to the tag entry.
        blob.setObject(DbConstants.PHOTO,
                getPhoto(blob.getValue(DbConstants.ENTRYID), blob.getValue(DbConstants.DIRECTORY_ID)));
    }
    return dirtags;
}