List of usage examples for the java.util.HashSet method iterator()
public Iterator<E> iterator()
From source file:com.peterbochs.instrument.InstrumentPanel.java
/**
 * Parses a colon-separated list of addresses, de-duplicates them and displays
 * them in jTextArea1, one per line, formatted as hexadecimal ("0x...").
 *
 * @param sort true to display the addresses in ascending numeric order
 * @param str  colon-separated address list; entries that fail to parse are skipped
 */
private void loadInformation(boolean sort, String str) {
    String[] address = str.split(":");
    // HashSet removes duplicate addresses; parse failures are silently skipped (best-effort).
    HashSet<BigInteger> unique = new HashSet<BigInteger>();
    for (int x = 0; x < address.length; x++) {
        try {
            unique.add(CommonLib.string2BigInteger(address[x]));
        } catch (Exception ignored) {
            // malformed entry — skip, as before
        }
    }
    // Copy via constructor instead of a manual iterator loop.
    Vector<BigInteger> v = new Vector<BigInteger>(unique);
    if (sort) {
        Collections.sort(v);
    }
    // FIX: build the text once with StringBuilder. The old code did
    // jTextArea1.setText(jTextArea1.getText() + ...) per element, which is
    // O(n^2) in total text length and forces a repaint per entry.
    // Output is byte-identical (including the leading newline before the first entry).
    StringBuilder sb = new StringBuilder();
    for (BigInteger element : v) {
        sb.append("\n0x").append(element.toString(16));
    }
    jTextArea1.setText(sb.toString());
}
From source file:org.gcaldaemon.core.notifier.GmailNotifier.java
/**
 * Poller thread body: periodically fetches the Gmail feed and shows a
 * notification window for every mail that has not been displayed yet.
 * Loops until interrupted; feed failures back off for an hour.
 */
public final void run() {
    log.info("Gmail notifier started successfully.");
    try {
        // Initial delay before the first poll.
        sleep(7000);
    } catch (Exception ignored) {
        // Interrupted during startup — give up silently.
        return;
    }
    // Processed (displayed) mails — keys are "date\tfrom\ttitle\tsummary" strings.
    HashSet processedMails = new HashSet();
    // Polling mailbox
    int i;
    for (;;) {
        try {
            // Verify local username
            if (users != null) {
                // List active users
                String[] activeUsers = getActiveUsers();
                boolean enabled = false;
                if (activeUsers != null && activeUsers.length != 0) {
                    for (i = 0; i < activeUsers.length; i++) {
                        enabled = isUserMatch(activeUsers[i]);
                        if (enabled) {
                            break;
                        }
                    }
                    if (!enabled) {
                        // Sleep for a minute
                        log.debug("Access denied for active local users.");
                        sleep(MINUTE);
                        // Restart loop (verify username)
                        continue;
                    }
                }
            }
            // Get Gmail address book (or null)
            GmailContact[] contacts = configurator.getAddressBook();
            GmailContact contact;
            // Load feed entries
            SyndEntry[] entries = FeedUtilities.getFeedEntries(FEED_URL, username, password);
            SyndEntry entry;
            HashSet newMails = new HashSet();
            for (i = 0; i < entries.length; i++) {
                entry = entries[i];
                String date = getDate(entry);
                String from = getFrom(entry);
                // Replace the sender address with the contact's display name, when known.
                if (contacts != null) {
                    for (int n = 0; n < contacts.length; n++) {
                        contact = contacts[n];
                        if (from.equalsIgnoreCase(contact.email)) {
                            from = contact.name;
                            break;
                        }
                    }
                }
                String title = getTitle(entry);
                if (mailtermSubject != null) {
                    if (title.equals(mailtermSubject) || title.equals("Re:" + mailtermSubject)) {
                        // Do not display mailterm commands and responses
                        continue;
                    }
                }
                String summary = getSummary(entry);
                newMails.add(date + '\t' + from + '\t' + title + '\t' + summary);
            }
            // Remove read mails: drop keys that are no longer in the feed so
            // processedMails cannot grow without bound.
            Iterator iterator = processedMails.iterator();
            Object key;
            while (iterator.hasNext()) {
                key = iterator.next();
                if (!newMails.contains(key)) {
                    // Iterator.remove() — the only safe removal during iteration.
                    iterator.remove();
                }
            }
            // Look up unprocessed mails
            LinkedList unprocessedMails = new LinkedList();
            iterator = newMails.iterator();
            while (iterator.hasNext()) {
                key = iterator.next();
                if (processedMails.contains(key)) {
                    continue;
                }
                processedMails.add(key);
                unprocessedMails.addLast(key);
            }
            // Display unprocessed mails
            if (!unprocessedMails.isEmpty()) {
                String[] array = new String[unprocessedMails.size()];
                unprocessedMails.toArray(array);
                Arrays.sort(array, String.CASE_INSENSITIVE_ORDER);
                window.show(array);
            }
            // Sleep
            sleep(pollingTimeout);
        } catch (InterruptedException interrupt) {
            // Shutdown requested — dispose window and exit the loop.
            if (window != null) {
                try {
                    window.setVisible(false);
                } catch (Exception ignored) {
                }
            }
            break;
        } catch (Exception loadError) {
            // Feed failure — back off for an hour before retrying.
            log.error("Unable to load Gmail feed!", loadError);
            try {
                sleep(HOUR);
            } catch (Exception interrupt) {
                return;
            }
        }
    }
}
From source file:org.apache.axis2.jaxws.description.builder.JAXWSRIWSDLGenerator.java
/** * Get the default classpath from various thingies in the message context * * @param msgContext/*from w w w . ja va 2 s . c o m*/ * @return default classpath */ public String getDefaultClasspath(String webBase) { HashSet classpath = new HashSet(); ClassLoader cl = Thread.currentThread().getContextClassLoader(); fillClassPath(cl, classpath); // Just to be safe (the above doesn't seem to return the webapp // classpath in all cases), manually do this: if (webBase != null) { addPath(classpath, webBase + File.separatorChar + "classes"); try { String libBase = webBase + File.separatorChar + "lib"; File libDir = new File(libBase); String[] jarFiles = libDir.list(); for (int i = 0; i < jarFiles.length; i++) { String jarFile = jarFiles[i]; if (jarFile.endsWith(".jar")) { addPath(classpath, libBase + File.separatorChar + jarFile); } } } catch (Exception e) { // Oh well. No big deal. } } URL serviceArchive = axisService.getFileName(); if (serviceArchive != null) { try { /** * If the service contains libraries in the 'lib' folder, we have to add those also * into classpath */ URL[] urls = Utils.getURLsForAllJars(serviceArchive, null); for (URL url : urls) { classpath.add(Utils.toFile(url).getCanonicalPath()); } } catch (UnsupportedEncodingException e) { log.error(e.getMessage(), e); } catch (IOException e) { log.error(e.getMessage(), e); } } // axis.ext.dirs can be used in any appserver getClassPathFromDirectoryProperty(classpath, "axis.ext.dirs"); // classpath used by Jasper getClassPathFromProperty(classpath, "org.apache.catalina.jsp_classpath"); // websphere stuff. 
getClassPathFromProperty(classpath, "ws.ext.dirs"); getClassPathFromProperty(classpath, "com.ibm.websphere.servlet.application.classpath"); // java class path getClassPathFromProperty(classpath, "java.class.path"); // Load jars from java external directory getClassPathFromDirectoryProperty(classpath, "java.ext.dirs"); // boot classpath isn't found in above search getClassPathFromProperty(classpath, "sun.boot.class.path"); StringBuffer path = new StringBuffer(); for (Iterator iterator = classpath.iterator(); iterator.hasNext();) { String s = (String) iterator.next(); path.append(s); path.append(File.pathSeparatorChar); } log.debug(path); return path.toString(); }
From source file:xc.mst.services.marcaggregation.MarcAggregationService.java
/** * do some of the housekeeping required with merging, really a premerge process, then hand off to mergeBibSet, if necessary. * * @param results - may already have some results from a prior merge or some deleted records from prior cleanup of unmerged records. * @param matchedRecordIds - the match set, will always have at least one element. * @param repo - the input records/*from w w w . j ava 2 s. c o m*/ * @return - the prior passed in results + new results of OutputRecord resulting from the merge */ private List<OutputRecord> mergeOverlord(List<OutputRecord> results, HashSet<Long> matchedRecordIds) { TimingLogger.start("mergeOverlord"); if (LOG.isDebugEnabled()) { StringBuffer buf = new StringBuffer(); for (Long num : matchedRecordIds) { buf.append(num).append(" "); } LOG.debug("** MERGE overlord, matchset =" + buf.toString()); } List<OutputRecord> list = null; // may not have any matches! final boolean hasMatches = matchedRecordIds.size() > 1; if (hasMatches) { //masMatchSetList = addToMatchSetList(matchedRecordIds, masMatchSetList); InputRecord record = masRsm.getRecordOfSourceRecord(matchedRecordIds, getInputRepo(), scores); String xml = mergeBibSet(record, matchedRecordIds, getInputRepo()); list = createNewBibRecord(record, xml, matchedRecordIds); // this method calls addToMasMergedRecordsMemory recordOfSourceMap.put(list.get(0).getId(), record.getId()); LOG.debug("** create merged output record: " + list.get(0).getId() + " status=" + list.get(0).getStatus()); } else { InputRecord r = getInputRepo().getRecord(matchedRecordIds.iterator().next()); String xml = masBld.update005(r.getOaiXml(), _005_Transformer); list = createNewRecord(r, "b", xml); // even though it is not merged, must still track the I<->O relationships! if (list.size() > 0) { // will get 1 agg. record back. 
HashSet<Long> littleSet = new HashSet<Long>(); littleSet.add(r.getId()); addToMasMergedRecordsMemory(list.get(0).getId(), littleSet); } LOG.debug("** create unmerged output record: " + list.get(0).getId() + " status=" + list.get(0).getStatus()); } results.addAll(list); TimingLogger.stop("mergeOverlord"); return results; }
From source file:com.datatorrent.stram.engine.StreamingContainer.java
/**
 * Activates deployed operators and their streams. Non-subscriber streams are
 * activated first, then one thread per non-OiO operator is started (OiO
 * operators are set up by their primary's thread). A latch guarantees every
 * operator is set up before subscriber streams start delivering messages.
 */
public synchronized void activate(final Map<Integer, OperatorDeployInfo> nodeMap,
        Map<String, ComponentContextPair<Stream, StreamContext>> newStreams) {
    // Phase 1: activate everything except BufferServerSubscribers — those must
    // not deliver data until all operators are up (see the latch below).
    for (ComponentContextPair<Stream, StreamContext> pair : newStreams.values()) {
        if (!(pair.component instanceof BufferServerSubscriber)) {
            activeStreams.put(pair.component, pair.context);
            pair.component.activate(pair.context);
            eventBus.publish(new StreamActivationEvent(pair));
        }
    }
    // One count per deployed operator (including OiO ones, which are counted
    // down by their primary's thread).
    final CountDownLatch signal = new CountDownLatch(nodeMap.size());
    for (final OperatorDeployInfo ndi : nodeMap.values()) {
        /*
         * OiO nodes get activated with their primary nodes.
         */
        if (ndi.type == OperatorType.OIO) {
            continue;
        }
        final Node<?> node = nodes.get(ndi.id);
        final String name = new StringBuilder(Integer.toString(ndi.id)).append('/').append(ndi.name).append(':')
                .append(node.getOperator().getClass().getSimpleName()).toString();
        final Thread thread = new Thread(name) {
            @Override
            public void run() {
                // Tracks which operators this thread successfully set up, so the
                // finally block knows which ones to tear down vs. merely count down.
                HashSet<OperatorDeployInfo> setOperators = new HashSet<OperatorDeployInfo>();
                // currentdi is the operator being set up; null once setup is complete,
                // which lets the catch blocks distinguish setup vs. runtime failures.
                OperatorDeployInfo currentdi = ndi;
                try {
                    /* primary operator initialization */
                    setupNode(currentdi);
                    setOperators.add(currentdi);
                    /* lets go for OiO operator initialization */
                    List<Integer> oioNodeIdList = oioGroups.get(ndi.id);
                    if (oioNodeIdList != null) {
                        for (Integer oioNodeId : oioNodeIdList) {
                            currentdi = nodeMap.get(oioNodeId);
                            setupNode(currentdi);
                            setOperators.add(currentdi);
                        }
                    }
                    currentdi = null;
                    // Count down once per operator set up by this thread.
                    for (int i = setOperators.size(); i-- > 0;) {
                        signal.countDown();
                    }
                    node.run(); /* this is a blocking call */
                } catch (Error error) {
                    int[] operators;
                    if (currentdi == null) {
                        // Error during run(): report every operator in this thread's set.
                        logger.error("Voluntary container termination due to an error in operator set {}.",
                                setOperators, error);
                        operators = new int[setOperators.size()];
                        int i = 0;
                        for (Iterator<OperatorDeployInfo> it = setOperators.iterator(); it.hasNext(); i++) {
                            operators[i] = it.next().id;
                        }
                    } else {
                        // Error during setup of a specific operator.
                        logger.error("Voluntary container termination due to an error in operator {}.",
                                currentdi, error);
                        operators = new int[] { currentdi.id };
                    }
                    umbilical.reportError(containerId, operators,
                            "Voluntary container termination due to an error. "
                                    + ExceptionUtils.getStackTrace(error));
                    // Errors are unrecoverable: kill the whole container.
                    System.exit(1);
                } catch (Exception ex) {
                    if (currentdi == null) {
                        failedNodes.add(ndi.id);
                        logger.error("Operator set {} stopped running due to an exception.", setOperators, ex);
                        int[] operators = new int[] { ndi.id };
                        umbilical.reportError(containerId, operators,
                                "Stopped running due to an exception. " + ExceptionUtils.getStackTrace(ex));
                    } else {
                        failedNodes.add(currentdi.id);
                        logger.error("Abandoning deployment of operator {} due to setup failure.", currentdi, ex);
                        int[] operators = new int[] { currentdi.id };
                        umbilical.reportError(containerId, operators,
                                "Abandoning deployment due to setup failure. " + ExceptionUtils.getStackTrace(ex));
                    }
                } finally {
                    // Tear down whatever was set up; for operators that never got set
                    // up, release their latch count so activate() does not hang.
                    if (setOperators.contains(ndi)) {
                        try {
                            teardownNode(ndi);
                        } catch (Exception ex) {
                            failedNodes.add(ndi.id);
                            logger.error("Shutdown of operator {} failed due to an exception.", ndi, ex);
                        }
                    } else {
                        signal.countDown();
                    }
                    List<Integer> oioNodeIdList = oioGroups.get(ndi.id);
                    if (oioNodeIdList != null) {
                        for (Integer oioNodeId : oioNodeIdList) {
                            OperatorDeployInfo oiodi = nodeMap.get(oioNodeId);
                            if (setOperators.contains(oiodi)) {
                                try {
                                    teardownNode(oiodi);
                                } catch (Exception ex) {
                                    failedNodes.add(oiodi.id);
                                    logger.error("Shutdown of operator {} failed due to an exception.", oiodi, ex);
                                }
                            } else {
                                signal.countDown();
                            }
                        }
                    }
                }
            }
        };
        node.context.setThread(thread);
        thread.start();
    }
    /**
     * we need to make sure that before any of the operators gets the first message, it's activated.
     */
    try {
        signal.await();
    } catch (InterruptedException ex) {
        // NOTE(review): interrupt status is swallowed here and activation proceeds;
        // presumably intentional for shutdown paths — confirm before changing.
        logger.debug("Activation of operators interrupted.", ex);
    }
    // Phase 2: now that every operator is set up, let subscriber streams flow.
    for (ComponentContextPair<Stream, StreamContext> pair : newStreams.values()) {
        if (pair.component instanceof BufferServerSubscriber) {
            activeStreams.put(pair.component, pair.context);
            pair.component.activate(pair.context);
            eventBus.publish(new StreamActivationEvent(pair));
        }
    }
    for (WindowGenerator wg : generators.values()) {
        if (!activeGenerators.containsKey(wg)) {
            activeGenerators.put(wg, generators);
            wg.activate(null);
        }
    }
}
From source file:org.unitime.timetable.solver.TimetableDatabaseSaver.java
public void save() { org.hibernate.Session hibSession = null; Transaction tx = null;/* w w w . j a v a 2 s. com*/ try { TimetableManagerDAO dao = new TimetableManagerDAO(); hibSession = dao.getSession(); hibSession.setCacheMode(CacheMode.IGNORE); hibSession.setFlushMode(FlushMode.COMMIT); tx = hibSession.beginTransaction(); Long[] solutionIds = save(hibSession); tx.commit(); HashSet refreshIds = new HashSet(); if (iCommitSolution && solutionIds != null) { HashSet<Solution> touchedSolutions = new HashSet<Solution>(); if (hibSession != null && hibSession.isOpen()) hibSession.close(); hibSession = dao.getSession(); iProgress.setPhase("Committing solution ...", 2 * solutionIds.length); tx = hibSession.beginTransaction(); for (int i = 0; i < solutionIds.length; i++) { Solution solution = (new SolutionDAO()).get(solutionIds[i]); Solution committedSolution = solution.getOwner().getCommittedSolution(); if (committedSolution != null) { committedSolution.uncommitSolution(hibSession, getModel().getProperties().getProperty("General.OwnerPuid")); refreshIds.add(committedSolution.getUniqueId()); touchedSolutions.add(committedSolution); } touchedSolutions.add(solution); iProgress.incProgress(); } for (int i = 0; i < solutionIds.length; i++) { Solution solution = (new SolutionDAO()).get(solutionIds[i]); List<String> messages = new ArrayList<String>(); solution.commitSolution(messages, hibSession, getModel().getProperties().getProperty("General.OwnerPuid")); touchedSolutions.add(solution); for (String m : messages) { iProgress.error("Unable to commit: " + m); } hibSession.update(solution); iProgress.incProgress(); } tx.commit(); String className = ApplicationProperty.ExternalActionSolutionCommit.value(); if (className != null && className.trim().length() > 0) { ExternalSolutionCommitAction commitAction = (ExternalSolutionCommitAction) (Class .forName(className).newInstance()); commitAction.performExternalSolutionCommitAction(touchedSolutions, hibSession); } } 
iProgress.setPhase("Refreshing solution ...", solutionIds.length + refreshIds.size()); for (Iterator i = refreshIds.iterator(); i.hasNext();) { Long solutionId = (Long) i.next(); refreshCourseSolution(solutionId); try { } catch (Exception e) { iProgress.warn("Unable to refresh solution " + solutionId + ", reason:" + e.getMessage(), e); } iProgress.incProgress(); } for (int i = 0; i < solutionIds.length; i++) { try { refreshCourseSolution(solutionIds[i]); } catch (Exception e) { iProgress.warn("Unable to refresh solution " + solutionIds[i] + ", reason:" + e.getMessage(), e); } iProgress.incProgress(); } if (solutionIds != null) { getModel().getProperties().setProperty("General.SolutionId", solutionIds); iProgress.info("Solution successfully saved."); if (hibSession != null && hibSession.isOpen()) hibSession.close(); hibSession = dao.getSession(); for (int i = 0; i < solutionIds.length; i++) { tx = hibSession.beginTransaction(); Solution solution = (new SolutionDAO()).get(solutionIds[i]); LogInfo lInfo = new LogInfo(); lInfo.setLog(iProgress.getLog()); SolutionInfo logInfo = new SolutionInfo(); logInfo.setDefinition(SolverInfoDef.findByName(hibSession, "LogInfo")); logInfo.setOpt(null); logInfo.setSolution(solution); logInfo.setInfo(lInfo, getFileProxy()); hibSession.save(logInfo); tx.commit(); } } } catch (Exception e) { iProgress.fatal("Unable to save timetable, reason: " + e.getMessage(), e); sLog.error(e.getMessage(), e); tx.rollback(); } finally { // here we need to close the session since this code may run in a separate thread if (hibSession != null && hibSession.isOpen()) hibSession.close(); } }
From source file:org.sakaiproject.tool.assessment.ui.listener.delivery.SubmitToGradingActionListener.java
/**
 * Persists (or stages) the student's answers for grading. Creates a new
 * AssessmentGradingData if none exists yet; otherwise removes deleted item
 * gradings, reloads the set from the service, merges in the adds, and then
 * stores grades — to the DB, or in-memory only for linear-access "showFeedback".
 *
 * @return the (possibly reloaded) assessment grading data
 * @throws FinFormatException propagated from the grading service
 */
private AssessmentGradingData persistAssessmentGrading(ActionEvent ae, DeliveryBean delivery,
        HashSet<ItemGradingData> itemGradingHash, PublishedAssessmentFacade publishedAssessment,
        HashSet<ItemGradingData> adds, HashSet<ItemGradingData> removes, HashMap invalidFINMap,
        ArrayList invalidSALengthList) throws FinFormatException {
    AssessmentGradingData adata = null;
    if (delivery.getAssessmentGrading() != null) {
        adata = delivery.getAssessmentGrading();
    }
    GradingService service = new GradingService();
    log.debug("**adata=" + adata);
    if (adata == null) { // <--- this shouldn't happen 'cos it should
        // have been created by BeginDelivery
        adata = makeNewAssessmentGrading(publishedAssessment, delivery, itemGradingHash);
        delivery.setAssessmentGrading(adata);
    } else {
        // 1. add all the new itemgrading for MC/Survey and discard any
        // itemgrading for MC/Survey
        // 2. add any modified SAQ/TF/FIB/Matching/MCMR/FIN
        // 3. save any modified Mark for Review in FileUplaod/Audio
        HashMap<Long, ItemDataIfc> fibMap = getFIBMap(publishedAssessment);
        HashMap<Long, ItemDataIfc> finMap = getFINMap(publishedAssessment);
        HashMap<Long, ItemDataIfc> calcQuestionMap = getCalcQuestionMap(publishedAssessment); // CALCULATED_QUESTION
        HashMap<Long, ItemDataIfc> imagQuestionMap = getImagQuestionMap(publishedAssessment); // IMAGEMAP_QUESTION
        HashMap<Long, ItemDataIfc> mcmrMap = getMCMRMap(publishedAssessment);
        HashMap<Long, ItemDataIfc> emiMap = getEMIMap(publishedAssessment);
        Set<ItemGradingData> itemGradingSet = adata.getItemGradingSet();
        log.debug("*** 2a. before removal & addition " + (new Date()));
        if (itemGradingSet != null) {
            log.debug("*** 2aa. removing old itemGrading " + (new Date()));
            itemGradingSet.removeAll(removes);
            service.deleteAll(removes);
            // refresh itemGradingSet & assessmentGrading after removal
            log.debug("*** 2ab. reload itemGradingSet " + (new Date()));
            itemGradingSet = service.getItemGradingSet(adata.getAssessmentGradingId().toString());
            log.debug("*** 2ac. load assessmentGarding " + (new Date()));
            adata = service.load(adata.getAssessmentGradingId().toString(), false);
            // Re-point the new item gradings at the reloaded grading record.
            Iterator<ItemGradingData> iter = adds.iterator();
            while (iter.hasNext()) {
                iter.next().setAssessmentGradingId(adata.getAssessmentGradingId());
            }
            // make update to old item and insert new item
            // and we will only update item that has been changed
            log.debug("*** 2ad. set assessmentGrading with new/updated itemGrading " + (new Date()));
            log.debug("Submitforgrading: before calling .....................oldItemGradingSet.size = "
                    + itemGradingSet.size());
            log.debug("Submitforgrading: newItemGradingSet.size = " + adds.size());
            HashSet<ItemGradingData> updateItemGradingSet = getUpdateItemGradingSet(itemGradingSet, adds, fibMap,
                    finMap, calcQuestionMap, imagQuestionMap, mcmrMap, emiMap, adata);
            adata.setItemGradingSet(updateItemGradingSet);
        }
    }
    adata.setSubmitFromTimeoutPopup(delivery.getsubmitFromTimeoutPopup());
    adata.setIsLate(isLate(publishedAssessment, delivery.getsubmitFromTimeoutPopup()));
    adata.setForGrade(Boolean.valueOf(delivery.getForGrade()));
    // If this assessment grading data has been updated (comments or adj. score) by grader and then republic and allow student to resubmit
    // when the student submit his answers, we update the status back to 0 and remove the grading entry/info.
    if (AssessmentGradingData.ASSESSMENT_UPDATED_NEED_RESUBMIT.equals(adata.getStatus())
            || AssessmentGradingData.ASSESSMENT_UPDATED.equals(adata.getStatus())) {
        adata.setStatus(Integer.valueOf(0));
        adata.setGradedBy(null);
        adata.setGradedDate(null);
        adata.setComments(null);
        adata.setTotalOverrideScore(Double.valueOf(0d));
    }
    log.debug("*** 2b. before storingGrades, did all the removes and adds " + (new Date()));
    if (delivery.getNavigation().equals("1") && ae != null && "showFeedback".equals(ae.getComponent().getId())) {
        log.debug("Do not persist to db if it is linear access and the action is show feedback");
        // 3. let's build three HashMap with (publishedItemId, publishedItem),
        // (publishedItemTextId, publishedItem), (publishedAnswerId,
        // publishedItem) to help with storing grades to adata only, not db
        HashMap publishedItemHash = delivery.getPublishedItemHash();
        HashMap publishedItemTextHash = delivery.getPublishedItemTextHash();
        HashMap publishedAnswerHash = delivery.getPublishedAnswerHash();
        service.storeGrades(adata, publishedAssessment, publishedItemHash, publishedItemTextHash,
                publishedAnswerHash, false, invalidFINMap, invalidSALengthList);
    } else {
        log.debug("Persist to db otherwise");
        // The following line seems redundant. I cannot see a reason why we need to save the adata here
        // and then again in following service.storeGrades(). Comment it out.
        //service.saveOrUpdateAssessmentGrading(adata);
        log.debug("*** 3. before storingGrades, did all the removes and adds " + (new Date()));
        // 3. let's build three HashMap with (publishedItemId, publishedItem),
        // (publishedItemTextId, publishedItem), (publishedAnswerId,
        // publishedItem) to help with storing grades to adata and then persist to DB
        HashMap publishedItemHash = delivery.getPublishedItemHash();
        HashMap publishedItemTextHash = delivery.getPublishedItemTextHash();
        HashMap publishedAnswerHash = delivery.getPublishedAnswerHash();
        service.storeGrades(adata, publishedAssessment, publishedItemHash, publishedItemTextHash,
                publishedAnswerHash, invalidFINMap, invalidSALengthList);
    }
    return adata;
}
From source file:xc.mst.services.marcaggregation.MarcAggregationService.java
/**
 * Re-processes an updated, still-active bib record. If its match points are
 * unchanged, the existing aggregated output record is re-merged and updated in
 * place; otherwise the record is deleted and re-added so the whole match set
 * is re-matched and re-merged.
 *
 * @return output records reflecting the update
 */
private List<OutputRecord> processBibUpdateActive(InputRecord r, SaxMarcXmlRecord smr, Repository repo) {
    LOG.info("MAS: processBibUpdateActive: " + r.getId());
    List<OutputRecord> results = new ArrayList<OutputRecord>();
    boolean processedAlready = false;
    // If the match points are the same, then we do not need to worry about the match set changing; just update the record payload
    if (!changedMatchpoints.contains(r.getId())) {
        LOG.info(
                "MAS: processBibUpdateActive: matchpoints have NOT changed; going to re-use the current matchset's agg record.");
        OutputRecord oldOutput;
        String xml;
        HashSet<Long> formerMatchSet = getCurrentMatchSetForRecord(r);
        LOG.info("MAS: processBibUpdateActive formerMatchSet [" + formerMatchSet.size() + "] = "
                + formerMatchSet);
        // Although rare, it's possible that one or more bibs in this set are marked for deletion, but haven't actually been deleted yet.
        // So, let's not include any deleted records in the creation of this new aggregated record (because it will throw an exception later when MAS tries to read a potentially empty record).
        // Although, MAS *should* fix this scenario later on when the deleted record gets processed, let's be pro-active.
        if (formerMatchSet.size() > 0) {
            List<Long> deleteThese = new ArrayList<Long>();
            for (Long num : formerMatchSet) {
                if (repo.getRecord(num).getDeleted()) {
                    LOG.info(
                            "MAS: processBibUpdateActive: we found a bib that's been marked for deletion in formerMatchSet; we are skipping this record: "
                                    + num);
                    deleteThese.add(num);
                }
            }
            // Collected first, removed after — avoids mutating the set mid-iteration.
            formerMatchSet.removeAll(deleteThese);
        }
        if (formerMatchSet.size() > 0) {
            // Any member of the set maps to the same aggregated output record.
            Long oldOutputId = getBibOutputId(formerMatchSet.iterator().next());
            oldOutput = getRecord(oldOutputId);
            InputRecord record = masRsm.getRecordOfSourceRecord(formerMatchSet, repo, scores);
            xml = mergeBibSet(record, formerMatchSet, repo);
            // inject 001/003 if necessary (based on custom.properties settings)
            xml = injectNew001(oldOutputId, xml);
            oldOutput.setMode(Record.STRING_MODE);
            oldOutput.setFormat(marc21);
            oldOutput.setStatus(Record.ACTIVE);
            // Set the XML to the updated XML - remerged and reconstituted the xml
            // Do NOT create a new record, update, the OLD record!
            // Set the XML to the updated XML - reconstituted the xml
            oldOutput.setOaiXml(xml);
            // we need the clear out the old updatedAt value
            // so that the MST will correctly set it later (when repo is persisted)
            // issue: mst-549
            ((Record) oldOutput).setUpdatedAt(null);
            // Add the updated record
            oldOutput.setType("b");
            results.add(oldOutput);
            processedAlready = true;
        }
    }
    // If the match points change at all, we must re-match/merge all records in the set
    if (!processedAlready) {
        LOG.info(
                "MAS: processBibUpdateActive: matchpoints HAVE changed; need to re-match/merge, i.e., delete-then-re-add.");
        results = processBibDelete(r);
        // processBibDelete nukes all the record's score data; must re-add it
        addAndPersistScores(r, smr);
        results.addAll(processBibNewActive(r, smr, repo));
    }
    return results;
}
From source file:es.tekniker.framework.ktek.questionnaire.mng.server.QuestionnaireMngServer.java
private KtekQuestionnaireModelEntity[] getQuestionnaires4TypeMonitoringActivityPlannedData(int idUser, String codtelecareprogram, int idLang, short typeMonitoringActivity, short status) throws KtekExceptionEntity { KtekQuestionnaireModelEntity[] array = null; Ktek_questionnaire[] arrayInstance = null; KtekQuestionnaireModelEntity instance = null; QuestionnaireManagerDB db = null;/* ww w. j a va 2s .com*/ Ktek_user user = null; HashSet<Ktek_questionnaire> questionnaireIdHash; Ktek_questionnaire instanceQuestionaire = null; db = new QuestionnaireManagerDB(); try { user = UtilsQuestionnaire.getUserByIdUser(idUser); arrayInstance = db.getQuestionnaires4TypeMonitoringActivityPlanned(typeMonitoringActivity, user.getKtek_uk_coduser(), codtelecareprogram, status); } catch (Exception e) { e.printStackTrace(); } if (arrayInstance != null && arrayInstance.length > 0) { array = new KtekQuestionnaireModelEntity[arrayInstance.length]; for (int i = 0; i < arrayInstance.length; i++) { instance = BO2Entity.QuestionnaireModelBaseBO2Entity(idLang, arrayInstance[i]); array[i] = instance; } questionnaireIdHash = new HashSet<Ktek_questionnaire>(); for (int i = 0; i < arrayInstance.length; i++) { if (questionnaireIdHash.contains(arrayInstance[i]) == false) { questionnaireIdHash.add(arrayInstance[i]); } } array = new KtekQuestionnaireModelEntity[questionnaireIdHash.size()]; Iterator<Ktek_questionnaire> it = questionnaireIdHash.iterator(); int i = 0; while (it.hasNext()) { instanceQuestionaire = it.next(); instance = BO2Entity.QuestionnaireModelBaseBO2Entity(idLang, instanceQuestionaire); array[i] = instance; i = i + 1; } } else { //array = new KtekQuestionnaireModelEntity[1]; } return array; }
From source file:org.apache.axis.wsdl.toJava.JavaStubWriter.java
/**
 * Write the body of the binding's stub file: cached-serializer fields, the
 * operation map, constructors (with type-mapping registration, possibly split
 * across addBindingsN() methods), createCall(), and one method per binding
 * operation.
 *
 * @param pw writer positioned inside the generated stub class
 * @throws IOException on write failure
 */
protected void writeFileBody(PrintWriter pw) throws IOException {
    PortType portType = binding.getPortType();
    HashSet types = getTypesInPortType(portType);
    boolean hasMIME = Utils.hasMIME(bEntry);
    // Cached serializer bookkeeping fields are only needed when there are
    // type mappings or MIME attachments.
    if ((types.size() > 0) || hasMIME) {
        pw.println(" private java.util.Vector cachedSerClasses = new java.util.Vector();");
        pw.println(" private java.util.Vector cachedSerQNames = new java.util.Vector();");
        pw.println(" private java.util.Vector cachedSerFactories = new java.util.Vector();");
        pw.println(" private java.util.Vector cachedDeserFactories = new java.util.Vector();");
    }
    pw.println();
    pw.println(" static org.apache.axis.description.OperationDesc [] _operations;");
    pw.println();
    writeOperationMap(pw);
    pw.println();
    // Three constructors: no-arg, (URL, Service), (Service).
    pw.println(" public " + className + "() throws org.apache.axis.AxisFault {");
    pw.println(" this(null);");
    pw.println(" }");
    pw.println();
    pw.println(" public " + className
            + "(java.net.URL endpointURL, javax.xml.rpc.Service service) throws org.apache.axis.AxisFault {");
    pw.println(" this(service);");
    pw.println(" super.cachedEndpoint = endpointURL;");
    pw.println(" }");
    pw.println();
    pw.println(" public " + className
            + "(javax.xml.rpc.Service service) throws org.apache.axis.AxisFault {");
    pw.println(" if (service == null) {");
    pw.println(" super.service = new org.apache.axis.client.Service();");
    pw.println(" } else {");
    pw.println(" super.service = service;");
    pw.println(" }");
    pw.println(" ((org.apache.axis.client.Service)super.service).setTypeMappingVersion(\""
            + emitter.getTypeMappingVersion() + "\");");
    List deferredBindings = new ArrayList();
    // keep track of how many type mappings we write out
    int typeMappingCount = 0;
    if (types.size() > 0) {
        Iterator it = types.iterator();
        while (it.hasNext()) {
            TypeEntry type = (TypeEntry) it.next();
            if (!Utils.shouldEmit(type)) {
                continue;
            }
            // Write out serializer declarations
            if (typeMappingCount == 0) {
                writeSerializationDecls(pw, hasMIME, binding.getQName().getNamespaceURI());
            }
            // write the type mapping for this type
            // writeSerializationInit(pw, type);
            deferredBindings.add(type);
            // increase the number of type mappings count
            typeMappingCount++;
        }
    }
    // Sort the TypeEntry's by their qname.
    Collections.sort(deferredBindings, new Comparator() {
        public int compare(Object a, Object b) {
            TypeEntry type1 = (TypeEntry) a;
            TypeEntry type2 = (TypeEntry) b;
            return type1.getQName().toString().compareToIgnoreCase(type2.getQName().toString());
        }
    });
    // We need to write out the MIME mapping, even if we don't have
    // any type mappings
    if ((typeMappingCount == 0) && hasMIME) {
        writeSerializationDecls(pw, hasMIME, binding.getQName().getNamespaceURI());
        typeMappingCount++;
    }
    // track whether the number of bindings exceeds the threshold
    // that we allow per method.
    boolean needsMultipleBindingMethods = false;
    if (deferredBindings.size() < MAXIMUM_BINDINGS_PER_METHOD) {
        // small number of bindings, just inline them:
        for (Iterator it = deferredBindings.iterator(); it.hasNext();) {
            writeSerializationInit(pw, (TypeEntry) it.next());
        }
    } else {
        needsMultipleBindingMethods = true;
        int methodCount = calculateBindingMethodCount(deferredBindings);
        // invoke each of the soon-to-be generated addBindings methods
        // from the constructor.
        for (int i = 0; i < methodCount; i++) {
            pw.println(" addBindings" + i + "();");
        }
    }
    pw.println(" }");
    pw.println();
    // emit any necessary methods for assembling binding metadata.
    if (needsMultipleBindingMethods) {
        writeBindingMethods(pw, deferredBindings);
        pw.println();
    }
    // Generated createCall(): copies cached client settings onto a fresh Call.
    pw.println(" protected org.apache.axis.client.Call createCall() throws java.rmi.RemoteException {");
    pw.println(" try {");
    pw.println(" org.apache.axis.client.Call _call = super._createCall();");
    pw.println(" if (super.maintainSessionSet) {");
    pw.println(" _call.setMaintainSession(super.maintainSession);");
    pw.println(" }");
    pw.println(" if (super.cachedUsername != null) {");
    pw.println(" _call.setUsername(super.cachedUsername);");
    pw.println(" }");
    pw.println(" if (super.cachedPassword != null) {");
    pw.println(" _call.setPassword(super.cachedPassword);");
    pw.println(" }");
    pw.println(" if (super.cachedEndpoint != null) {");
    pw.println(" _call.setTargetEndpointAddress(super.cachedEndpoint);");
    pw.println(" }");
    pw.println(" if (super.cachedTimeout != null) {");
    pw.println(" _call.setTimeout(super.cachedTimeout);");
    pw.println(" }");
    pw.println(" if (super.cachedPortName != null) {");
    pw.println(" _call.setPortName(super.cachedPortName);");
    pw.println(" }");
    pw.println(" java.util.Enumeration keys = super.cachedProperties.keys();");
    pw.println(" while (keys.hasMoreElements()) {");
    pw.println(" java.lang.String key = (java.lang.String) keys.nextElement();");
    pw.println(" _call.setProperty(key, super.cachedProperties.get(key));");
    pw.println(" }");
    if (typeMappingCount > 0) {
        pw.println(" // " + Messages.getMessage("typeMap00"));
        pw.println(" // " + Messages.getMessage("typeMap01"));
        pw.println(" // " + Messages.getMessage("typeMap02"));
        pw.println(" // " + Messages.getMessage("typeMap03"));
        pw.println(" // " + Messages.getMessage("typeMap04"));
        pw.println(" synchronized (this) {");
        pw.println(" if (firstCall()) {");
        // Hack alert - we need to establish the encoding style before we register type mappings due
        // to the fact that TypeMappings key off of encoding style
        pw.println(" // " + Messages.getMessage("mustSetStyle"));
        if (bEntry.hasLiteral()) {
            pw.println(" _call.setEncodingStyle(null);");
        } else {
            Iterator iterator = bEntry.getBinding().getExtensibilityElements().iterator();
            while (iterator.hasNext()) {
                Object obj = iterator.next();
                if (obj instanceof SOAPBinding) {
                    pw.println(
                            " _call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP11_CONSTANTS);");
                    pw.println(
                            " _call.setEncodingStyle(org.apache.axis.Constants.URI_SOAP11_ENC);");
                } else if (obj instanceof UnknownExtensibilityElement) {
                    // TODO: After WSDL4J supports soap12, change this code
                    UnknownExtensibilityElement unkElement = (UnknownExtensibilityElement) obj;
                    QName name = unkElement.getElementType();
                    if (name.getNamespaceURI().equals(Constants.URI_WSDL12_SOAP)
                            && name.getLocalPart().equals("binding")) {
                        pw.println(
                                " _call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP12_CONSTANTS);");
                        pw.println(
                                " _call.setEncodingStyle(org.apache.axis.Constants.URI_SOAP12_ENC);");
                    }
                }
            }
        }
        pw.println(" for (int i = 0; i < cachedSerFactories.size(); ++i) {");
        pw.println(" java.lang.Class cls = (java.lang.Class) cachedSerClasses.get(i);");
        pw.println(" javax.xml.namespace.QName qName =");
        pw.println(" (javax.xml.namespace.QName) cachedSerQNames.get(i);");
        pw.println(" java.lang.Object x = cachedSerFactories.get(i);");
        pw.println(" if (x instanceof Class) {");
        pw.println(" java.lang.Class sf = (java.lang.Class)");
        pw.println(" cachedSerFactories.get(i);");
        pw.println(" java.lang.Class df = (java.lang.Class)");
        pw.println(" cachedDeserFactories.get(i);");
        pw.println(" _call.registerTypeMapping(cls, qName, sf, df, false);");
        pw.println(" }");
        pw.println(" else if (x instanceof javax.xml.rpc.encoding.SerializerFactory) {");
        pw.println(
                " org.apache.axis.encoding.SerializerFactory sf = (org.apache.axis.encoding.SerializerFactory)");
        pw.println(" cachedSerFactories.get(i);");
        pw.println(
                " org.apache.axis.encoding.DeserializerFactory df = (org.apache.axis.encoding.DeserializerFactory)");
        pw.println(" cachedDeserFactories.get(i);");
        pw.println(" _call.registerTypeMapping(cls, qName, sf, df, false);");
        pw.println(" }");
        pw.println(" }");
        pw.println(" }");
        pw.println(" }");
    }
    pw.println(" return _call;");
    pw.println(" }");
    pw.println(" catch (java.lang.Throwable _t) {");
    pw.println(" throw new org.apache.axis.AxisFault(\"" + Messages.getMessage("badCall01") + "\", _t);");
    pw.println(" }");
    pw.println(" }");
    pw.println();
    // One stub method per binding operation.
    List operations = binding.getBindingOperations();
    for (int i = 0; i < operations.size(); ++i) {
        BindingOperation operation = (BindingOperation) operations.get(i);
        Parameters parameters = bEntry.getParameters(operation.getOperation());
        // Get the soapAction from the <soap:operation>
        String soapAction = "";
        String opStyle = null;
        Iterator operationExtensibilityIterator = operation.getExtensibilityElements().iterator();
        for (; operationExtensibilityIterator.hasNext();) {
            Object obj = operationExtensibilityIterator.next();
            if (obj instanceof SOAPOperation) {
                soapAction = ((SOAPOperation) obj).getSoapActionURI();
                opStyle = ((SOAPOperation) obj).getStyle();
                break;
            } else if (obj instanceof UnknownExtensibilityElement) {
                // TODO: After WSDL4J supports soap12, change this code
                UnknownExtensibilityElement unkElement = (UnknownExtensibilityElement) obj;
                QName name = unkElement.getElementType();
                if (name.getNamespaceURI().equals(Constants.URI_WSDL12_SOAP)
                        && name.getLocalPart().equals("operation")) {
                    if (unkElement.getElement().getAttribute("soapAction") != null) {
                        soapAction = unkElement.getElement().getAttribute("soapAction");
                    }
                    opStyle = unkElement.getElement().getAttribute("style");
                }
            }
        }
        Operation ptOperation = operation.getOperation();
        OperationType type = ptOperation.getStyle();
        // These operation types are not supported. The signature
        // will be a string stating that fact.
        if ((OperationType.NOTIFICATION.equals(type)) || (OperationType.SOLICIT_RESPONSE.equals(type))) {
            pw.println(parameters.signature);
            pw.println();
        } else {
            writeOperation(pw, operation, parameters, soapAction, opStyle, type == OperationType.ONE_WAY, i);
        }
    }
}