List of usage examples for the java.util.Set method clear():
void clear();
From source file:com.netflix.genie.core.jpa.services.JpaClusterServiceImplIntegrationTests.java
/** * Test the get clusters method./*from w w w .ja va 2 s . c om*/ */ @Test public void testGetClustersByTags() { final Set<String> tags = Sets.newHashSet("prod"); Page<Cluster> clusters = this.service.getClusters(null, null, tags, null, null, PAGE); Assert.assertEquals(1, clusters.getNumberOfElements()); Assert.assertEquals(CLUSTER_1_ID, clusters.getContent().get(0).getId().orElseThrow(IllegalArgumentException::new)); tags.clear(); tags.add("hive"); clusters = this.service.getClusters(null, null, tags, null, null, PAGE); Assert.assertEquals(2, clusters.getNumberOfElements()); Assert.assertEquals(CLUSTER_2_ID, clusters.getContent().get(0).getId().orElseThrow(IllegalArgumentException::new)); Assert.assertEquals(CLUSTER_1_ID, clusters.getContent().get(1).getId().orElseThrow(IllegalArgumentException::new)); tags.add("somethingThatWouldNeverReallyExist"); clusters = this.service.getClusters(null, null, tags, null, null, PAGE); Assert.assertTrue(clusters.getContent().isEmpty()); tags.clear(); clusters = this.service.getClusters(null, null, tags, null, null, PAGE); Assert.assertEquals(2, clusters.getNumberOfElements()); Assert.assertEquals(CLUSTER_2_ID, clusters.getContent().get(0).getId().orElseThrow(IllegalArgumentException::new)); Assert.assertEquals(CLUSTER_1_ID, clusters.getContent().get(1).getId().orElseThrow(IllegalArgumentException::new)); }
From source file:gaffer.accumulostore.operation.spark.handler.GetJavaRDDOfElementsHandlerTest.java
@Test public void checkGetCorrectElementsInRDDForEdgeSeed() throws OperationException, IOException { final Graph graph1 = new Graph.Builder() .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json")) .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json")) .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json")) .storeProperties(getClass().getResourceAsStream("/store.properties")).build(); final List<Element> elements = new ArrayList<>(); for (int i = 0; i < 10; i++) { final Entity entity = new Entity(ENTITY_GROUP); entity.setVertex("" + i); final Edge edge1 = new Edge(EDGE_GROUP); edge1.setSource("" + i); edge1.setDestination("B"); edge1.setDirected(false);/*from w w w .j a v a2s . c om*/ edge1.putProperty("count", 2); final Edge edge2 = new Edge(EDGE_GROUP); edge2.setSource("" + i); edge2.setDestination("C"); edge2.setDirected(false); edge2.putProperty("count", 4); elements.add(edge1); elements.add(edge2); elements.add(entity); } final User user = new User(); graph1.execute(new AddElements(elements), user); final SparkConf sparkConf = new SparkConf().setMaster("local") .setAppName("testCheckGetCorrectElementsInJavaRDDForEdgeSeed") .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .set("spark.kryo.registrator", "gaffer.serialisation.kryo.Registrator") .set("spark.driver.allowMultipleContexts", "true"); final JavaSparkContext sparkContext = new JavaSparkContext(sparkConf); // Create Hadoop configuration and serialise to a string final Configuration configuration = new Configuration(); final ByteArrayOutputStream baos = new ByteArrayOutputStream(); configuration.write(new DataOutputStream(baos)); final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8); // Check get correct edges for EdgeSeed 1 -> B GetJavaRDDOfElements<EdgeSeed> rddQuery = new GetJavaRDDOfElements.Builder<EdgeSeed>() .javaSparkContext(sparkContext).seeds(Collections.singleton(new EdgeSeed("1", "B", false))) 
.setIncludeEdges(GetOperation.IncludeEdgeType.ALL).setIncludeEntities(false).build(); rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString); JavaRDD<Element> rdd = graph1.execute(rddQuery, user); if (rdd == null) { fail("No RDD returned"); } final Set<Element> results = new HashSet<>(); results.addAll(rdd.collect()); final Set<Element> expectedElements = new HashSet<>(); final Edge edge1B = new Edge(EDGE_GROUP); edge1B.setSource("1"); edge1B.setDestination("B"); edge1B.setDirected(false); edge1B.putProperty("count", 2); expectedElements.add(edge1B); assertEquals(expectedElements, results); // Check get entity for 1 when query for 1 -> B and specify entities only rddQuery = new GetJavaRDDOfElements.Builder<EdgeSeed>().javaSparkContext(sparkContext) .seeds(Collections.singleton(new EdgeSeed("1", "B", false))).setIncludeEntities(true) .setIncludeEdges(GetOperation.IncludeEdgeType.NONE).build(); rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString); rdd = graph1.execute(rddQuery, user); if (rdd == null) { fail("No RDD returned"); } results.clear(); results.addAll(rdd.collect()); expectedElements.clear(); final Entity entity1 = new Entity(ENTITY_GROUP); entity1.setVertex("1"); expectedElements.add(entity1); assertEquals(expectedElements, results); // Check get correct edges for 1 -> B when specify edges only rddQuery = new GetJavaRDDOfElements.Builder<EdgeSeed>().javaSparkContext(sparkContext) .seeds(Collections.singleton(new EdgeSeed("1", "B", false))) .view(new View.Builder().edge(EDGE_GROUP).build()).setIncludeEntities(false) .setIncludeEdges(GetOperation.IncludeEdgeType.ALL).build(); rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString); rdd = graph1.execute(rddQuery, user); if (rdd == null) { fail("No RDD returned"); } results.clear(); results.addAll(rdd.collect()); expectedElements.clear(); expectedElements.add(edge1B); 
assertEquals(expectedElements, results); // Check get correct edges for 1 -> B and 5 -> C Set<EdgeSeed> seeds = new HashSet<>(); seeds.add(new EdgeSeed("1", "B", false)); seeds.add(new EdgeSeed("5", "C", false)); rddQuery = new GetJavaRDDOfElements.Builder<EdgeSeed>().javaSparkContext(sparkContext) .setIncludeEntities(false).seeds(seeds).build(); rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString); rdd = graph1.execute(rddQuery, user); if (rdd == null) { fail("No RDD returned"); } results.clear(); results.addAll(rdd.collect()); final Edge edge5C = new Edge(EDGE_GROUP); edge5C.setSource("5"); edge5C.setDestination("C"); edge5C.setDirected(false); edge5C.putProperty("count", 4); expectedElements.clear(); expectedElements.add(edge1B); expectedElements.add(edge5C); assertEquals(expectedElements, results); sparkContext.stop(); }
From source file:control.ExperimentImportController.java
/**
 * Wires up the "missing information" questionnaire: a value-change listener that
 * re-translates sample summaries via the questionnaire vocabularies, assigns
 * barcodes/experiment codes to all new samples, and enables registration; plus
 * project-code validators and focus/click/space-change listeners on the project
 * selection component.
 *
 * @param prep                    provides the parsed sample summaries and processed sample levels
 * @param missingCategoryToValues categories (Species/Tissues/Analytes) whose values need user input
 * @param catToVocabulary         vocabulary options per category, passed to the questionnaire component
 */
protected void initMissingInfoListener(SamplePreparator prep,
    Map<String, List<String>> missingCategoryToValues, Map<String, List<String>> catToVocabulary) {
  extCodeToBarcode = new HashMap<String, String>();
  // TODO where is this added? does it need to be added?
  ProjectInformationComponent projectInfoComponent =
      new ProjectInformationComponent(vocabs.getSpaces(), vocabs.getPeople().keySet());
  // Fires whenever the questionnaire changes; rebuilds the whole preview/registration state.
  ValueChangeListener missingInfoFilledListener = new ValueChangeListener() {
    @Override
    public void valueChange(ValueChangeEvent event) {
      boolean overflow = false;
      boolean infoComplete = questionaire.isValid();
      boolean samplesToRegister = false;
      if (infoComplete) {
        // Translate each summary's content through the questionnaire vocabulary
        // (falling back to reverseTissueMap, then the raw value).
        List<SampleSummaryBean> summaries = prep.getSummary();
        for (SampleSummaryBean b : summaries) {
          String cat = "";
          if (b.getSampleType().contains("Source"))
            cat = "Species";
          else if (b.getSampleType().contains("Sample Extract"))
            cat = "Tissues";
          else if (b.getSampleType().contains("Preparations"))
            cat = "Analytes";
          if (missingCategoryToValues.containsKey(cat)) {
            String val = b.getFullSampleContent();
            List<String> newVal = new ArrayList<String>();
            for (String v : val.split(", ")) {
              v = v.trim();
              String translation = questionaire.getVocabularyLabelForValue(cat, v);
              if (translation == null)
                translation = reverseTissueMap.get(v);
              if (translation == null)
                translation = v;
              newVal.add(translation);
            }
            b.setSampleContent(StringUtils.join(newVal, ", "));
          }
        }
        view.setSummary(summaries);
        projectInfo = projectInfoComponent.getProjectInfo();
        String space = questionaire.getSpaceCode();
        String project = questionaire.getProjectCode();
        try {
          // Side effect: sets firstFreeEntityID / barcode state for this project.
          countExistingOpenbisEntities(space, project);
        } catch (TooManySamplesException e1) {
          // TODO Auto-generated catch block
          overflow = true;
        }
        int entityNum = firstFreeEntityID;
        nextBarcode = null;
        List<List<ISampleBean>> processed = prep.getProcessed();
        Set<String> msCodes = new HashSet<String>();
        dnaSampleCodeToMHCType = new HashMap<String, MHCTyping>();
        // Maps user-supplied MS/ligand experiment names to freshly assigned experiment codes.
        Map<String, String> specialExpToExpCode = new HashMap<String, String>();
        for (List<ISampleBean> level : processed) {
          String type = level.get(0).getType();
          String exp = "";
          // MS runs and ligand extracts get their experiment codes lazily, per
          // distinct experiment name (see specialExpToExpCode below).
          if (!type.equals("Q_MS_RUN") && !type.equals("Q_MHC_LIGAND_EXTRACT"))
            exp = getNextExperiment(project);
          // list of existing samples to be removed before registration
          List<ISampleBean> existing = new ArrayList<ISampleBean>();
          for (ISampleBean b : level) {
            TSVSampleBean t = (TSVSampleBean) b;
            String extID = (String) t.getMetadata().get("Q_EXTERNALDB_ID");
            if (extIDToSample.containsKey(extID)) {
              // Sample already registered in openBIS: remember its code, skip registration.
              existing.add(t);
              extCodeToBarcode.put(extID, extIDToSample.get(extID).getCode());
            } else {
              t.setProject(project);
              t.setSpace(space);
              String code = "";
              Map<String, Object> props = t.getMetadata();
              // Assign a code (and normalise vocabulary properties) per sample type.
              switch (t.getType()) {
                case "Q_BIOLOGICAL_ENTITY":
                  code = project + "ENTITY-" + entityNum;
                  String newVal = questionaire.getVocabularyLabelForValue("Species",
                      props.get("Q_NCBI_ORGANISM"));
                  props.put("Q_NCBI_ORGANISM", taxMap.get(newVal));
                  entityNum++;
                  break;
                case "Q_BIOLOGICAL_SAMPLE":
                  try {
                    incrementOrCreateBarcode(project);
                  } catch (TooManySamplesException e) {
                    overflow = true;
                  }
                  code = nextBarcode;
                  newVal = questionaire.getVocabularyLabelForValue("Tissues",
                      props.get("Q_PRIMARY_TISSUE"));
                  props.put("Q_PRIMARY_TISSUE", tissueMap.get(newVal));
                  break;
                case "Q_TEST_SAMPLE":
                  try {
                    incrementOrCreateBarcode(project);
                  } catch (TooManySamplesException e) {
                    overflow = true;
                  }
                  code = nextBarcode;
                  newVal = questionaire.getVocabularyLabelForValue("Analytes",
                      props.get("Q_SAMPLE_TYPE"));
                  props.put("Q_SAMPLE_TYPE", newVal);
                  if (getImportType().equals(DesignType.MHC_Ligands_Finished)) {
                    // DNA samples carry MHC typings as transient props; move them
                    // into the code->typing map instead of registering them.
                    if ("DNA".equals(newVal)) {
                      List<String> c1 = (List<String>) props.get("MHC_I");
                      List<String> c2 = (List<String>) props.get("MHC_II");
                      dnaSampleCodeToMHCType.put(code, new MHCTyping(c1, c2));
                      props.remove("MHC_I");
                      props.remove("MHC_II");
                    }
                  }
                  break;
                case "Q_MHC_LIGAND_EXTRACT":
                  try {
                    incrementOrCreateBarcode(project);
                  } catch (TooManySamplesException e) {
                    overflow = true;
                  }
                  code = nextBarcode;
                  if (!specialExpToExpCode.containsKey(t.getExperiment())) {
                    specialExpToExpCode.put(t.getExperiment(), getNextExperiment(project));
                  }
                  exp = specialExpToExpCode.get(t.getExperiment());
                  break;
                case "Q_MS_RUN":
                  // get ms experiment to connect it correctly
                  if (!specialExpToExpCode.containsKey(t.getExperiment())) {
                    specialExpToExpCode.put(t.getExperiment(), getNextExperiment(project));
                  }
                  exp = specialExpToExpCode.get(t.getExperiment());
                  // get parent sample for code
                  String parentExtID = t.fetchParentIDs().get(0);
                  String parentCode = extCodeToBarcode.get(parentExtID);// .getCode();
                  // Find the first unused MS<i><parentCode> code.
                  int msRun = 1;
                  code = "";
                  while (code.isEmpty() || msCodes.contains(code)) {
                    code = "MS" + Integer.toString(msRun) + parentCode;
                    msRun++;
                  }
                  msCodes.add(code);
                  break;
              }
              t.setExperiment(exp);
              t.setCode(code);
              extCodeToBarcode.put((String) props.get("Q_EXTERNALDB_ID"), code);// t);
              // Re-point parent references from external IDs to the assigned barcodes.
              List<String> parents = t.fetchParentIDs();
              t.setParents("");
              for (String parentExtID : parents) {
                if (extCodeToBarcode.containsKey(parentExtID))
                  t.addParent(extCodeToBarcode.get(parentExtID));// .getCode());
                else
                  logger.warn(
                      "Parent could not be translated, because no ext id to code mapping was found for ext id "
                          + parentExtID);
              }
            }
          }
          // remove existing samples from registration process
          level.removeAll(existing);
          samplesToRegister |= !level.isEmpty();
        }
        fixSpecialExperiments(specialExpToExpCode);
        view.setProcessed(processed);
      }
      view.setRegEnabled(infoComplete && samplesToRegister && !overflow);
      if (infoComplete) {
        if (!samplesToRegister) {
          Styles.notification("Samples already exist.",
              "Every Analyte ID was already found in existing samples of this project.",
              NotificationType.DEFAULT);
        }
        if (overflow) {
          Styles.notification("Too many samples.",
              "This experiment exceeds the maximum number of samples for one project.",
              NotificationType.ERROR);
        }
      }
    }

    // Re-keys the mhc/ms property maps from user experiment names to the
    // experiment codes assigned above.
    // NOTE(review): if specialExpToExpCode has no entry for a key, the value is
    // re-inserted under a null key — confirm that cannot occur here.
    private void fixSpecialExperiments(Map<String, String> specialExpToExpCode) {
      Set<String> codes = new HashSet<String>();
      if (mhcProperties != null) {
        codes.addAll(mhcProperties.keySet());
        for (String code : codes) {
          mhcProperties.put(specialExpToExpCode.get(code), mhcProperties.get(code));
          mhcProperties.remove(code);
        }
      }
      codes.clear();
      if (msProperties != null) {
        codes.addAll(msProperties.keySet());
        for (String code : codes) {
          msProperties.put(specialExpToExpCode.get(code), msProperties.get(code));
          msProperties.remove(code);
        }
      }
    }
  };
  questionaire = view.initMissingInfoComponent(projectInfoComponent, missingCategoryToValues,
      catToVocabulary, missingInfoFilledListener);
  // view.addComponent(questionaire);
  // add project code validators
  TextField f = projectInfoComponent.getProjectField();
  CompositeValidator vd = new CompositeValidator();
  RegexpValidator p = new RegexpValidator("Q[A-Xa-x0-9]{4}",
      "Project must have length of 5, start with Q and not contain Y or Z");
  vd.addValidator(p);
  vd.addValidator(new ProjectNameValidator(openbis));
  f.addValidator(vd);
  f.setImmediate(true);
  f.setValidationVisible(true);
  // When the project field gains focus while empty/invalid, offer a generated code.
  FocusListener fListener = new FocusListener() {
    private static final long serialVersionUID = 8721337946386845992L;

    @Override
    public void focus(FocusEvent event) {
      TextField pr = projectInfoComponent.getProjectField();
      if (!pr.isValid() || pr.isEmpty()) {
        // new project selected...keep generating codes until one is valid
        projectInfoComponent.tryEnableCustomProject(generateUnusedProjectCode());
      }
    }
  };
  projectInfoComponent.getProjectField().addFocusListener(fListener);
  // Reload button: same fallback to a generated code when no project is selected.
  Button.ClickListener projCL = new Button.ClickListener() {
    private static final long serialVersionUID = -6646294420820222646L;

    @Override
    public void buttonClick(ClickEvent event) {
      String existingProject = (String) projectInfoComponent.getProjectBox().getValue();
      if (existingProject == null || existingProject.isEmpty()) {
        projectInfoComponent.tryEnableCustomProject(generateUnusedProjectCode());
      }
    }
  };
  projectInfoComponent.getProjectReloadButton().addClickListener(projCL);
  // Space selection change: repopulate the project code list for that space.
  questionaire.getSpaceBox().addValueChangeListener(new ValueChangeListener() {
    @Override
    public void valueChange(ValueChangeEvent event) {
      questionaire.resetProjects();
      String space = questionaire.getSpaceCode();
      if (space != null) {
        List<String> projects = new ArrayList<String>();
        for (Project p : openbis.getProjectsOfSpace(space)) {
          String code = p.getCode();
          projects.add(code);
        }
        questionaire.setProjectCodes(projects);
      }
    }
  });
}
From source file:org.asqatasun.service.command.AuditCommandImpl.java
/**
 * Runs the processing phase of the audit: fetches SSP content from the web
 * resource in windows of {@code processingTreatmentWindow}, runs the
 * processor service over each window, persists the gross results, and then
 * advances the audit status (CONSOLIDATION on success, ERROR when no gross
 * result was produced). Returns early unless the audit is in PROCESSING state.
 */
@Override
public void process() {
    audit = auditDataService.getAuditWithTest(audit.getId());
    // Guard: only an audit currently in PROCESSING state may be processed.
    if (!audit.getStatus().equals(AuditStatus.PROCESSING)) {
        LOGGER.warn(
                new StringBuilder(AUDIT_STATUS_IS_LOGGER_STR).append(audit.getStatus()).append(WHILE_LOGGER_STR)
                        .append(AuditStatus.PROCESSING).append(WAS_REQUIRED_LOGGER_STR).toString());
        return;
    }
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Processing " + audit.getSubject().getURL());
    }
    // debug tools — timing fields are only populated when debug logging is on
    Date beginProcessDate = null;
    Date endProcessDate = null;
    Date endPersistDate;
    Long persistenceDuration = (long) 0;
    Long i = (long) 0;
    Long webResourceId = audit.getSubject().getId();
    Long nbOfContent = contentDataService.getNumberOfSSPFromWebResource(audit.getSubject(), HttpStatus.SC_OK);
    Set<ProcessResult> processResultSet = new HashSet<>();
    // Process content in windows of processingTreatmentWindow until all
    // nbOfContent SSPs have been handled.
    while (i.compareTo(nbOfContent) < 0) {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(new StringBuilder("Processing from ").append(i).append(TO_LOGGER_STR)
                    .append(i + processingTreatmentWindow).append("for ").append(audit.getSubject().getURL())
                    .toString());
            beginProcessDate = Calendar.getInstance().getTime();
        }
        Collection<Content> contentList = contentDataService
                .getSSPWithRelatedContentFromWebResource(webResourceId, i, processingTreatmentWindow, false);
        // The same set instance is reused across windows; clear before refilling.
        processResultSet.clear();
        processResultSet.addAll(processorService.process(contentList, audit.getTestList()));
        for (ProcessResult processResult : processResultSet) {
            processResult.setGrossResultAudit(audit);
        }
        if (LOGGER.isDebugEnabled()) {
            endProcessDate = Calendar.getInstance().getTime();
            LOGGER.debug(new StringBuilder("Processing of ").append(processingTreatmentWindow)
                    .append(" elements took ").append(endProcessDate.getTime() - beginProcessDate.getTime())
                    .append(MS_LOGGER_STR).append("for ").append(audit.getSubject().getURL()).toString());
        }
        if (LOGGER.isDebugEnabled()) {
            for (Content content : contentList) {
                LOGGER.debug("Persisting result for page " + content.getURI());
            }
        }
        processResultDataService.saveOrUpdate(processResultSet);
        if (LOGGER.isDebugEnabled()) {
            endPersistDate = Calendar.getInstance().getTime();
            LOGGER.debug(new StringBuilder("Persist processing of ").append(processingTreatmentWindow)
                    .append(" elements took ").append(endPersistDate.getTime() - endProcessDate.getTime())
                    .append(MS_LOGGER_STR).append("for ").append(audit.getSubject().getURL()).toString());
            persistenceDuration = persistenceDuration + (endPersistDate.getTime() - endProcessDate.getTime());
        }
        i = i + processingTreatmentWindow;
        // Explicit GC hint after each window, presumably to release the large
        // content graph just processed — NOTE(review): confirm this is still needed.
        System.gc();
    }
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(new StringBuilder("Application spent ").append(persistenceDuration)
                .append(" ms to write in Disk while processing").toString());
    }
    // Advance the audit lifecycle: consolidation when results exist, error otherwise.
    if (processResultDataService.getNumberOfGrossResultFromAudit(audit) > 0) {
        setStatusToAudit(AuditStatus.CONSOLIDATION);
    } else {
        LOGGER.error("Audit has no gross result");
        setStatusToAudit(AuditStatus.ERROR);
    }
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info(audit.getSubject().getURL() + " has been processed");
    }
}
From source file:tools.xor.logic.DefaultUpdate4Set.java
public void testCase22() { C = taskDao.findById(C.getId());// w ww . j a v a 2 s . c o m Set<Task> children = new HashSet<Task>(); children.add(C); // Setup the bi-directional link A = taskDao.findById(A.getId()); A.setTaskChildren(children); C.setTaskParent(A); taskDao.saveOrUpdate(A); A = (Task) aggregateService.read(A, getSettings()); C = A.getTaskChildren().iterator().next(); assert (C.getTaskParent() != null); // Remove child1 and add child2 children = A.getTaskChildren(); children.clear(); children.add(B); children.add(D); A.setAlternateTask(C); C.setTaskParent(null); B.setTaskParent(A); D.setTaskParent(A); Settings settings = getSettings(); settings.setInterceptor(new Interceptor() { // check the number of actions in each object private void checkNumber(BusinessObject dataObject, List<Executable> actions) { Task task = (Task) dataObject.getInstance(); if (task.getName().equals(A_NAME)) { assert (actions.size() == 1); // SetUpdateAction assert (SetUpdateAction.class.isAssignableFrom(actions.get(0).getClass())); } else assert (actions.size() == 2); } @Override public void preBiDirActionStage(Map<PropertyKey, List<Executable>> actions) { // check the action queue to see if the correct number of actions are present assert (actions.size() == 4); for (Map.Entry<PropertyKey, List<Executable>> entry : actions.entrySet()) checkNumber(entry.getKey().getDataObject(), entry.getValue()); } }); A = (Task) aggregateService.update(A, settings); A = taskDao.findById(A.getId()); B = taskDao.findById(B.getId()); D = taskDao.findById(D.getId()); A = (Task) aggregateService.read(A, getSettings()); B = (Task) aggregateService.read(B, getSettings()); D = (Task) aggregateService.read(D, getSettings()); assert (A.getTaskChildren() != null && A.getTaskChildren().size() == 2); }
From source file:org.carewebframework.maven.plugin.theme.CSSTransform.java
/**
 * Streams the source CSS through a character-level state machine, copying
 * every byte to the output while collecting selector matches and the
 * property/value pairs of each declaration block. When a block ends (state
 * -1), the collected styles are substituted into any matched reference
 * templates (DELIM-delimited placeholders). Finally the before/after hooks
 * are matched, both maps are written out, and unmatched source entries are
 * reported as warnings.
 *
 * State codes: -1 = flush styles into templates, 0 = baseline (building a
 * selector), 1-3 = comment detection, 10 = inside a &lt;directive&gt;,
 * 20 = inside a declaration block (building a property name), 30 = building
 * a property value.
 *
 * @throws Exception if reading or writing the streams fails
 */
@Override
public void process() throws Exception {
    int c = 0;
    int state = 0;
    StringBuilder sb = new StringBuilder();
    Set<String> matches = new HashSet<String>();
    Map<String, String> refMap = new LinkedHashMap<String, String>();
    Map<String, String> styles = new HashMap<String, String>();
    String prop = null;
    checkForMatch("@before@", matches, refMap);
    while (c != -1) {
        c = inputStream.read();
        if (c == -1) {
            // EOF is treated like the end of a declaration block.
            state = -1;
        } else {
            outputStream.write(c);
        }
        switch (state) {
        case -1: // Process template: substitute collected styles into matched templates
            for (String match : matches) {
                String template = refMap.get(match);
                if (!template.contains(DELIM)) {
                    continue;
                }
                for (Entry<String, String> entry : styles.entrySet()) {
                    String replace = DELIM + entry.getKey() + DELIM;
                    template = template.replace(replace, entry.getValue());
                }
                refMap.put(match, template);
            }
            matches.clear();
            styles.clear();
            if (c == -1) {
                continue;
            }
            state = 0;
            // Fall through intended
        case 0: // Baseline: accumulating a selector
            switch (c) {
            case '/': // Possible comment start
                state = 1;
                break;
            case '<': // Directive start
                state = 10;
                break;
            case '{': // Declaration block
                state = 20;
                break;
            case ',': // Selector separator
                break;
            case '}': // Don't know why these occur, but ignore them.
                continue;
            default:
                sb.append((char) c);
                continue;
            }
            // Reached on '/', '<', '{' or ',': the selector text is complete.
            checkForMatch(sb.toString(), matches, refMap);
            sb.setLength(0);
            break;
        case 1: // Possible comment ('/' seen; '*' confirms)
            state = c == '*' ? 2 : 0;
            break;
        case 2: // Inside comment; '*' may start the terminator
            state = c == '*' ? 3 : state;
            break;
        case 3: // Comment end ('*' seen; '/' closes, '*' stays, else back inside)
            state = c == '/' ? 0 : c == '*' ? state : 2;
            break;
        case 10: // Directive end
            state = c == '>' ? 0 : state;
            break;
        case 20: // Declaration block: accumulating a property name
            switch (c) {
            case '}': // End block
                state = -1;
                break;
            case ':': // Start of property value
                prop = sb.toString().trim();
                sb.setLength(0);
                state = 30;
                break;
            default: // Build property name
                sb.append((char) c);
                break;
            }
            break;
        case 30: // Property value
            switch (c) {
            case ';': // Property separator
            case '}': // Block terminator
                styles.put(prop, sb.toString());
                sb.setLength(0);
                state = c == ';' ? 20 : -1;
                break;
            default: // Build property value
                sb.append((char) c);
                break;
            }
            break;
        }
    }
    checkForMatch("@after@", matches, defMap);
    writeMap(refMap);
    writeMap(defMap);
    // Anything left in srcMap never matched a selector in the source CSS.
    if (!srcMap.isEmpty()) {
        mojo.getLog().warn("The following entries failed to match and were ignored:");
        for (Entry<String, String> entry : srcMap.entrySet()) {
            mojo.getLog().warn(" " + entry);
        }
    }
}
From source file:gov.nih.nci.ispy.web.taglib.CorrScatterPlotTag.java
public int doStartTag() { chart = null;// w w w .j av a2 s . co m plotPoints.clear(); ServletRequest request = pageContext.getRequest(); HttpSession session = pageContext.getSession(); Object o = request.getAttribute(beanName); JspWriter out = pageContext.getOut(); ServletResponse response = pageContext.getResponse(); try { //retrieve the Finding from cache and build the list of PCAData points ISPYCorrelationFinding corrFinding = (ISPYCorrelationFinding) businessTierCache .getSessionFinding(session.getId(), taskId); Collection<ClinicalFactorType> clinicalFactors = new ArrayList<ClinicalFactorType>(); List<String> sampleIds = new ArrayList<String>(); List<DataPoint> points = corrFinding.getDataPoints(); ClinicalDataService cqs = ClinicalDataServiceFactory.getInstance(); IdMapperFileBasedService idMapper = IdMapperFileBasedService.getInstance(); List<ISPYPlotPoint> plotPoints = new ArrayList<ISPYPlotPoint>(); ISPYPlotPoint pp; SampleInfo si; ISPYclinicalDataQueryDTO dto; Set<String> sampleHolder = new HashSet<String>(); //set just holds one entry need this for the dto Set<PatientData> dataHolder = new HashSet<PatientData>(); PatientData pd = null; for (DataPoint p : points) { pp = new ISPYPlotPoint(p.getId()); pp.setX(p.getX()); pp.setY(p.getY()); pp.setZ(p.getZ()); String patientId = null; if (corrFinding.isSampleBased()) { si = idMapper.getSampleInfoForLabtrackId(p.getId()); if (si != null) { pp.setSampleInfo(si); patientId = si.getISPYId(); } else { logger.warn("Could not get sample info for DataPoint=" + p.getId()); } } else if (corrFinding.isPatientBased()) { patientId = p.getId(); } if (patientId != null) { dto = new ISPYclinicalDataQueryDTO(); sampleHolder.clear(); sampleHolder.add(patientId); dto.setRestrainingSamples(sampleHolder); dataHolder.clear(); dataHolder = cqs.getClinicalData(dto); if (dataHolder.size() == 1) { Iterator i = dataHolder.iterator(); pd = (PatientData) i.next(); pp.setPatientData(pd); } else { logger.error("Internal Error. 
Did not get back correct patient data for patientId=" + patientId); } } plotPoints.add(pp); } ISPYCorrelationScatterPlot plot = new ISPYCorrelationScatterPlot(plotPoints, corrFinding.getGroup1Name(), corrFinding.getGroup2Name(), corrFinding.getContinuousType1(), corrFinding.getContinuousType2(), corrFinding.getCorrelationValue(), ColorByType.valueOf(ColorByType.class, colorBy.toUpperCase())); chart = plot.getChart(); ISPYImageFileHandler imageHandler = new ISPYImageFileHandler(session.getId(), "png", 650, 600); //The final complete path to be used by the webapplication String finalPath = imageHandler.getSessionTempFolder(); String finalURLpath = imageHandler.getFinalURLPath(); /* * Create the actual charts, writing it to the session temp folder */ ChartRenderingInfo info = new ChartRenderingInfo(new StandardEntityCollection()); String mapName = imageHandler.createUniqueMapName(); //PrintWriter writer = new PrintWriter(new FileWriter(mapName)); ChartUtilities.writeChartAsPNG(new FileOutputStream(finalPath), chart, 650, 600, info); //ImageMapUtil.writeBoundingRectImageMap(writer,"PCAimageMap",info,true); //writer.close(); /* This is here to put the thread into a loop while it waits for the * image to be available. It has an unsophisticated timer but at * least it is something to avoid an endless loop. **/ boolean imageReady = false; int timeout = 1000; FileInputStream inputStream = null; while (!imageReady) { timeout--; try { inputStream = new FileInputStream(finalPath); inputStream.available(); imageReady = true; inputStream.close(); } catch (IOException ioe) { imageReady = false; if (inputStream != null) { inputStream.close(); } } if (timeout <= 1) { break; } } out.print(ImageMapUtil.getBoundingRectImageMapTag(mapName, true, info)); finalURLpath = finalURLpath.replace("\\", "/"); long randomness = System.currentTimeMillis(); //prevent image caching out.print("<img id=\"geneChart\" name=\"geneChart\" src=\"" + finalURLpath + "?" 
+ randomness + "\" usemap=\"#" + mapName + "\" border=\"0\" />"); //(imageHandler.getImageTag(mapFileName)); } catch (IOException e) { logger.error(e); } catch (Exception e) { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); e.printStackTrace(pw); logger.error(sw.toString()); } catch (Throwable t) { logger.error(t); } return EVAL_BODY_INCLUDE; }
From source file:com.google.gwt.emultest.java.util.TreeSetTest.java
/** * Test method for 'java.util.Set.clear()'. * * @see java.util.Set#clear()//from ww w . j av a 2s . c o m */ public void testClear_throwsUnsupportedOperationException() { Set<E> set = createSet(); if (!isClearSupported) { try { set.clear(); fail("expected exception"); } catch (UnsupportedOperationException e) { // expected outcome } } }
From source file:org.apache.camel.util.IntrospectionSupport.java
private static Set<Method> findSetterMethods(TypeConverter typeConverter, Class<?> clazz, String name, Object value) {/*from w ww .ja va 2 s.c o m*/ Set<Method> candidates = new LinkedHashSet<Method>(); // Build the method name. name = "set" + ObjectHelper.capitalize(name); while (clazz != Object.class) { // Since Object.class.isInstance all the objects, // here we just make sure it will be add to the bottom of the set. Method objectSetMethod = null; Method[] methods = clazz.getMethods(); for (Method method : methods) { Class<?> params[] = method.getParameterTypes(); if (method.getName().equals(name) && params.length == 1) { Class<?> paramType = params[0]; if (paramType.equals(Object.class)) { objectSetMethod = method; } else if (typeConverter != null || isSetter(method) || paramType.isInstance(value)) { candidates.add(method); } } } if (objectSetMethod != null) { candidates.add(objectSetMethod); } clazz = clazz.getSuperclass(); } if (candidates.isEmpty()) { return candidates; } else if (candidates.size() == 1) { // only one return candidates; } else { // find the best match if possible if (LOG.isTraceEnabled()) { LOG.trace("Found " + candidates.size() + " suitable setter methods for setting " + name); } // prefer to use the one with the same instance if any exists for (Method method : candidates) { if (method.getParameterTypes()[0].isInstance(value)) { if (LOG.isTraceEnabled()) { LOG.trace("Method " + method + " is the best candidate as it has parameter with same instance type"); } // retain only this method in the answer candidates.clear(); candidates.add(method); return candidates; } } // fallback to return what we have found as candidates so far return candidates; } }
From source file:org.apache.hadoop.hdfs.server.blockmanagement.TestReplicationPolicyWithNodeGroup.java
/** * In this testcase, client is dataNodes[0], but the dataNodes[1] is * not allowed to be chosen. So the 1st replica should be * placed on dataNodes[0], the 2nd replica should be placed on a different * rack, the 3rd should be on same rack as the 2nd replica but in different * node group, and the rest should be placed on a third rack. * @throws Exception/*from ww w .ja va2 s. c o m*/ */ @Test public void testChooseTarget2() throws Exception { DatanodeStorageInfo[] targets; BlockPlacementPolicyDefault repl = (BlockPlacementPolicyDefault) replicator; List<DatanodeStorageInfo> chosenNodes = new ArrayList<DatanodeStorageInfo>(); Set<Node> excludedNodes = new HashSet<Node>(); excludedNodes.add(dataNodes[1]); targets = repl.chooseTarget(filename, 4, dataNodes[0], chosenNodes, false, excludedNodes, BLOCK_SIZE, TestBlockStoragePolicy.DEFAULT_STORAGE_POLICY); assertEquals(targets.length, 4); assertEquals(storages[0], targets[0]); assertTrue(cluster.isNodeGroupAware()); // Make sure no replicas are on the same nodegroup for (int i = 1; i < 4; i++) { assertFalse(isOnSameNodeGroup(targets[0], targets[i])); } assertTrue(isOnSameRack(targets[1], targets[2]) || isOnSameRack(targets[2], targets[3])); assertFalse(isOnSameRack(targets[1], targets[3])); excludedNodes.clear(); chosenNodes.clear(); excludedNodes.add(dataNodes[1]); chosenNodes.add(storages[2]); targets = repl.chooseTarget(filename, 1, dataNodes[0], chosenNodes, true, excludedNodes, BLOCK_SIZE, TestBlockStoragePolicy.DEFAULT_STORAGE_POLICY); System.out.println("targets=" + Arrays.asList(targets)); assertEquals(2, targets.length); //make sure that the chosen node is in the target. int i = 0; for (; i < targets.length && !storages[2].equals(targets[i]); i++) ; assertTrue(i < targets.length); }