List of usage examples for java.util.Collection.clear()
void clear();
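A minimal sketch of the basic contract of clear(): it removes every element in place and leaves the same collection instance usable afterwards, unlike reassigning a new collection. Class and variable names below are illustrative only.

import java.util.ArrayList;
import java.util.Collection;

public class CollectionClearDemo {
    public static void main(String[] args) {
        Collection<String> names = new ArrayList<>();
        names.add("alpha");
        names.add("beta");

        names.clear();                       // removes all elements in place

        System.out.println(names.isEmpty()); // true
        names.add("gamma");                  // the same instance is still usable after clear()
        System.out.println(names.size());    // 1
    }
}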
From source file:ubic.gemma.core.analysis.service.ArrayDesignAnnotationServiceImpl.java
private int generateAnnotationFile(Writer writer,
        Map<CompositeSequence, Collection<BioSequence2GeneProduct>> genesWithSpecificity, OutputType ty)
        throws IOException {
    int compositeSequencesProcessed = 0;
    int simple = 0;
    int empty = 0;
    int complex = 0;

    // We used LinkedHashSets to keep everything in a predictable order - this is important for the gene symbols,
    // descriptions and NCBI IDs (but not important for GO terms). When a probe maps to multiple genes, we list
    // those three items for the genes in the same order. There is a feature request to make the order
    // deterministic (i.e., lexicographic sort); this could be done by using little gene objects or whatever.
    Collection<OntologyTerm> goTerms = new LinkedHashSet<>();
    Set<String> genes = new LinkedHashSet<>();
    Set<String> geneDescriptions = new LinkedHashSet<>();
    Set<String> geneIds = new LinkedHashSet<>();
    Set<String> ncbiIds = new LinkedHashSet<>();

    Map<Gene, Collection<Characteristic>> goMappings = this.getGOMappings(genesWithSpecificity);

    for (CompositeSequence cs : genesWithSpecificity.keySet()) {
        Collection<BioSequence2GeneProduct> geneclusters = genesWithSpecificity.get(cs);

        if (++compositeSequencesProcessed % 2000 == 0 && ArrayDesignAnnotationServiceImpl.log.isInfoEnabled()) {
            ArrayDesignAnnotationServiceImpl.log.info("Processed " + compositeSequencesProcessed + "/"
                    + genesWithSpecificity.size() + " compositeSequences " + empty + " empty; " + simple
                    + " simple; " + complex + " complex;");
        }

        if (geneclusters.isEmpty()) {
            this.writeAnnotationLine(writer, cs.getName(), "", "", null, "", "");
            empty++;
            continue;
        }

        if (geneclusters.size() == 1) {
            // common case, do it quickly.
            BioSequence2GeneProduct b2g = geneclusters.iterator().next();
            Gene g = b2g.getGeneProduct().getGene();
            goTerms = this.getGoTerms(goMappings.get(g), ty);
            String gemmaId = g.getId() == null ? "" : g.getId().toString();
            String ncbiId = g.getNcbiGeneId() == null ? "" : g.getNcbiGeneId().toString();
            this.writeAnnotationLine(writer, cs.getName(), g.getOfficialSymbol(), g.getOfficialName(), goTerms,
                    gemmaId, ncbiId);
            simple++;
            continue;
        }

        goTerms.clear();
        genes.clear();
        geneDescriptions.clear();
        geneIds.clear();
        ncbiIds.clear();

        for (BioSequence2GeneProduct bioSequence2GeneProduct : geneclusters) {
            Gene g = bioSequence2GeneProduct.getGeneProduct().getGene();
            genes.add(g.getOfficialSymbol());
            geneDescriptions.add(g.getOfficialName());
            geneIds.add(g.getId().toString());
            Integer ncbiGeneId = g.getNcbiGeneId();
            if (ncbiGeneId != null) {
                ncbiIds.add(ncbiGeneId.toString());
            }
            goTerms.addAll(this.getGoTerms(goMappings.get(g), ty));
        }

        String geneString = StringUtils.join(genes, "|");
        String geneDescriptionString = StringUtils.join(geneDescriptions, "|");
        String geneIdsString = StringUtils.join(geneIds, "|");
        String ncbiIdsString = StringUtils.join(ncbiIds, "|");

        this.writeAnnotationLine(writer, cs.getName(), geneString, geneDescriptionString, goTerms,
                geneIdsString, ncbiIdsString);
        complex++;
    }
    writer.close();
    return compositeSequencesProcessed;
}
From source file:org.apache.ojb.otm.core.ConcreteEditingContext.java
private void setCollectionField(Object obj, PersistentField f, List newCol) {
    Class type = f.getType();

    if (Collection.class.isAssignableFrom(type)) {
        Collection col = (Collection) f.get(obj);

        if (col == null) {
            if (type == List.class || type == Collection.class) {
                col = new ArrayList();
            } else if (type == Set.class) {
                col = new HashSet();
            } else {
                try {
                    col = (Collection) type.newInstance();
                } catch (Throwable ex) {
                    System.err.println("Cannot instantiate collection field: " + f);
                    ex.printStackTrace();
                    return;
                }
            }
        } else {
            if (col instanceof CollectionProxyDefaultImpl) {
                CollectionProxyDefaultImpl cp = (CollectionProxyDefaultImpl) col;
                if (col instanceof List) {
                    col = new ListProxyDefaultImpl(_pb.getPBKey(), cp.getData().getClass(), null);
                } else if (col instanceof Set) {
                    col = new SetProxyDefaultImpl(_pb.getPBKey(), cp.getData().getClass(), null);
                } else {
                    col = new CollectionProxyDefaultImpl(_pb.getPBKey(), cp.getData().getClass(), null);
                }
                col.clear();
            } else {
                try {
                    col = (Collection) col.getClass().newInstance();
                } catch (Exception ex) {
                    System.err.println("Cannot instantiate collection field: " + f);
                    ex.printStackTrace();
                    return;
                }
            }
        }
        col.addAll(newCol);
        f.set(obj, col);
    } else if (type.isArray()) {
        int length = newCol.size();
        Object array = Array.newInstance(type.getComponentType(), length);
        for (int i = 0; i < length; i++) {
            Array.set(array, i, newCol.get(i));
        }
        f.set(obj, array);
    }
}
From source file:ubic.gemma.persistence.service.expression.experiment.ExpressionExperimentDaoImpl.java
@Override
public Map<ExpressionExperiment, BioMaterial> findByBioMaterials(Collection<BioMaterial> bms) {
    if (bms == null || bms.size() == 0) {
        return new HashMap<>();
    }
    //language=HQL
    final String queryString = "select distinct ee, sample from ExpressionExperiment as ee "
            + "inner join ee.bioAssays as ba inner join ba.sampleUsed as sample where sample in (:bms) group by ee";

    Map<ExpressionExperiment, BioMaterial> results = new HashMap<>();
    Collection<BioMaterial> batch = new HashSet<>();

    for (BioMaterial o : bms) {
        batch.add(o);
        if (batch.size() == ExpressionExperimentDaoImpl.BATCH_SIZE) {
            //noinspection unchecked
            List<Object> r = this.getSessionFactory().getCurrentSession().createQuery(queryString)
                    .setParameterList("bms", batch).list();
            for (Object a : r) {
                ExpressionExperiment e = (ExpressionExperiment) ((Object[]) a)[0];
                BioMaterial b = (BioMaterial) ((Object[]) a)[1];
                // representative, there may have been multiple used as inputs
                results.put(e, b);
            }
            batch.clear();
        }
    }

    if (batch.size() > 0) {
        //noinspection unchecked
        List<Object> r = this.getSessionFactory().getCurrentSession().createQuery(queryString)
                .setParameterList("bms", batch).list();
        for (Object a : r) {
            ExpressionExperiment e = (ExpressionExperiment) ((Object[]) a)[0];
            BioMaterial b = (BioMaterial) ((Object[]) a)[1];
            // representative, there may have been multiple used as inputs
            results.put(e, b);
        }
    }
    return results;
}
From source file:com.delphix.session.test.ServiceTest.java
private void closeTransports(ServiceNexus nexus, double chance) {
    Collection<ServiceTransport> xports = nexus.getTransports();
    Iterator<ServiceTransport> iter = xports.iterator();

    while (iter.hasNext()) {
        ServiceTransport xport = iter.next();

        // Randomly victimize a transport
        if (Math.random() < chance) {
            xport.close();
        }
    }

    xports.clear();
}
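A caveat for code like the above: clear() is an optional operation, so if a getter such as getTransports() handed back an unmodifiable view, the final clear() would throw UnsupportedOperationException. A standalone illustration using only standard JDK collections (independent of the test code above):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;

public class ClearOptionalOperationDemo {
    public static void main(String[] args) {
        Collection<String> names = new ArrayList<>(Arrays.asList("a", "b"));
        Collection<String> readOnly = Collections.unmodifiableCollection(names);

        names.clear();     // fine: the backing collection is now empty
        readOnly.clear();  // throws UnsupportedOperationException, even though the view is already empty
    }
}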
From source file:com.cloudera.knittingboar.sgd.iterativereduce.POLRMasterNode.java
@Override
public ParameterVectorGradientUpdatable compute(Collection<ParameterVectorGradientUpdatable> workerUpdates,
        Collection<ParameterVectorGradientUpdatable> masterUpdates) {

    System.out.println("\nMaster Compute: SuperStep - Worker Info ----- ");

    int x = 0;

    // reset
    //this.Global_Min_IterationCount = this.NumberPasses;
    boolean iterationComplete = true;

    for (ParameterVectorGradientUpdatable i : workerUpdates) {

        // not sure we still need this ---------------
        if (i.get().SrcWorkerPassCount > this.GlobalMaxPassCount) {
            this.GlobalMaxPassCount = i.get().SrcWorkerPassCount;
        }

        // if any worker is not done with the iteration, trip the flag
        if (i.get().IterationComplete == 0) {
            //this.Global_Min_IterationCount = i.get().IterationCount;
            iterationComplete = false;
        }

        System.out.println("[Master] WorkerReport[" + x + "]: I: " + i.get().CurrentIteration + ", IC: "
                + i.get().IterationComplete + " Trained Recs: " + i.get().TrainedRecords + " AvgLogLikelihood: "
                + i.get().AvgLogLikelihood + " PercentCorrect: " + i.get().PercentCorrect);

        if (i.get().IterationComplete == 1) {
            System.out.println("> worker " + x + " is done with current iteration");
        }

        x++;

        // accumulate gradient of parameter vectors
        this.global_parameter_vector.AccumulateGradient(i.get().parameter_vector);
    }

    // now average the parameter vectors together
    this.global_parameter_vector.AverageAccumulations(workerUpdates.size());

    LOG.debug("Master node accumulating and averaging " + workerUpdates.size() + " worker updates.");

    ParameterVectorGradient gradient_msg = new ParameterVectorGradient();
    gradient_msg.GlobalPassCount = this.GlobalMaxPassCount;

    /*
    if (iterationComplete) {
        gradient_msg.IterationComplete = 1;
        System.out.println("> Master says: Iteration Complete");
    } else {
        gradient_msg.IterationComplete = 0;
    }
    */

    gradient_msg.parameter_vector = this.global_parameter_vector.getMatrix().clone();

    ParameterVectorGradientUpdatable return_msg = new ParameterVectorGradientUpdatable();
    return_msg.set(gradient_msg);

    // set the master copy!
    this.polr.SetBeta(this.global_parameter_vector.getMatrix().clone());

    // THIS NEEDS TO BE DONE, probably automated!
    workerUpdates.clear();

    return return_msg;
}
From source file:org.cruk.genologics.api.impl.GenologicsAPIImpl.java
/**
 * Reflectively set all the attributes in {@code original} to the values given
 * by {@code updated}. This has the effect of making {@code original} the
 * same as {@code updated} but without requiring the client code to change the
 * object reference to {@code original}, which may be referenced in many places.
 *
 * <p>
 * Where a field is a Collection, the existing collection is emptied and all the
 * objects from that field in {@code updated} are added in the same order to the
 * collection in {@code original}. Whether this order is maintained depends on
 * the type of collection in {@code original} (a list will maintain order, a set
 * typically won't).
 * </p>
 *
 * <p>
 * Fields that are static, transient or final are ignored, as are any fields annotated
 * with the {@code @XmlTransient} annotation.
 * </p>
 *
 * <p>
 * Note that fields within the original object that are objects themselves (as opposed to
 * primitives) are replaced with the new versions. References to sub objects are therefore
 * no longer valid.
 * </p>
 *
 * @param original The original object that was provided in the call and needs updating.
 * @param updated The version of the object returned from the LIMS with the current state.
 *
 * @throws IllegalArgumentException if either {@code original} or {@code updated}
 * are null, or are of different classes.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
protected void reflectiveUpdate(Object original, Object updated) {
    if (original == null) {
        throw new IllegalArgumentException("original cannot be null");
    }
    if (updated == null) {
        throw new IllegalArgumentException("updated cannot be null");
    }
    if (!original.getClass().equals(updated.getClass())) {
        throw new IllegalArgumentException("original and updated are of different classes");
    }

    Class<?> clazz = original.getClass();
    do {
        Map<String, java.lang.reflect.Field> fieldMap = updaterFields.get(clazz);
        if (fieldMap == null) {
            fieldMap = Collections.synchronizedMap(new HashMap<String, java.lang.reflect.Field>());
            updaterFields.put(clazz, fieldMap);

            Class<?> currentClass = clazz;
            while (!Object.class.equals(currentClass)) {
                for (java.lang.reflect.Field field : currentClass.getDeclaredFields()) {
                    // Skip transient and XmlTransient fields.
                    if ((field.getModifiers() & REFLECTIVE_UPDATE_MODIFIER_MASK) == 0
                            && field.getAnnotation(XmlTransient.class) == null) {
                        field.setAccessible(true);
                        java.lang.reflect.Field clash = fieldMap.put(field.getName(), field);
                        if (clash != null) {
                            throw new AssertionError("There is more than one field with the name '"
                                    + field.getName() + " in the class hierarchy of " + clazz.getName() + " ("
                                    + getShortClassName(field.getDeclaringClass()) + " and "
                                    + getShortClassName(clash.getDeclaringClass()) + ")");
                        }
                    }
                }
                currentClass = currentClass.getSuperclass();
            }
        }

        for (java.lang.reflect.Field field : fieldMap.values()) {
            try {
                Object originalValue = field.get(original);
                Object updatedValue = field.get(updated);

                if (Collection.class.isAssignableFrom(field.getDeclaringClass())) {
                    Collection originalCollection = (Collection) originalValue;
                    Collection updatedCollection = (Collection) updatedValue;

                    if (originalCollection != null) {
                        originalCollection.clear();
                        if (updatedCollection != null) {
                            originalCollection.addAll(updatedCollection);
                        }
                    } else {
                        if (updatedCollection != null) {
                            // Getting as a property should create the collection object.
                            originalCollection = (Collection) PropertyUtils.getProperty(original,
                                    field.getName());
                            originalCollection.addAll(updatedCollection);
                        }
                    }
                } else if (Map.class.isAssignableFrom(field.getDeclaringClass())) {
                    throw new AssertionError("I didn't think we'd be dealing with maps: field "
                            + field.getName() + " on class " + field.getDeclaringClass().getName());
                } else {
                    field.set(original, updatedValue);
                }
            } catch (IllegalAccessException e) {
                logger.error("Cannot access the property {} on the class {}", field.getName(),
                        field.getDeclaringClass().getName());
                fieldMap.remove(field.getName());
            } catch (NoSuchMethodException e) {
                logger.error("There is no getter method for the property {} on the class {}", field.getName(),
                        field.getDeclaringClass().getName());
                fieldMap.remove(field.getName());
            } catch (InvocationTargetException e) {
                logger.error("Error while getting collection property {}", field.getName(),
                        e.getTargetException());
            } catch (ClassCastException e) {
                logger.error("Cannot cast a {} to a Collection.", e.getMessage());
            }
        }

        clazz = clazz.getSuperclass();
    } while (!Object.class.equals(clazz));
}
From source file:org.wso2.carbon.identity.entitlement.policy.finder.CarbonPolicyFinder.java
@Override
public PolicyFinderResult findPolicy(EvaluationCtx context) {

    if (EntitlementEngine.getInstance().getPolicyCache().isInvalidate()) {

        init(this.finder);
        policyReferenceCache.clear();
        EntitlementEngine.getInstance().clearDecisionCache();

        if (log.isDebugEnabled()) {
            int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
            log.debug("Invalidation cache message is received. "
                    + "Re-initialized policy finder module of current node and invalidate decision "
                    + "caching for tenantId : " + tenantId);
        }
    } else {
        Collection<PolicyStatus> policies = EntitlementEngine.getInstance().getPolicyCache()
                .getInvalidatedPolicies();
        if (policies != null) {
            if (policies.size() > 0) {
                synchronized (policies) {
                    boolean isReorder = false;
                    policyReferenceCache.clear();
                    EntitlementEngine.getInstance().clearDecisionCache();

                    for (PolicyStatus policyStatus : policies) {
                        if (EntitlementConstants.PolicyPublish.ACTION_DELETE
                                .equals(policyStatus.getPolicyAction())) {
                            policyCollection.deletePolicy(policyStatus.getPolicyId());
                            policyCollectionOrder.remove(new PolicyDTO(policyStatus.getPolicyId()));
                        } else if (EntitlementConstants.PolicyPublish.ACTION_UPDATE
                                .equals(policyStatus.getPolicyAction())) {
                            AbstractPolicy abstractPolicy = loadPolicy(policyStatus.getPolicyId());
                            policyCollection.addPolicy(abstractPolicy);
                        } else if (EntitlementConstants.PolicyPublish.ACTION_CREATE
                                .equals(policyStatus.getPolicyAction())) {
                            AbstractPolicy abstractPolicy = loadPolicy(policyStatus.getPolicyId());
                            policyCollection.addPolicy(abstractPolicy);
                            isReorder = true;
                        } else if (EntitlementConstants.PolicyPublish.ACTION_ORDER
                                .equals(policyStatus.getPolicyAction())) {
                            int order = getPolicyOrder(policyStatus.getPolicyId());
                            if (order != -1) {
                                PolicyDTO policyDTO = new PolicyDTO(policyStatus.getPolicyId());
                                if (policyCollectionOrder.indexOf(policyDTO) != -1) {
                                    policyCollectionOrder.get(policyCollectionOrder.indexOf(policyDTO))
                                            .setPolicyOrder(order);
                                    isReorder = true;
                                }
                            }
                        }
                    }

                    if (isReorder) {
                        orderPolicyCache();
                    }

                    policies.clear();
                }
            }
        }
    }

    try {
        AbstractPolicy policy = policyCollection.getEffectivePolicy(context);
        if (policy == null) {
            return new PolicyFinderResult();
        } else {
            return new PolicyFinderResult(policy);
        }
    } catch (EntitlementException e) {
        ArrayList<String> code = new ArrayList<String>();
        code.add(Status.STATUS_PROCESSING_ERROR);
        Status status = new Status(code, e.getMessage());
        return new PolicyFinderResult(status);
    }
}
From source file:net.sf.morph.context.support.ContextBaseTestCase.java
public void testKeySet() {

    Set keySet = null;
    Collection all = new ArrayList();

    // Unsupported operations
    // keySet = context.keySet();
    // try {
    //     keySet.add("bop");
    //     fail("Should have thrown UnsupportedOperationException");
    // } catch (UnsupportedOperationException e) {
    //     ; // Expected result
    // }
    // try {
    //     Collection adds = new ArrayList();
    //     adds.add("bop");
    //     keySet.addAll(adds);
    //     fail("Should have thrown UnsupportedOperationException");
    // } catch (UnsupportedOperationException e) {
    //     ; // Expected result
    // }

    // Before-modification checks
    keySet = context.keySet();
    assertEquals(context.size(), keySet.size());
    assertTrue(!keySet.contains("foo"));
    assertTrue(!keySet.contains("bar"));
    assertTrue(!keySet.contains("baz"));
    assertTrue(!keySet.contains("bop"));

    // Add the new elements
    context.put("foo", "foo value");
    context.put("bar", "bar value");
    context.put("baz", "baz value");
    all.add("foo");
    all.add("bar");
    all.add("baz");

    // After-modification checks
    keySet = context.keySet();
    assertEquals(expectedAttributeCount() + 3, keySet.size());
    assertTrue(keySet.contains("foo"));
    assertTrue(keySet.contains("bar"));
    assertTrue(keySet.contains("baz"));
    assertTrue(!keySet.contains("bop"));
    assertTrue(keySet.containsAll(all));

    // Remove a single element via remove()
    // context.remove("bar");
    all.remove("bar");
    keySet = context.keySet();
    assertEquals(expectedAttributeCount() + 3, keySet.size());
    assertTrue(keySet.contains("foo"));
    // assertTrue(!keySet.contains("bar"));
    assertTrue(keySet.contains("baz"));
    assertTrue(!keySet.contains("bop"));
    assertTrue(keySet.containsAll(all));

    // Remove a single element via keySet.remove()
    keySet.remove("baz");
    all.remove("baz");
    keySet = context.keySet();
    assertEquals(expectedAttributeCount() + 3, keySet.size());
    assertTrue(keySet.contains("foo"));
    // assertTrue(!keySet.contains("bar"));
    // assertTrue(!keySet.contains("baz"));
    assertTrue(!keySet.contains("bop"));
    assertTrue(keySet.containsAll(all));

    // Remove all elements via keySet.clear()
    all.clear();
    // assertTrue(!keySet.contains("foo"));
    // assertTrue(!keySet.contains("bar"));
    // assertTrue(!keySet.contains("baz"));
    assertTrue(!keySet.contains("bop"));
    assertTrue(keySet.containsAll(all));

    // Add the new elements #2
    context.put("foo", "foo value");
    context.put("bar", "bar value");
    context.put("baz", "baz value");
    all.add("foo");
    all.add("bar");
    all.add("baz");

    // After-modification checks #2
    keySet = context.keySet();
    assertEquals(expectedAttributeCount() + 3, keySet.size());
    assertTrue(keySet.contains("foo"));
    assertTrue(keySet.contains("bar"));
    assertTrue(keySet.contains("baz"));
    assertTrue(!keySet.contains("bop"));
    assertTrue(keySet.containsAll(all));
}
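The test above drives a Context through its keySet() view. Whether Morph's Context mirrors the standard semantics exactly is up to that class, but on a plain java.util.Map the key-set view is backed by the map, so clearing the view also clears the map. A short demonstration against the JDK only:

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class KeySetClearDemo {
    public static void main(String[] args) {
        Map<String, String> context = new HashMap<>();
        context.put("foo", "foo value");
        context.put("bar", "bar value");

        Set<String> keys = context.keySet(); // a view backed by the map
        keys.clear();                        // removes the mappings from the map itself

        System.out.println(context.isEmpty()); // true
    }
}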
From source file:org.openmrs.module.webservices.rest.web.v1_0.resource.openmrs1_8.ModuleActionResource1_8.java
/**
 * Overriding create directly, because ModuleFactory requires ServletContext to execute any action
 */
@Override
public Object create(SimpleObject post, RequestContext context) throws ResponseException {
    moduleFactoryWrapper.checkPrivilege();

    ModuleAction action = newDelegate();
    setConvertedProperties(action, post, getCreatableProperties(), true);

    String installUri = action.getInstallUri();

    Collection<Module> modules;
    if (action.isAllModules() != null && action.isAllModules()) {
        modules = moduleFactoryWrapper.getLoadedModules();
        action.setModules(new ArrayList<Module>(modules));
    } else {
        modules = action.getModules();
    }

    ServletContext servletContext = getServletContext(context);

    if (modules == null || modules.isEmpty()) {
        throw new IllegalRequestException(
                "Cannot execute action " + action.getAction() + " on empty set of modules.");
    } else {
        if (action.getAction() == Action.INSTALL) {
            if (installUri == null || !ResourceUtils.isUrl(installUri)) {
                throw new IllegalRequestException(
                        "The installUri needs to be a URL for this action to be performed");
            }
        } else {
            if (action.isAllModules() == null || !action.isAllModules()) {
                // ensure all specified modules exist
                // ensure they're not trying to modify the REST module
                for (Module module : modules) {
                    // if they specified a module that's not loaded, it will show up here as null
                    if (module == null) {
                        throw new IllegalRequestException(
                                "One or more of the modules you specified are not loaded on this server");
                    }
                    if (module.getModuleId().equals(RestConstants.MODULE_ID)) {
                        throw new IllegalRequestException("You are not allowed to modify "
                                + module.getModuleId() + " via this REST call");
                    }
                }
            }
            // even if they said allModules=true, don't touch the REST module
            Module restModule = moduleFactoryWrapper.getModuleById(RestConstants.MODULE_ID);
            modules.remove(restModule);
        }

        switch (action.getAction()) {
        case START:
            startModules(modules, servletContext);
            break;
        case STOP:
            stopModules(modules, servletContext, true);
            break;
        case RESTART:
            restartModules(modules, servletContext);
            break;
        case UNLOAD:
            unloadModules(modules, servletContext);
            break;
        case INSTALL:
            Module module = installModule(modules, installUri, servletContext);
            modules.clear();
            modules.add(module);
            action.setModules(new ArrayList<Module>(modules));
            break;
        }
    }

    return ConversionUtil.convertToRepresentation(action, Representation.DEFAULT);
}
From source file:ubic.gemma.core.search.SearchServiceImpl.java
private Collection<SearchResult> characteristicExpressionExperimentSearch(final SearchSettings settings) {

    // this is a collection because of the API for characteristicService; could add findByUri(Class<?>...)
    Collection<Class<?>> classToSearch = new ArrayList<>(1);

    // order matters if we hit the limits
    Queue<Class<?>> orderedClassesToSearch = new LinkedList<>();
    orderedClassesToSearch.add(ExpressionExperiment.class);
    orderedClassesToSearch.add(FactorValue.class);
    orderedClassesToSearch.add(BioMaterial.class);

    Collection<SearchResult> results = new HashSet<>();

    StopWatch watch = new StopWatch();
    watch.start();

    while (results.size() < SearchServiceImpl.SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
            && !orderedClassesToSearch.isEmpty()) {

        classToSearch.clear();
        classToSearch.add(orderedClassesToSearch.poll());

        // We handle the OR clauses here.
        String[] subclauses = settings.getQuery().split(" OR ");
        for (String subclause : subclauses) {
            /*
             * Note that the AND is applied only within one entity type. The fix would be to apply AND at this
             * level.
             */
            Collection<SearchResult> classResults = this.characteristicSearchWithChildren(classToSearch,
                    subclause);
            if (!classResults.isEmpty()) {
                String msg = "Found " + classResults.size() + " "
                        + classToSearch.iterator().next().getSimpleName()
                        + " results from characteristic search.";
                if (results.size() >= SearchServiceImpl.SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS) {
                    msg += " Total found > "
                            + SearchServiceImpl.SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
                            + ", will not search for more entities.";
                }
                SearchServiceImpl.log.info(msg);
            }
            results.addAll(classResults);
        }
    }

    SearchServiceImpl.log.debug("ExpressionExperiment search: " + settings + " -> " + results.size()
            + " characteristic hits " + watch.getTime() + " ms");

    // Note that if we do this earlier (within each query) the limit
    // SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS has more meaning. We would have to unroll the loop above.
    return filterExperimentHitsByTaxon(results, settings.getTaxon());
}