List of usage examples for java.util HashMap values
public Collection<V> values()
From source file:de.bangl.lm.LotManagerPlugin.java
/**
 * Sends the id of every defined lot group to the given command sender.
 * If no lot groups exist, an informational message is sent instead.
 *
 * @param sender recipient of the listing messages
 */
public void listLotGroups(CommandSender sender) {
    HashMap<String, LotGroup> allGroups = this.lots.getAllLotGroups();
    if (allGroups.isEmpty()) {
        sendInfo(sender, "No lotgroups defined.");
    }
    // Empty map simply produces no per-group messages.
    for (LotGroup group : allGroups.values()) {
        sendInfo(sender, group.getId());
    }
}
From source file:com.datatorrent.stram.engine.StreamingContainer.java
/**
 * Deploys the given operators and their streams into this container.
 * <p>
 * Verifies up front that none of the operator ids are already deployed
 * (reducing the chance of a partial deploy), then creates the nodes,
 * wires up output and input streams, activates the streams, and finally
 * activates the operators.
 *
 * @param nodeList deployment descriptors for the operators to bring up
 * @throws Exception if node or stream deployment fails
 * @throws IllegalStateException if an operator id is already present in this container
 */
private synchronized void deploy(List<OperatorDeployInfo> nodeList) throws Exception {
    // Sanity check before doing any work: refuse duplicate operator ids.
    for (OperatorDeployInfo info : nodeList) {
        if (nodes.containsKey(info.id)) {
            throw new IllegalStateException(
                    "Node with id: " + info.id + " already present in container " + containerId + "!");
        }
    }
    deployNodes(nodeList);

    // Group the declared input streams by source so shared outputs are created once.
    HashMap<String, ArrayList<String>> inputsBySource = new HashMap<String, ArrayList<String>>();
    for (OperatorDeployInfo info : nodeList) {
        groupInputStreams(inputsBySource, info);
    }

    HashMap<String, ComponentContextPair<Stream, StreamContext>> createdStreams = deployOutputStreams(nodeList,
            inputsBySource);
    deployInputStreams(nodeList, createdStreams);

    for (ComponentContextPair<Stream, StreamContext> stream : createdStreams.values()) {
        stream.component.setup(stream.context);
    }
    streams.putAll(createdStreams);

    // Index the descriptors by operator id for activation.
    HashMap<Integer, OperatorDeployInfo> descriptorsById = new HashMap<Integer, OperatorDeployInfo>(
            nodeList.size());
    for (OperatorDeployInfo info : nodeList) {
        descriptorsById.put(info.id, info);
    }
    activate(descriptorsById, createdStreams);
}
From source file:de.unibi.techfak.bibiserv.web.beans.session.AbstractCloudInputBean.java
public void validate_s3_url() { HashMap<Integer, String> validationMessage = awsbean.validate_s3_url_awsbean(s3url_to_object, selectedData); throwFacesMessage(validationMessage, getId() + "_msg_validation"); resetValidated();/*from w w w. j a v a 2s . c o m*/ showPublicObjects = false; loadListMsg = messages.property("de.unibi.techfak.bibiserv.bibimainapp.input.VALIDATINGEND") + ".<br/>"; // this is bucket url if (!selectedData.getBucket().isEmpty() && selectedData.getFile().isEmpty()) { showPublicObjects = true; validationMessage = awsbean.loadS3ObjectListForced(selectedData.getBucket()); if (validationMessage.isEmpty()) { loadListMsg += messages.property("de.unibi.techfak.bibiserv.bibimainapp.input.LISTOBJECTS") + "."; } else { loadListMsg = ""; for (String str : validationMessage.values()) { loadListMsg += str + ".<br/>"; } } itemlist_public_objects = awsbean.getS3ObjectList(selectedData.getBucket()); } }
From source file:edu.cornell.mannlib.vitro.webapp.dao.jena.DataPropertyDaoJena.java
/**
 * Collapses a list of data property statements into their distinct data
 * properties, attaching each statement to its property.  Properties are
 * resolved against the TBox model; statements whose property cannot be
 * found there are silently dropped.  The result is ordered by
 * DataPropertyRanker.
 *
 * @param dataPropertyStmts statements to group; may be null or empty
 * @return ranked list of properties carrying their statements (never null)
 */
private List<DataProperty> getDataPropertyStatements(List<DataPropertyStatement> dataPropertyStmts) {
    if (dataPropertyStmts == null || dataPropertyStmts.isEmpty()) {
        return new ArrayList<DataProperty>();
    }
    HashMap<String, DataProperty> propsByUri = new HashMap<String, DataProperty>();
    for (DataPropertyStatement stmt : dataPropertyStmts) {
        DataProperty existing = propsByUri.get(stmt.getDatapropURI());
        if (existing != null) {
            existing.addDataPropertyStatement(stmt);
            continue;
        }
        // First time this property URI is seen: look it up in the TBox.
        OntModel tbox = getOntModelSelector().getTBoxModel();
        tbox.enterCriticalSection(Lock.READ);
        try {
            OntProperty ontProp = tbox.getOntProperty(stmt.getDatapropURI());
            if (ontProp != null) {
                DataProperty prop = datapropFromOntProperty(ontProp);
                propsByUri.put(prop.getURI(), prop);
                prop.addDataPropertyStatement(stmt);
            }
        } finally {
            tbox.leaveCriticalSection();
        }
    }
    List<DataProperty> ranked = new ArrayList<DataProperty>(propsByUri.values());
    Collections.sort(ranked, new DataPropertyRanker());
    return ranked;
}
From source file:gedi.lfc.LfcComputer.java
public void compute(LineOrientedFile out, GenomicRegionStorage<AlignedReadsData> reads, ReferenceSequenceConversion readConversion, GenomicRegionStorage<Transcript> transcripts, ContrastMapping contrast, Downsampling downsampling, Set<String> restrictToGenes) { if (contrast.getNumMergedConditions() != 2) throw new RuntimeException("Must be binary contrast!"); // mapping to genes HashMap<String, MutableReferenceGenomicRegion<String>> genesToRegon = new HashMap<String, MutableReferenceGenomicRegion<String>>(); transcripts.iterateReferenceGenomicRegions().forEachRemaining(rgr -> { if (restrictToGenes != null && !restrictToGenes.contains(rgr.getData().getGeneId())) return; MutableReferenceGenomicRegion<String> r = genesToRegon.get(rgr.getData().getGeneId()); if (r == null) genesToRegon.put(rgr.getData().getGeneId(), new MutableReferenceGenomicRegion<String>().setReference(rgr.getReference()) .setRegion(rgr.getRegion()).setData(rgr.getData().getGeneId())); else {/* w ww . j a va2 s.co m*/ if (!r.getReference().equals(rgr.getReference())) throw new RuntimeException(rgr.getData().getGeneId() + " is located on multiple chromosomes: " + r.getReference() + ", " + rgr.getReference()); r.setRegion(r.getRegion().union(rgr.getRegion())); } }); // MemoryIntervalTreeStorage<String> genes = new MemoryIntervalTreeStorage<String>(); // for (MutableReferenceGenomicRegion<String> rgr : genesToRegon.values()) // genes.add(rgr.getReference(), rgr.getRegion(), rgr.getData()); double credi = 0.05; try { out.startWriting(); out.writef("Gene\talpha\tbeta\t%.3g credibility\tlog2 fold change\t%.3g credibility\n", 0.5 * credi, 1 - 0.5 * credi); for (MutableReferenceGenomicRegion<String> gene : genesToRegon.values()) { double[] total = new double[contrast.getNumMergedConditions()]; double[] buff = new double[contrast.getNumMergedConditions()]; reads.iterateIntersectingMutableReferenceGenomicRegions(readConversion.apply(gene.getReference()), gene.getRegion().getStart(), 
gene.getRegion().getEnd()).forEachRemaining(rgr -> { // check if there is a matching transcript if (gene.getRegion().contains(rgr.getRegion())) { // compute downsampled and add downsampling.getDownsampled(rgr.getData(), contrast, buff); ArrayUtils.add(total, buff); // System.out.println(Arrays.toString(buff)+"\t"+rgr.getReference()+":"+rgr.getRegion()+"\t"+rgr.getData()); } }); // System.err.println(gene); BetaDistribution beta = new BetaDistribution(total[0] + 1, total[1] + 1); out.writef("%s\t%.1f\t%.1f\t%.4f\t%.4f\t%.4f\n", gene.getData(), total[0] + 1, total[1] + 1, pToLog2Fc(beta.inverseCumulativeProbability(0.5 * credi)), pToLog2Fc((beta.getAlpha() - 1) / (beta.getAlpha() + beta.getBeta() - 2)), pToLog2Fc(beta.inverseCumulativeProbability(1 - 0.5 * credi))); } out.finishWriting(); } catch (IOException e) { } }
From source file:com.comcast.freeflow.core.FreeFlowContainer.java
public LayoutChangeset getViewChanges(HashMap<Object, FreeFlowItem> oldFrames, HashMap<Object, FreeFlowItem> newFrames, boolean moveEvenIfSame) { // cleanupViews(); LayoutChangeset change = new LayoutChangeset(); if (oldFrames == null) { markAdapterDirty = false;// w ww. j av a2s. c om for (FreeFlowItem freeflowItem : newFrames.values()) { change.addToAdded(freeflowItem); } return change; } if (markAdapterDirty) { markAdapterDirty = false; for (FreeFlowItem freeflowItem : newFrames.values()) { change.addToAdded(freeflowItem); } for (FreeFlowItem freeflowItem : oldFrames.values()) { change.addToDeleted(freeflowItem); } return change; } Iterator<?> it = newFrames.entrySet().iterator(); while (it.hasNext()) { Map.Entry<?, ?> m = (Map.Entry<?, ?>) it.next(); FreeFlowItem freeflowItem = (FreeFlowItem) m.getValue(); if (oldFrames.get(m.getKey()) != null) { FreeFlowItem old = oldFrames.remove(m.getKey()); freeflowItem.view = old.view; // if (moveEvenIfSame || !old.compareRect(((FreeFlowItem) // m.getValue()).frame)) { if (moveEvenIfSame || !old.frame.equals(((FreeFlowItem) m.getValue()).frame)) { change.addToMoved(freeflowItem, getActualFrame(freeflowItem)); } } else { change.addToAdded(freeflowItem); } } for (FreeFlowItem freeflowItem : oldFrames.values()) { change.addToDeleted(freeflowItem); } frames = newFrames; return change; }
From source file:gedi.lfc.LfcComputer.java
public void compute(LineOrientedFile out, DiskGenomicNumericProvider coverage, GenomicRegionStorage<Transcript> transcripts, Set<String> restrictToGenes, int readLength) { if (coverage.getNumDataRows() != 2) throw new RuntimeException("Must be binary contrast!"); // mapping to genes HashMap<String, MutableReferenceGenomicRegion<String>> genesToRegon = new HashMap<String, MutableReferenceGenomicRegion<String>>(); transcripts.iterateReferenceGenomicRegions().forEachRemaining(rgr -> { if (restrictToGenes != null && !restrictToGenes.contains(rgr.getData().getGeneId())) return; MutableReferenceGenomicRegion<String> r = genesToRegon.get(rgr.getData().getGeneId()); if (r == null) genesToRegon.put(rgr.getData().getGeneId(), new MutableReferenceGenomicRegion<String>().setReference(rgr.getReference()) .setRegion(rgr.getRegion()).setData(rgr.getData().getGeneId())); else {/*from ww w. ja v a 2 s. c o m*/ if (!r.getReference().equals(rgr.getReference())) throw new RuntimeException(rgr.getData().getGeneId() + " is located on multiple chromosomes: " + r.getReference() + ", " + rgr.getReference()); r.setRegion(r.getRegion().union(rgr.getRegion())); } }); // MemoryIntervalTreeStorage<String> genes = new MemoryIntervalTreeStorage<String>(); // for (MutableReferenceGenomicRegion<String> rgr : genesToRegon.values()) // genes.add(rgr.getReference(), rgr.getRegion(), rgr.getData()); double credi = 0.05; try { out.startWriting(); out.writef("Gene\talpha\tbeta\t%.3g credibility\tlog2 fold change\t%.3g credibility\n", 0.5 * credi, 1 - 0.5 * credi); for (MutableReferenceGenomicRegion<String> gene : genesToRegon.values()) { DoubleArrayList a = new DoubleArrayList(); DoubleArrayList b = new DoubleArrayList(); DoubleArrayList sum = new DoubleArrayList(); for (int i = 0; i < gene.getRegion().getTotalLength(); i++) { int p = gene.getRegion().map(i); double ca = coverage.getValue(gene.getReference(), p, 0); double cb = coverage.getValue(gene.getReference(), p, 1); a.add(ca); b.add(cb); 
sum.add(ca + cb); } sum.sort(); double threshold = sum.getDouble(sum.size() / 2); double asum = 0; double bsum = 0; DoubleArrayList fc = new DoubleArrayList(); for (int i = 0; i < a.size(); i++) { double ca = a.getDouble(i); double cb = b.getDouble(i); double cm = Math.max(ca, cb); if (ca + cb > threshold && cm > 0) { fc.add(ca / cb); ca /= cm; cb /= cm; asum += ca; bsum += cb; } } fc.sort(); double m = Math.log(fc.getDouble(fc.size() / 2)) / Math.log(2); // System.err.println(gene); BetaDistribution beta = new BetaDistribution(asum + 1, bsum + 1); out.writef("%s\t%.1f\t%.1f\t%.4f\t%.4f\t%.4f\n", gene.getData(), asum + 1, bsum + 1, 0.0, //pToLog2Fc(beta.inverseCumulativeProbability(0.5*credi)), m, //pToLog2Fc((beta.getAlpha()-1)/(beta.getAlpha()+beta.getBeta()-2)), 0.0//pToLog2Fc(beta.inverseCumulativeProbability(1-0.5*credi)) ); } out.finishWriting(); } catch (IOException e) { } }
From source file:org.apache.solr.handler.component.QueryComponent.java
private void createRetrieveDocs(ResponseBuilder rb) { // TODO: in a system with nTiers > 2, we could be passed "ids" here // unless those requests always go to the final destination shard // for each shard, collect the documents for that shard. HashMap<String, Collection<ShardDoc>> shardMap = new HashMap<>(); for (ShardDoc sdoc : rb.resultIds.values()) { Collection<ShardDoc> shardDocs = shardMap.get(sdoc.shard); if (shardDocs == null) { shardDocs = new ArrayList<>(); shardMap.put(sdoc.shard, shardDocs); }//from w w w . ja va 2 s . c om shardDocs.add(sdoc); } SchemaField uniqueField = rb.req.getSchema().getUniqueKeyField(); // Now create a request for each shard to retrieve the stored fields for (Collection<ShardDoc> shardDocs : shardMap.values()) { ShardRequest sreq = new ShardRequest(); sreq.purpose = ShardRequest.PURPOSE_GET_FIELDS; sreq.shards = new String[] { shardDocs.iterator().next().shard }; sreq.params = new ModifiableSolrParams(); // add original params sreq.params.add(rb.req.getParams()); // no need for a sort, we already have order sreq.params.remove(CommonParams.SORT); sreq.params.remove(CursorMarkParams.CURSOR_MARK_PARAM); // we already have the field sort values sreq.params.remove(ResponseBuilder.FIELD_SORT_VALUES); if (!rb.rsp.getReturnFields().wantsField(uniqueField.getName())) { sreq.params.add(CommonParams.FL, uniqueField.getName()); } ArrayList<String> ids = new ArrayList<>(shardDocs.size()); for (ShardDoc shardDoc : shardDocs) { // TODO: depending on the type, we may need more tha a simple toString()? ids.add(shardDoc.id.toString()); } sreq.params.add(ShardParams.IDS, StrUtils.join(ids, ',')); rb.addRequest(this, sreq); } }
From source file:com.google.maps.android.utils.demo.HeatmapsPlacesDemoActivity.java
/** * Makes four radar search requests for the given keyword, then parses the * json output and returns the search results as a collection of LatLng objects. * * @param keyword A string to use as a search term for the radar search * @return Returns the search results from radar search as a collection * of LatLng objects./*from w ww . j a v a2 s. c o m*/ */ private Collection<LatLng> getPoints(String keyword) { HashMap<String, LatLng> results = new HashMap<String, LatLng>(); // Calculate four equidistant points around Sydney to use as search centers // so that four searches can be done. ArrayList<LatLng> searchCenters = new ArrayList<LatLng>(4); for (int heading = 45; heading < 360; heading += 90) { searchCenters.add(SphericalUtil.computeOffset(SYDNEY, SEARCH_RADIUS / 2, heading)); } for (int j = 0; j < 4; j++) { String jsonResults = getJsonPlaces(keyword, searchCenters.get(j)); try { // Create a JSON object hierarchy from the results JSONObject jsonObj = new JSONObject(jsonResults); JSONArray pointsJsonArray = jsonObj.getJSONArray("results"); // Extract the Place descriptions from the results for (int i = 0; i < pointsJsonArray.length(); i++) { if (!results.containsKey(pointsJsonArray.getJSONObject(i).getString("id"))) { JSONObject location = pointsJsonArray.getJSONObject(i).getJSONObject("geometry") .getJSONObject("location"); results.put(pointsJsonArray.getJSONObject(i).getString("id"), new LatLng(location.getDouble("lat"), location.getDouble("lng"))); } } } catch (JSONException e) { Toast.makeText(this, "Cannot process JSON results", Toast.LENGTH_SHORT).show(); } } return results.values(); }
From source file:nzilbb.agcsv.AgCsvDeserializer.java
/** * Create annotations from the given CSV rows. * @param lines CSV records./* ww w.j a v a 2 s . c o m*/ * @param layer Layer for the annotations. * @param graph Graph to add the annotations to. * @throws SerializationException On error. */ public void readAnnotations(Vector<CSVRecord> lines, Layer layer, Graph graph) throws SerializationException { // map header columns HashMap<String, Integer> mHeadings = new HashMap<String, Integer>(); for (int c = 0; c < lines.elementAt(1).size(); c++) { String sHeader = lines.elementAt(1).get(c); if (sHeader.equalsIgnoreCase("id")) mHeadings.put("id", c); else if (sHeader.equalsIgnoreCase("startAnchor.id")) mHeadings.put("startAnchor.id", c); else if (sHeader.equalsIgnoreCase("endAnchor.id")) mHeadings.put("endAnchor.id", c); else if (sHeader.equalsIgnoreCase("label")) mHeadings.put("label", c); else if (sHeader.equalsIgnoreCase("labelStatus")) mHeadings.put("labelStatus", c); else if (sHeader.equalsIgnoreCase("turnAnnotationId")) mHeadings.put("turnAnnotationId", c); else if (sHeader.equalsIgnoreCase("ordinalInTurn")) mHeadings.put("ordinalInTurn", c); else if (sHeader.equalsIgnoreCase("wordAnnotationId")) mHeadings.put("wordAnnotationId", c); else if (sHeader.equalsIgnoreCase("ordinalInWord")) mHeadings.put("ordinalInWord", c); else if (sHeader.equalsIgnoreCase("segmentAnnotationId")) mHeadings.put("segmentAnnotationId", c); } // next header int highestHeaderIndex = 0; for (Integer i : mHeadings.values()) highestHeaderIndex = Math.max(highestHeaderIndex, i); mHeadings.put("comment", highestHeaderIndex + 1); for (int i = 2; i < lines.size(); i++) { CSVRecord line = lines.elementAt(i); Annotation annotation = new Annotation(line.get(mHeadings.get("id")), line.get(mHeadings.get("label")), layer.getId(), line.get(mHeadings.get("startAnchor.id")), line.get(mHeadings.get("endAnchor.id"))); annotation.setConfidence(new Integer(line.get(mHeadings.get("labelStatus")))); if (mHeadings.get("comment") < line.size()) { String comment = 
line.get(mHeadings.get("comment")); if (comment.length() > 0) { annotation.put("comment", comment); } } // parent if (layer.getParentId().equals("graph")) { annotation.setParentId(graph.getId()); } else if (layer.getParentId().equals(graph.getSchema().getTurnLayerId())) { if (layer.getId().equals(graph.getSchema().getUtteranceLayerId())) { // make sure turn exists Annotation turn = graph.getAnnotation(line.get(mHeadings.get("turnAnnotationId"))); if (turn == null) { // make sure participant exists Annotation participant = graph.getAnnotation(annotation.getLabel()); if (participant == null) { participant = new Annotation(annotation.getLabel(), annotation.getLabel(), graph.getSchema().getParticipantLayerId()); graph.addAnnotation(participant); } turn = new Annotation(line.get(mHeadings.get("turnAnnotationId")), annotation.getLabel(), graph.getSchema().getTurnLayerId(), // start/end IDs are set, but the anchor's themselves aren't added line.get(mHeadings.get("turnAnnotationId")) + " start", line.get(mHeadings.get("turnAnnotationId")) + " end", participant.getId()); graph.addAnnotation(turn); } // turn isn't there } // utterance layer annotation.setParentId(line.get(mHeadings.get("turnAnnotationId"))); } else if (layer.getParentId().equals(graph.getSchema().getWordLayerId())) { annotation.setParentId(line.get(mHeadings.get("wordAnnotationId"))); } else if (layer.getParentId().equals("segments")) { annotation.setParentId(line.get(mHeadings.get("segmentAnnotationId"))); } else if (layer.getId().equals(graph.getSchema().getTurnLayerId())) { // turn layer // make sure participant exists Annotation participant = graph.getAnnotation(annotation.getLabel()); if (participant == null) { participant = new Annotation(annotation.getLabel(), annotation.getLabel(), graph.getSchema().getParticipantLayerId()); graph.addAnnotation(participant); } annotation.setParentId(participant.getId()); } // ordinal if (layer.getId().equals(graph.getSchema().getWordLayerId())) { 
annotation.setOrdinal(Integer.parseInt(line.get(mHeadings.get("ordinalInTurn")))); } else if (layer.getId().equals("segments")) { annotation.setOrdinal(Integer.parseInt(line.get(mHeadings.get("ordinalInWord")))); } graph.addAnnotation(annotation); } }