List of usage examples for java.util LinkedHashMap get
public V get(Object key)
From source file:org.karndo.graphs.CustomChartFactory.java
/** * Creates a chart of the selected PiracyEvent data graphed by event * status. Presently uses a very basic method of graphing this data by * using the static CreateBarChart3D method available in class * org.jfree.chart.ChartFactory. /* ww w . java 2 s. c o m*/ * * @param data the selected PiracyEvent data to graph. * @return A JFreeChart object representing a graph of the selected * PiracyEvent data against event status. */ public JFreeChart createHistogramStatus(LinkedList<PiracyEvent> data) { //the data to plot DefaultCategoryDataset dataset = new DefaultCategoryDataset(); LinkedHashMap<String, MutableInt> freqs_cats = new LinkedHashMap<String, MutableInt>(); for (PiracyEvent ev : data) { if (!freqs_cats.containsKey(ev.getStatus())) { freqs_cats.put(ev.getStatus(), new MutableInt(1)); } else { freqs_cats.get(ev.getStatus()).increment(); } } Iterator itr = freqs_cats.keySet().iterator(); while (itr.hasNext()) { String category = (String) itr.next(); Integer i1 = freqs_cats.get(category).getValue(); dataset.addValue(i1, "Piracy Incidents", category); } JFreeChart chart = ChartFactory.createBarChart3D("Piracy Incidents " + "by event status", "Event Status", "Frequency", dataset, PlotOrientation.VERTICAL, false, true, false); return chart; }
From source file:org.karndo.graphs.CustomChartFactory.java
/** * Creates a chart of the selected PiracyEvent data graphed by vessel * type. Presently uses a very basic method of graphing this data by * using the static CreateBarChart3D method available in class * org.jfree.chart.ChartFactory. //from w w w . j a v a2 s. c om * * @param data the selected PiracyEvent data to graph. * @return A JFreeChart object representing a graph of the selected * PiracyEvent data against vessel type. */ public JFreeChart createHistogramVesselType(LinkedList<PiracyEvent> data) { //the data to plot DefaultCategoryDataset dataset = new DefaultCategoryDataset(); LinkedHashMap<String, MutableInt> freqs_cats = new LinkedHashMap<String, MutableInt>(); for (PiracyEvent ev : data) { if (!freqs_cats.containsKey(ev.getVesselType())) { freqs_cats.put(ev.getVesselType(), new MutableInt(1)); } else { freqs_cats.get(ev.getVesselType()).increment(); } } Iterator itr = freqs_cats.keySet().iterator(); while (itr.hasNext()) { String category = (String) itr.next(); Integer i1 = freqs_cats.get(category).getValue(); dataset.addValue(i1, "Piracy Incidents", category); } JFreeChart chart = ChartFactory.createBarChart3D("Piracy Incidents " + "by vessel type", "Vessel Type", "Frequency", dataset, PlotOrientation.VERTICAL, false, true, false); return chart; }
From source file:com.opengamma.analytics.financial.interestrate.capletstripping.CapletStrippingJacobian.java
/**
 * Computes the Jacobian of the cap prices (scaled by cap vega) with respect to
 * the model-parameter curve nodes in {@code x}.
 *
 * For each cap: per-caplet SABR vol sensitivities are obtained via the
 * volatility adjoint, multiplied by the caplet's Black vega, then projected
 * onto the interpolator node sensitivities of each parameter curve and summed.
 * Each Jacobian row is finally normalised by the cap's total vega.
 *
 * @param x the stacked curve-node values for all model parameter curves
 * @return an nCaps-by-m Jacobian matrix, where m = x.getNumberOfElements()
 */
@Override
public DoubleMatrix2D evaluate(final DoubleMatrix1D x) {
    final LinkedHashMap<String, Interpolator1DDataBundle> db = _dataBundleBuilder.evaluate(x); //TODO merge these - they do the same work!
    final LinkedHashMap<String, InterpolatedDoublesCurve> curves = _curveBuilder.evaluate(x);
    // set any known (i.e. fixed) curves
    if (_knownParameterTermSturctures != null) {
        curves.putAll(_knownParameterTermSturctures);
    }
    //TODO make this general - not SABR specific
    final Curve<Double, Double> cAlpha = curves.get(ALPHA);
    final Curve<Double, Double> cBeta = curves.get(BETA);
    final Curve<Double, Double> cRho = curves.get(RHO);
    final Curve<Double, Double> cNu = curves.get(NU);
    final VolatilityModel1D volModel = new SABRTermStructureParameters(cAlpha, cBeta, cRho, cNu);

    final int nCaps = _capPricers.size();
    final int m = x.getNumberOfElements();
    final double[][] jac = new double[nCaps][m];
    double f, k, t; // forward, strike, expiry of the current caplet
    for (int i = 0; i < nCaps; i++) { //outer loop over caps
        final CapFloorPricer capPricer = _capPricers.get(i);
        final double vega = capPricer.vega(volModel);
        final int nCaplets = capPricer.getNumberCaplets();
        final double[][] greeks = new double[nCaplets][]; //the sensitivity of vol to the model parameters
        final double[] capletVega = new double[nCaplets];
        final double[] capletExpiries = capPricer.getExpiries();
        final double[] capletFwds = capPricer.getForwards();
        final double[] capletDF = capPricer.getDiscountFactors();
        final double[][][] nodeSens = new double[_parameterNames.size()][nCaplets][]; //Sensitivity to the nodes of a particular curve at a particular time
        k = capPricer.getStrike();

        //TODO There will be much repeated calculations here, as many of the caplets are shared across caps
        for (int tIndex = 0; tIndex < nCaplets; tIndex++) {
            f = capletFwds[tIndex];
            t = capletExpiries[tIndex];
            final EuropeanVanillaOption option = new EuropeanVanillaOption(k, t, true);
            //TODO again this is SABR specific
            final SABRFormulaData data = new SABRFormulaData(cAlpha.getYValue(t), cBeta.getYValue(t),
                    cRho.getYValue(t), cNu.getYValue(t));
            greeks[tIndex] = SABR.getVolatilityAdjoint(option, f, data); //2nd and 3rd entries are forward & strike sensitivity which we don't use
            // Discounted Black vega of the caplet at its adjoint-implied vol (greeks[tIndex][0]).
            capletVega[tIndex] = capletDF[tIndex] * BlackFormulaRepository.vega(f, k, t, greeks[tIndex][0]);
            int parmIndex = 0;
            for (final String name : _parameterNames) {
                final Interpolator1D itrp = _interpolators.get(name);
                nodeSens[parmIndex++][tIndex] = itrp.getNodeSensitivitiesForValue(db.get(name), t);
            }
        }

        // Chain rule: accumulate (caplet vega) x (vol sensitivity to parameter)
        // x (parameter sensitivity to curve node) into a flat node-index vector.
        final double[] res = new double[x.getNumberOfElements()];
        for (int tIndex = 0; tIndex < nCaplets; tIndex++) {
            int index = 0;
            for (int parmIndex = 0; parmIndex < _parameterNames.size(); parmIndex++) {
                final double temp = capletVega[tIndex] * greeks[tIndex][parmIndex + 3]; //1st 3 are vol, dForward & dStrike
                final double[] ns = nodeSens[parmIndex][tIndex];
                for (final double element : ns) {
                    res[index] += element * temp;
                    index++;
                }
            }
        }
        // Normalise the row by the cap's total vega.
        for (int j = 0; j < res.length; j++) {
            jac[i][j] = res[j] / vega;
        }
    }
    return new DoubleMatrix2D(jac);
}
From source file:hydrograph.ui.propertywindow.propertydialog.PropertyDialogBuilder.java
private void addGroupsInTab(ScrolledCompositeHolder scrolledCompositeHolder, LinkedHashMap<String, ArrayList<Property>> subgroupTree) { for (String subgroupName : subgroupTree.keySet()) { Property property = subgroupTree.get(subgroupName).get(0); AbstractELTContainerWidget subGroupContainer = getGroupWidgetContainer(scrolledCompositeHolder, subgroupName, property); addCustomWidgetsToGroupWidget(subgroupTree, subgroupName, subGroupContainer); }// ww w . j a va 2 s . co m }
From source file:org.fourthline.cling.bridge.link.LinkManager.java
/**
 * Registers the endpoint resource with the local registry, notifies the remote
 * endpoint via HTTP GET, and — if the remote replies 201 Created with a device
 * list body — fires listener callbacks and creates proxy local devices for
 * each remote device. On any failure the resource is deregistered again.
 *
 * NOTE(review): parameter name "progess" is a typo for "progress" (kept as-is).
 *
 * @param resource the endpoint resource to register and announce remotely.
 * @param progess optional progress callback; may be null. If it reports
 *                aborted, processing stops and the link is rolled back.
 * @return true when registration and remote notification succeeded.
 */
synchronized public boolean registerAndGet(final EndpointResource resource, RegisterAndGetProgress progess) {
    getUpnpService().getRegistry().addResource(resource);
    boolean failed = false;
    String requestURL = resource.getRemoteEndpointURL().toString();
    String body = null;
    try {
        HttpGet request = new HttpGet(requestURL);
        body = getUpnpService().getConfiguration().getHttpClient().execute(request,
                new ResponseHandler<String>() {
                    @Override
                    public String handleResponse(HttpResponse response) throws HttpResponseException, IOException {
                        StatusLine statusLine = response.getStatusLine();
                        int responseCode = statusLine.getStatusCode();
                        // Only 200 (already linked) and 201 (link created) are acceptable.
                        if (responseCode != HttpStatus.SC_OK && responseCode != HttpStatus.SC_CREATED) {
                            //log.info("Remote '" + resource.getModel() + "' notification failed: " + responseCode);
                            throw new HttpResponseException(statusLine.getStatusCode(),
                                    statusLine.getReasonPhrase());
                        } else if (responseCode == HttpStatus.SC_CREATED) {
                            HttpEntity entity = response.getEntity();
                            return entity == null ? null : EntityUtils.toString(entity);
                        }
                        return null; // link already created: HttpStatus.SC_OK
                    }
                });
    } catch (Exception e) {
        log.info("Remote '" + resource.getModel() + "' notification failed: " + Exceptions.unwrap(e));
        log.info(Exceptions.unwrap(e).toString());
        e.printStackTrace();
        failed = true;
    }
    // A non-null body means the remote answered 201 and returned its device list.
    if (body != null) {
        log.info("New link created with local origin: " + resource.getModel());
        // Notify listeners asynchronously on the registry listener executor.
        for (final LinkManagementListener listener : listeners) {
            getUpnpService().getConfiguration().getRegistryListenerExecutor().execute(new Runnable() {
                public void run() {
                    listener.endpointRegistered(resource.getModel());
                }
            });
        }
        log.info(body);
        // NOTE(review): raw LinkedHashMap/Vector<HashMap> — presumably the JSON
        // body has shape {"devices":[{"friendlyName":...,"udn":...},...]} — TODO confirm.
        LinkedHashMap container = JsonScripts.parseJsonScript(body);
        if (container == null) {
            failed = true;
        } else {
            Vector<HashMap> devices = (Vector) container.get("devices");
            for (HashMap device : devices) {
                String friendlyName = (String) device.get("friendlyName");
                String udn = (String) device.get("udn");
                if (progess != null) {
                    if (progess.isAborted()) {
                        log.warning("registerAndGet aborted");
                        failed = true;
                        break;
                    }
                    progess.onLoadNewDevice(friendlyName);
                }
                addProxyLocalDevice(resource, udn);
            }
        }
    }
    // Roll back the local registration on any failure path.
    if (failed) {
        deregister(resource);
    }
    return !failed;
}
From source file:com.datatorrent.stram.engine.OperatorThread.java
private void setupNode(OperatorDeployInfo ndi) { //failedNodes.remove(ndi.id); //final Node<?> node = nodes.get(ndi.id); node.setup(node.context);//from ww w .j a v a2 s.c o m /* setup context for all the input ports */ LinkedHashMap<String, PortContextPair<InputPort<?>>> inputPorts = node .getPortMappingDescriptor().inputPorts; LinkedHashMap<String, Operators.PortContextPair<InputPort<?>>> newInputPorts = new LinkedHashMap<>( inputPorts.size()); for (OperatorDeployInfo.InputDeployInfo idi : ndi.inputs) { InputPort<?> port = inputPorts.get(idi.portName).component; PortContext context = new PortContext(idi.contextAttributes, node.context); newInputPorts.put(idi.portName, new PortContextPair<>(port, context)); port.setup(context); } inputPorts.putAll(newInputPorts); /* setup context for all the output ports */ LinkedHashMap<String, PortContextPair<OutputPort<?>>> outputPorts = node .getPortMappingDescriptor().outputPorts; LinkedHashMap<String, PortContextPair<OutputPort<?>>> newOutputPorts = new LinkedHashMap<>( outputPorts.size()); for (OperatorDeployInfo.OutputDeployInfo odi : ndi.outputs) { OutputPort<?> port = outputPorts.get(odi.portName).component; PortContext context = new PortContext(odi.contextAttributes, node.context); newOutputPorts.put(odi.portName, new PortContextPair<>(port, context)); port.setup(context); } outputPorts.putAll(newOutputPorts); /* This introduces need for synchronization on processNodeRequest which was solved by adding deleted field in StramToNodeRequest */ cContext.processNodeRequests(false); node.activate(); cContext.publish(new ContainerEvent.NodeActivationEvent(node)); }
From source file:eu.project.ttc.test.unit.io.JsonTermIndexIOSpec.java
/**
 * Round-trips the term index through JsonTermIndexIO.save (with contexts and
 * occurrences enabled) and verifies every top-level section of the produced
 * JSON: metadata, input_sources, words, terms (incl. occurrences and context
 * co-occurrences) and variations.
 */
@Test
public void testExportTermIndexToJsonWithOccurrencesAndContext() throws IOException {
    StringWriter writer = new StringWriter();
    JsonTermIndexIO.save(writer, termIndex, new JsonOptions().withContexts(true).withOccurrences(true));
    ObjectMapper mapper = new ObjectMapper();
    // System.out.println(writer.toString());
    // Parse the exported JSON back into a generic map for structural assertions.
    Map<String, Object> map = mapper.readValue(writer.toString(),
            new TypeReference<HashMap<String, Object>>() {
            });
    assertThat(map.keySet()).hasSize(5).containsOnly("metadata", "words", "terms", "variations",
            "input_sources");

    // test metadata
    Map<String, String> metadata = (LinkedHashMap<String, String>) map.get("metadata");
    assertThat(metadata).containsOnlyKeys("name", "corpus-id", "wordsNum", "spottedTermsNum", "lang",
            "occurrence_storage");

    // test input sources1
    @SuppressWarnings("unchecked")
    Map<String, String> inputSources = (LinkedHashMap<String, String>) map.get("input_sources");
    assertThat(inputSources).containsOnlyKeys("1", "2", "3");
    assertThat(inputSources.values()).containsOnly("source1", "source2", "source3");

    // test words
    List<?> wordList = (List<?>) map.get("words");
    assertThat(wordList).hasSize(3).extracting("lemma").containsOnly("word1", "word2", "word3");
    // Locate the "word3" entry to check its stem, compound type and components.
    LinkedHashMap<?, ?> w3 = null;
    for (Object wl : wordList) {
        if (((LinkedHashMap<?, ?>) wl).get("lemma").equals("word3"))
            w3 = (LinkedHashMap<?, ?>) wl;
    }
    assertEquals("word3", w3.get("lemma"));
    assertEquals("stem3", w3.get("stem"));
    assertEquals("NATIVE", w3.get("compound_type"));
    List<?> components = (List<?>) w3.get("components");
    assertThat(components).hasSize(2).extracting("lemma", "begin", "end").contains(tuple("wop", 0, 2),
            tuple("rd3", 2, 5));

    // test terms
    // BiMap gives the inverse source-name -> source-id lookup used below.
    BiMap<String, String> sources = HashBiMap.create(inputSources);
    List<?> termList = (List<?>) map.get("terms");
    assertThat(termList).hasSize(2).extracting("id").containsOnly(term1.getId(), term2.getId());
    LinkedHashMap<?, ?> t1 = (LinkedHashMap<?, ?>) termList.get(0);
    assertThat(t1.get("rank")).isEqualTo(1);
    assertThat(t1.get("spec")).isEqualTo(1.1);
    assertThat((List<?>) t1.get("words")).extracting("lemma", "syn").containsOnly(tuple("word1", "L1"),
            tuple("word2", "L2"));
    assertThat((List<?>) t1.get("occurrences")).hasSize(2).extracting("begin", "end", "file", "text")
            .containsOnly(tuple(10, 12, Integer.parseInt(sources.inverse().get("source2")), "coveredText 3"),
                    tuple(20, 30, Integer.parseInt(sources.inverse().get("source3")), "coveredText 4"));

    // Context section of term 1: total co-occurrence count plus the co-occurrence list.
    final Map<?, ?> t1Ctxt = (Map<?, ?>) t1.get("context");
    assertEquals(21, t1Ctxt.get("total_cooccs"));
    assertThat((List<?>) t1Ctxt.get("cooccs")).hasSize(1).extracting("co_term", "cnt", "assoc_rate")
            .contains(tuple("l1l2l3: word1 word2 word3", 21, 2.0d));

    LinkedHashMap<?, ?> t2 = (LinkedHashMap<?, ?>) termList.get(1);
    assertThat((List<?>) t2.get("occurrences")).hasSize(3).extracting("begin", "end", "file", "text")
            .containsOnly(tuple(0, 2, Integer.parseInt(sources.inverse().get("source2")), "coveredText 1"),
                    tuple(10, 12, Integer.parseInt(sources.inverse().get("source1")), "coveredText 2"),
                    tuple(14, 20, Integer.parseInt(sources.inverse().get("source2")), "coveredText 2"));
    assertThat((List<?>) t2.get("words")).extracting("lemma", "syn").containsOnly(tuple("word1", "L1"),
            tuple("word2", "L2"), tuple("word3", "L3"));

    // test syntactic variants
    List<?> variantList = (List<?>) map.get("variations");
    assertThat(variantList).hasSize(2).extracting("base", "variant", "info", "type").contains(
            tuple(term1.getGroupingKey(), term2.getGroupingKey(), "variationRule1", "syn"),
            tuple(term1.getGroupingKey(), term2.getGroupingKey(), "0.956", "graph"));
}
From source file:com.johan.vertretungsplan.parser.UntisMonitorParser.java
public Vertretungsplan getVertretungsplan() throws IOException, JSONException { new LoginHandler(schule).handleLogin(executor, cookieStore, username, password); // JSONArray urls = schule.getData().getJSONArray("urls"); String encoding = schule.getData().getString("encoding"); List<Document> docs = new ArrayList<Document>(); for (int i = 0; i < urls.length(); i++) { JSONObject url = urls.getJSONObject(i); loadUrl(url.getString("url"), encoding, url.getBoolean("following"), docs); }//from w w w . jav a 2 s . c om LinkedHashMap<String, VertretungsplanTag> tage = new LinkedHashMap<String, VertretungsplanTag>(); for (Document doc : docs) { if (doc.title().contains("Untis")) { VertretungsplanTag tag = parseMonitorVertretungsplanTag(doc, schule.getData()); if (!tage.containsKey(tag.getDatum())) { tage.put(tag.getDatum(), tag); } else { VertretungsplanTag tagToMerge = tage.get(tag.getDatum()); tagToMerge.merge(tag); tage.put(tag.getDatum(), tagToMerge); } } else { //Fehler } } Vertretungsplan v = new Vertretungsplan(); v.setTage(new ArrayList<VertretungsplanTag>(tage.values())); return v; }
From source file:com.okta.tools.awscli.java
public static void selectFactor(AuthResult authResult) { ArrayList<LinkedHashMap> factors = (ArrayList<LinkedHashMap>) authResult.getEmbedded().get("factors"); String factorType;/*from w w w.ja va2 s.c o m*/ System.out.println("\nMulti-Factor authentication required. Please select a factor to use."); //list factor to select from to user System.out.println("Factors:"); for (int i = 0; i < factors.size(); i++) { LinkedHashMap<String, Object> factor = factors.get(i); //Factor factor = factors.get(i); factorType = (String) factor.get("factorType");// factor.getFactorType(); if (factorType.equals("question")) { factorType = "Security Question"; } else if (factorType.equals("sms")) { factorType = "SMS Authentication"; } else if (factorType.equals("token:software:totp")) { String provider = (String) factor.get("provider");//factor.getProvider(); if (provider.equals("GOOGLE")) { factorType = "Google Authenticator"; } else { factorType = "Okta Verify"; } } System.out.println("[ " + (i + 1) + " ] :" + factorType); } //Handles user factor selection int selection = numSelection(factors.size()); //return factors.get(selection); }
From source file:com.joyfulmongo.db.JFMongoCmdQuery.java
private void linkIncludeObjectIdToParentObject(JFMongoObject parentObject, Map<String, LinkedHashMap<String, List<JFMongoObject>>> pointerMap, Map<String, String> includeKeyToPointerColnameMap) { for (String includeKey : includeFields) { if (includeKey.length() > 0) { ContainerObjectPointer[] pointers = parentObject.getPointer(includeKey); for (ContainerObjectPointer pointer : pointers) { String pointerObjectId = pointer.getObjectId(); LinkedHashMap<String, List<JFMongoObject>> pointerObjectIdToParentObjectMap = pointerMap .get(includeKey); List<JFMongoObject> parentObjects = pointerObjectIdToParentObjectMap.get(pointerObjectId); if (parentObjects == null) { parentObjects = new ArrayList<JFMongoObject>(1); pointerObjectIdToParentObjectMap.put(pointerObjectId, parentObjects); }//from ww w . j a v a 2s. co m parentObjects.add(parentObject); includeKeyToPointerColnameMap.put(includeKey, pointer.getClassName()); } } } }