List of usage examples for the java.util.LinkedHashSet constructor LinkedHashSet(Collection)
public LinkedHashSet(Collection<? extends E> c)
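Before the project examples below, a minimal self-contained sketch (the class name and list contents are made up for illustration) of what this copy constructor does: it copies the elements of the given collection into a new set, dropping duplicates and preserving first-insertion iteration order.

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class LinkedHashSetCopyExample {
    public static void main(String[] args) {
        // Source collection with a duplicate element
        List<String> names = Arrays.asList("beta", "alpha", "gamma", "alpha");

        // Copy into a LinkedHashSet: duplicates are dropped and the
        // first-insertion order (beta, alpha, gamma) is preserved
        Set<String> unique = new LinkedHashSet<>(names);

        System.out.println(unique); // prints [beta, alpha, gamma]
    }
}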
From source file:org.openmrs.module.kenyaemr.KenyaEmrUiUtilsTest.java
@Before
public void setup() throws Exception {
    executeDataSet("test-data.xml");
    executeDataSet("test-drugdata.xml");

    InputStream stream = getClass().getClassLoader().getResourceAsStream("test-regimens.xml");
    emr.getRegimenManager().loadDefinitionsFromXML(stream);

    this.ui = new FragmentActionUiUtils(null, null, null);

    DrugOrder aspirin = new DrugOrder();
    aspirin.setConcept(Context.getConceptService().getConcept(71617));
    aspirin.setDose(100.0d);
    aspirin.setUnits("mg");
    aspirin.setFrequency("OD");

    DrugOrder stavudine = new DrugOrder();
    stavudine.setConcept(Context.getConceptService().getConcept(84309));
    stavudine.setDose(30.0d);
    stavudine.setUnits("ml");
    stavudine.setFrequency("BD");

    // Build the regimen from the two drug orders; LinkedHashSet keeps their insertion order
    regimen = new RegimenOrder(new LinkedHashSet<DrugOrder>(Arrays.asList(aspirin, stavudine)));
}
From source file:com.diversityarrays.dal.db.bms.BMS_DalDbProviderService.java
@Override
public Set<Parameter<?>> getParametersRequired() {
    return new LinkedHashSet<Parameter<?>>(Arrays.asList(PARAMETERS));
}
From source file:eu.itesla_project.modules.histo.tools.HistoDbCountAttributesTool.java
@Override
public void run(CommandLine line) throws Exception {
    Interval interval = Interval.parse(line.getOptionValue("interval"));
    HistoDbHorizon horizon = HistoDbHorizon.SN;
    if (line.hasOption("horizon")) {
        horizon = HistoDbHorizon.valueOf(line.getOptionValue("horizon"));
    }
    OfflineConfig config = OfflineConfig.load();
    try (HistoDbClient histoDbClient = config.getHistoDbClientFactoryClass().newInstance().create(true)) {
        // LinkedHashSet keeps the attribute order returned by the client
        Set<HistoDbAttributeId> attributeIds = new LinkedHashSet<>(histoDbClient.listAttributes());
        HistoDbStats stats = histoDbClient.queryStats(attributeIds, interval, horizon, true);
        for (HistoDbAttributeId attributeId : attributeIds) {
            System.out.println(attributeId + ";" + (int) stats.getValue(HistoDbStatsType.COUNT, attributeId, -1));
        }
    }
}
From source file:com.joyent.manta.http.MantaSSLConnectionSocketFactory.java
@Override
protected void prepareSocket(final SSLSocket socket) throws IOException {
    final Set<String> enabledProtocols = new LinkedHashSet<>(Arrays.asList(socket.getEnabledProtocols()));
    final Set<String> enabledCipherSuites = new LinkedHashSet<>(Arrays.asList(socket.getEnabledCipherSuites()));

    if (LOG.isDebugEnabled()) {
        LOG.debug("Enabled TLS protocols: {}", MantaUtils.asString(enabledProtocols));
        LOG.debug("Enabled cipher suites: {}", MantaUtils.asString(enabledCipherSuites));
    }

    supportedCipherSuites.retainAll(enabledCipherSuites);

    if (!supportedCipherSuites.isEmpty()) {
        try {
            String[] supportedCiphers = new String[supportedCipherSuites.size()];
            supportedCipherSuites.toArray(supportedCiphers);
            socket.setEnabledCipherSuites(supportedCiphers);
        } catch (IllegalArgumentException e) {
            String msg = String.format("Unsupported encryption provider. Supported providers: %s",
                    MantaUtils.asString(socket.getEnabledCipherSuites()));
            throw new ConfigurationException(msg, e);
        }
    }

    supportedProtocols.retainAll(enabledProtocols);

    if (!supportedProtocols.isEmpty()) {
        String[] supportedProtos = new String[supportedProtocols.size()];
        supportedProtocols.toArray(supportedProtos);
        socket.setEnabledProtocols(supportedProtos);
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("Supported TLS protocols: {}", MantaUtils.asString(supportedProtocols));
        LOG.debug("Supported cipher suites: {}", MantaUtils.asString(supportedCipherSuites));
    }
}
From source file:org.appcomponents.platform.impl.RootPlatformComponent.java
@Override
public Set<Component> getComponents() {
    return new LinkedHashSet<>(this.components.values());
}
From source file:com.github.jknack.amd4j.Shim.java
/**
 * Creates a new {@link Shim} object.
 *
 * @param exports The exports option.
 * @param dependencies The dependency list.
 * @param init The init function.
 */
public Shim(final String exports, final Set<String> dependencies, final String init) {
    this.exports = notEmpty(exports, "The exports is required.");
    deps = new LinkedHashSet<String>(notNull(dependencies, "The dependencies is required."));
    this.init = notEmpty(init, "The init is required.");
}
From source file:net.sf.maltcms.chromaui.normalization.spi.charts.PeakGroupBoxPlot.java
public List<JFreeChart> createChart() {
    List<JFreeChart> charts = new ArrayList<>();
    LinkedHashSet<ITreatmentGroupDescriptor> treatmentGroups = new LinkedHashSet<>(project.getTreatmentGroups());
    List<CategoryPlot> plots = new LinkedList<>();
    for (IPeakGroupDescriptor pgd : pgdl) {
        LinkedHashMap<ITreatmentGroupDescriptor, HashSet<IPeakAnnotationDescriptor>> map = new LinkedHashMap<>();
        for (ITreatmentGroupDescriptor itgd : treatmentGroups) {
            map.put(itgd, new LinkedHashSet<IPeakAnnotationDescriptor>());
        }
        List<IPeakAnnotationDescriptor> descriptors = pgd.getPeakAnnotationDescriptors();
        DefaultBoxAndWhiskerCategoryDataset baw = new DefaultBoxAndWhiskerCategoryDataset();
        for (IPeakAnnotationDescriptor ipad : descriptors) {
            ITreatmentGroupDescriptor treatmentGroup = ipad.getChromatogramDescriptor().getTreatmentGroup();
            HashSet<IPeakAnnotationDescriptor> descr = map.get(treatmentGroup);
            if (descr == null) {
                descr = new HashSet<>();
                map.put(treatmentGroup, descr);
            }
            descr.add(ipad);
        }
        List<Color> colors = new LinkedList<>();
        for (ITreatmentGroupDescriptor tgd : map.keySet()) {
            String name = getPeakName(pgd);
            baw.add(createBoxAndWhiskerItem(map.get(tgd)), tgd.getName() + " (" + map.get(tgd).size() + ")", name);
            colors.add(tgd.getColor());
        }
        BoxAndWhiskerRenderer renderer = new BoxAndWhiskerRenderer();
        renderer.setFillBox(true);
        renderer.setMeanVisible(false);
        renderer.setMedianVisible(true);
        renderer.setArtifactPaint(new Color(0, 0, 0, 128));
        renderer.setMaximumBarWidth(0.1);
        renderer.setUseOutlinePaintForWhiskers(false);
        // renderer.setAutoPopulateSeriesFillPaint(true);
        // renderer.setAutoPopulateSeriesPaint(true);
        // renderer.setAutoPopulateSeriesOutlinePaint(true);
        CategoryPlot cp = new CategoryPlot(baw, new CategoryAxis("Treatment Groups"),
                new NumberAxis("Normalized Peak Area"), renderer);
        Logger.getLogger(getClass().getName()).log(Level.INFO, "Setting {0} colors!", colors.size());
        ChartCustomizer.setSeriesColors(cp, 0.6f, colors);
        // ChartCustomizer.setSeriesColors(cp, 0.9f, colors);
        plots.add(cp);
        JFreeChart chart = new JFreeChart(cp);
        chart.setTitle("Peak group " + pgd.getDisplayName() + " size: " + pgd.getPeakAnnotationDescriptors().size());
        charts.add(chart);
    }
    // CategoryAxis ca = new CategoryAxis("Treatment Groups");
    // NumberAxis va = new NumberAxis("Normalized Peak Area");
    // CombinedDomainCategoryPlot cdcp = new CombinedDomainCategoryPlot(ca);
    // for (CategoryPlot cp : plots) {
    //     cp.setRangeAxis(va);
    //     cdcp.add(cp);
    //     break;
    // }
    // return new JFreeChart(cdcp);
    return charts;
}
From source file:com.ge.research.semtk.sparqlX.parallel.SparqlParallelQueries.java
@SuppressWarnings("unchecked") public SparqlParallelQueries(String subqueriesJson, String subqueryType, boolean isSubqueryOptional, String columnsToFuseOn, String columnsToReturn) throws Exception { // parse the json array and build the subquery objects as we go gResultTable = null;// w ww.j a v a 2s . com JSONArray subqueries = (JSONArray) (new JSONParser()).parse(subqueriesJson); this.subqueries = new ArrayList<>(subqueries.size()); for (int i = 0; i < subqueries.size(); i++) { JSONObject subquery = (JSONObject) subqueries.get(i); // let the constructor do the heavy lifting here this.subqueries.add(new SparqlSubquery(subquery)); } this.subqueryType = subqueryType; this.isSubqueryOptional = isSubqueryOptional; this.columnsToFuseOn = new LinkedHashSet<>(Arrays.asList(columnsToFuseOn.split(","))); this.columnsToReturn = new LinkedHashSet<>(Arrays.asList(columnsToReturn.split(","))); if (this.subqueries.size() == 0) { // this was completely invalid a call as we have no subqueries to process throw new Exception("subqueries json does not contain any subqueries."); } }
From source file:com.emergya.spring.security.oauth.google.GoogleAccessTokenConverter.java
/**
 * Creates an OAuth2Authentication object from the info received from the OAuth endpoint in a map.
 *
 * @param map the map containing authentication info
 * @return the encapsulated data
 */
@Override
public final OAuth2Authentication extractAuthentication(final Map<String, ?> map) {
    Map<String, String> parameters = new HashMap<>();
    Set<String> scope = parseScopes(map);
    Authentication user = userTokenConverter.extractAuthentication(map);
    String clientId = (String) map.get(CLIENT_ID);
    parameters.put(CLIENT_ID, clientId);
    Set<String> resourceIds;
    if (map.containsKey(AUD)) {
        resourceIds = new LinkedHashSet<>((Collection<String>) map.get(AUD));
    } else {
        resourceIds = new LinkedHashSet<>(Collections.<String>emptySet());
    }
    OAuth2Request request = new OAuth2Request(parameters, clientId, null, true, scope, resourceIds, null, null, null);
    return new OAuth2Authentication(request, user);
}
From source file:grails.plugin.cache.gemfire.GrailsGemfireCacheManager.java
protected Collection<Cache> loadCaches() {
    Assert.notNull(gemfireCache, "a backing GemFire cache is required");
    Assert.isTrue(!gemfireCache.isClosed(), "the GemFire cache is closed; an open instance is required");

    Set<Region<?, ?>> regions = gemfireCache.rootRegions();
    Collection<Cache> caches = new LinkedHashSet<Cache>(regions.size());
    for (Region<?, ?> region : regions) {
        caches.add(new GrailsGemfireCache(region));
    }
    return caches;
}