List of usage examples for java.util.List.set(int, E)
E set(int index, E element);
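Before the project examples below, here is a minimal, self-contained sketch of what set(int, E) does: it overwrites the element at the given index and returns the element that was previously there. This snippet is illustrative only (the class name ListSetDemo is invented for the demo) and is not taken from any of the source files listed below.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ListSetDemo {
    public static void main(String[] args) {
        // Wrap in ArrayList so the list is resizable and mutable.
        List<String> colors = new ArrayList<>(Arrays.asList("red", "green", "blue"));

        // set(int, E) replaces the element at the index and returns the old value.
        String previous = colors.set(1, "yellow");

        System.out.println(previous); // prints "green"
        System.out.println(colors);   // prints "[red, yellow, blue]"

        // set() never changes the list's size; an out-of-range index
        // throws IndexOutOfBoundsException.
    }
}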
From source file:org.openmrs.module.pmtct.web.view.chart.HivStatusPieChartView.java
@SuppressWarnings("static-access") @Override//from ww w. ja v a 2 s . c o m protected JFreeChart createChart(Map<String, Object> model, HttpServletRequest request) { UserContext userContext = Context.getUserContext(); ApplicationContext appContext = ContextProvider.getApplicationContext(); PmtctService pmtct = Context.getService(PmtctService.class); DefaultPieDataset pieDataset = new DefaultPieDataset(); List<Object> objects = null; PMTCTModuleTag tag = new PMTCTModuleTag(); List<String> hivOptions = new ArrayList<String>(); List<Integer> hivOptionValues = new ArrayList<Integer>(); Collection<ConceptAnswer> answers = Context.getConceptService() .getConcept(PMTCTConstants.RESULT_OF_HIV_TEST).getAnswers(); try { objects = pmtct.getCurrentPatientsInPmtct(); for (ConceptAnswer str : answers) { hivOptions.add(str.getAnswerConcept().getName().getName()); hivOptionValues.add(0); } hivOptions.add("Others"); hivOptionValues.add(0); for (Object ob : objects) { int patientId = (Integer) ((Object[]) ob)[0]; String patientHivStatus = tag.lastObsValueByConceptId(patientId, PMTCTConstants.RESULT_OF_HIV_TEST); int i = 0; boolean found = false; for (String s : hivOptions) { if ((s.compareToIgnoreCase(patientHivStatus)) == 0) { hivOptionValues.set(i, hivOptionValues.get(i) + 1); found = true; } i++; } if (!found) { hivOptionValues.set(hivOptionValues.size() - 1, hivOptionValues.get(hivOptionValues.size() - 1) + 1); } } int i = 0; for (String s : hivOptions) { if (hivOptionValues.get(i) > 0) { Float percentage = new Float(100 * hivOptionValues.get(i) / objects.size()); pieDataset.setValue(s + " (" + hivOptionValues.get(i) + " , " + percentage + "%)", percentage); } i++; } } catch (Exception e) { e.printStackTrace(); } String title = appContext.getMessage("pmtct.menu.patientInPmtct", null, userContext.getLocale()); JFreeChart chart = ChartFactory.createPieChart(title + " : " + Context.getConceptService() .getConcept(PMTCTConstants.HIV_STATUS).getPreferredName(userContext.getLocale()), pieDataset, true, true, false); return chart; }
From source file:com.slidespeech.server.service.TextToSpeechService.java
private static String createXML4Cereproc(String fileName, String speakernotes) throws IOException {
    List<String> voices = new ArrayList<String>();
    try {
        Document doc = Jsoup.parse(speakernotes, "");
        doc.outputSettings().prettyPrint(false);
        Elements voiceNodes = doc.select("voice");
        for (Element voiceNode : voiceNodes) {
            String lang = (voiceNode.hasAttr("xml:lang") && !voiceNode.attr("xml:lang").equals(""))
                    ? voiceNode.attr("xml:lang") : "en";
            String gender = (voiceNode.hasAttr("gender") && !voiceNode.attr("gender").equals(""))
                    ? voiceNode.attr("gender") : "female";
            String voiceName = (voiceNode.hasAttr("name") && !voiceNode.attr("name").equals(""))
                    ? voiceNode.attr("name") : "";
            // Voice name not set by user -> choose one depending on language and gender.
            if (voiceName.equals("")) {
                voiceName = "isabella"; // default
                if (lang.equalsIgnoreCase("en") && gender.equalsIgnoreCase("male"))
                    voiceName = "william";
                if (lang.equalsIgnoreCase("de"))
                    voiceName = "alex";
                voiceNode.attr("name", voiceName);
            }
            if (!voices.contains(voiceName)) {
                voices.add(voiceName);
            }
        }
        BufferedWriter out = new BufferedWriter(new FileWriter(fileName));
        out.write(doc.select("body").first().html());
        out.close();
        // Replace each voice name in place with the path to its CereProc voice file.
        for (int i = 0; i < voices.size(); i++) {
            if (voices.get(i).equals("william"))
                voices.set(i, "/opt/cereproc/cerevoice_william_3.0.5_22k.voice");
            if (voices.get(i).equals("isabella"))
                voices.set(i, "/opt/cereproc/cerevoice_isabella_3.0.3_22k.voice");
            if (voices.get(i).equals("alex"))
                voices.set(i, "/opt/cereproc/cerevoice_alex_3.0.0_beta_22k.voice");
        }
    } catch (Exception e) {
        // Fallback if SSML parsing fails.
        Writer out = new OutputStreamWriter(new FileOutputStream(fileName));
        try {
            out.write(speakernotes);
        } finally {
            out.close();
        }
        voices.add("ssml parsing failed");
    }
    return StringUtils.join(voices, ",");
}
From source file:org.optaplanner.benchmark.impl.statistic.single.constraintmatchtotalbestscore.ConstraintMatchTotalBestScoreSingleStatistic.java
@Override
public void writeGraphFiles(BenchmarkReport benchmarkReport) {
    List<Map<String, XYSeries>> constraintIdToWeightSeriesMapList = new ArrayList<Map<String, XYSeries>>(
            BenchmarkReport.CHARTED_SCORE_LEVEL_SIZE);
    for (ConstraintMatchTotalBestScoreStatisticPoint point : getPointList()) {
        int scoreLevel = point.getScoreLevel();
        if (scoreLevel >= BenchmarkReport.CHARTED_SCORE_LEVEL_SIZE) {
            continue;
        }
        while (scoreLevel >= constraintIdToWeightSeriesMapList.size()) {
            constraintIdToWeightSeriesMapList.add(new LinkedHashMap<String, XYSeries>());
        }
        Map<String, XYSeries> constraintIdToWeightSeriesMap = constraintIdToWeightSeriesMapList.get(scoreLevel);
        if (constraintIdToWeightSeriesMap == null) {
            constraintIdToWeightSeriesMap = new LinkedHashMap<String, XYSeries>();
            constraintIdToWeightSeriesMapList.set(scoreLevel, constraintIdToWeightSeriesMap);
        }
        String constraintId = point.getConstraintPackage() + ":" + point.getConstraintName();
        XYSeries weightSeries = constraintIdToWeightSeriesMap.get(constraintId);
        if (weightSeries == null) {
            weightSeries = new XYSeries(point.getConstraintName() + " weight");
            constraintIdToWeightSeriesMap.put(constraintId, weightSeries);
        }
        long timeMillisSpent = point.getTimeMillisSpent();
        weightSeries.add(timeMillisSpent, point.getWeightTotal());
    }
    graphFileList = new ArrayList<File>(constraintIdToWeightSeriesMapList.size());
    for (int scoreLevelIndex = 0; scoreLevelIndex < constraintIdToWeightSeriesMapList.size(); scoreLevelIndex++) {
        XYPlot plot = createPlot(benchmarkReport, scoreLevelIndex);
        // No direct ascending lines between 2 points, but a stepping line instead
        XYItemRenderer renderer = new XYStepRenderer();
        plot.setRenderer(renderer);
        XYSeriesCollection seriesCollection = new XYSeriesCollection();
        for (XYSeries series : constraintIdToWeightSeriesMapList.get(scoreLevelIndex).values()) {
            seriesCollection.addSeries(series);
        }
        plot.setDataset(seriesCollection);
        JFreeChart chart = new JFreeChart(singleBenchmarkResult.getName()
                + " constraint match total best score diff level " + scoreLevelIndex + " statistic",
                JFreeChart.DEFAULT_TITLE_FONT, plot, true);
        graphFileList.add(
                writeChartToImageFile(chart, "ConstraintMatchTotalBestScoreStatisticLevel" + scoreLevelIndex));
    }
}
From source file:org.optaplanner.benchmark.impl.statistic.subsingle.constraintmatchtotalbestscore.ConstraintMatchTotalBestScoreSubSingleStatistic.java
@Override
public void writeGraphFiles(BenchmarkReport benchmarkReport) {
    List<Map<String, XYSeries>> constraintIdToWeightSeriesMapList = new ArrayList<Map<String, XYSeries>>(
            BenchmarkReport.CHARTED_SCORE_LEVEL_SIZE);
    for (ConstraintMatchTotalBestScoreStatisticPoint point : getPointList()) {
        int scoreLevel = point.getScoreLevel();
        if (scoreLevel >= BenchmarkReport.CHARTED_SCORE_LEVEL_SIZE) {
            continue;
        }
        while (scoreLevel >= constraintIdToWeightSeriesMapList.size()) {
            constraintIdToWeightSeriesMapList.add(new LinkedHashMap<String, XYSeries>());
        }
        Map<String, XYSeries> constraintIdToWeightSeriesMap = constraintIdToWeightSeriesMapList.get(scoreLevel);
        if (constraintIdToWeightSeriesMap == null) {
            constraintIdToWeightSeriesMap = new LinkedHashMap<String, XYSeries>();
            constraintIdToWeightSeriesMapList.set(scoreLevel, constraintIdToWeightSeriesMap);
        }
        String constraintId = point.getConstraintPackage() + ":" + point.getConstraintName();
        XYSeries weightSeries = constraintIdToWeightSeriesMap.get(constraintId);
        if (weightSeries == null) {
            weightSeries = new XYSeries(point.getConstraintName() + " weight");
            constraintIdToWeightSeriesMap.put(constraintId, weightSeries);
        }
        long timeMillisSpent = point.getTimeMillisSpent();
        weightSeries.add(timeMillisSpent, point.getWeightTotal());
    }
    graphFileList = new ArrayList<File>(constraintIdToWeightSeriesMapList.size());
    for (int scoreLevelIndex = 0; scoreLevelIndex < constraintIdToWeightSeriesMapList.size(); scoreLevelIndex++) {
        XYPlot plot = createPlot(benchmarkReport, scoreLevelIndex);
        // No direct ascending lines between 2 points, but a stepping line instead
        XYItemRenderer renderer = new XYStepRenderer();
        plot.setRenderer(renderer);
        XYSeriesCollection seriesCollection = new XYSeriesCollection();
        for (XYSeries series : constraintIdToWeightSeriesMapList.get(scoreLevelIndex).values()) {
            seriesCollection.addSeries(series);
        }
        plot.setDataset(seriesCollection);
        JFreeChart chart = new JFreeChart(subSingleBenchmarkResult.getName()
                + " constraint match total best score diff level " + scoreLevelIndex + " statistic",
                JFreeChart.DEFAULT_TITLE_FONT, plot, true);
        graphFileList.add(
                writeChartToImageFile(chart, "ConstraintMatchTotalBestScoreStatisticLevel" + scoreLevelIndex));
    }
}
From source file:edu.psu.citeseerx.corrections.CorrectAuthors.java
private boolean eliminateDuplicateAuthors(Document doc) {
    List<Author> authors = doc.getAuthors();
    if (null == authors || authors.size() <= 1) {
        // Nothing to do.
        return false;
    }
    boolean corrected = false;
    for (int i = authors.size() - 1; i >= 1; --i) {
        Author lastAuthor = authors.get(i);
        for (int j = i - 1; j >= 0; --j) {
            Author currentAuthor = authors.get(j);
            if (compareAuthors(lastAuthor, currentAuthor)) {
                logger.info("Duplicate authors have been found: " + lastAuthor.getDatum(Author.NAME_KEY)
                        + " and " + currentAuthor.getDatum(Author.NAME_KEY));
                // They are the same. Find out which one has more data.
                if (-1 == bestAuthor(lastAuthor, currentAuthor)) {
                    // Replace the entry at position j in place with lastAuthor.
                    authors.set(j, lastAuthor);
                }
                // No matter what. We always delete the last one.
                authors.remove(i);
                j = -1;
                corrected = true;
            }
        }
    }
    if (corrected) {
        for (int i = 0; i < authors.size(); i++) {
            authors.get(i).setDatum(Author.ORD_KEY, Integer.toString(i + 1));
            authors.get(i).setSource(Author.ORD_KEY, CORRECTION_SOURCE);
            // update source of correction for the rest of the fields
            setCorrectionSource(authors.get(i));
        }
        // Send the correction to the database.
        logger.info("Sending correction of: " + doc.getDatum(Document.DOI_KEY));
        try {
            updateManager.doCorrection(doc, "SystemCorrections");
        } catch (IOException e) {
            logger.error("An error ocurred while saving correction for: " + doc.getDatum(Document.DOI_KEY), e);
        } catch (JSONException e) {
            logger.error("An error ocurred while saving correction for: " + doc.getDatum(Document.DOI_KEY), e);
        }
    }
    return corrected;
}
From source file:com.haulmont.cuba.gui.data.impl.CollectionPropertyDatasourceImpl.java
@SuppressWarnings("unchecked") public void replaceItem(T item) { checkNotNullArgument(item, "item is null"); Collection<T> collection = getCollection(); if (collection != null) { for (T t : collection) { if (t.equals(item)) { detachListener(t);//from w ww.j av a2 s. c o m if (collection instanceof List) { List list = (List) collection; int itemIdx = list.indexOf(t); list.set(itemIdx, item); } else if (collection instanceof LinkedHashSet) { LinkedHashSet set = (LinkedHashSet) collection; List list = new ArrayList(set); int itemIdx = list.indexOf(t); list.set(itemIdx, item); set.clear(); set.addAll(list); } else { collection.remove(t); collection.add(item); } attachListener(item); if (item.equals(this.item)) { this.item = item; } break; } } if (sortInfos != null) doSort(); fireCollectionChanged(Operation.UPDATE, Collections.singletonList(item)); } }
From source file:edu.vt.vbi.patric.proteinfamily.FIGfamData.java
@SuppressWarnings("unchecked") public void getGroupStats(ResourceRequest request, PrintWriter writer) throws IOException { DataApiHandler dataApi = new DataApiHandler(request); JSONObject figfams = new JSONObject(); Set<String> figfamIdList = new HashSet<>(); List<String> genomeIdList = new LinkedList<>(); // get family Type final String familyType = request.getParameter("familyType"); final String familyId = familyType + "_id"; // get genome list in order String genomeIds = request.getParameter("genomeIds"); try {/*from ww w . j a v a 2 s . c o m*/ SolrQuery query = new SolrQuery("genome_id:(" + genomeIds.replaceAll(",", " OR ") + ")"); query.addSort("genome_name", SolrQuery.ORDER.asc).addField("genome_id") .setRows(DataApiHandler.MAX_ROWS); LOGGER.trace("[{}] {}", SolrCore.GENOME.getSolrCoreName(), query); String apiResponse = dataApi.solrQuery(SolrCore.GENOME, query); Map resp = jsonReader.readValue(apiResponse); Map respBody = (Map) resp.get("response"); List<Genome> genomes = dataApi.bindDocuments((List<Map>) respBody.get("docs"), Genome.class); for (final Genome genome : genomes) { genomeIdList.add(genome.getId()); } if (genomeIdList.size() == 25000) { query.setStart(25000); apiResponse = dataApi.solrQuery(SolrCore.GENOME, query); resp = jsonReader.readValue(apiResponse); respBody = (Map) resp.get("response"); genomes = dataApi.bindDocuments((List<Map>) respBody.get("docs"), Genome.class); for (final Genome genome : genomes) { genomeIdList.add(genome.getId()); } } } catch (IOException e) { LOGGER.error(e.getMessage(), e); } // LOGGER.debug("genomeIdList: {}", genomeIdList); // getting genome counts per figfamID (figfam) // {stat:{field:{field:figfam_id,limit:-1,facet:{min:"min(aa_length)",max:"max(aa_length)",mean:"avg(aa_length)",ss:"sumsq(aa_length)",sum:"sum(aa_length)",dist:"percentile(aa_length,50,75,99,99.9)",field:{field:genome_id}}}}} try { long start = System.currentTimeMillis(); SolrQuery query = new SolrQuery("annotation:PATRIC AND feature_type:CDS"); // query.addFilterQuery("end:[3200 TO 4300] OR end:[4400 TO 4490] OR end:[4990 TO 4999]"); query.addFilterQuery(getSolrQuery(request)); query.addFilterQuery("!" 
+ familyId + ":\"\""); query.setRows(0).setFacet(true).set("facet.threads", 15); query.add("json.facet", "{stat:{type:field,field:genome_id,limit:-1,facet:{figfams:{type:field,field:" + familyId + ",limit:-1,sort:{index:asc}}}}}"); LOGGER.trace("getGroupStats() 1/3: [{}] {}", SolrCore.FEATURE.getSolrCoreName(), query); String apiResponse = dataApi.solrQuery(SolrCore.FEATURE, query); long point = System.currentTimeMillis(); LOGGER.debug("1st query: {} ms", (point - start)); start = point; Map resp = jsonReader.readValue(apiResponse); Map facets = (Map) resp.get("facets"); Map stat = (Map) facets.get("stat"); final Map<String, String> figfamGenomeIdStr = new LinkedHashMap<>(); final Map<String, Integer> figfamGenomeCount = new LinkedHashMap<>(); final int genomeTotal = genomeIdList.size(); final Map<String, Integer> genomePosMap = new LinkedHashMap<>(); for (String genomeId : genomeIdList) { genomePosMap.put(genomeId, genomeIdList.indexOf(genomeId)); } final Map<String, List> figfamGenomeIdCountMap = new ConcurrentHashMap<>(); final Map<String, Set> figfamGenomeIdSet = new ConcurrentHashMap<>(); List<Map> genomeBuckets = (List<Map>) stat.get("buckets"); for (final Map bucket : genomeBuckets) { final String genomeId = (String) bucket.get("val"); final List<Map> figfamBucket = (List<Map>) ((Map) bucket.get("figfams")).get("buckets"); for (final Map figfam : figfamBucket) { final String figfamId = (String) figfam.get("val"); final String genomeCount = String.format("%02x", (Integer) figfam.get("count")); if (figfamGenomeIdCountMap.containsKey(figfamId)) { figfamGenomeIdCountMap.get(figfamId).set(genomePosMap.get(genomeId), genomeCount); } else { final List<String> genomeIdCount = new LinkedList<>(Collections.nCopies(genomeTotal, "00")); genomeIdCount.set(genomePosMap.get(genomeId), genomeCount); figfamGenomeIdCountMap.put(figfamId, genomeIdCount); } if (figfamGenomeIdSet.containsKey(figfamId)) { figfamGenomeIdSet.get(figfamId).add(genomeId); } else { final Set<String> genomeIdSet = new HashSet<>(); genomeIdSet.add(genomeId); figfamGenomeIdSet.put(figfamId, genomeIdSet); } } } for (String figfamId : figfamGenomeIdCountMap.keySet()) { final List genomeIdStr = figfamGenomeIdCountMap.get(figfamId); figfamGenomeIdStr.put(figfamId, StringUtils.join(genomeIdStr, "")); figfamGenomeCount.put(figfamId, figfamGenomeIdSet.get(figfamId).size()); } point = System.currentTimeMillis(); LOGGER.debug("1st query process : {} ms, figfamGenomeIdStr:{}, figfamGenomeCount:{}", (point - start), figfamGenomeIdStr.size(), figfamGenomeCount.size()); long start2nd = System.currentTimeMillis(); // 2nd query query.set("json.facet", "{stat:{type:field,field:" + familyId + ",limit:-1,facet:{min:\"min(aa_length)\",max:\"max(aa_length)\",mean:\"avg(aa_length)\",ss:\"sumsq(aa_length)\",sum:\"sum(aa_length)\"}}}"); LOGGER.trace("getGroupStats() 2/3: [{}] {}", SolrCore.FEATURE.getSolrCoreName(), query); apiResponse = dataApi.solrQuery(SolrCore.FEATURE, query); point = System.currentTimeMillis(); LOGGER.debug("2st query: {} ms", (point - start2nd)); start2nd = point; resp = jsonReader.readValue(apiResponse); facets = (Map) resp.get("facets"); stat = (Map) facets.get("stat"); List<Map> buckets = (List<Map>) stat.get("buckets"); for (Map bucket : buckets) { final String figfamId = (String) bucket.get("val"); final int count = (Integer) bucket.get("count"); double min, max, mean, sumsq, sum; if (bucket.get("min") instanceof Double) { min = (Double) bucket.get("min"); } else if (bucket.get("min") instanceof Integer) { min = ((Integer) 
bucket.get("min")).doubleValue(); } else { min = 0; } if (bucket.get("max") instanceof Double) { max = (Double) bucket.get("max"); } else if (bucket.get("max") instanceof Integer) { max = ((Integer) bucket.get("max")).doubleValue(); } else { max = 0; } if (bucket.get("mean") instanceof Double) { mean = (Double) bucket.get("mean"); } else if (bucket.get("mean") instanceof Integer) { mean = ((Integer) bucket.get("mean")).doubleValue(); } else { mean = 0; } if (bucket.get("ss") instanceof Double) { sumsq = (Double) bucket.get("ss"); } else if (bucket.get("ss") instanceof Integer) { sumsq = ((Integer) bucket.get("ss")).doubleValue(); } else { sumsq = 0; } if (bucket.get("sum") instanceof Double) { sum = (Double) bucket.get("sum"); } else if (bucket.get("sum") instanceof Integer) { sum = ((Integer) bucket.get("sum")).doubleValue(); } else { sum = 0; } // LOGGER.debug("bucket:{}, sumsq:{}, count: {}", bucket, sumsq, count); double std; if (count > 1) { // std = Math.sqrt(sumsq / (count - 1)); final double realSq = sumsq - (sum * sum) / count; std = Math.sqrt(realSq / (count - 1)); } else { std = 0; } final JSONObject aaLength = new JSONObject(); aaLength.put("min", min); aaLength.put("max", max); aaLength.put("mean", mean); aaLength.put("stddev", std); figfamIdList.add(figfamId); final JSONObject figfam = new JSONObject(); figfam.put("genomes", figfamGenomeIdStr.get(figfamId)); figfam.put("genome_count", figfamGenomeCount.get(figfamId)); figfam.put("feature_count", count); figfam.put("stats", aaLength); figfams.put(figfamId, figfam); } point = System.currentTimeMillis(); LOGGER.debug("2st query process: {} ms", (point - start2nd)); } catch (IOException e) { LOGGER.error(e.getMessage(), e); } // getting distinct figfam_product if (!figfamIdList.isEmpty()) { figfamIdList.remove(""); try { SolrQuery query = new SolrQuery("family_id:(" + StringUtils.join(figfamIdList, " OR ") + ")"); query.addFilterQuery("family_type:" + familyType); query.addField("family_id,family_product").setRows(figfamIdList.size()); LOGGER.debug("getGroupStats() 3/3: [{}] {}", SolrCore.FIGFAM_DIC.getSolrCoreName(), query); String apiResponse = dataApi.solrQuery(SolrCore.FIGFAM_DIC, query); Map resp = jsonReader.readValue(apiResponse); Map respBody = (Map) resp.get("response"); List<Map> sdl = (List<Map>) respBody.get("docs"); for (final Map doc : sdl) { final JSONObject figfam = (JSONObject) figfams.get(doc.get("family_id")); figfam.put("description", doc.get("family_product")); figfams.put(doc.get("family_id").toString(), figfam); } int i = 1; while (sdl.size() == 25000) { query.setStart(25000 * i); apiResponse = dataApi.solrQuery(SolrCore.FIGFAM_DIC, query); resp = jsonReader.readValue(apiResponse); respBody = (Map) resp.get("response"); sdl = (List<Map>) respBody.get("docs"); for (final Map doc : sdl) { final JSONObject figfam = (JSONObject) figfams.get(doc.get("family_id")); figfam.put("description", doc.get("family_product")); figfams.put(doc.get("family_id").toString(), figfam); } i++; } } catch (IOException e) { LOGGER.error(e.getMessage(), e); LOGGER.debug("::getGroupStats() 3/3, params: {}", request.getParameterMap().toString()); } figfams.writeJSONString(writer); } }
From source file:com.xiaomi.linden.core.search.MultiLindenCoreImpl.java
@Override
public Response delete(LindenDeleteRequest request) throws IOException {
    List<String> indexNames;
    // Only INDEX_NAME division type supports specified index name request
    if (multiIndexStrategy instanceof TimeLimitMultiIndexStrategy
            || multiIndexStrategy instanceof DocNumLimitMultiIndexStrategy) {
        indexNames = new ArrayList<>(lindenCoreMap.keySet());
    } else {
        if (request.getIndexNames() == null
                || (request.getIndexNamesSize() == 1 && request.getIndexNames().get(0).equals(LINDEN))) {
            indexNames = new ArrayList<>(lindenCoreMap.keySet());
        } else {
            indexNames = request.getIndexNames();
            // Rewrite each requested name in place, prepending the multi-index prefix.
            for (int i = 0; i < indexNames.size(); ++i) {
                indexNames.set(i, MultiIndexStrategy.MULTI_INDEX_PREFIX_NAME + indexNames.get(i));
            }
        }
    }
    StringBuilder errorInfo = new StringBuilder();
    for (final String indexName : indexNames) {
        final LindenCore core = lindenCoreMap.get(indexName);
        if (core != null) {
            try {
                Response response = core.delete(request);
                if (!response.isSuccess()) {
                    errorInfo.append(indexName + ":" + response.getError() + ";");
                    LOGGER.error("Multi-index {} delete error: {}", indexName, response.error);
                }
            } catch (Exception e) {
                errorInfo.append(indexName + ":" + Throwables.getStackTraceAsString(e) + ";");
                LOGGER.error("Multi-index {} delete error: {}", indexName, Throwables.getStackTraceAsString(e));
            }
        } else {
            errorInfo.append(indexName + " doesn't exist");
            LOGGER.error("Multi-index {} delete error: " + indexName + " doesn't exist");
        }
    }
    if (errorInfo.length() > 0) {
        return ResponseUtils.buildFailedResponse("Multi-index delete error: " + errorInfo.toString());
    } else {
        return ResponseUtils.SUCCESS;
    }
}
From source file:com.gemini.provision.network.openstack.NetworkProviderOpenStackImpl.java
@Override
public List<ProvisioningProviderResponseType> bulkCreateRouter(GeminiTenant tenant, GeminiEnvironment env,
        List<GeminiNetworkRouter> routes) {
    List<ProvisioningProviderResponseType> retValues = Collections.synchronizedList(new ArrayList());
    //TODO: Only the first element is set ... NEED to research whether it is possible to get the current position from the stream
    // Note: set(0, ...) on the initially empty retValues list throws IndexOutOfBoundsException;
    // List.set only overwrites existing positions and never grows the list.
    routes.stream().forEach(r -> retValues.set(0, createRouter(tenant, env, r)));
    return retValues;
}
From source file:com.gemini.provision.network.openstack.NetworkProviderOpenStackImpl.java
@Override
public List<ProvisioningProviderResponseType> bulkCreateSubnet(GeminiTenant tenant, GeminiEnvironment env,
        GeminiNetwork parent, List<GeminiSubnet> subnets) {
    List<ProvisioningProviderResponseType> retValues = Collections.synchronizedList(new ArrayList());
    //TODO: Only the first element is set ... NEED to research whether it is possible to get the current position from the stream
    // Note: as above, set(0, ...) on the initially empty retValues list throws IndexOutOfBoundsException.
    subnets.stream().forEach(n -> retValues.set(0, createSubnet(tenant, env, parent, n)));
    return retValues;
}