List of usage examples for java.util.LinkedHashMap.size()
int size();
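Returns the number of key-value mappings in the map. Before the real-world examples below, here is a minimal, self-contained sketch of the basic behavior (class and variable names are illustrative only, not taken from the examples):

import java.util.LinkedHashMap;
import java.util.Map;

public class LinkedHashMapSizeDemo {
    public static void main(String[] args) {
        Map<String, Integer> counts = new LinkedHashMap<>();
        System.out.println(counts.size()); // 0: the map starts empty

        counts.put("a", 1);
        counts.put("b", 2);
        System.out.println(counts.size()); // 2: two distinct keys

        counts.put("a", 99); // overwriting an existing key adds no mapping
        System.out.println(counts.size()); // still 2

        counts.remove("b");
        System.out.println(counts.size()); // 1
    }
}

As the examples below show, size() is typically used to report progress in logs, to pre-size other collections, and to emit record counts in JSON responses.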
From source file:com.mothsoft.alexis.dao.DocumentDaoImpl.java
@SuppressWarnings("unchecked") private List<ImportantTerm> getImportantTerms(FullTextQuery fullTextQuery, int count, boolean filterStopWords) { final Long start = System.currentTimeMillis(); final List<Object[]> results = fullTextQuery.list(); final LinkedHashMap<String, Tuple<Integer, Float>> termCountMap = new LinkedHashMap<String, Tuple<Integer, Float>>(); final FullTextSession fullTextSession = Search.getFullTextSession((Session) this.em.getDelegate()); final SearchFactory searchFactory = fullTextSession.getSearchFactory(); final IndexReaderAccessor ira = searchFactory.getIndexReaderAccessor(); final IndexReader reader = ira.open(com.mothsoft.alexis.domain.Document.class); final IndexSearcher searcher = new IndexSearcher(reader); final List<ImportantTerm> importantTerms; final int numDocs; try {//from w ww . j a v a2 s . c o m numDocs = reader.numDocs(); Term luceneTerm = new Term(CONTENT_TEXT_FIELD_NAME); if (logger.isDebugEnabled()) { logger.debug(String.format("Found %d matching Lucene documents of %d in reader", results.size(), numDocs)); } // loop over all the matching documents for (final Object[] ith : results) { int docId = ((Number) ith[0]).intValue(); final TermFreqVector tfv = reader.getTermFreqVector(docId, CONTENT_TEXT_FIELD_NAME); if (tfv == null) { continue; } final String[] terms = tfv.getTerms(); final int[] freqs = tfv.getTermFrequencies(); // total document size int size = 0; for (int freq : freqs) { size += freq; } if (logger.isDebugEnabled()) { logger.debug( String.format("Lucene document %d has %d terms, to be merged with running count %d", docId, size, termCountMap.size())); } // loop over the terms and aggregate the counts and tf-idf int i = 0; for (final String term : terms) { if (StopWords.ENGLISH.contains(term)) { continue; } luceneTerm = luceneTerm.createTerm(term); final int termCount = freqs[i++]; final Tuple<Integer, Float> countScore; if (termCountMap.containsKey(term)) { countScore = termCountMap.get(term); countScore.t1 += termCount; countScore.t2 += (TFIDF.score(term, termCount, size, numDocs, searcher.docFreq(luceneTerm))); } else { countScore = new Tuple<Integer, Float>(); countScore.t1 = termCount; countScore.t2 = (TFIDF.score(term, termCount, size, numDocs, searcher.docFreq(luceneTerm))); termCountMap.put(term, countScore); } } } if (logger.isDebugEnabled()) { logger.debug("Completed Lucene document processing."); } importantTerms = new ArrayList<ImportantTerm>(termCountMap.size()); // find max TF-IDF float maxTfIdf = 0.0f; for (final Tuple<Integer, Float> ith : termCountMap.values()) { if (ith.t2 > maxTfIdf) { maxTfIdf = ith.t2; } } for (final Map.Entry<String, Tuple<Integer, Float>> entry : termCountMap.entrySet()) { final int ithCount = entry.getValue().t1; final float ithTfIdf = entry.getValue().t2; importantTerms.add(new ImportantTerm(entry.getKey(), ithCount, ithTfIdf, maxTfIdf)); } if (logger.isDebugEnabled()) { logger.debug("Completed term aggregation, will clear term map"); } termCountMap.clear(); } catch (IOException e) { throw new RuntimeException(e); } finally { try { searcher.close(); } catch (IOException e) { logger.warn("Failed to close searcher: " + e, e); } ira.close(reader); } if (logger.isDebugEnabled()) { logger.debug("Sorting terms"); } Collections.sort(importantTerms, new Comparator<ImportantTerm>() { @Override public int compare(ImportantTerm term1, ImportantTerm term2) { return -1 * term1.getTfIdf().compareTo(term2.getTfIdf()); } }); if (logger.isDebugEnabled()) { logger.debug("Term sort complete"); } if 
(importantTerms.isEmpty() || importantTerms.size() < count) { if (logger.isDebugEnabled()) { logger.debug("Will return full list."); } logger.debug("Timer: " + (System.currentTimeMillis() - start)); return importantTerms; } else { if (logger.isDebugEnabled()) { logger.debug( "Will return sublist containing " + count + " of " + importantTerms.size() + " terms."); } logger.debug("Timer: " + (System.currentTimeMillis() - start)); return importantTerms.subList(0, count); } }
From source file:com.clustercontrol.util.LoginManager.java
private static boolean addConnect(List<LoginAccount> loginList) {
    LinkedHashMap<String, String> msgs = new LinkedHashMap<>();
    ArrayList<Status> statusList = new ArrayList<>();
    boolean connectFlag = false;
    boolean isError = false;

    m_log.debug("addConnections loginList.size=" + loginList.size());
    for (LoginAccount account : loginList) {
        connectFlag = true;
        String managerName = account.getManagerName();
        try {
            EndpointManager.add(account.getUserId(), account.getPassword(), managerName, account.getUrl());

            if (!EndpointManager.get(managerName).isActive()) {
                connect(managerName);
                // login succeeded
                msgs.put(managerName, Messages.getString("message.accesscontrol.5"));
                m_log.info("Login Success : userId = " + account.getUserId() + ", url = " + account.getUrl());
            } else {
                m_log.info("Login already : userId = " + account.getUserId() + ", url = " + account.getUrl());
            }
        } catch (CommunicationException e) {
            // communication error
            Status status = new Status(IStatus.ERROR, ClusterControlPlugin.getPluginId(), IStatus.OK,
                    managerName + " : " + Messages.getString("message.accesscontrol.22"), e);
            statusList.add(status);
            msgs.put(managerName, Messages.getString("message.accesscontrol.21"));
            isError = true;
            m_log.info("Login Fail : userId = " + account.getUserId() + ", url = " + account.getUrl());
        } catch (AccessException e) {
            // access denied
            msgs.put(managerName, Messages.getString("message.accesscontrol.6"));
            isError = true;
            m_log.info("Login Fail : userId = " + account.getUserId() + ", url = " + account.getUrl());
        } catch (InvalidUserPass_Exception e) {
            // invalid user ID or password
            msgs.put(managerName, Messages.getString("message.accesscontrol.45"));
            isError = true;
            m_log.info("Login Fail : userId = " + account.getUserId() + ", url = " + account.getUrl());
        } catch (InvalidTimezone e) {
            // invalid timezone
            msgs.put(managerName, Messages.getString("message.accesscontrol.65"));
            isError = true;
            m_log.info("Login Fail : userId = " + account.getUserId() + ", url = " + account.getUrl());
        } catch (Exception e) {
            // unexpected error
            Status status = new Status(IStatus.ERROR, ClusterControlPlugin.getPluginId(), IStatus.OK,
                    managerName + " : " + Messages.getString("message.accesscontrol.23"), e);
            statusList.add(status);
            msgs.put(managerName, Messages.getString("message.accesscontrol.6"));
            isError = true;
            m_log.info("Login Fail : userId = " + account.getUserId() + ", url = " + account.getUrl());
        }
    }

    if (0 < msgs.size()) {
        MultiStatus multiStatus = null;
        if (0 < statusList.size()) {
            multiStatus = new MultiStatus(ClusterControlPlugin.getPluginId(), IStatus.ERROR,
                    statusList.toArray(new Status[statusList.size()]),
                    Messages.getString("message.accesscontrol.56"), null);
        }
        UIManager.showMessageBox(msgs, isError, multiStatus);
    }
    return connectFlag;
}
From source file:com.sonicle.webtop.core.Service.java
public void processLookupLanguages(HttpServletRequest request, HttpServletResponse response, PrintWriter out) {
    LinkedHashMap<String, JsSimple> items = new LinkedHashMap<>();
    Locale locale = getEnv().getSession().getLocale();

    try {
        for (AppLocale apploc : WT.getInstalledLocales()) {
            final Locale loc = apploc.getLocale();
            final String lang = loc.getLanguage();
            if (!items.containsKey(lang)) {
                //items.put(lang, new JsSimple(lang, loc.getDisplayLanguage(locale)));
                items.put(lang, new JsSimple(apploc.getId(), apploc.getLocale().getDisplayName(locale)));
            }
        }
        new JsonResult("languages", items.values(), items.size()).printTo(out);

    } catch (Exception ex) {
        logger.error("Error in LookupLanguages", ex);
        new JsonResult(false, "Unable to lookup languages").printTo(out);
    }
}
From source file:juicebox.tools.utils.original.Preprocessor.java
/**
 * Note -- compressed
 *
 * @param zd          Matrix zoom data
 * @param block       Block to write
 * @param sampledData Array to hold a sample of the data (to compute statistics)
 * @throws IOException
 */
private void writeBlock(MatrixZoomDataPP zd, BlockPP block, DownsampledDoubleArrayList sampledData)
        throws IOException {

    final Map<Point, ContactCount> records = block.getContactRecordMap(); // getContactRecords();

    // System.out.println("Write contact records : records count = " + records.size());

    // Count records first
    int nRecords;
    if (countThreshold > 0) {
        nRecords = 0;
        for (ContactCount rec : records.values()) {
            if (rec.getCounts() >= countThreshold) {
                nRecords++;
            }
        }
    } else {
        nRecords = records.size();
    }

    BufferedByteWriter buffer = new BufferedByteWriter(nRecords * 12);
    buffer.putInt(nRecords);
    zd.cellCount += nRecords;

    // Find extents of occupied cells
    int binXOffset = Integer.MAX_VALUE;
    int binYOffset = Integer.MAX_VALUE;
    int binXMax = 0;
    int binYMax = 0;
    for (Map.Entry<Point, ContactCount> entry : records.entrySet()) {
        Point point = entry.getKey();
        binXOffset = Math.min(binXOffset, point.x);
        binYOffset = Math.min(binYOffset, point.y);
        binXMax = Math.max(binXMax, point.x);
        binYMax = Math.max(binYMax, point.y);
    }

    buffer.putInt(binXOffset);
    buffer.putInt(binYOffset);

    // Sort keys in row-major order
    List<Point> keys = new ArrayList<Point>(records.keySet());
    Collections.sort(keys, new Comparator<Point>() {
        @Override
        public int compare(Point o1, Point o2) {
            if (o1.y != o2.y) {
                return o1.y - o2.y;
            } else {
                return o1.x - o2.x;
            }
        }
    });
    Point lastPoint = keys.get(keys.size() - 1);
    final short w = (short) (binXMax - binXOffset + 1);

    boolean isInteger = true;
    float maxCounts = 0;

    LinkedHashMap<Integer, List<ContactRecord>> rows = new LinkedHashMap<Integer, List<ContactRecord>>();
    for (Point point : keys) {
        final ContactCount contactCount = records.get(point);
        float counts = contactCount.getCounts();
        if (counts >= countThreshold) {
            isInteger = isInteger && (Math.floor(counts) == counts);
            maxCounts = Math.max(counts, maxCounts);

            final int px = point.x - binXOffset;
            final int py = point.y - binYOffset;
            List<ContactRecord> row = rows.get(py);
            if (row == null) {
                row = new ArrayList<ContactRecord>(10);
                rows.put(py, row);
            }
            row.add(new ContactRecord(px, py, counts));
        }
    }

    // Compute size for each representation and choose smallest
    boolean useShort = isInteger && (maxCounts < Short.MAX_VALUE);
    int valueSize = useShort ? 2 : 4;

    int lorSize = 0;
    int nDensePts = (lastPoint.y - binYOffset) * w + (lastPoint.x - binXOffset) + 1;
    int denseSize = nDensePts * valueSize;
    for (List<ContactRecord> row : rows.values()) {
        lorSize += 4 + row.size() * valueSize;
    }

    buffer.put((byte) (useShort ? 0 : 1));

    if (lorSize < denseSize) {
        buffer.put((byte) 1); // List of rows representation
        buffer.putShort((short) rows.size()); // # of rows
        for (Map.Entry<Integer, List<ContactRecord>> entry : rows.entrySet()) {
            int py = entry.getKey();
            List<ContactRecord> row = entry.getValue();
            buffer.putShort((short) py); // Row number
            buffer.putShort((short) row.size()); // size of row
            for (ContactRecord contactRecord : row) {
                buffer.putShort((short) (contactRecord.getBinX()));
                final float counts = contactRecord.getCounts();
                if (useShort) {
                    buffer.putShort((short) counts);
                } else {
                    buffer.putFloat(counts);
                }
                sampledData.add(counts);
                zd.sum += counts;
            }
        }
    } else {
        buffer.put((byte) 2); // Dense matrix
        buffer.putInt(nDensePts);
        buffer.putShort(w);

        int lastIdx = 0;
        for (Point p : keys) {
            int idx = (p.y - binYOffset) * w + (p.x - binXOffset);
            for (int i = lastIdx; i < idx; i++) {
                // Filler value
                if (useShort) {
                    buffer.putShort(Short.MIN_VALUE);
                } else {
                    buffer.putFloat(Float.NaN);
                }
            }
            float counts = records.get(p).getCounts();
            if (useShort) {
                buffer.putShort((short) counts);
            } else {
                buffer.putFloat(counts);
            }
            lastIdx = idx + 1;
            sampledData.add(counts);
            zd.sum += counts;
        }
    }

    byte[] bytes = buffer.getBytes();
    byte[] compressedBytes = compress(bytes);
    los.write(compressedBytes);
}
From source file:com.day.cq.wcm.foundation.List.java
@SuppressWarnings("unchecked") private boolean init() { if (!inited) { initConfig();/*from ww w . jav a2s. com*/ // Note: this iter can also be set from the outside (setPageIterator()) if (pageIterator == null) { PageManager pm = request.getResourceResolver().adaptTo(PageManager.class); // per default we don't want duplicate pages in the result boolean allowDuplicates = properties.get(ALLOW_DUPLICATES_PROPERTY_NAME, false); try { Session session = resource.getResourceResolver().adaptTo(Session.class); // advanced search = querybuilder if (SOURCE_QUERYBUILDER.equals(source)) { QueryBuilder queryBuilder = resource.getResourceResolver().adaptTo(QueryBuilder.class); if (session != null && queryBuilder != null) { try { Query query = queryBuilder .loadQuery(resource.getPath() + "/" + SAVEDQUERY_PROPERTY_NAME, session); if (query != null) { query.setHitsPerPage(limit); SearchResult result = query.getResult(); // store as both page and node iterator pageIterator = new HitBasedPageIterator(pm, result.getHits().iterator(), !allowDuplicates, this.pageFilter); nodeIterator = result.getNodes(); } } catch (Exception e) { log.error("error loading stored querybuilder query from " + resource.getPath(), e); } } // simple search } else if (SOURCE_SEARCH.equals(source)) { if (DEFAULT_QUERY.equals(query)) { pageIterator = EmptyIterator.INSTANCE; } if (queryType != null) { javax.jcr.query.Query jcrQuery = session.getWorkspace().getQueryManager() .createQuery(query, queryType); QueryResult result = jcrQuery.execute(); pageIterator = new NodeBasedPageIterator(pm, result.getNodes(), !allowDuplicates, this.pageFilter); } else { SimpleSearch search = getSearch(resource.getPath()); search.setQuery(query); search.setSearchIn(startIn); // ensure we only get pages search.addPredicate(new Predicate("type", "type").set("type", NameConstants.NT_PAGE)); search.setHitsPerPage(100000); // run simple search SearchResult result = search.getResult(); pageIterator = new HitBasedPageIterator(pm, result.getHits().iterator(), !allowDuplicates, this.pageFilter); } // list child pages } else if (SOURCE_CHILDREN.equals(source)) { // default to current page String parentPath = properties.get(PARENT_PAGE_PROPERTY_NAME, resource.getPath()); Page startPage = pm.getContainingPage(parentPath); if (startPage != null) { // only get pages that are valid (on/off times, hide in nav) // Use default page filter if there is no page filter, because it was working this way // before adding PageFilter support pageIterator = startPage .listChildren(this.pageFilter != null ? 
this.pageFilter : new PageFilter()); } else { pageIterator = EmptyIterator.INSTANCE; } // list from tags } else if (SOURCE_TAGS.equals(source)) { // default to current page String parentPath = properties.get(TAGS_SEARCH_ROOT_PROPERTY_NAME, resource.getPath()); String[] tags = properties.get(TAGS_PROPERTY_NAME, new String[0]); boolean matchAny = properties.get(TAGS_MATCH_PROPERTY_NAME, "any").equals("any"); Page startPage = pm.getContainingPage(parentPath); if (startPage != null && tags.length > 0) { TagManager tagManager = request.getResourceResolver().adaptTo(TagManager.class); RangeIterator<Resource> results = tagManager.find(startPage.getPath(), tags, matchAny); LinkedHashMap<String, Page> pages = new LinkedHashMap<String, Page>(); while (results.hasNext()) { Resource r = results.next(); Page page = pm.getContainingPage(r); if (page != null && (pageFilter == null || pageFilter.includes(page))) { pages.put(page.getPath(), page); } } pageIterator = pages.values().iterator(); } else { pageIterator = EmptyIterator.INSTANCE; } // fixed list of pages } else { ArrayList<Page> staticPages = new ArrayList<Page>(); String[] statics = properties.get(PAGES_PROPERTY_NAME, new String[0]); for (String path : statics) { Page p = pm.getContainingPage(path); if (p != null && (pageFilter == null || pageFilter.includes(p))) { staticPages.add(p); } } pageIterator = staticPages.iterator(); } } catch (Exception e) { log.error("error creating page iterator", e); } } pages = new ArrayList<Page>(); resources = new ArrayList<Resource>(); if (pageIterator == null) { return false; } // build list of pages and resources from page iterator while (pageIterator.hasNext()) { Page page = pageIterator.next(); pages.add(page); } // apply sort order if present if (orderComparator != null) { Collections.sort(pages, orderComparator); } else if (orderBy != null) { Collections.sort(pages, new PageComparator<Page>(orderBy)); } // apply limit if (pages.size() > limit) { pages = pages.subList(0, limit); } for (Page p : pages) { resources.add(p.getContentResource()); } inited = true; } return true; }
From source file:com.ikanow.aleph2.analytics.services.TestDeduplicationService.java
@SuppressWarnings("unchecked") @Test/*w w w. ja v a 2 s . c o m*/ public void test_handleDuplicateRecord() { final IEnrichmentModuleContext enrich_context = Mockito.mock(IEnrichmentModuleContext.class); Mockito.when(enrich_context.emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class))) .thenReturn(Validation.success(_mapper.createObjectNode())); TestDedupEnrichmentModule test_module = new TestDedupEnrichmentModule(); final String ts_field = "@timestamp"; final ObjectNode old_json = _mapper.createObjectNode(); old_json.put("_id", "old_record"); old_json.put("@timestamp", 0L); old_json.put("url", "test"); final ObjectNode new_json = _mapper.createObjectNode(); new_json.put("@timestamp", 1L); new_json.put("url", "test"); final ObjectNode new_json_but_same_time = _mapper.createObjectNode(); new_json_but_same_time.put("@timestamp", 0L); new_json_but_same_time.put("url", "test"); Tuple3<Long, IBatchRecord, ObjectNode> new_record = Tuples._3T(0L, new BatchRecordUtils.JsonBatchRecord(new_json), _mapper.createObjectNode()); Tuple3<Long, IBatchRecord, ObjectNode> new_record_but_same_time = Tuples._3T(0L, new BatchRecordUtils.JsonBatchRecord(new_json_but_same_time), _mapper.createObjectNode()); new_record._2().getContent(); //(code coverage!) final TextNode key = new TextNode("url"); LinkedHashMap<JsonNode, LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>>> mutable_obj_map = new LinkedHashMap<>(); final LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>> new_records = Stream.of(new_record) .collect(Collectors.toCollection(LinkedList::new)); final LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>> new_records_but_same_time = Stream .of(new_record_but_same_time).collect(Collectors.toCollection(LinkedList::new)); // Simple case Leave policy { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.leave).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(0, _called_batch.get()); // No annotations/mutations assertEquals("{}", new_record._3().toString()); // Object removed from mutable map assertEquals(1, mutable_obj_map.size()); } // Simple case update policy - time updates final Consumer<Boolean> test_time_updates = delete_unhandled -> { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); 
DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.update) .with(DocumentSchemaBean::delete_unhandled_duplicates, delete_unhandled).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); // (add the same object twice to test the "return ids to delete" functionality) final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records, Arrays.asList(old_json, old_json), key, mutable_obj_map); if (delete_unhandled) { assertEquals(Arrays.asList("old_record"), ret_val.sorted() .map(j -> DeduplicationService.jsonToObject(j)).collect(Collectors.toList())); } else { assertEquals(0L, ret_val.count()); } // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(0, _called_batch.get()); // _id assertEquals("{\"_id\":\"old_record\"}", new_record._3().toString()); // Object removed from mutable map assertEquals(2, mutable_obj_map.size()); }; test_time_updates.accept(true); test_time_updates.accept(false); // Simple case update policy - times the same { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.update) .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records_but_same_time, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(0, _called_batch.get()); // No annotations/mutations assertEquals("{}", new_record_but_same_time._3().toString()); // Object removed from mutable map assertEquals(1, mutable_obj_map.size()); } // overwrite final Consumer<Boolean> test_overwrites = delete_unhandled -> { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.overwrite) .with(DocumentSchemaBean::delete_unhandled_duplicates, delete_unhandled).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = 
DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records, Arrays.asList(old_json, old_json), key, mutable_obj_map); if (delete_unhandled) { assertEquals(Arrays.asList("old_record"), ret_val.sorted() .map(j -> DeduplicationService.jsonToObject(j)).collect(Collectors.toList())); } else { assertEquals(0L, ret_val.count()); } // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(0, _called_batch.get()); // _id assertEquals("{\"_id\":\"old_record\"}", new_record._3().toString()); // Object removed from mutable map assertEquals(2, mutable_obj_map.size()); }; test_overwrites.accept(true); test_overwrites.accept(false); //(check ignores times) { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.overwrite) .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records_but_same_time, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(0, _called_batch.get()); // _id assertEquals("{\"_id\":\"old_record\"}", new_record_but_same_time._3().toString()); // Object removed from mutable map assertEquals(2, mutable_obj_map.size()); } // custom { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.custom) .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(2, _called_batch.get()); //(old + new) // _id 
assertEquals("{}", new_record._3().toString()); // up to the custom code to do this // Object removed from mutable map assertEquals(1, mutable_obj_map.size()); //(remove since it's the responsibility of the custom code to emit) } //(check ignores times) { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.custom) .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records_but_same_time, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(2, _called_batch.get()); //(old + new) // _id assertEquals("{}", new_record_but_same_time._3().toString()); // up to the custom code to do this // Object removed from mutable map assertEquals(1, mutable_obj_map.size()); //(remove since it's the responsibility of the custom code to emit) } // Simple case *custom* update policy - time updates { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.custom_update) .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(2, _called_batch.get()); //(old + new) // _id assertEquals("{}", new_record._3().toString()); // up to the custom code to do this // Object removed from mutable map assertEquals(1, mutable_obj_map.size()); //(remove since it's the responsibility of the custom code to emit) } // Simple case *custom* update policy - times the same { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); 
new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.custom_update) .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records_but_same_time, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(0, _called_batch.get()); // No annotations/mutations assertEquals("{}", new_record_but_same_time._3().toString()); // Object removed from mutable map assertEquals(1, mutable_obj_map.size()); } }
From source file:org.bimserver.charting.SupportFunctions.java
public static ArrayList<LinkedHashMap<String, Object>> getIfcMaterialsByNameWithTreeStructure(
        String structureKeyword, IfcModelInterface model, Chart chart, MutableInt subChartCount) {
    // Derive the column name.
    String leafColumnName = structureKeyword;
    // Update the chart configuration.
    chart.setDimensionLookupKey(structureKeyword, leafColumnName);
    chart.setDimensionLookupKey("date", "date");
    chart.setDimensionLookupKey("size", "size");
    // Prepare to iterate the relationships.
    LinkedHashMap<String, ArrayList<Double>> materialNameWithSizes = new LinkedHashMap<>();
    // Iterate only the relationships.
    for (IfcRelAssociatesMaterial ifcRelAssociatesMaterial : model
            .getAllWithSubTypes(IfcRelAssociatesMaterial.class)) {
        // IfcMaterialSelect: IfcMaterial, IfcMaterialList, IfcMaterialLayerSetUsage, IfcMaterialLayerSet, IfcMaterialLayer.
        IfcMaterialSelect materialLike = ifcRelAssociatesMaterial.getRelatingMaterial();
        // If there was a material-like object, sum the names of what it decomposes into across X individually.
        if (materialLike != null) {
            // First, get size data from IFC products.
            ArrayList<Double> sizes = new ArrayList<>();
            // Iterate objects.
            EList<IfcRoot> ifcRoots = ifcRelAssociatesMaterial.getRelatedObjects();
            for (IfcRoot ifcRoot : ifcRoots) {
                Double size = 0.0;
                if (ifcRoot instanceof IfcObjectDefinition) {
                    IfcObjectDefinition ifcObjectDefinition = (IfcObjectDefinition) ifcRoot;
                    if (ifcObjectDefinition instanceof IfcObject) {
                        IfcObject ifcObject = (IfcObject) ifcObjectDefinition;
                        if (ifcObject instanceof IfcProduct) {
                            IfcProduct ifcProduct = (IfcProduct) ifcObject;
                            Double volume = getRoughVolumeEstimateFromIfcProduct(ifcProduct);
                            size = volume;
                        }
                    }
                }
                if (size != null && size > 0)
                    sizes.add(size);
            }
            // Get material names with percentages, like: Material Name -> 0.5
            LinkedHashMap<String, Double> materials = getNameOfMaterialsFromMaterialLikeWithPercents(
                    materialLike, false);
            // Second, iterate materials, realizing the percentage of the sizes onto the collection of sizes for each material name.
            for (Entry<String, Double> materialEntry : materials.entrySet()) {
                String materialName = materialEntry.getKey();
                Double percent = materialEntry.getValue();
                // Use material name if available. Otherwise, use OID of top-level material-like object.
                String name = (materialName != null) ? materialName
                        : String.format("%d", materialLike.getOid());
                // Add entry if it doesn't exist.
                if (!materialNameWithSizes.containsKey(name))
                    materialNameWithSizes.put(name, new ArrayList<Double>());
                ArrayList<Double> theseSizes = materialNameWithSizes.get(name);
                // Get existing size data.
                if (percent != null && percent > 0) {
                    // If no alteration is required, clone into the stack.
                    if (percent == 1.0)
                        theseSizes.addAll(sizes);
                    // Otherwise, realize the percent of the size.
                    else
                        for (Double size : sizes)
                            theseSizes.add(size * percent);
                }
            }
        }
    }
    //
    subChartCount.setValue(materialNameWithSizes.size());
    //
    ArrayList<LinkedHashMap<String, Object>> rawData = new ArrayList<>();
    //
    for (Entry<String, ArrayList<Double>> entry : materialNameWithSizes.entrySet()) {
        String name = entry.getKey();
        // Get existing size data.
        ArrayList<Double> sizes = materialNameWithSizes.get(name);
        // Sort, value ascending.
        Collections.sort(sizes, sortSmallerValuesToFront);
        sizes.add(0, 0.0);
        if (sizes.size() == 1)
            sizes.add(0, 0.0);
        // Count including empty first entry.
        double count = Math.max(1, sizes.size() - 1);
        double step = 10000.0 / count;
        double runningSize = 0.0;
        // Add sum of zero at entry zero.
        int i = 0;
        // Iterate objects, summing them across 0 to 10000 (an arbitrary range, a way to relate to other sums along X).
        for (Double size : sizes) {
            double someMeasurement = (size != null) ? size : 0.0;
            runningSize += someMeasurement;
            // Prepare to store this raw data entry.
            LinkedHashMap<String, Object> dataEntry = new LinkedHashMap<>();
            // Name the group.
            dataEntry.put(leafColumnName, name);
            dataEntry.put("date", i * step);
            dataEntry.put("size", runningSize);
            // Push the entry into the data pool.
            rawData.add(dataEntry);
            //
            i += 1;
        }
    }
    // Send it all back.
    return rawData;
}
From source file:ch.elexis.core.ui.views.FallDetailBlatt2.java
/**
 * disposes of required and optional fields on the bottom of the page.<br>
 * re-reads the contents for this case for the upper part<br>
 * recreates the required and optional fields on the bottom of the page (call to
 * setExtendedFields)
 *
 * @param f
 *            caseID
 */
@SuppressWarnings("unchecked")
public void setFall(final IFall f) {
    if (actFall != null) {
        save();
    }
    // *** dispose of currently displayed fields
    actFall = f;
    for (Control c : lReqs) {
        c.dispose();
    }
    lReqs.clear();
    keepEditable.clear();

    // *** fill billing systems into combo, set current system
    cAbrechnung.setItems(Abrechnungstypen);
    if (f == null) {
        form.setText(Messages.FallDetailBlatt2_NoCaseSelected); //$NON-NLS-1$
        tBezeichnung.setText("");
        tBezeichnung.setMessage(Messages.FallDetailBlatt2_29);
        cReason.select(0);
        return;
    }
    String billingSystem = f.getAbrechnungsSystem();
    // auto select accident if uvg is selected
    if ("UVG".equals(billingSystem)) {
        int idx = ArrayUtils.indexOf(Reasons, FallConstants.TYPE_ACCIDENT);
        if (idx > -1) {
            f.setGrund(Reasons[idx]);
        }
    }
    form.setText(f.getLabel());
    // *** set Fallbezeichnung
    tBezeichnung.setText(f.getBezeichnung());
    // *** set Grund (Krankheit/Unfall/...)
    String grund = f.getGrund();
    int ix = cReason.indexOf(grund);
    if (ix == -1) {
        ix = 0;
    }
    cReason.select(ix);
    cAbrechnung.setText(billingSystem);
    // *** set startDate/EndDate
    TimeTool tt = new TimeTool();
    if (tt.set(f.getBeginnDatum()) == true) {
        dpVon.setDate(tt.getTime());
    } else {
        dpVon.setDate(null);
    }
    if (tt.set(f.getEndDatum()) == true) {
        dpBis.setDate(tt.getTime());
    } else {
        dpBis.setDate(null);
    }
    // *** set copy for patient
    btnCopyForPatient.setSelection(f.getCopyForPatient());
    // *** set Garant
    tGarant.setBackground(null);
    tGarant.setToolTipText(null);
    Kontakt garant = f.getGarant();
    String garantLabel = garant.getLabel();
    if (garant.isDeleted()) {
        tGarant.setBackground(UiDesk.getColor(UiDesk.COL_RED));
        garantLabel = "*** " + garantLabel;
        tGarant.setToolTipText(Messages.Contact_is_marked_deleted);
    }
    tGarant.setText(garantLabel);
    // *** set cost bearer (if enabled for billing system)
    boolean costBearerDisabled = BillingSystem.isCostBearerDisabled(billingSystem);
    tCostBearer.setVisible(!costBearerDisabled);
    hlCostBearer.setVisible(!costBearerDisabled);
    tCostBearer.setBackground(null);
    tCostBearer.setToolTipText(null);
    if (!costBearerDisabled) {
        Kontakt costBearer = f.getCostBearer();
        String label = (costBearer != null) ? costBearer.getLabel() : f.getPatient().getLabel();
        if (costBearer != null && costBearer.isDeleted()) {
            tCostBearer.setBackground(UiDesk.getColor(UiDesk.COL_RED));
            label = "*** " + label;
            tCostBearer.setToolTipText(Messages.Contact_is_marked_deleted);
        }
        tCostBearer.setText(label);
    } else {
        tCostBearer.setText(StringConstants.EMPTY);
    }
    // *** adding required fields defined in prefs
    String reqs = BillingSystem.getRequirementsBySystem(billingSystem);
    if ((reqs != null) && (reqs.length() > 0)) {
        // *** do not display a title bar since this is already displayed
        // above Rechnungsempfänger!
        setExtendedFields(f, reqs, StringTool.leer, false, false, false);
    }
    // *** adding optional fields defined in prefs
    String optionals = f.getOptionals();
    if ((optionals != null) && (optionals.length() > 0)) {
        setExtendedFields(f, optionals, Messages.FallDetailBlatt2_optionalData, false, false, true); //$NON-NLS-1$
    }

    // ****** show any other fields from extinfo - ONLY FOR ADMINS, NOT
    // INTENDED FOR NORMAL USERS !!!
    // first part fields with definition, second part without definition

    // *** display all unused field having a display specification
    String[] reqsArray = BillingSystem.getRequirementsBySystem(billingSystem).split(DEFINITIONSDELIMITER);
    for (int reqI = 0; reqI < reqsArray.length; reqI++) {
        reqsArray[reqI] = reqsArray[reqI].split(ARGUMENTSSDELIMITER)[0];
    }
    String[] optsArray = f.getOptionals().split(DEFINITIONSDELIMITER);
    for (int reqI = 0; reqI < optsArray.length; reqI++) {
        optsArray[reqI] = optsArray[reqI].split(ARGUMENTSSDELIMITER)[0];
    }
    // *** read field definitions for unused fields (previously required or
    // optional)
    List<String> unused = new ArrayList<String>();
    LinkedHashMap<String, String> unusedHash = new LinkedHashMap<String, String>();
    String strUnused = f.getUnused();
    if ((strUnused != null) && (!strUnused.isEmpty())) {
        String[] allUnused = strUnused.split(DEFINITIONSDELIMITER); //$NON-NLS-1$
        Arrays.sort(allUnused, String.CASE_INSENSITIVE_ORDER); // *** sort alphabetically
        for (String unusedPart : allUnused) {
            int posColon = unusedPart.indexOf(ARGUMENTSSDELIMITER);
            String key = unusedPart.substring(0, posColon);
            // *** do not show if already displayed in required or optional
            // fields
            boolean alreadyDisplayed = false;
            for (int reqI = 0; reqI < reqsArray.length; reqI++) {
                if (key.equalsIgnoreCase(reqsArray[reqI])) {
                    alreadyDisplayed = true;
                }
            }
            for (int reqI = 0; reqI < optsArray.length; reqI++) {
                if (key.equalsIgnoreCase(optsArray[reqI])) {
                    alreadyDisplayed = true;
                }
            }
            if (!alreadyDisplayed) {
                String value = unusedPart.substring(posColon + 1);
                unusedHash.put(key, value);
                unused.add(unusedPart);
            }
        }
    }
    Map<String, String> httmp = getSelectedFall().getMap(PersistentObject.FLD_EXTINFO);
    HashMap<String, String> ht = new HashMap<String, String>(httmp);
    String[] unusedHashStringArray = {};
    if (unusedHash.size() > 0) {
        String unusedHashString = unusedHash.toString();
        unusedHashString = unusedHashString.substring(1);
        unusedHashString = unusedHashString.substring(0, unusedHashString.length() - 1);
        unusedHashStringArray = unusedHashString.split(", "); //$NON-NLS-1$
    }
    String otherFieldsList_2 = StringTool.leer;
    String delim = StringTool.leer;
    for (int uhi = 0; uhi < unusedHashStringArray.length; uhi++) {
        String unusedItem = unusedHashStringArray[uhi];
        String[] itemParts = unusedItem.split("="); //$NON-NLS-1$
        String controlName = itemParts[0];
        String[] controlDefParts = itemParts[1].split(ARGUMENTSSDELIMITER);
        String controlType = controlDefParts[0];
        String[] itemList = { StringTool.leer };
        if (controlType.equalsIgnoreCase("X")) { //$NON-NLS-1$
            if (controlDefParts.length > 1) {
                itemList = controlDefParts[1].split(ITEMDELIMITER);
            }
        }
        boolean isAdded = false;
        // *** special handling if multiple items
        for (int ili = 0; ili < itemList.length; ili++) {
            String item = itemList[ili];
            if (!item.isEmpty()) {
                item = "_" + item; //$NON-NLS-1$
            }
            String combControlName = controlName + item;
            if (ht.containsKey(combControlName)) {
                ht.remove(combControlName);
                String values = StringTool.leer;
                if (controlDefParts.length > 1) {
                    values = controlDefParts[1];
                }
                if (!isAdded) {
                    otherFieldsList_2 = otherFieldsList_2 + delim + controlName + ARGUMENTSSDELIMITER
                            + controlType + ARGUMENTSSDELIMITER + values;
                }
                delim = DEFINITIONSDELIMITER;
                isAdded = true;
            }
        }
    }
    // *** only for admins!
    if (otherFieldsList_2.length() > 0) {
        if (CoreHub.acl.request(AccessControlDefaults.CASE_MODIFY_SPECIALS) == true) {
            setExtendedFields(f, otherFieldsList_2, Messages.FallDetailBlatt2_unusedFieldsWithDefinition,
                    true, true, false); //$NON-NLS-1$
        }
    }
    // *** collect all other fields that are not yet shown anywhere else,
    // display as text
    String otherFieldsList = otherFieldsList_2;
    otherFieldsList = StringTool.leer;
    Set<String> keySet = ht.keySet();
    Object[] arr = keySet.toArray();
    for (int i = 0; i < arr.length; i++) {
        String subkey = (String) arr[i];
        String abrSystem = getSelectedFall().getAbrechnungsSystem();
        String key = Preferences.LEISTUNGSCODES_CFG_KEY + "/" + abrSystem; //$NON-NLS-1$
        String bed = CoreHub.globalCfg.get(key + "/bedingungen", StringTool.leer); //$NON-NLS-1$
        boolean isAlreadyShown = false;
        if (subkey.equalsIgnoreCase(FallConstants.FLD_EXTINFO_BILLING))
            isAlreadyShown = true;
        // if (subkey.equalsIgnoreCase("payment")) isAlreadyShown = true;
        // if (subkey.equalsIgnoreCase("Fallnummer")) isAlreadyShown = true;
        // if (subkey.equalsIgnoreCase("Gesetz")) isAlreadyShown = true;
        String[] bedArr = bed.split(DEFINITIONSDELIMITER);
        if (!bed.isEmpty()) {
            for (int ii = 0; ii < bedArr.length; ii++) {
                String fldParts = bedArr[ii];
                String[] flds = fldParts.split(ARGUMENTSSDELIMITER);
                String fld = flds[0];
                if ((flds[1].equalsIgnoreCase("X")) && ((flds.length > 2)) //$NON-NLS-1$
                        && (!flds[2].isEmpty())) {
                    String checkBoxes = flds[2];
                    String[] checkBoxArray = checkBoxes.split(ITEMDELIMITER);
                    for (int cb_i = 0; cb_i < checkBoxArray.length; cb_i++) {
                        if ((fld + "_" + checkBoxArray[cb_i]).equalsIgnoreCase(subkey)) { //$NON-NLS-1$
                            isAlreadyShown = true;
                            break;
                        }
                    }
                } else {
                    if (fld.equalsIgnoreCase(subkey)) {
                        isAlreadyShown = true;
                        break;
                    }
                }
            }
        }
        String opt = CoreHub.globalCfg.get(key + "/fakultativ", StringTool.leer); //$NON-NLS-1$
        if (!isAlreadyShown) {
            String[] optArr = opt.split(DEFINITIONSDELIMITER);
            if (!opt.isEmpty()) {
                for (int ii = 0; ii < optArr.length; ii++) {
                    String fld = optArr[ii].split(ARGUMENTSSDELIMITER)[0];
                    if (fld.equalsIgnoreCase(subkey)) {
                        isAlreadyShown = true;
                        break;
                    }
                }
            }
        }
        if (!isAlreadyShown) {
            if (unusedHash.containsKey(subkey)) {
                // *** try to find def
                String theVal = (String) unusedHash.get(subkey);
                String[] vals = theVal.split(ARGUMENTSSDELIMITER);
                otherFieldsList = otherFieldsList + delim + subkey + ARGUMENTSSDELIMITER + vals[0];
                if (vals.length > 1) {
                    otherFieldsList = otherFieldsList + ARGUMENTSSDELIMITER + vals[1];
                }
            } else {
                // *** if no spec found, then show as text
                otherFieldsList = otherFieldsList + delim + subkey + ":T"; //$NON-NLS-1$
            }
            delim = DEFINITIONSDELIMITER;
        }
    }
    if (otherFieldsList.length() > 0) {
        // *** want to sort alphabetically here
        String[] tmpArr = otherFieldsList.split(DEFINITIONSDELIMITER);
        Arrays.sort(tmpArr, String.CASE_INSENSITIVE_ORDER);
        otherFieldsList = StringTool.leer;
        String tmpDel = StringTool.leer;
        for (int i = 0; i < tmpArr.length; i++) {
            otherFieldsList = otherFieldsList + tmpDel + tmpArr[i];
            tmpDel = DEFINITIONSDELIMITER;
        }
        // *** only for admins!
        if (CoreHub.acl.request(AccessControlDefaults.CASE_MODIFY_SPECIALS) == true) {
            setExtendedFields(f, otherFieldsList, Messages.FallDetailBlatt2_unusedFieldsWithoutDefinition,
                    true, true, false); //$NON-NLS-1$
        }
    }
    if (lockUpdate) {
        setUnlocked(CoreHub.getLocalLockService().isLockedLocal(actFall));
    }
}
From source file:org.cerberus.servlet.crud.testexecution.ReadTestCaseExecution.java
private AnswerItem findExecutionListBySystem(String system, ApplicationContext appContext,
        HttpServletRequest request) throws ParseException, JSONException {
    AnswerItem answer = new AnswerItem(new MessageEvent(MessageEventEnum.DATA_OPERATION_OK));

    /**
     * Parse all parameters used in the search.
     */
    String charset = request.getCharacterEncoding();
    /**
     * Parse parameters - list of values
     */
    List<String> testList = ParameterParserUtil.parseListParamAndDecode(request.getParameterValues("test"),
            null, charset);
    List<String> applicationList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("application"), null, charset);
    List<String> projectList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("project"), null, charset);
    List<String> tcstatusList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("tcstatus"), null, charset);
    List<String> groupList = ParameterParserUtil.parseListParamAndDecode(request.getParameterValues("group"),
            null, charset);
    List<String> tcactiveList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("tcactive"), null, charset);
    List<String> priorityList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("priority"), null, charset);
    List<String> targetsprintList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("targetsprint"), null, charset);
    List<String> targetrevisionList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("targetrevision"), null, charset);
    List<String> creatorList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("creator"), null, charset);
    List<String> implementerList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("implementer"), null, charset);
    List<String> environmentList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("environment"), null, charset);
    List<String> buildList = ParameterParserUtil.parseListParamAndDecode(request.getParameterValues("build"),
            null, charset);
    List<String> revisionList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("revision"), null, charset);
    List<String> countryList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("country"), null, charset);
    List<String> browserList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("browser"), null, charset);
    List<String> tcestatusList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("tcestatus"), null, charset);

    //Sorts the lists
    if (countryList != null) {
        Collections.sort(countryList);
    }
    if (browserList != null) {
        Collections.sort(browserList);
    }

    /**
     * Parse parameters - free text
     */
    String bugid = StringEscapeUtils.escapeHtml4(request.getParameter("bugid"));
    String ticket = StringEscapeUtils.escapeHtml4(request.getParameter("ticket"));
    String ip = StringEscapeUtils.escapeHtml4(request.getParameter("ip"));
    String port = StringEscapeUtils.escapeHtml4(request.getParameter("port"));
    String tag = StringEscapeUtils.escapeHtml4(request.getParameter("tag"));
    String browserversion = StringEscapeUtils.escapeHtml4(request.getParameter("browserversion"));
    String comment = StringEscapeUtils.escapeHtml4(request.getParameter("comment"));

    /**
     * Gets regular executions (not in queue)
     */
    AnswerList answerExecutions = testCaseExecutionService.readBySystemByVarious(system, testList,
            applicationList, projectList, tcstatusList, groupList, tcactiveList, priorityList,
            targetsprintList, targetrevisionList, creatorList, implementerList, buildList, revisionList,
            environmentList, countryList, browserList, tcestatusList, ip, port, tag, browserversion, comment,
            bugid, ticket);
    List<TestCaseExecution> testCaseExecutions = (List<TestCaseExecution>) answerExecutions.getDataList();

    /**
     * Get list of Execution in Queue by Tag
     */
    ITestCaseExecutionInQueueService testCaseExecutionInQueueService = appContext
            .getBean(ITestCaseExecutionInQueueService.class);
    AnswerList answerExecutionsInQueue = testCaseExecutionInQueueService.readBySystemByVarious(system,
            testList, applicationList, projectList, tcstatusList, groupList, tcactiveList, priorityList,
            targetsprintList, targetrevisionList, creatorList, implementerList, buildList, revisionList,
            environmentList, countryList, browserList, tcestatusList, ip, port, tag, browserversion, comment,
            bugid, ticket);
    List<TestCaseExecutionInQueue> testCaseExecutionsInQueue = (List<TestCaseExecutionInQueue>) answerExecutionsInQueue
            .getDataList();

    /**
     * Merge Test Case Executions
     */
    List<TestCaseExecution> allTestCaseExecutions = hashExecution(testCaseExecutions,
            testCaseExecutionsInQueue);

    JSONArray executionList = new JSONArray();
    LinkedHashMap<String, JSONObject> ttc = new LinkedHashMap<String, JSONObject>();

    for (TestCaseExecution testCaseExecution : allTestCaseExecutions) {
        try {
            JSONObject execution = testCaseExecutionToJSONObject(testCaseExecution);
            String execKey = testCaseExecution.getCountry() + " " + testCaseExecution.getBrowser(); //the key is country and browser
            String testCaseKey = testCaseExecution.getTest() + "_" + testCaseExecution.getTestCase();
            JSONObject execTab = new JSONObject();

            executionList.put(testCaseExecutionToJSONObject(testCaseExecution));
            JSONObject ttcObject = new JSONObject();

            if (ttc.containsKey(testCaseKey)) {
                ttcObject = ttc.get(testCaseKey);
                execTab = ttcObject.getJSONObject("execTab");
                execTab.put(execKey, execution);
                ttcObject.put("execTab", execTab);
            } else {
                ttcObject.put("test", testCaseExecution.getTest());
                ttcObject.put("testCase", testCaseExecution.getTestCase());
                ttcObject.put("function", testCaseExecution.getTestCaseObj().getFunction());
                ttcObject.put("shortDesc", testCaseExecution.getTestCaseObj().getDescription());
                ttcObject.put("status", testCaseExecution.getTestCaseObj().getStatus());
                ttcObject.put("application", testCaseExecution.getApplication());
                ttcObject.put("bugId", testCaseExecution.getTestCaseObj().getBugID());
                ttcObject.put("ticket", testCaseExecution.getTestCaseObj().getTicket());
                ttcObject.put("comment", testCaseExecution.getTestCaseObj().getComment());
                ttcObject.put("priority", testCaseExecution.getTestCaseObj().getPriority());
                ttcObject.put("status", testCaseExecution.getStatus());
                ttcObject.put("group", testCaseExecution.getTestCaseObj().getGroup());
                execTab.put(execKey, execution);
                ttcObject.put("execTab", execTab);
            }
            ttc.put(testCaseExecution.getTest() + "_" + testCaseExecution.getTestCase(), ttcObject);
        } catch (JSONException ex) {
            Logger.getLogger(ReadTestCaseExecution.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    JSONObject jsonResponse = new JSONObject();
    jsonResponse.put("contentTable", ttc.values());
    jsonResponse.put("iTotalRecords", ttc.size());
    jsonResponse.put("iTotalDisplayRecords", ttc.size());

    answer.setItem(jsonResponse);
    answer.setResultMessage(new MessageEvent(MessageEventEnum.DATA_OPERATION_OK));
    return answer;
}