List of usage examples for java.util.TreeMap.isEmpty()
boolean isEmpty();
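isEmpty() returns true when the map contains no key-value mappings. Before the full source-file examples below, here is a minimal, self-contained sketch of the typical guard pattern; the class name and values are illustrative only and do not come from the examples that follow.

import java.util.TreeMap;

public class TreeMapIsEmptyDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> scores = new TreeMap<>();

        // A freshly created TreeMap has no mappings, so isEmpty() returns true.
        System.out.println(scores.isEmpty());   // true

        scores.put("alice", 42);
        System.out.println(scores.isEmpty());   // false

        // isEmpty() is equivalent to size() == 0 and is commonly used as a guard
        // before firstEntry()/lastKey(), which fail on an empty map.
        if (!scores.isEmpty()) {
            System.out.println(scores.firstEntry().getValue());   // 42
        }
    }
}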
From source file:com.vgi.mafscaling.OpenLoop.java
protected void calculateMafScaling() {
    setCursor(new Cursor(Cursor.WAIT_CURSOR));
    try {
        clearData();
        clearChartData();
        clearChartCheckBoxes();

        TreeMap<Integer, ArrayList<Double>> result = new TreeMap<Integer, ArrayList<Double>>();
        if (!getMafTableData(voltArray, gsArray))
            return;
        if (!sortRunData(result) || result.isEmpty())
            return;

        calculateCorrectedGS(result);
        setCorrectedMafData();

        smoothGsArray.addAll(gsCorrected);
        checkBoxCorrectedMaf.setSelected(true);

        setXYTable(mafSmoothingTable, voltArray, smoothGsArray);

        setRanges();
        setSelectedIndex(1);
    } catch (Exception e) {
        e.printStackTrace();
        logger.error(e);
        JOptionPane.showMessageDialog(null, "Error: " + e, "Error", JOptionPane.ERROR_MESSAGE);
    } finally {
        setCursor(new Cursor(Cursor.DEFAULT_CURSOR));
    }
}
From source file:com.adobe.cq.dialogconversion.datasources.DialogsDataSource.java
private void setDataSource(Resource resource, String path, ResourceResolver resourceResolver,
        SlingHttpServletRequest request, String itemResourceType) throws RepositoryException {
    List<Resource> resources = new ArrayList<Resource>();

    if (StringUtils.isNotEmpty(path)) {
        Session session = request.getResourceResolver().adaptTo(Session.class);
        TreeMap<String, Node> nodeMap = new TreeMap<String, Node>();

        // sanitize path
        path = path.trim();
        if (!path.startsWith("/")) {
            path = "/" + path;
        }

        // First check if the supplied path is a dialog node itself
        if (session.nodeExists(path)) {
            Node node = session.getNode(path);
            DialogType type = DialogRewriteUtils.getDialogType(node);
            if (type != DialogType.UNKNOWN && type != DialogType.CORAL_3) {
                nodeMap.put(node.getPath(), node);
            }
        }

        // If the path does not point to a dialog node: we query for dialog nodes
        if (nodeMap.isEmpty()) {
            String encodedPath = "/".equals(path) ? "" : ISO9075.encodePath(path);
            if (encodedPath.length() > 1 && encodedPath.endsWith("/")) {
                encodedPath = encodedPath.substring(0, encodedPath.length() - 1);
            }
            String classicStatement = "SELECT * FROM [" + NT_DIALOG + "] AS s WHERE ISDESCENDANTNODE(s, '"
                    + encodedPath + "') " + "AND NAME() IN ('" + NameConstants.NN_DIALOG + "', '"
                    + NameConstants.NN_DESIGN_DIALOG + "')";
            String coral2Statement = "SELECT parent.* FROM [nt:unstructured] AS parent INNER JOIN [nt:unstructured] "
                    + "AS child on ISCHILDNODE(child, parent) WHERE ISDESCENDANTNODE(parent, '" + encodedPath + "') "
                    + "AND NAME(parent) IN ('" + NN_CQ_DIALOG + "', '" + NN_CQ_DIALOG + CORAL_2_BACKUP_SUFFIX
                    + "', '" + NN_CQ_DESIGN_DIALOG + "', '" + NN_CQ_DESIGN_DIALOG + CORAL_2_BACKUP_SUFFIX + "') "
                    + "AND NAME(child) = 'content' AND child.[sling:resourceType] NOT LIKE '"
                    + DIALOG_CONTENT_RESOURCETYPE_PREFIX_CORAL3 + "%'";

            QueryManager queryManager = session.getWorkspace().getQueryManager();
            List<Query> queries = new ArrayList<Query>();
            queries.add(queryManager.createQuery(classicStatement, Query.JCR_SQL2));
            queries.add(queryManager.createQuery(coral2Statement, Query.JCR_SQL2));

            for (Query query : queries) {
                NodeIterator iterator = query.execute().getNodes();
                while (iterator.hasNext()) {
                    Node node = iterator.nextNode();
                    Node parent = node.getParent();
                    if (parent != null) {
                        // put design dialogs at a relative key
                        String key = (DialogRewriteUtils.isDesignDialog(node))
                                ? parent.getPath() + "/" + NameConstants.NN_DESIGN_DIALOG
                                : parent.getPath();
                        // backup Coral 2 dialogs shouldn't override non-backup ones
                        if (node.getName().endsWith(CORAL_2_BACKUP_SUFFIX) && nodeMap.get(key) != null) {
                            continue;
                        }
                        nodeMap.put(key, node);
                    }
                }
            }
        }

        int index = 0;
        Iterator iterator = nodeMap.entrySet().iterator();
        while (iterator.hasNext()) {
            Map.Entry entry = (Map.Entry) iterator.next();
            Node dialog = (Node) entry.getValue();

            if (dialog == null) {
                continue;
            }

            Node parent = dialog.getParent();
            if (parent == null) {
                continue;
            }

            DialogType dialogType = DialogRewriteUtils.getDialogType(dialog);
            String dialogPath = dialog.getPath();
            String type = dialogType.getString();
            String href = externalizer.relativeLink(request, dialogPath) + ".html";
            String crxHref = externalizer.relativeLink(request, CRX_LITE_PATH) + ".jsp#" + dialogPath;
            boolean isDesignDialog = DialogRewriteUtils.isDesignDialog(dialog);

            // only allow Coral 2 backup dialogs in the result if there's a replacement
            if (dialogType == DialogType.CORAL_2 && dialog.getName().endsWith(CORAL_2_BACKUP_SUFFIX)) {
                if ((!isDesignDialog && !parent.hasNode(NN_CQ_DIALOG))
                        || (isDesignDialog && !parent.hasNode(NN_CQ_DESIGN_DIALOG))) {
                    continue;
                }
            }

            boolean converted = false;
            if (dialogType == DialogType.CLASSIC) {
                converted = isDesignDialog ? parent.hasNode(NN_CQ_DESIGN_DIALOG) : parent.hasNode(NN_CQ_DIALOG);
            } else if (dialogType == DialogType.CORAL_2) {
                converted = dialog.getName().endsWith(CORAL_2_BACKUP_SUFFIX);
            }

            Map<String, Object> map = new HashMap<String, Object>();
            map.put("dialogPath", dialogPath);
            map.put("type", type);
            map.put("href", href);
            map.put("converted", converted);
            map.put("crxHref", crxHref);

            if (converted) {
                Node convertedNode = (isDesignDialog) ? parent.getNode(NN_CQ_DESIGN_DIALOG)
                        : parent.getNode(NN_CQ_DIALOG);
                String touchHref = externalizer.relativeLink(request, convertedNode.getPath()) + ".html";
                String touchCrxHref = externalizer.relativeLink(request, CRX_LITE_PATH) + ".jsp#"
                        + convertedNode.getPath().replaceAll(":", "%3A");
                map.put("touchHref", touchHref);
                map.put("touchCrxHref", touchCrxHref);
            }

            resources.add(new ValueMapResource(resourceResolver, resource.getPath() + "/dialog_" + index,
                    itemResourceType, new ValueMapDecorator(map)));
            index++;
        }
    }

    DataSource ds = new SimpleDataSource(resources.iterator());
    request.setAttribute(DataSource.class.getName(), ds);
}
From source file:com.serphacker.serposcope.db.google.GoogleSerpRescanDB.java
public void rescanNonBulk(Integer specificRunId, Collection<GoogleTarget> targets,
        Collection<GoogleSearch> searches, boolean updateSummary) {
    LOG.debug("SERP rescan (non-bulk) : starting");
    long _start = System.currentTimeMillis();

    Run specPrevRun = null;
    Map<Integer, GoogleTargetSummary> specPrevRunSummaryByTarget = new HashMap<>();

    if (specificRunId != null) {
        specPrevRun = runDB.findPrevious(specificRunId);
        if (specPrevRun != null) {
            specPrevRunSummaryByTarget = targetSummaryDB.list(specPrevRun.getId()).stream()
                    .collect(Collectors.toMap(GoogleTargetSummary::getTargetId, Function.identity()));
        }
    }

    for (GoogleTarget target : targets) {
        Map<Integer, GoogleTargetSummary> summaryByRunId = new HashMap<>();
        GoogleTargetSummary specificPreviousSummary = specPrevRunSummaryByTarget.get(target.getId());
        if (specificPreviousSummary != null) {
            summaryByRunId.put(specPrevRun.getId(), specificPreviousSummary);
        }

        for (GoogleSearch search : searches) {
            final MutableInt previousRunId = new MutableInt(0);
            final MutableInt previousRank = new MutableInt(GoogleRank.UNRANKED);
            GoogleBest searchBest = new GoogleBest(target.getGroupId(), target.getId(), search.getId(),
                    GoogleRank.UNRANKED, null, null);

            if (specPrevRun != null) {
                previousRunId.setValue(specPrevRun.getId());
                previousRank.setValue(
                        rankDB.get(specPrevRun.getId(), target.getGroupId(), target.getId(), search.getId()));
                GoogleBest specificBest = rankDB.getBest(target.getGroupId(), target.getId(), search.getId());
                if (specificBest != null) {
                    searchBest = specificBest;
                }
            }
            final GoogleBest best = searchBest;

            serpDB.stream(specificRunId, specificRunId, search.getId(), (GoogleSerp res) -> {
                int rank = GoogleRank.UNRANKED;
                String rankedUrl = null;
                for (int i = 0; i < res.getEntries().size(); i++) {
                    if (target.match(res.getEntries().get(i).getUrl())) {
                        rankedUrl = res.getEntries().get(i).getUrl();
                        rank = i + 1;
                        break;
                    }
                }

                // only update last run
                GoogleRank gRank = new GoogleRank(res.getRunId(), target.getGroupId(), target.getId(),
                        search.getId(), rank, previousRank.shortValue(), rankedUrl);
                rankDB.insert(gRank);

                if (updateSummary) {
                    GoogleTargetSummary summary = summaryByRunId.get(res.getRunId());
                    if (summary == null) {
                        summaryByRunId.put(res.getRunId(), summary = new GoogleTargetSummary(target.getGroupId(),
                                target.getId(), res.getRunId(), 0));
                    }
                    summary.addRankCandidat(gRank);
                }

                if (rank != GoogleRank.UNRANKED && rank <= best.getRank()) {
                    best.setRank((short) rank);
                    best.setUrl(rankedUrl);
                    best.setRunDay(res.getRunDay());
                }

                previousRunId.setValue(res.getRunId());
                previousRank.setValue(rank);
            });

            if (best.getRank() != GoogleRank.UNRANKED) {
                rankDB.insertBest(best);
            }
        }

        // fill previous summary score
        if (updateSummary) {
            TreeMap<Integer, GoogleTargetSummary> summaries = new TreeMap<>(summaryByRunId);

            GoogleTargetSummary previousSummary = null;
            for (Map.Entry<Integer, GoogleTargetSummary> entry : summaries.entrySet()) {
                if (previousSummary != null) {
                    entry.getValue().setPreviousScoreBP(previousSummary.getScoreBP());
                }
                previousSummary = entry.getValue();
            }

            if (specPrevRun != null) {
                summaries.remove(specPrevRun.getId());
            }

            if (!summaries.isEmpty()) {
                targetSummaryDB.insert(summaries.values());
            }
        }
    }

    LOG.debug("SERP rescan : done, duration = {}",
            DurationFormatUtils.formatDurationHMS(System.currentTimeMillis() - _start));
}
From source file:org.corpus_tools.peppermodules.annis.SRelation2ANNISMapper.java
protected SLayer getFirstComponentLayer(SNode node) {
    SLayer componentLayer = null;
    Set<SLayer> nodeLayer = node.getLayers();
    if (nodeLayer != null) {
        // get layer name which comes lexically first
        TreeMap<String, SLayer> layers = new TreeMap<>();
        for (SLayer l : nodeLayer) {
            layers.put(l.getName(), l);
        }
        if (!layers.isEmpty()) {
            componentLayer = layers.firstEntry().getValue();
        }
    }
    return componentLayer;
}
From source file:com.asakusafw.directio.hive.parquet.DataModelMaterializer.java
private List<Mapping> computeMapping(DataModelDescriptor descriptor, MessageType schema,
        DataModelMapping configuration) {
    List<Mapping> mappings;
    switch (configuration.getFieldMappingStrategy()) {
    case NAME:
        mappings = computeMappingByName(descriptor, schema);
        break;
    case POSITION:
        mappings = computeMappingByPosition(descriptor, schema);
        break;
    default:
        throw new AssertionError(configuration.getFieldMappingStrategy());
    }
    TreeMap<Integer, Mapping> propertyMap = new TreeMap<>();
    for (Mapping mapping : mappings) {
        if (checkMapping(descriptor, mapping, configuration)) {
            assert mapping.source != null;
            assert mapping.target != null;
            if (LOG.isDebugEnabled()) {
                LOG.debug(MessageFormat.format("Map Parquet column: {0}:{1} -> {2}:{3}", //$NON-NLS-1$
                        mapping.source.getPath()[0], mapping.source.getType(),
                        mapping.target.getFieldName(), mapping.target.getTypeInfo()));
            }
            int index = schema.getFieldIndex(mapping.source.getPath()[0]);
            propertyMap.put(index, mapping);
        }
    }
    int lastIndex = -1;
    if (propertyMap.isEmpty() == false) {
        lastIndex = propertyMap.lastKey();
    }
    Mapping[] results = new Mapping[lastIndex + 1];
    for (Map.Entry<Integer, Mapping> entry : propertyMap.entrySet()) {
        results[entry.getKey()] = entry.getValue();
    }
    return Arrays.asList(results);
}
From source file:org.apache.hadoop.hbase.util.RegionSplitter.java
static void rollingSplit(String tableName, SplitAlgorithm splitAlgo, Configuration conf)
        throws IOException, InterruptedException {
    final int minOS = conf.getInt("split.outstanding", 2);

    HTable table = new HTable(conf, tableName);

    // max outstanding splits. default == 50% of servers
    final int MAX_OUTSTANDING = Math.max(table.getConnection().getCurrentNrHRS() / 2, minOS);

    Path hbDir = FSUtils.getRootDir(conf);
    Path tableDir = FSUtils.getTableDir(hbDir, table.getName());
    Path splitFile = new Path(tableDir, "_balancedSplit");
    FileSystem fs = FileSystem.get(conf);

    // get a list of daughter regions to create
    LinkedList<Pair<byte[], byte[]>> tmpRegionSet = getSplits(table, splitAlgo);
    LinkedList<Pair<byte[], byte[]>> outstanding = Lists.newLinkedList();
    int splitCount = 0;
    final int origCount = tmpRegionSet.size();

    // all splits must compact & we have 1 compact thread, so 2 split
    // requests to the same RS can stall the outstanding split queue.
    // To fix, group the regions into an RS pool and round-robin through it
    LOG.debug("Bucketing regions by regionserver...");
    TreeMap<String, LinkedList<Pair<byte[], byte[]>>> daughterRegions = Maps.newTreeMap();
    for (Pair<byte[], byte[]> dr : tmpRegionSet) {
        String rsLocation = table.getRegionLocation(dr.getSecond()).getHostnamePort();
        if (!daughterRegions.containsKey(rsLocation)) {
            LinkedList<Pair<byte[], byte[]>> entry = Lists.newLinkedList();
            daughterRegions.put(rsLocation, entry);
        }
        daughterRegions.get(rsLocation).add(dr);
    }
    LOG.debug("Done with bucketing. Split time!");
    long startTime = System.currentTimeMillis();

    // open the split file and modify it as splits finish
    FSDataInputStream tmpIn = fs.open(splitFile);
    byte[] rawData = new byte[tmpIn.available()];
    tmpIn.readFully(rawData);
    tmpIn.close();
    FSDataOutputStream splitOut = fs.create(splitFile);
    splitOut.write(rawData);

    try {
        // *** split code ***
        while (!daughterRegions.isEmpty()) {
            LOG.debug(daughterRegions.size() + " RS have regions to splt.");

            // Get RegionServer : region count mapping
            final TreeMap<ServerName, Integer> rsSizes = Maps.newTreeMap();
            Map<HRegionInfo, ServerName> regionsInfo = table.getRegionLocations();
            for (ServerName rs : regionsInfo.values()) {
                if (rsSizes.containsKey(rs)) {
                    rsSizes.put(rs, rsSizes.get(rs) + 1);
                } else {
                    rsSizes.put(rs, 1);
                }
            }

            // sort the RS by the number of regions they have
            List<String> serversLeft = Lists.newArrayList(daughterRegions.keySet());
            Collections.sort(serversLeft, new Comparator<String>() {
                public int compare(String o1, String o2) {
                    return rsSizes.get(o1).compareTo(rsSizes.get(o2));
                }
            });

            // round-robin through the RS list. Choose the lightest-loaded servers
            // first to keep the master from load-balancing regions as we split.
            for (String rsLoc : serversLeft) {
                Pair<byte[], byte[]> dr = null;

                // find a region in the RS list that hasn't been moved
                LOG.debug("Finding a region on " + rsLoc);
                LinkedList<Pair<byte[], byte[]>> regionList = daughterRegions.get(rsLoc);
                while (!regionList.isEmpty()) {
                    dr = regionList.pop();

                    // get current region info
                    byte[] split = dr.getSecond();
                    HRegionLocation regionLoc = table.getRegionLocation(split);

                    // if this region moved locations
                    String newRs = regionLoc.getHostnamePort();
                    if (newRs.compareTo(rsLoc) != 0) {
                        LOG.debug("Region with " + splitAlgo.rowToStr(split) + " moved to " + newRs
                                + ". Relocating...");
                        // relocate it, don't use it right now
                        if (!daughterRegions.containsKey(newRs)) {
                            LinkedList<Pair<byte[], byte[]>> entry = Lists.newLinkedList();
                            daughterRegions.put(newRs, entry);
                        }
                        daughterRegions.get(newRs).add(dr);
                        dr = null;
                        continue;
                    }

                    // make sure this region wasn't already split
                    byte[] sk = regionLoc.getRegionInfo().getStartKey();
                    if (sk.length != 0) {
                        if (Bytes.equals(split, sk)) {
                            LOG.debug("Region already split on " + splitAlgo.rowToStr(split)
                                    + ". Skipping this region...");
                            ++splitCount;
                            dr = null;
                            continue;
                        }
                        byte[] start = dr.getFirst();
                        Preconditions.checkArgument(Bytes.equals(start, sk),
                                splitAlgo.rowToStr(start) + " != " + splitAlgo.rowToStr(sk));
                    }

                    // passed all checks! found a good region
                    break;
                }
                if (regionList.isEmpty()) {
                    daughterRegions.remove(rsLoc);
                }
                if (dr == null)
                    continue;

                // we have a good region, time to split!
                byte[] split = dr.getSecond();
                LOG.debug("Splitting at " + splitAlgo.rowToStr(split));
                HBaseAdmin admin = new HBaseAdmin(table.getConfiguration());
                admin.split(table.getTableName(), split);

                LinkedList<Pair<byte[], byte[]>> finished = Lists.newLinkedList();
                if (conf.getBoolean("split.verify", true)) {
                    // we need to verify and rate-limit our splits
                    outstanding.addLast(dr);
                    // with too many outstanding splits, wait for some to finish
                    while (outstanding.size() >= MAX_OUTSTANDING) {
                        finished = splitScan(outstanding, table, splitAlgo);
                        if (finished.isEmpty()) {
                            Thread.sleep(30 * 1000);
                        } else {
                            outstanding.removeAll(finished);
                        }
                    }
                } else {
                    finished.add(dr);
                }

                // mark each finished region as successfully split.
                for (Pair<byte[], byte[]> region : finished) {
                    splitOut.writeChars("- " + splitAlgo.rowToStr(region.getFirst()) + " "
                            + splitAlgo.rowToStr(region.getSecond()) + "\n");
                    splitCount++;
                    if (splitCount % 10 == 0) {
                        long tDiff = (System.currentTimeMillis() - startTime) / splitCount;
                        LOG.debug("STATUS UPDATE: " + splitCount + " / " + origCount + ". Avg Time / Split = "
                                + org.apache.hadoop.util.StringUtils.formatTime(tDiff));
                    }
                }
            }
        }
        if (conf.getBoolean("split.verify", true)) {
            while (!outstanding.isEmpty()) {
                LinkedList<Pair<byte[], byte[]>> finished = splitScan(outstanding, table, splitAlgo);
                if (finished.isEmpty()) {
                    Thread.sleep(30 * 1000);
                } else {
                    outstanding.removeAll(finished);
                    for (Pair<byte[], byte[]> region : finished) {
                        splitOut.writeChars("- " + splitAlgo.rowToStr(region.getFirst()) + " "
                                + splitAlgo.rowToStr(region.getSecond()) + "\n");
                    }
                }
            }
        }
        LOG.debug("All regions have been successfully split!");
    } finally {
        long tDiff = System.currentTimeMillis() - startTime;
        LOG.debug("TOTAL TIME = " + org.apache.hadoop.util.StringUtils.formatTime(tDiff));
        LOG.debug("Splits = " + splitCount);
        LOG.debug("Avg Time / Split = " + org.apache.hadoop.util.StringUtils.formatTime(tDiff / splitCount));

        splitOut.close();
        if (table != null) {
            table.close();
        }
    }
    fs.delete(splitFile, false);
}
From source file:org.apache.rocketmq.tools.command.consumer.ConsumerSubCommand.java
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
    DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
    defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));

    try {
        defaultMQAdminExt.start();
        String group = commandLine.getOptionValue('g').trim();
        ConsumerConnection cc = defaultMQAdminExt.examineConsumerConnectionInfo(group);
        boolean jstack = commandLine.hasOption('s');

        if (!commandLine.hasOption('i')) {
            int i = 1;
            long now = System.currentTimeMillis();
            final TreeMap<String/* clientId */, ConsumerRunningInfo> criTable =
                    new TreeMap<String, ConsumerRunningInfo>();
            for (Connection conn : cc.getConnectionSet()) {
                try {
                    ConsumerRunningInfo consumerRunningInfo =
                            defaultMQAdminExt.getConsumerRunningInfo(group, conn.getClientId(), jstack);
                    if (consumerRunningInfo != null) {
                        criTable.put(conn.getClientId(), consumerRunningInfo);
                        String filePath = now + "/" + conn.getClientId();
                        MixAll.string2FileNotSafe(consumerRunningInfo.formatString(), filePath);
                        System.out.printf("%03d %-40s %-20s %s%n", i++, conn.getClientId(),
                                MQVersion.getVersionDesc(conn.getVersion()), filePath);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            if (!criTable.isEmpty()) {
                boolean subSame = ConsumerRunningInfo.analyzeSubscription(criTable);
                boolean rebalanceOK = subSame && ConsumerRunningInfo.analyzeRebalance(criTable);

                if (subSame) {
                    System.out.printf("%n%nSame subscription in the same group of consumer");
                    System.out.printf("%n%nRebalance %s%n", rebalanceOK ? "OK" : "Failed");

                    Iterator<Entry<String, ConsumerRunningInfo>> it = criTable.entrySet().iterator();
                    while (it.hasNext()) {
                        Entry<String, ConsumerRunningInfo> next = it.next();
                        String result = ConsumerRunningInfo.analyzeProcessQueue(next.getKey(), next.getValue());
                        if (result.length() > 0) {
                            System.out.printf("%s", result);
                        }
                    }
                } else {
                    System.out.printf("%n%nWARN: Different subscription in the same group of consumer!!!");
                }
            }
        } else {
            String clientId = commandLine.getOptionValue('i').trim();
            ConsumerRunningInfo consumerRunningInfo =
                    defaultMQAdminExt.getConsumerRunningInfo(group, clientId, jstack);
            if (consumerRunningInfo != null) {
                System.out.printf("%s", consumerRunningInfo.formatString());
            }
        }
    } catch (Exception e) {
        throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
    } finally {
        defaultMQAdminExt.shutdown();
    }
}
From source file:org.apache.rocketmq.tools.command.consumer.ConsumerStatusSubCommand.java
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
    DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
    defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));

    try {
        defaultMQAdminExt.start();
        String group = commandLine.getOptionValue('g').trim();
        ConsumerConnection cc = defaultMQAdminExt.examineConsumerConnectionInfo(group);
        boolean jstack = commandLine.hasOption('s');

        if (!commandLine.hasOption('i')) {
            int i = 1;
            long now = System.currentTimeMillis();
            final TreeMap<String/* clientId */, ConsumerRunningInfo> criTable =
                    new TreeMap<String, ConsumerRunningInfo>();
            for (Connection conn : cc.getConnectionSet()) {
                try {
                    ConsumerRunningInfo consumerRunningInfo =
                            defaultMQAdminExt.getConsumerRunningInfo(group, conn.getClientId(), jstack);
                    if (consumerRunningInfo != null) {
                        criTable.put(conn.getClientId(), consumerRunningInfo);
                        String filePath = now + "/" + conn.getClientId();
                        MixAll.string2FileNotSafe(consumerRunningInfo.formatString(), filePath);
                        System.out.printf("%03d %-40s %-20s %s%n", i++, conn.getClientId(),
                                MQVersion.getVersionDesc(conn.getVersion()), filePath);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            if (!criTable.isEmpty()) {
                boolean subSame = ConsumerRunningInfo.analyzeSubscription(criTable);
                boolean rebalanceOK = subSame && ConsumerRunningInfo.analyzeRebalance(criTable);

                if (subSame) {
                    System.out.printf("%n%nSame subscription in the same group of consumer");
                    System.out.printf("%n%nRebalance %s%n", rebalanceOK ? "OK" : "Failed");

                    Iterator<Entry<String, ConsumerRunningInfo>> it = criTable.entrySet().iterator();
                    while (it.hasNext()) {
                        Entry<String, ConsumerRunningInfo> next = it.next();
                        String result = ConsumerRunningInfo.analyzeProcessQueue(next.getKey(), next.getValue());
                        if (result.length() > 0) {
                            System.out.printf("%s", result);
                        }
                    }
                } else {
                    System.out.printf("%n%nWARN: Different subscription in the same group of consumer!!!");
                }
            }
        } else {
            String clientId = commandLine.getOptionValue('i').trim();
            ConsumerRunningInfo consumerRunningInfo =
                    defaultMQAdminExt.getConsumerRunningInfo(group, clientId, jstack);
            if (consumerRunningInfo != null) {
                System.out.printf("%s", consumerRunningInfo.formatString());
            }
        }
    } catch (Exception e) {
        throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
    } finally {
        defaultMQAdminExt.shutdown();
    }
}
From source file:com.serphacker.serposcope.db.google.GoogleSerpRescanDB.java
public void rescan(Integer specificRunId, Collection<GoogleTarget> targets, Collection<GoogleSearch> searches,
        boolean updateSummary) {
    LOG.debug("SERP rescan (bulk) : starting");
    long _start = System.currentTimeMillis();
    Map<Integer, Integer> searchCountByGroup = searchDB.countByGroup();

    Run specPrevRun = null;
    Map<Integer, GoogleTargetSummary> specPrevRunSummaryByTarget = new HashMap<>();

    if (specificRunId != null) {
        specPrevRun = runDB.findPrevious(specificRunId);
        if (specPrevRun != null) {
            specPrevRunSummaryByTarget = targetSummaryDB.list(specPrevRun.getId()).stream()
                    .collect(Collectors.toMap(GoogleTargetSummary::getTargetId, Function.identity()));
        }
    }

    List<GoogleRank> ranks = new ArrayList<>();
    for (GoogleTarget target : targets) {

        Map<Integer, GoogleTargetSummary> summaryByRunId = new HashMap<>();
        GoogleTargetSummary specificPreviousSummary = specPrevRunSummaryByTarget.get(target.getId());
        if (specificPreviousSummary != null) {
            summaryByRunId.put(specPrevRun.getId(), specificPreviousSummary);
        }

        for (GoogleSearch search : searches) {
            final MutableInt previousRunId = new MutableInt(0);
            final MutableInt previousRank = new MutableInt(GoogleRank.UNRANKED);
            GoogleBest searchBest = new GoogleBest(target.getGroupId(), target.getId(), search.getId(),
                    GoogleRank.UNRANKED, null, null);

            if (specPrevRun != null) {
                previousRunId.setValue(specPrevRun.getId());
                previousRank.setValue(
                        rankDB.get(specPrevRun.getId(), target.getGroupId(), target.getId(), search.getId()));
                GoogleBest specificBest = rankDB.getBest(target.getGroupId(), target.getId(), search.getId());
                if (specificBest != null) {
                    searchBest = specificBest;
                }
            }
            final GoogleBest best = searchBest;

            serpDB.stream(specificRunId, specificRunId, search.getId(), (GoogleSerp res) -> {
                int rank = GoogleRank.UNRANKED;
                String rankedUrl = null;
                for (int i = 0; i < res.getEntries().size(); i++) {
                    if (target.match(res.getEntries().get(i).getUrl())) {
                        rankedUrl = res.getEntries().get(i).getUrl();
                        rank = i + 1;
                        break;
                    }
                }

                // only update last run
                GoogleRank gRank = new GoogleRank(res.getRunId(), target.getGroupId(), target.getId(),
                        search.getId(), rank, previousRank.shortValue(), rankedUrl);
                ranks.add(gRank);
                if (ranks.size() > 2000) {
                    rankDB.insert(ranks);
                    ranks.clear();
                }

                if (updateSummary) {
                    GoogleTargetSummary summary = summaryByRunId.get(res.getRunId());
                    if (summary == null) {
                        summaryByRunId.put(res.getRunId(), summary = new GoogleTargetSummary(target.getGroupId(),
                                target.getId(), res.getRunId(), 0));
                    }
                    summary.addRankCandidat(gRank);
                }

                if (rank != GoogleRank.UNRANKED && rank <= best.getRank()) {
                    best.setRank((short) rank);
                    best.setUrl(rankedUrl);
                    best.setRunDay(res.getRunDay());
                }

                previousRunId.setValue(res.getRunId());
                previousRank.setValue(rank);
            });

            if (best.getRank() != GoogleRank.UNRANKED) {
                rankDB.insertBest(best);
            }
        }

        // fill previous summary score
        if (updateSummary) {
            TreeMap<Integer, GoogleTargetSummary> summaries = new TreeMap<>(summaryByRunId);

            GoogleTargetSummary previousSummary = null;
            for (Map.Entry<Integer, GoogleTargetSummary> entry : summaries.entrySet()) {
                GoogleTargetSummary summary = entry.getValue();
                summary.computeScoreBP(searchCountByGroup.getOrDefault(summary.getGroupId(), 0));
                if (previousSummary != null) {
                    summary.setPreviousScoreBP(previousSummary.getScoreBP());
                }
                previousSummary = summary;
            }

            if (specPrevRun != null) {
                summaries.remove(specPrevRun.getId());
            }

            if (!summaries.isEmpty()) {
                targetSummaryDB.insert(summaries.values());
            }
        }
    }

    if (!ranks.isEmpty()) {
        rankDB.insert(ranks);
        ranks.clear();
    }

    LOG.debug("SERP rescan : done, duration = {}",
            DurationFormatUtils.formatDurationHMS(System.currentTimeMillis() - _start));
}
From source file:ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition.java
@SuppressWarnings("unchecked") private void scanCompositeElementForChildren(Set<String> elementNames, TreeMap<Integer, BaseRuntimeDeclaredChildDefinition> theOrderToElementDef, TreeMap<Integer, BaseRuntimeDeclaredChildDefinition> theOrderToExtensionDef) { int baseElementOrder = 0; for (ScannedField next : myScannedFields) { if (next.isFirstFieldInNewClass()) { baseElementOrder = theOrderToElementDef.isEmpty() ? 0 : theOrderToElementDef.lastEntry().getKey() + 1; }/*from w ww .ja v a2 s . co m*/ Class<?> declaringClass = next.getField().getDeclaringClass(); Description descriptionAnnotation = ModelScanner.pullAnnotation(next.getField(), Description.class); TreeMap<Integer, BaseRuntimeDeclaredChildDefinition> orderMap = theOrderToElementDef; Extension extensionAttr = ModelScanner.pullAnnotation(next.getField(), Extension.class); if (extensionAttr != null) { orderMap = theOrderToExtensionDef; } Child childAnnotation = next.getChildAnnotation(); Field nextField = next.getField(); String elementName = childAnnotation.name(); int order = childAnnotation.order(); boolean childIsChoiceType = false; boolean orderIsReplaceParent = false; if (order == Child.REPLACE_PARENT) { if (extensionAttr != null) { for (Entry<Integer, BaseRuntimeDeclaredChildDefinition> nextEntry : orderMap.entrySet()) { BaseRuntimeDeclaredChildDefinition nextDef = nextEntry.getValue(); if (nextDef instanceof RuntimeChildDeclaredExtensionDefinition) { if (nextDef.getExtensionUrl().equals(extensionAttr.url())) { orderIsReplaceParent = true; order = nextEntry.getKey(); orderMap.remove(nextEntry.getKey()); elementNames.remove(elementName); break; } } } if (order == Child.REPLACE_PARENT) { throw new ConfigurationException("Field " + nextField.getName() + "' on target type " + declaringClass.getSimpleName() + " has order() of REPLACE_PARENT (" + Child.REPLACE_PARENT + ") but no parent element with extension URL " + extensionAttr.url() + " could be found on type " + nextField.getDeclaringClass().getSimpleName()); } } else { for (Entry<Integer, BaseRuntimeDeclaredChildDefinition> nextEntry : orderMap.entrySet()) { BaseRuntimeDeclaredChildDefinition nextDef = nextEntry.getValue(); if (elementName.equals(nextDef.getElementName())) { orderIsReplaceParent = true; order = nextEntry.getKey(); BaseRuntimeDeclaredChildDefinition existing = orderMap.remove(nextEntry.getKey()); elementNames.remove(elementName); /* * See #350 - If the original field (in the superclass) with the given name is a choice, then we need to make sure * that the field which replaces is a choice even if it's only a choice of one type - this is because the * element name when serialized still needs to reflect the datatype */ if (existing instanceof RuntimeChildChoiceDefinition) { childIsChoiceType = true; } break; } } if (order == Child.REPLACE_PARENT) { throw new ConfigurationException("Field " + nextField.getName() + "' on target type " + declaringClass.getSimpleName() + " has order() of REPLACE_PARENT (" + Child.REPLACE_PARENT + ") but no parent element with name " + elementName + " could be found on type " + nextField.getDeclaringClass().getSimpleName()); } } } if (order < 0 && order != Child.ORDER_UNKNOWN) { throw new ConfigurationException("Invalid order '" + order + "' on @Child for field '" + nextField.getName() + "' on target type: " + declaringClass); } if (order != Child.ORDER_UNKNOWN && !orderIsReplaceParent) { order = order + baseElementOrder; } // int min = childAnnotation.min(); // int max = childAnnotation.max(); /* * Anything that's marked as unknown is 
given a new ID that is <0 so that it doesn't conflict with any given IDs and can be figured out later */ if (order == Child.ORDER_UNKNOWN) { order = Integer.valueOf(0); while (orderMap.containsKey(order)) { order++; } } List<Class<? extends IBase>> choiceTypes = next.getChoiceTypes(); if (orderMap.containsKey(order)) { throw new ConfigurationException("Detected duplicate field order '" + childAnnotation.order() + "' for element named '" + elementName + "' in type '" + declaringClass.getCanonicalName() + "' - Already had: " + orderMap.get(order).getElementName()); } if (elementNames.contains(elementName)) { throw new ConfigurationException("Detected duplicate field name '" + elementName + "' in type '" + declaringClass.getCanonicalName() + "'"); } Class<?> nextElementType = next.getElementType(); BaseRuntimeDeclaredChildDefinition def; if (childAnnotation.name().equals("extension") && IBaseExtension.class.isAssignableFrom(nextElementType)) { def = new RuntimeChildExtension(nextField, childAnnotation.name(), childAnnotation, descriptionAnnotation); } else if (childAnnotation.name().equals("modifierExtension") && IBaseExtension.class.isAssignableFrom(nextElementType)) { def = new RuntimeChildExtension(nextField, childAnnotation.name(), childAnnotation, descriptionAnnotation); } else if (BaseContainedDt.class.isAssignableFrom(nextElementType) || (childAnnotation.name().equals("contained") && IBaseResource.class.isAssignableFrom(nextElementType))) { /* * Child is contained resources */ def = new RuntimeChildContainedResources(nextField, childAnnotation, descriptionAnnotation, elementName); } else if (IAnyResource.class.isAssignableFrom(nextElementType) || IResource.class.equals(nextElementType)) { /* * Child is a resource as a direct child, as in Bundle.entry.resource */ def = new RuntimeChildDirectResource(nextField, childAnnotation, descriptionAnnotation, elementName); } else { childIsChoiceType |= choiceTypes.size() > 1; if (childIsChoiceType && !BaseResourceReferenceDt.class.isAssignableFrom(nextElementType) && !IBaseReference.class.isAssignableFrom(nextElementType)) { def = new RuntimeChildChoiceDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, choiceTypes); } else if (extensionAttr != null) { /* * Child is an extension */ Class<? extends IBase> et = (Class<? extends IBase>) nextElementType; Object binder = null; if (BoundCodeDt.class.isAssignableFrom(nextElementType) || IBoundCodeableConcept.class.isAssignableFrom(nextElementType)) { binder = ModelScanner.getBoundCodeBinder(nextField); } def = new RuntimeChildDeclaredExtensionDefinition(nextField, childAnnotation, descriptionAnnotation, extensionAttr, elementName, extensionAttr.url(), et, binder); if (IBaseEnumeration.class.isAssignableFrom(nextElementType)) { ((RuntimeChildDeclaredExtensionDefinition) def).setEnumerationType( ReflectionUtil.getGenericCollectionTypeOfFieldWithSecondOrderForList(nextField)); } } else if (BaseResourceReferenceDt.class.isAssignableFrom(nextElementType) || IBaseReference.class.isAssignableFrom(nextElementType)) { /* * Child is a resource reference */ List<Class<? extends IBaseResource>> refTypesList = new ArrayList<Class<? extends IBaseResource>>(); for (Class<? extends IElement> nextType : childAnnotation.type()) { if (IBaseReference.class.isAssignableFrom(nextType)) { refTypesList.add(myContext.getVersion().getVersion().isRi() ? 
IAnyResource.class : IResource.class); continue; } else if (IBaseResource.class.isAssignableFrom(nextType) == false) { throw new ConfigurationException("Field '" + nextField.getName() + "' in class '" + nextField.getDeclaringClass().getCanonicalName() + "' is of type " + BaseResourceReferenceDt.class + " but contains a non-resource type: " + nextType.getCanonicalName()); } refTypesList.add((Class<? extends IBaseResource>) nextType); } def = new RuntimeChildResourceDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, refTypesList); } else if (IResourceBlock.class.isAssignableFrom(nextElementType) || IBaseBackboneElement.class.isAssignableFrom(nextElementType) || IBaseDatatypeElement.class.isAssignableFrom(nextElementType)) { /* * Child is a resource block (i.e. a sub-tag within a resource) TODO: do these have a better name according to HL7? */ Class<? extends IBase> blockDef = (Class<? extends IBase>) nextElementType; def = new RuntimeChildResourceBlockDefinition(myContext, nextField, childAnnotation, descriptionAnnotation, elementName, blockDef); } else if (IDatatype.class.equals(nextElementType) || IElement.class.equals(nextElementType) || "Type".equals(nextElementType.getSimpleName()) || IBaseDatatype.class.equals(nextElementType)) { def = new RuntimeChildAny(nextField, elementName, childAnnotation, descriptionAnnotation); } else if (IDatatype.class.isAssignableFrom(nextElementType) || IPrimitiveType.class.isAssignableFrom(nextElementType) || ICompositeType.class.isAssignableFrom(nextElementType) || IBaseDatatype.class.isAssignableFrom(nextElementType) || IBaseExtension.class.isAssignableFrom(nextElementType)) { Class<? extends IBase> nextDatatype = (Class<? extends IBase>) nextElementType; if (IPrimitiveType.class.isAssignableFrom(nextElementType)) { if (nextElementType.equals(BoundCodeDt.class)) { IValueSetEnumBinder<Enum<?>> binder = ModelScanner.getBoundCodeBinder(nextField); Class<? extends Enum<?>> enumType = ModelScanner .determineEnumTypeForBoundField(nextField); def = new RuntimeChildPrimitiveBoundCodeDatatypeDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, nextDatatype, binder, enumType); } else if (IBaseEnumeration.class.isAssignableFrom(nextElementType)) { Class<? extends Enum<?>> binderType = ModelScanner .determineEnumTypeForBoundField(nextField); def = new RuntimeChildPrimitiveEnumerationDatatypeDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, nextDatatype, binderType); } else { def = new RuntimeChildPrimitiveDatatypeDefinition(nextField, elementName, descriptionAnnotation, childAnnotation, nextDatatype); } } else { if (IBoundCodeableConcept.class.isAssignableFrom(nextElementType)) { IValueSetEnumBinder<Enum<?>> binder = ModelScanner.getBoundCodeBinder(nextField); Class<? 
extends Enum<?>> enumType = ModelScanner .determineEnumTypeForBoundField(nextField); def = new RuntimeChildCompositeBoundDatatypeDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, nextDatatype, binder, enumType); } else if (BaseNarrativeDt.class.isAssignableFrom(nextElementType) || INarrative.class.isAssignableFrom(nextElementType)) { def = new RuntimeChildNarrativeDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, nextDatatype); } else { def = new RuntimeChildCompositeDatatypeDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, nextDatatype); } } } else { throw new ConfigurationException( "Field '" + elementName + "' in type '" + declaringClass.getCanonicalName() + "' is not a valid child type: " + nextElementType); } Binding bindingAnnotation = ModelScanner.pullAnnotation(nextField, Binding.class); if (bindingAnnotation != null) { if (isNotBlank(bindingAnnotation.valueSet())) { def.setBindingValueSet(bindingAnnotation.valueSet()); } } } orderMap.put(order, def); elementNames.add(elementName); } }