Usage examples for java.util.HashSet.addAll
boolean addAll(Collection<? extends E> c);
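Before the collected examples below, here is a minimal, self-contained sketch of the method's basic contract (the class name and values are invented purely for illustration): addAll copies every element of the argument collection into the set, silently skips elements that are already present, and returns true only if the set actually changed.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class HashSetAddAllDemo {
    public static void main(String[] args) {
        HashSet<String> colors = new HashSet<>(Arrays.asList("red", "green"));
        List<String> more = Arrays.asList("green", "blue");

        // Union: the duplicate "green" is ignored; returns true because "blue" was added
        boolean changed = colors.addAll(more);
        System.out.println(changed); // true
        System.out.println(colors);  // e.g. [red, green, blue] (iteration order is not guaranteed)

        // A second call with already-present elements leaves the set unchanged and returns false
        System.out.println(colors.addAll(more)); // false
    }
}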
From source file:amie.keys.CSAKey.java
public void discoverConditionalKeysForCondition(Graph newGraph, Graph graph, HashSet<Node> candidateKeys,
        Rule conditionRule, Set<Rule> output) {
    HashSet<Node> newCandidateKeys = new HashSet<>();
    for (Node candidateKey : candidateKeys) {
        if (candidateKey.toExplore) {
            List<String> properties = candidateKey.mapToString(id2Property);
            Rule amieRule = buildAMIERule(properties, conditionRule);
            boolean isConditionalKey = isConditionaKey(amieRule);
            if (amieRule.getSupport() >= support && !isConditionalKey) {
                if (!newGraph.graph.containsKey(candidateKey)) {
                    Node newCandidateKey = candidateKey.clone();
                    HashSet<Node> children = new HashSet<>();
                    newGraph.graph.put(newCandidateKey, children);
                    newGraph.nodes.put(newCandidateKey, newCandidateKey);
                    newCandidateKeys.add(newCandidateKey);
                } else {
                    HashSet<Node> children = new HashSet<>();
                    newGraph.graph.put(candidateKey, children);
                    newCandidateKeys.add(candidateKey);
                }
            }
            // If the rule is a conditional key above the support threshold
            // and no simpler key has already been discovered, then output it.
            if (isConditionalKey && amieRule.getSupport() >= support
                    && !isSubsumedByKey(amieRule, conditionRule, conditions2Keys)) {
                if (!newGraph.graph.containsKey(candidateKey)) {
                    Node newCandidateKey = candidateKey.clone();
                    synchronized (output) {
                        output.add(amieRule);
                    }
                    System.out.println(Utilities.formatKey(amieRule));
                    conditions2Keys.put(conditionRule, amieRule);
                    newCandidateKey.toExplore = false;
                    HashSet<Node> children = new HashSet<>();
                    newGraph.graph.put(newCandidateKey, children);
                    newGraph.nodes.put(newCandidateKey, newCandidateKey);
                    newCandidateKeys.add(newCandidateKey);
                } else {
                    synchronized (output) {
                        output.add(amieRule);
                    }
                    System.out.println(Utilities.formatKey(amieRule));
                    conditions2Keys.put(conditionRule, amieRule);
                    candidateKey.toExplore = false;
                    HashSet<Node> children = new HashSet<>();
                    newGraph.graph.put(candidateKey, children);
                    newGraph.nodes.put(candidateKey, candidateKey);
                    newCandidateKeys.add(candidateKey);
                }
            }
        } else {
            newCandidateKeys.add(candidateKey);
        }
    }
    // Create children: merge every pair of explorable candidates into a larger candidate key
    HashSet<Node> allChildren = new HashSet<>();
    for (Node parent1 : newCandidateKeys) {
        for (Node parent2 : newCandidateKeys) {
            if (parent1 != parent2 && parent1.toExplore && parent2.toExplore) {
                HashSet<Integer> newSet = new HashSet<>();
                newSet.addAll(parent1.set);
                newSet.addAll(parent2.set);
                HashSet<Integer> condProp_KeyProp = new HashSet<>();
                condProp_KeyProp.addAll(newSet);
                condProp_KeyProp.addAll(getRelations(conditionRule, property2Id));
                if ((newSet.size() == parent1.set.size() + 1)
                        && getSupport(newSet, conditionRule, (int) support)
                        && Graph.containsASuperSetOf(nonKeysInt, condProp_KeyProp) != -1) {
                    Node child = new Node(newSet);
                    if (hasFalseParent(newSet, newCandidateKeys)) {
                        child.toExplore = false;
                    }
                    HashSet<Node> children1 = newGraph.graph.get(parent1);
                    children1.add(child);
                    newGraph.graph.put(parent1, children1);
                    newGraph.nodes.put(child, child);
                    HashSet<Node> grandChildren = new HashSet<>();
                    newGraph.graph.put(child, grandChildren);
                    HashSet<Node> children2 = newGraph.graph.get(parent2);
                    children2.add(child);
                    newGraph.graph.put(parent2, children2);
                    allChildren.add(child);
                }
            }
        }
    }
    if (!allChildren.isEmpty()) {
        discoverConditionalKeysForCondition(newGraph, newGraph, allChildren, conditionRule, output);
    }
}
From source file:io.hops.hopsworks.api.zeppelin.rest.NotebookRestApi.java
/**
 * Search for notes with permissions.
 */
@GET
@Path("search")
public Response search(@QueryParam("q") String queryTerm) {
    LOG.info("Searching notes for: {}", queryTerm);
    String principal = SecurityUtils.getPrincipal();
    HashSet<String> roles = SecurityUtils.getRoles();
    HashSet<String> userAndRoles = new HashSet<>();
    userAndRoles.add(principal);
    userAndRoles.addAll(roles);
    List<Map<String, String>> notesFound = noteSearchService.query(queryTerm);
    for (int i = 0; i < notesFound.size(); i++) {
        String[] Id = notesFound.get(i).get("id").split("/", 2);
        String noteId = Id[0];
        if (!notebookAuthorization.isOwner(noteId, userAndRoles)
                && !notebookAuthorization.isReader(noteId, userAndRoles)
                && !notebookAuthorization.isWriter(noteId, userAndRoles)) {
            notesFound.remove(i);
            i--;
        }
    }
    LOG.info("{} notes found", notesFound.size());
    return new JsonResponse<>(Status.OK, notesFound).build();
}
From source file:com.espertech.esper.epl.named.NamedWindowRootViewInstance.java
/**
 * Return a snapshot using index lookup filters.
 * @param optionalFilter filter to use for the index lookup
 * @return events
 */
public Collection<EventBean> snapshot(FilterSpecCompiled optionalFilter, Annotation[] annotations) {
    // Determine virtual data window
    VirtualDWView virtualDataWindow = null;
    if (isVirtualDataWindow()) {
        virtualDataWindow = getVirtualDataWindow();
    }
    if (optionalFilter == null || optionalFilter.getParameters().length == 0) {
        if (virtualDataWindow != null) {
            Pair<IndexMultiKey, EventTable> pair = virtualDataWindow
                    .getFireAndForgetDesc(Collections.<String>emptySet(), Collections.<String>emptySet());
            return virtualDataWindow.getFireAndForgetData(pair.getSecond(), new Object[0],
                    new RangeIndexLookupValue[0], annotations);
        }
        return null;
    }

    // Determine which straight-equals keys and which ranges are available.
    // Widening/coercion is part of filter spec compile.
    Set<String> keysAvailable = new HashSet<String>();
    Set<String> rangesAvailable = new HashSet<String>();
    for (FilterSpecParam param : optionalFilter.getParameters()) {
        if (!(param instanceof FilterSpecParamConstant || param instanceof FilterSpecParamRange
                || param instanceof FilterSpecParamIn)) {
            continue;
        }
        if (param.getFilterOperator() == FilterOperator.EQUAL
                || param.getFilterOperator() == FilterOperator.IS
                || param.getFilterOperator() == FilterOperator.IN_LIST_OF_VALUES) {
            keysAvailable.add(param.getLookupable().getExpression());
        } else if (param.getFilterOperator().isRangeOperator()
                || param.getFilterOperator().isInvertedRangeOperator()
                || param.getFilterOperator().isComparisonOperator()) {
            rangesAvailable.add(param.getLookupable().getExpression());
        } else if (param.getFilterOperator().isRangeOperator()) {
            rangesAvailable.add(param.getLookupable().getExpression());
        }
    }

    // Find an index that matches the needs
    Pair<IndexMultiKey, EventTableAndNamePair> tablePair;
    if (virtualDataWindow != null) {
        Pair<IndexMultiKey, EventTable> tablePairNoName = virtualDataWindow.getFireAndForgetDesc(keysAvailable,
                rangesAvailable);
        tablePair = new Pair<IndexMultiKey, EventTableAndNamePair>(tablePairNoName.getFirst(),
                new EventTableAndNamePair(tablePairNoName.getSecond(), null));
    } else {
        IndexHint indexHint = IndexHint.getIndexHint(annotations);
        tablePair = indexRepository.findTable(keysAvailable, rangesAvailable, explicitIndexes, indexHint);
    }

    if (rootView.isQueryPlanLogging() && rootView.getQueryPlanLog().isInfoEnabled()) {
        String prefix = "Fire-and-forget from window " + rootView.getEventType().getName() + " ";
        String indexName = tablePair != null && tablePair.getSecond() != null
                ? tablePair.getSecond().getIndexName() : null;
        String indexText = indexName != null ? "index " + indexName + " " : "full table scan ";
        indexText += "(snapshot only, for join see separate query plan)";
        if (tablePair == null) {
            rootView.getQueryPlanLog().info(prefix + indexText);
        } else {
            rootView.getQueryPlanLog()
                    .info(prefix + indexText + tablePair.getSecond().getEventTable().toQueryPlan());
        }
        QueryPlanIndexHook hook = QueryPlanIndexHookUtil.getHook(annotations);
        if (hook != null) {
            hook.fireAndForget(new QueryPlanIndexDescFAF(indexName,
                    tablePair != null ? tablePair.getSecond().getEventTable().getClass().getSimpleName() : null));
        }
    }

    if (tablePair == null) {
        return null; // indicates table scan
    }

    // Compile key sets which contain key index lookup values
    String[] keyIndexProps = IndexedPropDesc.getIndexProperties(tablePair.getFirst().getHashIndexedProps());
    boolean hasKeyWithInClause = false;
    Object[] keyValues = new Object[keyIndexProps.length];
    for (int keyIndex = 0; keyIndex < keyIndexProps.length; keyIndex++) {
        for (FilterSpecParam param : optionalFilter.getParameters()) {
            if (param.getLookupable().getExpression().equals(keyIndexProps[keyIndex])) {
                if (param.getFilterOperator() == FilterOperator.IN_LIST_OF_VALUES) {
                    Object[] keyValuesList = ((MultiKeyUntyped) param.getFilterValue(null, agentInstanceContext))
                            .getKeys();
                    if (keyValuesList.length == 0) {
                        continue;
                    } else if (keyValuesList.length == 1) {
                        keyValues[keyIndex] = keyValuesList[0];
                    } else {
                        keyValues[keyIndex] = keyValuesList;
                        hasKeyWithInClause = true;
                    }
                } else {
                    keyValues[keyIndex] = param.getFilterValue(null, agentInstanceContext);
                }
                break;
            }
        }
    }

    // Analyze ranges - these may include key lookup values (EQUALS semantics)
    String[] rangeIndexProps = IndexedPropDesc.getIndexProperties(tablePair.getFirst().getRangeIndexedProps());
    RangeIndexLookupValue[] rangeValues;
    if (rangeIndexProps.length > 0) {
        rangeValues = compileRangeLookupValues(rangeIndexProps, optionalFilter.getParameters());
    } else {
        rangeValues = new RangeIndexLookupValue[0];
    }

    EventTable eventTable = tablePair.getSecond().getEventTable();
    IndexMultiKey indexMultiKey = tablePair.getFirst();

    // Table lookup without in-clause
    if (!hasKeyWithInClause) {
        return fafTableLookup(virtualDataWindow, indexMultiKey, eventTable, keyValues, rangeValues, annotations);
    }

    // Table lookup with in-clause: determine combinations
    Object[][] combinations = new Object[keyIndexProps.length][];
    for (int i = 0; i < keyValues.length; i++) {
        if (keyValues[i] instanceof Object[]) {
            combinations[i] = (Object[]) keyValues[i];
        } else {
            combinations[i] = new Object[] { keyValues[i] };
        }
    }

    // Enumerate combinations and collect the union of all lookup results
    CombinationEnumeration enumeration = new CombinationEnumeration(combinations);
    HashSet<EventBean> events = new HashSet<EventBean>();
    while (enumeration.hasMoreElements()) {
        Object[] keys = enumeration.nextElement();
        Collection<EventBean> result = fafTableLookup(virtualDataWindow, indexMultiKey, eventTable, keys,
                rangeValues, annotations);
        events.addAll(result);
    }
    return events;
}
From source file:org.apache.maven.plugin.cxx.CppCheckMojo.java
@Override
protected String getCommandArgs() {
    String params = commandArgs + " ";
    HashSet<String> excudedSet = new HashSet<String>();
    Iterator it = includeDirs.iterator();
    while (it.hasNext()) {
        FileSet afileSet = new FileSet();
        String dir = it.next().toString();
        params += "-I\"" + dir + "\" ";
        if (StringUtils.isNotEmpty(excludes)) {
            afileSet.setDirectory(new File(dir).getAbsolutePath());
            // $FB to avoid collecting TOO many (useless) excluded files in the for loop below
            afileSet.setUseDefaultExcludes(false);
            afileSet.setExcludes(Arrays.asList(excludes.split(",")));
            getLog().debug("cppcheck excludes are :" + Arrays.toString(afileSet.getExcludes().toArray()));
            FileSetManager aFileSetManager = new FileSetManager();
            String[] found = aFileSetManager.getExcludedFiles(afileSet);
            excudedSet.addAll(new HashSet<String>(Arrays.asList(found)));
        }
    }
    it = sourceDirs.iterator();
    while (it.hasNext()) {
        FileSet afileSet = new FileSet();
        String dir = it.next().toString();
        params += "-I\"" + dir + "\" ";
        if (StringUtils.isNotEmpty(excludes)) {
            afileSet.setDirectory(new File(dir).getAbsolutePath());
            // $FB to avoid collecting TOO many (useless) excluded files in the for loop below
            afileSet.setUseDefaultExcludes(false);
            afileSet.setExcludes(Arrays.asList(excludes.split(",")));
            getLog().debug("cppcheck excludes are :" + Arrays.toString(afileSet.getExcludes().toArray()));
            FileSetManager aFileSetManager = new FileSetManager();
            String[] found = aFileSetManager.getExcludedFiles(afileSet);
            excudedSet.addAll(new HashSet<String>(Arrays.asList(found)));
        }
    }
    for (Iterator<String> iter = excudedSet.iterator(); iter.hasNext();) {
        String s = iter.next();
        // cppcheck only checks *.cpp, *.cxx, *.cc, *.c++, *.c, *.tpp, and *.txx files,
        // so remove unneeded exclusions
        if (s.matches("(.+\\.cpp)|(.+\\.cxx)|(.+\\.cc)|(.+\\.c\\+\\+)|(.+\\.c)|(.+\\.tpp)|(.+\\.txx)")) {
            params += "-i\"" + s + "\" ";
        }
    }
    it = sourceDirs.iterator();
    while (it.hasNext()) {
        params += "\"" + it.next() + "\" ";
    }
    return params;
}
From source file:io.hops.hopsworks.api.zeppelin.rest.NotebookRestApi.java
/**
 * Set note authorization information.
 */
@PUT
@Path("{noteId}/permissions")
public Response putNotePermissions(@PathParam("noteId") String noteId, String req) throws IOException {
    String principal = SecurityUtils.getPrincipal();
    HashSet<String> roles = SecurityUtils.getRoles();
    HashSet<String> userAndRoles = new HashSet<>();
    userAndRoles.add(principal);
    userAndRoles.addAll(roles);
    checkIfUserIsAnon(getBlockNotAuthenticatedUserErrorMsg());
    checkIfUserIsOwner(noteId, ownerPermissionError(userAndRoles, notebookAuthorization.getOwners(noteId)));
    HashMap<String, HashSet<String>> permMap = gson.fromJson(req,
            new TypeToken<HashMap<String, HashSet<String>>>() {
            }.getType());
    Note note = notebook.getNote(noteId);
    LOG.info("Set permissions {} {} {} {} {}", noteId, principal, permMap.get("owners"), permMap.get("readers"),
            permMap.get("writers"));
    HashSet<String> readers = permMap.get("readers");
    HashSet<String> owners = permMap.get("owners");
    HashSet<String> writers = permMap.get("writers");
    // If readers were set but owners is empty, default owners to the user requesting the change
    if (readers != null && !readers.isEmpty()) {
        if (owners.isEmpty()) {
            owners = Sets.newHashSet(SecurityUtils.getPrincipal());
        }
    }
    // If writers were set but owners is empty, default owners to the user requesting the change
    if (writers != null && !writers.isEmpty()) {
        if (owners.isEmpty()) {
            owners = Sets.newHashSet(SecurityUtils.getPrincipal());
        }
    }
    notebookAuthorization.setReaders(noteId, readers);
    notebookAuthorization.setWriters(noteId, writers);
    notebookAuthorization.setOwners(noteId, owners);
    LOG.debug("After set permissions {} {} {}", notebookAuthorization.getOwners(noteId),
            notebookAuthorization.getReaders(noteId), notebookAuthorization.getWriters(noteId));
    AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
    note.persist(subject);
    notebookServer.broadcastNote(note);
    notebookServer.broadcastNoteList(subject, userAndRoles);
    return new JsonResponse<>(Status.OK).build();
}
From source file:com.milaboratory.core.tree.SequenceTreeMapTest.java
@Test
public void testNIterator() throws Exception {
    SequenceTreeMap<NucleotideSequence, Integer> map = new SequenceTreeMap<>(NucleotideSequence.ALPHABET);
    assertNull(map.put(new NucleotideSequence("attagaca"), 1)); // 1 mismatch
    assertNull(map.put(new NucleotideSequence("attacaca"), 2)); // match
    assertNull(map.put(new NucleotideSequence("ataacaca"), 3)); // 1 mismatch
    assertNull(map.put(new NucleotideSequence("attcgtca"), 4)); // many mismatches
    assertNull(map.put(new NucleotideSequence("atttacaca"), 5)); // 1 insertion in stretch
    assertNull(map.put(new NucleotideSequence("atacaca"), 6)); // 1 deletion in the "t" stretch
    assertNull(map.put(new NucleotideSequence("attacacta"), 7)); // 1 insertion
    assertNull(map.put(new NucleotideSequence("attcaca"), 8)); // 1 deletion
    assertNull(map.put(new NucleotideSequence("attacac"), 9)); // 1 deletion at the end
    assertNull(map.put(new NucleotideSequence("ttacaca"), 10)); // 1 deletion at the beginning
    assertNull(map.put(new NucleotideSequence("tattacaca"), 11)); // 1 insertion at the beginning
    assertNull(map.put(new NucleotideSequence("attacacat"), 12)); // 1 insertion at the end
    assertNull(map.put(new NucleotideSequence("attacact"), 13)); // 1 mismatch at the end
    assertNull(map.put(new NucleotideSequence("tttacaca"), 14)); // 1 mismatch at the beginning

    NucleotideSequence reference = new NucleotideSequence("attacaca");
    SequenceTreeMap.Node<Integer> node;
    HashSet<Integer>[] allAsserts = new HashSet[3];
    allAsserts[0] = new HashSet<>(Arrays.asList(1, 3, 13, 14));
    allAsserts[1] = new HashSet<>(Arrays.asList(6, 8, 9, 10));
    allAsserts[2] = new HashSet<>(Arrays.asList(5, 7, 11, 12));
    for (int i = 0; i < 8; ++i) {
        double lastPenalty = -1.0;
        HashSet<Integer> asserts = new HashSet<>();
        asserts.add(2);
        int[] maxMut = new int[3];
        for (int j = 0; j < 3; ++j) {
            if (((0x1 << j) & i) != 0) {
                maxMut[j] = 1;
                asserts.addAll(allAsserts[j]);
            }
        }
        HashSet<Integer> asserts1 = new HashSet<>(asserts);
        NeighborhoodIterator ni = map.getNeighborhoodIterator(reference, 0.5,
                new double[] { 0.31, 0.301, 0.3001 }, maxMut, null);
        while ((node = ni.nextNode()) != null) {
            assertTrue(lastPenalty <= ni.getPenalty());
            lastPenalty = ni.getPenalty();
            asserts.remove(node.object);
            assertTrue(asserts1.contains(node.object));
        }
        assertTrue(asserts.isEmpty());
    }
}
From source file:de.ks.flatadocdb.session.Session.java
private Set<SessionEntry> handleRenames() {
    Set<SessionEntry> renamed = entriesById.values().stream()//
            .filter(e -> !e.isChild())//
            .filter(e -> {
                EntityDescriptor entityDescriptor = e.getEntityDescriptor();
                String newFileName = entityDescriptor.getFileGenerator().getFileName(repository,
                        entityDescriptor, e.object);
                return !newFileName.equals(e.getFileName());
            }).collect(Collectors.toSet());

    HashSet<Object> processed = new HashSet<>();
    for (SessionEntry sessionEntry : renamed) {
        boolean alreadyProcessed = processed.contains(sessionEntry.getObject());
        if (!alreadyProcessed) {
            removeSessionEntry(sessionEntry, sessionEntry.getObject(), processed);
            persist(sessionEntry.getObject());

            Set<Relation> allRelations = sessionEntry.getEntityDescriptor().getChildRelations();
            for (Relation child : allRelations) {
                Collection<Object> related = child.getRelatedEntities(sessionEntry.getObject());
                processed.addAll(related);
            }
            processed.add(sessionEntry.getObject());
        }
    }
    return renamed;
}
From source file:org.briljantframework.data.vector.AbstractVector.java
protected <T> Vector combineVectors(Class<? extends T> cls, Vector other,
        BiFunction<? super T, ? super T, ?> combiner, Builder builder) {
    Index thisIndex = getIndex();
    Index otherIndex = Objects.requireNonNull(other, "require other vector").getIndex();
    if (otherIndex instanceof IntIndex) {
        int size = Math.min(size(), other.size());
        for (int i = 0; i < size; i++) {
            builder.set(thisIndex.get(i), combiner.apply(loc().get(cls, i), other.loc().get(cls, i)));
        }
    } else {
        HashSet<Object> keys = new HashSet<>();
        keys.addAll(thisIndex.keySet());
        keys.addAll(otherIndex.keySet());
        for (Object key : keys) {
            boolean thisIndexContainsKey = thisIndex.contains(key);
            boolean otherIndexContainsKey = otherIndex.contains(key);
            if (thisIndexContainsKey && otherIndexContainsKey) {
                builder.set(key, combiner.apply(get(cls, key), other.get(cls, key)));
            } else if (thisIndexContainsKey) {
                builder.set(key, this, key);
            } else {
                builder.set(key, other, key);
            }
        }
    }
    return builder.build();
}
From source file:org.compass.core.lucene.engine.store.DefaultLuceneSearchEngineStore.java
public String[] internalCalcSubIndexes(String[] subIndexes, String[] aliases, Class[] types, boolean poly) {
    if (aliases == null && types == null) {
        return calcSubIndexes(subIndexes, aliases);
    }
    HashSet<String> aliasesSet = new HashSet<String>();
    if (aliases != null) {
        for (String alias : aliases) {
            ResourceMapping resourceMapping = mapping.getRootMappingByAlias(alias);
            if (resourceMapping == null) {
                throw new IllegalArgumentException("No root mapping found for alias [" + alias + "]");
            }
            aliasesSet.add(resourceMapping.getAlias());
            if (poly) {
                aliasesSet.addAll(Arrays.asList(resourceMapping.getExtendingAliases()));
            }
        }
    }
    if (types != null) {
        for (Class type : types) {
            ResourceMapping resourceMapping = mapping.getRootMappingByClass(type);
            if (resourceMapping == null) {
                throw new IllegalArgumentException("No root mapping found for class [" + type + "]");
            }
            aliasesSet.add(resourceMapping.getAlias());
            if (poly) {
                aliasesSet.addAll(Arrays.asList(resourceMapping.getExtendingAliases()));
            }
        }
    }
    return calcSubIndexes(subIndexes, aliasesSet.toArray(new String[aliasesSet.size()]));
}
From source file:uk.ac.soton.itinnovation.sad.service.services.EMClient.java
/**
 * Defines the SAD metric model.
 */
private HashSet<MetricGenerator> createMetricModel(Experiment experiment) {
    measurementSetMap.clear(); // This map will be useful later for reporting measurement summaries
    metricGenerators.clear();

    theMetricGenerator = new MetricGenerator();
    theMetricGenerator.setName("SAD Metric Generator");
    theMetricGenerator.setDescription("Metric generator for Social Analytics Dashboard");
    experiment.addMetricGenerator(theMetricGenerator);

    MetricGroup theMetricGroup = new MetricGroup();
    theMetricGroup.setName("SAD Metric Group");
    theMetricGroup.setDescription("Metric group for all Social Analytics Dashboard metrics");
    theMetricGroup.setMetricGeneratorUUID(theMetricGenerator.getUUID());
    theMetricGenerator.addMetricGroup(theMetricGroup);

    Entity theEntity = new Entity();
    theEntity.setName("SAD Service");
    theEntity.setDescription("Entity for Social Analytics Dashboard");
    theMetricGenerator.addEntity(theEntity);

    Attribute totalPluginExecutions = new Attribute();
    totalPluginExecutions.setName("Number of jobs started");
    totalPluginExecutions.setDescription("Number of times SAD plugins have been submitted for execution");
    totalPluginExecutions.setEntityUUID(theEntity.getUUID());
    theEntity.addAttribute(totalPluginExecutions);
    numPluginsRunMeasurementSetUuid = setupMeasurementForAttribute(totalPluginExecutions, theMetricGroup,
            MetricType.RATIO, new Unit("Times"));

    Attribute failedPluginExecutions = new Attribute();
    failedPluginExecutions.setName("Failed plugin executions");
    failedPluginExecutions.setDescription("Number of times SAD plugins have failed to execute");
    failedPluginExecutions.setEntityUUID(theEntity.getUUID());
    theEntity.addAttribute(failedPluginExecutions);
    numPluginsFailedMeasurementSetUuid = setupMeasurementForAttribute(failedPluginExecutions, theMetricGroup,
            MetricType.RATIO, new Unit("Times"));

    Attribute successfulPluginExecutions = new Attribute();
    successfulPluginExecutions.setName("Successful plugin executions");
    successfulPluginExecutions.setDescription("Number of times SAD plugins have executed successfully");
    successfulPluginExecutions.setEntityUUID(theEntity.getUUID());
    theEntity.addAttribute(successfulPluginExecutions);
    numPluginsSuccessMeasurementSetUuid = setupMeasurementForAttribute(successfulPluginExecutions,
            theMetricGroup, MetricType.RATIO, new Unit("Times"));

    Attribute namesOfPluginsExecuted = new Attribute();
    namesOfPluginsExecuted.setName("Names of plugins executed");
    namesOfPluginsExecuted.setDescription("Names of plugins submitted for execution");
    namesOfPluginsExecuted.setEntityUUID(theEntity.getUUID());
    theEntity.addAttribute(namesOfPluginsExecuted);
    pluginNameMeasurementSetUuid = setupMeasurementForAttribute(namesOfPluginsExecuted, theMetricGroup,
            MetricType.NOMINAL, new Unit(""));

    Attribute namesOfMethodsCalled = new Attribute();
    namesOfMethodsCalled.setName("Names of methods called");
    namesOfMethodsCalled.setDescription("Names of service methods called");
    namesOfMethodsCalled.setEntityUUID(theEntity.getUUID());
    theEntity.addAttribute(namesOfMethodsCalled);
    methodsCalledMeasurementSetUuid = setupMeasurementForAttribute(namesOfMethodsCalled, theMetricGroup,
            MetricType.NOMINAL, new Unit(""));

    Attribute timeSpendOnServiceMethodCall = new Attribute();
    timeSpendOnServiceMethodCall.setName("Service method call duration");
    timeSpendOnServiceMethodCall.setDescription("Time spend on service method call");
    timeSpendOnServiceMethodCall.setEntityUUID(theEntity.getUUID());
    theEntity.addAttribute(timeSpendOnServiceMethodCall);
    timeSpentOnServiceCallMeasurementSetUuid = setupMeasurementForAttribute(timeSpendOnServiceMethodCall,
            theMetricGroup, MetricType.RATIO, new Unit("mSec"));

    Attribute timeSpendOnDatabaseQueryCall = new Attribute();
    timeSpendOnDatabaseQueryCall.setName("Database query duration");
    timeSpendOnDatabaseQueryCall.setDescription("Time spend on querying the database");
    timeSpendOnDatabaseQueryCall.setEntityUUID(theEntity.getUUID());
    theEntity.addAttribute(timeSpendOnDatabaseQueryCall);
    timeSpentOnDatabaseQueryMeasurementSetUuid = setupMeasurementForAttribute(timeSpendOnDatabaseQueryCall,
            theMetricGroup, MetricType.RATIO, new Unit("mSec"));

    Attribute pluginExecutionDuration = new Attribute();
    pluginExecutionDuration.setName("Plugin execution duration");
    pluginExecutionDuration.setDescription("How long it took for the plugin to run");
    pluginExecutionDuration.setEntityUUID(theEntity.getUUID());
    theEntity.addAttribute(pluginExecutionDuration);
    pluginExecutionDurationMeasurementSetUuid = setupMeasurementForAttribute(pluginExecutionDuration,
            theMetricGroup, MetricType.RATIO, new Unit("mSec"));

    metricGenerators.put(theMetricGenerator.getUUID(), theMetricGenerator);

    HashSet<MetricGenerator> mgSet = new HashSet<>();
    mgSet.addAll(metricGenerators.values());

    logger.debug("Reporting the following metric generator set to ECC: ");
    int counter = 0;
    for (MetricGenerator tempMg : mgSet) {
        printMetricGenerator(tempMg, counter);
        counter++;
    }

    metricsModelSetup = true;
    return mgSet;
}