List of usage examples for java.util.Set.forEach
default void forEach(Consumer<? super E> action)

Performs the given action on each element of the Set until all elements have been processed or the action throws an exception. Set<E> inherits this default method from Iterable<E>.
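A minimal, self-contained example of the method (a sketch; assumes Java 9+ for Set.of, and the names are illustrative):

import java.util.Set;

public class SetForEachDemo {
    public static void main(String[] args) {
        Set<String> names = Set.of("alice", "bob", "carol");

        // The Consumer runs once per element; Set.of(...) has no defined iteration order.
        names.forEach(name -> System.out.println(name.toUpperCase()));

        // A method reference works wherever the lambda only forwards its argument.
        names.forEach(System.out::println);
    }
}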
From source file:org.apache.samza.system.kafka.KafkaSystemAdmin.java
/**
 * Note! This method does not populate SystemStreamMetadata for each stream with real data.
 * Thus, this method should ONLY be used to get the number of partitions for each stream.
 * It will throw NotImplementedException if anyone tries to access the actual metadata.
 *
 * @param streamNames set of streams for which to get the partition counts
 * @param cacheTTL cache TTL if caching the data
 * @return a map, keyed on stream names. The number of partitions in SystemStreamMetadata is the output of this method.
 */
@Override
public Map<String, SystemStreamMetadata> getSystemStreamPartitionCounts(Set<String> streamNames, long cacheTTL) {
    // This optimization omits actual metadata for performance. Instead, we inject a dummy for all partitions.
    final SystemStreamMetadata.SystemStreamPartitionMetadata dummySspm =
            new SystemStreamMetadata.SystemStreamPartitionMetadata(null, null, null) {
                String msg = "getSystemStreamPartitionCounts does not populate SystemStreamMetadata info. Only number of partitions";

                @Override
                public String getOldestOffset() {
                    throw new NotImplementedException(msg);
                }

                @Override
                public String getNewestOffset() {
                    throw new NotImplementedException(msg);
                }

                @Override
                public String getUpcomingOffset() {
                    throw new NotImplementedException(msg);
                }
            };

    ExponentialSleepStrategy strategy = new ExponentialSleepStrategy(DEFAULT_EXPONENTIAL_SLEEP_BACK_OFF_MULTIPLIER,
            DEFAULT_EXPONENTIAL_SLEEP_INITIAL_DELAY_MS, DEFAULT_EXPONENTIAL_SLEEP_MAX_DELAY_MS);

    Function1<ExponentialSleepStrategy.RetryLoop, Map<String, SystemStreamMetadata>> fetchMetadataOperation =
            new AbstractFunction1<ExponentialSleepStrategy.RetryLoop, Map<String, SystemStreamMetadata>>() {
                @Override
                public Map<String, SystemStreamMetadata> apply(ExponentialSleepStrategy.RetryLoop loop) {
                    Map<String, SystemStreamMetadata> allMetadata = new HashMap<>();
                    // For every requested stream, look up its partitions and map each one to the dummy metadata.
                    streamNames.forEach(streamName -> {
                        Map<Partition, SystemStreamMetadata.SystemStreamPartitionMetadata> partitionMetadata = new HashMap<>();

                        List<PartitionInfo> partitionInfos = threadSafeKafkaConsumer
                                .execute(consumer -> consumer.partitionsFor(streamName));

                        LOG.debug("Stream {} has partitions {}", streamName, partitionInfos);

                        partitionInfos.forEach(partitionInfo -> partitionMetadata
                                .put(new Partition(partitionInfo.partition()), dummySspm));

                        allMetadata.put(streamName, new SystemStreamMetadata(streamName, partitionMetadata));
                    });
                    loop.done();
                    return allMetadata;
                }
            };

    Map<String, SystemStreamMetadata> result = strategy.run(fetchMetadataOperation,
            new AbstractFunction2<Exception, ExponentialSleepStrategy.RetryLoop, BoxedUnit>() {
                @Override
                public BoxedUnit apply(Exception exception, ExponentialSleepStrategy.RetryLoop loop) {
                    if (loop.sleepCount() < MAX_RETRIES_ON_EXCEPTION) {
                        LOG.warn(String.format(
                                "Fetching systemstreampartition counts for: %s threw an exception. Retrying.",
                                streamNames), exception);
                    } else {
                        LOG.error(String.format("Fetching systemstreampartition counts for: %s threw an exception.",
                                streamNames), exception);
                        loop.done();
                        throw new SamzaException(exception);
                    }
                    return null;
                }
            }).get();

    LOG.info("SystemStream partition counts for system {}: {}", systemName, result);
    return result;
}
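The dummy metadata above is a "poison" value: it satisfies the type system but fails loudly if anyone reads the offsets. A minimal sketch of that pattern (illustrative names, not Samza's API):

class PoisonSketch {
    // A placeholder that type-checks but refuses to be read; useful when a
    // structure must be fully populated but only its size/keys matter.
    interface OffsetMetadata {
        String oldestOffset();
    }

    static final OffsetMetadata POISON = () -> {
        throw new UnsupportedOperationException("partition counts only; offsets were never fetched");
    };
}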
From source file:org.matonto.ontology.rest.impl.OntologyRestImpl.java
/**
 * Uses the provided TupleQueryResult to construct a hierarchy of the entities it contains. Each BindingSet
 * must have the parent as the first binding and the child as the second binding.
 *
 * @param tupleQueryResult the TupleQueryResult that contains the parent-child relationships for creating the
 *                         hierarchy.
 * @return a JSONObject containing the hierarchy of the entities provided.
 */
private JSONObject getHierarchy(TupleQueryResult tupleQueryResult) {
    Map<String, Set<String>> results = new HashMap<>();
    Map<String, Set<String>> index = new HashMap<>();
    Set<String> topLevel = new HashSet<>();
    Set<String> lowerLevel = new HashSet<>();
    tupleQueryResult.forEach(queryResult -> {
        Value key = Iterables.get(queryResult, 0).getValue();
        Binding value = Iterables.get(queryResult, 1, null);
        if (!(key instanceof BNode)) {
            String keyString = key.stringValue();
            topLevel.add(keyString);
            if (value != null && !(value.getValue() instanceof BNode)) {
                String valueString = value.getValue().stringValue();
                lowerLevel.add(valueString);
                if (results.containsKey(keyString)) {
                    results.get(keyString).add(valueString);
                } else {
                    Set<String> newSet = new HashSet<>();
                    newSet.add(valueString);
                    results.put(keyString, newSet);
                }
                if (index.containsKey(valueString)) {
                    index.get(valueString).add(keyString);
                } else {
                    Set<String> newSet = new HashSet<>();
                    newSet.add(keyString);
                    index.put(valueString, newSet);
                }
            } else {
                results.put(keyString, new HashSet<>());
            }
        }
    });
    // Anything that never appeared as a child is a root of the hierarchy.
    topLevel.removeAll(lowerLevel);
    JSONArray hierarchy = new JSONArray();
    topLevel.forEach(classIRI -> {
        JSONObject item = getHierarchyItem(classIRI, results);
        hierarchy.add(item);
    });
    return new JSONObject().element("hierarchy", hierarchy).element("index", JSONObject.fromObject(index));
}
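The containsKey/put branches above predate Map.computeIfAbsent; since Java 8 the same grouping can be written in one line each. A sketch under the same assumptions (not the project's code):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class HierarchySketch {
    private final Map<String, Set<String>> results = new HashMap<>();
    private final Map<String, Set<String>> index = new HashMap<>();

    // Equivalent to the containsKey/put branches above: create the set on
    // first sight of the key, then add to it.
    void link(String parent, String child) {
        results.computeIfAbsent(parent, k -> new HashSet<>()).add(child);
        index.computeIfAbsent(child, k -> new HashSet<>()).add(parent);
    }
}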
From source file:edu.stanford.muse.index.Archive.java
public Pair<StringBuilder, Boolean> getHTMLForContents(Document d, Date date, String docId, String regexToHighlight,
        Set<String> highlightTerms, Map<String, Map<String, Short>> authorisedEntities, boolean IA_links,
        boolean inFull, boolean showDebugInfo) throws Exception {
    org.apache.lucene.document.Document ldoc = indexer.getDoc(d);
    String contents = indexer.getContents(d, false);
    // Check the Lucene doc before dereferencing it.
    if (ldoc == null) {
        System.err.println("Lucene Doc is null for: " + d.getUniqueId() + " but the content is "
                + (contents == null ? "null" : "not null"));
        return null;
    }
    Span[] names = getAllNamesInLuceneDoc(ldoc, true);
    Set<String> acrs = Util.getAcronyms(contents);

    // Contains all entities, with an id if the entity is authorised, else null
    Map<String, EmailRenderer.Entity> entitiesWithId = new HashMap<>();
    // We annotate three specially recognized types
    Map<Short, String> recMap = new HashMap<>();
    recMap.put(NEType.Type.PERSON.getCode(), "cp");
    recMap.put(NEType.Type.PLACE.getCode(), "cl");
    recMap.put(NEType.Type.ORGANISATION.getCode(), "co");
    Arrays.stream(names).filter(n -> recMap.containsKey(NEType.getCoarseType(n.type).getCode()))
            .forEach(n -> {
                Set<String> types = new HashSet<>();
                types.add(recMap.get(NEType.getCoarseType(n.type).getCode()));
                entitiesWithId.put(n.text, new EmailRenderer.Entity(n.text,
                        authorisedEntities == null ? null : authorisedEntities.get(n.text), types));
            });
    acrs.forEach(acr -> {
        Set<String> types = new HashSet<>();
        types.add("acr");
        entitiesWithId.put(acr, new EmailRenderer.Entity(acr,
                authorisedEntities == null ? null : authorisedEntities.get(acr), types));
    });

    // Don't want a "more" button anymore
    boolean overflow = false;
    String htmlContents;
    if (contents.length() > Config.MAX_TEXT_SIZE_TO_ANNOTATE)
        // Don't try to annotate extraordinarily long messages, probably bad data, as discovered on RF archive
        htmlContents = Util.escapeHTML(contents);
    else
        htmlContents = annotate(ldoc, contents, date, docId, regexToHighlight, highlightTerms, entitiesWithId,
                IA_links, showDebugInfo);

    if (ModeConfig.isPublicMode())
        htmlContents = Util.maskEmailDomain(htmlContents);

    StringBuilder sb = new StringBuilder();
    sb.append(htmlContents);
    return new Pair<>(sb, overflow);
}
From source file:org.opencb.opencga.storage.mongodb.variant.VariantMongoDBAdaptor.java
private Document parseQuery(Query query, Document mongoQuery) {
    QueryBuilder builder = new QueryBuilder();
    if (query != null) {
        /* VARIANT PARAMS */
        if (query.get(VariantQueryParams.CHROMOSOME.key()) != null
                && !query.getString(VariantQueryParams.CHROMOSOME.key()).isEmpty()) {
            List<String> chromosomes = query.getAsStringList(VariantQueryParams.CHROMOSOME.key());
            LinkedList<String> regions = new LinkedList<>(query.getAsStringList(VariantQueryParams.REGION.key()));
            regions.addAll(chromosomes);
            query.put(VariantQueryParams.REGION.key(), regions);
        }

        if (query.get(VariantQueryParams.REGION.key()) != null
                && !query.getString(VariantQueryParams.REGION.key()).isEmpty()) {
            List<String> stringList = query.getAsStringList(VariantQueryParams.REGION.key());
            List<Region> regions = new ArrayList<>(stringList.size());
            for (String reg : stringList) {
                Region region = Region.parseRegion(reg);
                regions.add(region);
            }
            getRegionFilter(regions, builder);
        }

        if (query.get(VariantQueryParams.ID.key()) != null
                && !query.getString(VariantQueryParams.ID.key()).isEmpty()) {
            List<String> idsList = query.getAsStringList(VariantQueryParams.ID.key());
            for (String id : idsList) {
                if (id.contains(":")) {
                    try {
                        Variant variant = new Variant(id);
                        String mongoId = MongoDBVariantStageLoader.STRING_ID_CONVERTER.buildId(variant);
                        addQueryStringFilter("_id", mongoId, builder, QueryOperation.OR);
                    } catch (IllegalArgumentException ignore) {
                        logger.info("Wrong variant " + id);
                    }
                }
            }

            String ids = query.getString(VariantQueryParams.ID.key());
            addQueryStringFilter(DocumentToVariantConverter.ANNOTATION_FIELD + "."
                    + DocumentToVariantAnnotationConverter.XREFS_FIELD + "."
                    + DocumentToVariantAnnotationConverter.XREF_ID_FIELD, ids, builder, QueryOperation.OR);
            addQueryStringFilter(DocumentToVariantConverter.IDS_FIELD, ids, builder, QueryOperation.OR);
        }

        if (query.containsKey(VariantQueryParams.GENE.key())) {
            String xrefs = query.getString(VariantQueryParams.GENE.key());
            addQueryStringFilter(DocumentToVariantConverter.ANNOTATION_FIELD + "."
                    + DocumentToVariantAnnotationConverter.XREFS_FIELD + "."
                    + DocumentToVariantAnnotationConverter.XREF_ID_FIELD, xrefs, builder, QueryOperation.OR);
        }

        if (query.containsKey(VariantQueryParams.REFERENCE.key())
                && query.getString(VariantQueryParams.REFERENCE.key()) != null) {
            addQueryStringFilter(DocumentToVariantConverter.REFERENCE_FIELD,
                    query.getString(VariantQueryParams.REFERENCE.key()), builder, QueryOperation.AND);
        }

        if (query.containsKey(VariantQueryParams.ALTERNATE.key())
                && query.getString(VariantQueryParams.ALTERNATE.key()) != null) {
            addQueryStringFilter(DocumentToVariantConverter.ALTERNATE_FIELD,
                    query.getString(VariantQueryParams.ALTERNATE.key()), builder, QueryOperation.AND);
        }

        if (query.containsKey(VariantQueryParams.TYPE.key())
                && !query.getString(VariantQueryParams.TYPE.key()).isEmpty()) {
            addQueryFilter(DocumentToVariantConverter.TYPE_FIELD, query.getString(VariantQueryParams.TYPE.key()),
                    builder, QueryOperation.AND, s -> {
                        // Expand the requested type to include all of its subtypes.
                        Set<VariantType> subTypes = Variant.subTypes(VariantType.valueOf(s));
                        List<String> types = new ArrayList<>(subTypes.size() + 1);
                        types.add(s);
                        subTypes.forEach(subType -> types.add(subType.toString()));
                        return types;
                    });
        }

        /* ANNOTATION PARAMS */
        parseAnnotationQueryParams(query, builder);

        /* STUDIES */
        final StudyConfiguration defaultStudyConfiguration = parseStudyQueryParams(query, builder);

        /* STATS PARAMS */
        parseStatsQueryParams(query, builder, defaultStudyConfiguration);
    }
    logger.debug("Find = " + builder.get());
    mongoQuery.putAll(builder.get().toMap());
    return mongoQuery;
}
From source file:org.opencb.opencga.storage.mongodb.variant.adaptors.VariantMongoDBAdaptor.java
private Document parseQuery(final Query originalQuery) {
    QueryBuilder builder = new QueryBuilder();
    if (originalQuery != null) {
        // Copy the given query. It may be modified
        Query query = new Query(originalQuery);
        boolean nonGeneRegionFilter = false;

        /* VARIANT PARAMS */
        List<Region> regions = new ArrayList<>();
        if (isValidParam(query, VariantQueryParams.CHROMOSOME)) {
            nonGeneRegionFilter = true;
            regions.addAll(Region.parseRegions(query.getString(VariantQueryParams.CHROMOSOME.key()), true));
        }
        if (isValidParam(query, VariantQueryParams.REGION)) {
            nonGeneRegionFilter = true;
            regions.addAll(Region.parseRegions(query.getString(VariantQueryParams.REGION.key()), true));
        }
        if (!regions.isEmpty()) {
            getRegionFilter(regions, builder);
        }

        // List with all MongoIds from ID and XREF filters
        List<String> mongoIds = new ArrayList<>();

        if (isValidParam(query, VariantQueryParams.ID)) {
            nonGeneRegionFilter = true;
            List<String> idsList = query.getAsStringList(VariantQueryParams.ID.key());
            List<String> otherIds = new ArrayList<>(idsList.size());

            for (String value : idsList) {
                Variant variant = toVariant(value);
                if (variant != null) {
                    mongoIds.add(MongoDBVariantStageLoader.STRING_ID_CONVERTER.buildId(variant));
                } else {
                    otherIds.add(value);
                }
            }

            if (!otherIds.isEmpty()) {
                String ids = otherIds.stream().collect(Collectors.joining(","));
                addQueryStringFilter(DocumentToVariantConverter.ANNOTATION_FIELD + "."
                        + DocumentToVariantAnnotationConverter.XREFS_FIELD + "."
                        + DocumentToVariantAnnotationConverter.XREF_ID_FIELD, ids, builder, QueryOperation.OR);
                addQueryStringFilter(DocumentToVariantConverter.IDS_FIELD, ids, builder, QueryOperation.OR);
            }
        }

        List<String> genes = new ArrayList<>(query.getAsStringList(VariantQueryParams.GENE.key()));

        if (isValidParam(query, VariantQueryParams.ANNOT_XREF)) {
            List<String> xrefs = query.getAsStringList(VariantQueryParams.ANNOT_XREF.key());
            List<String> otherXrefs = new ArrayList<>();
            for (String value : xrefs) {
                Variant variant = toVariant(value);
                if (variant != null) {
                    mongoIds.add(MongoDBVariantStageLoader.STRING_ID_CONVERTER.buildId(variant));
                } else {
                    if (isVariantAccession(value) || isClinicalAccession(value) || isGeneAccession(value)) {
                        otherXrefs.add(value);
                    } else {
                        genes.add(value);
                    }
                }
            }

            if (!otherXrefs.isEmpty()) {
                nonGeneRegionFilter = true;
                addQueryStringFilter(DocumentToVariantConverter.ANNOTATION_FIELD + '.'
                        + DocumentToVariantAnnotationConverter.XREFS_FIELD + '.'
                        + DocumentToVariantAnnotationConverter.XREF_ID_FIELD,
                        String.join(",", otherXrefs), builder, QueryOperation.OR);
            }
        }

        if (!genes.isEmpty()) {
            if (isValidParam(query, VariantQueryParams.ANNOT_CONSEQUENCE_TYPE)) {
                List<String> soList = query.getAsStringList(VariantQueryParams.ANNOT_CONSEQUENCE_TYPE.key());
                Set<String> gnSo = new HashSet<>(genes.size() * soList.size());
                for (String gene : genes) {
                    for (String so : soList) {
                        int soNumber = parseConsequenceType(so);
                        gnSo.add(DocumentToVariantAnnotationConverter.buildGeneSO(gene, soNumber));
                    }
                }
                builder.or(new BasicDBObject(DocumentToVariantConverter.ANNOTATION_FIELD + '.'
                        + DocumentToVariantAnnotationConverter.GENE_SO_FIELD, new BasicDBObject("$in", gnSo)));
                if (!nonGeneRegionFilter) {
                    // Filter already present in the GENE_SO_FIELD
                    query.remove(VariantQueryParams.ANNOT_CONSEQUENCE_TYPE.key());
                }
            } else {
                addQueryStringFilter(DocumentToVariantConverter.ANNOTATION_FIELD + '.'
                        + DocumentToVariantAnnotationConverter.XREFS_FIELD + '.'
                        + DocumentToVariantAnnotationConverter.XREF_ID_FIELD,
                        String.join(",", genes), builder, QueryOperation.OR);
            }
        }

        if (!mongoIds.isEmpty()) {
            if (mongoIds.size() == 1) {
                builder.or(new QueryBuilder().and("_id").is(mongoIds.get(0)).get());
            } else {
                builder.or(new QueryBuilder().and("_id").in(mongoIds).get());
            }
        }

        if (isValidParam(query, VariantQueryParams.REFERENCE)) {
            addQueryStringFilter(DocumentToVariantConverter.REFERENCE_FIELD,
                    query.getString(VariantQueryParams.REFERENCE.key()), builder, QueryOperation.AND);
        }

        if (isValidParam(query, VariantQueryParams.ALTERNATE)) {
            addQueryStringFilter(DocumentToVariantConverter.ALTERNATE_FIELD,
                    query.getString(VariantQueryParams.ALTERNATE.key()), builder, QueryOperation.AND);
        }

        if (isValidParam(query, VariantQueryParams.TYPE)) {
            addQueryFilter(DocumentToVariantConverter.TYPE_FIELD, query.getString(VariantQueryParams.TYPE.key()),
                    builder, QueryOperation.AND, s -> {
                        // Expand the requested type to include all of its subtypes.
                        Set<VariantType> subTypes = Variant.subTypes(VariantType.valueOf(s));
                        List<String> types = new ArrayList<>(subTypes.size() + 1);
                        types.add(s);
                        subTypes.forEach(subType -> types.add(subType.toString()));
                        return types;
                    });
        }

        /* ANNOTATION PARAMS */
        parseAnnotationQueryParams(query, builder);

        /* STUDIES */
        final StudyConfiguration defaultStudyConfiguration = parseStudyQueryParams(query, builder);

        /* STATS PARAMS */
        parseStatsQueryParams(query, builder, defaultStudyConfiguration);
    }

    logger.debug("Query = {}", originalQuery == null ? "{}" : originalQuery.toJson());
    Document mongoQuery = new Document(builder.get().toMap());
    logger.debug("MongoDB Query = {}", mongoQuery.toJson(new JsonWriterSettings(JsonMode.SHELL, false)));
    return mongoQuery;
}
From source file:com.vmware.photon.controller.common.clients.HostClient.java
/**
 * This method performs an asynchronous Thrift call to provision an agent. On
 * completion, the specified handler is invoked.
 *
 * @param availabilityZone
 * @param dataStoreList
 * @param imageDataStores
 * @param usedForVMs
 * @param networkList
 * @param hostAddress
 * @param hostPort
 * @param chairmanServerList
 * @param memoryOverCommit
 * @param loggingEndpoint
 * @param logLevel
 * @param managementOnly
 * @param hostId
 * @param ntpEndpoint
 * @param handler Supplies a handler object to be invoked on completion.
 * @throws RpcException
 */
@RpcMethod
public void provision(String availabilityZone, List<String> dataStoreList, Set<String> imageDataStores,
        boolean usedForVMs, List<String> networkList, String hostAddress, int hostPort,
        List<String> chairmanServerList, double memoryOverCommit, String loggingEndpoint, String logLevel,
        boolean managementOnly, String hostId, String ntpEndpoint,
        AsyncMethodCallback<Host.AsyncClient.provision_call> handler) throws RpcException {
    ensureClient();

    // Wrap each image datastore name in an ImageDatastore that records whether it may host VMs.
    HashSet<ImageDatastore> imageDatastoreSet = new HashSet<>();
    imageDataStores.forEach(
            (imageDatastoreName) -> imageDatastoreSet.add(new ImageDatastore(imageDatastoreName, usedForVMs)));

    ProvisionRequest provisionRequest = new ProvisionRequest();
    provisionRequest.setAvailability_zone(availabilityZone);
    provisionRequest.setDatastores(dataStoreList);
    provisionRequest.setNetworks(networkList);
    provisionRequest.setAddress(new ServerAddress(hostAddress, hostPort));
    provisionRequest.setChairman_server(Util.getServerAddressList(chairmanServerList));
    provisionRequest.setMemory_overcommit(memoryOverCommit);
    provisionRequest.setManagement_only(managementOnly);
    provisionRequest.setHost_id(hostId);
    provisionRequest.setNtp_endpoint(ntpEndpoint);
    provisionRequest.setImage_datastores(imageDatastoreSet);

    clientProxy.setTimeout(PROVISION_TIMEOUT_MS);
    logger.info("provision target {}, request {}", getTarget(), provisionRequest);

    try {
        clientProxy.provision(provisionRequest, handler);
    } catch (TException e) {
        throw new RpcException(e.getMessage());
    }
}
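The forEach that builds imageDatastoreSet could equally be written as a stream collect. A drop-in sketch using the same names and types as the method above (not the project's code):

import java.util.Set;
import java.util.stream.Collectors;

// Same transformation as the forEach above, expressed as a stream pipeline.
Set<ImageDatastore> imageDatastoreSet = imageDataStores.stream()
        .map(name -> new ImageDatastore(name, usedForVMs))
        .collect(Collectors.toSet());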
From source file:edu.usu.sdl.openstorefront.service.ComponentServiceImpl.java
@Override
public void bulkComponentAttributeChange(BulkComponentAttributeChange bulkComponentAttributeChange) {
    Set<String> componentIdSet = new HashSet<>();
    for (ComponentAttribute componentAttribute : bulkComponentAttributeChange.getAttributes()) {
        componentAttribute.populateBaseUpdateFields();
        switch (bulkComponentAttributeChange.getOpertionType()) {
            case ACTIVATE:
                componentIdSet.add(componentAttribute.getComponentId());
                componentAttribute.setActiveStatus(ComponentAttribute.ACTIVE_STATUS);
                persistenceService.persist(componentAttribute);
                break;
            case INACTIVE:
                componentIdSet.add(componentAttribute.getComponentId());
                componentAttribute.setActiveStatus(ComponentAttribute.INACTIVE_STATUS);
                persistenceService.persist(componentAttribute);
                break;
            case DELETE:
                persistenceService.delete(componentAttribute);
                break;
        }
    }
    // Touch each affected component exactly once; the Set de-duplicates component ids.
    componentIdSet.forEach(componentId -> {
        updateComponentLastActivity(componentId);
    });
}
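Since the final lambda only forwards its argument, that forEach could be shortened to a method reference (a stylistic alternative, not the project's code):

componentIdSet.forEach(this::updateComponentLastActivity);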
From source file:eu.itesla_project.online.tools.OnlineWorkflowTool.java
@Override
public void run(CommandLine line) throws Exception {
    OnlineWorkflowStartParameters startconfig = OnlineWorkflowStartParameters.loadDefault();
    String host = line.getOptionValue(OnlineWorkflowCommand.HOST);
    String port = line.getOptionValue(OnlineWorkflowCommand.PORT);
    String threads = line.getOptionValue(OnlineWorkflowCommand.THREADS);

    if (host != null)
        startconfig.setJmxHost(host);
    if (port != null)
        startconfig.setJmxPort(Integer.valueOf(port));
    if (threads != null)
        startconfig.setThreads(Integer.valueOf(threads));

    Set<DateTime> baseCasesSet = null;

    OnlineWorkflowParameters params = OnlineWorkflowParameters.loadDefault();
    boolean atLeastOneBaseCaseLineParam = line.hasOption(OnlineWorkflowCommand.CASE_TYPE)
            || line.hasOption(OnlineWorkflowCommand.COUNTRIES) || line.hasOption(OnlineWorkflowCommand.BASE_CASE)
            || line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL);
    boolean allNeededBaseCaseLineParams = line.hasOption(OnlineWorkflowCommand.CASE_TYPE)
            && line.hasOption(OnlineWorkflowCommand.COUNTRIES)
            && (line.hasOption(OnlineWorkflowCommand.BASE_CASE)
                    || line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL));

    if (line.hasOption(OnlineWorkflowCommand.CASE_FILE)) {
        if (atLeastOneBaseCaseLineParam) {
            showHelp("parameter " + OnlineWorkflowCommand.CASE_FILE + " cannot be used together with parameters: "
                    + OnlineWorkflowCommand.CASE_TYPE + ", " + OnlineWorkflowCommand.COUNTRIES + ", "
                    + OnlineWorkflowCommand.BASE_CASE + ", " + OnlineWorkflowCommand.BASECASES_INTERVAL);
            return;
        }
        params.setCaseFile(line.getOptionValue(OnlineWorkflowCommand.CASE_FILE));
    } else {
        if (params.getCaseFile() != null) {
            if (atLeastOneBaseCaseLineParam) {
                if (!allNeededBaseCaseLineParams) {
                    showHelp("to override default parameter " + OnlineWorkflowCommand.CASE_FILE
                            + ", all these parameters must be specified: " + OnlineWorkflowCommand.CASE_TYPE + ", "
                            + OnlineWorkflowCommand.COUNTRIES + ", " + OnlineWorkflowCommand.BASE_CASE + " or "
                            + OnlineWorkflowCommand.BASECASES_INTERVAL);
                    return;
                }
                params.setCaseFile(null);
            }
        }
        if (line.hasOption(OnlineWorkflowCommand.CASE_TYPE))
            params.setCaseType(CaseType.valueOf(line.getOptionValue(OnlineWorkflowCommand.CASE_TYPE)));
        if (line.hasOption(OnlineWorkflowCommand.COUNTRIES)) {
            params.setCountries(Arrays.stream(line.getOptionValue(OnlineWorkflowCommand.COUNTRIES).split(","))
                    .map(Country::valueOf).collect(Collectors.toSet()));
        }
        if (line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL)) {
            Interval basecasesInterval = Interval
                    .parse(line.getOptionValue(OnlineWorkflowCommand.BASECASES_INTERVAL));
            OnlineConfig oConfig = OnlineConfig.load();
            CaseRepository caseRepo = oConfig.getCaseRepositoryFactoryClass().newInstance()
                    .create(new LocalComputationManager());
            baseCasesSet = caseRepo.dataAvailable(params.getCaseType(), params.getCountries(), basecasesInterval);
            System.out.println("Base cases available for interval " + basecasesInterval.toString());
            baseCasesSet.forEach(x -> {
                System.out.println(" " + x);
            });
        }
        if (baseCasesSet == null) {
            baseCasesSet = new HashSet<>();
            String base = line.getOptionValue(OnlineWorkflowCommand.BASE_CASE);
            if (base != null) {
                baseCasesSet.add(DateTime.parse(base));
            } else {
                baseCasesSet.add(params.getBaseCaseDate());
            }
        }
    }

    String histo = line.getOptionValue(OnlineWorkflowCommand.HISTODB_INTERVAL);
    if (histo != null)
        params.setHistoInterval(Interval.parse(histo));

    String states = line.getOptionValue(OnlineWorkflowCommand.STATES);
    if (states != null)
        params.setStates(Integer.parseInt(states));

    String timeHorizon = line.getOptionValue(OnlineWorkflowCommand.TIME_HORIZON);
    if (timeHorizon != null)
        params.setTimeHorizon(TimeHorizon.fromName(timeHorizon));

    String workflowid = line.getOptionValue(OnlineWorkflowCommand.WORKFLOW_ID);
    if (workflowid != null)
        params.setOfflineWorkflowId(workflowid);

    String feAnalysisId = line.getOptionValue(OnlineWorkflowCommand.FEANALYSIS_ID);
    if (feAnalysisId != null)
        params.setFeAnalysisId(feAnalysisId);

    String rulesPurity = line.getOptionValue(OnlineWorkflowCommand.RULES_PURITY);
    if (rulesPurity != null)
        params.setRulesPurityThreshold(Double.parseDouble(rulesPurity));

    if (line.hasOption(OnlineWorkflowCommand.STORE_STATES))
        params.setStoreStates(true);
    if (line.hasOption(OnlineWorkflowCommand.ANALYSE_BASECASE))
        params.setAnalyseBasecase(true);
    if (line.hasOption(OnlineWorkflowCommand.VALIDATION)) {
        params.setValidation(true);
        params.setStoreStates(true); // if validation then store states
        params.setAnalyseBasecase(true); // if validation then analyze base case
    }

    Set<SecurityIndexType> securityIndexes = null;
    if (line.hasOption(OnlineWorkflowCommand.SECURITY_INDEXES)) {
        if (!"ALL".equals(line.getOptionValue(OnlineWorkflowCommand.SECURITY_INDEXES)))
            securityIndexes = Arrays.stream(line.getOptionValue(OnlineWorkflowCommand.SECURITY_INDEXES).split(","))
                    .map(SecurityIndexType::valueOf).collect(Collectors.toSet());
        params.setSecurityIndexes(securityIndexes);
    }

    if (line.hasOption(OnlineWorkflowCommand.MERGE_OPTIMIZED))
        params.setMergeOptimized(true);

    String limitReduction = line.getOptionValue(OnlineWorkflowCommand.LIMIT_REDUCTION);
    if (limitReduction != null)
        params.setLimitReduction(Float.parseFloat(limitReduction));

    if (line.hasOption(OnlineWorkflowCommand.HANDLE_VIOLATION_IN_N)) {
        params.setHandleViolationsInN(true);
        params.setAnalyseBasecase(true); // to handle violations in N, the base case must be analyzed
    }

    String constraintMargin = line.getOptionValue(OnlineWorkflowCommand.CONSTRAINT_MARGIN);
    if (constraintMargin != null)
        params.setConstraintMargin(Float.parseFloat(constraintMargin));

    String urlString = "service:jmx:rmi:///jndi/rmi://" + startconfig.getJmxHost() + ":"
            + startconfig.getJmxPort() + "/jmxrmi";
    JMXServiceURL serviceURL = new JMXServiceURL(urlString);
    Map<String, String> jmxEnv = new HashMap<>();
    JMXConnector connector = JMXConnectorFactory.connect(serviceURL, jmxEnv);
    MBeanServerConnection mbsc = connector.getMBeanServerConnection();
    ObjectName name = new ObjectName(LocalOnlineApplicationMBean.BEAN_NAME);
    LocalOnlineApplicationMBean application = MBeanServerInvocationHandler.newProxyInstance(mbsc, name,
            LocalOnlineApplicationMBean.class, false);

    if (line.hasOption(OnlineWorkflowCommand.START_CMD)) {
        if (params.getCaseFile() != null) {
            System.out.println("starting Online Workflow, caseFile " + params.getCaseFile());
            String workflowId = application.startWorkflow(startconfig, params);
            System.out.println("workflowId=" + workflowId);
        } else {
            // One workflow per base case date.
            for (DateTime basecase : baseCasesSet) {
                params.setBaseCaseDate(basecase);
                System.out.println("starting Online Workflow, basecase " + basecase.toString());
                String workflowId = application.startWorkflow(startconfig, params);
                System.out.println("workflowId=" + workflowId);
            }
        }
    } else if (line.hasOption(OnlineWorkflowCommand.SHUTDOWN_CMD)) {
        application.shutdown();
    } else {
        showHelp("");
    }
}
From source file:org.matonto.ontology.rest.impl.OntologyRestImplTest.java
private void assertClasses(JSONObject responseObject, Set<OClass> set) {
    JSONArray jsonClasses = responseObject.optJSONArray("classes");
    assertNotNull(jsonClasses);
    assertEquals(jsonClasses.size(), set.size());
    // Every expected class IRI must appear in the JSON array.
    set.forEach(oClass -> assertTrue(jsonClasses.contains(createJsonIRI(oClass.getIRI()))));
}
From source file:org.matonto.ontology.rest.impl.OntologyRestImplTest.java
private void assertDatatypes(JSONObject responseObject, Set<Datatype> set) {
    JSONArray jsonDatatypes = responseObject.optJSONArray("datatypes");
    assertNotNull(jsonDatatypes);
    assertEquals(jsonDatatypes.size(), set.size());
    // Every expected datatype IRI must appear in the JSON array.
    set.forEach(datatype -> assertTrue(jsonDatatypes.contains(createJsonIRI(datatype.getIRI()))));
}