List of usage examples for java.util.Map.forEach
default void forEach(BiConsumer<? super K, ? super V> action)
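A minimal, self-contained sketch of the call in isolation may help before the real-world examples below. The map contents here are invented purely for illustration:

import java.util.LinkedHashMap;
import java.util.Map;

public class MapForEachDemo {
    public static void main(String[] args) {
        // Hypothetical data, used only to demonstrate the signature.
        Map<String, Integer> wordCounts = new LinkedHashMap<>();
        wordCounts.put("alpha", 3);
        wordCounts.put("beta", 5);

        // The BiConsumer receives each key and value in turn; for a
        // LinkedHashMap the entries arrive in insertion order.
        wordCounts.forEach((word, count) -> System.out.println(word + " -> " + count));
    }
}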
From source file:com.okta.swagger.codegen.AbstractOktaJavaClientCodegen.java
private void handleOktaLinkedOperations(Swagger swagger) {
    // we want to move any operations defined by the 'x-okta-operations' or 'x-okta-crud' vendor extension to the model
    Map<String, Model> modelMap = swagger.getDefinitions().entrySet().stream()
            .filter(e -> e.getValue().getVendorExtensions().containsKey("x-okta-operations")
                    || e.getValue().getVendorExtensions().containsKey("x-okta-crud"))
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

    modelMap.forEach((k, model) -> {
        List<ObjectNode> linkNodes = new ArrayList<>();
        addAllIfNotNull(linkNodes, (List<ObjectNode>) model.getVendorExtensions().get("x-okta-operations"));
        addAllIfNotNull(linkNodes, (List<ObjectNode>) model.getVendorExtensions().get("x-okta-crud"));

        Map<String, CodegenOperation> operationMap = new HashMap<>();

        linkNodes.forEach(n -> {
            String operationId = n.get("operationId").textValue();

            // find the swagger path operation
            swagger.getPaths().forEach((pathName, path) -> {
                Optional<Map.Entry<HttpMethod, Operation>> operationEntry = path.getOperationMap().entrySet()
                        .stream().filter(e -> e.getValue().getOperationId().equals(operationId)).findFirst();

                if (operationEntry.isPresent()) {
                    Operation operation = operationEntry.get().getValue();
                    CodegenOperation cgOperation = fromOperation(pathName,
                            operationEntry.get().getKey().name().toLowerCase(), operation,
                            swagger.getDefinitions(), swagger);

                    boolean canLinkMethod = true;

                    JsonNode aliasNode = n.get("alias");
                    if (aliasNode != null) {
                        String alias = aliasNode.textValue();
                        cgOperation.vendorExtensions.put("alias", alias);

                        if ("update".equals(alias)) {
                            model.getVendorExtensions().put("saveable", true);
                        } else if ("delete".equals(alias)) {
                            model.getVendorExtensions().put("deletable", true);
                            cgOperation.vendorExtensions.put("selfDelete", true);
                        } else if ("read".equals(alias) || "create".equals(alias)) {
                            canLinkMethod = false;
                        }
                    }

                    // we do NOT link read or create methods, those need to be on the parent object
                    if (canLinkMethod) {
                        // now any params that match the models we need to use the model value directly
                        // for example if the path contained {id} we would call getId() instead
                        Map<String, String> argMap = createArgMap(n);

                        List<CodegenParameter> cgOtherPathParamList = new ArrayList<>();
                        List<CodegenParameter> cgParamAllList = new ArrayList<>();
                        List<CodegenParameter> cgParamModelList = new ArrayList<>();

                        cgOperation.pathParams.forEach(param -> {
                            if (argMap.containsKey(param.paramName)) {
                                String paramName = argMap.get(param.paramName);
                                cgParamModelList.add(param);

                                if (model.getProperties() != null) {
                                    CodegenProperty cgProperty = fromProperty(paramName,
                                            model.getProperties().get(paramName));
                                    param.vendorExtensions.put("fromModel", cgProperty);
                                } else {
                                    System.err.println("Model '" + model.getTitle() + "' has no properties");
                                }
                            } else {
                                cgOtherPathParamList.add(param);
                            }
                        });

                        // remove the body param if the body is the object itself
                        for (Iterator<CodegenParameter> iter = cgOperation.bodyParams.iterator(); iter.hasNext();) {
                            CodegenParameter bodyParam = iter.next();
                            if (argMap.containsKey(bodyParam.paramName)) {
                                cgOperation.vendorExtensions.put("bodyIsSelf", true);
                                iter.remove();
                            }
                        }

                        // do not add the parent path params to the list (they will be parsed from the href)
                        SortedSet<String> pathParents = parentPathParams(n);
                        cgOtherPathParamList.forEach(param -> {
                            if (!pathParents.contains(param.paramName)) {
                                cgParamAllList.add(param);
                            }
                        });

                        if (!pathParents.isEmpty()) {
                            cgOperation.vendorExtensions.put("hasPathParents", true);
                            cgOperation.vendorExtensions.put("pathParents", pathParents);
                        }

                        cgParamAllList.addAll(cgOperation.queryParams);
                        cgParamAllList.addAll(cgOperation.bodyParams);

                        // set all params to have more
                        cgParamAllList.forEach(param -> param.hasMore = true);

                        // then grab the last one and mark it as the last
                        if (!cgParamAllList.isEmpty()) {
                            CodegenParameter param = cgParamAllList.get(cgParamAllList.size() - 1);
                            param.hasMore = false;
                        }

                        cgOperation.vendorExtensions.put("allParams", cgParamAllList);
                        cgOperation.vendorExtensions.put("fromModelPathParams", cgParamModelList);

                        addOptionalExtension(cgOperation, cgParamAllList);

                        operationMap.put(cgOperation.operationId, cgOperation);

                        // mark the operation as moved so we do NOT add it to the client
                        operation.getVendorExtensions().put("moved", true);
                    }
                }
            });
        });

        model.getVendorExtensions().put("operations", operationMap.values());
    });
}
From source file:org.openhab.binding.modbus.internal.handler.ModbusDataThingHandler.java
@Override
public synchronized void onError(ModbusWriteRequestBlueprint request, Exception error) {
    if (hasConfigurationError()) {
        return;
    } else if (!isWriteEnabled) {
        return;
    }
    if (error instanceof ModbusConnectionException) {
        logger.error("Thing {} '{}' had {} error on write: {}", getThing().getUID(), getThing().getLabel(),
                error.getClass().getSimpleName(), error.toString());
    } else if (error instanceof ModbusTransportException) {
        logger.error("Thing {} '{}' had {} error on write: {}", getThing().getUID(), getThing().getLabel(),
                error.getClass().getSimpleName(), error.toString());
    } else {
        logger.error(
                "Thing {} '{}' had {} error on write: {} (message: {}). Stack trace follows since this is unexpected error.",
                getThing().getUID(), getThing().getLabel(), error.getClass().getName(), error.toString(),
                error.getMessage(), error);
    }

    Map<@NonNull ChannelUID, @NonNull State> states = new HashMap<>();
    ChannelUID lastWriteErrorUID = getChannelUID(ModbusBindingConstantsInternal.CHANNEL_LAST_WRITE_ERROR);
    if (isLinked(lastWriteErrorUID)) {
        states.put(lastWriteErrorUID, new DateTimeType());
    }

    synchronized (this) {
        // Update channels
        states.forEach((uid, state) -> {
            tryUpdateState(uid, state);
        });

        updateStatusIfChanged(ThingStatus.OFFLINE, ThingStatusDetail.COMMUNICATION_ERROR,
                String.format("Error (%s) with write. Request: %s. Description: %s. Message: %s",
                        error.getClass().getSimpleName(), request, error.toString(), error.getMessage()));
    }
}
From source file:com.ikanow.aleph2.harvest.script.utils.ScriptUtils.java
/**
 * Creates a ProcessBuilder pointed at the given script path and adds the working dir and environment vars for you.
 * Just runs a process that does "sh <script_file_path>"
 * @param script_file_path
 * @param working_dir
 * @return
 * @throws JsonProcessingException
 * @throws ExecutionException
 * @throws InterruptedException
 */
public static ProcessBuilder createProcessBuilderForScriptFile(final String script_file_path,
        final String working_dir, final Optional<Long> test_requested_num_objects,
        final Optional<Long> test_max_runtime_s, final Map<String, String> user_args,
        final IHarvestContext context, final DataBucketBean bucket, final String aleph_global_root_path)
        throws JsonProcessingException, InterruptedException, ExecutionException {
    _logger.debug("create pb for script file: " + script_file_path);

    ArrayList<String> args = new ArrayList<String>();
    args.add("sh");
    args.add(script_file_path);
    final ProcessBuilder pb = new ProcessBuilder(args);
    pb.directory(new File(working_dir)).redirectErrorStream(true);
    pb.environment().put("JAVA_OPTS", "");
    if (test_requested_num_objects.isPresent())
        pb.environment().put(ENV_TEST_NUM_OBJ, test_requested_num_objects.get().toString());
    if (test_max_runtime_s.isPresent())
        pb.environment().put(ENV_TEST_MAX_RUNTIME_S, test_max_runtime_s.get().toString());

    // add in default env vars
    final String classpath = Stream
            .concat(context.getHarvestContextLibraries(Optional.empty()).stream(),
                    context.getHarvestLibraries(Optional.of(bucket)).get().values().stream())
            .collect(Collectors.joining(":"));
    pb.environment().put(ENV_MODULE_PATH,
            context.getHarvestContextLibraries(Optional.empty()).stream().collect(Collectors.joining(":")));
    pb.environment().put(ENV_LIBRARY_PATH, context.getHarvestLibraries(Optional.of(bucket)).get().values()
            .stream().collect(Collectors.joining(":")));
    pb.environment().put(ENV_CLASS_PATH, classpath);
    pb.environment().put(ENV_BUCKET_HDFS_PATH, aleph_global_root_path + "/data" + bucket.full_name());
    pb.environment().put(ENV_BUCKET_SIGNATURE,
            BucketUtils.getUniqueSignature(bucket.full_name(), Optional.empty()));
    pb.environment().put(ENV_BUCKET_PATH, bucket.full_name());
    pb.environment().put(ENV_BUCKET_STR, BeanTemplateUtils.toJson(bucket).toString());

    // add user args as env vars
    user_args.forEach((k, val) -> pb.environment().put(k, val));

    return pb;
}
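A note on this example: ProcessBuilder.environment() returns a mutable Map<String, String>, so the final forEach over user_args is equivalent to pb.environment().putAll(user_args); the lambda form simply keeps the per-entry style used throughout the method.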
From source file:io.swagger.v3.parser.converter.SwaggerConverter.java
public Schema convert(io.swagger.models.Model v2Model) {
    if (v2Model == null) {
        return null;
    }
    Schema result;

    if (v2Model instanceof ArrayModel) {
        ArraySchema arraySchema = Json.mapper().convertValue(v2Model, ArraySchema.class);
        arraySchema.setItems(convert(((ArrayModel) v2Model).getItems()));
        result = arraySchema;
    } else if (v2Model instanceof ComposedModel) {
        ComposedModel composedModel = (ComposedModel) v2Model;
        ComposedSchema composed = new ComposedSchema();
        composed.setDescription(composedModel.getDescription());
        composed.setExample(composedModel.getExample());
        if (composedModel.getExternalDocs() != null) {
            composed.setExternalDocs(convert(composedModel.getExternalDocs()));
        }
        composed.setTitle(composedModel.getTitle());
        composed.setExtensions(convert(composedModel.getVendorExtensions()));
        composed.setAllOf(composedModel.getAllOf().stream().map(this::convert).collect(Collectors.toList()));
        result = composed;
    } else {
        String v2discriminator = null;

        if (v2Model instanceof ModelImpl) {
            ModelImpl model = (ModelImpl) v2Model;
            v2discriminator = model.getDiscriminator();
            model.setDiscriminator(null);
        }

        result = Json.mapper().convertValue(v2Model, Schema.class);

        if ((v2Model.getProperties() != null) && (v2Model.getProperties().size() > 0)) {
            Map<String, Property> properties = v2Model.getProperties();
            properties.forEach((k, v) -> {
                result.addProperties(k, convert(v));
            });
        }

        if (v2Model instanceof ModelImpl) {
            ModelImpl model = (ModelImpl) v2Model;
            if (model.getAdditionalProperties() != null) {
                result.setAdditionalProperties(convert(model.getAdditionalProperties()));
            }
        } else if (v2Model instanceof RefModel) {
            RefModel ref = (RefModel) v2Model;
            if (ref.get$ref().indexOf("#/definitions") == 0) {
                String updatedRef = "#/components/schemas" + ref.get$ref().substring("#/definitions".length());
                result.set$ref(updatedRef);
            }
        }

        if (v2discriminator != null) {
            Discriminator discriminator = new Discriminator();
            discriminator.setPropertyName(v2discriminator);
            result.setDiscriminator(discriminator);
        }
    }

    if (v2Model.getVendorExtensions() != null) {
        Object nullableExtension = v2Model.getVendorExtensions().get("x-nullable");
        if (nullableExtension != null) {
            result.setNullable((Boolean) nullableExtension);
        }
    }

    return result;
}
From source file:io.swagger.v3.parser.converter.SwaggerConverter.java
private SecurityScheme convertOauth2SecurityScheme(SecuritySchemeDefinition definition) {
    SecurityScheme securityScheme = new SecurityScheme();
    OAuth2Definition oAuth2Definition = (OAuth2Definition) definition;
    OAuthFlows oAuthFlows = new OAuthFlows();
    OAuthFlow oAuthFlow = new OAuthFlow();

    securityScheme.setType(SecurityScheme.Type.OAUTH2);

    String flow = oAuth2Definition.getFlow();
    if (flow != null) {
        switch (flow) {
            case "implicit":
                oAuthFlow.setAuthorizationUrl(oAuth2Definition.getAuthorizationUrl());
                oAuthFlows.setImplicit(oAuthFlow);
                break;
            case "password":
                oAuthFlow.setTokenUrl(oAuth2Definition.getTokenUrl());
                oAuthFlows.setPassword(oAuthFlow);
                break;
            case "application":
                oAuthFlow.setTokenUrl(oAuth2Definition.getTokenUrl());
                oAuthFlows.setClientCredentials(oAuthFlow);
                break;
            case "accessCode":
                oAuthFlow.setAuthorizationUrl(oAuth2Definition.getAuthorizationUrl());
                oAuthFlow.setTokenUrl(oAuth2Definition.getTokenUrl());
                oAuthFlows.setAuthorizationCode(oAuthFlow);
                break;
        }
    }

    Scopes scopes = new Scopes();
    Map<String, String> oAuth2Scopes = oAuth2Definition.getScopes();
    if (oAuth2Scopes != null) {
        oAuth2Scopes.forEach((k, v) -> scopes.addString(k, v));
    }
    oAuthFlow.setScopes(scopes);
    securityScheme.setFlows(oAuthFlows);

    return securityScheme;
}
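Since the lambda (k, v) -> scopes.addString(k, v) only forwards its two arguments, it could presumably be replaced by the method reference oAuth2Scopes.forEach(scopes::addString), assuming addString accepts a (String, String) pair as the call here implies.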
From source file:org.finra.herd.dao.impl.IndexSearchDaoImpl.java
/**
 * Private method to build a multi-match query based on a given set of fields and boost values in JSON format.
 *
 * @param multiMatchQueryBuilder A {@link MultiMatchQueryBuilder} which should be constructed
 * @param fieldsBoostsJsonString A JSON formatted String which contains individual fields and their boost values
 * @param match the set of match fields that are to be searched upon in the index search
 */
private void buildMultiMatchQueryWithBoosts(MultiMatchQueryBuilder multiMatchQueryBuilder,
        String fieldsBoostsJsonString, Set<String> match) {
    try {
        @SuppressWarnings("unchecked")
        final Map<String, String> fieldsBoostsMap = jsonHelper.unmarshallJsonToObject(Map.class,
                fieldsBoostsJsonString);

        // This additional step is needed because trying to cast an unmarshalled json to a Map of anything
        // other than String key-value pairs won't work
        final Map<String, Float> fieldsBoosts = new HashMap<>();

        // If the match column is included
        if (match != null && match.contains(MATCH_COLUMN)) {
            // Add only the column.name and schemaColumn.name fields to the fieldsBoosts map
            fieldsBoostsMap.forEach((field, boostValue) -> {
                if (field.contains(COLUMNS_NAME_FIELD) || field.contains(SCHEMA_COLUMNS_NAME_FIELD)) {
                    fieldsBoosts.put(field, Float.parseFloat(boostValue));
                }
            });
        } else {
            fieldsBoostsMap.forEach((field, boostValue) -> fieldsBoosts.put(field, Float.parseFloat(boostValue)));
        }

        // Set the fields and their respective boosts to the multi-match query
        multiMatchQueryBuilder.fields(fieldsBoosts);
    } catch (IOException e) {
        LOGGER.warn("Could not parse the configured JSON value for ngrams fields: {}",
                ConfigurationValue.ELASTICSEARCH_SEARCHABLE_FIELDS_NGRAMS, e);
    }
}
From source file:io.divolte.server.DslRecordMapperTest.java
@Test
public void shouldMapAllGeoIpFields() throws IOException, InterruptedException, ClosedServiceException {
    /*
     * Have to work around not being able to create a HttpServerExchange a bit.
     * We set up an actual server just to do a request and capture the HttpServerExchange
     * instance. Then we set up a DslRecordMapper instance with a mock ip2geo lookup service,
     * and use the previously captured exchange object against our locally created mapper
     * instance to test the ip2geo mapping (using the mock lookup service).
     */
    setupServer("minimal-mapping.groovy");
    EventPayload event = request("http://www.example.com");

    final File geoMappingFile = File.createTempFile("geo-mapping", ".groovy");
    Files.write(Resources.toByteArray(Resources.getResource("geo-mapping.groovy")), geoMappingFile);

    final ImmutableMap<String, Object> mappingConfig = ImmutableMap.of(
            "divolte.tracking.schema_mapping.mapping_script_file", geoMappingFile.getAbsolutePath(),
            "divolte.tracking.schema_file", avroFile.getAbsolutePath());
    final Config geoConfig = ConfigFactory.parseMap(mappingConfig)
            .withFallback(ConfigFactory.parseResources("dsl-mapping-test.conf"))
            .withFallback(ConfigFactory.parseResources("reference-test.conf"));
    final ValidatedConfiguration vc = new ValidatedConfiguration(() -> geoConfig);

    final CityResponse mockResponseWithEverything = loadFromClassPath("/city-response-with-everything.json",
            new TypeReference<CityResponse>() {
            });
    final Map<String, Object> expectedMapping = loadFromClassPath("/city-response-expected-mapping.json",
            new TypeReference<Map<String, Object>>() {
            });

    final LookupService mockLookupService = mock(LookupService.class);
    when(mockLookupService.lookup(any())).thenReturn(Optional.of(mockResponseWithEverything));

    final DslRecordMapper mapper = new DslRecordMapper(vc,
            new Schema.Parser().parse(
                    Resources.toString(Resources.getResource("TestRecord.avsc"), StandardCharsets.UTF_8)),
            Optional.of(mockLookupService));

    final GenericRecord record = mapper.newRecordFromExchange(event.exchange);

    // Validate the results.
    verify(mockLookupService).lookup(any());
    verifyNoMoreInteractions(mockLookupService);
    expectedMapping.forEach((k, v) -> {
        final Object recordValue = record.get(k);
        assertEquals("Property " + k + " not mapped correctly.", v, recordValue);
    });

    java.nio.file.Files.delete(geoMappingFile.toPath());
}
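Note that the assertions run inside the BiConsumer: an AssertionError thrown for any entry propagates straight out of forEach, so the test fails fast on the first property that is not mapped correctly.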
From source file:com.baidu.rigel.biplatform.tesseract.qsservice.query.impl.QueryContextSplitServiceImpl.java
/**
 * @param cube
 * @param queryContext
 * @return
 */
private QueryContextSplitResult splitByMeasureTypeStrategy(QuestionModel question, DataSourceInfo dsInfo,
        Cube cube, QueryContext queryContext) {
    QueryContextSplitResult result = new QueryContextSplitResult(QueryContextSplitStrategy.MeasureType,
            queryContext);
    if (CollectionUtils.isNotEmpty(queryContext.getQueryMeasures())) {
        Set<String> callbackMeasureName = new HashSet<String>();
        CompileContext compileContext = null;
        for (Iterator<MiniCubeMeasure> it = queryContext.getQueryMeasures().iterator(); it.hasNext();) {
            MiniCubeMeasure measure = it.next();
            if (measure.getAggregator().equals(Aggregator.CALCULATED)) {
                if (measure.getType().equals(MeasureType.CALLBACK)) {
                    callbackMeasureName.add(measure.getUniqueName());
                } else {
                    ExtendMinicubeMeasure extendMeasure = (ExtendMinicubeMeasure) measure;
                    compileContext = CompileExpression.compile(extendMeasure.getFormula());
                    result.getCompileContexts().put(measure.getUniqueName(), compileContext);
                }
                it.remove();
            }
        }
        Map<Condition, Set<String>> conditions = ConditionUtil
                .simpleMergeContexsCondition(result.getCompileContexts().values());
        if (CollectionUtils.isNotEmpty(callbackMeasureName)) {
            conditions.put(CallbackCondition.getInstance(), callbackMeasureName);
        }
        if (!queryContext.getQueryMeasures().isEmpty()) {
            if (!conditions.containsKey(EmptyCondition.getInstance())) {
                conditions.put(EmptyCondition.getInstance(), new HashSet<>());
            }
            for (MiniCubeMeasure m : queryContext.getQueryMeasures()) {
                if (!conditions.get(EmptyCondition.getInstance()).contains(m.getName())) {
                    conditions.get(EmptyCondition.getInstance()).add(m.getName());
                }
            }
        }
        if (MapUtils.isNotEmpty(conditions)) {
            conditions.forEach((con, vars) -> {
                QueryContext context = con.processCondition(ParseCoditionUtils.decorateQueryContext(
                        DeepcopyUtils.deepCopy(queryContext), question, cube, dsInfo, queryContextBuilder));
                context.getQueryMeasures().clear();
                for (String var : vars) {
                    MiniCubeMeasure measure = null;
                    if (MetaNameUtil.isUniqueName(var)) {
                        String name = MetaNameUtil.parseUnique2NameArray(var)[1];
                        measure = (MiniCubeMeasure) cube.getMeasures().get(name);
                    } else {
                        measure = (MiniCubeMeasure) cube.getMeasures()
                                .get(PlaceHolderUtils.getKeyFromPlaceHolder(var));
                    }
                    if (measure == null) {
                        throw new IllegalSplitResultException(result,
                                "can not get measure:" + var + " from cube", "SPILT_QUESTION");
                    }
                    if (!context.getQueryMeasures().contains(measure)) {
                        context.getQueryMeasures().add(measure);
                    }
                }
                result.getConditionQueryContext().put(con, context);
            });
        }
    }
    return result;
}
From source file:org.mitre.mpf.wfm.businessrules.impl.StreamingJobRequestBoImpl.java
/**
 * Send health reports to the health report callbacks associated with the streaming jobs.
 * Note that OpenMPF supports sending periodic health reports that contain health for all streaming jobs which have
 * defined the same HealthReportCallbackUri. This method will filter out jobIds for any jobs which are not current
 * streaming jobs, plus will optionally filter out streaming jobs that have been terminated.
 * Note that out-of-cycle health reports that may have been sent due to a change in job status will not
 * delay sending of the periodic (i.e. scheduled) health report.
 * @param jobIds unique ids for the streaming jobs to be reported on. Must not be null or empty.
 * @param isActive If true, then streaming jobs which have terminal JobStatus will be
 * filtered out. Otherwise, all current streaming jobs will be processed.
 * @throws WfmProcessingException thrown if an error occurs
 */
public void sendHealthReports(List<Long> jobIds, boolean isActive) throws WfmProcessingException {
    if (jobIds == null) {
        throw new WfmProcessingException("Error: jobIds must not be null.");
    } else if (jobIds.isEmpty()) {
        throw new WfmProcessingException("Error: jobIds must not be empty.");
    } else {
        // While we are receiving the list of all job ids known to the system, some of these jobs may not be
        // current streaming jobs in REDIS. Reduce the List of jobIds to only include streaming jobIds that
        // are in REDIS. Optionally reduce that set to only include non-terminal jobs.
        List<Long> currentActiveJobIds = redis.getCurrentStreamingJobs(jobIds, isActive);

        // If there are no active jobs, no health reports will be sent.
        if (currentActiveJobIds != null && !currentActiveJobIds.isEmpty()) {
            // Get the list of health report callback URIs associated with the specified active jobs. Note that
            // this usage will return unique healthReportCallbackUris. Doing this so streaming jobs which specify
            // the same healthReportCallbackUri will only POST the health report once to the single
            // healthReportCallbackUri. Note that the POST method is always used for sending health reports;
            // the GET method is not supported. The health report that is sent will contain health for all
            // streaming jobs with the same healthReportCallbackUri.
            // healthReportCallbackUriToActiveJobIdListMap: key is the healthReportCallbackUri, value is the
            // List of active jobIds that specified that healthReportCallbackUri.
            Map<String, List<Long>> healthReportCallbackUriToActiveJobIdListMap = redis
                    .getHealthReportCallbackURIAsMap(currentActiveJobIds);

            // For each healthReportCallbackUri, send a health report containing health information for each
            // streaming job that specified the same healthReportCallbackUri. Note that the
            // sendHealthReportCallback method won't be called if healthReportCallbackUriToActiveJobIdListMap
            // is empty. This would be a possibility if no streaming jobs have requested that a health report
            // be sent.
            healthReportCallbackUriToActiveJobIdListMap.forEach(callbackUtils::sendHealthReportCallback);
        }
    }
}
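The forEach here takes a method reference rather than a lambda: callbackUtils::sendHealthReportCallback is shorthand for the explicit BiConsumer (uri, ids) -> callbackUtils.sendHealthReportCallback(uri, ids), invoked once per distinct callback URI.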
From source file:org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor.java
@Override
public Object postProcessAfterInitialization(final Object bean, String beanName) {
    Class<?> targetClass = AopProxyUtils.ultimateTargetClass(bean);
    if (!this.nonAnnotatedClasses.contains(targetClass)) {
        Map<Method, Set<Scheduled>> annotatedMethods = MethodIntrospector.selectMethods(targetClass,
                (MethodIntrospector.MetadataLookup<Set<Scheduled>>) method -> {
                    Set<Scheduled> scheduledMethods = AnnotatedElementUtils
                            .getMergedRepeatableAnnotations(method, Scheduled.class, Schedules.class);
                    return (!scheduledMethods.isEmpty() ? scheduledMethods : null);
                });
        if (annotatedMethods.isEmpty()) {
            this.nonAnnotatedClasses.add(targetClass);
            if (logger.isTraceEnabled()) {
                logger.trace("No @Scheduled annotations found on bean class: " + bean.getClass());
            }
        } else {
            // Non-empty set of methods
            annotatedMethods.forEach((method, scheduledMethods) -> scheduledMethods
                    .forEach(scheduled -> processScheduled(scheduled, method, bean)));
            if (logger.isDebugEnabled()) {
                logger.debug(annotatedMethods.size() + " @Scheduled methods processed on bean '" + beanName
                        + "': " + annotatedMethods);
            }
        }
    }
    return bean;
}
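This example nests two forEach calls: the outer one walks the method-to-annotations map while the inner one walks each Set<Scheduled>, so every (method, annotation) pair is registered exactly once.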