Usage examples for the java.util.Map method getOrDefault
default V getOrDefault(Object key, V defaultValue)
From source file: org.polyfill.services.FinancialTimesPolyfillLoaderService.java
/**
 * Looks up a nested map stored under {@code key}, falling back to an empty map when absent.
 *
 * @param map source map to query
 * @param key key whose value is expected to hold a nested map
 * @return the nested map, or an immutable empty map if the key is missing
 */
@SuppressWarnings("unchecked") // value's generic type cannot be verified at runtime
private Map<String, Object> getMap(Map<String, Object> map, String key) {
    return (Map<String, Object>) map.getOrDefault(key, Collections.emptyMap());
}
From source file: org.polyfill.services.FinancialTimesPolyfillLoaderService.java
/**
 * Looks up a list of strings stored under {@code key}, falling back to an empty list when absent.
 *
 * @param map source map to query
 * @param key key whose value is expected to hold a list of strings
 * @return the list, or an immutable empty list if the key is missing
 */
@SuppressWarnings("unchecked") // value's generic type cannot be verified at runtime
private List<String> getList(Map<String, Object> map, String key) {
    return (List<String>) map.getOrDefault(key, Collections.emptyList());
}
From source file: uk.ac.kcl.testservices.JdbcMapWithoutSchedulingTests.java
/**
 * End-to-end pipeline test: launches the batch job, waits for it to populate the
 * output table, then verifies row content and counts in both the DB and Elasticsearch.
 */
@Test
@DirtiesContext
public void jdbcMapPipelineTest() {
    jobLauncher.launchJob();
    try {
        // Fixed wait for the asynchronous job to finish.
        // NOTE(review): a fixed sleep is inherently flaky; consider polling for completion.
        Thread.sleep(5000);
    } catch (InterruptedException e) {
        // Restore the interrupt flag instead of swallowing the exception,
        // and abort rather than asserting against an incomplete pipeline.
        Thread.currentThread().interrupt();
        throw new IllegalStateException("Interrupted while waiting for pipeline completion", e);
    }
    Map<String, Object> row = dbmsTestUtils.getRowInOutputTable(1);
    String stringContent = (String) row.getOrDefault("output", "");
    assertTrue(stringContent.contains("Disproportionate dwarfism"));
    assertEquals(65, dbmsTestUtils.countRowsInOutputTable());
    assertEquals(65, testUtils.countOutputDocsInES());
}
From source file: org.apache.metron.common.message.metadata.MetadataUtil.java
/** * Return the prefix that we want to use for metadata keys. This comes from the config and is defaulted to * 'metron.metadata'./*from w w w . ja v a 2s. c om*/ * * @param config The rawMessageStrategyConfig * @return */ public String getMetadataPrefix(Map<String, Object> config) { String prefix = (String) config.getOrDefault(METADATA_PREFIX_CONFIG, METADATA_PREFIX); if (StringUtils.isEmpty(prefix)) { return null; } return prefix; }
From source file: com.netflix.spinnaker.orca.clouddriver.tasks.pipeline.MigratePipelineClustersTask.java
/**
 * Extracts the cluster definitions from every stage of the given pipeline.
 * Each extracted cluster is wrapped as a single-entry map under the key "cluster".
 * Stages without a matching extractor contribute nothing.
 *
 * @param pipeline pipeline definition; its "stages" entry is expected to be a list of stage maps
 * @return all extracted clusters, in stage order
 */
private List<Map> getSources(Map<String, Object> pipeline) {
    List<Map> stages = (List<Map>) pipeline.getOrDefault("stages", new ArrayList<>());
    List<Map> sources = new ArrayList<>();
    for (Map stage : stages) {
        PipelineClusterExtractor.getExtractor(stage, extractors).ifPresent(extractor -> {
            for (Object cluster : extractor.extractClusters(stage)) {
                sources.add(Collections.singletonMap("cluster", cluster));
            }
        });
    }
    return sources;
}
From source file: com.acmutv.ontoqa.core.knowledge.KnowledgeManager.java
/**
 * Checks the query feasibility against the ontology by building a consistency
 * ASK query from the query's triple patterns and submitting it to the ontology.
 *
 * @param ontology the ontology.
 * @param query the query.
 * @return true, if the query is feasible with the ontology; false, otherwise.
 */
public static boolean checkFeasibility2(Ontology ontology, Query query) {
    // Collected from every triple pattern in the query body.
    final Set<Node> subjects = new HashSet<>();
    final Set<Node> predicates = new HashSet<>();
    final Set<Node> objects = new HashSet<>();
    // NOTE(review): these maps keep only the LAST subject/object seen per predicate;
    // a predicate occurring in several triples loses earlier pairs — confirm intended.
    final Map<Node, Node> predicateSubjects = new HashMap<>();
    final Map<Node, Node> predicateObjects = new HashMap<>();
    // Walk the query pattern, recording the nodes of each triple path.
    ElementWalker.walk(query.getQueryPattern(), new ElementVisitorBase() {
        public void visit(ElementPathBlock el) {
            Iterator<TriplePath> triples = el.patternElts();
            while (triples.hasNext()) {
                TriplePath triple = triples.next();
                subjects.add(triple.getSubject());
                predicates.add(triple.getPredicate());
                objects.add(triple.getObject());
                predicateSubjects.put(triple.getPredicate(), triple.getSubject());
                predicateObjects.put(triple.getPredicate(), triple.getObject());
            }
        }
    });
    LOGGER.debug("subjects: {}", subjects);
    LOGGER.debug("predicates: {}", predicates);
    LOGGER.debug("objects: {}", objects);
    LOGGER.debug("predicateSubjects: {}", predicateSubjects);
    LOGGER.debug("predicateObjects: {}", predicateObjects);
    // One type/domain/range constraint group per predicate; duplicates collapse in the set.
    Set<String> statements = new HashSet<>();
    // Maps each resource IRI (or variable) to the class variable assigned to it,
    // so that the same resource reuses the same ?classN variable across triples.
    Map<String, String> resourceToClassVar = new HashMap<>();
    for (Node predicate : predicates) {
        String subj_iri = predicateSubjects.get(predicate).toString();
        String obj_iri = predicateObjects.get(predicate).toString();
        String predicate_iri = predicate.toString();
        // SPARQL variables are rendered with a leading '?'.
        boolean isSubjVar = subj_iri.startsWith("?");
        boolean isObjVar = obj_iri.startsWith("?");
        // Assign a fresh ?classN variable on first sight; the map size gives a unique index.
        String subjClassVar = resourceToClassVar.getOrDefault(subj_iri,
            "?class" + resourceToClassVar.keySet().size());
        resourceToClassVar.put(subj_iri, subjClassVar);
        String objClassVar = resourceToClassVar.getOrDefault(obj_iri,
            "?class" + resourceToClassVar.keySet().size());
        resourceToClassVar.put(obj_iri, objClassVar);
        LOGGER.trace("Processing: {} (var: {})| {} | {} (var: {})", subj_iri, isSubjVar, predicate_iri,
            obj_iri, isObjVar);
        // Variables stay bare; concrete IRIs are wrapped in angle brackets.
        String subjectConstraint = String.format("%s <%s> %s",
            (isSubjVar) ? subj_iri : '<' + subj_iri + '>', RDF_TYPE, subjClassVar);
        String objectConstraint = String.format("%s <%s> %s",
            (isObjVar) ? obj_iri : '<' + obj_iri + '>', RDF_TYPE, objClassVar);
        // Tie the predicate's rdfs:domain/rdfs:range to the subject/object class variables.
        String predicateConstraint = String.format("<%s> <%s> %s . <%s> <%s> %s", predicate_iri,
            RDFS_DOMAIN, subjClassVar, predicate_iri, RDFS_RANGE, objClassVar);
        statements.add(subjectConstraint);
        statements.add(objectConstraint);
        statements.add(predicateConstraint);
    }
    // Join all constraints into a single ASK query body.
    String consistencyStatements = statements.stream().collect(Collectors.joining(" . "));
    Query consistencyQuery = QueryFactory.create("ASK WHERE { " + consistencyStatements + " }");
    LOGGER.debug("consistency query: {}", consistencyQuery);
    QueryResult qQueryResult;
    try {
        qQueryResult = KnowledgeManager.submit(ontology, consistencyQuery);
    } catch (com.acmutv.ontoqa.core.exception.QueryException exc) {
        // A failed submission is treated as infeasible rather than propagated.
        LOGGER.warn(exc.getMessage());
        return false;
    }
    Answer answer = qQueryResult.toAnswer();
    if (SimpleAnswer.FALSE.equals(answer)) {
        return false;
    }
    return true;
}
From source file: com.netflix.spinnaker.echo.pubsub.google.GooglePubsubEventListener.java
private void publishEvent(GooglePubsubPublisher p, Event event) { String jsonPayload;// ww w. jav a 2 s. c om try { jsonPayload = mapper.writeValueAsString(event); } catch (JsonProcessingException jpe) { log.error("Could not serialize event message: {}", jpe); return; } Map<String, String> attributes = new HashMap<>(); if (event.getDetails() != null) { Metadata m = event.getDetails(); String rawType = m.getType(); if (StringUtils.isNotEmpty(rawType)) { attributes.put("rawType", rawType); String[] eventDetails = rawType.split(":"); if (eventDetails.length == 3) { attributes.put("source", eventDetails[0]); attributes.put("type", eventDetails[1]); attributes.put("status", eventDetails[2]); } } if (StringUtils.isNotEmpty(m.getApplication())) { attributes.put("application", m.getApplication()); } if (m.getAttributes() != null && !m.getAttributes().isEmpty()) { attributes.putAll(m.getAttributes()); } } if (event.getContent() != null && !event.getContent().isEmpty()) { Map content = event.getContent(); String name = content.getOrDefault("name", "").toString(); if (StringUtils.isNotEmpty(name)) { attributes.put("name", name); } String taskName = content.getOrDefault("taskName", "").toString(); if (StringUtils.isNotEmpty(taskName)) { attributes.put("taskName", taskName); } } p.publish(jsonPayload, attributes); }
From source file: majordodo.worker.TaskModeAwareExecutorFactory.java
@Override public TaskExecutor createTaskExecutor(String taskType, Map<String, Object> parameters) { String mode = (String) parameters.getOrDefault("mode", Task.MODE_DEFAULT); if (mode.equals(Task.MODE_EXECUTE_FACTORY)) { return inner.createTaskExecutor(taskType, parameters); }// w ww . j av a2s .co m ClassLoader tccl = Thread.currentThread().getContextClassLoader(); try { String parameter = (String) parameters.getOrDefault("parameter", ""); if (parameter.startsWith("base64:")) { parameter = parameter.substring("base64:".length()); byte[] serializedObjectData = Base64.getDecoder().decode(parameter); ByteArrayInputStream ii = new ByteArrayInputStream(serializedObjectData); ObjectInputStream is = new ObjectInputStream(ii) { @Override public Class resolveClass(ObjectStreamClass desc) throws IOException, ClassNotFoundException { try { return tccl.loadClass(desc.getName()); } catch (Exception e) { } return super.resolveClass(desc); } }; TaskExecutor res = (TaskExecutor) is.readUnshared(); return res; } else if (parameter.startsWith("newinstance:")) { parameter = parameter.substring("newinstance:".length()); URLClassLoader cc = (URLClassLoader) tccl; Class clazz = Class.forName(parameter, true, tccl); TaskExecutor res = (TaskExecutor) clazz.newInstance(); return res; } else { throw new RuntimeException("bad parameter: " + parameter); } } catch (Exception err) { return new TaskExecutor() { @Override public String executeTask(Map<String, Object> parameters) throws Exception { throw err; } }; } }
From source file: org.cryptomator.frontend.webdav.mount.MacOsXShellScriptWebDavMounter.java
@Override public WebDavMount mount(URI uri, Map<MountParam, Optional<String>> mountParams) throws CommandFailedException { final String mountName = mountParams.getOrDefault(MountParam.MOUNT_NAME, Optional.empty()) .orElseThrow(() -> {/*www .ja v a 2s . co m*/ return new IllegalArgumentException("Missing mount parameter MOUNT_NAME."); }); // we don't use the uri to derive a path, as it *could* be longer than 255 chars. final String path = "/Volumes/Cryptomator_" + UUID.randomUUID().toString(); final Script mountScript = Script .fromLines("mkdir \"$MOUNT_PATH\"", "mount_webdav -S -v $MOUNT_NAME \"$DAV_AUTHORITY$DAV_PATH\" \"$MOUNT_PATH\"") .addEnv("DAV_AUTHORITY", uri.getRawAuthority()).addEnv("DAV_PATH", uri.getRawPath()) .addEnv("MOUNT_PATH", path).addEnv("MOUNT_NAME", mountName); mountScript.execute(); return new MacWebDavMount(path); }
From source file: com.netflix.spinnaker.orca.clouddriver.tasks.pipeline.MigratePipelineClustersTask.java
/**
 * Copies the migration mapping options from the stage context onto the operation,
 * substituting an empty map for absent mappings and false for absent boolean flags.
 *
 * @param context stage context to read the mappings from
 * @param operation operation map the mappings are written into
 */
private void addMappings(Map<String, Object> context, Map<String, Object> operation) {
    // Map-valued options default to a fresh empty map when not supplied.
    String[] mappingKeys = {
        "regionMapping", "accountMapping", "subnetTypeMapping",
        "elbSubnetTypeMapping", "iamRoleMapping", "keyPairMapping"
    };
    for (String key : mappingKeys) {
        operation.put(key, context.getOrDefault(key, new HashMap<>()));
    }
    // Boolean flags default to false.
    String[] flagKeys = {"dryRun", "allowIngressFromClassic"};
    for (String key : flagKeys) {
        operation.put(key, context.getOrDefault(key, false));
    }
}