List of usage examples for java.util.Optional.orElse
public T orElse(T other)
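Returns the value wrapped by the Optional if one is present, otherwise returns other. A minimal, self-contained sketch of both cases (class and variable names are illustrative only):

import java.util.Optional;

public class OrElseBasics {
    public static void main(String[] args) {
        Optional<String> present = Optional.of("configured-value");
        Optional<String> empty = Optional.empty();

        System.out.println(present.orElse("default")); // prints "configured-value" - the wrapped value wins
        System.out.println(empty.orElse("default"));   // prints "default" - the fallback is used
    }
}

The examples below show the same pattern in real projects: supplying a default class name, a default date, a default list, or an empty configuration map when the Optional is empty.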
From source file: org.apache.metron.storm.kafka.flux.SimpleStormKafkaBuilder.java

private static <T> Class<Deserializer<T>> createDeserializer(Optional<String> deserializerClass,
                                                             String defaultDeserializerClass) {
  try {
    return (Class<Deserializer<T>>) Class.forName(deserializerClass.orElse(defaultDeserializerClass));
  } catch (Exception e) {
    throw new IllegalStateException("Unable to create a deserializer: "
        + deserializerClass.orElse(defaultDeserializerClass) + ": " + e.getMessage(), e);
  }
}
From source file: org.opendatakit.briefcase.export.SubmissionParser.java

/**
 * Returns a sorted {@link List} of {@link Path} instances pointing to all the
 * submissions of a form that belong to the given {@link DateRange}.
 * <p>
 * Each file is briefly parsed to obtain its submission date, which is used as
 * the sorting criterion and for filtering.
 */
static List<Path> getListOfSubmissionFiles(FormDefinition formDef, DateRange dateRange,
    SubmissionExportErrorCallback onParsingError) {
  Path instancesDir = formDef.getFormDir().resolve("instances");
  if (!Files.exists(instancesDir) || !Files.isReadable(instancesDir))
    return Collections.emptyList();
  // TODO Migrate this code to Try<Pair<Path, Option<OffsetDate>>> to be able to filter failed parsing attempts
  List<Pair<Path, OffsetDateTime>> paths = new ArrayList<>();
  list(instancesDir).filter(UncheckedFiles::isInstanceDir).forEach(instanceDir -> {
    Path submissionFile = instanceDir.resolve("submission.xml");
    try {
      Optional<OffsetDateTime> submissionDate = readSubmissionDate(submissionFile, onParsingError);
      paths.add(Pair.of(submissionFile,
          submissionDate.orElse(OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))));
    } catch (Throwable t) {
      log.error("Can't read submission date", t);
      EventBus.publish(ExportEvent.failureSubmission(formDef, instanceDir.getFileName().toString(), t));
    }
  });
  return paths.parallelStream()
      // Filter out submissions outside the given date range
      .filter(pair -> dateRange.contains(pair.getRight()))
      .map(Pair::getLeft)
      .collect(toList());
}
From source file: com.spotify.styx.model.deprecated.WorkflowConfiguration.java

@JsonCreator
public static WorkflowConfiguration create(@JsonProperty("id") String id,
    @JsonProperty("partitioning") Schedule partitioning,
    @JsonProperty("docker_image") Optional<String> dockerImage,
    @JsonProperty("docker_args") Optional<List<String>> dockerArgs,
    @JsonProperty("docker_termination_logging") Optional<Boolean> dockerTerminationLogging,
    @JsonProperty("secret") Optional<Secret> secret,
    @JsonProperty("resources") List<String> resources) {
  return new AutoValue_WorkflowConfiguration(id, partitioning, dockerImage, dockerArgs,
      dockerTerminationLogging.orElse(false), secret,
      resources == null ? Collections.emptyList() : resources);
}
From source file: com.spotify.styx.model.WorkflowConfiguration.java

@JsonCreator
public static WorkflowConfiguration create(@JsonProperty("id") String id,
    @JsonProperty("schedule") Schedule schedule,
    @JsonProperty("offset") Optional<String> offset,
    @JsonProperty("docker_image") Optional<String> dockerImage,
    @JsonProperty("docker_args") Optional<List<String>> dockerArgs,
    @JsonProperty("docker_termination_logging") Optional<Boolean> dockerTerminationLogging,
    @JsonProperty("secret") Optional<Secret> secret,
    @JsonProperty("resources") List<String> resources) {
  return new AutoValue_WorkflowConfiguration(id, schedule, offset, dockerImage, dockerArgs,
      dockerTerminationLogging.orElse(false), secret,
      resources == null ? Collections.emptyList() : resources);
}
From source file: com.ikanow.aleph2.data_model.utils.TimeUtils.java

/**
 * Returns a date from a human-readable date - can be in the future or past.
 * @param human_readable_date - the date expressed in words, e.g. "next wednesday". Uses some simple regexes (1h, 1d, 1month, etc.) and Natty (try examples at http://natty.joestelmach.com/try.jsp#)
 * @param base_date - for relative dates, locks the date to this origin
 * @return the machine-readable date, or an error
 */
public static Validation<String, Date> getSchedule(final String human_readable_date, Optional<Date> base_date) {
  try {
    // just read the first - note can ignore all the error checking here, just fail out using the try/catch
    final Date adjusted_date = base_date.orElse(new Date());
    CalendarSource.setBaseDate(adjusted_date);
    final Parser parser = new Parser();
    final List<DateGroup> l = parser.parse(human_readable_date);
    final DateGroup d = l.get(0);
    if (!d.getText().matches("^.*[a-zA-Z]+.*$")) {
      // only matches numbers, not allowed - must have missed a prefix
      return Validation.fail(ErrorUtils.get(ErrorUtils.INVALID_DATETIME_FORMAT, human_readable_date));
    }
    final List<Date> l2 = d.getDates();
    return Validation.success(l2.get(0));
  } catch (Exception e) {
    final Pattern numChronoPattern = Pattern.compile("^([\\d]+)(.*)");
    final Matcher m = numChronoPattern.matcher(human_readable_date);
    return m.find()
        ? getTimePeriod(m.group(2)).map(c -> c.getDuration().get(ChronoUnit.SECONDS))
            .map(l -> new Date(base_date.orElse(new Date()).getTime() + Long.parseLong(m.group(1)) * l * 1000L))
        : getTimePeriod(human_readable_date).map(c -> c.getDuration().get(ChronoUnit.SECONDS))
            .map(l -> new Date(base_date.orElse(new Date()).getTime() + l * 1000L));
  }
}
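A side note on this example: orElse always evaluates its argument, so base_date.orElse(new Date()) constructs a fresh Date even when base_date is present. That is harmless here, but when the fallback is expensive to build, orElseGet defers the work to a Supplier. A hypothetical comparison, not part of TimeUtils:

import java.util.Date;
import java.util.Optional;

public class OrElseVsOrElseGet {
    public static void main(String[] args) {
        Optional<Date> base_date = Optional.of(new Date(0L));

        Date eager = base_date.orElse(new Date());   // the fallback Date is constructed even though base_date is present
        Date lazy = base_date.orElseGet(Date::new);  // the Supplier only runs if base_date is empty

        System.out.println(eager + " / " + lazy);
    }
}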
From source file: com.netflix.spinnaker.kork.archaius.ArchaiusConfiguration.java

@Bean
static ArchaiusInitializingBeanPostProcessor archaiusInitializingBeanPostProcessor(
    ConfigurableApplicationContext applicationContext,
    Optional<List<ClasspathPropertySource>> propertyBindings,
    AbstractPollingScheduler pollingScheduler,
    SpringEnvironmentPolledConfigurationSource polledConfigurationSource) {
  return new ArchaiusInitializingBeanPostProcessor(applicationContext, pollingScheduler,
      polledConfigurationSource, propertyBindings.orElse(Collections.emptyList()));
}
From source file: com.ikanow.aleph2.example.flume_harvester.utils.FlumeUtils.java

public static String decodeValue(final String val, final String sub_prefix,
    Optional<String> morphline_cfg_path, String sig) {
  return val.replace(sub_prefix + "signature", encodeSignature(sig))
      .replace(sub_prefix + "morphline", morphline_cfg_path.orElse(""))
      .replace(sub_prefix + "hostname", HostInformationUtils.getHostname());
}
From source file: com.homeadvisor.kafdrop.util.JmxUtils.java

public static int getJmxPort(final Environment environment) {
  Optional<Integer> jmxPort = Optional.empty();
  final Properties managementProperties = Agent.getManagementProperties();
  if (managementProperties != null) {
    final String portProperty = managementProperties.getProperty(JMX_PORT_PROPERTY);
    if (portProperty != null) {
      final Optional<Integer> port = Optional.ofNullable(Ints.tryParse(portProperty));
      jmxPort = port;
    }
  }
  return jmxPort.orElse(0);
}
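For comparison, the same lookup can be collapsed into a single Optional chain ending in orElse(0). This is only a sketch that assumes the same Agent, Ints, and JMX_PORT_PROPERTY definitions as the class above; it is not the code that ships with kafdrop:

// Hypothetical condensed variant of getJmxPort; relies on the same Agent/Ints/JMX_PORT_PROPERTY context as above
public static int getJmxPort(final Environment environment) {
  return Optional.ofNullable(Agent.getManagementProperties())
      .map(props -> props.getProperty(JMX_PORT_PROPERTY))
      .map(Ints::tryParse)
      .orElse(0);
}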
From source file: com.ikanow.aleph2.distributed_services.utils.KafkaUtils.java

/**
 * Generates a connection string by reading ZooKeeper.
 * @param curator the Curator client used to read from ZooKeeper
 * @param path_override optional override for the broker path (defaults to "/brokers/ids")
 * @param mapper the Jackson ObjectMapper used to parse the broker JSON nodes
 * @return a comma-separated list of host:port broker entries
 * @throws Exception
 */
public static String getBrokerListFromZookeeper(final CuratorFramework curator, Optional<String> path_override,
    final ObjectMapper mapper) throws Exception {
  final String path = path_override.orElse("/brokers/ids");
  final List<String> brokers = curator.getChildren().forPath(path);
  return brokers.stream()
      .map(Lambdas.wrap_u(broker_node -> new String(curator.getData().forPath(path + "/" + broker_node))))
      .flatMap(Lambdas.flatWrap_i(broker_str -> mapper.readTree(broker_str))) // (just discard any badly formatted nodes)
      .flatMap(Lambdas.flatWrap_i(json -> json.get("host").asText() + ":" + json.get("port").asText()))
      .collect(Collectors.joining(","));
}
From source file: org.apache.metron.parsers.topology.ParserTopologyBuilder.java

/**
 * Create a spout that consumes tuples from a Kafka topic.
 *
 * @param zkQuorum            Zookeeper URL
 * @param sensorType          Type of sensor
 * @param securityProtocol    Security protocol to use, if any
 * @param kafkaConfigOptional Configuration options for the Kafka spout
 * @param parserConfig        Configuration for the parser
 * @return the Kafka spout
 */
private static StormKafkaSpout<Object, Object> createKafkaSpout(String zkQuorum, String sensorType,
    Optional<String> securityProtocol, Optional<Map<String, Object>> kafkaConfigOptional,
    SensorParserConfig parserConfig) {
  Map<String, Object> kafkaSpoutConfigOptions = kafkaConfigOptional.orElse(new HashMap<>());
  String inputTopic = parserConfig.getSensorTopic() != null ? parserConfig.getSensorTopic() : sensorType;
  kafkaSpoutConfigOptions.putIfAbsent(SpoutConfiguration.FIRST_POLL_OFFSET_STRATEGY.key,
      KafkaSpoutConfig.FirstPollOffsetStrategy.UNCOMMITTED_EARLIEST.name());
  kafkaSpoutConfigOptions.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, inputTopic + "_parser");
  if (securityProtocol.isPresent()) {
    kafkaSpoutConfigOptions.putIfAbsent("security.protocol",
        KafkaUtils.INSTANCE.normalizeProtocol(securityProtocol.get()));
  }
  return SimpleStormKafkaBuilder.create(inputTopic, zkQuorum,
      Arrays.asList(SimpleStormKafkaBuilder.FieldsConfiguration.VALUE.getFieldName(),
          SimpleStormKafkaBuilder.FieldsConfiguration.KEY.getFieldName(),
          SimpleStormKafkaBuilder.FieldsConfiguration.TOPIC.getFieldName()),
      kafkaSpoutConfigOptions);
}