Example usage for java.util Optional orElse

List of usage examples for java.util Optional orElse

Introduction

This page lists example usages of java.util.Optional.orElse, collected from real-world source files.

Prototype

public T orElse(T other) 

Document

If a value is present, returns the value, otherwise returns other.
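
As a quick illustration of the documented behavior, here is a minimal, self-contained sketch (class and variable names are illustrative only):

import java.util.Optional;

public class OrElseDemo {
    public static void main(String[] args) {
        Optional<String> present = Optional.of("value");
        Optional<String> empty = Optional.empty();

        // The contained value wins when the Optional is non-empty...
        System.out.println(present.orElse("fallback")); // prints "value"
        // ...and the supplied default is returned when it is empty.
        System.out.println(empty.orElse("fallback"));   // prints "fallback"
    }
}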

Usage

From source file:com.ikanow.aleph2.harvest.logstash.utils.LogstashUtils.java

/** Builds a process to execute
 * @param global
 * @param bucket_config
 * @param logstash_config
 * @param requested_docs
 * @param bucket_path if this is present, will log output to /tmp/unique_sig
 * @param context 
 * @return
 */
public static ProcessBuilder buildLogstashTest(final LogstashHarvesterConfigBean global,
        final LogstashBucketConfigBean bucket_config, final String logstash_config, final long requested_docs,
        final Optional<String> bucket_path) {

    final String log_file = System.getProperty("java.io.tmpdir") + File.separator
            + BucketUtils.getUniqueSignature(bucket_path.orElse("DNE"), Optional.empty());
    try { //(delete log file if it exists)
        new File(log_file).delete();
    } catch (Exception e) {
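        // ignore any failure: deleting a stale log file is best-effort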
    }

    ArrayList<String> args = new ArrayList<String>();
    args.addAll(Arrays.asList(global.binary_path(), "-e", logstash_config));
    if (bucket_path.isPresent()) {
        args.addAll(Arrays.asList("-l", log_file));
    }
    if (0L == requested_docs) {
        args.add("-t"); // test mode, must faster
    } //TESTED

    if (bucket_config.debug_verbosity()) {
        args.add("--debug");
    } else {
        args.add("--verbose");
    }
    ProcessBuilder logstashProcessBuilder = new ProcessBuilder(args);
    logstashProcessBuilder = logstashProcessBuilder.directory(new File(global.working_dir()))
            .redirectErrorStream(true);
    logstashProcessBuilder.environment().put("JAVA_OPTS", "");

    return logstashProcessBuilder;
}

From source file:com.ikanow.aleph2.data_model.utils.ProcessUtils.java

private static boolean killProcess(final String pid, final Optional<Integer> kill_signal) throws IOException {
    //      kill -15 the process, wait a few cycles to let it die            
    final ProcessBuilder pb = new ProcessBuilder(Arrays.asList("kill", "-" + kill_signal.orElse(15), pid));
    logger.debug("trying to kill -" + kill_signal.orElse(15) + " pid: " + pid);
    final Process px = pb.start();
    for (int i = 0; i < 5; ++i) {
        try {
            Thread.sleep(1000L);
        } catch (Exception e) {
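            // ignore interruption and keep polling for process exit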
        }
        if (!isProcessRunning(pid)) {
            break;
        }
    }
    if (!isProcessRunning(pid)) {
        return 0 == px.exitValue();
    } else {
        //we are still alive, so send a harder kill signal if we haven't already sent a 9
        if (kill_signal.isPresent() && kill_signal.get() == 9) {
            return false;
        } else {
            logger.debug("Timed out trying to kill: " + pid + " sending kill -9 to force kill");
            return killProcess(pid, Optional.of(9));
        }

    }
}

From source file:com.uber.hoodie.common.model.HoodieTestUtils.java

public static final String getLogFilePath(String basePath, String partitionPath, String commitTime,
        String fileID, Optional<Integer> version) throws IOException {
    return basePath + "/" + partitionPath + "/"
            + FSUtils.makeLogFileName(fileID, ".log", commitTime, version.orElse(DEFAULT_TASK_PARTITIONID));
}

From source file:com.ikanow.aleph2.harvest.logstash.utils.LogstashUtils.java

/**
 * Reads the given output file and outputs it to the logger at the specified log level.
 * @param logger
 * @param level
 * @param output_file
 * @throws IOException 
 */
public static void sendOutputToLogger(final IBucketLogger logger, final Level level, final File output_file,
        final Optional<Long> max_lines) throws IOException {
    //      _logger.error("Reading output file: " + output_file + " to send to logger at level: " + level);
    Files.lines(output_file.toPath()).limit(max_lines.orElse(10000L)).forEach(line -> {
        try {
            //convert line to valid json, then parse json, build BMB object from it
            final String fixed_line = line.replaceAll(logstash_colon_search, logstash_colon_replace)
                    .replaceAll(logstash_arrow_search, logstash_arrow_replace)
                    .replaceAll(logstash_newline_search, logstash_newline_replace);
            final String plugin_fixed = fixPlugin(fixed_line);
            final ObjectNode line_object = (ObjectNode) _mapper.readTree(plugin_fixed);
            //move specific fields we want into BMB
            final Date date = parseLogstashDate(line_object.remove("timestamp").asText());
            final Level logstash_level = Level.valueOf(line_object.remove("level").asText());
            final String message = line_object.remove("message").asText();
            //move everything else into details map
            logger.inefficientLog(logstash_level,
                    new BasicMessageBean(date, true, LogstashHarvestService.class.getSimpleName(),
                            "test_output", null, message,
                            StreamSupport
                                    .stream(Spliterators.spliteratorUnknownSize(line_object.fields(),
                                            Spliterator.ORDERED), true)
                                    .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue().asText()))));
        } catch (Exception ex) {
            //fallback on conversion failure
            logger.inefficientLog(level, ErrorUtils
                    .buildSuccessMessage(LogstashHarvestService.class.getSimpleName(), "test_output", line));
        }
    });
    //TODO should we delete log file after we've read it?
}

From source file:com.ikanow.aleph2.distributed_services.utils.KafkaUtils.java

/** A simpler set of Kafka properties, just requiring the ZK/broker list (note you can get the broker list from ZK using getBrokerListFromZookeeper)
 * @param zk_connection
 * @param broker_list
 */
public static void setStandardKafkaProperties(final String zk_connection, final String broker_list,
        final String cluster_name, Optional<Map<String, String>> optional_kafka_properties) {
    final Map<String, Object> config_map_kafka = ImmutableMap.<String, Object>builder()
            .putAll(optional_kafka_properties.orElse(Collections.emptyMap()))
            .put("metadata.broker.list", broker_list).put("zookeeper.connect", zk_connection)
            .put("group.id", cluster_name).build();
    KafkaUtils.setProperties(ConfigFactory.parseMap(config_map_kafka));
}

From source file:net.sf.jabref.logic.util.io.FileUtil.java

/**
 * Converts a relative filename to an absolute one, if necessary. Returns
 * an empty Optional if the file cannot be found.<br/>
 * <p>
 * Uses <ul>
 * <li>the default directory associated with the extension of the file</li>
 * <li>the standard file directory</li>
 * <li>the directory of the bib file</li>
 * </ul>
 *
 * @param databaseContext The database this file belongs to.
 * @param name     The filename, may also be a relative path to the file
 */
public static Optional<File> expandFilename(final BibDatabaseContext databaseContext, String name) {
    Optional<String> extension = getFileExtension(name);
    // Find the default directory for this field type, if any:
    List<String> directories = databaseContext.getFileDirectory(extension.orElse(null));
    // Include the standard "file" directory:
    List<String> fileDir = databaseContext.getFileDirectory();
    // Include the directory of the bib file:
    List<String> al = new ArrayList<>();
    for (String dir : directories) {
        if (!al.contains(dir)) {
            al.add(dir);
        }
    }
    for (String aFileDir : fileDir) {
        if (!al.contains(aFileDir)) {
            al.add(aFileDir);
        }
    }

    return expandFilename(name, al);
}

From source file:org.kontalk.system.Control.java

private static KonThread getThread(String xmppThreadID, User user) {
    ThreadList threadList = ThreadList.getInstance();
    Optional<KonThread> optThread = threadList.get(xmppThreadID);
    return optThread.orElse(threadList.get(user));
}

From source file:com.ikanow.aleph2.data_model.utils.BeanTemplateUtils.java

/** Configures a mapper with the desired properties for use in Aleph2
 * @param configure_me - leave this empty to create a new mapper, or add one to configure an existing mapper
 * @return
 */
public static ObjectMapper configureMapper(final Optional<ObjectMapper> configure_me) {
    final ObjectMapper mapper = configure_me.orElse(new ObjectMapper());
    mapper.disable(SerializationFeature.FAIL_ON_EMPTY_BEANS);
    mapper.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
    mapper.setSerializationInclusion(Include.NON_NULL);
    mapper.setVisibility(PropertyAccessor.IS_GETTER, JsonAutoDetect.Visibility.NONE);

    final SimpleModule module = new SimpleModule();
    module.addDeserializer(Number.class, new NumberDeserializer());
    mapper.registerModule(module);

    return mapper;
}
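
A design note on the snippet above: orElse evaluates its argument eagerly, so a fresh ObjectMapper() is constructed (and then discarded) even when configure_me already holds a mapper. If that construction cost matters, the lazy alternative is orElseGet with a supplier; a minimal sketch under the same assumptions:

    final ObjectMapper mapper = configure_me.orElseGet(ObjectMapper::new); // only constructed when configure_me is empty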

From source file:org.opendatakit.briefcase.util.ExportAction.java

public static List<String> export(BriefcaseFormDefinition formDefinition, ExportConfiguration configuration,
        TerminationFuture terminationFuture) {
    List<String> errors = new ArrayList<>();
    Optional<File> pemFile = configuration.mapPemFile(Path::toFile).filter(File::exists);
    if ((formDefinition.isFileEncryptedForm() || formDefinition.isFieldEncryptedForm()) && !pemFile.isPresent())
        errors.add(formDefinition.getFormName() + " form is encrypted");
    else
        try {
            export(configuration.mapExportDir(Path::toFile)
                    .orElseThrow(() -> new RuntimeException("Wrong export configuration")), ExportType.CSV,
                    formDefinition, pemFile.orElse(null), terminationFuture,
                    configuration.mapStartDate(
                            (LocalDate ld) -> Date.from(ld.atStartOfDay(ZoneId.systemDefault()).toInstant()))
                            .orElse(null),
                    configuration.mapEndDate(
                            (LocalDate ld) -> Date.from(ld.atStartOfDay(ZoneId.systemDefault()).toInstant()))
                            .orElse(null));
        } catch (IOException ex) {
            errors.add("Export of form " + formDefinition.getFormName() + " has failed: " + ex.getMessage());
        }
    return errors;
}

From source file:alfio.manager.NotificationManager.java

private static Function<Map<String, String>, byte[]> receiptOrInvoiceFactory(EventRepository eventRepository,
        Function<Triple<Event, Locale, Map<String, Object>>, Optional<byte[]>> pdfGenerator) {
    return (model) -> {
        String reservationId = model.get("reservationId");
        Event event = eventRepository.findById(Integer.valueOf(model.get("eventId"), 10));
        Locale language = Json.fromJson(model.get("language"), Locale.class);

        Map<String, Object> reservationEmailModel = Json.fromJson(model.get("reservationEmailModel"),
                new TypeReference<Map<String, Object>>() {
                });
        //FIXME hack: reservationEmailModel should be a minimal and typed container
        reservationEmailModel.put("event", event);
        Optional<byte[]> receipt = pdfGenerator.apply(Triple.of(event, language, reservationEmailModel));

        if (!receipt.isPresent()) {
            log.warn("was not able to generate the receipt for reservation id " + reservationId + " for locale "
                    + language);
        }
        return receipt.orElse(null);
    };
}
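
A pattern shared by the last two examples is orElse(null), which unwraps the Optional back into a plain, possibly-null reference for APIs that still expect one. A minimal sketch of the same idea (the variable name mirrors the example above):

    byte[] receiptBytes = receipt.orElse(null); // null when no PDF could be generated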