Example usage for java.util Optional isPresent

List of usage examples for java.util Optional isPresent

Introduction

This page collects usage examples for the java.util.Optional.isPresent() method, drawn from real project source files.

Prototype

public boolean isPresent() 

Document

If a value is present, returns true, otherwise false.
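
Before the project examples below, a minimal self-contained sketch of the call itself (class and variable names are purely illustrative):

import java.util.Optional;

public class IsPresentDemo {
    public static void main(String[] args) {
        Optional<String> present = Optional.of("value");
        Optional<String> absent = Optional.empty();

        System.out.println(present.isPresent()); // true
        System.out.println(absent.isPresent());  // false

        // Typical guard: only call get() once isPresent() has returned true
        if (present.isPresent()) {
            System.out.println(present.get()); // value
        }
    }
}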

Usage

From source file:com.facebook.presto.accumulo.AccumuloClient.java

/**
 * Searches through the given locality groups to find if this column has a locality group.
 *
 * @param columnName Column name to get the locality group of
 * @param groups Optional locality group configuration
 * @return Optional string containing the name of the locality group, if present
 */
private static Optional<String> getColumnLocalityGroup(String columnName,
        Optional<Map<String, Set<String>>> groups) {
    if (groups.isPresent()) {
        for (Map.Entry<String, Set<String>> group : groups.get().entrySet()) {
            if (group.getValue().contains(columnName.toLowerCase(Locale.ENGLISH))) {
                return Optional.of(group.getKey());
            }
        }
    }

    return Optional.empty();
}
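
The same lookup can be written without the isPresent()/get() pair by streaming the entries; a minimal functional-style alternative under the same signature (a sketch only, not the project's code; it additionally needs java.util.Collections):

private static Optional<String> getColumnLocalityGroup(String columnName,
        Optional<Map<String, Set<String>>> groups) {
    // Stream the group entries when present, otherwise an empty set
    return groups.map(Map::entrySet).orElseGet(Collections::emptySet).stream()
            .filter(group -> group.getValue().contains(columnName.toLowerCase(Locale.ENGLISH)))
            .map(Map.Entry::getKey)
            .findFirst();
}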

From source file:com.ikanow.aleph2.analytics.spark.utils.SparkTechnologyUtils.java

/** Builds objects for all the aleph2 inputs and provides a method to use them in context-dependent ways 
 * @param context
 * @param bucket
 * @param job
 * @param config
 * @param per_input_action - user lambda that determines how they are used
 */
public static final void buildAleph2Inputs(final IAnalyticsContext context, final DataBucketBean bucket,
        final AnalyticThreadJobBean job, final Optional<ProcessingTestSpecBean> maybe_test_spec,
        final Configuration config, final Set<String> exclude_names,
        BiConsumer<AnalyticThreadJobInputBean, Job> per_input_action) {
    transformInputBean(Optionals.ofNullable(job.inputs()).stream(), maybe_test_spec)
            .filter(input -> !exclude_names.contains(input.name()))
            .forEach(Lambdas.wrap_consumer_u(input_with_test_settings -> {

                final Optional<IBucketLogger> a2_logger = Optional
                        .ofNullable(context.getLogger(Optional.of(bucket)));

                final List<String> paths = context.getInputPaths(Optional.empty(), job,
                        input_with_test_settings);

                if (!paths.isEmpty()) {

                    _logger.info(ErrorUtils.get("Adding storage paths for bucket {0}: {1}", bucket.full_name(),
                            paths.stream().collect(Collectors.joining(";"))));

                    a2_logger.ifPresent(l -> l.log(Level.INFO, true,
                            () -> ErrorUtils.get("Adding storage paths for bucket {0}: {1}", bucket.full_name(),
                                    paths.stream().collect(Collectors.joining(";"))),
                            () -> SparkTechnologyService.class.getSimpleName() + "."
                                    + Optional.ofNullable(job.name()).orElse("no_name"),
                            () -> "startAnalyticJobOrTest"));

                    //DEBUG
                    //System.out.println(ErrorUtils.get("Adding storage paths for bucket {0}: {1}", bucket.full_name(), paths.stream().collect(Collectors.joining(";"))));   

                    final Job input_job = Job.getInstance(config);
                    input_job.setInputFormatClass(BeFileInputFormat_Pure.class);
                    paths.stream().forEach(Lambdas
                            .wrap_consumer_u(path -> FileInputFormat.addInputPath(input_job, new Path(path))));
                    // (Add the input config in)
                    input_job.getConfiguration().set(HadoopBatchEnrichmentUtils.BE_BUCKET_INPUT_CONFIG,
                            BeanTemplateUtils.toJson(input_with_test_settings).toString());
                    per_input_action.accept(input_with_test_settings, input_job);
                } else { // not easily available in HDFS directory format, try getting from the context

                    Optional<HadoopBatchEnrichmentUtils.HadoopAccessContext> input_format_info = context
                            .getServiceInput(HadoopBatchEnrichmentUtils.HadoopAccessContext.class,
                                    Optional.empty(), job, input_with_test_settings);
                    if (!input_format_info.isPresent()) {
                        _logger.warn(ErrorUtils.get("Tried but failed to get input format from {0}",
                                BeanTemplateUtils.toJson(input_with_test_settings)));

                        a2_logger.ifPresent(l -> l.log(Level.WARN, true,
                                () -> ErrorUtils.get("Tried but failed to get input format from {0}",
                                        BeanTemplateUtils.toJson(input_with_test_settings)),
                                () -> SparkTechnologyService.class.getSimpleName() + "."
                                        + Optional.ofNullable(job.name()).orElse("no_name"),
                                () -> "startAnalyticJobOrTest"));

                        //DEBUG
                        //System.out.println(ErrorUtils.get("Tried but failed to get input format from {0}", BeanTemplateUtils.toJson(input_with_test_settings)));
                    } else {
                        _logger.info(ErrorUtils.get("Adding data service path for bucket {0}: {1}",
                                bucket.full_name(), input_format_info.get().describe()));

                        a2_logger.ifPresent(l -> l.log(Level.INFO, true,
                                () -> ErrorUtils.get("Adding data service path for bucket {0}: {1}",
                                        bucket.full_name(), input_format_info.get().describe()),
                                () -> SparkTechnologyService.class.getSimpleName() + "."
                                        + Optional.ofNullable(job.name()).orElse("no_name"),
                                () -> "startAnalyticJobOrTest"));

                        //DEBUG
                        //System.out.println(ErrorUtils.get("Adding data service path for bucket {0}: {1}", bucket.full_name(),input_format_info.get().describe()));

                        final Job input_job = Job.getInstance(config);
                        input_job.setInputFormatClass(
                                input_format_info.get().getAccessService().either(l -> l.getClass(), r -> r));
                        input_format_info.get().getAccessConfig().ifPresent(map -> {
                            map.entrySet().forEach(kv -> input_job.getConfiguration().set(kv.getKey(),
                                    kv.getValue().toString()));
                        });
                        per_input_action.accept(input_with_test_settings, input_job);
                    }
                }
            }));
}
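
The a2_logger handling above is the standard way to bridge a null-returning API into Optional: wrap the result with ofNullable, then guard every use with isPresent() or ifPresent(). A self-contained sketch of just that pattern (java.util.logging is used purely for illustration):

import java.util.Optional;
import java.util.logging.Logger;

public class OfNullableDemo {
    // Stand-in for an API that may return null instead of a logger
    static Logger lookupLogger(boolean configured) {
        return configured ? Logger.getLogger("demo") : null;
    }

    public static void main(String[] args) {
        Optional<Logger> maybeLogger = Optional.ofNullable(lookupLogger(false));

        System.out.println(maybeLogger.isPresent()); // false, and no NPE risk
        maybeLogger.ifPresent(l -> l.info("only runs when a logger exists"));
    }
}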

From source file:de.tu_dortmund.ub.data.dswarm.TaskProcessingUnit.java

public static String startTPU(final String confFile, final Properties config) throws Exception {

    final String serviceName = config.getProperty(TPUStatics.SERVICE_NAME_IDENTIFIER);

    LOG.info(String.format("[%s] Starting 'Task Processing Unit' ...", serviceName));
    LOG.info(String.format("[%s] conf-file = %s", serviceName, confFile));

    final String resourceWatchFolder = config.getProperty(TPUStatics.RESOURCE_WATCHFOLDER_IDENTIFIER);
    String[] watchFolderFiles = new File(resourceWatchFolder).list();

    if (watchFolderFiles == null) {

        final String message = String.format(
                "could not determine files from watchfolder '%s'; watch folder file list does not exist",
                resourceWatchFolder);

        TaskProcessingUnit.LOG.error(message);

        throw new TPUException(message);
    }

    if (watchFolderFiles.length == 0) {

        final String message = String.format(
                "could not determine files from watchfolder; there are no files in folder '%s'",
                resourceWatchFolder);

        TaskProcessingUnit.LOG.error(message);

        throw new TPUException(message);
    }

    Arrays.sort(watchFolderFiles);

    final String filesMessage = String.format("[%s] '%s' files in resource watch folder '%s'", serviceName,
            watchFolderFiles.length, resourceWatchFolder);

    LOG.info(filesMessage);
    LOG.info("\tfile names: '" + Arrays.toString(watchFolderFiles) + "'");

    // Init time counter
    final long global = System.currentTimeMillis();

    final Integer engineThreads = Integer.parseInt(config.getProperty(TPUStatics.ENGINE_THREADS_IDENTIFIER));

    final Optional<Boolean> optionalDoInit = TPUUtil.getBooleanConfigValue(TPUStatics.DO_INIT_IDENTIFIER,
            config);
    final Optional<Boolean> optionalDoTransformations = TPUUtil
            .getBooleanConfigValue(TPUStatics.DO_TRANSFORMATIONS_IDENTIFIER, config);
    final Optional<Boolean> optionalAllowMultipleDataModels = TPUUtil
            .getBooleanConfigValue(TPUStatics.ALLOW_MULTIPLE_DATA_MODELS_IDENTIFIER, config);
    final Optional<Boolean> optionalDoIngestOnTheFly = TPUUtil
            .getBooleanConfigValue(TPUStatics.DO_INGEST_ON_THE_FLY_IDENTIFIER, config);
    final Optional<Boolean> optionalDoExportOnTheFly = TPUUtil
            .getBooleanConfigValue(TPUStatics.DO_EXPORT_ON_THE_FLY_IDENTIFIER, config);
    final Optional<String> optionalOutputDataModelID = TPUUtil
            .getStringConfigValue(TPUStatics.PROTOTYPE_OUTPUT_DATA_MODEL_ID_IDENTIFIER, config);

    final Optional<String> optionalExportMimeType;

    if (optionalDoExportOnTheFly.isPresent() && optionalDoExportOnTheFly.get()) {

        optionalExportMimeType = TPUUtil.getStringConfigValue(TPUStatics.EXPORT_MIME_TYPE, config);
    } else {

        optionalExportMimeType = Optional.empty();
    }

    final Optional<String> optionalExportFileExtension;

    if (optionalExportMimeType.isPresent()) {

        String exportFileExtension;
        try {

            final MediaType mediaType = MediaType.getMediaTypeByName(optionalExportMimeType.get());
            exportFileExtension = mediaType.getFileExtension();
        } catch (final DSWARMException e) {

            // xml as default file extension
            exportFileExtension = XML_FILE_ENDING;
        }

        optionalExportFileExtension = Optional.ofNullable(exportFileExtension);
    } else {

        optionalExportFileExtension = Optional.empty();
    }

    final String result;

    if (goMultiThreaded(optionalDoInit, optionalDoTransformations, optionalAllowMultipleDataModels,
            optionalDoIngestOnTheFly, optionalDoExportOnTheFly)) {

        result = executeTPUTask(watchFolderFiles, resourceWatchFolder, optionalOutputDataModelID,
                optionalExportMimeType, optionalExportFileExtension, engineThreads, serviceName, config);
    } else {

        executeTPUPartsOnDemand(optionalDoInit, optionalAllowMultipleDataModels, watchFolderFiles,
                resourceWatchFolder, optionalOutputDataModelID, serviceName, engineThreads,
                optionalDoTransformations, optionalDoIngestOnTheFly, optionalDoExportOnTheFly,
                optionalExportMimeType, optionalExportFileExtension, config);

        result = "[no result available]";
    }

    final String tasksExecutedMessage = String.format("[%s] d:swarm tasks executed. (Processing time: %d s)",
            serviceName, ((System.currentTimeMillis() - global) / 1000));

    LOG.info(tasksExecutedMessage);

    return result;
}
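
Because an absent flag and a false flag behave the same here, the isPresent()/get() test on optionalDoExportOnTheFly can be collapsed with orElse; a sketch of that one branch (same names as the snippet above, not the project's actual code):

    final Optional<String> optionalExportMimeType = optionalDoExportOnTheFly.orElse(Boolean.FALSE)
            ? TPUUtil.getStringConfigValue(TPUStatics.EXPORT_MIME_TYPE, config)
            : Optional.empty();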

From source file:com.ikanow.aleph2.harvest.script.utils.ScriptUtils.java

/**
 * Creates a ProcessBuilder pointed at the given script file and sets the working directory and environment variables for you.
 * The resulting process simply runs "sh <script_file_path>".
 * @param script_file_path
 * @param working_dir
 * @return
 * @throws JsonProcessingException 
 * @throws ExecutionException 
 * @throws InterruptedException 
 */
public static ProcessBuilder createProcessBuilderForScriptFile(final String script_file_path,
        final String working_dir, final Optional<Long> test_requested_num_objects,
        final Optional<Long> test_max_runtime_s, final Map<String, String> user_args,
        final IHarvestContext context, final DataBucketBean bucket, final String aleph_global_root_path)
        throws JsonProcessingException, InterruptedException, ExecutionException {
    _logger.debug("create pb for script file: " + script_file_path);

    ArrayList<String> args = new ArrayList<String>();
    args.add("sh");
    args.add(script_file_path);
    final ProcessBuilder pb = new ProcessBuilder(args);
    pb.directory(new File(working_dir)).redirectErrorStream(true);
    pb.environment().put("JAVA_OPTS", "");
    if (test_requested_num_objects.isPresent())
        pb.environment().put(ENV_TEST_NUM_OBJ, test_requested_num_objects.get().toString());
    if (test_max_runtime_s.isPresent())
        pb.environment().put(ENV_TEST_MAX_RUNTIME_S, test_max_runtime_s.get().toString());
    //add in default env vars
    final String classpath = Stream
            .concat(context.getHarvestContextLibraries(Optional.empty()).stream(),
                    context.getHarvestLibraries(Optional.of(bucket)).get().values().stream())
            .collect(Collectors.joining(":"));
    pb.environment().put(ENV_MODULE_PATH,
            context.getHarvestContextLibraries(Optional.empty()).stream().collect(Collectors.joining(":")));
    pb.environment().put(ENV_LIBRARY_PATH, context.getHarvestLibraries(Optional.of(bucket)).get().values()
            .stream().collect(Collectors.joining(":")));
    pb.environment().put(ENV_CLASS_PATH, classpath);
    pb.environment().put(ENV_BUCKET_HDFS_PATH, aleph_global_root_path + "/data" + bucket.full_name());
    pb.environment().put(ENV_BUCKET_SIGNATURE,
            BucketUtils.getUniqueSignature(bucket.full_name(), Optional.empty()));
    pb.environment().put(ENV_BUCKET_PATH, bucket.full_name());
    pb.environment().put(ENV_BUCKET_STR, BeanTemplateUtils.toJson(bucket).toString());
    //add user args   as env vars
    user_args.forEach((k, val) -> pb.environment().put(k, val));
    return pb;
}
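
The two isPresent() guards on the test settings can equally be written with ifPresent, which drops the explicit get() calls; an equivalent fragment using the same variables as the snippet:

    test_requested_num_objects.ifPresent(n -> pb.environment().put(ENV_TEST_NUM_OBJ, n.toString()));
    test_max_runtime_s.ifPresent(s -> pb.environment().put(ENV_TEST_MAX_RUNTIME_S, s.toString()));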

From source file:alfio.util.TemplateResource.java

public static Map<String, Object> prepareModelForConfirmationEmail(Organization organization, Event event,
        TicketReservation reservation, Optional<String> vat, List<Ticket> tickets, OrderSummary orderSummary,
        String reservationUrl, String reservationShortID, Optional<String> invoiceAddress,
        Optional<String> bankAccountNr, Optional<String> bankAccountOwner) {
    Map<String, Object> model = new HashMap<>();
    model.put("organization", organization);
    model.put("event", event);
    model.put("ticketReservation", reservation);
    model.put("hasVat", vat.isPresent());
    model.put("vatNr", vat.orElse(""));
    model.put("tickets", tickets);
    model.put("orderSummary", orderSummary);
    model.put("reservationUrl", reservationUrl);
    model.put("locale", reservation.getUserLanguage());

    ZonedDateTime confirmationTimestamp = Optional.ofNullable(reservation.getConfirmationTimestamp())
            .orElseGet(ZonedDateTime::now);
    model.put("confirmationDate", confirmationTimestamp.withZoneSameInstant(event.getZoneId()));

    if (reservation.getValidity() != null) {
        model.put("expirationDate",
                ZonedDateTime.ofInstant(reservation.getValidity().toInstant(), event.getZoneId()));
    }

    model.put("reservationShortID", reservationShortID);

    model.put("hasInvoiceAddress", invoiceAddress.isPresent());
    invoiceAddress.ifPresent(addr -> {
        model.put("invoiceAddress", StringUtils.replace(addr, "\n", ", "));
        model.put("invoiceAddressAsList", Arrays.asList(StringUtils.split(addr, '\n')));
    });

    model.put("hasBankAccountNr", bankAccountNr.isPresent());
    bankAccountNr.ifPresent(nr -> {
        model.put("bankAccountNr", nr);
    });

    model.put("isOfflinePayment",
            reservation.getStatus() == TicketReservation.TicketReservationStatus.OFFLINE_PAYMENT);
    model.put("paymentReason", event.getShortName() + " " + reservationShortID);
    model.put("hasBankAccountOnwer", bankAccountOwner.isPresent());
    bankAccountOwner.ifPresent(owner -> {
        model.put("bankAccountOnwer", StringUtils.replace(owner, "\n", ", "));
        model.put("bankAccountOnwerAsList", Arrays.asList(StringUtils.split(owner, '\n')));
    });

    return model;
}
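
The method above combines three Optional idioms: isPresent() to expose boolean flags to the template, orElse() to supply a default, and ifPresent() to add entries only when a value exists. A stripped-down, runnable sketch of that combination (map keys are illustrative only):

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

public class TemplateModelDemo {
    public static void main(String[] args) {
        Map<String, Object> model = new HashMap<>();
        Optional<String> vat = Optional.empty();

        model.put("hasVat", vat.isPresent());         // boolean flag for the template
        model.put("vatNr", vat.orElse(""));           // default value when absent
        vat.ifPresent(nr -> model.put("vatRaw", nr)); // entry only when present

        System.out.println(model); // vatNr="" and hasVat=false; no vatRaw entry
    }
}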

From source file:com.facebook.presto.accumulo.AccumuloClient.java

private static void validateLocalityGroups(ConnectorTableMetadata meta) {
    // Validate any configured locality groups
    Optional<Map<String, Set<String>>> groups = AccumuloTableProperties.getLocalityGroups(meta.getProperties());
    if (!groups.isPresent()) {
        return;
    }

    String rowIdColumn = getRowIdColumn(meta);

    // For each locality group
    for (Map.Entry<String, Set<String>> g : groups.get().entrySet()) {
        if (g.getValue().contains(rowIdColumn)) {
            throw new PrestoException(INVALID_TABLE_PROPERTY, "Row ID column cannot be in a locality group");
        }

        // Validate the specified column names exist in the table definition,
        // incrementing a counter for each matching column
        int matchingColumns = 0;
        for (ColumnMetadata column : meta.getColumns()) {
            if (g.getValue().contains(column.getName().toLowerCase(Locale.ENGLISH))) {
                ++matchingColumns;

                // Break out early if all columns are found
                if (matchingColumns == g.getValue().size()) {
                    break;
                }
            }
        }

        // If the number of matched columns does not equal the defined size,
        // then a column was specified that does not exist
        // (or there is a duplicate column in the table DDL, which is also an issue but has been checked before in validateColumns).
        if (matchingColumns != g.getValue().size()) {
            throw new PrestoException(INVALID_TABLE_PROPERTY,
                    "Unknown Presto column defined for locality group " + g.getKey());
        }
    }
}

From source file:io.prestosql.plugin.accumulo.AccumuloClient.java

/**
 * Gets a collection of Accumulo Range objects from the given Presto domain.
 * This maps the column constraints of the given Domain to an Accumulo Range scan.
 *
 * @param domain Domain, can be null (returns (-inf, +inf) Range)
 * @param serializer Instance of an {@link AccumuloRowSerializer}
 * @return A collection of Accumulo Range objects
 * @throws TableNotFoundException If the Accumulo table is not found
 */
public static Collection<Range> getRangesFromDomain(Optional<Domain> domain, AccumuloRowSerializer serializer)
        throws TableNotFoundException {
    // if we have no predicate pushdown, use the full range
    if (!domain.isPresent()) {
        return ImmutableSet.of(new Range());
    }

    ImmutableSet.Builder<Range> rangeBuilder = ImmutableSet.builder();
    for (io.prestosql.spi.predicate.Range range : domain.get().getValues().getRanges().getOrderedRanges()) {
        rangeBuilder.add(getRangeFromPrestoRange(range, serializer));
    }

    return rangeBuilder.build();
}
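
On Java 11 and later, the negated check at the top of this method can also be written with Optional.isEmpty(); a sketch of just that guard, everything else unchanged:

    // Java 11+: isEmpty() reads more directly than !isPresent()
    if (domain.isEmpty()) {
        return ImmutableSet.of(new Range());
    }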

From source file:com.facebook.presto.accumulo.AccumuloClient.java

/**
 * Gets a collection of Accumulo Range objects from the given Presto domain.
 * This maps the column constraints of the given Domain to an Accumulo Range scan.
 *
 * @param domain Domain, can be null (returns (-inf, +inf) Range)
 * @param serializer Instance of an {@link AccumuloRowSerializer}
 * @return A collection of Accumulo Range objects
 * @throws TableNotFoundException If the Accumulo table is not found
 */
public static Collection<Range> getRangesFromDomain(Optional<Domain> domain, AccumuloRowSerializer serializer)
        throws TableNotFoundException {
    // if we have no predicate pushdown, use the full range
    if (!domain.isPresent()) {
        return ImmutableSet.of(new Range());
    }

    ImmutableSet.Builder<Range> rangeBuilder = ImmutableSet.builder();
    for (com.facebook.presto.spi.predicate.Range range : domain.get().getValues().getRanges()
            .getOrderedRanges()) {
        rangeBuilder.add(getRangeFromPrestoRange(range, serializer));
    }

    return rangeBuilder.build();
}

From source file:com.facebook.presto.accumulo.AccumuloClient.java

private static void validateColumns(ConnectorTableMetadata meta) {
    // Check all the column types, and throw an exception if the types of a map are complex
    // While it is a rare case, this is not supported by the Accumulo connector
    ImmutableSet.Builder<String> columnNameBuilder = ImmutableSet.builder();
    for (ColumnMetadata column : meta.getColumns()) {
        if (Types.isMapType(column.getType())) {
            if (Types.isMapType(Types.getKeyType(column.getType()))
                    || Types.isMapType(Types.getValueType(column.getType()))
                    || Types.isArrayType(Types.getKeyType(column.getType()))
                    || Types.isArrayType(Types.getValueType(column.getType()))) {
                throw new PrestoException(INVALID_TABLE_PROPERTY,
                        "Key/value types of a MAP column must be plain types");
            }
        }

        columnNameBuilder.add(column.getName().toLowerCase(Locale.ENGLISH));
    }

    // Validate the columns are distinct
    if (columnNameBuilder.build().size() != meta.getColumns().size()) {
        throw new PrestoException(INVALID_TABLE_PROPERTY, "Duplicate column names are not supported");
    }

    Optional<Map<String, Pair<String, String>>> columnMapping = AccumuloTableProperties
            .getColumnMapping(meta.getProperties());
    if (columnMapping.isPresent()) {
        // Validate there are no duplicates in the column mapping
        long distinctMappings = columnMapping.get().values().stream().distinct().count();
        if (distinctMappings != columnMapping.get().size()) {
            throw new PrestoException(INVALID_TABLE_PROPERTY,
                    "Duplicate column family/qualifier pair detected in column mapping, check the value of "
                            + AccumuloTableProperties.COLUMN_MAPPING);
        }

        // Validate no column is mapped to the reserved entry
        String reservedRowIdColumn = AccumuloPageSink.ROW_ID_COLUMN.toString();
        if (columnMapping.get().values().stream().filter(pair -> pair.getKey().equals(reservedRowIdColumn)
                && pair.getValue().equals(reservedRowIdColumn)).count() > 0) {
            throw new PrestoException(INVALID_TABLE_PROPERTY,
                    format("Column family/qualifier mapping of %s:%s is reserved", reservedRowIdColumn,
                            reservedRowIdColumn));
        }
    } else if (AccumuloTableProperties.isExternal(meta.getProperties())) {
        // Column mapping is not defined (i.e. use column generation) and table is external
        // But column generation is for internal tables only
        throw new PrestoException(INVALID_TABLE_PROPERTY,
                "Column generation for external tables is not supported, must specify "
                        + AccumuloTableProperties.COLUMN_MAPPING);
    }
}

From source file:io.helixservice.feature.configuration.locator.AbstractResourceLocator.java

/**
 * {@inheritDoc}
 */
@Override
public Optional<JsonObject> getJsonObject(String resourcePath) {
    Optional<String> string = getString(resourcePath);

    if (string.isPresent()) {
        return Optional.of(new JsonObject(string.get()));
    } else {
        return Optional.empty();
    }
}
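
The same method can be expressed with map(), which applies the JsonObject constructor only when a value is present and otherwise propagates the empty Optional; a minimal alternative sketch (not taken from the original library):

@Override
public Optional<JsonObject> getJsonObject(String resourcePath) {
    // map() runs the constructor only for a present value; empty stays empty
    return getString(resourcePath).map(JsonObject::new);
}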