Example usage for java.util Optional orElse

List of usage examples for java.util Optional orElse

Introduction

On this page you can find usage examples for `java.util.Optional.orElse`.

Prototype

public T orElse(T other) 

Source Link

Document

If a value is present, returns the value; otherwise returns `other`.

Usage

From source file:com.orange.ngsi2.server.Ngsi2BaseController.java

/**
 * Endpoint get /v2/entities/* w ww .jav  a2 s . com*/
 * @param id an optional list of entity IDs separated by comma (cannot be used with idPatterns)
 * @param type an optional list of types of entity separated by comma
 * @param idPattern a optional pattern of entity IDs (cannot be used with ids)
 * @param limit an optional limit (0 for none)
 * @param offset an optional offset (0 for none)
 * @param attrs an optional list of attributes separated by comma to return for all entities
 * @param query an optional Simple Query Language query
 * @param georel an optional Geo query. Possible values: near, coveredBy, intersects, equals, disjoint.
 * @param geometry an optional geometry. Possible values: point, line, polygon, box.
 * @param coords an optional coordinate
 * @param orderBy an option list of attributes to difine the order of entities
 * @param options an optional list of options separated by comma. Possible value for option: count.
 *        Theses keyValues,values and unique options are not supported.
 *        If count is present then the total number of entities is returned in the response as a HTTP header named `X-Total-Count`.
 * @return a list of Entities http status 200 (ok)
 * @throws Exception
 */
@RequestMapping(method = RequestMethod.GET, value = { "/entities" })
final public ResponseEntity<List<Entity>> listEntitiesEndpoint(@RequestParam Optional<Set<String>> id,
        @RequestParam Optional<Set<String>> type, @RequestParam Optional<String> idPattern,
        @RequestParam Optional<Integer> limit, @RequestParam Optional<Integer> offset,
        @RequestParam Optional<List<String>> attrs, @RequestParam Optional<String> query,
        @RequestParam Optional<String> georel, @RequestParam Optional<String> geometry,
        @RequestParam Optional<String> coords, @RequestParam Optional<List<String>> orderBy,
        @RequestParam Optional<Set<String>> options) throws Exception {

    if (id.isPresent() && idPattern.isPresent()) {
        throw new IncompatibleParameterException("id", "idPattern", "List entities");
    }

    validateSyntax(id.orElse(null), type.orElse(null), attrs.orElse(null));

    Optional<GeoQuery> geoQuery = Optional.empty();
    // If one of them is present, all are mandatory
    if (georel.isPresent() || geometry.isPresent() || coords.isPresent()) {
        if (!(georel.isPresent() && geometry.isPresent() && coords.isPresent())) {
            throw new BadRequestException("Missing one argument of georel, geometry or coords");
        }
        geoQuery = Optional.of(Ngsi2ParsingHelper.parseGeoQuery(georel.get(), geometry.get(), coords.get()));
    }

    boolean count = false;
    if (options.isPresent()) {
        Set<String> optionsSet = options.get();
        //TODO: to support keyValues, values and unique as options
        if (optionsSet.contains("keyValues") || optionsSet.contains("values")
                || optionsSet.contains("unique")) {
            throw new UnsupportedOptionException("keyValues, values or unique");
        }
        count = optionsSet.contains("count");
    }

    Paginated<Entity> paginatedEntity = listEntities(id.orElse(null), type.orElse(null), idPattern.orElse(null),
            limit.orElse(0), offset.orElse(0), attrs.orElse(new ArrayList<>()), query.orElse(null),
            geoQuery.orElse(null), orderBy.orElse(new ArrayList<>()));
    if (count) {
        return new ResponseEntity<>(paginatedEntity.getItems(), xTotalCountHeader(paginatedEntity.getTotal()),
                HttpStatus.OK);
    } else {
        return new ResponseEntity<>(paginatedEntity.getItems(), HttpStatus.OK);
    }
}

From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.TestElasticsearchIndexUtils.java

/**
 * Verifies that schema overrides (search-index type_override / tokenization_override /
 * technology_override_schema, document deduplication_fields, and a columnar schema) are
 * all merged into the generated Elasticsearch index mapping, by comparing against the
 * checked-in expected JSON (mapping_override_test.json).
 * @throws IOException if the expected-mapping resource cannot be read
 */
@Test
public void test_overrideMappings() throws IOException {

    // use the pure defaults

    final ElasticsearchIndexServiceConfigBean config = ElasticsearchIndexConfigUtils
            .buildConfigBean(ConfigFactory.empty());

    //TODO: ok the field ordering is a disaster here ... it should be sorted by most specific first
    // eg !* > *!* > * and t2._1 then t2._2

    // Build a bucket with a columnar and search index schema
    {
        final DataBucketBean search_index_test = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::full_name, "/test/test")
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class).with(
                        DataSchemaBean::document_schema,
                        BeanTemplateUtils.build(DataSchemaBean.DocumentSchemaBean.class)
                                .with(DataSchemaBean.DocumentSchemaBean::deduplication_fields, Arrays.asList(
                                        "id1", "test_timestamp1", "test_not_override1", "test_not_override2")) //TODO: check vs columnar
                                //(test timestamp and test_not_override2 are ignored because manually specified, test_not_override1 is duplicated as both string and dyn type)
                                .done().get())
                        .with(DataSchemaBean::search_index_schema, BeanTemplateUtils
                                .build(DataSchemaBean.SearchIndexSchemaBean.class)
                                .with(DataSchemaBean.SearchIndexSchemaBean::tokenize_by_default, false)
                                .with(DataSchemaBean.SearchIndexSchemaBean::type_override, ImmutableMap.of(
                                        "string",
                                        BeanTemplateUtils.build(DataSchemaBean.ColumnarSchemaBean.class)
                                                .with(DataSchemaBean.ColumnarSchemaBean::field_include_list,
                                                        Arrays.asList("test_not_override1",
                                                                "test.nested.string"))
                                                .done().get(),
                                        "date",
                                        BeanTemplateUtils.build(DataSchemaBean.ColumnarSchemaBean.class)
                                                .with(DataSchemaBean.ColumnarSchemaBean::field_include_list,
                                                        Arrays.asList("test_timestamp1", "test_timestamp2"))
                                                .with(DataSchemaBean.ColumnarSchemaBean::field_include_pattern_list,
                                                        Arrays.asList("test_timestamp1*", "test_timestamp2*"))
                                                .with(DataSchemaBean.ColumnarSchemaBean::field_type_include_list,
                                                        Arrays.asList("string"))
                                                .done().get()))
                                .with(DataSchemaBean.SearchIndexSchemaBean::tokenization_override,
                                        ImmutableMap.of("_default_", BeanTemplateUtils
                                                .build(DataSchemaBean.ColumnarSchemaBean.class)
                                                .with(DataSchemaBean.ColumnarSchemaBean::field_include_list,
                                                        Arrays.asList("test_not_override1", "test_override",
                                                                "test_dual_default", "test.nested.string"))
                                                .with(DataSchemaBean.ColumnarSchemaBean::field_include_pattern_list,
                                                        Arrays.asList("test_not_override*", "test_override*"))
                                                .with(DataSchemaBean.ColumnarSchemaBean::field_type_include_list,
                                                        Arrays.asList("string"))
                                                .done().get(), "_none_",
                                                BeanTemplateUtils.build(DataSchemaBean.ColumnarSchemaBean.class)
                                                        // (nothing needed here, see inverse version below)
                                                        .done().get()))
                                .with(DataSchemaBean.SearchIndexSchemaBean::technology_override_schema,
                                        ImmutableMap.of( // add some dummy extra field mappings to check they get included
                                                "extra_field_mappings",
                                                ImmutableMap.of("properties",
                                                        ImmutableMap.of("test1",
                                                                ImmutableMap.of("type", "test_type1")),
                                                        "dynamic_templates",
                                                        Arrays.asList(ImmutableMap.of("test2_name",
                                                                ImmutableMap.of("mapping",
                                                                        ImmutableMap.of("type", "test_type2"),
                                                                        "path_match", "test2*",
                                                                        "match_mapping_type", "*")))),
                                                "dual_tokenization_override",
                                                _mapper.convertValue(BeanTemplateUtils
                                                        .build(DataSchemaBean.ColumnarSchemaBean.class)
                                                        .with(DataSchemaBean.ColumnarSchemaBean::field_include_list,
                                                                Arrays.asList("test_dual_default",
                                                                        "test_dual_none", "test_dual_column",
                                                                        "test.nested.string"))
                                                        .with(DataSchemaBean.ColumnarSchemaBean::field_include_pattern_list,
                                                                Arrays.asList("test_pattern1*",
                                                                        "test_dual_column*"))
                                                        .done().get(), Map.class)))
                                .done().get())
                        .with(DataSchemaBean::columnar_schema, BeanTemplateUtils
                                .build(DataSchemaBean.ColumnarSchemaBean.class)
                                .with(DataSchemaBean.ColumnarSchemaBean::field_include_list,
                                        Arrays.asList("test_not_override2", "test_override", "test_dual_column",
                                                "test_timestamp2", "test.nested.string"))
                                .with(DataSchemaBean.ColumnarSchemaBean::field_include_pattern_list,
                                        Arrays.asList("test_override*", "test_pattern2*", "test_dual_column*",
                                                "test_timestamp2*"))
                                .with(DataSchemaBean.ColumnarSchemaBean::field_type_include_list,
                                        Arrays.asList("date"))
                                .done().get())
                        .done().get())
                .done().get();

        // Should have the following:
        // test_not_override1 ... prop ... single/analyzed/disabled (CHECK)
        // test_override ... prop ... single/not_analyzed/doc_values (CHECK)
        // test_not_override* ... temp ... single/analyzed/disabled (CHECK)
        // test_override* ... temp ... single/not_analyzed/doc_values (CHECK)
        // string ... temp ... single/analyzed/disabled (CHECK)
        //
        // test_dual_default ... prop ... dual/both(raw)/disabled (CHECK)
        // test_dual_none ... prop ... dual/both(token)/disabled (CHECK)
        // test_dual_column ..prop ... dual/both(token)/doc_values+paged (CHECK)
        // test_pattern1* ... temp ... dual/both(token)/disabled (CHECK)
        // test_dual_column* ... temp ... dual/both(token)/doc_values+paged (CHECK)
        //
        // test_not_override2 ... temp ... single/not_analyzed/doc_values (CHECK)
        // test_pattern2* ... temp ... single/not_analyzed/doc_values (CHECK)
        // date ... temp ... single/not_analyized/doc_values (CHECK)

        // (timestamp, * - as their defaults) (CHECK)

        final ElasticsearchIndexServiceConfigBean schema_config = ElasticsearchIndexConfigUtils
                .buildConfigBeanFromSchema(search_index_test, config, _mapper);

        // Derive the mapping type: "_default_" when collide_policy is new_type (the default),
        // otherwise the configured type name (falling back to the service's fixed type name)
        final Optional<String> type = Optional.ofNullable(schema_config.search_technology_override())
                .map(t -> t.type_name_or_prefix());
        final String index_type = CollidePolicy.new_type == Optional
                .ofNullable(schema_config.search_technology_override()).map(t -> t.collide_policy())
                .orElse(CollidePolicy.new_type) ? "_default_"
                        : type.orElse(ElasticsearchIndexServiceConfigBean.DEFAULT_FIXED_TYPE_NAME);

        final XContentBuilder mapping = ElasticsearchIndexUtils.createIndexMapping(search_index_test,
                Optional.empty(), true, schema_config, _mapper, index_type);

        // Compare against the canonical expected mapping checked in as a test resource
        final String expected = Resources.toString(
                Resources.getResource(
                        "com/ikanow/aleph2/search_service/elasticsearch/utils/mapping_override_test.json"),
                Charsets.UTF_8);

        assertEquals("Get expected search_index_test schema", _mapper.readTree(expected).toString(),
                mapping.bytes().toUtf8());
    }
}

From source file:org.apache.pulsar.compaction.TwoPhaseCompactor.java

/**
 * Phase one of the two-phase compaction: asynchronously reads messages one at a time up to
 * {@code lastMessageId}, recording in {@code latestForKey} the id of the most recent message
 * for each key (including keys found inside readable batches), then completes
 * {@code loopPromise} with a {@link PhaseOneResult}. Iterates by asynchronous recursion
 * (one {@code readNextAsync} per step) rather than by looping.
 * @param reader raw reader positioned at the next message to inspect
 * @param firstMessageId id of the first retained message seen so far (empty until one is found)
 * @param toMessageId id of the last retained message seen so far (empty until one is found)
 * @param lastMessageId id of the final message; reaching it terminates the recursion
 * @param latestForKey accumulator: key -> id of the latest message carrying that key
 * @param loopPromise completed normally with the phase-one result, or exceptionally on error
 */
private void phaseOneLoop(RawReader reader, Optional<MessageId> firstMessageId, Optional<MessageId> toMessageId,
        MessageId lastMessageId, Map<String, MessageId> latestForKey,
        CompletableFuture<PhaseOneResult> loopPromise) {
    if (loopPromise.isDone()) {
        return;
    }
    CompletableFuture<RawMessage> future = reader.readNextAsync();
    // guard the read with a timeout so a stalled topic cannot hang the loop forever
    scheduleTimeout(future);
    future.whenCompleteAsync((m, exception) -> {
        try {
            if (exception != null) {
                loopPromise.completeExceptionally(exception);
                return;
            }
            MessageId id = m.getMessageId();
            boolean deletedMessage = false;
            if (RawBatchConverter.isReadableBatch(m)) {
                try {
                    // record the latest id for every key contained in the batch
                    RawBatchConverter.extractIdsAndKeys(m)
                            .forEach(e -> latestForKey.put(e.getRight(), e.getLeft()));
                } catch (IOException ioe) {
                    // best effort: an undecodable batch is logged and kept whole in the output
                    log.info("Error decoding batch for message {}. Whole batch will be included in output", id,
                            ioe);
                }
            } else {
                Pair<String, Integer> keyAndSize = extractKeyAndSize(m);
                if (keyAndSize != null) {
                    if (keyAndSize.getRight() > 0) {
                        latestForKey.put(keyAndSize.getLeft(), id);
                    } else {
                        // non-positive payload size marks a deletion: drop the key entirely
                        deletedMessage = true;
                        latestForKey.remove(keyAndSize.getLeft());
                    }
                }
            }

            // a deletion never starts or extends the retained [first, to] range; other messages do
            MessageId first = firstMessageId.orElse(deletedMessage ? null : id);
            MessageId to = deletedMessage ? toMessageId.orElse(null) : id;
            if (id.compareTo(lastMessageId) == 0) {
                loopPromise.complete(new PhaseOneResult(first, to, lastMessageId, latestForKey));
            } else {
                phaseOneLoop(reader, Optional.ofNullable(first), Optional.ofNullable(to), lastMessageId,
                        latestForKey, loopPromise);
            }
        } finally {
            // always release the message buffer, whatever the outcome
            m.close();
        }
    }, scheduler);
}

From source file:com.ikanow.aleph2.logging.service.TestLoggingService.java

/**
 * @param name/*w w  w .ja  va  2  s  .c o m*/
 * @param override_good_single
 * @return
 */
private DataBucketBean getTestBucket(final String name, final Optional<String> min_log_level,
        final Optional<Map<String, String>> overrides) {
    return BeanTemplateUtils.build(DataBucketBean.class)
            .with(DataBucketBean::full_name, "/test/logtest/" + name + "/")
            .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                    .with(DataSchemaBean::search_index_schema,
                            BeanTemplateUtils.build(SearchIndexSchemaBean.class)
                                    .with(SearchIndexSchemaBean::enabled, true).done().get())
                    .done().get())
            .with(DataBucketBean::management_schema, BeanTemplateUtils.build(ManagementSchemaBean.class)
                    .with(ManagementSchemaBean::logging_schema, BeanTemplateUtils.build(LoggingSchemaBean.class)
                            .with(LoggingSchemaBean::log_level, min_log_level.orElse(Level.OFF.toString()))
                            .with(LoggingSchemaBean::log_level_overrides, overrides.orElse(ImmutableMap.of()))
                            .done().get())
                    .done().get())
            .done().get();
}

From source file:com.orange.ngsi2.server.Ngsi2BaseController.java

/**
 * Query multiple entities in a single operation
 * @param bulkQueryRequest defines the list of entities, attributes and scopes to match entities
 * @param limit an optional limit/*  w w  w .  j av  a 2 s  .  c om*/
 * @param offset an optional offset
 * @param orderBy an optional list of attributes to order the entities
 * @param options an optional list of options separated by comma. Possible value for option: count.
 *        Theses keyValues,values and unique options are not supported.
 *        If count is present then the total number of entities is returned in the response as a HTTP header named `X-Total-Count`.
 * @return a list of Entities http status 200 (ok)
 * @throws Exception
 */
@RequestMapping(method = RequestMethod.POST, value = {
        "/op/query" }, consumes = MediaType.APPLICATION_JSON_VALUE)
final public ResponseEntity<List<Entity>> bulkQueryEndpoint(@RequestBody BulkQueryRequest bulkQueryRequest,
        @RequestParam Optional<Integer> limit, @RequestParam Optional<Integer> offset,
        @RequestParam Optional<List<String>> orderBy, @RequestParam Optional<Set<String>> options)
        throws Exception {

    validateSyntax(bulkQueryRequest);
    boolean count = false;
    if (options.isPresent()) {
        Set<String> optionsSet = options.get();
        //TODO: to support keyValues, values and unique as options
        if (optionsSet.contains("keyValues") || optionsSet.contains("values")
                || optionsSet.contains("unique")) {
            throw new UnsupportedOptionException("keyValues, values or unique");
        }
        count = optionsSet.contains("count");
    }
    Paginated<Entity> paginatedEntity = bulkQuery(bulkQueryRequest, limit.orElse(0), offset.orElse(0),
            orderBy.orElse(new ArrayList<>()), count);
    if (count) {
        return new ResponseEntity<>(paginatedEntity.getItems(), xTotalCountHeader(paginatedEntity.getTotal()),
                HttpStatus.OK);
    } else {
        return new ResponseEntity<>(paginatedEntity.getItems(), HttpStatus.OK);
    }
}

From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.TestElasticsearchIndexUtils.java

/**
 * Verifies the default Elasticsearch index mappings generated for a bucket with an empty
 * search-index schema, and that adding document / temporal / columnar schemas (alone or
 * combined) perturbs the generated mapping exactly as expected, by comparing the serialized
 * mapping against hard-coded expected JSON strings.
 * @throws IOException if mapping serialization fails
 */
@Test
public void test_defaultMappings() throws IOException {

    final DataBucketBean search_index_test = BeanTemplateUtils.build(DataBucketBean.class)
            .with(DataBucketBean::full_name, "/test/test")
            .with(DataBucketBean::data_schema,
                    BeanTemplateUtils.build(DataSchemaBean.class)
                            .with(DataSchemaBean::search_index_schema,
                                    BeanTemplateUtils.build(DataSchemaBean.SearchIndexSchemaBean.class)
                                            //(empty)
                                            .done().get())
                            .done().get())
            .done().get();

    final String expected = "{\"template\":\"test_test__f19167d49eac*\",\"settings\":{\"index.indices.fielddata.cache.size\":\"10%\",\"index.refresh_interval\":\"5s\"},\"aliases\":{\"r__test_test__f19167d49eac\":{}},\"mappings\":{\"_default_\":{\"_meta\":{\"bucket_path\":\"/test/test\",\"is_primary\":\"true\",\"secondary_buffer\":\"\"},\"_all\":{\"enabled\":false},\"_source\":{\"enabled\":true},\"properties\":{\"@timestamp\":{\"fielddata\":{\"format\":\"doc_values\"},\"index\":\"not_analyzed\",\"type\":\"date\"}},\"dynamic_templates\":[{\"STAR_string\":{\"mapping\":{\"fielddata\":{\"format\":\"disabled\"},\"fields\":{\"raw\":{\"fielddata\":{\"format\":\"disabled\"},\"ignore_above\":256,\"index\":\"not_analyzed\",\"type\":\"string\"}},\"index\":\"analyzed\",\"omit_norms\":true,\"type\":\"string\"},\"match_mapping_type\":\"string\",\"path_match\":\"*\"}},{\"STAR_STAR\":{\"mapping\":{\"fielddata\":{\"format\":\"disabled\"},\"index\":\"not_analyzed\",\"type\":\"{dynamic_type}\"},\"match_mapping_type\":\"*\",\"path_match\":\"*\"}}]}}}";

    // Search index schema only
    {
        final ElasticsearchIndexServiceConfigBean schema_config = ElasticsearchIndexConfigUtils
                .buildConfigBeanFromSchema(search_index_test, _config, _mapper);

        // Derive the mapping type: "_default_" when collide_policy is new_type (the default),
        // otherwise the configured type name (falling back to the service's fixed type name).
        // This same idiom is repeated in each sub-case below.
        final Optional<String> type = Optional.ofNullable(schema_config.search_technology_override())
                .map(t -> t.type_name_or_prefix());
        final String index_type = CollidePolicy.new_type == Optional
                .ofNullable(schema_config.search_technology_override()).map(t -> t.collide_policy())
                .orElse(CollidePolicy.new_type) ? "_default_"
                        : type.orElse(ElasticsearchIndexServiceConfigBean.DEFAULT_FIXED_TYPE_NAME);

        final XContentBuilder mapping = ElasticsearchIndexUtils.createIndexMapping(search_index_test,
                Optional.empty(), true, schema_config, _mapper, index_type);

        assertEquals("Get expected search_index_test schema", expected, mapping.bytes().toUtf8());
    }

    // (Search index schema and doc schema only)
    {
        final DataBucketBean doc_test = BeanTemplateUtils.clone(search_index_test)
                .with(DataBucketBean::data_schema,
                        BeanTemplateUtils.clone(search_index_test.data_schema())
                                .with(DataSchemaBean::document_schema,
                                        BeanTemplateUtils.build(DataSchemaBean.DocumentSchemaBean.class)
                                                .with(DataSchemaBean.DocumentSchemaBean::deduplication_fields,
                                                        Arrays.asList("misc_id"))
                                                .done().get())
                                .done())
                .done();

        final ElasticsearchIndexServiceConfigBean schema_config = ElasticsearchIndexConfigUtils
                .buildConfigBeanFromSchema(doc_test, _config, _mapper);

        final Optional<String> type = Optional.ofNullable(schema_config.search_technology_override())
                .map(t -> t.type_name_or_prefix());
        final String index_type = CollidePolicy.new_type == Optional
                .ofNullable(schema_config.search_technology_override()).map(t -> t.collide_policy())
                .orElse(CollidePolicy.new_type) ? "_default_"
                        : type.orElse(ElasticsearchIndexServiceConfigBean.DEFAULT_FIXED_TYPE_NAME);

        final XContentBuilder mapping = ElasticsearchIndexUtils.createIndexMapping(doc_test, Optional.empty(),
                true, schema_config, _mapper, index_type);

        // Only checks that the annotation sub-object is present (exact mapping not pinned here)
        assertTrue("Should contain the annotation logic: " + mapping.string(),
                mapping.string().contains("\"__a\":{\"properties\":{"));

    }

    // Temporal + search index schema
    {
        final DataBucketBean temporal_test = BeanTemplateUtils.clone(search_index_test)
                .with(DataBucketBean::data_schema,
                        BeanTemplateUtils.clone(search_index_test.data_schema())
                                .with(DataSchemaBean::temporal_schema, BeanTemplateUtils
                                        .build(DataSchemaBean.TemporalSchemaBean.class).done().get())
                                .done())
                .done();

        final ElasticsearchIndexServiceConfigBean schema_config = ElasticsearchIndexConfigUtils
                .buildConfigBeanFromSchema(temporal_test, _config, _mapper);

        final Optional<String> type = Optional.ofNullable(schema_config.search_technology_override())
                .map(t -> t.type_name_or_prefix());
        final String index_type = CollidePolicy.new_type == Optional
                .ofNullable(schema_config.search_technology_override()).map(t -> t.collide_policy())
                .orElse(CollidePolicy.new_type) ? "_default_"
                        : type.orElse(ElasticsearchIndexServiceConfigBean.DEFAULT_FIXED_TYPE_NAME);

        final XContentBuilder mapping = ElasticsearchIndexUtils.createIndexMapping(temporal_test,
                Optional.empty(), true, schema_config, _mapper, index_type);

        // An empty temporal schema should not change the default mapping
        assertEquals("Get expected search_index_test schema", expected, mapping.bytes().toUtf8());
    }

    // Temporal + search index schema, with time field specified
    {
        final DataBucketBean temporal_test = BeanTemplateUtils.clone(search_index_test).with(
                DataBucketBean::data_schema,
                BeanTemplateUtils.clone(search_index_test.data_schema()).with(DataSchemaBean::temporal_schema,
                        BeanTemplateUtils.build(DataSchemaBean.TemporalSchemaBean.class)
                                .with(DataSchemaBean.TemporalSchemaBean::time_field, "testtime").done().get())
                        .done())
                .done();

        final ElasticsearchIndexServiceConfigBean schema_config = ElasticsearchIndexConfigUtils
                .buildConfigBeanFromSchema(temporal_test, _config, _mapper);

        //(has testtime inserted)
        final String expected2 = "{\"template\":\"test_test__f19167d49eac*\",\"settings\":{\"index.indices.fielddata.cache.size\":\"10%\",\"index.refresh_interval\":\"5s\"},\"mappings\":{\"_default_\":{\"_meta\":{\"bucket_path\":\"/test/test\",\"is_primary\":\"false\",\"secondary_buffer\":\"\"},\"_all\":{\"enabled\":false},\"_source\":{\"enabled\":true},\"properties\":{\"@timestamp\":{\"fielddata\":{\"format\":\"doc_values\"},\"index\":\"not_analyzed\",\"type\":\"date\"},\"testtime\":{\"fielddata\":{\"format\":\"doc_values\"},\"index\":\"not_analyzed\",\"type\":\"date\"}},\"dynamic_templates\":[{\"STAR_string\":{\"mapping\":{\"fielddata\":{\"format\":\"disabled\"},\"fields\":{\"raw\":{\"fielddata\":{\"format\":\"disabled\"},\"ignore_above\":256,\"index\":\"not_analyzed\",\"type\":\"string\"}},\"index\":\"analyzed\",\"omit_norms\":true,\"type\":\"string\"},\"match_mapping_type\":\"string\",\"path_match\":\"*\"}},{\"STAR_STAR\":{\"mapping\":{\"fielddata\":{\"format\":\"disabled\"},\"index\":\"not_analyzed\",\"type\":\"{dynamic_type}\"},\"match_mapping_type\":\"*\",\"path_match\":\"*\"}}]}}}";

        final Optional<String> type = Optional.ofNullable(schema_config.search_technology_override())
                .map(t -> t.type_name_or_prefix());
        final String index_type = CollidePolicy.new_type == Optional
                .ofNullable(schema_config.search_technology_override()).map(t -> t.collide_policy())
                .orElse(CollidePolicy.new_type) ? "_default_"
                        : type.orElse(ElasticsearchIndexServiceConfigBean.DEFAULT_FIXED_TYPE_NAME);

        // note: is_primary = false for this case (third createIndexMapping arg)
        final XContentBuilder mapping = ElasticsearchIndexUtils.createIndexMapping(temporal_test,
                Optional.empty(), false, schema_config, _mapper, index_type);

        assertEquals("Get expected search_index_test schema", expected2, mapping.bytes().toUtf8());
    }

    // Columnar + search index schema (note default columnar schema => revert to defaults)
    {
        final String expected_with_columnar_defaults = "{\"template\":\"test_test__f19167d49eac*\",\"settings\":{\"index.indices.fielddata.cache.size\":\"10%\",\"index.refresh_interval\":\"5s\"},\"aliases\":{\"r__test_test__f19167d49eac\":{}},\"mappings\":{\"_default_\":{\"_meta\":{\"bucket_path\":\"/test/test\",\"is_primary\":\"true\",\"secondary_buffer\":\"\"},\"_all\":{\"enabled\":false},\"_source\":{\"enabled\":true},\"properties\":{\"@timestamp\":{\"fielddata\":{\"format\":\"doc_values\"},\"index\":\"not_analyzed\",\"type\":\"date\"}},\"dynamic_templates\":[{\"STAR_string\":{\"mapping\":{\"fielddata\":{\"format\":\"paged_bytes\"},\"fields\":{\"raw\":{\"fielddata\":{\"format\":\"doc_values\"},\"ignore_above\":256,\"index\":\"not_analyzed\",\"type\":\"string\"}},\"index\":\"analyzed\",\"omit_norms\":true,\"type\":\"string\"},\"match_mapping_type\":\"string\",\"path_match\":\"*\"}},{\"STAR_number\":{\"mapping\":{\"index\":\"not_analyzed\",\"type\":\"number\",\"fielddata\":{\"format\":\"doc_values\"}},\"path_match\":\"*\",\"match_mapping_type\":\"number\"}},{\"STAR_date\":{\"mapping\":{\"index\":\"not_analyzed\",\"type\":\"date\",\"fielddata\":{\"format\":\"doc_values\"}},\"path_match\":\"*\",\"match_mapping_type\":\"date\"}},{\"STAR_STAR\":{\"mapping\":{\"fielddata\":{\"format\":\"disabled\"},\"index\":\"not_analyzed\",\"type\":\"{dynamic_type}\"},\"match_mapping_type\":\"*\",\"path_match\":\"*\"}}]}}}";

        final DataBucketBean columnar_test = BeanTemplateUtils.clone(search_index_test)
                .with(DataBucketBean::data_schema,
                        BeanTemplateUtils.clone(search_index_test.data_schema())
                                .with(DataSchemaBean::columnar_schema, BeanTemplateUtils
                                        .build(DataSchemaBean.ColumnarSchemaBean.class).done().get())
                                .done())
                .done();

        final ElasticsearchIndexServiceConfigBean schema_config = ElasticsearchIndexConfigUtils
                .buildConfigBeanFromSchema(columnar_test, _config, _mapper);

        final Optional<String> type = Optional.ofNullable(schema_config.search_technology_override())
                .map(t -> t.type_name_or_prefix());
        final String index_type = CollidePolicy.new_type == Optional
                .ofNullable(schema_config.search_technology_override()).map(t -> t.collide_policy())
                .orElse(CollidePolicy.new_type) ? "_default_"
                        : type.orElse(ElasticsearchIndexServiceConfigBean.DEFAULT_FIXED_TYPE_NAME);

        final XContentBuilder mapping = ElasticsearchIndexUtils.createIndexMapping(columnar_test,
                Optional.empty(), true, schema_config, _mapper, index_type);

        assertEquals("Get expected search_index_test schema", expected_with_columnar_defaults,
                mapping.bytes().toUtf8());
    }

    // Columnar + temporal search index schema (add one field to columnar schema to ensure that the defaults aren't applied)
    {
        final DataBucketBean temporal_columnar_test = BeanTemplateUtils.clone(search_index_test)
                .with(DataBucketBean::data_schema, BeanTemplateUtils.clone(search_index_test.data_schema())
                        .with(DataSchemaBean::temporal_schema,
                                BeanTemplateUtils.build(DataSchemaBean.TemporalSchemaBean.class).done().get())
                        .with(DataSchemaBean::columnar_schema,
                                BeanTemplateUtils.build(DataSchemaBean.ColumnarSchemaBean.class)
                                        .with(DataSchemaBean.ColumnarSchemaBean::field_type_include_list,
                                                Arrays.asList())
                                        .done().get())
                        .done())
                .done();

        final ElasticsearchIndexServiceConfigBean schema_config = ElasticsearchIndexConfigUtils
                .buildConfigBeanFromSchema(temporal_columnar_test, _config, _mapper);

        final Optional<String> type = Optional.ofNullable(schema_config.search_technology_override())
                .map(t -> t.type_name_or_prefix());
        final String index_type = CollidePolicy.new_type == Optional
                .ofNullable(schema_config.search_technology_override()).map(t -> t.collide_policy())
                .orElse(CollidePolicy.new_type) ? "_default_"
                        : type.orElse(ElasticsearchIndexServiceConfigBean.DEFAULT_FIXED_TYPE_NAME);

        final XContentBuilder mapping = ElasticsearchIndexUtils.createIndexMapping(temporal_columnar_test,
                Optional.empty(), true, schema_config, _mapper, index_type);

        // A non-default (but empty-field) columnar schema suppresses the columnar defaults
        assertEquals("Get expected search_index_test schema", expected, mapping.bytes().toUtf8());
    }

}

From source file:org.apache.pulsar.broker.admin.impl.NamespacesBase.java

/**
 * Lists the tenant's namespaces whose stored policies declare the given anti-affinity group.
 *
 * @param cluster           cluster name; validated to exist
 * @param antiAffinityGroup anti-affinity group to match (compared case-insensitively; must be non-blank)
 * @param tenant            tenant whose namespaces are scanned; admin access is validated first
 * @return the namespaces of {@code tenant} whose anti-affinity group equals {@code antiAffinityGroup}
 * @throws RestException with PRECONDITION_FAILED when the group is blank, or wrapping any failure
 *         while reading namespaces/policies from the metadata store
 */
protected List<String> internalGetAntiAffinityNamespaces(String cluster, String antiAffinityGroup,
        String tenant) {
    validateAdminAccessForTenant(tenant);

    log.info("[{}]-{} Finding namespaces for {} in {}", clientAppId(), tenant, antiAffinityGroup, cluster);

    if (isBlank(antiAffinityGroup)) {
        throw new RestException(Status.PRECONDITION_FAILED, "anti-affinity group can't be empty.");
    }
    validateClusterExists(cluster);

    try {
        List<String> namespaces = getListOfNamespaces(tenant);

        return namespaces.stream().filter(ns -> {
            Optional<Policies> policies;
            try {
                // ns is already a String — no toString() needed
                policies = policiesCache().get(AdminResource.path(POLICIES, ns));
            } catch (Exception e) {
                // surface cache/metadata failures to the outer handler below
                throw new RuntimeException(e);
            }

            // map() avoids allocating a throwaway Policies per namespace;
            // equalsIgnoreCase(null) is simply false for namespaces without a group
            String storedAntiAffinityGroup = policies.map(p -> p.antiAffinityGroup).orElse(null);
            return antiAffinityGroup.equalsIgnoreCase(storedAntiAffinityGroup);
        }).collect(Collectors.toList());

    } catch (Exception e) {
        log.warn("Failed to list of properties/namespace from global-zk", e);
        throw new RestException(e);
    }
}

From source file:org.openmhealth.shim.withings.mapper.WithingsSleepDurationDataPointMapper.java

/**
 * Maps an individual list node from the array in the Withings sleep summary endpoint response into a {@link
 * SleepDuration} data point.
 *
 * @param node activity node from the array "series" contained in the "body" of the endpoint response
 * @return a {@link DataPoint} object containing a {@link SleepDuration} measure with the appropriate values from
 * the JSON node parameter, wrapped as an {@link Optional}
 */
@Override
Optional<DataPoint<SleepDuration>> asDataPoint(JsonNode node) {

    Long lightSleepInSeconds = asRequiredLong(node, "data.lightsleepduration");
    Long deepSleepInSeconds = asRequiredLong(node, "data.deepsleepduration");
    Long remSleepInSeconds = asRequiredLong(node, "data.remsleepduration");

    // Total sleep is the sum of the three phases (wake time is not included)
    Long totalSleepInSeconds = lightSleepInSeconds + deepSleepInSeconds + remSleepInSeconds;

    SleepDuration.Builder sleepDurationBuilder = new SleepDuration.Builder(
            new DurationUnitValue(DurationUnit.SECOND, totalSleepInSeconds));

    Optional<Long> startDateInEpochSeconds = asOptionalLong(node, "startdate");
    Optional<Long> endDateInEpochSeconds = asOptionalLong(node, "enddate");

    // Only set an effective time frame when both endpoints are present;
    // Withings reports epoch seconds, interpreted here at UTC ("Z")
    if (startDateInEpochSeconds.isPresent() && endDateInEpochSeconds.isPresent()) {
        OffsetDateTime offsetStartDateTime = OffsetDateTime
                .ofInstant(Instant.ofEpochSecond(startDateInEpochSeconds.get()), of("Z"));
        OffsetDateTime offsetEndDateTime = OffsetDateTime
                .ofInstant(Instant.ofEpochSecond(endDateInEpochSeconds.get()), of("Z"));
        sleepDurationBuilder.setEffectiveTimeFrame(
                TimeInterval.ofStartDateTimeAndEndDateTime(offsetStartDateTime, offsetEndDateTime));
    }

    Optional<Long> externalId = asOptionalLong(node, "id");
    Optional<Long> modelId = asOptionalLong(node, "model");
    String modelName = null;

    if (modelId.isPresent()) {
        modelName = SleepDeviceTypes.valueOf(modelId.get());
    }

    SleepDuration sleepDuration = sleepDurationBuilder.build();
    Optional<Long> wakeupCount = asOptionalLong(node, "data.wakeupcount");
    if (wakeupCount.isPresent()) {
        // Integer.valueOf instead of the deprecated new Integer(...) boxing constructor
        sleepDuration.setAdditionalProperty("wakeup_count", Integer.valueOf(wakeupCount.get().intValue()));
    }

    // These sleep phase values are Withings platform-specific, so we pass them through as additionalProperties to
    // ensure we keep relevant platform specific values. Should be interpreted according to Withings API spec
    sleepDuration.setAdditionalProperty("light_sleep_duration",
            new DurationUnitValue(DurationUnit.SECOND, lightSleepInSeconds));
    sleepDuration.setAdditionalProperty("deep_sleep_duration",
            new DurationUnitValue(DurationUnit.SECOND, deepSleepInSeconds));
    sleepDuration.setAdditionalProperty("rem_sleep_duration",
            new DurationUnitValue(DurationUnit.SECOND, remSleepInSeconds));

    // This is an additional piece of information captured by Withings devices around sleep and should be
    // interpreted according to the Withings API specification. We do not capture durationtowakeup or
    // wakeupduration properties from the Withings API because it is unclear the distinction between them and we
    // aim to avoid creating more ambiguity through passing through these properties
    Optional<Long> timeToSleepValue = asOptionalLong(node, "data.durationtosleep");
    if (timeToSleepValue.isPresent()) {
        sleepDuration.setAdditionalProperty("duration_to_sleep",
                new DurationUnitValue(DurationUnit.SECOND, timeToSleepValue.get()));
    }

    return Optional.of(newDataPoint(sleepDuration, externalId.orElse(null), true, modelName));
}

From source file:alfio.manager.TicketReservationManager.java

/**
 * Create a ticket reservation. It will create a reservation _only_ if it can find enough tickets. Note that it will not do date/validity validation. This must be ensured by the
 * caller./*  w  w w.j a  v  a2  s .c o m*/
 *
 * @param event
 * @param list
 * @param reservationExpiration
 * @param forWaitingQueue
 * @return
 */
public String createTicketReservation(Event event, List<TicketReservationWithOptionalCodeModification> list,
        List<ASReservationWithOptionalCodeModification> additionalServices, Date reservationExpiration,
        Optional<String> specialPriceSessionId, Optional<String> promotionCodeDiscount, Locale locale,
        boolean forWaitingQueue)
        throws NotEnoughTicketsException, MissingSpecialPriceTokenException, InvalidSpecialPriceTokenException {
    String reservationId = UUID.randomUUID().toString();

    Optional<PromoCodeDiscount> discount = promotionCodeDiscount
            .flatMap((promoCodeDiscount) -> promoCodeDiscountRepository
                    .findPromoCodeInEventOrOrganization(event.getId(), promoCodeDiscount));

    ticketReservationRepository.createNewReservation(reservationId, reservationExpiration,
            discount.map(PromoCodeDiscount::getId).orElse(null), locale.getLanguage(), event.getId(),
            event.getVat(), event.isVatIncluded());
    list.forEach(t -> reserveTicketsForCategory(event, specialPriceSessionId, reservationId, t, locale,
            forWaitingQueue, discount.orElse(null)));

    int ticketCount = list.stream().map(TicketReservationWithOptionalCodeModification::getAmount)
            .mapToInt(Integer::intValue).sum();

    // apply valid additional service with supplement policy mandatory one for ticket
    additionalServiceRepository
            .findAllInEventWithPolicy(event.getId(),
                    AdditionalService.SupplementPolicy.MANDATORY_ONE_FOR_TICKET)
            .stream().filter(AdditionalService::getSaleable).forEach(as -> {
                AdditionalServiceReservationModification asrm = new AdditionalServiceReservationModification();
                asrm.setAdditionalServiceId(as.getId());
                asrm.setQuantity(ticketCount);
                reserveAdditionalServicesForReservation(event.getId(), reservationId,
                        new ASReservationWithOptionalCodeModification(asrm, Optional.empty()),
                        discount.orElse(null));
            });

    additionalServices.forEach(as -> reserveAdditionalServicesForReservation(event.getId(), reservationId, as,
            discount.orElse(null)));

    TicketReservation reservation = ticketReservationRepository.findReservationById(reservationId);

    OrderSummary orderSummary = orderSummaryForReservationId(reservation.getId(), event,
            Locale.forLanguageTag(reservation.getUserLanguage()));
    ticketReservationRepository.addReservationInvoiceOrReceiptModel(reservationId, Json.toJson(orderSummary));

    auditingRepository.insert(reservationId, null, event.getId(), Audit.EventType.RESERVATION_CREATE,
            new Date(), Audit.EntityType.RESERVATION, reservationId);

    return reservationId;
}

From source file:no.asgari.civilization.server.action.GameAction.java

/**
 * Joins a game. If it is full it will throw exception
 *
 * @param pbf         the game being joined
 * @param playerId    id of the joining player
 * @param colorOpt    preferred color; when absent, one is chosen automatically
 * @param gameCreator whether the joining player created the game
 * @throws WebApplicationException with BAD_REQUEST when the game is full or the player already joined
 */
private void joinGame(PBF pbf, String playerId, Optional<String> colorOpt, boolean gameCreator) {
    if (pbf.getNumOfPlayers() == pbf.getPlayers().size()) {
        log.warn("Cannot join the game. Its full");
        Response badReq = Response.status(Response.Status.BAD_REQUEST)
                .entity(new MessageDTO("Cannot join the game. Its full!")).build();
        throw new WebApplicationException(badReq);
    }

    Player player = playerCollection.findOneById(playerId);

    boolean playerAlreadyJoined = pbf.getPlayers().stream()
            .anyMatch(p -> p.getPlayerId().equals(player.getId()));
    if (playerAlreadyJoined) {
        log.warn("Cannot join the game. Player has already joined it");
        Response badReq = Response.status(Response.Status.BAD_REQUEST)
                .entity(new MessageDTO("Cannot join the game. You have already joined!")).build();
        throw new WebApplicationException(badReq);
    }

    player.getGameIds().add(pbf.getId());
    playerCollection.updateById(player.getId(), player);
    Playerhand playerhand;
    if (!pbf.getWithdrawnPlayers().isEmpty()) {
        // Recycle the seat of the first withdrawn player, transferring it to the joiner
        playerhand = pbf.getWithdrawnPlayers().remove(0);
        boolean updated = gameLogAction.updateGameLog(pbf.getId(), playerhand.getUsername(),
                player.getUsername());
        log.info("Managed to update gameLog: " + updated);
        playerhand.setEmail(player.getEmail());
        playerhand.setPlayerId(player.getId());
        playerhand.setUsername(player.getUsername());
    } else {
        // orElseGet so chooseColorForPlayer() only runs when no color was supplied
        // (orElse would eagerly evaluate it even for players who picked a color)
        String color = colorOpt.orElseGet(() -> chooseColorForPlayer(pbf));
        playerhand = createPlayerHand(player, color, gameCreator);
    }
    if (!pbf.getPlayers().contains(playerhand)) {
        createInfoLog(pbf.getId(),
                playerhand.getUsername() + " joined the game and is playing color " + playerhand.getColor());
        pbf.getPlayers().add(playerhand);
    }
    pbf = startIfAllPlayers(pbf);
    pbfCollection.updateById(pbf.getId(), pbf);
}