Example usage for java.util.stream IntStream rangeClosed

Introduction

This page collects usage examples for java.util.stream.IntStream.rangeClosed.

Prototype

public static IntStream rangeClosed(int startInclusive, int endInclusive) 

Document

Returns a sequential ordered IntStream from startInclusive (inclusive) to endInclusive (inclusive) by an incremental step of 1.
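
For quick orientation, here is a minimal, self-contained sketch (not taken from the examples below) contrasting the inclusive upper bound of rangeClosed with the exclusive upper bound of IntStream.range:

import java.util.stream.IntStream;

public class RangeClosedDemo {
    public static void main(String[] args) {
        // Both endpoints are included: prints "1 2 3 4 5 "
        IntStream.rangeClosed(1, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // range() excludes the upper bound: prints "1 2 3 4 "
        IntStream.range(1, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // rangeClosed(a, b) yields b - a + 1 values (an empty stream if a > b),
        // a natural fit for 1-based counts:
        System.out.println(IntStream.rangeClosed(1, 100).sum()); // 5050
    }
}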

Usage

From source file: com.ikanow.aleph2.logging.service.TestLoggingService.java

/**
 * Tests writing messages as user, system, and external, and checks that all the messages were stored.
 *
 * @throws InterruptedException
 * @throws ExecutionException
 */
@Test
public void test_logBucket() throws InterruptedException, ExecutionException {
    final String subsystem_name = "logging_test1";
    final int num_messages_to_log = 50;
    final DataBucketBean test_bucket = getTestBucket("test1", Optional.of(Level.ALL.toString()),
            Optional.empty());
    final IBucketLogger user_logger = logging_service.getLogger(test_bucket);
    final IBucketLogger system_logger = logging_service.getSystemLogger(test_bucket);
    final IBucketLogger external_logger = logging_service.getExternalLogger(subsystem_name);
    //log a few messages
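    //rangeClosed(1, num_messages_to_log) iterates i = 1..50 inclusive; each pass hits all
    //three loggers, so the bucket's logging index below ends up with 2 * 50 entries
    //(user + system) and the external bucket with 50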
    IntStream.rangeClosed(1, num_messages_to_log).boxed().forEach(i -> {
        user_logger.log(Level.ERROR, ErrorUtils.lazyBuildMessage(true, () -> subsystem_name,
                () -> "test_message " + i, () -> null, () -> "no error", () -> Collections.emptyMap()));
        system_logger.log(Level.ERROR, ErrorUtils.lazyBuildMessage(true, () -> subsystem_name,
                () -> "test_message " + i, () -> null, () -> "no error", () -> Collections.emptyMap()));
        external_logger.log(Level.ERROR, ErrorUtils.lazyBuildMessage(true, () -> subsystem_name,
                () -> "test_message " + i, () -> null, () -> "no error", () -> Collections.emptyMap()));
    });

    user_logger.flush();
    system_logger.flush();
    external_logger.flush();

    //check it's in ES, wait 10s max for the index to refresh
    final DataBucketBean logging_test_bucket = BucketUtils.convertDataBucketBeanToLogging(test_bucket);
    final IDataWriteService<BasicMessageBean> logging_crud = search_index_service.getDataService().get()
            .getWritableDataService(BasicMessageBean.class, logging_test_bucket, Optional.empty(),
                    Optional.empty())
            .get();
    waitForResults(logging_crud, 10, num_messages_to_log * 2);
    assertEquals(num_messages_to_log * 2, logging_crud.countObjects().get().longValue());

    final DataBucketBean logging_external_test_bucket = BucketUtils
            .convertDataBucketBeanToLogging(BeanTemplateUtils.clone(test_bucket)
                    .with(DataBucketBean::full_name, "/external/" + subsystem_name + "/").done());
    final IDataWriteService<BasicMessageBean> logging_crud_external = search_index_service.getDataService()
            .get().getWritableDataService(BasicMessageBean.class, logging_external_test_bucket,
                    Optional.empty(), Optional.empty())
            .get();
    waitForResults(logging_crud_external, 10, num_messages_to_log);
    assertEquals(num_messages_to_log, logging_crud_external.countObjects().get().longValue());

    //cleanup
    logging_crud.deleteDatastore().get();
}

From source file: com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchHiveUtils.java

/** Handles the prefix and suffix of the full hive schema
 *  https://www.elastic.co/guide/en/elasticsearch/hadoop/current/hive.html
 * @param table_name - if empty then "main_table"
 * @param bucket
 * @param schema
 * @param partial_hive_schema
 * @return
 */
public static Validation<String, String> generateFullHiveSchema(final Optional<String> table_name,
        final DataBucketBean bucket, final DataSchemaBean.DataWarehouseSchemaBean schema,
        Optional<Client> maybe_client, ElasticsearchIndexServiceConfigBean config) {
    // (ignore views for the moment)

    final String prefix = ErrorUtils.get("CREATE EXTERNAL TABLE {0} ", getTableName(bucket, schema));

    final DataSchemaBean.DataWarehouseSchemaBean.Table table = table_name.flatMap(t -> Optionals
            .ofNullable(schema.views()).stream().filter(v -> t.equals(v.database_name())).findFirst())
            .orElse(schema.main_table());

    final JsonNode user_schema = _mapper.convertValue(table.table_format(), JsonNode.class);

    final Validation<String, String> partial_table = generatePartialHiveSchema(prefix, user_schema, true);

    // (for the main table, just going to be the full alias - for views will need to be cleverer)
    final String index = Optionals
            .of(() -> bucket.data_schema().search_index_schema().technology_override_schema()
                    .get(SearchIndexSchemaDefaultBean.index_name_override_).toString())
            .orElseGet(() -> "r__" + BucketUtils.getUniqueSignature(bucket.full_name(), Optional.empty()));

    final Optional<ElasticsearchHiveOverrideBean> maybe_override = Optionals
            .of(() -> schema.technology_override_schema())
            .map(m -> BeanTemplateUtils.from(m, ElasticsearchHiveOverrideBean.class).get());

    // OK all this horrible code is intended to sort out the list of types to apply in the hive query
    final Optional<ElasticsearchHiveOverrideBean.TableOverride> table_override = maybe_override
            .map(cfg -> cfg.table_overrides().get(table_name.orElse(MAIN_TABLE_NAME)));
    final Optional<Set<String>> user_type_overrides = table_override.map(t -> t.types())
            .filter(l -> !l.isEmpty()).map(l -> new TreeSet<String>(l));
    final Set<String> mutable_type_set = user_type_overrides.orElseGet(() -> {
        return new TreeSet<String>(
                maybe_client.map(client -> ElasticsearchIndexUtils.getTypesForIndex(client, index).values())
                        .orElse(Collections.emptySet()));
    });

    final ElasticsearchIndexServiceConfigBean schema_config = ElasticsearchIndexConfigUtils
            .buildConfigBeanFromSchema(bucket, config, _mapper);
    final CollidePolicy collide_policy = Optionals
            .of(() -> schema_config.search_technology_override().collide_policy())
            .orElse(CollidePolicy.new_type);

    Optionals.of(() -> schema_config.search_technology_override().type_name_or_prefix()).map(Optional::of)
            .orElseGet(() -> Optional.of((collide_policy == CollidePolicy.new_type)
                    ? ElasticsearchContext.TypeContext.ReadWriteTypeContext.AutoRwTypeContext.DEFAULT_PREFIX
                    : ElasticsearchIndexServiceConfigBean.DEFAULT_FIXED_TYPE_NAME))
            .ifPresent(type_or_prefix -> {
                if (!user_type_overrides.isPresent()) { // leave alone if manually specified
                    if (collide_policy == CollidePolicy.new_type) { // add a few types
                        //TODO (ALEPH-17): need to make this get auto populated as new types are added, see the ALEPH-17 comment in ElasticsearchIndexService
                        if (mutable_type_set.size() < 10) {
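                            // seed candidate type names <prefix>1..<prefix>10 (both bounds inclusive)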
                            IntStream.rangeClosed(1, 10).boxed().map(i -> type_or_prefix + i.toString())
                                    .forEach(type -> mutable_type_set.add(type));
                        }
                    } else { // OK in this case just make sure the default type is represented
                        mutable_type_set.add(type_or_prefix);
                    }
                }
            });

    final String suffix = Optional.of(" STORED BY 'org.elasticsearch.hadoop.hive.EsStorageHandler' ")
            .map(s -> s + ErrorUtils.get(
                    "TBLPROPERTIES(''es.index.auto.create'' = ''false'', ''es.resource'' = ''{0}/{1}''", index,
                    mutable_type_set.stream().collect(Collectors.joining(","))))
            .map(s -> table_override.map(t -> t.name_mappings()).filter(m -> !m.isEmpty())
                    .map(m -> s + ", 'es.mapping.names' = '"
                            + m.entrySet().stream().map(kv -> kv.getKey() + ":" + kv.getValue())
                                    .collect(Collectors.joining(","))
                            + "'")
                    .orElse(s))
            .map(s -> table_override
                    .flatMap(t -> Optional.ofNullable(t.url_query()).map(ss -> "?" + ss).map(Optional::of)
                            .orElseGet(() -> Optional.ofNullable(t.json_query())
                                    .map(jq -> _mapper.convertValue(jq, JsonNode.class).toString())))
                    .map(ss -> s + ", 'es.query' = '" + ss + "'").orElse(s))
            .map(s -> s + ") ").get();

    return partial_table.map(s -> s + suffix);
}

From source file: org.ow2.proactive.workflow_catalog.rest.controller.WorkflowRevisionControllerIntegrationTest.java

@Test
public void testListWorkflowRevisionsShouldReturnSavedRevisions() {
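    // create exactly 25 extra revisions (rangeClosed is inclusive at both bounds);
    // the "25 + 2" asserted below presumably covers revisions created during test setup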
    IntStream.rangeClosed(1, 25).forEach(i -> {
        try {
            workflowRevisionService.createWorkflowRevision(secondWorkflowRevision.bucketId,
                    Optional.of(secondWorkflowRevision.id),
                    IntegrationTestUtil.getWorkflowAsByteArray("workflow.xml"));
        } catch (IOException e) {
            Assert.fail(e.getMessage());
        }
    });

    Response response = given().pathParam("bucketId", 1).pathParam("workflowId", 1).when()
            .get(WORKFLOW_REVISIONS_RESOURCE);

    int pageSize = response.getBody().jsonPath().getInt("page.size");

    response.then().assertThat().statusCode(HttpStatus.SC_OK)
            .body("_embedded.workflowMetadataList", hasSize(pageSize)).body("page.number", is(0))
            .body("page.totalElements", is(25 + 2));
}

From source file: com.ikanow.aleph2.shared.crud.mongodb.services.TestMongoDbCrudService.java

@Test
public void testIndexes() throws InterruptedException, ExecutionException {

    final MongoDbCrudService<TestBean, String> service = getTestService("testIndexes", TestBean.class,
            String.class);

    // Insert some objects to index
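    // (rangeClosed(1, 1000) yields exactly 1000 values, matching the count assertions below)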

    final List<TestBean> l = IntStream.rangeClosed(1, 1000).boxed().map(
            i -> BeanTemplateUtils.build(TestBean.class).with("test_string", "test_string" + i).done().get())
            .collect(Collectors.toList());

    service.storeObjects(l);

    assertEquals(1000, service._state.orig_coll.count());

    // 1) Add a new index

    final List<DBObject> initial_indexes = service._state.orig_coll.getIndexInfo();
    if (null == this._real_mongodb_connection) { // slightly different format:
        assertEquals(
                "[{ \"v\" : 1 , \"key\" : { \"_id\" : 1} , \"ns\" : \"test_db.testIndexes\" , \"name\" : \"_id_\"}]",
                initial_indexes.toString());
    } else {
        assertEquals(
                "[{ \"v\" : 1 , \"key\" : { \"_id\" : 1} , \"name\" : \"_id_\" , \"ns\" : \"test_db.testIndexes\"}]",
                initial_indexes.toString());
    }

    final Future<Boolean> done = service.optimizeQuery(Arrays.asList("test_string", "_id"));

    assertEquals(true, done.get());

    final List<DBObject> new_indexes = service._state.orig_coll.getIndexInfo();

    final BasicDBObject expected_index_nested = new BasicDBObject("test_string", 1);
    expected_index_nested.put("_id", 1);
    final BasicDBObject expected_index = new BasicDBObject("v", 1);
    expected_index.put("key", expected_index_nested);
    if (null == this._real_mongodb_connection) { // slightly different format:
        expected_index.put("ns", "test_db.testIndexes");
        expected_index.put("name", "test_string_1__id_1");
    } else {
        expected_index.put("name", "test_string_1__id_1");
        expected_index.put("ns", "test_db.testIndexes");
    }
    expected_index.put("background", true);

    final List<DBObject> expected_new_indexes = Arrays.asList(initial_indexes.get(0), expected_index);

    assertEquals(expected_new_indexes.toString(), new_indexes.toString());

    // 3) Remove an index that doesn't exist

    final boolean index_existed = service.deregisterOptimizedQuery(Arrays.asList("test_string", "test_long"));

    assertEquals(false, index_existed);

    final List<DBObject> nearly_final_indexes = service._state.orig_coll.getIndexInfo();

    assertEquals(expected_new_indexes.toString(), nearly_final_indexes.toString());

    // 4) Remove the index we just added

    final boolean index_existed_4 = service.deregisterOptimizedQuery(Arrays.asList("test_string", "_id"));

    assertEquals(true, index_existed_4);

    final List<DBObject> expected_new_indexes_4 = Arrays.asList(initial_indexes.get(0));

    final List<DBObject> final_indexes = service._state.orig_coll.getIndexInfo();

    assertEquals(expected_new_indexes_4.toString(), final_indexes.toString());

    //Finally check that you get an error if the index is too long:

    try {
        @SuppressWarnings("unused")
        final Future<Boolean> error = service
                .optimizeQuery(Arrays.asList("XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX01",
                        "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX02", "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX03",
                        "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX04", "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX05"));

        fail("Should have errored");
    } catch (Exception e) {
        System.out.println("Correctly got error: " + e.getMessage());
    }
}

From source file: energy.usef.core.service.business.CorePlanboardBusinessServiceTest.java

@Test
public void testStorePrognosisWithAplan() {
    // given
    Random random = new Random();
    Prognosis prognosis = new Prognosis();
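    // populate all 96 PTUs, numbered 1..96 inclusive, matching the
    // Mockito.times(96) verification at the end of the test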
    prognosis.getPTU().addAll(IntStream.rangeClosed(1, 96).mapToObj(elem -> {
        PTU ptuDto = new PTU();
        ptuDto.setPower(BigInteger.valueOf(random.nextInt(500)));
        ptuDto.setStart(BigInteger.valueOf(elem));
        return ptuDto;
    }).collect(Collectors.toList()));
    prognosis.setPeriod(new LocalDate());
    prognosis.setSequence(random.nextLong());
    prognosis.setType(PrognosisType.A_PLAN);

    PowerMockito.when(connectionGroupRepository.find(Matchers.anyString()))
            .thenReturn(PowerMockito.mock(ConnectionGroup.class));
    // when
    corePlanboardBusinessService.storePrognosis("agr.usef-example.com", prognosis, DocumentType.A_PLAN,
            DocumentStatus.PROCESSED, "agr.usef-example.com", null, false);

    // then
    Mockito.verify(ptuPrognosisRepository, Mockito.times(96)).persist(Matchers.any(PtuPrognosis.class));
}

From source file: com.ikanow.aleph2.logging.service.TestLoggingService.java

/**
 * Tests writing messages as user, system, and external at 3 different log levels, and verifies
 * that messages below the configured level were filtered out (not written to storage).
 *
 * @throws InterruptedException
 * @throws ExecutionException
 */
@Test
public void test_logFilter() throws InterruptedException, ExecutionException {
    final String subsystem_name = "logging_test2";
    final int num_messages_to_log_each_type = 5;
    final List<Level> levels = Arrays.asList(Level.DEBUG, Level.INFO, Level.ERROR);
    final DataBucketBean test_bucket = getTestBucket("test2", Optional.of(Level.ERROR.toString()),
            Optional.empty());
    final IBucketLogger user_logger = logging_service.getLogger(test_bucket);
    final IBucketLogger system_logger = logging_service.getSystemLogger(test_bucket);
    final IBucketLogger external_logger = logging_service.getExternalLogger(subsystem_name);
    //log a few messages
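    //5 messages x 3 levels x 3 loggers; the bucket is configured at Level.ERROR, so only
    //the ERROR entries (5 user + 5 system = 10) reach its index, while all 15 external
    //entries are stored (see the assertions below)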
    IntStream.rangeClosed(1, num_messages_to_log_each_type).boxed().forEach(i -> {
        levels.stream().forEach(level -> {
            user_logger.log(level, ErrorUtils.lazyBuildMessage(true, () -> subsystem_name,
                    () -> "test_message " + i, () -> null, () -> "no error", () -> Collections.emptyMap()));
            system_logger.log(level, ErrorUtils.lazyBuildMessage(true, () -> subsystem_name,
                    () -> "test_message " + i, () -> null, () -> "no error", () -> Collections.emptyMap()));
            external_logger.log(level, ErrorUtils.lazyBuildMessage(true, () -> subsystem_name,
                    () -> "test_message " + i, () -> null, () -> "no error", () -> Collections.emptyMap()));
        });
    });

    user_logger.flush();
    system_logger.flush();
    external_logger.flush();

    //check it's in ES, wait 10s max for the index to refresh
    final DataBucketBean logging_test_bucket = BucketUtils.convertDataBucketBeanToLogging(test_bucket);
    final IDataWriteService<BasicMessageBean> logging_crud = search_index_service.getDataService().get()
            .getWritableDataService(BasicMessageBean.class, logging_test_bucket, Optional.empty(),
                    Optional.empty())
            .get();
    waitForResults(logging_crud, 10, 10);
    assertEquals(10, logging_crud.countObjects().get().longValue()); //should only have logged ERROR messages

    final DataBucketBean logging_external_test_bucket = BucketUtils
            .convertDataBucketBeanToLogging(BeanTemplateUtils.clone(test_bucket)
                    .with(DataBucketBean::full_name, "/external/" + subsystem_name + "/").done());
    final IDataWriteService<BasicMessageBean> logging_crud_external = search_index_service.getDataService()
            .get().getWritableDataService(BasicMessageBean.class, logging_external_test_bucket,
                    Optional.empty(), Optional.empty())
            .get();
    waitForResults(logging_crud_external, 10, 15);
    assertEquals(15, logging_crud_external.countObjects().get().longValue());

    //cleanup
    logging_crud.deleteDatastore().get();
}

From source file: io.github.retz.inttest.RetzIntTest.java

@Test
public void scheduleAppTest2() throws Exception {
    URI uri = new URI("http://" + RETZ_HOST + ":" + RETZ_PORT);
    try (Client client = Client.newBuilder(uri).setAuthenticator(config.getAuthenticator()).build()) {
        loadSimpleApp(client, "echo3");

        List<EchoJob> finishedJobs = new LinkedList<>();
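        // rangeClosed(0, 32) yields 33 values (0..32, both inclusive), not 32;
        // together with the 8 extra argv values added below, jobNum comes to 41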
        List<Integer> argvList = IntStream.rangeClosed(0, 32).boxed().collect(Collectors.toList());
        argvList.addAll(Arrays.asList(42, 63, 64, 127, 128, 151, 192, 255));
        int jobNum = argvList.size();
        List<EchoJob> echoJobs = scheduleEchoJobs(client, "echo3", "echo ", argvList);
        assertThat(echoJobs.size(), is(jobNum));

        for (int i = 0; i < 32; i++) {
            List<EchoJob> toRemove = toRemove(client, echoJobs, false);
            if (!toRemove.isEmpty()) {
                i = 0; // progress was made: restart the 32-iteration wait window
            }
            echoJobs.removeAll(toRemove);
            finishedJobs.addAll(toRemove);
            if (echoJobs.isEmpty()) {
                break;
            }
            Thread.sleep(1000);

            System.err.println(TimestampHelper.now() + ": Finished=" + ClientHelper.finished(client).size()
                    + ", Running=" + ClientHelper.running(client).size() + ", Scheduled="
                    + ClientHelper.queue(client).size());
            for (Job finished : ClientHelper.finished(client)) {
                assertThat(finished.retry(), is(0));
                assertThat(finished.state(), is(Job.JobState.FINISHED));
                assertThat(finished.result(), is(RES_OK));
            }
        }
        assertThat(finishedJobs.size(), is(jobNum));

        assertThat(ClientHelper.finished(client).size(), greaterThanOrEqualTo(jobNum));
        assertThat(ClientHelper.running(client).size(), is(0));
        assertThat(ClientHelper.queue(client).size(), is(0));

        UnloadAppResponse unloadRes = (UnloadAppResponse) client.unload("echo3");
        assertThat(unloadRes.status(), is("ok"));
    }
}

From source file: com.dgtlrepublic.anitomyj.ParserNumber.java

/**
 * Match partial episodes, e.g. "4a", "111C".
 *
 * @param word  the word
 * @param token the token
 * @return true if the token matched
 */
public boolean matchPartialEpisodePattern(String word, Token token) {
    if (StringUtils.isEmpty(word))
        return false;
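    // Index of the first non-digit character. The closed upper bound probes
    // word.length() itself and relies on findFirst() short-circuiting; an all-digit
    // word would reach charAt(word.length()) and throw StringIndexOutOfBoundsException.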
    int foundIdx = IntStream.rangeClosed(0, word.length())
            .filter(value -> !Character.isDigit(word.charAt(value))).findFirst().orElse(word.length());
    int suffixLength = word.length() - foundIdx;

    Function<Integer, Boolean> isValidSuffix = c -> (c >= 'A' && c <= 'C') || (c >= 'a' && c <= 'c');

    if (suffixLength == 1 && isValidSuffix.apply((int) word.charAt(foundIdx)))
        if (setEpisodeNumber(word, token, true))
            return true;

    return false;
}

From source file: com.ikanow.aleph2.shared.crud.mongodb.services.TestMongoDbCrudService.java

@Test
public void singleObjectRetrieve() throws InterruptedException, ExecutionException {

    final MongoDbCrudService<TestBean, String> service = getTestService("singleObjectRetrieve", TestBean.class,
            String.class);

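    // build and store 10 beans with _id "id1".."id10" (rangeClosed includes both endpoints)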
    final List<TestBean> l = IntStream.rangeClosed(1, 10).boxed()
            .map(i -> BeanTemplateUtils.build(TestBean.class).with("_id", "id" + i)
                    .with("test_string", "test_string" + i).with("test_long", (Long) (long) i).done().get())
            .collect(Collectors.toList());

    service.storeObjects(l);

    assertEquals(10, service._state.orig_coll.count());

    service.optimizeQuery(Arrays.asList("test_string")).get(); // (The get() waits for completion)

    // For asserting vs strings where possible:
    final JacksonDBCollection<TestBean, String> mapper = service._state.coll;

    // 1) Get object by _id, exists

    final Future<Optional<TestBean>> obj1 = service.getObjectById("id1");

    //DEBUG
    //sysOut(mapper.convertToDbObject(obj1.get().get()).toString());

    assertEquals("{ \"_id\" : \"id1\" , \"test_string\" : \"test_string1\" , \"test_long\" : 1}",
            mapper.convertToDbObject(obj1.get().get()).toString());

    // 2) Get object by _id, exists, subset of fields

    // 2a) inclusive:

    final Future<Optional<TestBean>> obj2a = service.getObjectById("id2", Arrays.asList("_id", "test_string"),
            true);

    //DEBUG
    //sysOut(mapper.convertToDbObject(obj2a.get().get()).toString());

    assertEquals("{ \"_id\" : \"id2\" , \"test_string\" : \"test_string2\"}",
            mapper.convertToDbObject(obj2a.get().get()).toString());

    // 2a.1) inclusive without _id (which mongodb treats differently)

    final Future<Optional<TestBean>> obj2a_1 = service.getObjectById("id2", Arrays.asList("test_string"), true);

    assertEquals("{ \"test_string\" : \"test_string2\"}",
            mapper.convertToDbObject(obj2a_1.get().get()).toString());

    // 2b) exclusive:

    final Future<Optional<TestBean>> obj2b = service.getObjectById("id3", Arrays.asList("_id", "test_string"),
            false);

    //DEBUG
    //sysOut(mapper.convertToDbObject(obj2b.get().get()).toString());

    assertEquals("{ \"test_long\" : 3}", mapper.convertToDbObject(obj2b.get().get()).toString());

    // 3) Get object by _id, doesn't exist

    final Future<Optional<TestBean>> obj3 = service.getObjectById("id100", Arrays.asList("_id", "test_string"),
            false);

    assertEquals(false, obj3.get().isPresent());

    // 4) Get object by spec, exists

    final QueryComponent<TestBean> query = CrudUtils.allOf(TestBean.class).when("_id", "id4")
            .withAny("test_string", Arrays.asList("test_string1", "test_string4")).withPresent("test_long");

    final Future<Optional<TestBean>> obj4 = service.getObjectBySpec(query);

    assertEquals("{ \"_id\" : \"id4\" , \"test_string\" : \"test_string4\" , \"test_long\" : 4}",
            mapper.convertToDbObject(obj4.get().get()).toString());

    // 5) Get object by spec, exists, subset of fields

    final Future<Optional<TestBean>> obj5 = service.getObjectBySpec(query, Arrays.asList("_id", "test_string"),
            true);

    assertEquals("{ \"_id\" : \"id4\" , \"test_string\" : \"test_string4\"}",
            mapper.convertToDbObject(obj5.get().get()).toString());

    // 6) Get object by spec, doesn't exist

    final QueryComponent<TestBean> query6 = CrudUtils.allOf(TestBean.class).when("_id", "id3")
            .withAny("test_string", Arrays.asList("test_string1", "test_string4")).withPresent("test_long");

    final Future<Optional<TestBean>> obj6 = service.getObjectBySpec(query6, Arrays.asList("_id", "test_string"),
            false);
    assertEquals(false, obj6.get().isPresent());
}