List of usage examples for java.util.Optional.orElseGet
public T orElseGet(Supplier<? extends T> supplier)
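orElseGet returns the value held by the Optional if one is present; otherwise it invokes the given Supplier and returns that result. Unlike orElse(other), whose fallback argument is always evaluated before the call, the supplier passed to orElseGet runs only when the Optional is empty, which is why it appears in the examples below wherever the fallback is costly (a repository lookup, certificate generation, resolving a bucket). A minimal, self-contained sketch (OrElseGetDemo and expensiveDefault are illustrative names, not taken from the projects below):

import java.util.Optional;

public class OrElseGetDemo {

    public static void main(String[] args) {
        Optional<String> present = Optional.of("configured");
        Optional<String> empty = Optional.empty();

        // Value present: the supplier is never invoked
        String a = present.orElseGet(OrElseGetDemo::expensiveDefault);

        // Value absent: the supplier runs and its result is returned
        String b = empty.orElseGet(OrElseGetDemo::expensiveDefault);

        System.out.println(a); // configured
        System.out.println(b); // default
    }

    // Stands in for any costly fallback: a database query, a remote call, etc.
    private static String expensiveDefault() {
        System.out.println("computing default...");
        return "default";
    }
}

Running it prints "computing default..." exactly once, for the empty Optional only.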
From source file:com.dickthedeployer.dick.web.service.BuildService.java
private void buildProject(Project project, String sha, Optional<String> lastMessageOptional,
        List<EnvironmentVariable> variables) throws BuildAlreadyQueuedException {
    Optional<Build> inQueue = buildDao.findByProjectAndInQueueTrue(project);
    if (inQueue.isPresent()) {
        throw new BuildAlreadyQueuedException();
    }
    // Only hit the repository when no last message was supplied by the caller
    String lastMessage = lastMessageOptional.orElseGet(() -> repositoryService.getLastMessage(project, sha));
    Build build = buildDao.save(new Build.Builder().withProject(project).withSha(sha).withRef(project.getRef())
            .withRepository(project.getRepository()).withLastMessage(lastMessage)
            .withEnvironmentVariables(variables).build());
    try {
        Dickfile dickfile = dickYmlService.loadDickFile(build);
        addGlobalVariables(build, dickfile);
        build.setStages(dickfile.getStageNames());
        build.setStatus(Build.Status.READY);
        buildDao.save(build);
        Stage firstStage = dickfile.getFirstStage();
        jobBuildService.prepareJobs(build, dickfile);
        if (firstStage.isAutorun()) {
            jobBuildService.buildStage(build, dickfile, firstStage);
        }
    } catch (DickFileMissingException ex) {
        log.info("Dickfile is missing", ex);
        build.setStatus(Build.Status.MISSING_DICKFILE);
        build.setInQueue(false);
        buildDao.save(build);
    }
}
From source file:com.github.lukaszbudnik.dqueue.jaxrs.service.QueueService.java
private Response consumeItem(Map<String, String> filtersMap, boolean ordered)
        throws InterruptedException, java.util.concurrent.ExecutionException {
    Optional<Response> response;
    if (ordered) {
        Future<Optional<OrderedItem>> itemFuture = queueClient.consumeOrdered(filtersMap);
        response = createConsumeOrderedResponse(itemFuture);
    } else {
        Future<Optional<Item>> itemFuture = queueClient.consume(filtersMap);
        response = createConsumeResponse(itemFuture);
    }
    // Build the 204 No Content response only if no item response was produced
    return response.orElseGet(() -> Response.noContent().build());
}
From source file:com.rcn.controller.ResourceController.java
@RequestMapping(value = "/create-certificate", method = RequestMethod.POST)
public String createCertPost(@RequestParam("resourceType") String resourceType,
        @RequestParam("certName") String certName, @RequestParam("validDays") int validDays,
        @RequestParam("certDesc") String certDesc, @RequestParam("certType") String certType,
        @RequestParam("taPemCert") String taPemCert, @RequestParam("taPkcs10") String taPkcs10,
        @RequestParam("ca") String ca, @RequestParam("caPassword") String caPassword,
        @RequestParam("password1") String password1, @RequestParam("password2") String password2,
        Authentication principal, Model model) {
    Optional<String> optError = !password1.equals(password2) ? Optional.of(l("password.does.not.match"))
            : Optional.empty();
    if (!optError.isPresent()) {
        try {
            String type = ResoureMapping.computeIfAbsent(resourceType, a -> {
                throw new IllegalArgumentException("could not map a resource key:" + a);
            });
            RcnUserDetail user = (RcnUserDetail) principal.getPrincipal();
            Long targetUserId = user.getTargetUser().getId();
            Optional<String> caCert = ca.trim().length() > 0
                    ? Optional.ofNullable(
                            resourceRepository.certById(targetUserId, user.getId(), Long.valueOf(ca)))
                    : Optional.empty();
            Optional<String> clientCert = Optional.ofNullable("certImport".equals(certType) ? taPemCert : null);
            Optional<String> pkcs10Req = Optional
                    .ofNullable("certGeneratePkcs10".equals(certType) ? taPkcs10 : null);
            String cnName = certName.startsWith("cn=") ? certName : "cn=" + certName;
            // Generate a certificate only when none was imported
            String certPem = clientCert.orElseGet(() -> certificateService.generateCert(cnName, password1,
                    validDays, caCert, caPassword, TYPE_CA.equals(type), pkcs10Req));
            Long resourceId = resourceRepository.createResource(targetUserId, type, certName, certDesc);
            certificateService.storeCert(resourceId, certPem, password1);
        } catch (Exception e) {
            log.error("createCertPost", e);
            optError = Optional.of(e.getMessage());
        }
    }
    optError.ifPresent(e -> model.addAttribute("error", e));
    return optError.map(a -> "create-certificate").orElse("redirect:/resources");
}
From source file:com.ikanow.aleph2.analytics.storm.services.StreamingEnrichmentContextService.java
@SuppressWarnings("unchecked") @Override// ww w .j av a 2 s . c om public <T> T getTopologyStorageEndpoint(final Class<T> clazz, final Optional<DataBucketBean> bucket) { if (_state_name == State.IN_TECHNOLOGY) { final DataBucketBean my_bucket = bucket.orElseGet(() -> _bucket.get()); if (!Optionals.of(() -> _job.get().output().is_transient()).orElse(false)) { // Final output for this analytic //TODO (ALEPH-12): handle child-buckets // Just return an aleph2 output bolt: return (T) new OutputBolt(my_bucket, this.getEnrichmentContextSignature(bucket, Optional.empty()), _user_topology.get().getClass().getName()); } else { final Optional<String> topic_name = _delegate.get().getOutputTopic(bucket, _job.get()); if (topic_name.isPresent()) { final ICoreDistributedServices cds = _delegate.get().getServiceContext() .getService(ICoreDistributedServices.class, Optional.empty()).get(); cds.createTopic(topic_name.get(), Optional.empty()); return (T) new TransientStreamingOutputBolt(my_bucket, _job.get(), _delegate.get().getAnalyticsContextSignature(bucket, Optional.empty()), _user_topology.get().getClass().getName(), topic_name.get()); } else { //TODO (ALEPH-12): Write an output bolt for temporary HDFS storage, and another one for both throw new RuntimeException(ErrorUtils.get(ErrorUtils.NOT_YET_IMPLEMENTED, "batch output from getTopologyStorageEndpoint")); } } } else { throw new RuntimeException(ErrorUtils.TECHNOLOGY_NOT_MODULE); } }
From source file:alfio.manager.StripeManager.java
private StripeExceptionHandler findExceptionHandler(StripeException exc) {
    final Optional<StripeExceptionHandler> eh = Optional.ofNullable(handlers.get(exc.getClass()));
    if (!eh.isPresent()) {
        log.warn("cannot find an ExceptionHandler for {}. Falling back to the default one.", exc.getClass());
    }
    // Fetch the default handler only when no specific one is registered
    return eh.orElseGet(() -> handlers.get(StripeException.class));
}
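This is a registry-with-default lookup: try the handler registered for the concrete exception class, and only if none exists fall back to the one keyed by the base StripeException type. A generic sketch of the same pattern (Handlers and lookupOrDefault are hypothetical names, assuming defaultKey is always mapped):

import java.util.Map;
import java.util.Optional;

final class Handlers {
    // Look up a specific entry; fall back lazily to the entry under defaultKey
    static <K, V> V lookupOrDefault(Map<K, V> registry, K key, K defaultKey) {
        return Optional.ofNullable(registry.get(key))
                .orElseGet(() -> registry.get(defaultKey));
    }
}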
From source file:org.zanata.tmx.TMXParserTest.java
@Test
@InRequestScope
public void parseTMX() throws Exception {
    // Create a TM
    TransMemory tm = createTMFromFile("/tmx/default-valid-tm.tmx");
    // Make sure everything is stored properly
    tm = getEm().find(TransMemory.class, tm.getId());
    assertThat(tm.getTranslationUnits().size()).isEqualTo(4);
    // Dates were modified to match the TM header in the file
    Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    cal.setTime(tm.getCreationDate());
    assertThat(cal.get(Calendar.YEAR)).isEqualTo(2013);
    assertThat(cal.get(Calendar.MONTH)).isEqualTo(4);
    assertThat(cal.get(Calendar.DATE)).isEqualTo(9);
    assertThat(tm.getSourceLanguage()).isEqualTo("en");
    // TM metadata
    assertThat(tm.getMetadata().size()).isGreaterThan(0);
    assertThat(tm.getMetadata().get(TMMetadataType.TMX14)).isNotNull();
    // Translation Units
    for (TransMemoryUnit tu : tm.getTranslationUnits()) {
        assertThat(tu.getTransUnitVariants().size()).isGreaterThan(0);
    }
    Optional<String> seg = tm.getTranslationUnits().stream()
            .filter(tu -> tu.getTransUnitId() != null && tu.getTransUnitId().equals("tuid2"))
            .map(TransMemoryUnit::getTransUnitVariants).map(map -> map.get("ja"))
            .map(TransMemoryUnitVariant::getPlainTextSegment).findAny();
    // A null-returning supplier, not orElseGet(null): a null Supplier
    // throws NullPointerException whenever the Optional is empty
    assertThat(seg.orElseGet(() -> null)).contains("????");
}
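When the fallback is simply null, prefer orElse(null) or a null-returning supplier. Passing null as the supplier itself compiles, and even appears to work while a value is present, but throws a NullPointerException the moment the Optional is empty, because that is when the supplier is invoked:

Optional<String> empty = Optional.empty();
String s1 = empty.orElse(null);          // ok: returns null
String s2 = empty.orElseGet(() -> null); // ok: the supplier runs and returns null
// empty.orElseGet(null);                // NullPointerException: null supplier invoked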
From source file:com.ikanow.aleph2.analytics.storm.services.MockAnalyticsContext.java
@Override
public IBucketLogger getLogger(Optional<DataBucketBean> bucket) {
    // Fall back lazily to the bucket held in mutable state
    return _logging_service.getLogger(bucket.orElseGet(() -> _mutable_state.bucket.get()));
}
From source file:com.vsct.dt.strowgr.admin.repository.consul.ConsulReaderTest.java
private void checkValidStatus(int status, boolean with404) throws ClientProtocolException {
    // given
    HttpResponse httpResponse = mock(HttpResponse.class);
    when(httpResponse.getStatusLine())
            .thenReturn(new BasicStatusLine(new ProtocolVersion("http1.1", 1, 1), status, ""));
    BasicHttpEntity givenHttpEntity = new BasicHttpEntity();
    givenHttpEntity.setContent(new ByteArrayInputStream("".getBytes(StandardCharsets.UTF_8)));
    when(httpResponse.getEntity()).thenReturn(givenHttpEntity);
    Optional<HttpEntity> httpEntity;
    // test
    if (with404) {
        httpEntity = new ConsulReader(null).parseHttpResponseAccepting404(httpResponse, this::getHttpEntity);
    } else {
        httpEntity = new ConsulReader(null).parseHttpResponse(httpResponse, this::getHttpEntity);
    }
    // check
    if (with404 && status == 404) {
        assertThat(httpEntity).isNotNull();
        assertThat(httpEntity.isPresent()).isFalse();
    } else {
        assertThat(httpEntity.isPresent()).as("for status " + status).isTrue();
        assertThat(httpEntity.orElseGet(() -> null)).as("for status " + status).isEqualTo(givenHttpEntity);
    }
}
From source file:com.ikanow.aleph2.analytics.storm.services.StreamingEnrichmentContextService.java
@SuppressWarnings("unchecked") @Override//from ww w .j a v a 2 s .c o m public <T> Collection<Tuple2<T, String>> getTopologyEntryPoints(final Class<T> clazz, final Optional<DataBucketBean> bucket) { if (_state_name == State.IN_TECHNOLOGY) { if (!ISpout.class.isAssignableFrom(clazz)) { throw new RuntimeException(ErrorUtils.get(ErrorUtils.INVALID_TOPOLOGY_CLASSES, clazz)); } final DataBucketBean my_bucket = bucket.orElseGet(() -> _bucket.get()); final BrokerHosts hosts = new ZkHosts(KafkaUtils.getZookeperConnectionString()); final String full_path = (_delegate.get().getServiceContext().getGlobalProperties() .distributed_root_dir() + GlobalPropertiesBean.BUCKET_DATA_ROOT_OFFSET + my_bucket.full_name()) .replace("//", "/"); return Optionals.ofNullable(_job.get().inputs()).stream() .flatMap(input -> _delegate.get().getInputTopics(bucket, _job.get(), input).stream()) .map(topic_name -> { _logger.debug("Created input topic for topology entry point: " + topic_name + " for bucket " + my_bucket.full_name()); final SpoutConfig spout_config = new SpoutConfig(hosts, topic_name, full_path, BucketUtils .getUniqueSignature(my_bucket.full_name(), Optional.of(_job.get().name()))); spout_config.scheme = new SchemeAsMultiScheme(new StringScheme()); final KafkaSpout kafka_spout = new KafkaSpout(spout_config); return Tuples._2T((T) kafka_spout, topic_name); }).collect(Collectors.toList()); } else { throw new RuntimeException(ErrorUtils.TECHNOLOGY_NOT_MODULE); } //TODO (ALEPH-12): More sophisticated spout building functionality (eg generic batch->storm checking via CRUD service), handle storage service possibly via Camus? //TODO (ALEPH-12): if a legit data service is specified then see if that service contains a spout and if so use that, else throw error //TODO: (if a legit data service is specified then need to ensure that the service is included in the underlying artefacts) }
From source file:com.ikanow.aleph2.analytics.storm.services.MockAnalyticsContext.java
@Override
public Optional<String> getOutputTopic(final Optional<DataBucketBean> bucket, final AnalyticThreadJobBean job) {
    final DataBucketBean this_bucket = bucket.orElseGet(() -> _mutable_state.bucket.get());
    if ((MasterEnrichmentType.streaming == job.output().transient_type())
            || (MasterEnrichmentType.streaming_and_batch == job.output().transient_type())) {
        final String topic = job.output().is_transient()
                ? _distributed_services.generateTopicName(this_bucket.full_name(), Optional.of(job.name()))
                : _distributed_services.generateTopicName(
                        Optional.ofNullable(job.output().sub_bucket_path()).orElse(this_bucket.full_name()),
                        ICoreDistributedServices.QUEUE_END_NAME);
        return Optional.of(topic);
    } else {
        return Optional.empty();
    }
}