Example usage for java.util Optional get

List of usage examples for java.util Optional get

Introduction

On this page you can find usage examples for java.util Optional.get().

Prototype

public T get() 

Document

If a value is present, returns the value, otherwise throws NoSuchElementException.
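
A minimal, self-contained sketch of this contract (the class name is illustrative):

import java.util.NoSuchElementException;
import java.util.Optional;

public class OptionalGetDemo {
    public static void main(String[] args) {
        Optional<String> present = Optional.of("value");
        System.out.println(present.get()); // prints "value"

        Optional<String> empty = Optional.empty();
        try {
            empty.get(); // get() on an empty Optional always throws
        } catch (NoSuchElementException e) {
            System.out.println("caught: " + e);
        }
    }
}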

Usage

From source file:io.github.retz.web.WebConsole.java

public static boolean kill(int id) throws Exception {
    if (!driver.isPresent()) {
        LOG.error("Driver is not present; this setup should be wrong");
        return false;
    }

    Optional<Boolean> result = Stanchion.call(() -> {
        // TODO: even a non-application owner can currently kill a job
        Optional<String> maybeTaskId = JobQueue.cancel(id, "Canceled by user");

        // There's a slight race between the cancel above and the kill below: if
        // RetzScheduler is right inside resourceOffers and the job is being scheduled,
        // no kill may be sent, in which case this call returns false.
        if (maybeTaskId.isPresent()) {
            Protos.TaskID taskId = Protos.TaskID.newBuilder().setValue(maybeTaskId.get()).build();
            Protos.Status status = driver.get().killTask(taskId);
            LOG.info("Job id={} was running and killed.");
            return status == Protos.Status.DRIVER_RUNNING;
        }
        return false;
    });

    if (result.isPresent()) {
        return result.get();
    } else {
        return false;
    }
}

From source file:com.msd.gin.halyard.tools.HalyardExport.java

/**
 * Export function is called for the export execution with given arguments.
 * @param conf Hadoop Configuration instance
 * @param log StatusLog notification service implementation for back-calls
 * @param source String source HTable name
 * @param query String SPARQL Graph query
 * @param targetUrl String URL of the target system (+folder or schema, +table or file name)
 * @param driverClass String JDBC Driver class name (for JDBC export only)
 * @param driverClasspath Array of URLs with the JDBC driver classpath (for JDBC export only)
 * @param jdbcProperties Array of String JDBC connection properties (for JDBC export only)
 * @param trimTable boolean option to trim the target JDBC table before export (for JDBC export only)
 * @throws ExportException in case of an export problem
 */
public static void export(Configuration conf, StatusLog log, String source, String query, String targetUrl,
        String driverClass, URL[] driverClasspath, String[] jdbcProperties, boolean trimTable)
        throws ExportException {
    try {
        QueryResultWriter writer = null;
        if (targetUrl.startsWith("file:") || targetUrl.startsWith("hdfs:")) {
            OutputStream out = FileSystem.get(URI.create(targetUrl), conf).create(new Path(targetUrl));
            try {
                if (targetUrl.endsWith(".bz2")) {
                    out = new CompressorStreamFactory()
                            .createCompressorOutputStream(CompressorStreamFactory.BZIP2, out);
                    targetUrl = targetUrl.substring(0, targetUrl.length() - 4);
                } else if (targetUrl.endsWith(".gz")) {
                    out = new CompressorStreamFactory()
                            .createCompressorOutputStream(CompressorStreamFactory.GZIP, out);
                    targetUrl = targetUrl.substring(0, targetUrl.length() - 3);
                }
            } catch (CompressorException e) {
                IOUtils.closeQuietly(out);
                throw new ExportException(e);
            }
            if (targetUrl.endsWith(".csv")) {
                writer = new CSVResultWriter(log, out);
            } else {
                Optional<RDFFormat> form = Rio.getWriterFormatForFileName(targetUrl);
                if (!form.isPresent())
                    throw new ExportException("Unsupported target file format extension: " + targetUrl);
                writer = new RIOResultWriter(log, form.get(), out);
            }
        } else if (targetUrl.startsWith("jdbc:")) {
            int i = targetUrl.lastIndexOf('/');
            if (i < 0)
                throw new ExportException("Taret URL does not end with /<table_name>");
            if (driverClass == null)
                throw new ExportException(
                        "Missing mandatory JDBC driver class name argument -c <driver_class>");
            writer = new JDBCResultWriter(log, targetUrl.substring(0, i), targetUrl.substring(i + 1),
                    jdbcProperties, driverClass, driverClasspath, trimTable);
        } else {
            throw new ExportException("Unsupported target URL protocol " + targetUrl);
        }
        new HalyardExport(source, query, writer, log).run(conf);
    } catch (IOException e) {
        throw new ExportException(e);
    }
}

From source file:com.exalttech.trex.util.Util.java

/**
 * Shows a confirmation message window for a delete action.
 *
 * @param deleteMsg the confirmation message to display
 * @return {@code true} if the user clicked YES, {@code false} otherwise
 */
public static boolean isConfirmed(String deleteMsg) {
    Alert confirmMsgBox = Util.getAlert(Alert.AlertType.CONFIRMATION);
    confirmMsgBox.getButtonTypes().clear();
    confirmMsgBox.getButtonTypes().addAll(ButtonType.YES, ButtonType.NO);
    confirmMsgBox.setContentText(deleteMsg);
    Optional<ButtonType> result = confirmMsgBox.showAndWait();
    return result.get() == ButtonType.YES;
}
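
Note that showAndWait() returns an empty Optional when the dialog is dismissed without a button being chosen, so result.get() can throw NoSuchElementException here. A safer variant of the last line, treating dismissal as NO, might be:

    return result.orElse(ButtonType.NO) == ButtonType.YES;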

From source file:io.github.retz.web.JobRequestHandler.java

static String schedule(spark.Request req, spark.Response res) throws IOException, InterruptedException {
    ScheduleRequest scheduleRequest = MAPPER.readValue(req.bodyAsBytes(), ScheduleRequest.class);
    res.type("application/json");
    Optional<Application> maybeApp = Applications.get(scheduleRequest.job().appid()); // TODO check owner right here
    if (!maybeApp.isPresent()) {
        // TODO: this warn log should not be emitted in a real stable release
        LOG.warn("No such application loaded: {}", scheduleRequest.job().appid());
        ErrorResponse response = new ErrorResponse("No such application: " + scheduleRequest.job().appid());
        res.status(404);
        return MAPPER.writeValueAsString(response);

    } else if (maybeApp.get().enabled()) {

        validateOwner(req, maybeApp.get());

        Job job = scheduleRequest.job();
        if (scheduler.isPresent()) {
            if (!scheduler.get().validateJob(job)) {
                String msg = "Job " + job.toString() + " does not fit system limit "
                        + scheduler.get().maxJobSize();
                // TODO: this warn log should not be emitted in a real stable release
                LOG.warn(msg);
                halt(400, msg);
            }
        }

        job.schedule(JobQueue.issueJobId(), TimestampHelper.now());

        JobQueue.push(job);
        if (scheduler.isPresent() && driver.isPresent()) {
            LOG.info("Trying invocation from offer stock: {}", job);
            scheduler.get().maybeInvokeNow(driver.get(), job);

        }

        ScheduleResponse scheduleResponse = new ScheduleResponse(job);
        scheduleResponse.ok();
        LOG.info("Job '{}' at {} has been scheduled at {}.", job.cmd(), job.appid(), job.scheduled());

        res.status(201);
        return MAPPER.writeValueAsString(scheduleResponse);

    } else {
        // Application is currently disabled
        res.status(401);
        ErrorResponse response = new ErrorResponse("Application " + maybeApp.get().getAppid() + " is disabled");
        return MAPPER.writeValueAsString(response);
    }
}

From source file:eu.mihosoft.vrl.v3d.Edge.java

public static List<Polygon> boundaryPathsWithHoles(List<Polygon> boundaryPaths) {

    List<Polygon> result = boundaryPaths.stream().map(p -> p.clone()).collect(Collectors.toList());

    List<List<Integer>> parents = new ArrayList<>();
    boolean[] isHole = new boolean[result.size()];

    for (int i = 0; i < result.size(); i++) {
        Polygon p1 = result.get(i);
        List<Integer> parentsOfI = new ArrayList<>();
        parents.add(parentsOfI);
        for (int j = 0; j < result.size(); j++) {
            Polygon p2 = result.get(j);
            if (i != j) {
                if (p2.contains(p1)) {
                    parentsOfI.add(j);
                }
            }
        }
        isHole[i] = parentsOfI.size() % 2 != 0;
    }

    int[] parent = new int[result.size()];

    for (int i = 0; i < parent.length; i++) {
        parent[i] = -1;
    }

    for (int i = 0; i < parents.size(); i++) {
        List<Integer> par = parents.get(i);

        int max = 0;
        int maxIndex = 0;
        for (int pIndex : par) {

            int pSize = parents.get(pIndex).size();

            if (max < pSize) {
                max = pSize;
                maxIndex = pIndex;
            }
        }

        parent[i] = maxIndex;

        if (!isHole[maxIndex] && isHole[i]) {

            List<Polygon> holes;

            Optional<List<Polygon>> holesOpt = result.get(maxIndex).getStorage().getValue(KEY_POLYGON_HOLES);

            if (holesOpt.isPresent()) {
                holes = holesOpt.get();
            } else {
                holes = new ArrayList<>();
                result.get(maxIndex).getStorage().set(KEY_POLYGON_HOLES, holes);
            }

            holes.add(result.get(i));
        }
    }

    return result;
}

From source file:io.pravega.controller.store.stream.tables.TableHelper.java

/**
 * Method to compute the segments sealed and created in the latest scale event.
 *
 * @param historyTable history table
 * @return pair of segments sealed and segments created in last scale event.
 */
public static Pair<List<Integer>, List<Integer>> getLatestScaleData(final byte[] historyTable) {
    final Optional<HistoryRecord> current = HistoryRecord.readLatestRecord(historyTable, false);
    ImmutablePair<List<Integer>, List<Integer>> result;
    if (current.isPresent()) {
        final Optional<HistoryRecord> previous = HistoryRecord.fetchPrevious(current.get(), historyTable);
        result = previous
                .map(historyRecord -> new ImmutablePair<>(
                        diff(historyRecord.getSegments(), current.get().getSegments()),
                        diff(current.get().getSegments(), historyRecord.getSegments())))
                .orElseGet(() -> new ImmutablePair<>(Collections.emptyList(), current.get().getSegments()));
    } else {
        result = new ImmutablePair<>(Collections.emptyList(), Collections.emptyList());
    }
    return result;
}

From source file:io.github.retz.web.JobRequestHandler.java

static String getDir(spark.Request req, spark.Response res) throws JsonProcessingException {
    Optional<Job> job;
    try {
        job = getJobAndVerify(req);
    } catch (IOException e) {
        return MAPPER.writeValueAsString(new ErrorResponse(e.toString()));
    }

    String path = req.queryParams("path");
    LOG.debug("get-path: path={}", path);
    res.type("application/json");

    // Translate the default path, as SparkJava's router doesn't route '.' or the empty string
    if (ListFilesRequest.DEFAULT_SANDBOX_PATH.equals(path)) {
        path = "";
    }

    List<DirEntry> ret;
    if (job.isPresent() && job.get().url() != null) {
        try {
            Pair<Integer, String> maybeJson = MesosHTTPFetcher.fetchHTTPDir(job.get().url(), path);
            if (maybeJson.left() == 200) {
                ret = MAPPER.readValue(maybeJson.right(), new TypeReference<List<DirEntry>>() {
                });
            } else {
                return MAPPER.writeValueAsString(
                        new ErrorResponse(path + ":" + maybeJson.left() + " " + maybeJson.right()));
            }
        } catch (FileNotFoundException e) {
            res.status(404);
            LOG.warn("path {} not found", path);
            return MAPPER.writeValueAsString(new ErrorResponse(path + " not found"));
        } catch (IOException e) {
            return MAPPER.writeValueAsString(new ErrorResponse(e.toString()));
        }
    } else {
        ret = Arrays.asList();
    }

    ListFilesResponse listFilesResponse = new ListFilesResponse(job, ret);
    listFilesResponse.status("ok");
    return MAPPER.writeValueAsString(listFilesResponse);
}

From source file:com.thinkbiganalytics.nifi.rest.support.NifiPropertyUtil.java

/**
 * Validates that the specified value is valid for the property.
 *
 * @param property the property
 * @param value    the value to validate
 * @return {@code true} if the value is valid for the property, or {@code false} otherwise
 */
private static boolean isValidPropertyValue(@Nonnull final NifiProperty property, final String value) {
    // Check for list of allowable values
    final Optional<List<NiFiAllowableValue>> allowableValues = Optional.of(property)
            .map(NifiProperty::getPropertyDescriptor).map(NiFiPropertyDescriptor::getAllowableValues);
    if (allowableValues.isPresent()) {
        return allowableValues.get().stream().filter(allowableValue -> allowableValue.getValue().equals(value))
                .findAny().isPresent();
    }
    return true;
}
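
Since the filter/findAny/isPresent chain only tests whether any allowable value matches, the same result can be expressed without get(); an equivalent sketch of the method body:

    return allowableValues
            .map(values -> values.stream().anyMatch(v -> v.getValue().equals(value)))
            .orElse(true);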

From source file:com.ikanow.aleph2.harvest.script.services.TestScriptHarvestService.java

private static DataBucketBean getTestbucket(final String full_name, final Optional<String> script,
        final Optional<String> local_script_file, Optional<String> resource_file,
        final Map<String, String> args, final List<String> required_assets) {
    final LinkedHashMap<String, Object> config = new LinkedHashMap<String, Object>();
    if (script.isPresent())
        config.put("script", script.get());
    if (local_script_file.isPresent())
        config.put("local_script_url", local_script_file.get());
    if (resource_file.isPresent())
        config.put("resource_name", resource_file.get());
    config.put("args", args);
    config.put("required_assets", required_assets);
    final List<HarvestControlMetadataBean> harvest_configs = new ArrayList<HarvestControlMetadataBean>();
    harvest_configs.add(
            new HarvestControlMetadataBean("harvester_1", true, null, new ArrayList<String>(), null, config));
    return BeanTemplateUtils.build(DataBucketBean.class).with(DataBucketBean::full_name, full_name)
            .with(DataBucketBean::harvest_configs, harvest_configs)
            .with(DataBucketBean::owner_id, "test_owner_id1234").done().get();
}
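
A hypothetical call, showing how the Optional-typed parameters are passed (the bucket name, script, and argument values are placeholders):

DataBucketBean bucket = getTestbucket("/test/bucket", Optional.of("echo hello"),
        Optional.empty(), Optional.empty(), Collections.emptyMap(), Collections.emptyList());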

From source file:org.kontalk.crypto.Coder.java

private static KeysResult getKeys(User user) {
    KeysResult result = new KeysResult();

    Optional<PersonalKey> optMyKey = AccountLoader.getInstance().getPersonalKey();
    if (!optMyKey.isPresent()) {
        LOGGER.log(Level.WARNING, "can't get personal key");
        result.errors.add(Error.MY_KEY_UNAVAILABLE);
        return result;
    }
    result.myKey = optMyKey.get();

    if (!user.hasKey()) {
        LOGGER.warning("key not found for user, id: " + user.getID());
        result.errors.add(Error.KEY_UNAVAILABLE);
        return result;
    }

    Optional<PGPCoderKey> optKey = PGPUtils.readPublicKey(user.getKey());
    if (!optKey.isPresent()) {
        LOGGER.warning("can't get sender key");
        result.errors.add(Error.INVALID_KEY);
        return result;
    }
    result.otherKey = optKey.get();

    return result;
}