Example usage for java.util Optional get

List of usage examples for java.util Optional get

Introduction

In this page you can find the example usage for java.util Optional get.

Prototype

public T get() 

Source Link

Document

If a value is present, returns the value, otherwise throws NoSuchElementException.

Usage

From source file:info.archinnov.achilles.internals.parser.UDFParser.java

/**
 * Parses every method of a function registry class into a {@link UDFSignature},
 * validating the declared keyspace and each method's parameter and return types.
 *
 * @param aptUtils annotation-processing helper used for validation and type erasure
 * @param elm      the annotated function registry class element
 * @return one {@code UDFSignature} per method declared on {@code elm}
 */
public static List<UDFSignature> parseFunctionRegistryAndValidateTypes(AptUtils aptUtils, TypeElement elm) {
    final List<ExecutableElement> methods = ElementFilter.methodsIn(elm.getEnclosedElements());
    final Optional<String> keyspace = AnnotationTree.findKeyspaceForFunctionRegistry(aptUtils, elm);

    final TypeName sourceClass = TypeName.get(aptUtils.erasure(elm));

    // Functions must not be declared in system keyspaces
    if (keyspace.isPresent()) {
        final String keyspaceName = keyspace.get();
        aptUtils.validateFalse(
                FORBIDDEN_KEYSPACES.contains(keyspaceName)
                        || FORBIDDEN_KEYSPACES.contains(keyspaceName.toLowerCase()),
                "The provided keyspace '%s' on function registry class '%s' is forbidden because it is a system keyspace",
                keyspaceName, sourceClass);
    }

    // A keyspace, when declared, must not be blank
    aptUtils.validateFalse(keyspace.isPresent() && isBlank(keyspace.get()),
            "The declared keyspace for function registry '%s' should not be blank",
            elm.getSimpleName().toString());

    return methods.stream().map(method -> {
        final List<TypeName> parametersType = method.getParameters().stream().map(VariableElement::asType)
                .map(TypeName::get).collect(toList());
        final String methodName = method.getSimpleName().toString();
        // Boxed parameter types paired with their declared names
        final List<UDFParamSignature> parameterSignatures = method.getParameters().stream()
                .map(x -> new UDFParamSignature(TypeName.get(x.asType()).box(), x.getSimpleName().toString()))
                .collect(toList());

        // Validate parameter types
        for (TypeName param : parametersType) {
            TypeValidator.validateNativeTypesForFunction(aptUtils, method, param, "argument");
        }

        // Validate return type: UDFs must return a value
        final TypeMirror returnTypeMirror = method.getReturnType();
        aptUtils.validateFalse(returnTypeMirror.getKind() == TypeKind.VOID,
                "The return type for the method '%s' on class '%s' should not be VOID", method.toString(),
                elm.getSimpleName().toString());

        final TypeName returnType = TypeName.get(returnTypeMirror).box();
        TypeValidator.validateNativeTypesForFunction(aptUtils, method, returnType, "return type");

        // Validate NOT system function, comparing names lowercase only
        aptUtils.validateFalse(SYSTEM_FUNCTIONS_NAME.contains(methodName.toLowerCase()),
                "The name of the function '%s' in class '%s' is reserved for system functions", method,
                sourceClass);

        return new UDFSignature(keyspace, sourceClass, methodName, returnType, parametersType,
                parameterSignatures);
    }).collect(toList());
}

From source file:io.github.jeddict.relation.mapper.initializer.ColumnUtil.java

/**
 * Exception Description: The @JoinColumns on the annotated element [method
 * get] from the entity class [class Employee] is incomplete.
 *
 * When the source entity class uses a composite primary key, a @JoinColumn
 * must be specified for each join column using the @JoinColumns. Both the
 * name and the referencedColumnName elements must be specified in each such
 * '@JoinColumn'./*from w  w w  . ja v a 2 s .  co  m*/
 */
public static void syncronizeCompositeKeyJoincolumn(TableWidget<DBTable> sourceTableWidget,
        final TableWidget<DBTable> targetTableWidget) {
    if (sourceTableWidget.getPrimaryKeyWidgets().size() > 1) {
        for (IPrimaryKeyWidget<DBColumn<Id>> primaryKeyWidget : sourceTableWidget.getPrimaryKeyWidgets()) {
            Optional<ReferenceFlowWidget> optionalReferenceFlowWidget = primaryKeyWidget
                    .getReferenceFlowWidget().stream()
                    .filter(r -> r.getForeignKeyWidget().getTableWidget() == targetTableWidget).findFirst();
            if (optionalReferenceFlowWidget.isPresent()) {
                ForeignKeyWidget foreignKeyWidget = optionalReferenceFlowWidget.get().getForeignKeyWidget();
                IJoinColumn joinColumn;
                if (foreignKeyWidget instanceof ParentAssociationColumnWidget) {
                    joinColumn = ((DBParentAssociationColumn) foreignKeyWidget.getBaseElementSpec())
                            .getJoinColumnOverride();
                } else {
                    joinColumn = ((DBForeignKey) foreignKeyWidget.getBaseElementSpec()).getJoinColumn();
                }
                if (StringUtils.isEmpty(joinColumn.getReferencedColumnName())) {
                    joinColumn.setReferencedColumnName(primaryKeyWidget.getName());
                }
                if (StringUtils.isEmpty(joinColumn.getName())) {
                    joinColumn.setName(foreignKeyWidget.getName());
                }
            }
        }
    }
}

From source file:com.baifendian.swordfish.common.storm.StormRestUtil.java

/**
 * Looks up the id of a deployed Storm topology by its name.
 *
 * @param topologyName exact topology name to match
 * @return the matching topology's id, or {@code StringUtils.EMPTY} when no
 *         topology with that name exists
 * @throws IOException if querying the Storm REST API fails
 */
public static String getTopologyId(String topologyName) throws IOException {
    // map/orElse avoids the explicit isPresent()/get() dance
    return getTopologySummary().getTopologies().stream()
            .filter(t -> StringUtils.equals(t.getName(), topologyName)).findFirst()
            .map(TopologyDto::getId)
            .orElse(StringUtils.EMPTY);
}

From source file:io.github.binout.jaxrs.csv.CsvSchemaFactory.java

/**
 * Builds the {@code CsvSchema} for an annotated bean class, honouring the
 * separator character, the skip-first-data-row flag, and an optional explicit
 * column ordering declared via CSV annotations.
 *
 * @param mapper   Jackson CSV mapper used to introspect the bean
 * @param csvClass the annotated bean class
 * @return the configured schema
 */
static CsvSchema buildSchema(CsvMapper mapper, Class csvClass) {
    CsvAnnotationIntrospector introspector = new CsvAnnotationIntrospector(csvClass);
    char separatorChar = introspector.separator();
    Optional<String[]> columns = introspector.columns();

    CsvSchema csvSchema = mapper.schemaFor(csvClass).withColumnSeparator(separatorChar)
            .withSkipFirstDataRow(introspector.skipFirstDataRow());
    if (columns.isPresent()) {
        // Rebuild the columns so they appear in the explicitly declared order
        CsvSchema.Builder builder = csvSchema.rebuild().clearColumns();
        for (String column : columns.get()) {
            // Reuse the type Jackson inferred for the column; only the order changes
            CsvSchema.Column oldColumn = csvSchema.column(column);
            builder.addColumn(column, oldColumn.getType());
        }
        csvSchema = builder.build();
    }

    return csvSchema;
}

From source file:io.github.lxgaming.teleportbow.managers.CommandManager.java

/**
 * Registers a child command class under the given parent command.
 *
 * Rejects self-registration, duplicate registration, and classes that fail
 * to instantiate; on success the new instance is added to the parent's
 * children.
 *
 * @param parentCommand command that will own the child
 * @param commandClass  class of the command to instantiate and attach
 * @return {@code true} if the command was registered, {@code false} otherwise
 */
public static boolean registerCommand(AbstractCommand parentCommand,
        Class<? extends AbstractCommand> commandClass) {
    // A command must not register itself as its own child
    if (parentCommand.getClass() == commandClass) {
        TeleportBow.getInstance().getLogger().warn("{} attempted to register itself",
                parentCommand.getClass().getSimpleName());
        return false;
    }

    if (getCommandClasses().contains(commandClass)) {
        TeleportBow.getInstance().getLogger().warn("{} has already been registered",
                commandClass.getSimpleName());
        return false;
    }

    // NOTE(review): the class is recorded before instantiation succeeds, so a
    // failed instantiation still blocks future attempts — presumably intended.
    getCommandClasses().add(commandClass);
    Optional<AbstractCommand> command = Toolbox.newInstance(commandClass);
    if (!command.isPresent()) {
        TeleportBow.getInstance().getLogger().error("{} failed to initialize", commandClass.getSimpleName());
        return false;
    }

    parentCommand.getChildren().add(command.get());
    TeleportBow.getInstance().getLogger().debug("{} registered for {}", commandClass.getSimpleName(),
            parentCommand.getClass().getSimpleName());
    return true;
}

From source file:io.github.retz.web.JobRequestHandler.java

/**
 * Handles the list-jobs endpoint: returns the caller's jobs filtered by the
 * requested state and tag, capped at {@code ListJobResponse.MAX_JOB_NUMBER}.
 *
 * @param req incoming request carrying the auth header and a ListJobRequest body
 * @param res response whose status and content type are set here
 * @return JSON-serialized ListJobResponse, or a quoted error string on SQL failure
 * @throws IOException if the request body cannot be parsed
 */
static String listJob(spark.Request req, spark.Response res) throws IOException {
    Optional<AuthHeader> authHeaderValue = WebConsole.getAuthInfo(req);
    // Resolve the user once; Optional.get() throws NoSuchElementException when
    // the Authorization header is absent (original behavior, kept deliberate).
    String user = Objects.requireNonNull(authHeaderValue.get().key());
    LOG.debug("list jobs owned by {}", user);
    ListJobRequest listJobRequest = MAPPER.readValue(req.body(), ListJobRequest.class);
    LOG.debug("q: state={}, tag={}", listJobRequest.state(), listJobRequest.tag());
    try {
        List<Job> jobs = JobQueue.list(user, listJobRequest.state(), listJobRequest.tag(), MAX_LIST_JOB_SIZE);

        // Signal truncation to the client instead of silently dropping jobs
        boolean more = false;
        if (jobs.size() > ListJobResponse.MAX_JOB_NUMBER) {
            more = true;
            jobs = jobs.subList(0, ListJobResponse.MAX_JOB_NUMBER);
        }
        ListJobResponse listJobResponse = new ListJobResponse(jobs, more);
        listJobResponse.ok();
        res.status(200);
        res.type("application/json");
        return MAPPER.writeValueAsString(listJobResponse);
    } catch (SQLException e) {
        LOG.error(e.toString(), e);
        res.status(500);
        return "\"Internal Error\"";
    }
}

From source file:net.sourceforge.fullsync.cli.Main.java

/**
 * Completes application startup: loads profiles, optionally runs a single
 * profile given on the command line, and starts the scheduler in daemon
 * and/or autostart mode.
 *
 * @param injector DI container providing runtime configuration and services
 */
public static void finishStartup(Injector injector) {
    RuntimeConfiguration rt = injector.getInstance(RuntimeConfiguration.class);
    Preferences preferences = injector.getInstance(Preferences.class);
    Scheduler scheduler = injector.getInstance(Scheduler.class);
    ProfileManager profileManager = injector.getInstance(ProfileManager.class);
    Synchronizer synchronizer = injector.getInstance(Synchronizer.class);
    Optional<String> profile = rt.getProfileToRun();
    profileManager.loadProfiles();
    if (profile.isPresent()) {
        handleRunProfile(synchronizer, profileManager, profile.get());
    }
    // orElse(false) already yields a boolean condition; explicit booleanValue() was redundant
    if (rt.isDaemon().orElse(false)) {
        daemonSchedulerListener = injector.getInstance(DaemonSchedulerListener.class);
        scheduler.start();
    }
    // NOTE(review): when daemon mode and autostart are both enabled the scheduler
    // is started twice — presumably Scheduler.start() is idempotent; verify.
    if (preferences.getAutostartScheduler()) {
        scheduler.start();
    }
}

From source file:com.hurence.logisland.plugin.PluginManager.java

/**
 * Removes the installed plugin matching the given artifact coordinate by
 * deleting its source archive from disk.
 *
 * This is CLI-tool behavior: any failure (undeletable file, no matching
 * component) terminates the JVM with exit status -1.
 *
 * @param artifact artifact coordinate identifying the plugin to remove
 */
private static void removePlugin(String artifact) {
    Optional<ModuleInfo> moduleInfo = findPluginMeta().entrySet().stream()
            .filter(e -> artifact.equals(e.getKey().getArtifact())).map(Map.Entry::getKey).findFirst();
    if (moduleInfo.isPresent()) {
        String filename = moduleInfo.get().getSourceArchive();
        System.out.println("Removing component jar: " + filename);
        if (!new File(filename).delete()) {
            // Report the path that actually failed to delete, not just the artifact id
            System.err.println("Unable to delete file " + filename);
            System.exit(-1);
        }

    } else {
        System.err.println("Found no installed component matching artifact " + artifact);
        System.exit(-1);
    }
}

From source file:io.github.retz.web.JobRequestRouter.java

/**
 * Serves a slice of a sandbox file belonging to a job, fetched over HTTP
 * from the job's Mesos agent.
 *
 * @param req carries {@code :id} path param plus {@code path}, {@code offset}
 *            and {@code length} query params
 * @param res response whose type and status are set here
 * @return JSON-serialized GetFileResponse (empty content when the job or file
 *         is unavailable)
 * @throws IOException if fetching or (de)serializing the payload fails
 */
public static String getFile(spark.Request req, spark.Response res) throws IOException {
    int id = Integer.parseInt(req.params(":id"));

    String file = req.queryParams("path");
    long offset = Long.parseLong(req.queryParams("offset"));
    long length = Long.parseLong(req.queryParams("length"));
    Optional<Job> job = JobQueue.getJob(id);

    LOG.debug("get-file: id={}, path={}, offset={}, length={}", id, file, offset, length);
    res.type("application/json");

    Optional<FileContent> fileContent;
    // url() is null while the job has not yet been started on Mesos
    if (job.isPresent() && job.get().url() != null
            && statHTTPFile(job.get().url(), file)) {
        String payload = fetchHTTPFile(job.get().url(), file, offset, length);
        LOG.debug("Payload length={}, offset={}", payload.length(), offset);
        // TODO: what the heck happens when a file is not UTF-8 encodable???? How Mesos works?
        fileContent = Optional.ofNullable(MAPPER.readValue(payload, FileContent.class));
    } else {
        fileContent = Optional.empty();
    }
    GetFileResponse getFileResponse = new GetFileResponse(job, fileContent);
    getFileResponse.ok();
    res.status(200);

    return MAPPER.writeValueAsString(getFileResponse);
}

From source file:com.act.biointerpretation.sarinference.ProductScorer.java

/**
 * Reads in scored SARs, checks them against a prediction corpus and positive inchi list to get a product ranking.
 * This method is static because it does not rely on any properties of the enclosing class to construct the job.
 * TODO: It would probably make more sense to make this its own class, i.e. <ProductScorer implements JavaRunnable>
 * TODO: improve the data structure used to store scored products- using an L2PredictionCorpus is pretty ugly
 *
 * @param predictionCorpus The prediction corpus to score.
 * @param scoredSars The scored SARs to use.
 * @param lcmsFile The set of positive LCMS inchis, to use in scoring.
 * @return A JavaRunnable to run the product scoring.
 */
public static JavaRunnable getProductScorer(File predictionCorpus, File scoredSars, File lcmsFile,
        File outputFile) {

    return new JavaRunnable() {
        @Override
        public void run() throws IOException {
            // Verify files
            FileChecker.verifyInputFile(predictionCorpus);
            FileChecker.verifyInputFile(scoredSars);
            FileChecker.verifyInputFile(lcmsFile);
            FileChecker.verifyAndCreateOutputFile(outputFile);

            // Build SAR node list and best sar finder
            SarTreeNodeList nodeList = new SarTreeNodeList();
            nodeList.loadFromFile(scoredSars);
            BestSarFinder sarFinder = new BestSarFinder(nodeList);

            // Build prediction corpus
            L2PredictionCorpus predictions = L2PredictionCorpus.readPredictionsFromJsonFile(predictionCorpus);

            // Build LCMS results
            IonAnalysisInterchangeModel lcmsResults = new IonAnalysisInterchangeModel();
            lcmsResults.loadResultsFromFile(lcmsFile);

            /**
             * Build map from predictions to their scores based on SAR
             * For each prediction, we add on auxiliary info about its SARs and score to its projector name.
             * TODO: build data structure to store a scored prediction, instead of hijacking the projector name.
             */
            Map<L2Prediction, Double> predictionToScoreMap = new HashMap<>();
            LOGGER.info("Scoring predictions.");
            for (L2Prediction prediction : predictions.getCorpus()) {
                String nameAppendage = lcmsResults.getLcmsDataForPrediction(prediction).toString(); // Always tack LCMS result onto name

                Optional<SarTreeNode> maybeBestSar = sarFinder.apply(prediction);

                if (maybeBestSar.isPresent()) {
                    // If a SAR was matched, add info about it to the projector name, and put its score into the map
                    SarTreeNode bestSar = maybeBestSar.get();
                    nameAppendage += ":" + bestSar.getHierarchyId() + ":" + bestSar.getRankingScore();
                    prediction.setProjectorName(prediction.getProjectorName() + nameAppendage);
                    predictionToScoreMap.put(prediction, bestSar.getRankingScore());
                } else {
                    // If no SAR is found, append "NO_SAR" to the prediction, and give it a ranking score of 0
                    nameAppendage += "NO_SAR";
                    prediction.setProjectorName(prediction.getProjectorName() + nameAppendage);
                    predictionToScoreMap.put(prediction, 0D);
                }
            }

            LOGGER.info("Sorting predictions in decreasing order of best associated SAR rank.");
            List<L2Prediction> predictionList = new ArrayList<>(predictionToScoreMap.keySet());
            predictionList
                    .sort((a, b) -> -Double.compare(predictionToScoreMap.get(a), predictionToScoreMap.get(b)));

            // Wrap results in a corpus and write to file.
            L2PredictionCorpus finalCorpus = new L2PredictionCorpus(predictionList);
            finalCorpus.writePredictionsToJsonFile(outputFile);
            // Fixed stray punctuation in the completion log message
            LOGGER.info("Complete!");
        }

        @Override
        public String toString() {
            return "ProductScorer:" + scoredSars.getName();
        }
    };
}