Example usage for java.util Map forEach

List of usage examples for java.util Map forEach

Introduction

On this page you can find example usages for java.util.Map.forEach.

Prototype

default void forEach(BiConsumer<? super K, ? super V> action) 

Document

Performs the given action for each entry in this map until all entries have been processed or the action throws an exception.
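
Before the examples, a minimal sketch of the method in isolation (names here are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;

public class MapForEachDemo {
    public static void main(String[] args) {
        Map<String, Integer> ports = new LinkedHashMap<>();
        ports.put("http", 80);
        ports.put("https", 443);

        // The BiConsumer receives each key/value pair in the map's iteration order.
        ports.forEach((name, port) -> System.out.println(name + " -> " + port));
    }
}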

Usage

From source file:client.DockerPackageClient.java

/**
 * Convert a response from a docker registry GET tags call into a list of ImageMetadata objects,
 * effectively inverting the JSON map.
 *
 * @param tagsResponse Convertible to JSON which is expected to look like:
 *  {
 *    "latest": "9e89cc6f0bc3c38722009fe6857087b486531f9a779a0c17e3ed29dae8f12c4f",
 *    "0.1.1":  "b486531f9a779a0c17e3ed29dae8f12c4f9e89cc6f0bc3c38722009fe6857087"
 *  }
 * @return A list of image metadata objects
 */
private List<ImageMetadata> repoTagListToImages(final WSResponse tagsResponse, final String repoName) {
    final JsonNode jsonTagToShaMap = tagsResponse.asJson();
    final Map<String, List<String>> shaToTagList = Maps.newHashMap();

    final Iterator<Map.Entry<String, JsonNode>> tags = jsonTagToShaMap.fields();
    while (tags.hasNext()) {
        final Map.Entry<String, JsonNode> tagShaPair = tags.next();
        final String tagName = tagShaPair.getKey();
        final String sha = tagShaPair.getValue().textValue();
        // Add/create the tag list for this SHA
        if (shaToTagList.containsKey(sha)) {
            shaToTagList.get(sha).add(tagName);
        } else {
            final LinkedList<String> tagList = new LinkedList<>();
            tagList.add(tagName);
            shaToTagList.put(sha, tagList);
        }
    }

    final List<ImageMetadata> images = new LinkedList<>();
    shaToTagList.forEach((sha, tagList) -> images.add(new ImageMetadata(repoName, sha, tagList)));
    return images;
}
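
The containsKey/get/put branching above can usually be collapsed with Map.computeIfAbsent. A minimal sketch of the same inversion (Java 9+ for Map.of; hashes shortened for readability):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class InvertTagMapSketch {
    public static void main(String[] args) {
        Map<String, String> tagToSha = Map.of(
                "latest", "9e89cc6f", "0.1.1", "b486531f", "0.1.2", "9e89cc6f");

        // computeIfAbsent creates the list on first sight of a SHA, then appends.
        Map<String, List<String>> shaToTags = new HashMap<>();
        tagToSha.forEach((tag, sha) ->
                shaToTags.computeIfAbsent(sha, k -> new ArrayList<>()).add(tag));
        System.out.println(shaToTags);
    }
}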

From source file:alfio.manager.EventManager.java

public void updateTicketFieldDescriptions(Map<String, TicketFieldDescriptionModification> descriptions) {
    descriptions.forEach((locale, value) -> {
        String description = Json.GSON.toJson(value.getDescription());
        if (0 == ticketFieldRepository.updateDescription(value.getTicketFieldConfigurationId(), locale,
                description)) {
            ticketFieldRepository.insertDescription(value.getTicketFieldConfigurationId(), locale, description);
        }
    });
}
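
For plain in-memory maps, the same update-or-insert shape is often written with Map.merge; a minimal sketch (the repository calls above are the persistent analogue):

import java.util.HashMap;
import java.util.Map;

public class UpsertSketch {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        counts.put("en", 1);

        // merge inserts the value when the key is absent and remaps when present.
        Map.of("en", 2, "de", 1).forEach((locale, n) -> counts.merge(locale, n, Integer::sum));
        System.out.println(counts); // {en=3, de=1} (order may vary)
    }
}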

From source file:nl.knaw.huygens.alexandria.dropwizard.cli.commands.AlexandriaCommand.java

void checkoutView(final String viewName) {
    boolean showAll = MAIN_VIEW.equals(viewName);

    if (showAll) {
        System.out.println("Checking out main view...");
    } else {
        System.out.printf("Checking out view %s...%n", viewName);
    }
    try (TAGStore store = getTAGStore()) {
        CLIContext context = readContext();
        Map<String, FileInfo> watchedTranscriptions = new HashMap<>();
        context.getWatchedFiles().entrySet().stream()
                .filter(e -> e.getValue().getFileType().equals(FileType.tagmlSource)).forEach(e -> {
                    String fileName = e.getKey();
                    FileInfo fileInfo = e.getValue();
                    watchedTranscriptions.put(fileName, fileInfo);
                });

        Map<String, DocumentInfo> documentIndex = context.getDocumentInfo();
        store.runInTransaction(() -> {
            TAGView tagView = showAll ? TAGViews.getShowAllMarkupView(store)
                    : getExistingView(viewName, store, context);
            watchedTranscriptions.forEach((fileName, fileInfo) -> {
                System.out.printf("  updating %s...%n", fileName);
                String documentName = fileInfo.getObjectName();
                final Long docId = documentIndex.get(documentName).getDbId();
                exportTAGML(context, store, tagView, fileName, docId);
                try {
                    Instant lastModified = Files.getLastModifiedTime(workFilePath(fileName)).toInstant();
                    context.getWatchedFiles().get(fileName).setLastCommit(lastModified);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            });
        });
        context.setActiveView(viewName);
        storeContext(context);
    }
}
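
The filter-then-forEach above populates watchedTranscriptions by side effect; the same map can be built with Collectors.toMap, sketched here with strings standing in for FileInfo:

import java.util.Map;
import java.util.stream.Collectors;

public class FilterToMapSketch {
    public static void main(String[] args) {
        Map<String, String> watchedFiles = Map.of(
                "a.tagml", "tagmlSource", "notes.txt", "other", "b.tagml", "tagmlSource");

        // Collecting avoids mutating an external map from inside forEach.
        Map<String, String> transcriptions = watchedFiles.entrySet().stream()
                .filter(e -> e.getValue().equals("tagmlSource"))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
        System.out.println(transcriptions);
    }
}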

From source file:com.redhat.red.build.koji.KojiClient.java

private UrlBuilder sessionUrlBuilder(KojiSessionInfo session, Supplier<Map<String, Object>> paramEditor) {
    return (url) -> {
        if (session == null) {
            return new UrlBuildResult(url);
        }

        Map<String, String> params = new HashMap<>();
        params.put(SESSION_ID_PARAM, Integer.toString(session.getSessionId()));
        params.put(SESSION_KEY_PARAM, session.getSessionKey());
        params.put(CALL_NUMBER_PARAM, Integer.toString(callCount.getAndIncrement()));

        if (paramEditor != null) {
            Map<String, Object> extraParams = paramEditor.get();
            if (extraParams != null) {
                MalformedURLException error = (MalformedURLException) extraParams.get(EMBEDDED_ERROR_PARAM);
                if (error != null) {
                    return new UrlBuildResult(error);
                } else {
                    extraParams.forEach((key, value) -> {
                        params.put(key, String.valueOf(value));
                    });
                }
            }
        }

        String result = UrlUtils.buildUrl(url, params);

        Logger logger = LoggerFactory.getLogger(KojiClient.class);
        logger.debug("\n\n\n\nBuild URL: {}\n\n\n\n", result);
        return new UrlBuildResult(result);
    };
}
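
Note the String.valueOf(value) in the copy loop above: unlike value.toString(), it is null-safe. A minimal sketch:

import java.util.HashMap;
import java.util.Map;

public class StringifyParamsSketch {
    public static void main(String[] args) {
        Map<String, Object> extraParams = new HashMap<>();
        extraParams.put("count", 42);
        extraParams.put("flag", null);

        Map<String, String> params = new HashMap<>();
        // String.valueOf(null reference) yields the string "null" instead of throwing an NPE.
        extraParams.forEach((key, value) -> params.put(key, String.valueOf(value)));
        System.out.println(params); // {count=42, flag=null} (order may vary)
    }
}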

From source file:ijfx.core.batch.BatchService.java

public boolean executeModule(BatchSingleInput input, Module module, Map<String, Object> parameters) {

    logger.info("Executing module " + module.getDelegateObject().getClass().getSimpleName());
    logger.info("Injecting input");
    boolean inputInjectionSuccess = injectInput(input, module);

    if (!inputInjectionSuccess) {
        logger.warning("Error when injecting input.");
        return false;
    }

    logger.info("Injecting parameters");
    parameters.forEach((key, value) -> {
        if (value == null) {
            return;
        }
        logger.info(String.format("Parameter : %s = %s", key, value.toString()));
        module.setInput(key, value);
        module.setResolved(key, true);
    });

    module.getInputs().forEach((key, value) -> {
        module.setResolved(key, true);
    });

    // calling the batch preprocessor plugins
    pluginService.createInstancesOfType(BatchPrepreprocessorPlugin.class)
            .forEach(processor -> processor.process(input, module, parameters));

    String moduleName = module.getInfo().getDelegateClassName();
    logger.info(String.format("[%s] starting module", moduleName));

    logger.info("Running module");
    Future<Module> run;

    try {
        getContext().inject(module);
        module.initialize();
    } catch (Exception e) {
        logger.info("Context already injected.");
    }
    run = moduleService.run(module, getPreProcessors(), getPostprocessors(), parameters);
    logger.info(String.format("[%s] module started", moduleName));

    try {
        run.get();
        logger.info(String.format("[%s] module finished", moduleName));
        extractOutput(input, module);
    } catch (Exception ex) {
        logger.log(Level.SEVERE, "Error when extracting output from module " + moduleName, ex);
        return false;

    }

    return true;

}
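
The bare `return` inside the parameters.forEach lambda above ends the BiConsumer call for the current entry only; it behaves like `continue`, not `break`. A minimal sketch:

import java.util.HashMap;
import java.util.Map;

public class SkipNullEntriesSketch {
    public static void main(String[] args) {
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("sigma", 2.0);
        parameters.put("mask", null);
        parameters.put("radius", 5);

        parameters.forEach((key, value) -> {
            if (value == null) {
                return; // skips this entry only; iteration continues with the next one
            }
            System.out.println(key + " = " + value);
        });
    }
}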

From source file:org.apache.samza.container.grouper.task.GroupByContainerIds.java

/**
 * {@inheritDoc}
 *
 * When there are `t` tasks and `p` processors, where t &lt;= p, a fair task distribution should ideally assign
 * (t / p) tasks to each processor. In addition to guaranteeing a fair distribution, this {@link TaskNameGrouper}
 * implementation generates a locationId aware task assignment to processors where it makes best efforts in assigning
 * the tasks to processors with the same locality.
 *
 * Task assignment to processors is accomplished through the following two phases:
 *
 * 1. In the first phase, each task(T) is assigned to a processor(P) that satisfies the following constraints:
 *    A. The processor(P) should have the same locality of the task(T).
 *    B. Number of tasks already assigned to the processor should be less than the (number of tasks / number of processors).
 *
 * 2. Each task left unassigned after phase 1 is then mapped to any processor with a task count
 * less than (number of tasks / number of processors). When no such processor exists, the
 * unassigned task is mapped to a processor chosen from the available processors in a round-robin fashion.
 */
@Override
public Set<ContainerModel> group(Set<TaskModel> taskModels, GrouperMetadata grouperMetadata) {
    Map<TaskName, LocationId> taskLocality = grouperMetadata.getTaskLocality();
    // Validate that the task models are not empty.
    Preconditions.checkArgument(!taskModels.isEmpty(),
            "No tasks found. Likely due to no input partitions. Can't run a job with no tasks.");

    // Invoke the default grouper when the processor locality does not exist.
    if (MapUtils.isEmpty(grouperMetadata.getProcessorLocality())) {
        LOG.info("ProcessorLocality is empty. Generating with the default group method.");
        return group(taskModels, new ArrayList<>());
    }

    Map<String, LocationId> processorLocality = new TreeMap<>(grouperMetadata.getProcessorLocality());
    /**
     * When there are more processors than task models, choose the lexicographically least `x` processors (where x = tasks.size()).
     */
    if (processorLocality.size() > taskModels.size()) {
        processorLocality = processorLocality.entrySet().stream().limit(taskModels.size())
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    Map<LocationId, List<String>> locationIdToProcessors = new HashMap<>();
    Map<String, TaskGroup> processorIdToTaskGroup = new HashMap<>();

    // Generate the {@see LocationId} to processors mapping and processorId to {@see TaskGroup} mapping.
    processorLocality.forEach((processorId, locationId) -> {
        List<String> processorIds = locationIdToProcessors.getOrDefault(locationId, new ArrayList<>());
        processorIds.add(processorId);
        locationIdToProcessors.put(locationId, processorIds);
        processorIdToTaskGroup.put(processorId, new TaskGroup(processorId, new ArrayList<>()));
    });

    int numTasksPerProcessor = taskModels.size() / processorLocality.size();
    Set<TaskName> assignedTasks = new HashSet<>();

    /**
     * A processor is considered under-assigned when number of tasks assigned to it is less than
     * (number of tasks / number of processors).
     * Map the tasks to the under-assigned processors with same locality.
     */
    for (TaskModel taskModel : taskModels) {
        LocationId taskLocationId = taskLocality.get(taskModel.getTaskName());
        if (taskLocationId != null) {
            List<String> processorIds = locationIdToProcessors.getOrDefault(taskLocationId, new ArrayList<>());
            for (String processorId : processorIds) {
                TaskGroup taskGroup = processorIdToTaskGroup.get(processorId);
                if (taskGroup.size() < numTasksPerProcessor) {
                    taskGroup.addTaskName(taskModel.getTaskName().getTaskName());
                    assignedTasks.add(taskModel.getTaskName());
                    break;
                }
            }
        }
    }

    /**
     * In some scenarios, a task either has no previous locality, or no processor maps to
     * its previous locality. This cyclic iterator over processorIds lets us assign such
     * tasks to processors in a round-robin fashion.
     */
    Iterator<String> processorIdsCyclicIterator = Iterators.cycle(processorLocality.keySet());

    // Order the taskGroups to choose a task group in a deterministic fashion for unassigned tasks.
    List<TaskGroup> taskGroups = new ArrayList<>(processorIdToTaskGroup.values());
    taskGroups.sort(Comparator.comparing(TaskGroup::getContainerId));

    /**
     * For the tasks left over from the previous stage, map them to any under-assigned processor.
     * When no under-assigned processor exists, map them to any of the available
     * processors in a round-robin manner.
     */
    for (TaskModel taskModel : taskModels) {
        if (!assignedTasks.contains(taskModel.getTaskName())) {
            Optional<TaskGroup> underAssignedTaskGroup = taskGroups.stream()
                    .filter(taskGroup -> taskGroup.size() < numTasksPerProcessor).findFirst();
            if (underAssignedTaskGroup.isPresent()) {
                underAssignedTaskGroup.get().addTaskName(taskModel.getTaskName().getTaskName());
            } else {
                TaskGroup taskGroup = processorIdToTaskGroup.get(processorIdsCyclicIterator.next());
                taskGroup.addTaskName(taskModel.getTaskName().getTaskName());
            }
            assignedTasks.add(taskModel.getTaskName());
        }
    }

    return TaskGroup.buildContainerModels(taskModels, taskGroups);
}
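
The round-robin fallback relies on Guava's Iterators.cycle, which wraps around indefinitely. A minimal sketch of that assignment step in isolation (assuming Guava on the classpath):

import com.google.common.collect.Iterators;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

public class RoundRobinSketch {
    public static void main(String[] args) {
        List<String> processors = List.of("p0", "p1", "p2");
        List<String> tasks = List.of("t0", "t1", "t2", "t3", "t4");

        // cycle() never exhausts, so the fourth task wraps back to p0.
        Iterator<String> cyclic = Iterators.cycle(processors);
        Map<String, List<String>> assignment = new HashMap<>();
        for (String task : tasks) {
            assignment.computeIfAbsent(cyclic.next(), k -> new ArrayList<>()).add(task);
        }
        System.out.println(assignment); // p0=[t0, t3], p1=[t1, t4], p2=[t2] (map order may vary)
    }
}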

From source file:org.wso2.carbon.identity.application.authentication.framework.config.model.graph.JsGraphBuilder.java

/**
 * Creates the graph with the given Script and step map.
 *
 * @param script the Dynamic authentication script.
 */
public JsGraphBuilder createWith(String script) {

    try {
        currentBuilder.set(this);
        Bindings globalBindings = engine.getBindings(ScriptContext.GLOBAL_SCOPE);
        Bindings engineBindings = engine.getBindings(ScriptContext.ENGINE_SCOPE);
        globalBindings.put(FrameworkConstants.JSAttributes.JS_FUNC_EXECUTE_STEP,
                (StepExecutor) this::executeStep);
        globalBindings.put(FrameworkConstants.JSAttributes.JS_FUNC_SEND_ERROR,
                (BiConsumer<String, Map>) this::sendError);
        globalBindings.put(FrameworkConstants.JSAttributes.JS_FUNC_SHOW_PROMPT,
                (PromptExecutor) this::addShowPrompt);
        engineBindings.put("exit", (RestrictedFunction) this::exitFunction);
        engineBindings.put("quit", (RestrictedFunction) this::quitFunction);
        JsFunctionRegistry jsFunctionRegistrar = FrameworkServiceDataHolder.getInstance()
                .getJsFunctionRegistry();
        if (jsFunctionRegistrar != null) {
            Map<String, Object> functionMap = jsFunctionRegistrar
                    .getSubsystemFunctionsMap(JsFunctionRegistry.Subsystem.SEQUENCE_HANDLER);
            functionMap.forEach(globalBindings::put);
        }
        Invocable invocable = (Invocable) engine;
        engine.eval(script);
        invocable.invokeFunction(FrameworkConstants.JSAttributes.JS_FUNC_ON_LOGIN_REQUEST,
                new JsAuthenticationContext(authenticationContext));
        JsGraphBuilderFactory.persistCurrentContext(authenticationContext, engine);
    } catch (ScriptException e) {
        result.setBuildSuccessful(false);
        result.setErrorReason("Error in executing the Javascript. Nested exception is: " + e.getMessage());
        if (log.isDebugEnabled()) {
            log.debug("Error in executing the Javascript.", e);
        }
    } catch (NoSuchMethodException e) {
        result.setBuildSuccessful(false);
        result.setErrorReason("Error in executing the Javascript. "
                + FrameworkConstants.JSAttributes.JS_FUNC_ON_LOGIN_REQUEST + " function is not defined.");
        if (log.isDebugEnabled()) {
            log.debug("Error in executing the Javascript.", e);
        }
    } finally {
        clearCurrentBuilder();
    }
    return this;
}
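
The functionMap.forEach(globalBindings::put) line above is worth noting: any two-argument method whose parameter types match the key and value types can serve as the BiConsumer. A minimal sketch:

import java.util.HashMap;
import java.util.Map;

public class MethodReferenceSketch {
    public static void main(String[] args) {
        Map<String, Object> functionMap = Map.of("version", "1.0", "retries", 3);
        Map<String, Object> bindings = new HashMap<>();

        // bindings::put matches BiConsumer<String, Object>; the return value of
        // put is simply discarded.
        functionMap.forEach(bindings::put);
        System.out.println(bindings);
    }
}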

From source file:com.nextdoor.bender.ipc.http.AbstractHttpTransportFactory.java

protected HttpClientBuilder getClientBuilder(boolean useSSL, String url, Map<String, String> stringHeaders,
        int socketTimeout) {

    HttpClientBuilder cb = HttpClientBuilder.create();

    /*
     * Setup SSL
     */
    if (useSSL) {
        /*
         * All trusting SSL context
         */
        try {
            cb.setSSLContext(getSSLContext());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }

        /*
         * All trusting hostname verifier
         */
        cb.setSSLHostnameVerifier(new HostnameVerifier() {
            public boolean verify(String s, SSLSession sslSession) {
                return true;
            }
        });
    }

    /*
     * Add default headers
     */
    ArrayList<BasicHeader> headers = new ArrayList<BasicHeader>(stringHeaders.size());
    stringHeaders.forEach((k, v) -> headers.add(new BasicHeader(k, v)));
    cb.setDefaultHeaders(headers);

    /*
     * Set socket timeout and transport threads
     */
    SocketConfig sc = SocketConfig.custom().setSoTimeout(socketTimeout).build();
    cb.setDefaultSocketConfig(sc);
    cb.setMaxConnPerRoute(this.config.getThreads());
    cb.setMaxConnTotal(this.config.getThreads());

    return cb;
}
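
The header-building forEach above is equivalent to a stream map-collect; a sketch with plain strings standing in for BasicHeader:

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class HeaderListSketch {
    public static void main(String[] args) {
        Map<String, String> stringHeaders = Map.of(
                "Accept", "application/json", "X-Api-Key", "example");

        // Each entry becomes one header line; with forEach this would be
        // stringHeaders.forEach((k, v) -> headers.add(k + ": " + v)).
        List<String> headers = stringHeaders.entrySet().stream()
                .map(e -> e.getKey() + ": " + e.getValue())
                .collect(Collectors.toList());
        System.out.println(headers);
    }
}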

From source file:msi.gaml.statements.SaveStatement.java

private void computeInitsFromAttributesFacet(final IScope scope, final IExpression attributesFacet,
        final Map<String, IExpression> values, final SpeciesDescription species) throws GamaRuntimeException {
    if (attributesFacet instanceof MapExpression) {
        final Map<IExpression, IExpression> map = ((MapExpression) attributesFacet).getElements();
        map.forEach((key, value) -> {
            final String name = Cast.asString(scope, key.value(scope));
            values.put(name, value);
        });
    }
}

From source file:org.opencb.opencga.storage.core.search.solr.SolrQueryParser.java

/**
 * Create a SolrQuery object from Query and QueryOptions.
 *
 * @param query         Query
 * @param queryOptions  Query Options
 * @return              SolrQuery
 */
public SolrQuery parse(Query query, QueryOptions queryOptions) {
    List<String> filterList = new ArrayList<>();

    SolrQuery solrQuery = new SolrQuery();

    //-------------------------------------
    // QueryOptions processing
    //-------------------------------------
    if (queryOptions.containsKey(QueryOptions.INCLUDE)) {
        solrQuery.setFields(queryOptions.getAsStringList(QueryOptions.INCLUDE).toString());
    }

    if (queryOptions.containsKey(QueryOptions.LIMIT)) {
        solrQuery.setRows(queryOptions.getInt(QueryOptions.LIMIT));
    }

    if (queryOptions.containsKey(QueryOptions.SKIP)) {
        solrQuery.setStart(queryOptions.getInt(QueryOptions.SKIP));
    }

    if (queryOptions.containsKey(QueryOptions.SORT)) {
        solrQuery.addSort(queryOptions.getString(QueryOptions.SORT), getSortOrder(queryOptions));
    }

    //-------------------------------------
    // Query processing
    //-------------------------------------

    // OR conditions
    // create a list for xrefs (without genes), genes, regions and cts
    // the classifyIds function differentiates xrefs from genes
    List<String> xrefs = new ArrayList<>();
    List<String> genes = new ArrayList<>();
    List<Region> regions = new ArrayList<>();
    List<String> consequenceTypes = new ArrayList<>();

    // xref
    classifyIds(VariantQueryParams.ANNOT_XREF.key(), query, xrefs, genes);
    classifyIds(VariantQueryParams.ID.key(), query, xrefs, genes);
    classifyIds(VariantQueryParams.GENE.key(), query, xrefs, genes);
    classifyIds(VariantQueryParams.ANNOT_CLINVAR.key(), query, xrefs, genes);
    classifyIds(VariantQueryParams.ANNOT_COSMIC.key(), query, xrefs, genes);
    //        classifyIds(VariantQueryParams.ANNOT_HPO.key(), query, xrefs, genes);

    // Convert region string to region objects
    if (query.containsKey(VariantQueryParams.REGION.key())) {
        regions = Region.parseRegions(query.getString(VariantQueryParams.REGION.key()));
    }

    // consequence types (cts)
    if (query.containsKey(VariantQueryParams.ANNOT_CONSEQUENCE_TYPE.key())
            && StringUtils.isNotEmpty(query.getString(VariantQueryParams.ANNOT_CONSEQUENCE_TYPE.key()))) {
        consequenceTypes = Arrays
                .asList(query.getString(VariantQueryParams.ANNOT_CONSEQUENCE_TYPE.key()).split("[,;]"));
    }

    // goal: [((xrefs OR regions) AND cts) OR (genes AND cts)] AND ... AND ...
    if (consequenceTypes.size() > 0) {
        if (genes.size() > 0) {
            // consequence types and genes
            String or = buildXrefOrRegionAndConsequenceType(xrefs, regions, consequenceTypes);
            if (xrefs.size() == 0 && regions.size() == 0) {
                // no xrefs or regions: genes AND cts
                filterList.add(buildGeneAndCt(genes, consequenceTypes));
            } else {
                // otherwise: [((xrefs OR regions) AND cts) OR (genes AND cts)]
                filterList.add("(" + or + ") OR (" + buildGeneAndCt(genes, consequenceTypes) + ")");
            }
        } else {
            // consequence types but no genes: (xrefs OR regions) AND cts
            // in this case, the resulting string will never be null, because there are some consequence types!!
            filterList.add(buildXrefOrRegionAndConsequenceType(xrefs, regions, consequenceTypes));
        }
    } else {
        // no consequence types: (xrefs OR regions) but we must add "OR genes", i.e.: xrefs OR regions OR genes
        // no consequence types: (xrefs OR regions) but we must add "OR genMINes", i.e.: xrefs OR regions OR genes
        // we must make an OR with xrefs, genes and regions and add it to the "AND" filter list
        String orXrefs = buildXrefOrGeneOrRegion(xrefs, genes, regions);
        if (!orXrefs.isEmpty()) {
            filterList.add(orXrefs);
        }
    }

    // now we continue with the other AND conditions...
    // studies
    String key = VariantQueryParams.STUDIES.key();
    if (isValidParam(query, VariantQueryParams.STUDIES)) {
        try {
            String value = query.getString(key);
            VariantDBAdaptorUtils.QueryOperation op = checkOperator(value);
            Set<Integer> studyIds = new HashSet<>(
                    variantDBAdaptorUtils.getStudyIds(splitValue(value, op), queryOptions));
            List<String> studyNames = new ArrayList<>(studyIds.size());
            Map<String, Integer> map = variantDBAdaptorUtils.getStudyConfigurationManager().getStudies(null);
            if (map != null && map.size() > 1) {
                map.forEach((name, id) -> {
                    if (studyIds.contains(id)) {
                        String[] s = name.split(":");
                        studyNames.add(s[s.length - 1]);
                    }
                });

                if (op == null || op == VariantDBAdaptorUtils.QueryOperation.OR) {
                    filterList.add(parseCategoryTermValue("studies", StringUtils.join(studyNames, ",")));
                } else {
                    filterList.add(parseCategoryTermValue("studies", StringUtils.join(studyNames, ";")));
                }
            }
        } catch (NullPointerException e) {
            logger.error(e.getMessage());
            e.printStackTrace();
        }
    }

    // type (t)
    key = VariantQueryParams.TYPE.key();
    if (StringUtils.isNotEmpty(query.getString(key))) {
        filterList.add(parseCategoryTermValue("type", query.getString(key)));
    }

    // Gene biotype
    key = VariantQueryParams.ANNOT_BIOTYPE.key();
    if (StringUtils.isNotEmpty(query.getString(key))) {
        filterList.add(parseCategoryTermValue("biotypes", query.getString(key)));
    }

    // protein-substitution
    key = VariantQueryParams.ANNOT_PROTEIN_SUBSTITUTION.key();
    if (StringUtils.isNotEmpty(query.getString(key))) {
        filterList.add(parseScoreValue(query.getString(key)));
    }

    // conservation
    key = VariantQueryParams.ANNOT_CONSERVATION.key();
    if (StringUtils.isNotEmpty(query.getString(key))) {
        filterList.add(parseScoreValue(query.getString(key)));
    }

    // cadd, functional score
    key = VariantQueryParams.ANNOT_FUNCTIONAL_SCORE.key();
    if (StringUtils.isNotEmpty(query.getString(key))) {
        filterList.add(parseScoreValue(query.getString(key)));
    }

    // maf population frequency
    // in the model: "popFreq__1kG_phase3__CLM":0.005319148767739534
    key = VariantQueryParams.ANNOT_POPULATION_MINOR_ALLELE_FREQUENCY.key();
    if (StringUtils.isNotEmpty(query.getString(key))) {
        filterList.add(parsePopValue("popFreq", query.getString(key)));
    }

    // stats maf
    // in the model: "stats__1kg_phase3__ALL"=0.02
    key = VariantQueryParams.STATS_MAF.key();
    if (StringUtils.isNotEmpty(query.getString(key))) {
        filterList.add(parsePopValue("stats", query.getString(key)));
    }

    // GO
    key = VariantQueryParams.ANNOT_GO.key();
    if (StringUtils.isNotEmpty(query.getString(key))) {
        List<String> gos = Arrays.asList(query.getString(key).split(","));
        Set genesByGo = variantDBAdaptorUtils.getGenesByGo(gos);
        if (genesByGo != null && genesByGo.size() > 0) {
            filterList.add(parseCategoryTermValue("xrefs", StringUtils.join(genesByGo, ",")));
        }
    }

    // hpo
    key = VariantQueryParams.ANNOT_HPO.key();
    if (StringUtils.isNotEmpty(query.getString(key))) {
        filterList.add(parseCategoryTermValue("traits", query.getString(key)));
    }

    // clinvar
    key = VariantQueryParams.ANNOT_CLINVAR.key();
    if (StringUtils.isNotEmpty(query.getString(key))) {
        filterList.add(parseCategoryTermValue("traits", query.getString(key)));
    }

    // traits
    key = VariantQueryParams.ANNOT_TRAITS.key();
    if (StringUtils.isNotEmpty(query.getString(key))) {
        filterList.add(parseCategoryTermValue("traits", query.getString(key)));
    }

    //-------------------------------------
    // Facet processing
    //-------------------------------------

    if (query.containsKey("facet.field")) {
        solrQuery.addFacetField((query.get("facet.field").toString()));
    }

    if (query.containsKey("facet.fields")) {
        solrQuery.addFacetField((query.get("facet.fields").toString().split(",")));
    }

    if (query.containsKey("facet.query")) {
        solrQuery.addFacetQuery(query.get("facet.query").toString());
    }

    if (query.containsKey("facet.prefix")) {
        solrQuery.setFacetPrefix(query.get("facet.prefix").toString());
    }

    if (query.containsKey("facet.range")) {

        Map<String, Map<String, Number>> rangeFields = (Map<String, Map<String, Number>>) query
                .get("facet.range");

        for (String k : rangeFields.keySet()) {
            Number rangeStart = rangeFields.get(k).get("facet.range.start");
            Number rangeEnd = rangeFields.get(k).get("facet.range.end");
            Number rangeGap = rangeFields.get(k).get("facet.range.gap");
            solrQuery.addNumericRangeFacet(k, rangeStart, rangeEnd, rangeGap);
        }
    }

    logger.debug("query = {}\n", query.toJson());

    solrQuery.setQuery("*:*");
    filterList.forEach(filter -> {
        solrQuery.addFilterQuery(filter);
        logger.debug("Solr fq: {}\n", filter);
    });

    return solrQuery;
}
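
One closing distinction: the final filterList.forEach here is Iterable.forEach, which takes a one-argument Consumer, while Map.forEach takes a BiConsumer of key and value. A minimal sketch of both:

import java.util.List;
import java.util.Map;

public class ForEachOverloadsSketch {
    public static void main(String[] args) {
        // Map.forEach: BiConsumer over (key, value) pairs.
        Map<String, String> filters = Map.of("type", "SNV");
        filters.forEach((field, value) -> System.out.println(field + ":" + value));

        // Iterable.forEach (List here): Consumer over single elements.
        List<String> filterList = List.of("type:SNV", "biotypes:protein_coding");
        filterList.forEach(System.out::println);
    }
}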