Example usage for java.util Map getOrDefault

List of usage examples for java.util Map getOrDefault

Introduction

On this page you can find usage examples for java.util.Map#getOrDefault, drawn from real-world source files.

Prototype

default V getOrDefault(Object key, V defaultValue) 

Document

Returns the value to which the specified key is mapped, or defaultValue if this map contains no mapping for the key.
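For orientation, here is a minimal sketch of the method in isolation (the map name and contents are illustrative; java.util.Map and java.util.HashMap are assumed to be imported):

Map<String, Integer> wordCounts = new HashMap<>();
wordCounts.put("alpha", 3);

// Key present: returns the mapped value.
int alpha = wordCounts.getOrDefault("alpha", 0); // 3

// Key absent: returns the supplied default instead of null.
int beta = wordCounts.getOrDefault("beta", 0);   // 0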

Usage

From source file:com.formkiq.core.service.generator.pdfbox.PdfEditorServiceImpl.java

@Override
public Pair<FormJSON, List<WorkflowOutputFormField>> getOutputFormFields(final String filename,
        final byte[] data) throws IOException {

    List<WorkflowOutputFormField> wofields = new ArrayList<>();

    PDDocument doc = loadPDF(data);

    try {

        Map<COSDictionary, Integer> obMap = getCOSDictionaryToPageNumberMap(doc);

        Map<Integer, List<PDField>> pdMap = getPDFields(doc, obMap);

        Map<Integer, List<PdfTextField>> textsMap = getTextMap(doc);

        PDPageTree pages = doc.getDocumentCatalog().getPages();

        FormJSON form = buildFormJSON(doc, textsMap.get(Integer.valueOf(0)));

        for (int i = 0; i < pages.getCount(); i++) {

            PDPage page = pages.get(i);
            Integer pageNum = Integer.valueOf(i);

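            // Pages with no form fields fall back to an empty list.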
            List<PDField> fields = pdMap.getOrDefault(pageNum, emptyList());

            List<PdfTextField> texts = getTextForPage(textsMap, pageNum);

            List<PDRectangle> lineRects = getPageLinePaths(pages.get(i));

            Map<PDField, FormJSONField> fieldMap = buildFormSection(form, page, fields, texts, lineRects);

            List<WorkflowOutputFormField> outfields = createFieldOutputs(form, fields, fieldMap);

            wofields.addAll(outfields);
        }

        return Pair.of(form, wofields);

    } finally {
        doc.close();
    }
}

From source file:com.oneops.transistor.ws.rest.TransistorRestController.java

private Map<String, Object> getCostTotals(List<CostData> offerings) {
    Map<String, Object> map = new HashMap<>();
    Map<String, BigDecimal> byCloud = new HashMap<>();
    Map<String, BigDecimal> byPlatform = new HashMap<>();
    Map<String, BigDecimal> byService = new HashMap<>();
    BigDecimal total = BigDecimal.ZERO;
    for (CostData cost : offerings) {
        String cloud = cost.getCloud().getCiName();
        String[] array = cost.getRfc().getNsPath().split("/");
        String platform = "";
        if (array.length > 1) {
            platform = String.join("/", array[array.length - 2], array[array.length - 1]);
        }
        for (CmsCISimple offering : cost.getOfferings()) {
            BigDecimal rate = new BigDecimal(offering.getCiAttributes().get("cost_rate"));
            String serviceType = offering.getCiAttributes().get("service_type");
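            // Accumulate the offering rate per platform, service type, and cloud, treating missing keys as ZERO.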
            byPlatform.put(platform, byPlatform.getOrDefault(platform, BigDecimal.ZERO).add(rate));
            byService.put(serviceType, byService.getOrDefault(serviceType, BigDecimal.ZERO).add(rate));
            byCloud.put(cloud, byCloud.getOrDefault(cloud, BigDecimal.ZERO).add(rate));
            total = total.add(rate);
        }
    }
    map.put("by_cloud", byCloud);
    map.put("by_platform", byPlatform);
    map.put("by_service", byService);
    map.put("total", total);
    return map;
}

From source file:org.mule.modules.wechat.WechatConnector.java

/**
 * Upload Article Message Data
 * <br><a href="http://admin.wechat.com/wiki/index.php?title=Advanced_Broadcast_Interface#Upload_Article_Message_Data">http://admin.wechat.com/wiki/index.php?title=Advanced_Broadcast_Interface#Upload_Article_Message_Data</a>
 * 
 * @param accessToken The certificate for the calling API. Mandatory if "Self Manage Access Token" config is true
 * @param ApiName Upload Article Message Data
 * @param articles Articles of Upload Article Message Data API
 * @return Hashmap
 * @throws Exception If anything fails
 */
@Processor
public Map<String, Object> uploadArticleMessageData(
        @Placement(tab = "Advanced", group = "Advanced") @Optional String accessToken,
        @MetaDataKeyParam @Default("UploadArticleMessageData") String ApiName,
        @Default("#[payload]") List<Map<String, Object>> articles) throws Exception {
    String httpsURL = "";
    if (!config.getSelfManageAccessToken()) {
        httpsURL = "https://api.weixin.qq.com/cgi-bin/media/uploadnews?access_token="
                + WechatConnector.accessToken;
    } else {
        httpsURL = "https://api.weixin.qq.com/cgi-bin/media/uploadnews?access_token=" + accessToken;
    }

    // Create Text Message JSON
    JSONObject obj = new JSONObject();
    JSONArray arraySubObj = new JSONArray();
    for (Map<String, Object> article : articles) {
        JSONObject _subObj = new JSONObject();
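        // Missing article attributes default to an empty string (0 for show_cover_pic).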
        _subObj.put("thumb_media_id", article.getOrDefault("thumb_media_id", ""));
        _subObj.put("author", article.getOrDefault("author", ""));
        _subObj.put("title", article.getOrDefault("title", ""));
        _subObj.put("content_source_url", article.getOrDefault("content_source_url", ""));
        _subObj.put("content", article.getOrDefault("content", ""));
        _subObj.put("digest", article.getOrDefault("digest", ""));
        _subObj.put("show_cover_pic", article.getOrDefault("show_cover_pic", 0));
        arraySubObj.put(_subObj);
    }
    obj.put("articles", arraySubObj);

    // Post to Wechat
    HttpsConnection con = new HttpsConnection();
    Map<String, Object> map = con.post(httpsURL, obj.toString());

    return map;
}

From source file:com.netflix.spinnaker.halyard.deploy.spinnaker.v1.service.distributed.kubernetes.KubernetesDistributedService.java

@Override
default RunningServiceDetails getRunningServiceDetails(AccountDeploymentDetails<KubernetesAccount> details,
        SpinnakerRuntimeSettings runtimeSettings) {
    ServiceSettings settings = runtimeSettings.getServiceSettings(getService());
    RunningServiceDetails res = new RunningServiceDetails();

    KubernetesClient client = KubernetesProviderUtils.getClient(details);
    String name = getServiceName();
    String namespace = getNamespace(settings);

    RunningServiceDetails.LoadBalancer lb = new RunningServiceDetails.LoadBalancer();
    lb.setExists(client.services().inNamespace(namespace).withName(name).get() != null);
    res.setLoadBalancer(lb);

    List<Pod> pods = client.pods().inNamespace(namespace).withLabel("load-balancer-" + name, "true").list()
            .getItems();
    pods.addAll(
            client.pods().inNamespace(namespace).withLabel("load-balancer-" + name, "false").list().getItems());

    Map<Integer, List<Instance>> instances = res.getInstances();
    for (Pod pod : pods) {
        String podName = pod.getMetadata().getName();
        String serverGroupName = podName.substring(0, podName.lastIndexOf("-"));
        Names parsedName = Names.parseName(serverGroupName);
        Integer version = parsedName.getSequence();
        if (version == null) {
            throw new IllegalStateException("Server group for service " + getServiceName()
                    + " has unknown sequence (" + serverGroupName + ")");
        }

        String location = pod.getMetadata().getNamespace();
        String id = pod.getMetadata().getName();

        Instance instance = new Instance().setId(id).setLocation(location);
        List<ContainerStatus> containerStatuses = pod.getStatus().getContainerStatuses();
        if (!containerStatuses.isEmpty() && containerStatuses.stream().allMatch(ContainerStatus::getReady)) {
            instance.setHealthy(true);
        }

        if (!containerStatuses.isEmpty() && containerStatuses.stream()
                .allMatch(s -> s.getState().getRunning() != null && s.getState().getTerminated() == null)) {
            instance.setRunning(true);
        }

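        // Group instances by server-group version, creating the list on first encounter.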
        List<Instance> knownInstances = instances.getOrDefault(version, new ArrayList<>());
        knownInstances.add(instance);
        instances.put(version, knownInstances);
    }

    List<ReplicaSet> replicaSets = client.extensions().replicaSets().inNamespace(settings.getLocation()).list()
            .getItems();
    for (ReplicaSet rs : replicaSets) {
        String rsName = rs.getMetadata().getName();
        Names parsedRsName = Names.parseName(rsName);
        if (!parsedRsName.getCluster().equals(getServiceName())) {
            continue;
        }

        instances.computeIfAbsent(parsedRsName.getSequence(), i -> new ArrayList<>());
    }

    return res;
}

From source file:org.opencb.opencga.storage.core.variant.adaptors.VariantDBAdaptorTest.java

@Test
public void testGetAllVariants_drugs() {
    //ANNOT_DRUG
    Query query;
    Map<String, Integer> drugs = new HashMap<>();
    for (Variant variant : allVariants.getResult()) {
        Set<String> drugsInVariant = new HashSet<>();
        for (GeneDrugInteraction drugInteraction : variant.getAnnotation().getGeneDrugInteraction()) {
            drugsInVariant.add(drugInteraction.getDrugName());
        }
        for (String flag : drugsInVariant) {
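            // Count how many variants mention each drug, starting from 0 for unseen drugs.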
            drugs.put(flag, drugs.getOrDefault(flag, 0) + 1);
        }
    }

    for (Map.Entry<String, Integer> entry : drugs.entrySet()) {
        if (entry.getKey().contains(",")) {
            continue;
        }
        query = new Query(ANNOT_DRUG.key(), entry.getKey());
        queryResult = dbAdaptor.get(query, null);
        assertEquals(entry.getKey(), entry.getValue().intValue(), queryResult.getNumResults());
    }

}

From source file:com.netflix.spinnaker.halyard.deploy.spinnaker.v1.service.distributed.kubernetes.v1.KubernetesV1DistributedService.java

@Override
default RunningServiceDetails getRunningServiceDetails(AccountDeploymentDetails<KubernetesAccount> details,
        SpinnakerRuntimeSettings runtimeSettings) {
    ServiceSettings settings = runtimeSettings.getServiceSettings(getService());
    RunningServiceDetails res = new RunningServiceDetails();

    KubernetesClient client = KubernetesV1ProviderUtils.getClient(details);
    String name = getServiceName();
    String namespace = getNamespace(settings);

    RunningServiceDetails.LoadBalancer lb = new RunningServiceDetails.LoadBalancer();
    lb.setExists(client.services().inNamespace(namespace).withName(name).get() != null);
    res.setLoadBalancer(lb);

    List<Pod> pods = client.pods().inNamespace(namespace).withLabel("load-balancer-" + name, "true").list()
            .getItems();
    pods.addAll(
            client.pods().inNamespace(namespace).withLabel("load-balancer-" + name, "false").list().getItems());

    Map<Integer, List<Instance>> instances = res.getInstances();
    for (Pod pod : pods) {
        String podName = pod.getMetadata().getName();
        String serverGroupName = podName.substring(0, podName.lastIndexOf("-"));
        Names parsedName = Names.parseName(serverGroupName);
        Integer version = parsedName.getSequence();
        if (version == null) {
            throw new IllegalStateException("Server group for service " + getServiceName()
                    + " has unknown sequence (" + serverGroupName + ")");
        }

        String location = pod.getMetadata().getNamespace();
        String id = pod.getMetadata().getName();

        Instance instance = new Instance().setId(id).setLocation(location);
        List<ContainerStatus> containerStatuses = pod.getStatus().getContainerStatuses();
        if (!containerStatuses.isEmpty() && containerStatuses.stream().allMatch(ContainerStatus::getReady)) {
            instance.setHealthy(true);
        }

        if (!containerStatuses.isEmpty() && containerStatuses.stream()
                .allMatch(s -> s.getState().getRunning() != null && s.getState().getTerminated() == null)) {
            instance.setRunning(true);
        }

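        // Group instances by server-group version, creating the list on first encounter.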
        List<Instance> knownInstances = instances.getOrDefault(version, new ArrayList<>());
        knownInstances.add(instance);
        instances.put(version, knownInstances);
    }

    List<ReplicaSet> replicaSets = client.extensions().replicaSets().inNamespace(settings.getLocation()).list()
            .getItems();
    for (ReplicaSet rs : replicaSets) {
        String rsName = rs.getMetadata().getName();
        Names parsedRsName = Names.parseName(rsName);
        if (!parsedRsName.getCluster().equals(getServiceName())) {
            continue;
        }

        instances.computeIfAbsent(parsedRsName.getSequence(), i -> new ArrayList<>());
    }

    return res;
}

From source file:org.esa.s2tbx.dataio.s2.l1b.Sentinel2L1BProductReader.java

@Override
protected Product getMosaicProduct(File metadataFile) throws IOException {

    boolean isAGranule = S2L1BGranuleMetadataFilename.isGranuleFilename(metadataFile.getName());

    if (isAGranule) {
        logger.fine("Reading a granule");
    }

    updateTileLayout(metadataFile.toPath(), isAGranule);

    Objects.requireNonNull(metadataFile);

    String filterTileId = null;
    File productMetadataFile = null;

    // we need to recover parent metadata file if we have a granule
    if (isAGranule) {
        try {
            Objects.requireNonNull(metadataFile.getParentFile());
            Objects.requireNonNull(metadataFile.getParentFile().getParentFile());
            Objects.requireNonNull(metadataFile.getParentFile().getParentFile().getParentFile());
        } catch (NullPointerException npe) {
            throw new IOException(
                    String.format("Unable to retrieve the product associated to granule metadata file [%s]",
                            metadataFile.getName()));
        }

        File up2levels = metadataFile.getParentFile().getParentFile().getParentFile();
        File tileIdFilter = metadataFile.getParentFile();

        filterTileId = tileIdFilter.getName();

        File[] files = up2levels.listFiles();
        if (files != null) {
            for (File f : files) {
                if (S2ProductFilename.isProductFilename(f.getName())
                        && S2ProductFilename.isMetadataFilename(f.getName())) {
                    productMetadataFile = f;
                    break;
                }
            }
        }
        if (productMetadataFile == null) {
            throw new IOException(
                    String.format("Unable to retrieve the product associated to granule metadata file [%s]",
                            metadataFile.getName()));
        }
    } else {
        productMetadataFile = metadataFile;
    }

    final String aFilter = filterTileId;

    L1bMetadata metadataHeader;

    try {
        metadataHeader = parseHeader(productMetadataFile, getConfig());
    } catch (JDOMException | JAXBException e) {
        SystemUtils.LOG.severe(Utils.getStackTrace(e));
        throw new IOException("Failed to parse metadata in " + productMetadataFile.getName());
    }

    L1bSceneDescription sceneDescription = L1bSceneDescription.create(metadataHeader, getProductResolution());
    logger.fine("Scene Description: " + sceneDescription);

    File productDir = getProductDir(productMetadataFile);
    initCacheDir(productDir);

    ProductCharacteristics productCharacteristics = metadataHeader.getProductCharacteristics();

    List<L1bMetadata.Tile> tileList = metadataHeader.getTileList();

    if (isAGranule) {
        tileList = metadataHeader.getTileList().stream().filter(p -> p.getId().equalsIgnoreCase(aFilter))
                .collect(Collectors.toList());
    }

    Map<String, Tile> tilesById = new HashMap<>(tileList.size());
    for (Tile aTile : tileList) {
        tilesById.put(aTile.getId(), aTile);
    }

    // Order bands by physicalBand
    Map<String, S2BandInformation> sin = new HashMap<>();
    for (S2BandInformation bandInformation : productCharacteristics.getBandInformations()) {
        sin.put(bandInformation.getPhysicalBand(), bandInformation);
    }

    Map<Pair<String, String>, Map<String, File>> detectorBandInfoMap = new HashMap<>();
    Map<String, L1BBandInfo> bandInfoByKey = new HashMap<>();
    if (productCharacteristics.getBandInformations() != null) {
        for (Tile tile : tileList) {
            // TODO : rely on the imageFileTemplate hosted by the S2SpectralBandInformation instance.
            S2L1BGranuleDirFilename gf = (S2L1BGranuleDirFilename) S2L1BGranuleDirFilename.create(tile.getId());
            Guardian.assertNotNull("Product files don't match regular expressions", gf);

            for (S2BandInformation bandInformation : productCharacteristics.getBandInformations()) {
                S2GranuleImageFilename granuleFileName = gf.getImageFilename(bandInformation.getPhysicalBand());
                String imgFilename = "GRANULE" + File.separator + tile.getId() + File.separator + "IMG_DATA"
                        + File.separator + granuleFileName.name;

                logger.finer("Adding file " + imgFilename + " to band: " + bandInformation.getPhysicalBand()
                        + ", and detector: " + gf.getDetectorId());

                File file = new File(productDir, imgFilename);
                if (file.exists()) {
                    Pair<String, String> key = new Pair<>(bandInformation.getPhysicalBand(),
                            gf.getDetectorId());
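                    // Reuse the file map already collected for this (band, detector) pair, or start a new one.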
                    Map<String, File> fileMapper = detectorBandInfoMap.getOrDefault(key, new HashMap<>());
                    fileMapper.put(tile.getId(), file);
                    if (!detectorBandInfoMap.containsKey(key)) {
                        detectorBandInfoMap.put(key, fileMapper);
                    }
                } else {
                    logger.warning(String.format("Warning: missing file %s\n", file));
                }
            }
        }

        if (!detectorBandInfoMap.isEmpty()) {
            for (Pair<String, String> key : detectorBandInfoMap.keySet()) {
                L1BBandInfo tileBandInfo = createBandInfoFromHeaderInfo(key.getSecond(),
                        sin.get(key.getFirst()), detectorBandInfoMap.get(key));

                // composite band name : detector + band
                String keyMix = key.getSecond() + key.getFirst();
                bandInfoByKey.put(keyMix, tileBandInfo);
            }
        }
    } else {
        // fixme Look for optional info in schema
        logger.warning("There are no spectral information here !");
    }

    Product product;

    if (sceneDescription != null) {
        product = new Product(FileUtils.getFilenameWithoutExtension(productMetadataFile),
                "S2_MSI_" + productCharacteristics.getProcessingLevel(),
                sceneDescription.getSceneRectangle().width, sceneDescription.getSceneRectangle().height);

        Map<String, GeoCoding> geoCodingsByDetector = new HashMap<>();

        if (!bandInfoByKey.isEmpty()) {
            for (L1BBandInfo tbi : bandInfoByKey.values()) {
                if (!geoCodingsByDetector.containsKey(tbi.detectorId)) {
                    GeoCoding gc = getGeoCodingFromTileBandInfo(tbi, tilesById, product);
                    geoCodingsByDetector.put(tbi.detectorId, gc);
                }
            }
        }

        addDetectorBands(product, bandInfoByKey, new L1bSceneMultiLevelImageFactory(sceneDescription,
                Product.findImageToModelTransform(product.getSceneGeoCoding())));
    } else {
        product = new Product(FileUtils.getFilenameWithoutExtension(productMetadataFile),
                "S2_MSI_" + productCharacteristics.getProcessingLevel());
    }

    product.setFileLocation(productMetadataFile.getParentFile());

    for (MetadataElement metadataElement : metadataHeader.getMetadataElements()) {
        product.getMetadataRoot().addElement(metadataElement);
    }

    return product;
}

From source file:com.ikanow.aleph2.graph.titan.utils.TestTitanGraphBuildingUtils.java

@Test
public void test_groupNewEdgesAndVertices() {

    // Graph schema
    final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class)
            .with(GraphSchemaBean::deduplication_fields,
                    Arrays.asList(GraphAnnotationBean.name, GraphAnnotationBean.type))
            .done().get();

    final MutableStatsBean mutable_stats_bean = new MutableStatsBean();

    final ObjectNode key1 = _mapper.createObjectNode().put(GraphAnnotationBean.name, "1.1.1.1")
            .put(GraphAnnotationBean.type, "ip-addr");

    final ObjectNode key2 = _mapper.createObjectNode().put(GraphAnnotationBean.name, "host.com")
            .put(GraphAnnotationBean.type, "domain-name");

    final List<ObjectNode> test_vertices_and_edges = Arrays.asList(
            (ObjectNode) _mapper.createObjectNode()
                    .put(GraphAnnotationBean.type, GraphAnnotationBean.ElementType.vertex.toString())
                    .put(GraphAnnotationBean.label, "1.1.1.1").set(GraphAnnotationBean.id, key1),
            (ObjectNode) _mapper.createObjectNode() //(invalid node, no type)
                    .put(GraphAnnotationBean.label, "1.1.1.2").set(GraphAnnotationBean.id,
                            _mapper.createObjectNode().put(GraphAnnotationBean.name, "1.1.1.2")
                                    .put(GraphAnnotationBean.type, "ip-addr")),
            (ObjectNode) _mapper.createObjectNode().put(GraphAnnotationBean.label, "host.com")
                    .put(GraphAnnotationBean.type, GraphAnnotationBean.ElementType.vertex.toString())
                    .set(GraphAnnotationBean.id, key2),
            (ObjectNode) ((ObjectNode) _mapper.createObjectNode()
                    .put(GraphAnnotationBean.type, GraphAnnotationBean.ElementType.edge.toString())
                    .put(GraphAnnotationBean.label, "dns-connection").set(GraphAnnotationBean.inV,
                            _mapper.createObjectNode().put(GraphAnnotationBean.name, "host.com")
                                    .put(GraphAnnotationBean.type, "domain-name")))
                                            .set(GraphAnnotationBean.outV,
                                                    _mapper.createObjectNode()
                                                            .put(GraphAnnotationBean.name, "1.1.1.1")
                                                            .put(GraphAnnotationBean.type, "ip-addr")));

    final Map<ObjectNode, Tuple2<List<ObjectNode>, List<ObjectNode>>> ret_val = TitanGraphBuildingUtils
            .groupNewEdgesAndVertices(graph_schema, mutable_stats_bean, test_vertices_and_edges.stream());

    assertEquals(2, ret_val.keySet().size());

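    // Fall back to a pair of empty lists when a key is absent, so the size assertions below still run.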
    final Tuple2<List<ObjectNode>, List<ObjectNode>> ret_val_1 = ret_val.getOrDefault(key1,
            Tuples._2T(Collections.emptyList(), Collections.emptyList()));
    final Tuple2<List<ObjectNode>, List<ObjectNode>> ret_val_2 = ret_val.getOrDefault(key2,
            Tuples._2T(Collections.emptyList(), Collections.emptyList()));

    assertEquals(1, ret_val_1._1().size());
    assertEquals("1.1.1.1", ret_val_1._1().get(0).get(GraphAnnotationBean.label).asText());
    assertEquals(1, ret_val_1._2().size());
    assertEquals("dns-connection", ret_val_1._2().get(0).get(GraphAnnotationBean.label).asText());
    assertEquals(1, ret_val_2._1().size());
    assertEquals("host.com", ret_val_2._1().get(0).get(GraphAnnotationBean.label).asText());
    assertEquals(1, ret_val_2._2().size());
    assertEquals("dns-connection", ret_val_2._2().get(0).get(GraphAnnotationBean.label).asText());
}

From source file:io.dockstore.webservice.helpers.BitBucketSourceCodeRepo.java

@Override
public Workflow getNewWorkflow(String repositoryId, Optional<Workflow> existingWorkflow) {
    // repository id of the form owner/name
    String[] id = repositoryId.split("/");
    String owner = id[0];
    String name = id[1];

    // Create new workflow object based on repository ID
    Workflow workflow = new Workflow();

    workflow.setOrganization(owner);
    workflow.setRepository(name);
    workflow.setGitUrl(BITBUCKET_GIT_URL_PREFIX + repositoryId + BITBUCKET_GIT_URL_SUFFIX);
    workflow.setLastUpdated(new Date());
    // make sure path is constructed
    workflow.setPath(workflow.getPath());

    if (!existingWorkflow.isPresent()) {
        // when there is no existing workflow at all, just return a stub workflow. Also set descriptor type to default cwl.
        workflow.setDescriptorType("cwl");
        return workflow;
    }
    if (existingWorkflow.get().getMode() == WorkflowMode.STUB) {
        // when there is an existing stub workflow, just return the new stub as well
        return workflow;
    }

    workflow.setMode(WorkflowMode.FULL);

    // Get versions of workflow

    // If existing workflow, then set versions to existing ones
    Map<String, String> existingDefaults = new HashMap<>();
    if (existingWorkflow.isPresent()) {
        existingWorkflow.get().getWorkflowVersions().forEach(existingVersion -> existingDefaults
                .put(existingVersion.getReference(), existingVersion.getWorkflowPath()));
        copyWorkflow(existingWorkflow.get(), workflow);
    }

    // Look at each version, check for valid workflows

    String url = BITBUCKET_API_URL + "repositories/" + repositoryId + "/branches-tags";

    // Call to Bitbucket API to get list of branches for a given repo (what about tags)
    Optional<String> asString = ResourceUtilities.asString(url, bitbucketTokenContent, client);
    LOG.info(gitUsername + ": RESOURCE CALL: {}", url);

    if (asString.isPresent()) {
        String repoJson = asString.get();

        JsonElement jsonElement = new JsonParser().parse(repoJson);
        JsonObject jsonObject = jsonElement.getAsJsonObject();
        // Iterate to find branches and tags arrays
        for (Map.Entry<String, JsonElement> objectEntry : jsonObject.entrySet()) {
            JsonArray branchArray = objectEntry.getValue().getAsJsonArray();
            // Iterate over both arrays
            for (JsonElement branch : branchArray) {
                String branchName = branch.getAsJsonObject().get("name").getAsString();

                WorkflowVersion version = new WorkflowVersion();
                version.setName(branchName);
                version.setReference(branchName);
                version.setValid(false);

                // determine workflow version from previous
                String calculatedPath = existingDefaults.getOrDefault(branchName,
                        existingWorkflow.get().getDefaultWorkflowPath());
                version.setWorkflowPath(calculatedPath);

                // Get relative path of main workflow descriptor to find relative paths
                String[] path = calculatedPath.split("/");
                String basepath = "";
                for (int i = 0; i < path.length - 1; i++) {
                    basepath += path[i] + "/";
                }

                // Now grab source files
                SourceFile sourceFile;
                Set<SourceFile> sourceFileSet = new HashSet<>();
                ArrayList<String> importPaths;

                if (calculatedPath.toLowerCase().endsWith(".cwl")) {
                    sourceFile = getSourceFile(calculatedPath, repositoryId, branchName, "cwl");
                } else {
                    sourceFile = getSourceFile(calculatedPath, repositoryId, branchName, "wdl");
                }

                // Find all import files
                if (sourceFile.getContent() != null) {
                    try {
                        final File tempDesc = File.createTempFile("temp", ".descriptor", Files.createTempDir());
                        Files.write(sourceFile.getContent(), tempDesc, StandardCharsets.UTF_8);
                        importPaths = calculatedPath.toLowerCase().endsWith(".cwl") ? getCwlImports(tempDesc)
                                : getWdlImports(tempDesc);
                        for (String importPath : importPaths) {
                            LOG.info(gitUsername + ": Grabbing file " + basepath + importPath);
                            sourceFileSet.add(getSourceFile(basepath + importPath, repositoryId, branchName,
                                    importPath.toLowerCase().endsWith(".cwl") ? "cwl" : "wdl"));
                        }
                    } catch (IOException e) {
                        LOG.info(gitUsername + ": Error writing descriptor file to temp file.");
                        e.printStackTrace();
                    }
                }

                if (sourceFile.getContent() != null) {
                    version.getSourceFiles().add(sourceFile);
                }

                if (version.getSourceFiles().size() > 0) {
                    version.setValid(true);
                }

                // add extra source files here
                if (sourceFileSet.size() > 0) {
                    version.getSourceFiles().addAll(sourceFileSet);
                }

                workflow.addWorkflowVersion(version);
            }
        }

    }

    return workflow;
}

From source file:org.opennms.features.topology.plugins.topo.linkd.internal.EnhancedLinkdTopologyProvider.java

private void addNodesWithoutLinks(Map<Integer, OnmsNode> nodemap, Map<Integer, List<OnmsIpInterface>> nodeipmap,
        Map<Integer, OnmsIpInterface> nodeipprimarymap) {
    for (Entry<Integer, OnmsNode> entry : nodemap.entrySet()) {
        Integer nodeId = entry.getKey();
        OnmsNode node = entry.getValue();
        if (getVertex(getVertexNamespace(), nodeId.toString()) == null) {
            LOG.debug("Adding link-less node: {}", node.getLabel());
            // Use the primary interface, if set
            OnmsIpInterface ipInterface = nodeipprimarymap.get(nodeId);
            if (ipInterface == null) {
                // Otherwise fall back to the first interface defined
                List<OnmsIpInterface> ipInterfaces = nodeipmap.getOrDefault(nodeId, Collections.emptyList());
                if (ipInterfaces.size() > 0) {
                    ipInterface = ipInterfaces.get(0);
                }
            }
            addVertices(createVertexFor(node, ipInterface));
        }
    }
}