Example usage for java.util Map putIfAbsent

List of usage examples for java.util Map putIfAbsent

Introduction

On this page you can find example usages of java.util Map putIfAbsent.

Prototype

default V putIfAbsent(K key, V value) 

Document

If the specified key is not already associated with a value (or is mapped to null), associates it with the given value and returns null; otherwise, returns the current value.
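
A minimal sketch of these semantics (map contents and key names are illustrative):

import java.util.HashMap;
import java.util.Map;

public class PutIfAbsentDemo {
    public static void main(String[] args) {
        Map<String, Integer> scores = new HashMap<>();
        // Key absent: the mapping is created and null is returned.
        System.out.println(scores.putIfAbsent("alice", 1)); // null
        // Key present: the existing value is kept and returned.
        System.out.println(scores.putIfAbsent("alice", 2)); // 1
        // A key explicitly mapped to null also counts as absent.
        scores.put("bob", null);
        System.out.println(scores.putIfAbsent("bob", 3)); // null
        System.out.println(scores); // e.g. {bob=3, alice=1} (iteration order unspecified)
    }
}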

Usage

From source file:UserInterface.TMAnalystRole.TMAnaylstWorkAreaJPanel.java

private void btnRouteAnalysisActionPerformed(java.awt.event.ActionEvent evt) {
    Line selectedLine = (Line) cboActiveLines.getSelectedItem();
    ArrayList<Route> routeList = new ArrayList<>();

    for (Segment s : selectedLine.getSegments()) {
        routeList.add(s.getRoutes().get(0));
    }

    ArrayList<Segment> segmentList = selectedLine.getSegments();
    for (int i = segmentList.size() - 1; i >= 0; i--) {
        routeList.add(segmentList.get(i).getRoutes().get(1));
    }

    int days = (int) spnrRecord.getValue();
    Date today = new Date();
    Date olderThanToday = new Date();
    olderThanToday.setTime(today.getTime() - (long) days * 1000 * 60 * 60 * 24);
    long todayDate = (today.getTime()) / (1000 * 60 * 60 * 24);
    long olderDate = (olderThanToday.getTime()) / (1000 * 60 * 60 * 24);

    Map<Route, Integer> routesWithCapacityList = new LinkedHashMap<>();

    for (Route r : routeList) {
        int routeCount = 0;

        for (TimeSlot.TimeSlotRange ts : TimeSlot.TimeSlotRange.values()) {
            Train selectedTrain = r.fetchRouteSchedule(ts).getTrainOffered();
            int intermediateCount = 0;
            int trainsFound = 0;
            for (TrainOffered selectedTrainOffered : enterprise.getTrainsOfferedHistory().getTrainsOffered()) {
                if (selectedTrainOffered.getTrain().equals(selectedTrain)) {
                    long selectedDate = (selectedTrainOffered.getDayOffered().getTime())
                            / (1000 * 60 * 60 * 24);
                    if (selectedDate >= olderDate && selectedDate <= todayDate) {
                        intermediateCount += selectedTrainOffered.fetchRunningTrainByTimeSlot(ts)
                                .getRunningCapacity();
                        trainsFound++;
                    }
                }

            }
            if (trainsFound != 0) {
                intermediateCount = intermediateCount / trainsFound;
                routeCount += intermediateCount;
            }
        }
        routeCount = Math.round(routeCount / 15f); // float division so that rounding takes effect
        routesWithCapacityList.putIfAbsent(r, routeCount);
    }

    DefaultCategoryDataset dataset = new DefaultCategoryDataset();

    for (Map.Entry<Route, Integer> entry : routesWithCapacityList.entrySet()) {
        dataset.setValue(entry.getValue(), "Routes", entry.getKey().getRouteName());

    }

    JFreeChart chart = ChartFactory.createBarChart(
            "Congestion Levels of Line : " + selectedLine.getLineName() + " for " + days + " days", "Routes",
            "Average Congestion Levels", dataset, PlotOrientation.VERTICAL, false, true, false);
    CategoryPlot plot = chart.getCategoryPlot();
    plot.setRangeGridlinePaint(Color.BLUE);
    ChartFrame frame = new ChartFrame("Bar Chart for Congestion Analysis", chart);
    frame.setVisible(true);
    frame.setSize(this.getWidth(), this.getHeight() + 200);

}

From source file:org.nuxeo.ecm.core.storage.marklogic.MarkLogicQueryBuilder.java

private QueryBuilder walkAnd(List<Operand> values) {
    List<QueryBuilder> children = walkOperandAsExpression(values);
    // Check wildcards in children in order to perform correlated constraints
    Map<String, List<QueryBuilder>> propBaseToBuilders = new LinkedHashMap<>();
    Map<String, String> propBaseKeyToFieldBase = new HashMap<>();
    for (Iterator<QueryBuilder> it = children.iterator(); it.hasNext();) {
        QueryBuilder child = it.next();
        if (child instanceof CorrelatedContainerQueryBuilder) {
            CorrelatedContainerQueryBuilder queryBuilder = (CorrelatedContainerQueryBuilder) child;
            String correlatedPath = queryBuilder.getCorrelatedPath();
            propBaseKeyToFieldBase.putIfAbsent(correlatedPath, queryBuilder.getPath());
            // Store object for this key
            List<QueryBuilder> propBaseBuilders = propBaseToBuilders.computeIfAbsent(correlatedPath,
                    key -> new LinkedList<>());
            propBaseBuilders.add(queryBuilder.getChild());
            it.remove();
        }
    }
    for (Entry<String, List<QueryBuilder>> entry : propBaseToBuilders.entrySet()) {
        String correlatedPath = entry.getKey();
        List<QueryBuilder> propBaseBuilders = entry.getValue();
        // Build the composition query builder
        QueryBuilder queryBuilder;
        if (propBaseBuilders.size() == 1) {
            queryBuilder = propBaseBuilders.get(0);
        } else {
            queryBuilder = new CompositionQueryBuilder(propBaseBuilders, true);
        }
        // Build upper container
        String path = propBaseKeyToFieldBase.get(correlatedPath);
        children.add(new CorrelatedContainerQueryBuilder(path, correlatedPath, queryBuilder));
    }
    if (children.size() == 1) {
        return children.get(0);
    }
    return new CompositionQueryBuilder(children, true);
}

From source file:com.wso2.code.quality.matrices.ChangesFinder.java

/**
 * Reads the blame received for the currently selected file and records the parent commits of the changed lines,
 * the relevant authors, and the relevant commit hashes used to look up the reviewers of those line ranges.
 *
 * @param rootJsonObject                                JSONObject containing blame information for the currently selected file
 * @param arrayListOfRelevantChangedLinesOfSelectedFile ArrayList containing the changed line ranges of the currently selected file
 * @param gettingPr                                     should be true when finding the authors of buggy lines that are being fixed by the patch
 * @param oldRange                                      the old-file line range to match when gettingPr is true; null when finding parent commits
 */
public void readBlameReceivedForAFile(JSONObject rootJsonObject,
        ArrayList<String> arrayListOfRelevantChangedLinesOfSelectedFile, boolean gettingPr, String oldRange) {

    //iterating the fileName ArrayList to get the location of the saved file
    JSONObject dataJSONObject = (JSONObject) rootJsonObject.get(GITHUB_GRAPHQL_API_DATA_KEY_STRING);
    JSONObject repositoryJSONObect = (JSONObject) dataJSONObject.get(GITHUB_GRAPHQL_API_REPOSITORY_KEY_STRING);
    JSONObject objectJSONObject = (JSONObject) repositoryJSONObect.get(GITHUB_GRAPHQL_API_OBJECT_KEY_STRING);
    JSONObject blameJSONObject = (JSONObject) objectJSONObject.get(GITHUB_GRAPHQL_API_BLAME_KEY_STRING);
    JSONArray rangeJSONArray = (JSONArray) blameJSONObject.get(GITHUB_GRAPHQL_API_RANGES_KEY_STRING);

    //getting the starting line number of the range of lines modified by the patch
    // parallel streams are not used here as the order of the ArrayList matters in the process
    arrayListOfRelevantChangedLinesOfSelectedFile.stream().forEach(lineRanges -> {
        int startingLineNo = 0;
        int endLineNo = 0;
        String oldFileRange = StringUtils.substringBefore(lineRanges, "/");
        String newFileRange = StringUtils.substringAfter(lineRanges, "/");
        // skip newly created files when taking the blame as they contain no previous commits
        if (!oldFileRange.equals("0,0")) {
            if (gettingPr && oldRange.equals(oldFileRange)) {
                // need to consider the line range in the old file for finding authors and reviewers
                startingLineNo = Integer.parseInt(StringUtils.substringBefore(oldFileRange, ","));
                endLineNo = Integer.parseInt(StringUtils.substringAfter(oldFileRange, ","));
            } else if (!gettingPr && oldRange == null) {
                // consider the line range in the new file resulting from applying the commit, for finding parent commits
                startingLineNo = Integer.parseInt(StringUtils.substringBefore(newFileRange, ","));
                endLineNo = Integer.parseInt(StringUtils.substringAfter(newFileRange, ","));
            } else {
                return; // skip to the next iteration when oldRange != oldFileRange while finding author names and commits for obtaining PRs
            }

            // a fresh mapForStoringAgeAndIndex map is needed for each line range to find the most recent change
            Map<Integer, ArrayList<Integer>> mapForStoringAgeAndIndex = new HashMap<>();

            //checking line by line by iterating the startingLineNo
            while (endLineNo >= startingLineNo) {
                // a classic for loop is used here because the index value is required for later processing
                for (int i = 0; i < rangeJSONArray.length(); i++) {
                    JSONObject rangeJSONObject = (JSONObject) rangeJSONArray.get(i);
                    int tempStartingLineNo = (int) rangeJSONObject
                            .get(GITHUB_GRAPHQL_API_STARTING_LINE_KEY_STRING);
                    int tempEndingLineNo = (int) rangeJSONObject.get(GITHUB_GRAPHQL_API_ENDING_LINE_KEY_STRING);

                    //checking whether the line belongs to that line range group
                    if ((tempStartingLineNo <= startingLineNo) && (tempEndingLineNo >= startingLineNo)) {
                        // the relevant startingLineNo falls within this line range, i.e. within this JSONObject
                        if (!gettingPr) {
                            int age = (int) rangeJSONObject.get(GITHUB_GRAPHQL_API_AGE_KEY_STRING);
                            // storing the age field with the relevant index of the JSONObject
                            mapForStoringAgeAndIndex.putIfAbsent(age, new ArrayList<Integer>());
                            if (!mapForStoringAgeAndIndex.get(age).contains(i)) {
                                mapForStoringAgeAndIndex.get(age).add(i); // adding if the index is not present in the array list for the relevant age
                            }

                        } else {
                            //for saving the author names of committers
                            JSONObject commitJSONObject = (JSONObject) rangeJSONObject
                                    .get(GITHUB_GRAPHQL_API_COMMIT_KEY_STRING);

                            JSONObject authorJSONObject = (JSONObject) commitJSONObject
                                    .get(GITHUB_GRAPHQL_API_AUTHOR_KEY_STRING);
                            String nameOfTheAuthor = (String) authorJSONObject
                                    .get(GITHUB_GRAPHQL_API_NAME_KEY_STRING);
                            authorNames.add(nameOfTheAuthor); // authors are added to the Set

                            String urlOfCommit = (String) commitJSONObject
                                    .get(GITHUB_GRAPHQL_API_URL_KEY_STRING);
                            String commitHashForPRReview = StringUtils.substringAfter(urlOfCommit, "commit/");
                            commitHashObtainedForPRReview.add(commitHashForPRReview);
                        }
                        break;
                    } else {
                        continue; // to skip to the next JSON Object in the rangeJSONArray
                    }
                }
                startingLineNo++; // to check for other line numbers
            }

            //for the above line range, getting the latest commit which modified the lines
            if (!gettingPr) {
                //converting the map into a TreeMap to order it by key
                TreeMap<Integer, ArrayList<Integer>> treeMap = new TreeMap<>(mapForStoringAgeAndIndex);
                int minimumKeyOfMapForStoringAgeAndIndex = treeMap.firstKey(); // getting the minimum key
                //getting the indexes of the relevant JSONObjects which contain the most recent change within the relevant line range
                ArrayList<Integer> indexesOfJsonObjectForRecentCommit = mapForStoringAgeAndIndex
                        .get(minimumKeyOfMapForStoringAgeAndIndex);
                // the order of the indexesOfJsonObjectForRecentCommit is not important as we only need to get the parent commit hashes
                indexesOfJsonObjectForRecentCommit.parallelStream().forEach(index -> {
                    JSONObject rangeJSONObject = (JSONObject) rangeJSONArray.get(index);
                    JSONObject commitJSONObject = (JSONObject) rangeJSONObject
                            .get(GITHUB_GRAPHQL_API_COMMIT_KEY_STRING);
                    JSONObject historyJSONObject = (JSONObject) commitJSONObject
                            .get(GITHUB_GRAPHQL_API_HISTORY_KEY_STRING);
                    JSONArray edgesJSONArray = (JSONArray) historyJSONObject
                            .get(GITHUB_GRAPHQL_API_EDGE_KEY_STRING);
                    //getting the second JSON object from the array as it contains the commit of the parent which modified the above line range
                    JSONObject edgeJSONObject = (JSONObject) edgesJSONArray.get(1);
                    JSONObject nodeJSONObject = (JSONObject) edgeJSONObject
                            .get(GITHUB_GRAPHQL_API_NODE_KEY_STRING);
                    String urlOfTheParentCommit = (String) nodeJSONObject
                            .get(GITHUB_GRAPHQL_API_URL_KEY_STRING); // this contains the URL of the parent commit
                    String commitHash = StringUtils.substringAfter(urlOfTheParentCommit, "commit/");

                    commitHashesMapOfTheParent.putIfAbsent(oldFileRange, new HashSet<String>());
                    if (!commitHashesMapOfTheParent.get(oldFileRange).contains(commitHash)) {
                        commitHashesMapOfTheParent.get(oldFileRange).add(commitHash);
                    }
                });
            }

        }

    });
}
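
Several examples on this page pair putIfAbsent with a follow-up get to build a map of lists, as in commitHashesMapOfTheParent above. Since Java 8, computeIfAbsent expresses the same idea in a single call and only constructs the empty collection when the key is actually absent; a minimal sketch (map and key names are illustrative):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MultimapDemo {
    public static void main(String[] args) {
        Map<String, List<String>> multimap = new HashMap<>();
        // putIfAbsent variant: always allocates the empty list, even when the key exists.
        multimap.putIfAbsent("a", new ArrayList<>());
        multimap.get("a").add("first");
        // computeIfAbsent variant: the list is created only when the key is absent.
        multimap.computeIfAbsent("a", k -> new ArrayList<>()).add("second");
        System.out.println(multimap); // {a=[first, second]}
    }
}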

From source file:io.github.collaboratory.LauncherCWL.java

/**
 * Handles one output for upload
 * @param fileMap
 * @param cwlID
 * @param key
 */
private void handleOutputFile(Map<String, List<FileProvisioning.FileInfo>> fileMap, final String cwlID,
        Map<String, Object> param, final String key) {
    String path = (String) param.get("path");
    // if it's the current one
    LOG.info("PATH TO UPLOAD TO: {} FOR {} FOR {}", path, cwlID, key);

    // output
    // TODO: poor naming here, need to cleanup the variables
    // just file name
    // the file URL
    File filePathObj = new File(cwlID);
    //String newDirectory = globalWorkingDir + "/outputs/" + UUID.randomUUID().toString();
    String newDirectory = globalWorkingDir + "/outputs";
    Utilities.executeCommand("mkdir -p " + newDirectory);
    File newDirectoryFile = new File(newDirectory);
    String uuidPath = newDirectoryFile.getAbsolutePath() + "/" + filePathObj.getName();

    // VFS call, see https://github.com/abashev/vfs-s3/tree/branch-2.3.x and
    // https://commons.apache.org/proper/commons-vfs/filesystems.html

    // now add this info to a hash so I can later reconstruct a docker -v command
    FileProvisioning.FileInfo new1 = new FileProvisioning.FileInfo();
    new1.setUrl(path);
    new1.setLocalPath(uuidPath);
    fileMap.putIfAbsent(cwlID, new ArrayList<>());
    fileMap.get(cwlID).add(new1);

    LOG.info("UPLOAD FILE: LOCAL: {} URL: {}", cwlID, path);
}

From source file:org.apache.samza.config.KafkaConsumerConfig.java

/**
 * Create kafka consumer configs, based on the subset of global configs.
 * @param config application config
 * @param systemName system name
 * @param clientId client id provided by the caller
 * @return KafkaConsumerConfig
 */
public static KafkaConsumerConfig getKafkaSystemConsumerConfig(Config config, String systemName,
        String clientId) {

    Config subConf = config.subset(String.format("systems.%s.consumer.", systemName), true);

    final String groupId = createConsumerGroupId(config);

    Map<String, Object> consumerProps = new HashMap<>(subConf);

    consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, clientId);

    // These are values we enforce in Samza; they cannot be overridden.

    // Disable consumer auto-commit because Samza controls commits
    consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");

    // check if samza default offset value is defined
    String systemOffsetDefault = new SystemConfig(config).getSystemOffsetDefault(systemName);

    // Translate samza config value to kafka config value
    String autoOffsetReset = getAutoOffsetResetValue(
            (String) consumerProps.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG), systemOffsetDefault);
    LOG.info("setting auto.offset.reset for system {} to {}", systemName, autoOffsetReset);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);

    // if consumer bootstrap servers are not configured, get them from the producer configs
    if (!subConf.containsKey(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG)) {
        String bootstrapServers = config.get(
                String.format("systems.%s.producer.%s", systemName, ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG));
        if (StringUtils.isEmpty(bootstrapServers)) {
            throw new SamzaException(
                    "Missing " + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG + " config  for " + systemName);
        }
        consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    }

    // Always use default partition assignment strategy. Do not allow override.
    consumerProps.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, RangeAssignor.class.getName());

    // The consumer is fully typed, and deserialization can be too, but in case no deserializer
    // is provided we should default to byte[]
    if (!consumerProps.containsKey(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG)) {
        LOG.info("setting key serialization for the consumer(for system {}) to ByteArrayDeserializer",
                systemName);
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    }
    if (!consumerProps.containsKey(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG)) {
        LOG.info("setting value serialization for the consumer(for system {}) to ByteArrayDeserializer",
                systemName);
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                ByteArrayDeserializer.class.getName());
    }

    // Supply the default max poll records value if the caller did not set one
    consumerProps.putIfAbsent(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, DEFAULT_KAFKA_CONSUMER_MAX_POLL_RECORDS);

    return new KafkaConsumerConfig(consumerProps, systemName);
}
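
As the Samza example shows, putIfAbsent is a convenient way to layer defaults under caller-supplied settings: an existing entry wins, otherwise the default is installed. A minimal sketch, using plain string keys in place of the real ConsumerConfig constants:

import java.util.HashMap;
import java.util.Map;

public class ConsumerDefaultsDemo {
    public static void main(String[] args) {
        Map<String, Object> consumerProps = new HashMap<>();
        consumerProps.put("max.poll.records", 500); // supplied by the caller
        // Installed only where the caller did not set a value.
        consumerProps.putIfAbsent("max.poll.records", 100);   // no effect, 500 wins
        consumerProps.putIfAbsent("enable.auto.commit", "false"); // installed
        System.out.println(consumerProps);
    }
}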

From source file:com.uber.stream.kafka.mirrormaker.manager.core.ControllerHelixManager.java

private void updateMetrics(Map<String, Set<TopicPartition>> instanceToTopicPartitionsMap,
        Map<String, InstanceTopicPartitionHolder> instanceMap) {
    // int[3]: 0: #topic, 1: #controller, 2: #worker
    Map<String, int[]> currRouteInfo = new ConcurrentHashMap<>();
    //LOGGER.info("instanceToTopicPartitionsMap: {}", instanceToTopicPartitionsMap);
    for (String instanceName : instanceToTopicPartitionsMap.keySet()) {
        Set<TopicPartition> topicPartitions = instanceToTopicPartitionsMap.get(instanceName);
        for (TopicPartition tp : topicPartitions) {
            String topicName = tp.getTopic();
            if (topicName.startsWith(SEPARATOR)) {
                // route
                String route = topicName + SEPARATOR + tp.getPartition();
                String routeString = convert(route);
                currRouteInfo.putIfAbsent(routeString, new int[3]);
                currRouteInfo.get(routeString)[1]++;
                currRouteInfo.get(routeString)[2] += instanceMap.get(instanceName).getWorkerSet().size();

                // register metrics if needed
                maybeRegisterMetrics(routeString);
            } else {
                // topic
                String route = tp.getPipeline();
                String routeString = convert(route);
                currRouteInfo.putIfAbsent(routeString, new int[3]);
                currRouteInfo.get(routeString)[0]++;
            }
        }
    }
    //LOGGER.info("currRouteInfo: {}", currRouteInfo);
    //LOGGER.info("_routeToCounterMap: {}", _routeToCounterMap);

    for (String routeString : _routeToCounterMap.keySet()) {
        int topicTotalNumber = 0;
        int controllerTotalNumber = 0;
        int workerTotalNumber = 0;
        if (currRouteInfo.containsKey(routeString)) {
            topicTotalNumber = currRouteInfo.get(routeString)[0];
            controllerTotalNumber = currRouteInfo.get(routeString)[1];
            workerTotalNumber = currRouteInfo.get(routeString)[2];
        }
        Counter topicTotalNumberCounter = _routeToCounterMap.get(routeString).get(TOPIC_TOTAL_NUMBER);
        topicTotalNumberCounter.inc(topicTotalNumber - topicTotalNumberCounter.getCount());

        Counter controllerTotalNumberCounter = _routeToCounterMap.get(routeString).get(CONTROLLER_TOTAL_NUMBER);
        controllerTotalNumberCounter.inc(controllerTotalNumber - controllerTotalNumberCounter.getCount());

        Counter workerTotalNumberCounter = _routeToCounterMap.get(routeString).get(WORKER_TOTAL_NUMBER);
        workerTotalNumberCounter.inc(workerTotalNumber - workerTotalNumberCounter.getCount());
        // LOGGER.info("update metrics for {}", routeString);
    }
}
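
In updateMetrics above, putIfAbsent is called on a ConcurrentHashMap, where the check-then-act step is atomic; the default Map.putIfAbsent on a plain HashMap gives no such guarantee under concurrent access. A minimal sketch of the concurrent idiom (map and counter names are illustrative):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

public class AtomicPutIfAbsentDemo {
    public static void main(String[] args) throws InterruptedException {
        Map<String, AtomicInteger> counters = new ConcurrentHashMap<>();
        Runnable task = () -> {
            // Atomic on ConcurrentHashMap: exactly one thread installs the counter.
            counters.putIfAbsent("requests", new AtomicInteger());
            counters.get("requests").incrementAndGet();
        };
        Thread t1 = new Thread(task);
        Thread t2 = new Thread(task);
        t1.start(); t2.start();
        t1.join(); t2.join();
        System.out.println(counters.get("requests")); // 2
    }
}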

From source file:com.uber.stream.kafka.mirrormaker.manager.core.ControllerHelixManager.java

public synchronized void handleLiveInstanceChange(boolean onlyCheckOffline, boolean forceBalance)
        throws Exception {
    _lock.lock();
    try {
        LOGGER.info("handleLiveInstanceChange() wake up!");

        // Check if any controller in route is down
        Map<String, Set<TopicPartition>> instanceToTopicPartitionsMap = HelixUtils
                .getInstanceToTopicPartitionsMap(_helixManager,
                        _srcKafkaValidationManager.getClusterToObserverMap());
        List<String> liveInstances = HelixUtils.liveInstances(_helixManager);
        List<String> instanceToReplace = new ArrayList<>();
        boolean routeControllerDown = false;
        // Check if any worker in route is down
        boolean routeWorkerDown = false;
        if (_enableRebalance || forceBalance) {
            for (String instanceName : instanceToTopicPartitionsMap.keySet()) {
                if (!liveInstances.contains(instanceName)) {
                    routeControllerDown = true;
                    instanceToReplace.add(instanceName);
                }
            }

            LOGGER.info("Controller need to replace: {}", instanceToReplace);
            // Make sure controller status is up-to-date
            updateCurrentStatus();
            // Happy scenario: instance contains route topic
            for (String instance : instanceToReplace) {
                Set<TopicPartition> tpOrRouteSet = instanceToTopicPartitionsMap.get(instance);
                for (TopicPartition tpOrRoute : tpOrRouteSet) {
                    if (tpOrRoute.getTopic().startsWith(SEPARATOR)) {
                        String pipeline = tpOrRoute.getTopic();
                        int routeId = tpOrRoute.getPartition();

                        // TODO: check if _availableControllerList is empty
                        String newInstanceName = _availableControllerList.get(0);
                        _availableControllerList.remove(0);
                        LOGGER.info("Controller {} in route {}@{} will be replaced by {}", instance, pipeline,
                                routeId, newInstanceName);
                        InstanceTopicPartitionHolder newInstance = new InstanceTopicPartitionHolder(
                                newInstanceName, tpOrRoute);

                        List<TopicPartition> tpToReassign = new ArrayList<>();
                        PriorityQueue<InstanceTopicPartitionHolder> itphList = _pipelineToInstanceMap
                                .get(pipeline);
                        for (InstanceTopicPartitionHolder itph : itphList) {
                            if (itph.getInstanceName().equals(instance)) {
                                tpToReassign.addAll(itph.getServingTopicPartitionSet());
                                // TODO: is it possible to have different route on same host?
                                break;
                            }
                        }

                        // Helix doesn't guarantee the order of execution, so we have to wait for the new controller
                        // to be online before reassigning topics, but this might cause a long rebalance time
                        _helixAdmin.setResourceIdealState(_helixClusterName, pipeline,
                                IdealStateBuilder.resetCustomIdealStateFor(
                                        _helixAdmin.getResourceIdealState(_helixClusterName, pipeline),
                                        pipeline, String.valueOf(routeId), newInstanceName));

                        long ts1 = System.currentTimeMillis();
                        while (!isControllerOnline(newInstanceName, pipeline, String.valueOf(routeId))) {
                            if (System.currentTimeMillis() - ts1 > 30000) {
                                break;
                            }
                            try {
                                // Based on testing, the wait time is usually on the order of 100 ms
                                Thread.sleep(100);
                            } catch (InterruptedException e) {
                                e.printStackTrace();
                            }
                        }

                        long ts2 = System.currentTimeMillis();
                        LOGGER.info("Controller {} in route {}@{} is replaced by {}, it took {} ms", instance,
                                pipeline, routeId, newInstanceName, ts2 - ts1);

                        for (TopicPartition tp : tpToReassign) {
                            _helixAdmin.setResourceIdealState(_helixClusterName, tp.getTopic(),
                                    IdealStateBuilder.resetCustomIdealStateFor(
                                            _helixAdmin.getResourceIdealState(_helixClusterName, tp.getTopic()),
                                            tp.getTopic(), pipeline + SEPARATOR + routeId, newInstanceName));
                        }

                        LOGGER.info(
                                "Controller {} in route {}@{} is replaced by {}, topics are reassigned, it took {} ms",
                                instance, pipeline, routeId, newInstanceName, System.currentTimeMillis() - ts2);
                        break;
                    }
                }
            }
            // Failure scenario: instance doesn't contain route topic
            // e.g. route and the topic in that route are not assigned to the same host
            // In this case, assume the instance of the route is correct and reassign the topic to that host
            for (String instance : instanceToTopicPartitionsMap.keySet()) {
                Set<TopicPartition> topicPartitionSet = instanceToTopicPartitionsMap.get(instance);
                if (topicPartitionSet.isEmpty()) {
                    continue;
                }
                boolean foundRoute = false;
                for (TopicPartition tp : topicPartitionSet) {
                    if (tp.getTopic().startsWith(SEPARATOR)) {
                        foundRoute = true;
                        break;
                    }
                }
                if (!foundRoute) {
                    routeControllerDown = true;
                    String instanceForRoute = null;
                    // Find the host for its route
                    String route = topicPartitionSet.iterator().next().getPipeline();
                    for (String pipeline : _pipelineToInstanceMap.keySet()) {
                        if (pipeline.equals(getPipelineFromRoute(route))) {
                            for (InstanceTopicPartitionHolder itph : _pipelineToInstanceMap.get(pipeline)) {
                                if (itph.getRouteString().equals(route)) {
                                    instanceForRoute = itph.getInstanceName();
                                    break;
                                }
                            }
                        }
                    }

                    LOGGER.info("Need to reassign: {} from {} to {}", topicPartitionSet, instance,
                            instanceForRoute);
                    for (TopicPartition tp : topicPartitionSet) {
                        _helixAdmin.setResourceIdealState(_helixClusterName, tp.getTopic(),
                                IdealStateBuilder.resetCustomIdealStateFor(
                                        _helixAdmin.getResourceIdealState(_helixClusterName, tp.getTopic()),
                                        tp.getTopic(), route, instanceForRoute));
                    }
                }
            }

            if (routeControllerDown) {
                updateCurrentStatus();
            }

            HelixManager workerManager = _workerHelixManager.getHelixManager();
            Map<String, Set<TopicPartition>> workerInstanceToTopicPartitionsMap = HelixUtils
                    .getInstanceToTopicPartitionsMap(workerManager, null);
            List<String> workerLiveInstances = HelixUtils.liveInstances(workerManager);
            Map<String, List<String>> workerPipelineToRouteIdToReplace = new HashMap<>();
            List<String> workerToReplace = new ArrayList<>();

            for (String instanceName : workerInstanceToTopicPartitionsMap.keySet()) {
                if (!workerLiveInstances.contains(instanceName)) {
                    routeWorkerDown = true;
                    TopicPartition route = workerInstanceToTopicPartitionsMap.get(instanceName).iterator()
                            .next();
                    workerPipelineToRouteIdToReplace.putIfAbsent(route.getTopic(), new ArrayList<>());
                    workerPipelineToRouteIdToReplace.get(route.getTopic())
                            .add(String.valueOf(route.getPartition()));
                    workerToReplace.add(instanceName);
                    LOGGER.info("Worker changed: {} for {}", instanceName, route);
                }
            }
            if (!routeWorkerDown) {
                LOGGER.info("No worker in route is changed, do nothing!");
            } else {
                LOGGER.info("Worker need to replace: {}, {}", workerToReplace,
                        workerPipelineToRouteIdToReplace);
                // Make sure worker status is up-to-date
                if (!routeControllerDown) {
                    updateCurrentStatus();
                }
                _workerHelixManager.replaceWorkerInMirrorMaker(workerPipelineToRouteIdToReplace,
                        workerToReplace);

                updateCurrentStatus();
            }
        } else {
            LOGGER.info("AutoBalancing is disabled, do nothing");
        }

        if (onlyCheckOffline) {
            return;
        }

        LOGGER.info("Start rebalancing current cluster");
        // Haven't run updateCurrentStatus() before
        if (!routeControllerDown && !routeWorkerDown) {
            updateCurrentStatus();
        }

        if (_enableAutoScaling) {
            scaleCurrentCluster();
        } else {
            LOGGER.info("AutoScaling is disabled, do nothing");
        }

    } finally {
        _lock.unlock();
    }
}

From source file:com.epam.catgenome.manager.protein.ProteinSequenceReconstructionManager.java

/**
 * Constructs amino acid sequences for the specified gene track
 *
 * @param track the track to create sequences for
 * @param cdsList a list of CDS blocks
 * @param cdsNucleotides a list of CDS sequences
 * @param frames a list of the CDS frames
 * @return a map from each CDS to its protein sequence entries
 */
public Map<Gene, List<ProteinSequenceEntry>> getAminoAcids(final Track<Gene> track, final List<Gene> cdsList,
        final List<List<Sequence>> cdsNucleotides, final List<Integer> frames) {
    if (CollectionUtils.isEmpty(cdsList) || CollectionUtils.isEmpty(cdsNucleotides)) {
        return Collections.emptyMap();
    }
    double time1 = Utils.getSystemTimeMilliseconds();
    Map<Gene, List<ProteinSequenceEntry>> proteinSequences = new HashMap<>();
    //if gene is on the negative strand, we should process it from the end
    checkAndRevert(cdsList, cdsNucleotides, frames);

    MutableInt aminoAcidCounter = new MutableInt(0);
    for (int i = 0; i < cdsNucleotides.size(); i++) {
        List<Sequence> nucleotides = cdsNucleotides.get(i);
        Gene cds = cdsList.get(i);

        int extendedStart = 0;
        int frame = frames.get(i);
        if (frame > 0 && i != 0) {
            //restore the part of the triplet from the previous nucleotides
            List<Sequence> prev = cdsNucleotides.get(i - 1);
            int prevNucleotides = TRIPLE_LENGTH - frame;
            if (prev.size() >= prevNucleotides) {
                List<Sequence> nucleotidesExtended = new ArrayList<>();
                nucleotidesExtended.addAll(prev.subList(prev.size() - prevNucleotides, prev.size()));
                nucleotidesExtended.addAll(nucleotides);
                nucleotides = nucleotidesExtended;
                extendedStart = -prevNucleotides;
            }
        } else {
            nucleotides = nucleotides.subList(frame, nucleotides.size());
        }

        // Construct amino acids from nucleotide triples.
        List<List<Sequence>> tripleList = ListUtils.partition(nucleotides, TRIPLE_LENGTH);
        List<ProteinSequenceEntry> value = reconstructAminoAcidByTriples(track, cds, cdsNucleotides, i,
                tripleList, extendedStart, aminoAcidCounter);

        proteinSequences.putIfAbsent(cds, value);

    }

    double time2 = Utils.getSystemTimeMilliseconds();
    LOGGER.debug("Get amino acids {}:{} ms", Thread.currentThread().getName(), time2 - time1);

    return proteinSequences;
}

From source file:org.languagetool.rules.de.CompoundCoherencyRule.java

@Override
public RuleMatch[] match(List<AnalyzedSentence> sentences) throws IOException {
    List<RuleMatch> ruleMatches = new ArrayList<>();
    Map<String, List<String>> normToTextOccurrences = new HashMap<>();
    int pos = 0;
    for (AnalyzedSentence sentence : sentences) {
        AnalyzedTokenReadings[] tokens = sentence.getTokensWithoutWhitespace();
        for (AnalyzedTokenReadings atr : tokens) {
            String lemmaOrNull = getLemma(atr);
            String token = atr.getToken();
            if (token.isEmpty()) {
                continue;
            }
            // The whole implementation could be simpler, but this way we also catch cases where
            // the word (and thus its lemma) isn't known.
            String lemma = lemmaOrNull != null ? lemmaOrNull : token;
            String normToken = lemma.replace("-", "").toLowerCase();
            if (StringUtils.isNumeric(normToken)) {
                // avoid messages about "2-3" and "23" both being used
                break;
            }
            List<String> textOcc = normToTextOccurrences.get(normToken);
            if (textOcc != null) {
                if (textOcc.stream().noneMatch(f -> f.equalsIgnoreCase(lemma))) {
                    String other = textOcc.get(0);
                    if (containsHyphenInside(other) || containsHyphenInside(token)) {
                        String msg = "Uneinheitliche Verwendung von Bindestrichen. Der Text enthlt sowohl '"
                                + token + "' als auch '" + other + "'.";
                        RuleMatch ruleMatch = new RuleMatch(this, sentence, pos + atr.getStartPos(),
                                pos + atr.getEndPos(), msg);
                        if (token.replace("-", "").equalsIgnoreCase(other.replace("-", ""))) {
                            // might be different inflected forms, so only suggest if really just the hyphen is different:
                            ruleMatch.setSuggestedReplacement(other);
                        }
                        ruleMatches.add(ruleMatch);
                    }
                }
            } else {
                List<String> l = new ArrayList<>();
                l.add(lemma);
                normToTextOccurrences.putIfAbsent(normToken, l);
            }
        }
        pos += sentence.getText().length();
    }
    return toRuleMatchArray(ruleMatches);
}

From source file:de.tudarmstadt.ukp.clarin.webanno.tsv.WebannoTsv3Writer.java

private void setChainAnnoPerFeature(Map<AnnotationUnit, List<List<String>>> aAnnotationsPertype, Type aType,
        AnnotationFS aFs, AnnotationUnit aUnit, int aLinkNo, int achainNo, boolean aMultiUnit, boolean aFirst) {
    List<String> annoPerFeatures = new ArrayList<>();
    for (Feature feature : aType.getFeatures()) {
        if (feature.toString().equals("uima.cas.AnnotationBase:sofa")
                || feature.toString().equals("uima.tcas.Annotation:begin")
                || feature.toString().equals("uima.tcas.Annotation:end")
                || feature.getShortName().equals(GOVERNOR) || feature.getShortName().equals(DEPENDENT)
                || feature.getShortName().equals(FIRST) || feature.getShortName().equals(NEXT)) {
            continue;
        }
        String annotation = aFs.getFeatureValueAsString(feature);

        if (annotation == null)
            annotation = "*";
        else
            annotation = replaceEscapeChars(annotation);

        if (feature.getShortName().equals(REF_REL)) {
            annotation = annotation + "->" + achainNo + "-" + aLinkNo;
        } else {
            // multi-unit and single-unit chain elements are rendered the same way
            annotation = annotation + "[" + achainNo + "]";
        }
        featurePerLayer.get(aType.getName()).add(feature.getShortName());

        annoPerFeatures.add(annotation);
    }
    aAnnotationsPertype.putIfAbsent(aUnit, new ArrayList<>());
    ambigUnits.putIfAbsent(aType.getName(), new HashMap<>());
    ambigUnits.get(aType.getName()).put(aUnit, true); // coreference chains are always ambiguous

    if (annoPerFeatures.isEmpty())
        annoPerFeatures.add("*[" + achainNo + "]");
    aAnnotationsPertype.get(aUnit).add(annoPerFeatures);
}