Example usage for java.util.LinkedHashMap.get

List of usage examples for java.util.LinkedHashMap.get

Introduction

On this page you can find example usage of java.util.LinkedHashMap.get.

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
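
For a quick illustration of the behavior described above, here is a minimal, self-contained sketch (the class and variable names are illustrative, not taken from the examples below):

import java.util.LinkedHashMap;

public class LinkedHashMapGetDemo {
    public static void main(String[] args) {
        // Insertion order is preserved, which is why the examples below use
        // LinkedHashMap; get() itself looks up by key just like HashMap.get().
        LinkedHashMap<String, Integer> counts = new LinkedHashMap<String, Integer>();
        counts.put("alpha", 1);
        counts.put("beta", 2);

        System.out.println(counts.get("alpha")); // prints 1
        System.out.println(counts.get("gamma")); // prints null -- no mapping for this key
    }
}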

Usage

From source file:com.griddynamics.jagger.monitoring.reporting.SystemUnderTestPlotsGeneralProvider.java

public Map<String, List<MonitoringReporterData>> createTaskPlots() {
    log.info("BEGIN: Create general task plots");

    Map<String, List<MonitoringReporterData>> taskPlots = new LinkedHashMap<String, List<MonitoringReporterData>>();
    GeneralStatistics generalStatistics = getStatistics();

    Set<String> taskIds = generalStatistics.findTaskIds();
    Set<String> boxIdentifiers = generalStatistics.findBoxIdentifiers();
    Set<String> sutUrls = generalStatistics.findSutUrls();
    for (GroupKey groupName : plotGroups.getPlotGroups().keySet()) {
        log.info("    Create general task plots for group '{}'", groupName);

        if (showPlotsByGlobal) {
            log.info("        Create general global task plots");

            List<MonitoringReporterData> plots = new LinkedList<MonitoringReporterData>();
            XYSeriesCollection chartsCollection = new XYSeriesCollection();
            LinkedHashMap<String, IntervalMarker> markers = new LinkedHashMap<String, IntervalMarker>();
            for (MonitoringParameter parameterId : plotGroups.getPlotGroups().get(groupName)) {
                log.info("            Create general global task plots for parameter '{}'", parameterId);

                MonitoringParameterBean param = MonitoringParameterBean.copyOf(parameterId);
                if (generalStatistics.hasGlobalStatistics(param)) {
                    XYSeries values = new XYSeries(param.getDescription());
                    long timeShift = 0;
                    int taskNum = 0;
                    for (String taskId : taskIds) {
                        log.info("                Create general global task plots for task '{}'", taskId);

                        long maxTime = 0;
                        for (MonitoringStatistics monitoringStatistics : generalStatistics
                                .findGlobalStatistics(taskId, param)) {
                            long time = monitoringStatistics.getTime();
                            double t = timeShift + time;
                            values.add(t, monitoringStatistics.getAverageValue());

                            if (time > maxTime) {
                                maxTime = time;
                            }

                            if (showNumbers) {
                                IntervalMarker marker = markers.get(taskId);
                                if (marker == null) {
                                    marker = new IntervalMarker(t, t);
                                    marker.setLabel(monitoringStatistics.getTaskData().getNumber().toString());
                                    marker.setAlpha((taskNum % 2 == 0) ? 0.2f : 0.4f);

                                    int mod = taskNum % 3;
                                    if (mod == 0) {
                                        marker.setLabelAnchor(RectangleAnchor.CENTER);
                                    } else if (mod == 1) {
                                        marker.setLabelAnchor(RectangleAnchor.TOP);
                                    } else if (mod == 2) {
                                        marker.setLabelAnchor(RectangleAnchor.BOTTOM);
                                    }

                                    marker.setLabelFont(
                                            marker.getLabelFont().deriveFont(10.0f).deriveFont(Font.BOLD));
                                    markers.put(taskId, marker);
                                } else {
                                    if (t < marker.getStartValue()) {
                                        marker.setStartValue(t);
                                    }
                                    if (t > marker.getEndValue()) {
                                        marker.setEndValue(t);
                                    }
                                }
                            }
                        }
                        timeShift += maxTime;
                        taskNum++;
                    }
                    if (values.isEmpty()) {
                        values.add(0, 0);
                    }
                    chartsCollection.addSeries(values);
                }
            }

            log.debug("group name \n{} \nparams {}]\n", groupName,
                    Lists.newArrayList(plotGroups.getPlotGroups().get(groupName)));

            Pair<String, XYSeriesCollection> pair = ChartHelper.adjustTime(chartsCollection, markers.values());

            chartsCollection = pair.getSecond();

            String name = groupName.getUpperName();

            if (chartsCollection.getSeriesCount() > 0) {
                JFreeChart chart = ChartHelper.createXYChart(null, chartsCollection,
                        "Time (" + pair.getFirst() + ")", groupName.getLeftName(), 0, 1,
                        ChartHelper.ColorTheme.LIGHT);

                XYPlot plot = (XYPlot) chart.getPlot();
                for (IntervalMarker marker : markers.values()) {
                    plot.addDomainMarker(marker);
                }

                MonitoringReporterData monitoringReporterData = new MonitoringReporterData();
                monitoringReporterData.setParameterName(name);
                monitoringReporterData.setTitle(name);
                monitoringReporterData.setPlot(new JCommonDrawableRenderer(chart));
                plots.add(monitoringReporterData);
            }

            if (!plots.isEmpty()) {
                taskPlots.put(name, plots);
            }
        }

        if (showPlotsByBox) {
            log.info("        Create general box task plots");

            for (String boxIdentifier : boxIdentifiers) {
                log.info("            Create general box task plots for box '{}'", boxIdentifier);

                List<MonitoringReporterData> plots = new LinkedList<MonitoringReporterData>();
                XYSeriesCollection chartsCollection = new XYSeriesCollection();
                LinkedHashMap<String, IntervalMarker> markers = new LinkedHashMap<String, IntervalMarker>();
                for (MonitoringParameter parameterId : plotGroups.getPlotGroups().get(groupName)) {
                    log.info("                Create general box task plots for parameter '{}'", parameterId);

                    MonitoringParameterBean param = MonitoringParameterBean.copyOf(parameterId);
                    if (generalStatistics.hasBoxStatistics(param, boxIdentifier)) {
                        XYSeries values = new XYSeries(param.getDescription());
                        long timeShift = 0;
                        int taskNum = 0;
                        for (String taskId : taskIds) {
                            log.info("                    Create general box task plots for task '{}'", taskId);

                            long maxTime = 0;
                            for (MonitoringStatistics monitoringStatistics : generalStatistics
                                    .findBoxStatistics(taskId, param, boxIdentifier)) {
                                long time = monitoringStatistics.getTime();
                                double t = timeShift + time;
                                values.add(t, monitoringStatistics.getAverageValue());

                                if (time > maxTime) {
                                    maxTime = time;
                                }

                                if (showNumbers) {
                                    IntervalMarker marker = markers.get(taskId);
                                    if (marker == null) {
                                        marker = new IntervalMarker(t, t);
                                        marker.setLabel(
                                                monitoringStatistics.getTaskData().getNumber().toString());
                                        marker.setAlpha((taskNum % 2 == 0) ? 0.2f : 0.4f);

                                        int mod = taskNum % 3;
                                        if (mod == 0) {
                                            marker.setLabelAnchor(RectangleAnchor.CENTER);
                                        } else if (mod == 1) {
                                            marker.setLabelAnchor(RectangleAnchor.TOP);
                                        } else if (mod == 2) {
                                            marker.setLabelAnchor(RectangleAnchor.BOTTOM);
                                        }

                                        marker.setLabelFont(
                                                marker.getLabelFont().deriveFont(10.0f).deriveFont(Font.BOLD));
                                        markers.put(taskId, marker);
                                    } else {
                                        if (t < marker.getStartValue()) {
                                            marker.setStartValue(t);
                                        }
                                        if (t > marker.getEndValue()) {
                                            marker.setEndValue(t);
                                        }
                                    }
                                }
                            }
                            timeShift += maxTime;
                            taskNum++;
                        }
                        if (values.isEmpty()) {
                            values.add(0, 0);
                        }
                        chartsCollection.addSeries(values);
                    }
                }

                log.debug("group name \n{} \nparams {}]\n", groupName,
                        Lists.newArrayList(plotGroups.getPlotGroups().get(groupName)));

                Pair<String, XYSeriesCollection> pair = ChartHelper.adjustTime(chartsCollection,
                        markers.values());

                chartsCollection = pair.getSecond();

                String name = groupName.getUpperName() + " on " + boxIdentifier;

                if (chartsCollection.getSeriesCount() > 0) {
                    JFreeChart chart = ChartHelper.createXYChart(null, chartsCollection,
                            "Time (" + pair.getFirst() + ")", groupName.getLeftName(), 0, 1,
                            ChartHelper.ColorTheme.LIGHT);

                    XYPlot plot = (XYPlot) chart.getPlot();
                    for (IntervalMarker marker : markers.values()) {
                        plot.addDomainMarker(marker);
                    }

                    MonitoringReporterData monitoringReporterData = new MonitoringReporterData();
                    monitoringReporterData.setParameterName(name);
                    monitoringReporterData.setTitle(name);
                    monitoringReporterData.setPlot(new JCommonDrawableRenderer(chart));
                    plots.add(monitoringReporterData);
                }

                if (!plots.isEmpty()) {
                    taskPlots.put(name, plots);
                }
            }
        }

        if (showPlotsBySuT) {
            log.info("        Create general sut task plots");

            for (String sutUrl : sutUrls) {
                log.info("            Create general sut task plots for sut '{}'", sutUrl);

                List<MonitoringReporterData> plots = new LinkedList<MonitoringReporterData>();
                XYSeriesCollection chartsCollection = new XYSeriesCollection();
                LinkedHashMap<String, IntervalMarker> markers = new LinkedHashMap<String, IntervalMarker>();
                for (MonitoringParameter parameterId : plotGroups.getPlotGroups().get(groupName)) {
                    log.info("                Create general sut task plots for parameter '{}'", parameterId);

                    MonitoringParameterBean param = MonitoringParameterBean.copyOf(parameterId);
                    if (generalStatistics.hasSutStatistics(param, sutUrl)) {
                        XYSeries values = new XYSeries(param.getDescription());
                        long timeShift = 0;
                        int taskNum = 0;
                        for (String taskId : taskIds) {
                            log.info("                    Create general sut task plots for task '{}'", taskId);

                            long maxTime = 0;
                            for (MonitoringStatistics monitoringStatistics : generalStatistics
                                    .findSutStatistics(taskId, param, sutUrl)) {
                                long time = monitoringStatistics.getTime();
                                double t = timeShift + time;
                                values.add(t, monitoringStatistics.getAverageValue());

                                if (time > maxTime) {
                                    maxTime = time;
                                }

                                if (showNumbers) {
                                    IntervalMarker marker = markers.get(taskId);
                                    if (marker == null) {
                                        marker = new IntervalMarker(t, t);
                                        marker.setLabel(
                                                monitoringStatistics.getTaskData().getNumber().toString());
                                        marker.setAlpha((taskNum % 2 == 0) ? 0.2f : 0.4f);

                                        int mod = taskNum % 3;
                                        if (mod == 0) {
                                            marker.setLabelAnchor(RectangleAnchor.CENTER);
                                        } else if (mod == 1) {
                                            marker.setLabelAnchor(RectangleAnchor.TOP);
                                        } else if (mod == 2) {
                                            marker.setLabelAnchor(RectangleAnchor.BOTTOM);
                                        }

                                        marker.setLabelFont(
                                                marker.getLabelFont().deriveFont(10.0f).deriveFont(Font.BOLD));
                                        markers.put(taskId, marker);
                                    } else {
                                        if (t < marker.getStartValue()) {
                                            marker.setStartValue(t);
                                        }
                                        if (t > marker.getEndValue()) {
                                            marker.setEndValue(t);
                                        }
                                    }
                                }
                            }
                            timeShift += maxTime;
                            taskNum++;
                        }
                        if (values.isEmpty()) {
                            values.add(0, 0);
                        }
                        chartsCollection.addSeries(values);
                    }
                }

                log.debug("group name \n{} \nparams {}]\n", groupName,
                        Lists.newArrayList(plotGroups.getPlotGroups().get(groupName)));

                Pair<String, XYSeriesCollection> pair = ChartHelper.adjustTime(chartsCollection,
                        markers.values());

                chartsCollection = pair.getSecond();

                String name = groupName.getUpperName() + " on " + sutUrl;

                if (chartsCollection.getSeriesCount() > 0) {
                    JFreeChart chart = ChartHelper.createXYChart(null, chartsCollection,
                            "Time (" + pair.getFirst() + ")", groupName.getLeftName(), 0, 1,
                            ChartHelper.ColorTheme.LIGHT);

                    XYPlot plot = (XYPlot) chart.getPlot();
                    for (IntervalMarker marker : markers.values()) {
                        plot.addDomainMarker(marker);
                    }

                    MonitoringReporterData monitoringReporterData = new MonitoringReporterData();
                    monitoringReporterData.setParameterName(name);
                    monitoringReporterData.setTitle(name);
                    monitoringReporterData.setPlot(new JCommonDrawableRenderer(chart));
                    plots.add(monitoringReporterData);
                }

                if (!plots.isEmpty()) {
                    taskPlots.put(name, plots);
                }
            }
        }
    }

    clearStatistics();

    log.info("END: Create general task plots");

    return taskPlots;
}
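
The markers map in the method above follows a common get-or-create pattern: get(taskId) returns null the first time a task is seen, so a new IntervalMarker is created and put; on later hits the existing marker is widened. Below is a condensed sketch of that pattern under simplified assumptions (the Interval class is a hypothetical stand-in for IntervalMarker, not part of the source above):

import java.util.LinkedHashMap;
import java.util.Map;

public class GetOrCreateSketch {
    // Hypothetical stand-in for IntervalMarker: only the start/end values matter here.
    static class Interval {
        double start, end;
        Interval(double start, double end) { this.start = start; this.end = end; }
    }

    static void record(Map<String, Interval> markers, String taskId, double t) {
        Interval marker = markers.get(taskId);       // null when taskId has no marker yet
        if (marker == null) {
            markers.put(taskId, new Interval(t, t)); // first sample: create and register
        } else {
            if (t < marker.start) marker.start = t;  // later samples: widen the interval
            if (t > marker.end) marker.end = t;
        }
    }

    public static void main(String[] args) {
        Map<String, Interval> markers = new LinkedHashMap<String, Interval>();
        record(markers, "task-1", 5.0);
        record(markers, "task-1", 9.0);
        // prints "5.0 .. 9.0"
        System.out.println(markers.get("task-1").start + " .. " + markers.get("task-1").end);
    }
}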

From source file:com.vmware.bdd.cli.commands.ClusterCommands.java

@CliCommand(value = "cluster create", help = "Create a new cluster")
public void createCluster(
        @CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
        @CliOption(key = {
                "appManager" }, mandatory = false, help = "The application manager name") final String appManager,
        @CliOption(key = {
                "type" }, mandatory = false, help = "The cluster type is Hadoop or HBase") final String type,
        @CliOption(key = { "distro" }, mandatory = false, help = "The distro name") final String distro,
        @CliOption(key = {
                "specFile" }, mandatory = false, help = "The spec file name path") final String specFilePath,
        @CliOption(key = {
                "rpNames" }, mandatory = false, help = "Resource Pools for the cluster: use \",\" among names.") final String rpNames,
        @CliOption(key = {
                "dsNames" }, mandatory = false, help = "Datastores for the cluster: use \",\" among names.") final String dsNames,
        @CliOption(key = {
                "networkName" }, mandatory = false, help = "Network Name used for management") final String networkName,
        @CliOption(key = {
                "hdfsNetworkName" }, mandatory = false, help = "Network Name for HDFS traffic.") final String hdfsNetworkName,
        @CliOption(key = {
                "mapredNetworkName" }, mandatory = false, help = "Network Name for MapReduce traffic") final String mapredNetworkName,
        @CliOption(key = {
                "topology" }, mandatory = false, help = "You must specify the topology type: HVE or RACK_AS_RACK or HOST_AS_RACK") final String topology,
        @CliOption(key = {
                "resume" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "flag to resume cluster creation") final boolean resume,
        @CliOption(key = {
                "skipConfigValidation" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Skip cluster configuration validation. ") final boolean skipConfigValidation,
        @CliOption(key = {
                "yes" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Answer 'yes' to all Y/N questions. ") final boolean alwaysAnswerYes,
        @CliOption(key = {
                "password" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "Answer 'yes' to set password for all VMs in this cluster.") final boolean setClusterPassword,
        @CliOption(key = {
                "localRepoURL" }, mandatory = false, help = "Local yum server URL for application managers, ClouderaManager/Ambari.") final String localRepoURL,
        @CliOption(key = {
                "adminGroupName" }, mandatory = false, help = "AD/LDAP Admin Group Name.") final String adminGroupName,
        @CliOption(key = {
                "userGroupName" }, mandatory = false, help = "AD/LDAP User Group Name.") final String userGroupName,
        @CliOption(key = {
                "disableLocalUsers" }, mandatory = false, help = "Disable local users") final Boolean disableLocalUsersFlag,
        @CliOption(key = {
                "skipVcRefresh" }, mandatory = false, help = "flag to skip refreshing VC resources") final Boolean skipVcRefresh,
        @CliOption(key = {
                "template" }, mandatory = false, help = "The node template name") final String templateName) {
    // validate the name
    if (name.indexOf("-") != -1) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                Constants.OUTPUT_OP_RESULT_FAIL,
                Constants.PARAM_CLUSTER + Constants.PARAM_NOT_CONTAIN_HORIZONTAL_LINE);
        return;
    } else if (name.indexOf(" ") != -1) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                Constants.OUTPUT_OP_RESULT_FAIL,
                Constants.PARAM_CLUSTER + Constants.PARAM_NOT_CONTAIN_BLANK_SPACE);
        return;
    }

    // process resume
    if (resume && setClusterPassword) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                Constants.OUTPUT_OP_RESULT_FAIL, Constants.RESUME_DONOT_NEED_SET_PASSWORD);
        return;
    } else if (resume) {
        resumeCreateCluster(name, skipVcRefresh);
        return;
    }

    // build ClusterCreate object
    ClusterCreate clusterCreate = new ClusterCreate();
    clusterCreate.setName(name);

    if (!CommandsUtils.isBlank(appManager) && !Constants.IRONFAN.equalsIgnoreCase(appManager)) {
        AppManagerRead appManagerRead = appManagerRestClient.get(appManager);
        if (appManagerRead == null) {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                    Constants.OUTPUT_OP_RESULT_FAIL,
                    appManager + " cannot be found in the list of application managers.");
            return;
        }
    }

    if (CommandsUtils.isBlank(appManager)) {
        clusterCreate.setAppManager(Constants.IRONFAN);
    } else {
        clusterCreate.setAppManager(appManager);
        // local yum repo url for 3rd party app managers like ClouderaMgr, Ambari etc.
        if (!CommandsUtils.isBlank(localRepoURL)) {
            clusterCreate.setLocalRepoURL(localRepoURL);
        }
    }

    if (setClusterPassword) {
        String password = getPassword();
        // user wants to set a password but failed to enter
        // a valid one; quit cluster create
        if (password == null) {
            return;
        } else {
            clusterCreate.setPassword(password);
        }
    }

    if (type != null) {
        ClusterType clusterType = ClusterType.getByDescription(type);
        if (clusterType == null) {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                    Constants.OUTPUT_OP_RESULT_FAIL, Constants.INVALID_VALUE + " " + "type=" + type);
            return;
        }
        clusterCreate.setType(clusterType);
    } else if (specFilePath == null) {
        // create Hadoop (HDFS + MapReduce) cluster as default
        clusterCreate.setType(ClusterType.HDFS_MAPRED);
    }

    TopologyType policy = null;
    if (topology != null) {
        policy = validateTopologyValue(name, topology);
        if (policy == null) {
            return;
        }
    } else {
        policy = TopologyType.NONE;
    }
    clusterCreate.setTopologyPolicy(policy);

    DistroRead distroRead4Create;
    try {
        if (distro != null) {
            DistroRead[] distroReads = appManagerRestClient.getDistros(clusterCreate.getAppManager());
            distroRead4Create = getDistroByName(distroReads, distro);

            if (distroRead4Create == null) {
                CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                        Constants.OUTPUT_OP_RESULT_FAIL,
                        Constants.PARAM_DISTRO + Constants.PARAM_NOT_SUPPORTED + getDistroNames(distroReads));
                return;
            }
        } else {
            distroRead4Create = appManagerRestClient.getDefaultDistro(clusterCreate.getAppManager());
            if (distroRead4Create == null || CommandsUtils.isBlank(distroRead4Create.getName())) {
                CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                        Constants.OUTPUT_OP_RESULT_FAIL, Constants.PARAM_NO_DEFAULT_DISTRO);
                return;
            }
        }
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
        return;
    }

    Map<String, Map<String, String>> infraConfigs = new HashMap<String, Map<String, String>>();

    if (StringUtils.isBlank(adminGroupName) && StringUtils.isBlank(userGroupName)) {
        // both adminGroupName and userGroupName are blank, so assume there is no need to enable LDAP
    } else if (!StringUtils.isBlank(adminGroupName) && !StringUtils.isBlank(userGroupName)) {
        if (MapUtils.isEmpty(infraConfigs.get(UserMgmtConstants.LDAP_USER_MANAGEMENT))) {
            initInfraConfigs(infraConfigs, disableLocalUsersFlag);
        }
        Map<String, String> userMgmtConfig = infraConfigs.get(UserMgmtConstants.LDAP_USER_MANAGEMENT);
        userMgmtConfig.put(UserMgmtConstants.ADMIN_GROUP_NAME, adminGroupName);
        userMgmtConfig.put(UserMgmtConstants.USER_GROUP_NAME, userGroupName);
        clusterCreate.setInfrastructure_config(infraConfigs);
    } else {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                Constants.OUTPUT_OP_RESULT_FAIL, "You need to supply both AdminGroupName and UserGroupName.");
        return;
    }

    clusterCreate.setDistro(distroRead4Create.getName());
    clusterCreate.setDistroVendor(distroRead4Create.getVendor());
    clusterCreate.setDistroVersion(distroRead4Create.getVersion());

    clusterCreate.setTemplateName(templateName);

    if (rpNames != null) {
        List<String> rpNamesList = CommandsUtils.inputsConvert(rpNames);
        if (rpNamesList.isEmpty()) {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                    Constants.OUTPUT_OP_RESULT_FAIL,
                    Constants.INPUT_RPNAMES_PARAM + Constants.MULTI_INPUTS_CHECK);
            return;
        } else {
            clusterCreate.setRpNames(rpNamesList);
        }
    }
    if (dsNames != null) {
        List<String> dsNamesList = CommandsUtils.inputsConvert(dsNames);
        if (dsNamesList.isEmpty()) {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                    Constants.OUTPUT_OP_RESULT_FAIL,
                    Constants.INPUT_DSNAMES_PARAM + Constants.MULTI_INPUTS_CHECK);
            return;
        } else {
            clusterCreate.setDsNames(dsNamesList);
        }
    }
    List<String> failedMsgList = new ArrayList<String>();
    List<String> warningMsgList = new ArrayList<String>();
    Set<String> allNetworkNames = new HashSet<String>();
    try {
        if (specFilePath != null) {
            ClusterCreate clusterSpec = CommandsUtils.getObjectByJsonString(ClusterCreate.class,
                    CommandsUtils.dataFromFile(specFilePath));
            clusterCreate.setSpecFile(true);
            clusterCreate.setExternalHDFS(clusterSpec.getExternalHDFS());
            clusterCreate.setExternalMapReduce(clusterSpec.getExternalMapReduce());
            clusterCreate.setExternalNamenode(clusterSpec.getExternalNamenode());
            clusterCreate.setExternalSecondaryNamenode(clusterSpec.getExternalSecondaryNamenode());
            clusterCreate.setExternalDatanodes(clusterSpec.getExternalDatanodes());
            clusterCreate.setNodeGroups(clusterSpec.getNodeGroups());
            clusterCreate.setConfiguration(clusterSpec.getConfiguration());
            // TODO: We'd better merge validateConfiguration with validateClusterSpec to avoid repeated validation.
            if (CommandsUtils.isBlank(appManager) || Constants.IRONFAN.equalsIgnoreCase(appManager)) {
                validateConfiguration(clusterCreate, skipConfigValidation, warningMsgList, failedMsgList);
            }
            clusterCreate.validateNodeGroupNames();
            if (!validateHAInfo(clusterCreate.getNodeGroups())) {
                CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                        Constants.OUTPUT_OP_RESULT_FAIL, Constants.PARAM_CLUSTER_SPEC_HA_ERROR + specFilePath);
                return;
            }

            Map<String, Map<String, String>> specInfraConfigs = clusterSpec.getInfrastructure_config();
            if (!MapUtils.isEmpty(specInfraConfigs)) //spec infra config is not empty
            {
                if (MapUtils.isNotEmpty(infraConfigs)) {
                    System.out.println(
                            "adminGroup and userGroup has been specified as commandline parameters, so the values inside spec file will be ignored.");
                } else {
                    clusterCreate.setInfrastructure_config(specInfraConfigs);
                }
            }
            Map<String, Object> configuration = clusterSpec.getConfiguration();
            if (MapUtils.isNotEmpty(configuration)) {
                Map<String, Map<String, String>> serviceUserConfig = (Map<String, Map<String, String>>) configuration
                        .get(UserMgmtConstants.SERVICE_USER_CONFIG_IN_SPEC_FILE);
                if (MapUtils.isNotEmpty(serviceUserConfig)) {
                    // user didn't specify LDAP on the command line or in the spec file, but did specify an LDAP user in the service user config
                    if (hasLdapServiceUser(serviceUserConfig)
                            && (clusterCreate.getInfrastructure_config() == null)) {
                        Map<String, Map<String, String>> infraConfig = new HashMap<>();
                        initInfraConfigs(infraConfig, disableLocalUsersFlag);
                        clusterCreate.setInfrastructure_config(infraConfig);
                    }
                    validateServiceUserConfigs(appManager, clusterSpec, failedMsgList);
                }
            }

        }
        allNetworkNames = getAllNetworkNames();
    } catch (Exception e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
        return;
    }

    if (allNetworkNames.isEmpty()) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                Constants.OUTPUT_OP_RESULT_FAIL, Constants.PARAM_CANNOT_FIND_NETWORK);
        return;
    }

    LinkedHashMap<NetTrafficType, List<String>> networkConfig = new LinkedHashMap<NetTrafficType, List<String>>();
    if (networkName == null) {
        if (allNetworkNames.size() == 1) {
            networkConfig.put(NetTrafficType.MGT_NETWORK, new ArrayList<String>());
            networkConfig.get(NetTrafficType.MGT_NETWORK).addAll(allNetworkNames);
        } else {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                    Constants.OUTPUT_OP_RESULT_FAIL,
                    Constants.PARAM_NETWORK_NAME + Constants.PARAM_NOT_SPECIFIED);
            return;
        }
    } else {
        if (!allNetworkNames.contains(networkName)
                || (hdfsNetworkName != null && !allNetworkNames.contains(hdfsNetworkName))
                || (mapredNetworkName != null && !allNetworkNames.contains(mapredNetworkName))) {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                    Constants.OUTPUT_OP_RESULT_FAIL,
                    Constants.PARAM_NETWORK_NAME + Constants.PARAM_NOT_SUPPORTED + allNetworkNames.toString());
            return;
        }

        networkConfig.put(NetTrafficType.MGT_NETWORK, new ArrayList<String>());
        networkConfig.get(NetTrafficType.MGT_NETWORK).add(networkName);

        if (hdfsNetworkName != null) {
            networkConfig.put(NetTrafficType.HDFS_NETWORK, new ArrayList<String>());
            networkConfig.get(NetTrafficType.HDFS_NETWORK).add(hdfsNetworkName);
        }

        if (mapredNetworkName != null) {
            networkConfig.put(NetTrafficType.MAPRED_NETWORK, new ArrayList<String>());
            networkConfig.get(NetTrafficType.MAPRED_NETWORK).add(mapredNetworkName);
        }
    }
    notifyNetsUsage(networkConfig, warningMsgList);
    clusterCreate.setNetworkConfig(networkConfig);

    clusterCreate.validateCDHVersion(warningMsgList);

    // Validate that the specified file is in correct JSON format and contains proper values.
    //TODO(qjin): 1, in validateClusterCreate, implement roles check and validation
    //            2, consider use service to validate configuration for different appManager
    if (specFilePath != null) {
        validateClusterSpec(clusterCreate, failedMsgList, warningMsgList);
    }

    // give a warning message if both type and specFilePath are specified
    if (type != null && specFilePath != null) {
        warningMsgList.add(Constants.TYPE_SPECFILE_CONFLICT);
    }

    if (!failedMsgList.isEmpty()) {
        showFailedMsg(clusterCreate.getName(), Constants.OUTPUT_OP_CREATE, failedMsgList);
        return;
    }

    // rest invocation
    try {
        if (!CommandsUtils.showWarningMsg(clusterCreate.getName(), Constants.OUTPUT_OBJECT_CLUSTER,
                Constants.OUTPUT_OP_CREATE, warningMsgList, alwaysAnswerYes, null)) {
            return;
        }
        restClient.create(clusterCreate, BooleanUtils.toBoolean(skipVcRefresh));
        CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_RESULT_CREAT);
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_CREATE,
                Constants.OUTPUT_OP_RESULT_FAIL, CommandsUtils.getExceptionMessage(e));
        return;
    }

    // check the instant clone type and the HA configuration for node groups
    // currently there are limitations on HA support with instant clone, so we will
    // display a warning message for instant clone with HA function
    ClusterRead cluster = restClient.get(name, false);
    if (cluster != null) {
        String cloneType = cluster.getClusterCloneType();
        String INSTANT_CLONE = com.vmware.bdd.utils.Constants.CLUSTER_CLONE_TYPE_INSTANT_CLONE;
        if (null != cloneType && cloneType.equals(INSTANT_CLONE)) {
            String warningMsg = validateInstantCloneWithHA(specFilePath, clusterCreate);
            if (!CommonUtil.isBlank(warningMsg)) {
                System.out.println(warningMsg);
            }
        }
    }
}

From source file:com.cwctravel.hudson.plugins.extended_choice_parameter_artifactory.ExtendedChoiceParameterArtifactoryDefinition.java

LinkedHashMap<String, LinkedHashSet<String>> calculateChoicesByDropdownId() throws Exception {
    List<String[]> fileLines = Collections.emptyList();
    if (propertyFileMulti != null) {
        File file = new File(propertyFileMulti);
        if (file.isFile()) {
            CSVReader csvReader = null;
            try {
                csvReader = new CSVReader(new FileReader(file), '\t');
                fileLines = csvReader.readAll();
            } finally {
                if (csvReader != null) { // guard against a failed constructor leaving csvReader null
                    csvReader.close();
                }
            }
        } else {
            URL propertyFileUrl = new URL(propertyFileMulti);
            CSVReader csvReader = null;
            try {
                csvReader = new CSVReader(new InputStreamReader(propertyFileUrl.openStream()), '\t');
                fileLines = csvReader.readAll();
            } finally {
                if (csvReader != null) { // guard against a failed constructor leaving csvReader null
                    csvReader.close();
                }
            }
        }
    } else if (artifactoryURL != null) {
        String[] repos = artifactoryRepositories.split(",");
        HttpClient httpclient = new HttpClient();
        fileLines = new ArrayList();
        propertyValueMulti = "REPOSITORY,ARTIFACT";
        fileLines.add(propertyValueMulti.split(","));
        for (String repo : repos) {
            try {
                GetMethod method = new GetMethod(artifactoryURL + "/artifactory/api/storage/" + repo);
                httpclient.executeMethod(method);

                if (method != null) {
                    String jsonString = new String(method.getResponseBody());
                    org.json.JSONObject obj = new org.json.JSONObject(jsonString);
                    org.json.JSONArray array = obj.getJSONArray("children");
                    List<String> artifacts;
                    for (int i = 0; i < array.length(); i++) {
                        if (array.getJSONObject(i).getString("folder").equals("false")) {
                            Boolean pass = true;
                            if (mustExclude != null && !mustExclude.equals("")) {
                                for (String exclude : mustExclude.split(",")) {
                                    if (array.getJSONObject(i).getString("uri").contains(exclude)) {
                                        pass = false;
                                        break;
                                    }
                                }
                            }
                            if (mustInclude != null && !mustInclude.equals("")) {
                                for (String include : mustInclude.split(",")) {
                                    if (!array.getJSONObject(i).getString("uri").contains(include)) {
                                        pass = false;
                                        break;
                                    }
                                }
                            }
                            if (pass) {
                                fileLines.add(new String[] { repo,
                                        array.getJSONObject(i).getString("uri").substring(1) });
                            }
                        }
                    }

                }
            } catch (Exception e) {
                continue;
            }
        }
    }

    if (fileLines.size() < 2) {
        throw new Exception("Multi level tab delimited file must have at least 2 "
                + "lines (one for the header, and one or more for the data)");
    }

    ArrayList<Integer> columnIndicesForDropDowns = columnIndicesForDropDowns(fileLines.get(0));

    List<String[]> dataLines = fileLines.subList(1, fileLines.size());

    LinkedHashMap<String, LinkedHashSet<String>> choicesByDropdownId = new LinkedHashMap<String, LinkedHashSet<String>>();

    String prefix = getName() + " dropdown MultiLevelMultiSelect 0";
    choicesByDropdownId.put(prefix, new LinkedHashSet<String>());

    for (int i = 0; i < columnIndicesForDropDowns.size(); ++i) {
        String prettyCurrentColumnName = propertyValueMulti.split(",")[i];
        prettyCurrentColumnName = prettyCurrentColumnName.toLowerCase();
        prettyCurrentColumnName = prettyCurrentColumnName.replace("_", " ");

        for (String[] dataLine : dataLines) {
            String priorLevelDropdownId = prefix;
            String currentLevelDropdownId = prefix;

            int column = 0;
            for (int j = 0; j <= i; ++j) {
                column = columnIndicesForDropDowns.get(j);

                if (j < i) {
                    priorLevelDropdownId += " " + dataLine[column];
                }
                currentLevelDropdownId += " " + dataLine[column];
            }
            if (i != columnIndicesForDropDowns.size() - 1) {
                choicesByDropdownId.put(currentLevelDropdownId, new LinkedHashSet<String>());
            }
            LinkedHashSet<String> choicesForPriorDropdown = choicesByDropdownId.get(priorLevelDropdownId);
            choicesForPriorDropdown.add("Select a " + prettyCurrentColumnName + "...");
            choicesForPriorDropdown.add(dataLine[column]);
        }
    }

    return choicesByDropdownId;
}

From source file:fr.cirad.mgdb.exporting.markeroriented.EigenstratExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    // long before = System.currentTimeMillis();

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);
    File snpFile = null;

    try {
        snpFile = File.createTempFile("snpFile", "");
        FileWriter snpFileWriter = new FileWriter(snpFile);

        ZipOutputStream zos = new ZipOutputStream(outputStream);
        if (ByteArrayOutputStream.class.isAssignableFrom(outputStream.getClass()))
            zos.setLevel(ZipOutputStream.STORED);

        if (readyToExportFiles != null)
            for (String readyToExportFile : readyToExportFiles.keySet()) {
                zos.putNextEntry(new ZipEntry(readyToExportFile));
                InputStream inputStream = readyToExportFiles.get(readyToExportFile);
                byte[] dataBlock = new byte[1024];
                int count = inputStream.read(dataBlock, 0, 1024);
                while (count != -1) {
                    zos.write(dataBlock, 0, count);
                    count = inputStream.read(dataBlock, 0, 1024);
                }
            }

        MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
        int markerCount = markerCursor.count();

        List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);

        ArrayList<String> individualList = new ArrayList<String>();
        StringBuffer indFileContents = new StringBuffer();

        for (int i = 0; i < sampleIDs.size(); i++) {
            Individual individual = individuals.get(i);
            if (!individualList.contains(individual.getId())) {
                individualList.add(individual.getId());
                indFileContents
                        .append(individual.getId() + "\t" + getIndividualGenderCode(sModule, individual.getId())
                                + "\t" + (individual.getPopulation() == null ? "." : individual.getPopulation())
                                + LINE_SEPARATOR);
            }
        }

        String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
        zos.putNextEntry(new ZipEntry(exportName + ".ind"));
        zos.write(indFileContents.toString().getBytes());

        zos.putNextEntry(new ZipEntry(exportName + ".eigenstratgeno"));

        int avgObjSize = (Integer) mongoTemplate
                .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats()
                .get("avgObjSize");
        int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;

        while (markerCursor.hasNext()) {
            int nLoadedMarkerCountInLoop = 0;
            Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nChunkSize);
            while (markerCursor.hasNext()
                    && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
                markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                        refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                                + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }

            List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
            for (VariantData variant : variantsAndRuns.keySet()) // read data and write results into temporary files (one per sample)
            {
                Comparable variantId = variant.getId();

                List<String> chromAndPos = Helper.split(markerChromosomalPositions.get(variantId), ":");
                if (chromAndPos.size() == 0)
                    LOG.warn("Chromosomal position not found for marker " + variantId);
                // LOG.debug(marker + "\t" + (chromAndPos.length == 0 ? "0" : chromAndPos[0]) + "\t" + 0 + "\t" + (chromAndPos.length == 0 ? 0l : Long.parseLong(chromAndPos[1])) + LINE_SEPARATOR);
                if (markerSynonyms != null) {
                    Comparable syn = markerSynonyms.get(variantId);
                    if (syn != null)
                        variantId = syn;
                }
                snpFileWriter.write(variantId + "\t" + (chromAndPos.size() == 0 ? "0" : chromAndPos.get(0))
                        + "\t" + 0 + "\t" + (chromAndPos.size() == 0 ? 0l : Long.parseLong(chromAndPos.get(1)))
                        + LINE_SEPARATOR);

                Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
                Collection<VariantRunData> runs = variantsAndRuns.get(variant);
                if (runs != null)
                    for (VariantRunData run : runs)
                        for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                            SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                            String individualId = individuals
                                    .get(sampleIDs
                                            .indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                    .getId();

                            Integer gq = null;
                            try {
                                gq = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ);
                            } catch (Exception ignored) {
                            }
                            if (gq != null && gq < nMinimumGenotypeQuality)
                                continue;

                            Integer dp = null;
                            try {
                                dp = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP);
                            } catch (Exception ignored) {
                            }
                            if (dp != null && dp < nMinimumReadDepth)
                                continue;

                            String gtCode = sampleGenotype.getCode();
                            List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                            if (storedIndividualGenotypes == null) {
                                storedIndividualGenotypes = new ArrayList<String>();
                                individualGenotypes.put(individualId, storedIndividualGenotypes);
                            }
                            storedIndividualGenotypes.add(gtCode);
                        }

                for (int j = 0; j < individualList
                        .size(); j++ /* we use this list because it has the proper ordering*/) {
                    String individualId = individualList.get(j);
                    List<String> genotypes = individualGenotypes.get(individualId);
                    HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                    int highestGenotypeCount = 0;
                    String mostFrequentGenotype = null;
                    if (genotypes != null)
                        for (String genotype : genotypes) {
                            if (genotype.length() == 0)
                                continue; /* skip missing genotypes */

                            int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                            if (gtCount > highestGenotypeCount) {
                                highestGenotypeCount = gtCount;
                                mostFrequentGenotype = genotype;
                            }
                            genotypeCounts.put(genotype, gtCount);
                        }

                    List<String> alleles = mostFrequentGenotype == null ? new ArrayList<String>()
                            : variant.getAllelesFromGenotypeCode(mostFrequentGenotype);

                    int nOutputCode = 0;
                    if (mostFrequentGenotype == null)
                        nOutputCode = 9;
                    else
                        for (String all : Helper.split(mostFrequentGenotype, "/"))
                            if ("0".equals(all))
                                nOutputCode++;
                    if (j == 0 && variant.getKnownAlleleList().size() > 2)
                        warningFileWriter.write("- Variant " + variant.getId()
                                + " is multi-allelic. Make sure Eigenstrat genotype encoding specifications are suitable for you.\n");
                    zos.write(("" + nOutputCode).getBytes());

                    if (genotypeCounts.size() > 1 || alleles.size() > 2) {
                        if (genotypeCounts.size() > 1)
                            warningFileWriter.write("- Dissimilar genotypes found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting most frequent: " + nOutputCode + "\n");
                        if (alleles.size() > 2)
                            warningFileWriter.write("- More than 2 alleles found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting only the first 2 alleles.\n");
                    }
                }
                zos.write((LINE_SEPARATOR).getBytes());
            }

            if (progress.hasAborted())
                return;

            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                // if (nProgress%5 == 0)
                //    LOG.info("============= exportData: " + nProgress + "% =============" + (System.currentTimeMillis() - before)/1000 + "s");
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }

        snpFileWriter.close();
        zos.putNextEntry(new ZipEntry(exportName + ".snp"));
        BufferedReader in = new BufferedReader(new FileReader(snpFile));
        String sLine;
        while ((sLine = in.readLine()) != null)
            zos.write((sLine + "\n").getBytes());
        in.close();

        warningFileWriter.close();
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            in = new BufferedReader(new FileReader(warningFile));
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();

        zos.close();
        progress.setCurrentStepProgress((short) 100);
    } finally {
        if (snpFile != null && snpFile.exists())
            snpFile.delete();
    }
}

From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java

/**
 * Test method for 'java.util.LinkedHashMap.putAll(Map)'.
 */
public void testPutAll() {
    LinkedHashMap<String, String> srcMap = new LinkedHashMap<String, String>();
    checkEmptyLinkedHashMapAssumptions(srcMap);

    srcMap.put(KEY_1, VALUE_1);
    srcMap.put(KEY_2, VALUE_2);
    srcMap.put(KEY_3, VALUE_3);

    // Make sure that the data is copied correctly
    LinkedHashMap<String, String> dstMap = new LinkedHashMap<String, String>();
    checkEmptyLinkedHashMapAssumptions(dstMap);

    dstMap.putAll(srcMap);
    assertEquals(srcMap.size(), dstMap.size());
    assertTrue(dstMap.containsKey(KEY_1));
    assertTrue(dstMap.containsValue(VALUE_1));
    assertFalse(dstMap.containsKey(KEY_1.toUpperCase(Locale.ROOT)));
    assertFalse(dstMap.containsValue(VALUE_1.toUpperCase(Locale.ROOT)));

    assertTrue(dstMap.containsKey(KEY_2));
    assertTrue(dstMap.containsValue(VALUE_2));
    assertFalse(dstMap.containsKey(KEY_2.toUpperCase(Locale.ROOT)));
    assertFalse(dstMap.containsValue(VALUE_2.toUpperCase(Locale.ROOT)));

    assertTrue(dstMap.containsKey(KEY_3));
    assertTrue(dstMap.containsValue(VALUE_3));
    assertFalse(dstMap.containsKey(KEY_3.toUpperCase(Locale.ROOT)));
    assertFalse(dstMap.containsValue(VALUE_3.toUpperCase(Locale.ROOT)));

    // Check that an empty map does not blow away the contents of the
    // destination map
    LinkedHashMap<String, String> emptyMap = new LinkedHashMap<String, String>();
    checkEmptyLinkedHashMapAssumptions(emptyMap);
    dstMap.putAll(emptyMap);
    assertTrue(dstMap.size() == srcMap.size());

    // Check that putAll overwrites any existing mapping in the destination map
    srcMap.put(KEY_1, VALUE_2);
    srcMap.put(KEY_2, VALUE_3);
    srcMap.put(KEY_3, VALUE_1);

    dstMap.putAll(srcMap);
    assertEquals(dstMap.size(), srcMap.size());
    assertEquals(dstMap.get(KEY_1), VALUE_2);
    assertEquals(dstMap.get(KEY_2), VALUE_3);
    assertEquals(dstMap.get(KEY_3), VALUE_1);

    // Check that putAll adds data but does not remove existing entries

    srcMap.put(KEY_4, VALUE_4);
    dstMap.putAll(srcMap);
    assertEquals(dstMap.size(), srcMap.size());
    assertTrue(dstMap.containsKey(KEY_4));
    assertTrue(dstMap.containsValue(VALUE_4));
    assertEquals(dstMap.get(KEY_1), VALUE_2);
    assertEquals(dstMap.get(KEY_2), VALUE_3);
    assertEquals(dstMap.get(KEY_3), VALUE_1);
    assertEquals(dstMap.get(KEY_4), VALUE_4);

    dstMap.putAll(dstMap);
}

From source file:com.tao.realweb.util.StringUtil.java

/**
 * Joins the map's key=value pairs into a query string (e.g. a=1, b=2 => a=1&b=2).
 *
 * @param map the ordered key/value pairs to join
 * @return the joined query string, or null if the map is null or empty
 */
public static String linkedHashMapToString(LinkedHashMap<String, String> map) {
    if (map != null && map.size() > 0) {
        String result = "";
        Iterator it = map.keySet().iterator();
        while (it.hasNext()) {
            String name = (String) it.next();
            String value = (String) map.get(name);
            result += (result.equals("")) ? "" : "&";
            result += String.format("%s=%s", name, value);
        }
        return result;
    }
    return null;
}
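
A quick usage sketch for the helper above, assuming com.tao.realweb.util.StringUtil (the class this example comes from) is on the classpath; the map contents are arbitrary sample data:

import java.util.LinkedHashMap;
import com.tao.realweb.util.StringUtil;

public class QueryStringDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, String> params = new LinkedHashMap<String, String>();
        params.put("a", "1");
        params.put("b", "2");
        // Insertion order is preserved, so get() sees the keys in insertion order
        // and the result is "a=1&b=2".
        System.out.println(StringUtil.linkedHashMapToString(params));
    }
}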

From source file:aldenjava.opticalmapping.data.mappingresult.OptMapResultNode.java

public static List<OptMapResultNode> reconstruct(LinkedHashMap<String, DataNode> optrefmap,
        OptMapResultNode map) {
    // Only reconstruct the subrefstart,.... and subfragratio, and cigar string
    int direction = map.mappedstrand;
    List<OptMapResultNode> fragmentmaplist = new ArrayList<OptMapResultNode>();

    // String precigar = Cigar.convertpreCIGAR(map.cigar);
    String precigar = map.cigar.getPrecigar();
    // if (direction == -1)
    // precigar = StringUtils.reverse(precigar);
    String[] cigarlist = precigar.split("S");
    for (int i = 1; i <= cigarlist.length; i += 2) {
        int subrefstart = -1;
        int subrefstop = -1;
        int subfragstart = -1;
        int subfragstop = -1;
        // double scale = 0;
        if (i - 2 < 0) {
            subrefstart = map.subrefstart;
            subfragstart = map.subfragstart;
        } else {
            StringBuilder concat = new StringBuilder();
            for (int j = 0; j < i - 1; j++)
                concat.append(cigarlist[j]);
            String previousprecigar = concat.toString();
            int match = Cigar.getCertainNumberFromPrecigar(previousprecigar, 'M');
            int insert = Cigar.getCertainNumberFromPrecigar(previousprecigar, 'I');
            int delete = Cigar.getCertainNumberFromPrecigar(previousprecigar, 'D');
            subfragstart = map.subfragstart + (match + insert) * direction;
            subrefstart = map.subrefstart + match + delete;
        }
        String recentprecigar = cigarlist[i - 1];
        if (i == cigarlist.length) {
            subrefstop = map.subrefstop;
            subfragstop = map.subfragstop;
        } else {
            int match = Cigar.getCertainNumberFromPrecigar(recentprecigar, 'M');
            int insert = Cigar.getCertainNumberFromPrecigar(recentprecigar, 'I');
            int delete = Cigar.getCertainNumberFromPrecigar(recentprecigar, 'D');
            subfragstop = subfragstart + (match + insert - 1) * direction;
            subrefstop = subrefstart + match + delete - 1;
        }
        DataNode ref = optrefmap.get(map.mappedRegion.ref);
        long estimatestartpos = ref.refp[subrefstart - 1];
        long estimatestoppos = ref.refp[subrefstop];

        Cigar newcigar = new Cigar(recentprecigar);
        fragmentmaplist.add(new OptMapResultNode(map.parentFrag,
                new GenomicPosNode(map.mappedRegion.ref, estimatestartpos, estimatestoppos), map.mappedstrand,
                subrefstart, subrefstop, subfragstart, subfragstop, newcigar, map.mappedscore, -1));
    }
    return fragmentmaplist;
}

From source file:com.skysql.manager.api.Monitors.java

private void parseMonitors(JsonArray array,
        LinkedHashMap<String, LinkedHashMap<String, MonitorRecord>> monitorsMap) {

    for (int i = 0; i < array.size(); i++) {
        JsonObject jsonObject = array.get(i).getAsJsonObject();
        JsonElement element;
        String systemType = (element = jsonObject.get("systemtype")).isJsonNull() ? null
                : element.getAsString();
        String id = (element = jsonObject.get("monitor")).isJsonNull() ? null : element.getAsString();
        String name = (element = jsonObject.get("name")).isJsonNull() ? null : element.getAsString();
        String description = (element = jsonObject.get("description")).isJsonNull() ? null
                : element.getAsString();
        String unit = (element = jsonObject.get("unit")).isJsonNull() ? null : element.getAsString();
        String monitorType = (element = jsonObject.get("monitortype")).isJsonNull() ? null
                : element.getAsString();
        boolean delta = (element = jsonObject.get("delta")).isJsonNull() ? false : element.getAsBoolean();
        boolean average = (element = jsonObject.get("systemaverage")).isJsonNull() ? false
                : element.getAsBoolean();
        String chartType = (element = jsonObject.get("charttype")).isJsonNull() ? null : element.getAsString();
        String intervalString = (element = jsonObject.get("interval")).isJsonNull() ? null
                : element.getAsString();
        int interval = (intervalString != null && !intervalString.isEmpty()) ? Integer.valueOf(intervalString)
                : 0;
        String sql = (element = jsonObject.get("sql")).isJsonNull() ? null : element.getAsString();
        for (PermittedMonitorType permitted : PermittedMonitorType.values()) {
            if (permitted.name().equals(monitorType)) {
                MonitorRecord monitorRecord = new MonitorRecord(systemType, id, name, description, unit,
                        monitorType, delta, average, chartType, interval, sql);
                monitorsMap.get(systemType).put(id, monitorRecord);
                break;
            }
        }

    }

}
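
The call monitorsMap.get(systemType).put(id, monitorRecord) above assumes an inner map already exists for every systemType; if it does not, get returns null and the put throws a NullPointerException. A hedged sketch of one defensive variant, using String values as a stand-in for MonitorRecord (all names illustrative):

import java.util.LinkedHashMap;

public class NestedMapSketch {
    public static void main(String[] args) {
        LinkedHashMap<String, LinkedHashMap<String, String>> monitorsMap =
                new LinkedHashMap<String, LinkedHashMap<String, String>>();

        String systemType = "galera";
        String id = "connections";

        // Create the inner map on first use instead of assuming get() is non-null
        LinkedHashMap<String, String> inner = monitorsMap.get(systemType);
        if (inner == null) {
            inner = new LinkedHashMap<String, String>();
            monitorsMap.put(systemType, inner);
        }
        inner.put(id, "monitor record placeholder");

        System.out.println(monitorsMap.get(systemType).get(id));
    }
}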

From source file:service.EventService.java

public void changeAllUsersCampaignAssignation(Long campaignId, Long userFromId, String[] clientNumArray,
        Long[] userIdArray, Long pkId) {
    if (pkId != null && getCampaign(campaignId) != null
            && getCampaign(campaignId).getCabinet().getPkId().equals(pkId)) {
        List<Event> evs;
        if (userFromId != null) {
            evs = eventDao.getAssignedNotClosedUserEvents(campaignId, userFromId, pkId);
        } else {
            evs = eventDao.getEventListNotProcessed(campaignId, pkId);
        }

        if (userIdArray != null && clientNumArray != null) {
            LinkedHashMap<Long, Integer> userIdCountAssignedMap = new LinkedHashMap<Long, Integer>();
            List<Event> eventsForUpdate = new ArrayList<Event>();
            PersonalCabinet pk = personalCabinetDao.find(pkId);
            int summClient = 0;
            if (userIdArray.length > 0 && evs.size() > 0 && clientNumArray.length > 0) {
                for (int i = 0; i < userIdArray.length; i++) {
                    if (clientNumArray.length > i) {
                        int count = StringAdapter.toInteger(clientNumArray[i]);
                        summClient += count;
                        userIdCountAssignedMap.put(userIdArray[i], count);
                    } else {
                        userIdCountAssignedMap.put(userIdArray[i], 0);
                    }
                }
                int sindx = 0;
                if (summClient <= evs.size()) {
                    for (Long userId : userIdCountAssignedMap.keySet()) {
                        Integer eventsCountToAssign = userIdCountAssignedMap.get(userId);
                        User user = userDao.getUserBelongsPk(pk, userId);
                        if (user != null) {
                            for (int supCount = 0; supCount < eventsCountToAssign; supCount++) {
                                Event ev = evs.get(sindx);
                                if (ev != null && supCount < eventsCountToAssign) {
                                    ev.setUser(user);
                                    ev.setStatus(Event.ASSIGNED);
                                    if (validate(ev)) {
                                        eventsForUpdate.add(ev);
                                        sindx++;
                                    }
                                }
                            }
                        } else {
                            addError("!  id:" + userId
                                    + "     !");
                        }
                    }
                    for (Event ev : eventsForUpdate) {
                        eventDao.update(ev);
                        User u = ev.getUser();
                        addEventComment("   " + u.getShortName() + "("
                                + u.getEmail() + ")", EventComment.ASSIGN, ev, pkId);
                    }
                } else {
                    addError("?  " + summClient
                            + "  ?  : " + evs.size());
                }
            }
        }
    } else {
        addError(
                "Campaign not found or does not belong to this personal cabinet!");
    }
}
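
The assignment loop above walks userIdCountAssignedMap in insertion order and uses get(userId) to look up how many events each user should receive. A stripped-down, self-contained sketch of that ordered count lookup (all names and values hypothetical):

import java.util.LinkedHashMap;

public class AssignmentCountSketch {
    public static void main(String[] args) {
        // Keys keep the order in which the users were submitted
        LinkedHashMap<Long, Integer> userIdCountAssignedMap = new LinkedHashMap<Long, Integer>();
        userIdCountAssignedMap.put(10L, 3);
        userIdCountAssignedMap.put(20L, 0);
        userIdCountAssignedMap.put(30L, 2);

        for (Long userId : userIdCountAssignedMap.keySet()) {
            Integer eventsCountToAssign = userIdCountAssignedMap.get(userId);
            System.out.println("user " + userId + " -> " + eventsCountToAssign + " events");
        }
    }
}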

From source file:com.eviware.soapui.impl.wsdl.WsdlProject.java

public void importTestSuite(File file) {
    if (!file.exists()) {
        UISupport.showErrorMessage("Error loading test case ");
        return;
    }

    TestSuiteDocumentConfig newTestSuiteConfig = null;

    try {
        newTestSuiteConfig = TestSuiteDocumentConfig.Factory.parse(file);
    } catch (Exception e) {
        SoapUI.logError(e);
    }

    if (newTestSuiteConfig == null) {
        UISupport.showErrorMessage("Not valid test case xml");
    } else {
        TestSuiteConfig config = (TestSuiteConfig) projectDocument.getSoapuiProject().addNewTestSuite()
                .set(newTestSuiteConfig.getTestSuite());
        WsdlTestSuite testSuite = buildTestSuite(config);

        ModelSupport.unsetIds(testSuite);
        testSuite.afterLoad();

        /*
         * The security test keeps a reference to each test step by id, and those
         * ids change during import, so the old values need to be rewritten to the
         * new ones.
         * 
         * Create a translation table (old id, new id) and use it to replace all
         * old ids in the newly imported test case.
         * 
         * This has to be done for each test case separately.
         */
        for (int cnt2 = 0; cnt2 < config.getTestCaseList().size(); cnt2++) {
            TestCaseConfig newTestCase = config.getTestCaseList().get(cnt2);
            TestCaseConfig importTestCaseConfig = newTestSuiteConfig.getTestSuite().getTestCaseList().get(cnt2);
            LinkedHashMap<String, String> oldNewIds = new LinkedHashMap<String, String>();
            for (int cnt = 0; cnt < importTestCaseConfig.getTestStepList().size(); cnt++)
                oldNewIds.put(importTestCaseConfig.getTestStepList().get(cnt).getId(),
                        newTestCase.getTestStepList().get(cnt).getId());

            for (SecurityTestConfig scan : newTestCase.getSecurityTestList())
                for (TestStepSecurityTestConfig secStepConfig : scan.getTestStepSecurityTestList())
                    if (oldNewIds.containsKey(secStepConfig.getTestStepId()))
                        secStepConfig.setTestStepId(oldNewIds.get(secStepConfig.getTestStepId()));

        }
        testSuites.add(testSuite);
        fireTestSuiteAdded(testSuite);

        resolveImportedTestSuite(testSuite);
    }
}
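
The oldNewIds map above is a plain old-id to new-id translation table: containsKey checks whether a security scan still references an imported step, and get supplies the replacement. A minimal stand-alone sketch of the same remapping pattern with plain strings (ids are made up for illustration):

import java.util.LinkedHashMap;

public class IdRemapSketch {
    public static void main(String[] args) {
        LinkedHashMap<String, String> oldNewIds = new LinkedHashMap<String, String>();
        oldNewIds.put("step-old-1", "step-new-1");
        oldNewIds.put("step-old-2", "step-new-2");

        String referencedId = "step-old-2";
        if (oldNewIds.containsKey(referencedId)) {
            // Rewrite the stale reference to the id assigned during import
            referencedId = oldNewIds.get(referencedId);
        }
        System.out.println(referencedId); // step-new-2
    }
}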