Example usage for java.util LinkedHashMap entrySet

List of usage examples for java.util LinkedHashMap entrySet

Introduction

On this page you can find usage examples for java.util.LinkedHashMap#entrySet.

Prototype

public Set<Map.Entry<K, V>> entrySet() 

Source Link

Document

Returns a Set view of the mappings contained in this map.

Usage

From source file:com.opengamma.analytics.financial.interestrate.capletstripping.SABRTermStructureModelProvider.java

/**
 * General set up for a SABRTermStructureModelProvider.
 * <p>
 * Each SABR parameter curve ("alpha", "beta", "rho" and "nu") must be supplied either as a
 * curve to be fitted (an entry in both {@code knotPoints} and {@code interpolators}) or as a
 * known term structure (an entry in {@code knownParameterTermSturctures}) - but never both.
 * The XOR checks below enforce that exclusivity.
 * @param knotPoints Map between parameter curve names ("alpha", "beta", "rho" and "nu") and the positions of the knot points on each of those curves 
 * @param interpolators  Map between parameter curve names ("alpha", "beta", "rho" and "nu") and the interpolator used to describe that curve 
 * @param parameterTransforms  Map between parameter curve names ("alpha", "beta", "rho" and "nu") and the parameter transform used for that curve
 * @param knownParameterTermSturctures  Map between known curve names (could be "alpha", "beta", "rho" and "nu") and the known curve(s); may be null when all curves are fitted
 */
public SABRTermStructureModelProvider(LinkedHashMap<String, double[]> knotPoints,
        final LinkedHashMap<String, Interpolator1D> interpolators,
        final LinkedHashMap<String, ParameterLimitsTransform> parameterTransforms,
        final LinkedHashMap<String, InterpolatedDoublesCurve> knownParameterTermSturctures) {

    Validate.notNull(knotPoints, "null node points");
    Validate.notNull(interpolators, "null interpolators");
    Validate.isTrue(knotPoints.size() == interpolators.size(), "size mismatch between nodes and interpolators");
    // NOTE(review): parameterTransforms is dereferenced below (parameterTransforms.get(name))
    // without a null check - confirm callers always pass a non-null map, or that
    // TransformedInterpolator1D tolerates a null transform.

    if (knownParameterTermSturctures == null) {
        // No known curves supplied: all four parameter curves must be set up for fitting.
        Validate.isTrue(knotPoints.containsKey(ALPHA) && interpolators.containsKey(ALPHA),
                "alpha curve not found");
        Validate.isTrue(knotPoints.containsKey(BETA) && interpolators.containsKey(BETA),
                "beta curve not found");
        Validate.isTrue(knotPoints.containsKey(NU) && interpolators.containsKey(NU), "nu curve not found");
        Validate.isTrue(knotPoints.containsKey(RHO) && interpolators.containsKey(RHO), "rho curve not found");
    } else {
        // XOR: each curve is either fitted (knots + interpolator) or known - exactly one of the two.
        Validate.isTrue((knotPoints.containsKey(ALPHA) && interpolators.containsKey(ALPHA))
                ^ knownParameterTermSturctures.containsKey(ALPHA), "alpha curve not found");
        Validate.isTrue((knotPoints.containsKey(BETA) && interpolators.containsKey(BETA))
                ^ knownParameterTermSturctures.containsKey(BETA), "beta curve not found");
        Validate.isTrue((knotPoints.containsKey(NU) && interpolators.containsKey(NU))
                ^ knownParameterTermSturctures.containsKey(NU), "nu curve not found");
        Validate.isTrue((knotPoints.containsKey(RHO) && interpolators.containsKey(RHO))
                ^ knownParameterTermSturctures.containsKey(RHO), "rho curve not found");
    }

    // Wrap each interpolator so it works in the transformed (fitting) parameter space.
    // LinkedHashMap preserves the caller's curve ordering for the curve builder.
    final LinkedHashMap<String, Interpolator1D> transInterpolators = new LinkedHashMap<>();
    for (final Map.Entry<String, Interpolator1D> entry : interpolators.entrySet()) {
        final String name = entry.getKey();
        final Interpolator1D temp = new TransformedInterpolator1D(entry.getValue(),
                parameterTransforms.get(name));
        transInterpolators.put(name, temp);
    }

    _curveBuilder = new InterpolatedCurveBuildingFunction(knotPoints, transInterpolators);

    // _parameterTransforms = parameterTransforms; //TODO all the check for this

    _knownParameterTermStructures = knownParameterTermSturctures;
}

From source file:org.talend.core.model.utils.ParameterValueUtil.java

/**
 * Rewrites occurrences of {@code oldName} to {@code newName} in the variable (non-quoted)
 * portions of a query string, leaving double-quoted SQL string literals untouched except for
 * two special cases handled below: arguments of globalMap-style function calls, and a literal
 * that is exactly {@code "oldName"} inside a function-call area.
 *
 * @param oldName the identifier to replace (used as a regex by replaceAll - TODO confirm
 *                callers never pass regex metacharacters)
 * @param newName the replacement identifier
 * @param value   the full query expression, e.g. {@code "drop table "+context.oracle_schema+".\"TDI_26803\""}
 * @return the query expression with variable sections rewritten
 */
public static String splitQueryData(String oldName, String newName, String value) {
    // example:"drop table "+context.oracle_schema+".\"TDI_26803\""
    // >>>>>>>>_*_(const)__ _____*_(varible)_______ __*_(const)___

    /**
     * <b>NOTE</b>: This [inputString] variable is only used for debugging; do not rely on it
     * in production code.
     */
    inputString = value;

    final int length = value.length();
    // quotaStrings stores the start and end index of every double-quoted const string in the value
    LinkedHashMap<Integer, Integer> quotaStrings = new LinkedHashMap<Integer, Integer>();
    // List<Point> functionNameAreas = new ArrayList<Point>();
    List<FunctionInfo> functions = new ArrayList<FunctionInfo>();

    // Scan the value once and record the start/end index of each quoted const string.
    int start = -1;
    int end = -2;
    char ch;
    for (int i = 0; i < length; i++) {
        ch = value.charAt(i);
        if (ch == '\"') {
            // in case of cases :
            // case 1 : [ "select * from " + context.table + " where value = \"context.table\"" ]
            // case 2 : [ "select * from " + context.table + " where value = \"\\" + context.table +
            // "\\context.table\"" ]
            if (isEscapeSequence(value, i)) {
                continue;
            }

            // [0 <= start] >> in case the first const String position computes incorrectly
            if (0 <= start && end < start) {
                end = i;
                quotaStrings.put(start, end);
            } else {
                start = i;
            }
        }
    }

    {
        // in case the value has an unterminated quote
        // example > "select a,context.b from " + context.b + "where value = context.b

        // **but** more likely they wrote this (context.b) intending to use it as a variable...
        // so maybe the text after the quote should not be treated as a const by default..

        // ---*--- the following code would treat the string after the quote as a const

        // if (0 <= start && end < start) {
        // end = length - 1;
        // quotaStrings.put(start, end);
        // }
    }

    // Find the variable sections, do the replacement, then concatenate everything back together.
    StringBuffer strBuffer = new StringBuffer();
    String subString = null;
    int vStart = 0;
    int vEnd = 0;
    int methodMaxIndex = -1;
    vStart = 0;
    vEnd = 0;
    start = 0;
    end = 0;
    // First pass: record function-call areas found in the variable sections between const strings.
    for (Entry<Integer, Integer> entry : quotaStrings.entrySet()) {
        start = entry.getKey();
        end = entry.getValue() + 1;
        vEnd = start;
        if (vStart != start) {
            subString = value.substring(vStart, vEnd);
            calcMethodArea(subString, value, vStart, functions);
        }
        vStart = end;
    }
    vStart = 0;
    vEnd = 0;
    start = 0;
    end = 0;
    // Second pass: rebuild the value, replacing oldName in variable sections (and in the
    // special const-string cases described above).
    for (Entry<Integer, Integer> entry : quotaStrings.entrySet()) {
        start = entry.getKey();
        end = entry.getValue() + 1;
        vEnd = start;
        if (vStart == start) {
            // const string directly follows a const string; probably won't happen...
            // get the const string
            subString = value.substring(start, end);
            if (start < methodMaxIndex) {
                subString = subString.replaceAll(oldName, newName);
            }
        } else {
            // get the variable string, do the replacement, then append it
            subString = value.substring(vStart, vEnd);
            // calcMaxIndex = calcMethodArea(subString, value, vStart, functions, methodMaxIndex);

            if (methodMaxIndex < start) {
                methodMaxIndex = FunctionInfo.getMaxIndexForCurrentParentFunction(start, functions);
            }

            String replacedString = doVaribleReplace(oldName, newName, value, functions, vStart, vEnd);
            strBuffer.append(replacedString);

            // get the const string
            // deal with: context.getProperty("test") + "test"
            subString = value.substring(start, end);
            if (start < methodMaxIndex) {
                // The const string sits inside a function-call area.
                FunctionInfo function = FunctionInfo.getParentFunctionFromList(start, end, functions);
                Point funcNameArea = function.getNameArea();
                String functionName = value.substring(funcNameArea.x, funcNameArea.y);
                if (functionName.matches("^globalMap\\..+")) { //$NON-NLS-1$
                    // globalMap lookups take the key as a string literal, so rewrite it too.
                    subString = subString.replaceAll(oldName, newName);
                } else {
                    if (subString.equals("\"" + oldName + "\"")) { //$NON-NLS-1$ //$NON-NLS-2$
                        subString = "\"" + newName + "\""; //$NON-NLS-1$ //$NON-NLS-2$
                    }
                }
            }
        }
        // append the const string
        strBuffer.append(subString);
        // update the variable-section start point
        vStart = end;
    }

    // in case the last part of the value is a variable section,
    // get it, do the replacement, and append it.
    if (vStart < length) {
        vEnd = length;
        String replacedString = doVaribleReplace(oldName, newName, value, functions, vStart, vEnd);
        strBuffer.append(replacedString);
    }

    return strBuffer.toString();
}

From source file:com.tesora.dve.sql.StrictForeignKeyTest.java

/**
 * Exercises adding and dropping foreign keys via ALTER TABLE across broadcast, range and
 * randomly distributed tables, then re-adds the constraints with foreign_key_checks=0 while
 * the referenced tables are absent.
 *
 * @param conns test connection pair (root + test) supplied by the harness
 * @throws Throwable on any SQL or assertion failure
 */
public static void testAlterFKs(TestConnections conns) throws Throwable {
    String[] rhsTabNames = new String[] { "RB", "RRa" };
    String[] rhsDists = new String[] { "broadcast distribute", "range distribute on (`id`) using ra" };
    String[] lhsTabNames = new String[] { "LB", "LRa", "LA" };
    String[] lhsDists = new String[] { "broadcast distribute", "range distribute on (`fid`) using ra",
            "random distribute" };
    conns.getRootConnection()
            .execute("create range ra (int) persistent group " + conns.getDDL().getPersistentGroup().getName());
    String rbody = "(`id` int, `junk` varchar(32), primary key (`id`)) ";
    // create the rhs tables
    for (int i = 0; i < rhsTabNames.length; i++)
        conns.getTestConnection().execute("create table " + rhsTabNames[i] + rbody + rhsDists[i]);
    // create the lhs tables - but not the fks yet
    // create the lhs tables
    String lbody = " (`id` int, `fid` int)";
    // alters: fk constraint name -> (lhs table name, ALTER TABLE ... ADD CONSTRAINT statement);
    // LinkedHashMap keeps the statements in creation order for the add/drop loops below.
    LinkedHashMap<String, Pair<String, String>> alters = new LinkedHashMap<String, Pair<String, String>>();
    int counter = 0;
    for (int li = 0; li < lhsTabNames.length; li++) {
        if (li == 0) {
            // LB (broadcast) - can target the broadcast table RB
            conns.getTestConnection().execute("create table " + lhsTabNames[li] + lbody + lhsDists[li]);
            String fkName = "pefkt" + counter++;
            alters.put(fkName, new Pair<String, String>(lhsTabNames[li], "alter table " + lhsTabNames[li]
                    + " add constraint " + fkName + " foreign key (fid) references RB (`id`)"));
        }
        // LRa - can target bcast and RRa
        // LA - can target bcast
        else if (li == 1) {
            for (int ri = 0; ri < rhsTabNames.length; ri++) {
                conns.getTestConnection()
                        .execute("create table " + lhsTabNames[li] + rhsTabNames[ri] + lbody + lhsDists[li]);
                String fkName = "pefkt" + counter++;
                String tn = lhsTabNames[li] + rhsTabNames[ri];
                alters.put(fkName, new Pair<String, String>(tn, "alter table " + tn + " add constraint "
                        + fkName + " foreign key (fid) references " + rhsTabNames[ri] + "(id)"));
            }
        } else if (li == 2) {
            conns.getTestConnection().execute("create table " + lhsTabNames[li] + lbody + lhsDists[li]);
            String fkName = "pefkt" + counter++;
            alters.put(fkName, new Pair<String, String>(lhsTabNames[li], "alter table " + lhsTabNames[li]
                    + " add constraint " + fkName + " foreign key (fid) references RB (id)"));
        }
    }
    // Query that lists every FK currently registered for the test schema.
    String consql = "select table_name, column_name, referenced_table_name, referenced_column_name from information_schema.key_column_usage where referenced_column_name is not null and table_schema = '"
            + checkDDL.getDatabaseName() + "'";
    // No FKs yet.
    conns.getTestConnection().assertResults(consql, br());
    // these all should work
    for (Map.Entry<String, Pair<String, String>> me : alters.entrySet()) {
        conns.getTestConnection().execute(me.getValue().getSecond());
    }
    Object[] cols = br(nr, "LB", "fid", "RB", "id", nr, "LRaRB", "fid", "RB", "id", nr, "LRaRRa", "fid", "RRa",
            "id", nr, "LA", "fid", "RB", "id");
    conns.getTestConnection().assertResults(consql, cols);
    // Drop every FK again and verify the schema reports none.
    for (Map.Entry<String, Pair<String, String>> me : alters.entrySet()) {
        conns.getTestConnection()
                .execute("alter table " + me.getValue().getFirst() + " drop foreign key " + me.getKey());
    }
    conns.getTestConnection().assertResults(consql, br());
    // test foreign_key_check=0
    for (String tn : rhsTabNames)
        conns.getTestConnection().execute("drop table " + tn);
    conns.getTestConnection().execute("set foreign_key_checks=0");
    // these all should work even though the referenced tables no longer exist
    for (Map.Entry<String, Pair<String, String>> me : alters.entrySet()) {
        conns.getTestConnection().execute(me.getValue().getSecond());
    }
    conns.getTestConnection().assertResults(consql, cols);
    // even after we add the tables again
    for (int i = 0; i < rhsTabNames.length; i++)
        conns.getTestConnection().execute("create table " + rhsTabNames[i] + rbody + rhsDists[i]);
    conns.getTestConnection().assertResults(consql, cols);

}

From source file:org.apache.hadoop.zebra.io.ColumnGroup.java

/**
 * Dumps human-readable diagnostic information about a column group to the given stream:
 * its name, serializer, compressor, group, permissions, schema, and the per-TFile index
 * entries (name, row count, first/last keys).
 *
 * @param path   path of the column group
 * @param out    stream the report is written to
 * @param conf   configuration used to open the reader
 * @param indent indentation level applied to each emitted line
 * @throws IOException if the column group cannot be read
 * @throws Exception   if building the column group index fails
 */
static public void dumpInfo(Path path, PrintStream out, Configuration conf, int indent)
        throws IOException, Exception {
    // final int maxKeySampleLen = 16;
    IOutils.indent(out, indent);
    out.println();
    IOutils.indent(out, indent);
    out.println("Column Group : " + path);
    ColumnGroup.Reader reader = new ColumnGroup.Reader(path, false, conf);
    try {
        LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
        IOutils.indent(out, indent);
        out.println("Name: " + reader.getName());
        IOutils.indent(out, indent);
        out.println("Serializer: " + reader.getSerializer());
        IOutils.indent(out, indent);
        out.println("Compressor: " + reader.getCompressor());
        IOutils.indent(out, indent);
        out.println("Group: " + reader.getGroup());
        IOutils.indent(out, indent);
        out.println("Perm: " + reader.getPerm());

        properties.put("Schema", reader.getSchema().toString());
        // Output the properties table; LinkedHashMap preserves insertion order.
        // (A previous version also computed the longest key for column alignment but
        // never used it - that dead pass has been removed.)
        for (Map.Entry<String, String> e : properties.entrySet()) {
            IOutils.indent(out, indent);
            out.printf("%s : %s\n", e.getKey(), e.getValue());
        }
        out.println("TFiles within the Column Group :");
        if (reader.cgindex == null) {
            reader.cgindex = buildIndex(reader.fs, reader.path, reader.dirty, conf);
        }
        for (CGIndexEntry entry : reader.cgindex.index) {
            IOutils.indent(out, indent);
            out.printf(" *Name : %s\n", entry.name);
            IOutils.indent(out, indent);
            out.printf("  Rows : %d\n", entry.rows);
            if (entry.firstKey != null) {
                IOutils.indent(out, indent);
                out.printf("  First Key : %s\n", headToString(entry.firstKey));
            }
            if (entry.lastKey != null) {
                IOutils.indent(out, indent);
                // fixed label typo: was "Larst Key"
                out.printf("  Last Key : %s\n", headToString(entry.lastKey));
            }
            // dump TFile info
            // Path pathTFile = new Path(path, entry.name);
            // TFile.dumpInfo(pathTFile.toString(), out, conf);
        }
    } finally {
        try {
            reader.close();
        } catch (Exception ignored) {
            // best-effort close; failure here must not mask an earlier exception
        }
    }
}

From source file:com.opengamma.analytics.financial.interestrate.MultipleYieldCurveFinderDataBundle.java

/**
 * Creates a data bundle for the multiple-yield-curve root finder.
 * <p>
 * Validates that: there is one market value per derivative; no curve appears both in the
 * known set and the unknown set; the unknown node-point and interpolator maps have the same
 * curve names in the same order; and the total number of curve nodes does not exceed the
 * number of instruments (otherwise the system would be under-determined).
 *
 * @param derivatives the instruments to fit, not null, no null elements
 * @param marketValues one par rate per derivative, not null
 * @param knownCurves curves already solved for, may be null
 * @param unknownCurveNodePoints curve name to node points for the curves being solved, not null or empty
 * @param unknownCurveInterpolators curve name to interpolator for the curves being solved, not null or empty
 * @param useFiniteDifferenceByDefault whether finite-difference sensitivities are used by default
 * @param fxMatrix FX rates, not null
 */
public MultipleYieldCurveFinderDataBundle(final List<InstrumentDerivative> derivatives,
        final double[] marketValues, final YieldCurveBundle knownCurves,
        final LinkedHashMap<String, double[]> unknownCurveNodePoints,
        final LinkedHashMap<String, Interpolator1D> unknownCurveInterpolators,
        final boolean useFiniteDifferenceByDefault, final FXMatrix fxMatrix) {
    ArgumentChecker.notNull(derivatives, "derivatives");
    ArgumentChecker.noNulls(derivatives, "derivatives");
    ArgumentChecker.notNull(marketValues, "market values null");
    ArgumentChecker.notNull(unknownCurveNodePoints, "unknown curve node points");
    ArgumentChecker.notNull(unknownCurveInterpolators, "unknown curve interpolators");
    ArgumentChecker.notEmpty(unknownCurveNodePoints, "unknown curve node points");
    ArgumentChecker.notEmpty(unknownCurveInterpolators, "unknown curve interpolators");
    ArgumentChecker.isTrue(derivatives.size() == marketValues.length,
            "marketValues wrong length; must be one par rate per derivative (have {} values for {} derivatives",
            marketValues.length, derivatives.size());
    ArgumentChecker.notNull(fxMatrix, "FX matrix");
    if (knownCurves != null) {
        // A curve cannot be both known and solved for.
        for (final String name : knownCurves.getAllNames()) {
            if (unknownCurveInterpolators.containsKey(name)) {
                throw new IllegalArgumentException("Curve name in known set matches one to be solved for");
            }
        }
        _knownCurves = knownCurves;
    } else {
        _knownCurves = null;
    }
    _derivatives = derivatives;
    _marketValues = marketValues;
    if (unknownCurveNodePoints.size() != unknownCurveInterpolators.size()) {
        throw new IllegalArgumentException("Number of unknown curves not the same as curve interpolators");
    }
    // Walk both LinkedHashMaps in parallel: insertion order must agree so that node points
    // and interpolators are paired by curve name.
    final Iterator<Entry<String, double[]>> nodePointsIterator = unknownCurveNodePoints.entrySet().iterator();
    final Iterator<Entry<String, Interpolator1D>> unknownCurvesIterator = unknownCurveInterpolators.entrySet()
            .iterator();
    _names = new ArrayList<>();
    while (nodePointsIterator.hasNext()) {
        final Entry<String, double[]> entry1 = nodePointsIterator.next();
        final Entry<String, Interpolator1D> entry2 = unknownCurvesIterator.next();
        final String name1 = entry1.getKey();
        if (!name1.equals(entry2.getKey())) {
            throw new IllegalArgumentException("Names must be the same");
        }
        ArgumentChecker.notNull(entry1.getValue(), "curve node points for " + name1);
        ArgumentChecker.notNull(entry2.getValue(), "interpolator for " + name1);
        _names.add(name1);
    }
    // More nodes than instruments would leave the fit under-determined.
    int nNodes = 0;
    for (final double[] nodes : unknownCurveNodePoints.values()) {
        nNodes += nodes.length;
    }
    if (nNodes > derivatives.size()) {
        throw new IllegalArgumentException("Total number of nodes (" + nNodes
                + ") is greater than the number of instruments (" + derivatives.size() + ")");
    }
    _totalNodes = nNodes;
    _unknownCurveNodePoints = unknownCurveNodePoints;
    _unknownCurveInterpolators = unknownCurveInterpolators;
    _useFiniteDifferenceByDefault = useFiniteDifferenceByDefault;
    _fxMatrix = fxMatrix;
}

From source file:org.spout.api.chat.ChatArguments.java

/**
 * Splits this ChatArguments instance into sections.
 *
 * @param type How these arguments are to be split into sections
 * @return The split sections
 */
public List<ChatSection> toSections(SplitType type) {
    List<ChatSection> sections = new ArrayList<ChatSection>();
    StringBuilder currentWord = new StringBuilder();
    // Maps character index within the current section to the styles that start there;
    // the key -1 holds styles carried over from previous sections.
    LinkedHashMap<Integer, List<ChatStyle>> map;
    switch (type) {
    case WORD:
        // One section per whitespace-delimited word, each carrying its style map.
        map = new LinkedHashMap<Integer, List<ChatStyle>>();
        int curIndex = 0;
        for (Object obj : getExpandedPlaceholders()) {
            if (obj instanceof ChatStyle) {
                ChatStyle style = (ChatStyle) obj;
                List<ChatStyle> list = map.get(curIndex);
                if (list == null) {
                    list = new ArrayList<ChatStyle>();
                    map.put(curIndex, list);
                }
                ChatSectionUtils.removeConflicting(list, style);
                list.add(style);
            } else {
                String val = String.valueOf(obj);
                for (int i = 0; i < val.length(); ++i) {
                    int codePoint = val.codePointAt(i);
                    if (Character.isWhitespace(codePoint)) {
                        // Word boundary: emit the accumulated word with a snapshot of the style map.
                        sections.add(new ChatSectionImpl(type, new LinkedHashMap<Integer, List<ChatStyle>>(map),
                                currentWord.toString()));
                        curIndex = 0;
                        currentWord = new StringBuilder();
                        if (map.size() > 0) {
                            // Flatten all styles seen so far into the -1 carry-over slot so the
                            // next word starts with the currently active styles.
                            final List<ChatStyle> previousStyles = map.containsKey(-1)
                                    ? new ArrayList<ChatStyle>(map.get(-1))
                                    : new ArrayList<ChatStyle>();

                            for (Map.Entry<Integer, List<ChatStyle>> entry : map.entrySet()) {
                                if (entry.getKey() != -1) {
                                    for (ChatStyle style : entry.getValue()) {
                                        ChatSectionUtils.removeConflicting(previousStyles, style);
                                        previousStyles.add(style);
                                    }
                                }
                            }
                            map.clear();
                            map.put(-1, previousStyles);
                        }
                    } else {
                        currentWord.append(val.substring(i, i + 1));
                        curIndex++;
                    }
                }
            }
        }

        // Flush the trailing word, if any.
        if (currentWord.length() > 0) {
            sections.add(new ChatSectionImpl(type, map, currentWord.toString()));
        }
        break;

    case STYLE_CHANGE:
        // One section per style change; each section carries the full active style set at -1.
        StringBuilder curSection = new StringBuilder();
        List<ChatStyle> activeStyles = new ArrayList<ChatStyle>(3);
        for (Object obj : getExpandedPlaceholders()) {
            if (obj instanceof ChatStyle) {
                ChatStyle style = (ChatStyle) obj;
                ChatSectionUtils.removeConflicting(activeStyles, style);
                activeStyles.add(style);

                map = new LinkedHashMap<Integer, List<ChatStyle>>();
                map.put(-1, new ArrayList<ChatStyle>(activeStyles));
                sections.add(new ChatSectionImpl(type, map, curSection.toString()));
                curSection = new StringBuilder();
            } else {
                curSection.append(obj);
            }
        }
        break;

    case ALL:
        // Everything in a single section.
        return Collections.<ChatSection>singletonList(
                new ChatSectionImpl(getSplitType(), getActiveStyles(), getPlainString()));

    default:
        throw new IllegalArgumentException("Unknown SplitOption " + type + "!");
    }
    return sections;
}

From source file:ubic.gemma.web.controller.expression.experiment.DEDVController.java

/**
 * Prepare vvo for display on front end. Uses factors and factor values from layouts.
 * <p>
 * Builds two structures: a per-factor summary of value names and their assigned colours
 * (used for the legend), and a per-sample list of factor-name to [value, colour] pairs.
 *
 * @param vvo Note: This will be modified! It will be updated with the factorNames and factorValuesToNames
 * @param eeLayouts layout map: bioassay -> (experimental factor -> factor value id or measurement)
 */
private void prepareFactorsForFrontEndDisplay(VisualizationValueObject vvo,
        LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> eeLayouts) {

    if (eeLayouts == null || eeLayouts.isEmpty()) {
        log.warn("No layouts, bail");
        vvo.setFactorNames(null);
        vvo.setFactorValuesToNames(null);
        return;
    }

    LinkedHashSet<ExperimentalFactor> factorNames = getFactorNames(eeLayouts);

    // colours for conditions/factor values bar chart FIXME make continuous maps different.
    Map<ExperimentalFactor, Queue<String>> factorColoursMap = createFactorNameToColoursMap(factorNames);
    String missingValueColour = "#DCDCDC";

    Random random = new Random();

    // factor name -> (factor value name -> colour); summary used for the legend.
    LinkedHashMap<String, LinkedHashMap<String, String>> factorToValueNames = new LinkedHashMap<>();
    // list of maps with entries: key = factorName, value=array of factor values
    // 1 entry per sample
    List<LinkedHashMap<String, String[]>> factorValueMaps = new ArrayList<>();

    Collection<String> factorsMissingValues = new HashSet<>();

    Collection<BioMaterialValueObject> seenSamples = new HashSet<>(); // if same sample was run more than once on
    // diff platforms.
    Map<Long, FactorValue> fvs = new HashMap<>(); // avoid loading repeatedly.
    Collection<ExperimentalFactor> seenFactors = new HashSet<>();

    for (BioAssayValueObject ba : eeLayouts.keySet()) {

        // Skip repeat runs of the same sample (e.g. on different platforms).
        if (seenSamples.contains(ba.getSample())) {
            continue;
        }
        seenSamples.add(ba.getSample());

        // double should be the factorValue id, defined in
        // ubic.gemma.core.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(ExpressionExperiment,
        // BioAssayDimension)
        LinkedHashMap<ExperimentalFactor, Double> factorMap = eeLayouts.get(ba);
        LinkedHashMap<String, String[]> factorNamesToValueColourPairs = new LinkedHashMap<>(factorNames.size());

        // this is defensive, should only come into play when there's something messed up with the data.
        // for every factor, add a missing-value entry (guards against missing data messing up the layout)
        for (ExperimentalFactor factor : factorNames) {
            String[] facValAndColour = new String[] { "No value", missingValueColour };

            factorNamesToValueColourPairs.put(getUniqueFactorName(factor), facValAndColour);
        }

        // for each experimental factor, store the name and value
        for (Entry<ExperimentalFactor, Double> pair : factorMap.entrySet()) {
            ExperimentalFactor factor = pair.getKey();
            Double valueOrId = pair.getValue();

            /*
             * the double is only a double because it is meant to hold measurements when the factor is continuous if
             * the factor is categorical, the double value is set to the value's id see
             * ubic.gemma.core.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(
             * ExpressionExperiment, BioAssayDimension)
             */
            if (valueOrId == null || factor.getType() == null
                    || (factor.getType().equals(FactorType.CATEGORICAL)
                            && factor.getFactorValues().isEmpty())) {
                factorsMissingValues.add(getUniqueFactorName(factor));
                continue;
            }

            // Cache all factor values of a categorical factor the first time we see it.
            if (!seenFactors.contains(factor) && factor.getType().equals(FactorType.CATEGORICAL)) {
                for (FactorValue fv : factor.getFactorValues()) {
                    fvs.put(fv.getId(), fv);
                }
            }

            String facValsStr = getFacValsStr(fvs, factor, valueOrId);

            if (!factorToValueNames.containsKey(getUniqueFactorName(factor))) {
                factorToValueNames.put(getUniqueFactorName(factor), new LinkedHashMap<String, String>());
            }
            // assign colour if unassigned or fetch it if already assigned
            String colourString = "";
            if (!factorToValueNames.get(getUniqueFactorName(factor)).containsKey(facValsStr)) {
                if (factorColoursMap.containsKey(factor)) {
                    colourString = factorColoursMap.get(factor).poll();
                }
                if (colourString == null || Objects.equals(colourString, "")) { // ran out of predefined colours
                    colourString = getRandomColour(random);
                }
                factorToValueNames.get(getUniqueFactorName(factor)).put(facValsStr, colourString);
            } else {
                colourString = factorToValueNames.get(getUniqueFactorName(factor)).get(facValsStr);
            }
            String[] facValAndColour = new String[] { facValsStr, colourString };

            factorNamesToValueColourPairs.put(getUniqueFactorName(factor), facValAndColour);

        }
        factorValueMaps.add(factorNamesToValueColourPairs);
    }

    // add missing value entries here so they show up at the end of the legend's value lists
    if (!factorsMissingValues.isEmpty()) {
        for (String factorName : factorsMissingValues) {
            if (!factorToValueNames.containsKey(factorName)) {
                factorToValueNames.put(factorName, new LinkedHashMap<String, String>());
            }
            factorToValueNames.get(factorName).put("No value", missingValueColour);
        }
    }
    vvo.setFactorNames(factorToValueNames); // this is summary of values & colours by factor, used for legend
    vvo.setFactorValuesToNames(factorValueMaps); // this is list of maps for each sample
}

From source file:org.bimserver.charting.SupportFunctions.java

/**
 * Collects, per material, the estimated volumes of all IFC products associated with that
 * material, then flattens them into chart rows. Each material becomes one sub-chart series:
 * its sorted volumes are accumulated as a running sum plotted over an arbitrary 0..10000
 * "date" axis so that different materials can be compared along the same X range.
 *
 * @param structureKeyword name of the leaf (grouping) column to emit
 * @param model            model whose IfcRelAssociatesMaterial relationships are scanned
 * @param chart            chart configuration; its dimension lookup keys are updated here
 * @param subChartCount    out-parameter, set to the number of distinct materials found
 * @return one row per accumulated data point, keyed by leaf column name, "date" and "size"
 */
public static ArrayList<LinkedHashMap<String, Object>> getIfcMaterialsByClassWithTreeStructure(
        String structureKeyword, IfcModelInterface model, Chart chart, MutableInt subChartCount) {
    // The structure keyword doubles as the name of the leaf (grouping) column.
    String leafColumnName = structureKeyword;
    // Point the chart dimensions at the columns this method emits below.
    chart.setDimensionLookupKey(structureKeyword, leafColumnName);
    chart.setDimensionLookupKey("date", "date");
    chart.setDimensionLookupKey("size", "size");
    // Material name -> volumes of all products associated with that material.
    LinkedHashMap<String, ArrayList<Double>> materialNameWithSizes = new LinkedHashMap<>();
    // Iterate only the material-association relationships.
    for (IfcRelAssociatesMaterial ifcRelAssociatesMaterial : model
            .getAllWithSubTypes(IfcRelAssociatesMaterial.class)) {
        // IfcMaterialSelect: IfcMaterial, IfcMaterialList, IfcMaterialLayerSetUsage,
        // IfcMaterialLayerSet or IfcMaterialLayer.
        IfcMaterialSelect materialLike = ifcRelAssociatesMaterial.getRelatingMaterial();
        if (materialLike == null) {
            continue;
        }
        // Prefer a human-readable name like "Brick (000000)"; fall back to the OID of the
        // top-level material-like object when no name is available.
        String materialName = getNameOfMaterialsFromMaterialLike(materialLike, true, true);
        String name = (materialName != null) ? materialName : String.format("%d", materialLike.getOid());
        // Create the bucket for this material on first use.
        if (!materialNameWithSizes.containsKey(name)) {
            materialNameWithSizes.put(name, new ArrayList<Double>());
        }
        ArrayList<Double> sizes = materialNameWithSizes.get(name);
        // Only IfcProduct instances carry geometry we can estimate a volume for; a single
        // instanceof suffices (IfcProduct is an IfcObject is an IfcObjectDefinition).
        for (IfcRoot ifcRoot : ifcRelAssociatesMaterial.getRelatedObjects()) {
            if (ifcRoot instanceof IfcProduct) {
                Double volume = getRoughVolumeEstimateFromIfcProduct((IfcProduct) ifcRoot);
                if (volume != null && volume > 0) {
                    sizes.add(volume);
                }
            }
        }
    }
    // One sub-chart per distinct material.
    subChartCount.setValue(materialNameWithSizes.size());
    ArrayList<LinkedHashMap<String, Object>> rawData = new ArrayList<>();
    for (Entry<String, ArrayList<Double>> entry : materialNameWithSizes.entrySet()) {
        String name = entry.getKey();
        // Use the entry's value directly instead of a redundant second map lookup.
        ArrayList<Double> sizes = entry.getValue();
        // Sort ascending, then prepend a zero so every series starts at the origin;
        // a second zero guarantees at least two points when the series was empty.
        Collections.sort(sizes, sortSmallerValuesToFront);
        sizes.add(0, 0.0);
        if (sizes.size() == 1) {
            sizes.add(0, 0.0);
        }
        // Count excludes the leading zero entry; step spreads points across 0..10000.
        double count = Math.max(1, sizes.size() - 1);
        double step = 10000.0 / count;
        double runningSize = 0.0;
        int i = 0;
        // Emit the cumulative sum of sizes as (date, size) points for this material.
        for (Double size : sizes) {
            runningSize += (size != null) ? size : 0.0;
            LinkedHashMap<String, Object> dataEntry = new LinkedHashMap<>();
            dataEntry.put(leafColumnName, name);
            dataEntry.put("date", i * step);
            dataEntry.put("size", runningSize);
            rawData.add(dataEntry);
            i += 1;
        }
    }
    return rawData;
}

From source file:org.springframework.cloud.netflix.zuul.filters.discovery.DiscoveryClientRouteLocator.java

/**
 * Merges the statically configured routes from the parent locator with one default route
 * per discovered service, then normalizes every path (leading slash, optional prefix).
 * The default route, if present, is re-inserted last so it only matches as a fallback.
 */
@Override
protected LinkedHashMap<String, ZuulRoute> locateRoutes() {
    LinkedHashMap<String, ZuulRoute> routes = new LinkedHashMap<String, ZuulRoute>();
    routes.putAll(super.locateRoutes());
    if (this.discovery != null) {
        // Index the statically configured routes by service id (falling back to route id).
        Map<String, ZuulRoute> configured = new LinkedHashMap<String, ZuulRoute>();
        for (ZuulRoute candidate : routes.values()) {
            String id = candidate.getServiceId();
            if (id == null) {
                id = candidate.getId();
            }
            if (id != null) {
                configured.put(id, candidate);
            }
        }
        String[] ignoredPatterns = this.properties.getIgnoredServices().toArray(new String[0]);
        // Add a default route for every discovered service, skipping ignored ones and
        // paths that are already configured.
        for (String serviceId : this.discovery.getServices()) {
            String path = "/" + mapRouteToService(serviceId) + "/**";
            ZuulRoute existing = configured.get(serviceId);
            if (existing != null && existing.getUrl() == null
                    && !StringUtils.hasText(existing.getLocation())) {
                // Explicitly configured with neither URL nor location: route by service id.
                existing.setLocation(serviceId);
            }
            if (!PatternMatchUtils.simpleMatch(ignoredPatterns, serviceId)
                    && !routes.containsKey(path)) {
                routes.put(path, new ZuulRoute(path, serviceId));
            }
        }
    }
    if (routes.get(DEFAULT_ROUTE) != null) {
        // Move the default route to the end of the (insertion-ordered) map.
        ZuulRoute fallback = routes.remove(DEFAULT_ROUTE);
        routes.put(DEFAULT_ROUTE, fallback);
    }
    // Normalize every key: ensure a leading slash, then apply the configured prefix
    // (and re-ensure the slash in case the prefix lacks one).
    LinkedHashMap<String, ZuulRoute> normalized = new LinkedHashMap<>();
    for (Entry<String, ZuulRoute> entry : routes.entrySet()) {
        String path = entry.getKey();
        if (!path.startsWith("/")) {
            path = "/" + path;
        }
        if (StringUtils.hasText(this.properties.getPrefix())) {
            path = this.properties.getPrefix() + path;
            if (!path.startsWith("/")) {
                path = "/" + path;
            }
        }
        normalized.put(path, entry.getValue());
    }
    return normalized;
}

From source file:org.apache.hadoop.hive.ql.parse.ExtractTmpSemanticAnalyzer.java

/**
 * Builds a {@code CREATE TABLE} statement for a temporary table named {@code name},
 * deriving the column list from the schema of the select operator feeding {@code tmpRSOp}.
 * For each internal column name, the row resolver is searched for the original (external)
 * name and type. As a side effect the resolved column names are appended to
 * {@code tmpTableCols}.
 *
 * @param tmpRSOp reduce-sink operator whose single parent select supplies the schema
 * @param name    name of the temporary table to create
 * @return the generated {@code create table <name> ( col type , ... )} statement
 * @throws SemanticException on semantic-analysis failure
 */
private String genCrtTmpQL(Operator tmpRSOp, String name) throws SemanticException {
    // The reduce-sink must have exactly one parent (the select operator); the original
    // assert checked size() == 0, which contradicted the get(0) below — fixed here.
    assert tmpRSOp.getParentOperators().size() == 1;
    Operator select = (Operator) tmpRSOp.getParentOperators().get(0);
    ArrayList<ColumnInfo> colInfos = select.getSchema().getSignature();

    // The select's row resolver is loop-invariant: look it up once, not per column.
    Map<Operator<? extends OperatorDesc>, OpParseContext> ctxMap = pCtxInc.getOpParseCtx();
    RowResolver inputRR = ctxMap.get(select).getRowResolver();
    HashMap<String, LinkedHashMap<String, ColumnInfo>> rslvMap = inputRR.getRslvMap();

    List<String> cols = new ArrayList<String>();
    for (int i = 0; i < colInfos.size(); i++) {
        String currCol = colInfos.get(i).getInternalName();
        // Search all alias maps for the external name matching this internal name.
        // The labeled break stops the whole search on the first match, so a column is
        // never added more than once (the original only broke the inner loop).
        search: for (Map.Entry<String, LinkedHashMap<String, ColumnInfo>> e : rslvMap.entrySet()) {
            for (Map.Entry<String, ColumnInfo> entry : e.getValue().entrySet()) {
                ColumnInfo colInfo = entry.getValue();
                if (colInfo.getInternalName().equals(currCol)) {
                    String oldName = entry.getKey();
                    cols.add(oldName + " " + colInfo.getType().getTypeName());
                    tmpTableCols.add(oldName);
                    break search;
                }
            }
        }
    }
    // Assemble "create table <name> ( c1 t1 , c2 t2 )" with a StringBuilder instead of
    // repeated string concatenation; spacing matches the original output exactly.
    StringBuilder crtTmpQL = new StringBuilder("create table ").append(name).append(" (");
    for (int i = 0; i < cols.size(); i++) {
        crtTmpQL.append(cols.get(i));
        if (i < cols.size() - 1) {
            crtTmpQL.append(" , ");
        }
    }
    return crtTmpQL.append(" )").toString();
}