Example usage for java.util LinkedHashMap put

List of usage examples for java.util LinkedHashMap put

Introduction

On this page you can find example usage for java.util LinkedHashMap put.

Prototype

V put(K key, V value);

Document

Associates the specified value with the specified key in this map (optional operation).
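For reference, put returns the previous value associated with the key (or null if there was none), and a LinkedHashMap iterates its entries in insertion order. A minimal, self-contained illustration:

import java.util.LinkedHashMap;

public class LinkedHashMapPutExample {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> scores = new LinkedHashMap<>();

        // put(...) returns null when the key was not present before.
        System.out.println(scores.put("alice", 10)); // null

        // Re-inserting an existing key replaces the value and returns the old one,
        // while the key keeps its original position in the iteration order.
        System.out.println(scores.put("alice", 20)); // 10

        scores.put("bob", 15);
        scores.put("carol", 30);

        // Entries come back in insertion order: alice, bob, carol.
        System.out.println(scores); // {alice=20, bob=15, carol=30}
    }
}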

Usage

From source file:info.magnolia.cms.util.ServletUtil.java

/**
 * Returns the init parameters for a {@link javax.servlet.FilterConfig} object as a Map, preserving the order in which they are exposed
 * by the {@link javax.servlet.FilterConfig} object.
 */
public static LinkedHashMap<String, String> initParametersToMap(FilterConfig config) {
    LinkedHashMap<String, String> initParameters = new LinkedHashMap<String, String>();
    Enumeration parameterNames = config.getInitParameterNames();
    while (parameterNames.hasMoreElements()) {
        String parameterName = (String) parameterNames.nextElement();
        initParameters.put(parameterName, config.getInitParameter(parameterName));
    }
    return initParameters;
}
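A minimal sketch of how this helper might be called from a servlet Filter's init method, assuming Magnolia's ServletUtil is on the classpath; the LoggingFilter class and the println logging are purely illustrative:

import info.magnolia.cms.util.ServletUtil;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;

public class LoggingFilter implements Filter {
    @Override
    public void init(FilterConfig config) {
        LinkedHashMap<String, String> initParameters = ServletUtil.initParametersToMap(config);
        // Iteration order matches the order reported by getInitParameterNames().
        for (Map.Entry<String, String> parameter : initParameters.entrySet()) {
            System.out.println(parameter.getKey() + " = " + parameter.getValue());
        }
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        chain.doFilter(request, response);
    }

    @Override
    public void destroy() {
    }
}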

From source file:hydrograph.ui.engine.ui.util.SubjobUiConverterUtil.java

/**
 * @param subjobPath
 * @param propertyMap
 * @return the subjob path with the job file extension, or null if subjobPath is blank
 */
public static IPath getSubjobPath(String subjobPath, LinkedHashMap<String, Object> propertyMap) {
    IPath path = null;
    if (StringUtils.isNotBlank(subjobPath)) {
        path = new Path(subjobPath);
        path = path.removeFileExtension();
        path = path.addFileExtension(Constants.JOB_EXTENSION_FOR_IPATH);
        propertyMap.put(Constants.PATH, path.toString());
    }
    return path;
}

From source file:org.waarp.gateway.kernel.HttpJsonDefinition.java

protected static HttpPage loadHttpConfiguration(ConfigHttpPage cpage) throws InvalidArgumentException,
        ClassNotFoundException, InstantiationException, IllegalAccessException {
    List<ConfigHttpField> list = cpage.FIELD;
    LinkedHashMap<String, AbstractHttpField> linkedHashMap = new LinkedHashMap<String, AbstractHttpField>(
            list.size());
    // Now read the configuration
    for (ConfigHttpField fieldValue : list) {
        AbstractHttpField field = loadHttpPage(fieldValue);
        linkedHashMap.put(field.fieldname, field);
    }
    list.clear();
    list = null;
    return new HttpPage(cpage.PAGENAME, cpage.FILEFORM, cpage.HEADER, cpage.FOOTER, cpage.BEGINFORM,
            cpage.ENDFORM, cpage.NEXTINFORM, cpage.URI, cpage.PAGEROLE, cpage.ERRORPAGE, cpage.CLASSNAME,
            linkedHashMap);
}

From source file:com.stratio.crossdata.sh.utils.ConsoleUtils.java

/**
 * In order to print the result, this method calculates the maximum width of every column.
 *
 * @param resultSet structure representing the result of an execution.
 * @return Map<String, Integer> where the key is the name of the column and Integer is the maximum
 * width.
 */
private static Map<String, Integer> calculateColWidths(ResultSet resultSet) {
    LinkedHashMap<String, Integer> colWidths = new LinkedHashMap<>();

    // Get column names or aliases width
    for (ColumnMetadata columnMetadata : resultSet.getColumnMetadata()) {
        colWidths.put(columnMetadata.getName().getColumnNameToShow(),
                columnMetadata.getName().getColumnNameToShow().length());
    }

    // Find widest cell content of every column
    for (Row row : resultSet) {
        int pos = 0;
        for (String key : row.getCells().keySet()) {
            String cellContent = String.valueOf(row.getCell(key).getValue());

            int currentWidth;
            if (colWidths.containsKey(key)) {
                currentWidth = colWidths.get(key);
            } else {
                Iterator<Map.Entry<String, Integer>> iter = colWidths.entrySet().iterator();
                int limit = 0;
                while (limit < pos) {
                    iter.next();
                    limit++;
                }
                currentWidth = iter.next().getKey().length();
            }

            if (cellContent.length() > currentWidth) {
                colWidths.put(key, cellContent.length());
            }

            pos++;
        }
    }

    return colWidths;
}
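The same width-tracking pattern can be shown with plain JDK types; this is a simplified, self-contained sketch (the rows below are made-up data, not Crossdata's ResultSet API):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ColumnWidthSketch {
    public static void main(String[] args) {
        // Made-up rows: each row maps a column name to a cell value, in column order.
        List<Map<String, Object>> rows = new ArrayList<>();
        Map<String, Object> row1 = new LinkedHashMap<>();
        row1.put("name", "Alice");
        row1.put("city", "Sunnyvale");
        rows.add(row1);
        Map<String, Object> row2 = new LinkedHashMap<>();
        row2.put("name", "Zach");
        row2.put("city", "San Francisco");
        rows.add(row2);

        // Seed the widths with the header lengths, preserving column order.
        LinkedHashMap<String, Integer> colWidths = new LinkedHashMap<>();
        for (String column : rows.get(0).keySet()) {
            colWidths.put(column, column.length());
        }

        // Widen each column to fit its longest cell.
        for (Map<String, Object> row : rows) {
            for (Map.Entry<String, Object> cell : row.entrySet()) {
                int cellWidth = String.valueOf(cell.getValue()).length();
                if (cellWidth > colWidths.getOrDefault(cell.getKey(), 0)) {
                    colWidths.put(cell.getKey(), cellWidth);
                }
            }
        }

        System.out.println(colWidths); // {name=5, city=13}
    }
}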

From source file:pl.betoncraft.betonquest.editor.model.PackageSet.java

private static PackageSet parseStreams(String setName, HashMap<String, HashMap<String, InputStream>> streamMap)
        throws IOException {
    PackageSet set = new PackageSet(setName);
    for (Entry<String, HashMap<String, InputStream>> entry : streamMap.entrySet()) {
        String packName = entry.getKey();
        HashMap<String, LinkedHashMap<String, String>> values = new LinkedHashMap<>();
        for (Entry<String, InputStream> subEntry : entry.getValue().entrySet()) {
            String name = subEntry.getKey();
            InputStream stream = subEntry.getValue();
            YAMLParser parser = new YAMLFactory().createParser(stream);
            String currentPath = "";
            String fieldName = "";
            while (true) {
                JsonToken token = parser.nextToken();
                if (token == null)
                    break;
                switch (token) {
                case START_OBJECT:
                    currentPath = currentPath + fieldName + ".";
                    break;
                case FIELD_NAME:
                    fieldName = parser.getText();
                    break;
                case END_OBJECT:
                    currentPath = currentPath.substring(0,
                            currentPath.substring(0, currentPath.length() - 1).lastIndexOf(".") + 1);
                    break;
                case VALUE_STRING:
                case VALUE_NUMBER_INT:
                case VALUE_NUMBER_FLOAT:
                case VALUE_FALSE:
                case VALUE_TRUE:
                    String key = (currentPath + fieldName).substring(1,
                            currentPath.length() + fieldName.length());
                    LinkedHashMap<String, String> map = values.get(name);
                    if (map == null) {
                        map = new LinkedHashMap<>();
                        values.put(name, map);
                    }
                    map.put(key, parser.getText());
                    break;
                default:
                    // do nothing
                }
            }
            parser.close();
            stream.close();
        }
        QuestPackage pack = new QuestPackage(set, packName, values);
        set.packages.add(pack);
    }
    return set;
}

From source file:com.alibaba.wasp.fserver.TestEntityGroup.java

/**
 * @param tableName
 * @param startKey
 * @param stopKey
 * @param callingMethod
 * @param conf
 * @param fields
 * @throws java.io.IOException
 * @return An entityGroup on which you must call
 *         {@link EntityGroup#closeEntityGroup(EntityGroup)} when done.
 */
private static EntityGroup initEntityGroup(byte[] tableName, byte[] startKey, byte[] stopKey,
        String callingMethod, Configuration conf, List<Field> fields) throws IOException {
    FTable table = new FTable();
    table.setTableName(Bytes.toString(tableName));
    LinkedHashMap<String, Field> finalFields = new LinkedHashMap<String, Field>();
    for (Field field : fields) {
        finalFields.put(field.getName(), field);
    }
    table.setColumns(finalFields);
    EntityGroupInfo info = new EntityGroupInfo(Bytes.toBytes(table.getTableName()), startKey, stopKey, false);

    if (FMetaReader.exists(TEST_UTIL.getConfiguration(), info)) {
        throw new IOException("All ready has a entityGroupInfo " + info.getEntityGroupNameAsString());
    }
    return EntityGroup.openEntityGroup(info, table, conf, TEST_UTIL.getWaspCluster().getFServer(0), null);
}
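The Javadoc above requires callers to close the returned entityGroup; below is a minimal sketch of honoring that contract inside the test (the table name, fields and configuration here are assumed to be set up elsewhere):

EntityGroup entityGroup = initEntityGroup(Bytes.toBytes("testTable"), null, null,
        "testMethod", TEST_UTIL.getConfiguration(), fields);
try {
    // ... exercise the entityGroup ...
} finally {
    EntityGroup.closeEntityGroup(entityGroup);
}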

From source file:com.streamsets.pipeline.stage.origin.jdbc.table.AllTypesIT.java

private static void populateRecords() {
    Record record = RecordCreator.create();
    LinkedHashMap<String, Field> fields;
    AtomicInteger id_field = new AtomicInteger(0);

    //CHAR_AND_BINARY
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("char1", Field.create("abcdefghij"));
    fields.put("varchar1", Field.create(UUID.randomUUID().toString()));
    fields.put("clob1", Field.create(UUID.randomUUID().toString()));
    fields.put("varbinary1", Field.create(UUID.randomUUID().toString().getBytes()));
    fields.put("blob1", Field.create(UUID.randomUUID().toString().getBytes()));
    record.set(Field.createListMap(fields));

    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("CHAR_AND_BINARY").getRight().add(record);

    //Date and time
    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    Calendar calendar = Calendar.getInstance();

    calendar.set(Calendar.HOUR_OF_DAY, 0);
    calendar.set(Calendar.MINUTE, 0);
    calendar.set(Calendar.SECOND, 0);
    calendar.set(Calendar.MILLISECOND, 0);
    fields.put("date1", Field.create(Field.Type.DATE, calendar.getTime()));
    calendar.setTimeInMillis(System.currentTimeMillis());

    calendar.set(Calendar.MILLISECOND, 0);
    fields.put("timestamp1", Field.create(Field.Type.DATETIME, calendar.getTime()));
    fields.put("datetime1", Field.create(Field.Type.DATETIME, calendar.getTime()));
    calendar.setTimeInMillis(System.currentTimeMillis());

    calendar.set(Calendar.YEAR, 1970);
    calendar.set(Calendar.MONTH, Calendar.JANUARY);
    calendar.set(Calendar.DAY_OF_MONTH, 1);
    calendar.set(Calendar.MILLISECOND, 0);
    fields.put("time1", Field.create(Field.Type.TIME, calendar.getTime()));
    calendar.setTimeInMillis(System.currentTimeMillis());

    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("DATE_AND_TIME").getRight().add(record);

    //DIFFERENT_INTS
    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("int1", Field.create(Field.Type.INTEGER, Integer.MIN_VALUE));
    fields.put("int2", Field.create(Field.Type.INTEGER, Integer.MIN_VALUE));
    fields.put("mediumint1", Field.create(Field.Type.INTEGER, Integer.MIN_VALUE));
    fields.put("tinyint1", Field.create(Field.Type.SHORT, -128));
    fields.put("smallint1", Field.create(Field.Type.SHORT, Short.MIN_VALUE));
    fields.put("bigint1", Field.create(Field.Type.LONG, Long.MIN_VALUE));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("DIFFERENT_INTS").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("int1", Field.create(Field.Type.INTEGER, Integer.MAX_VALUE));
    fields.put("int2", Field.create(Field.Type.INTEGER, Integer.MAX_VALUE));
    fields.put("mediumint1", Field.create(Field.Type.INTEGER, Integer.MAX_VALUE));
    fields.put("tinyint1", Field.create(Field.Type.SHORT, 127));
    fields.put("smallint1", Field.create(Field.Type.SHORT, Short.MAX_VALUE));
    fields.put("bigint1", Field.create(Field.Type.LONG, Long.MAX_VALUE));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("DIFFERENT_INTS").getRight().add(record);

    //FLOATING_PT_INTS
    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("decimal1", Field.create(Field.Type.DECIMAL, new BigDecimal("12.345")));
    fields.put("number1", Field.create(Field.Type.DECIMAL, new BigDecimal("0.12345")));
    fields.put("double1", Field.create(Field.Type.DOUBLE, 123.456));
    fields.put("real1", Field.create(Field.Type.FLOAT, 12.34));
    fields.put("floatdouble1", Field.create(Field.Type.DOUBLE, Double.MAX_VALUE));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("FLOATING_PT_INTS").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("decimal1", Field.create(Field.Type.DECIMAL, new BigDecimal("-12.345")));
    fields.put("number1", Field.create(Field.Type.DECIMAL, new BigDecimal("-0.12345")));
    fields.put("double1", Field.create(Field.Type.DOUBLE, -123.456));
    fields.put("real1", Field.create(Field.Type.FLOAT, -12.34));
    fields.put("floatdouble1", Field.create(Field.Type.DOUBLE, Double.MIN_VALUE));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("FLOATING_PT_INTS").getRight().add(record);

    //OTHER_TYPES
    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("boolean1", Field.create(Field.Type.BOOLEAN, true));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("OTHER_TYPES").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("boolean1", Field.create(Field.Type.BOOLEAN, false));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("OTHER_TYPES").getRight().add(record);
}

From source file:com.streamsets.pipeline.stage.origin.jdbc.table.ReferentialConstraintOrderingIT.java

private static void populateRecords() {
    Record record;
    LinkedHashMap<String, Field> fields;

    //USER_TABLE Records
    int i = 0;

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("u_id", Field.create(++i));
    fields.put("name", Field.create("Alice"));
    fields.put("address", Field.create("100 First Street, Sunnyvale, CA."));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("USER_TABLE").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("u_id", Field.create(++i));
    fields.put("name", Field.create("Zach"));
    fields.put("address", Field.create("200 Second Street, Sunnyvale, CA."));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("USER_TABLE").getRight().add(record);

    record = RecordCreator.create();
    fields.put("u_id", Field.create(++i));
    fields.put("name", Field.create("Jack"));
    fields.put("address", Field.create("300 Third Street, Sunnyvale, CA."));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("USER_TABLE").getRight().add(record);

    //Product Records
    i = 0;

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("p_id", Field.create(++i));
    fields.put("name", Field.create("Coconut Chips"));
    fields.put("manufacturer", Field.create("Dang"));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("PRODUCT").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("p_id", Field.create(++i));
    fields.put("name", Field.create("Bluberry Bar"));
    fields.put("manufacturer", Field.create("Luna"));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("PRODUCT").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("p_id", Field.create(++i));
    fields.put("name", Field.create("Dark Chocolate Peanut Butter Bar"));
    fields.put("manufacturer", Field.create("Kind"));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("PRODUCT").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("p_id", Field.create(++i));
    fields.put("name", Field.create("Oats and Honey Bar"));
    fields.put("manufacturer", Field.create("Nature Valley"));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("PRODUCT").getRight().add(record);

    //ORDER_TBL Records
    i = 0;

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("o_id", Field.create(++i));
    fields.put("u_id", Field.create(1));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("ORDER_TBL").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("o_id", Field.create(++i));
    fields.put("u_id", Field.create(2));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("ORDER_TBL").getRight().add(record);

    //Items Records
    long currentTime = System.currentTimeMillis();

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("time_id", Field.create(currentTime));
    fields.put("o_id", Field.create(1));
    fields.put("p_id", Field.create(1));
    fields.put("quantity", Field.create(2));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("ITEMS").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("time_id", Field.create(currentTime + 1));
    fields.put("o_id", Field.create(1));
    fields.put("p_id", Field.create(2));
    fields.put("quantity", Field.create(3));

    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("ITEMS").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("time_id", Field.create(currentTime + 2));
    fields.put("o_id", Field.create(2));
    fields.put("p_id", Field.create(1));
    fields.put("quantity", Field.create(4));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("ITEMS").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("time_id", Field.create(currentTime + 3));
    fields.put("o_id", Field.create(2));
    fields.put("p_id", Field.create(3));
    fields.put("quantity", Field.create(2));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("ITEMS").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    fields.put("time_id", Field.create(currentTime + 4));
    fields.put("o_id", Field.create(2));
    fields.put("p_id", Field.create(4));
    fields.put("quantity", Field.create(1));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("ITEMS").getRight().add(record);
}

From source file:Main.java

/**
 * Parses a string like animal,-dog,--chihuahua*,-cat,--siamese into keys and
 * values to put into a ListBox. The keys are the category names, indented to show
 * hierarchy (one level of indentation per leading dash); the values are the names
 * themselves, with no dashes at the beginning.
 * A trailing asterisk denotes a default value and is parsed by a different function.
 *
 * @param categoryString Category string to parse.
 * @return Map with the parsed categories.
 */
public static LinkedHashMap<String, String> parseCategories(final String categoryString) {
    final LinkedHashMap<String, String> result = new LinkedHashMap<String, String>();
    final String[] categories = splitCategoryString(categoryString);
    for (final String cat : categories) {
        final String category = cat.trim();
        final int i = getNumDashes(category);
        final int isDefault = isDefault(category) ? 1 : 0;
        final String value = category.substring(i, category.length() - isDefault);
        final String key = repeat(i, " - ") + value;
        result.put(key, value);
    }

    return result;
}
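A minimal usage sketch, using the example string from the Javadoc above; the exact key indentation depends on the repeat helper, which is not shown here:

LinkedHashMap<String, String> categories =
        parseCategories("animal,-dog,--chihuahua*,-cat,--siamese");
for (Map.Entry<String, String> entry : categories.entrySet()) {
    // Keys carry the ListBox indentation, values are the plain category names,
    // e.g. "animal" -> "animal" and " - dog" -> "dog", with the trailing '*' stripped.
    System.out.println("'" + entry.getKey() + "' -> '" + entry.getValue() + "'");
}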

From source file:com.streamsets.pipeline.stage.lib.hive.HiveMetastoreUtil.java

/**
 * Converts a Record to a LinkedHashMap. This is used for comparing the structure with TypeInfo in the cache.
 * @param record incoming Record
 * @return LinkedHashMap version of the record. Key is the column name, and value is the column type in HiveType.
 * @throws StageException
 */
public static LinkedHashMap<String, HiveType> convertRecordToHMSType(Record record) throws StageException {
    LinkedHashMap<String, HiveType> columns = new LinkedHashMap<>();
    LinkedHashMap<String, Field> list = record.get().getValueAsListMap();
    for (Map.Entry<String, Field> pair : list.entrySet()) {
        columns.put(pair.getKey(), HiveType.getHiveTypeforFieldType(pair.getValue().getType()));
    }
    return columns;
}
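A minimal usage sketch, building a record the same way the test snippets above do; the column names and the resulting HiveType values are illustrative, and the call may throw StageException:

Record record = RecordCreator.create();
LinkedHashMap<String, Field> fields = new LinkedHashMap<>();
fields.put("id", Field.create(Field.Type.INTEGER, 1));
fields.put("name", Field.create("Alice"));
record.set(Field.createListMap(fields));

LinkedHashMap<String, HiveType> columns = HiveMetastoreUtil.convertRecordToHMSType(record);
// Iteration order matches the field order of the record: id, then name.
for (Map.Entry<String, HiveType> column : columns.entrySet()) {
    System.out.println(column.getKey() + " -> " + column.getValue());
}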