Example usage for java.util LinkedHashMap values

List of usage examples for java.util LinkedHashMap values

Introduction

On this page you can find example usage for java.util LinkedHashMap#values().

Prototype

public Collection<V> values() 

Source Link

Document

Returns a Collection view of the values contained in this map.

Usage

From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java

public void testLinkedHashMapMap() {
    // Verifies that cloning a LinkedHashMap preserves all of its mappings:
    // the clone must expose every source value via values() and every
    // source key via keySet().
    LinkedHashMap<Integer, Integer> srcMap = new LinkedHashMap<Integer, Integer>();
    assertNotNull(srcMap);
    checkEmptyLinkedHashMapAssumptions(srcMap);

    srcMap.put(INTEGER_1, INTEGER_11);
    srcMap.put(INTEGER_2, INTEGER_22);
    srcMap.put(INTEGER_3, INTEGER_33);

    LinkedHashMap<Integer, Integer> hashMap = cloneLinkedHashMap(srcMap);
    assertFalse(hashMap.isEmpty());
    // assertEquals reports expected vs. actual on failure,
    // unlike assertTrue(a == b) which only says "false".
    assertEquals(SIZE_THREE, hashMap.size());

    Collection<Integer> valColl = hashMap.values();
    assertTrue(valColl.contains(INTEGER_11));
    assertTrue(valColl.contains(INTEGER_22));
    assertTrue(valColl.contains(INTEGER_33));

    Collection<Integer> keyColl = hashMap.keySet();
    assertTrue(keyColl.contains(INTEGER_1));
    assertTrue(keyColl.contains(INTEGER_2));
    assertTrue(keyColl.contains(INTEGER_3));
}

From source file:com.alibaba.wasp.plan.parser.druid.DruidDDLParser.java

/**
 * Processes a CREATE INDEX statement and generates an execution plan.
 *
 * @param context the parse context that receives the resulting {@link CreateIndexPlan}
 * @param sqlCreateIndexStatement the parsed CREATE INDEX statement
 * @param metaEventOperation meta operations used to validate names and fetch table info
 * @throws IOException if meta information cannot be read
 * @throws UnsupportedException if the index definition is invalid (duplicate
 *         field, illegal DESC field, or index keys equal to / containing all
 *         primary keys)
 */
private void getCreateIndexPlan(ParseContext context, WaspSqlCreateIndexStatement sqlCreateIndexStatement,
        MetaEventOperation metaEventOperation) throws IOException {

    // Index Name
    SQLName name = sqlCreateIndexStatement.getName();
    String indexName = parseName(name);
    metaEventOperation.isLegalIndexName(indexName);
    LOG.debug("Create Index SQL IndexName " + name);

    // Table Name
    SQLName table = sqlCreateIndexStatement.getTable();
    String tableName = parseName(table);
    LOG.debug("Create Index SQL TableName " + table);

    // Check if table exists and get Table info
    FTable fTable = metaEventOperation.checkAndGetTable(tableName, true);

    // Check the index does not already exist
    metaEventOperation.checkIndexNotExists(fTable, indexName);

    // Indexed fields: collect in declaration order, rejecting duplicates.
    List<SQLSelectOrderByItem> items = sqlCreateIndexStatement.getItems();
    LinkedHashSet<String> columns = new LinkedHashSet<String>(items.size());
    List<String> desc = new ArrayList<String>();
    for (SQLSelectOrderByItem item : items) {
        String columnName = parseName(item.getExpr());
        // LinkedHashSet.add returns false when the element is already present,
        // so a single call both detects the duplicate and records the column.
        if (!columns.add(columnName)) {
            throw new UnsupportedException("Index have two same field '" + columnName + "'");
        }
        if (item.getType() == SQLOrderingSpecification.DESC) {
            desc.add(columnName);
        }
    }

    if (!metaEventOperation.isLegalDescFields(fTable, desc)) {
        throw new UnsupportedException("Currently we only support the ascending and descending time field.");
    }

    List<String> colList = new ArrayList<String>();
    colList.addAll(columns);
    if (metaEventOperation.arePrimaryKeys(fTable, colList)) {
        throw new UnsupportedException("Index keys is Primary Keys.");
    }
    if (metaEventOperation.containPrimaryKeys(fTable, colList)) {
        throw new UnsupportedException("Index keys contain all Primary Keys.");
    }

    LinkedHashMap<String, Field> indexKeys = metaEventOperation.checkAndGetFields(fTable, columns);
    // Check the indexKeys whether have Duplicate column name
    metaEventOperation.areLegalTableColumns(null, indexKeys.values());

    Index index = new Index(indexName, tableName, indexKeys);
    // Check if two index have the same columns and the same columns order
    metaEventOperation.checkTwoIndexWithSameColumn(fTable, index);

    index.setDesc(desc);
    index.setStoring(parse(sqlCreateIndexStatement.getStoringCols(), fTable.getColumns()));
    CreateIndexPlan createIndexPlan = new CreateIndexPlan(index);

    context.setPlan(createIndexPlan);
    LOG.debug("CreateIndexPlan " + createIndexPlan.toString());
}

From source file:com.ultramegasoft.flavordex2.fragment.ViewInfoFragment.java

/**
 * Populates the table of extra fields.
 *
 * @param data A LinkedHashMap containing the extra values, keyed by field name;
 *             iterated in insertion order so fields render in declaration order
 */
protected void populateExtras(@NonNull LinkedHashMap<String, ExtraFieldHolder> data) {
    final Activity activity = getActivity();
    if (activity == null) {
        // Fragment is detached; there is no view hierarchy to render into.
        return;
    }

    final TableLayout table = activity.findViewById(R.id.entry_info);
    // Remove rows added by a previous call before re-populating.
    if (!mExtraRows.isEmpty()) {
        for (View tableRow : mExtraRows) {
            table.removeView(tableRow);
        }
        mExtraRows.clear();
    }
    if (!data.isEmpty()) {
        final LayoutInflater inflater = LayoutInflater.from(activity);
        for (ExtraFieldHolder extra : data.values()) {
            // Preset fields are displayed elsewhere; only render custom extras.
            if (extra.preset) {
                continue;
            }
            final View root = inflater.inflate(R.layout.view_info_extra, table, false);
            ((TextView) root.findViewById(R.id.label)).setText(getString(R.string.label_field, extra.name));
            ((TextView) root.findViewById(R.id.value)).setText(extra.value);
            table.addView(root);
            mExtraRows.add(root);
        }
    }
}

From source file:net.opentsdb.ConfigReader.java

/**
 * Loads the embedded JSON configuration resource, applies command-line
 * overrides on top of it, and writes the merged values into the supplied
 * OpenTSDB {@code config}.
 *
 * @param config the OpenTSDB config to receive the merged configuration items
 * @param args raw command-line arguments; recognized options override file values
 * @return the arguments left over after option parsing
 * @throws RuntimeException wrapping any failure to read or apply the resource
 */
public static String[] load(final Config config, final String[] args) {
    String[] noConfigArgs = {};
    // try-with-resources closes the resource stream on every path; the
    // previous version never closed it and leaked the InputStream.
    try (InputStream is = ConfigReader.class.getClassLoader().getResourceAsStream(CONFIG_PATH)) {
        ArrayNode an = (ArrayNode) JSON.getMapper().readTree(is);
        // citems preserves the file's declaration order; clOptions maps a
        // command-line switch name to its configuration item.
        final LinkedHashMap<String, ConfigurationItem> citems = new LinkedHashMap<String, ConfigurationItem>(
                an.size());
        final HashMap<String, ConfigurationItem> clOptions = new HashMap<String, ConfigurationItem>();
        for (int i = 0; i < an.size(); i++) {
            JsonNode jn = an.get(i);
            ConfigurationItem ci = JSON.getMapper().convertValue(jn, ConfigurationItem.class);
            ci.resolve();
            if (ci.getCl() != null) {
                clOptions.put(ci.getCl(), ci);
            }
            if (ci.getValue() != null) {
                ci.validate();
                citems.put(ci.getKey(), ci);
            }
        }

        final ArgP argp = newArgP();
        noConfigArgs = argp.parse(args);
        final Map<String, String> argpOptions = argp.getParsed();
        if (!argpOptions.isEmpty()) {
            // Command-line values override (or add to) the file-provided items.
            for (Map.Entry<String, String> entry : argpOptions.entrySet()) {
                ConfigurationItem argCi = clOptions.get(entry.getKey());
                if (argCi != null) {
                    argCi.setValueAsText(entry.getValue());
                    citems.put(argCi.getKey(), argCi);
                }
            }
        }
        // Write the configuration to an OpenTSDB config
        for (ConfigurationItem configItem : citems.values()) {
            config.overrideConfig(configItem.getKey(), configItem.getValueStr());
        }
        return noConfigArgs;
    } catch (Exception ex) {
        throw new RuntimeException("Failed to load resource [" + CONFIG_PATH + "]", ex);
    }
}

From source file:com.alibaba.wasp.plan.parser.druid.DruidDDLParser.java

/**
 * Rebuilds the table's column map with {@code addFields} spliced in:
 * when {@code index} is negative they are placed before all existing
 * columns, otherwise immediately after the column at {@code index}.
 *
 * @param index insertion position among the existing columns; negative means prepend
 * @param addFields the new fields to insert, in order
 * @param ftableColumns the table's current columns, in declaration order
 * @param newTable the table object that receives the rebuilt column map
 */
private void addFieldByPosition(int index, List<Field> addFields, LinkedHashMap<String, Field> ftableColumns,
        FTable newTable) {
    LinkedHashMap<String, Field> finalColumns = new LinkedHashMap<String, Field>();
    if (index < 0) {
        // Prepend: new fields go in front of every existing column.
        for (Field added : addFields) {
            finalColumns.put(added.getName(), added);
        }
    }
    int position = 0;
    for (Field existing : ftableColumns.values()) {
        finalColumns.put(existing.getName(), existing);
        if (position == index) {
            // Insert the new fields right after the column at 'index'.
            for (Field added : addFields) {
                finalColumns.put(added.getName(), added);
            }
        }
        position++;
    }
    newTable.setColumns(finalColumns);
}

From source file:io.syndesis.verifier.LocalProcessVerifier.java

/**
 * Builds an {@code ImmutableResult} for the given scope and status,
 * collecting any {@code error.<id>.code} / {@code error.<id>.description}
 * properties from {@code response} into per-id error entries.
 *
 * @param scope the verification scope to record
 * @param status the verification status to record
 * @param response properties from the verification process; may be null
 * @return the assembled result
 */
private ImmutableResult createResult(Verifier.Scope scope, Verifier.Result.Status status, Properties response) {
    ImmutableResult.Builder builder = ImmutableResult.builder().scope(scope).status(status);
    if (response != null) {
        // One builder per error id, in the order the ids are first seen.
        LinkedHashMap<String, ImmutableError.Builder> errors = new LinkedHashMap<>();
        for (Map.Entry<Object, Object> entry : response.entrySet()) {
            String key = (String) entry.getKey();
            if (key.startsWith("error.")) {
                // "error.<id>.<suffix>" -> "<id>"
                String errorId = key.substring("error.".length()).replaceFirst("\\..*", "");
                // computeIfAbsent registers the builder on first sight, so the
                // redundant trailing put() of the original is no longer needed.
                ImmutableError.Builder error = errors.computeIfAbsent(errorId, id -> ImmutableError.builder());
                String value = (String) entry.getValue();
                if (key.endsWith(".code")) {
                    error.code(value);
                }
                if (key.endsWith(".description")) {
                    error.description(value);
                }
            }
        }
        builder.addAllErrors(
                errors.values().stream().map(ImmutableError.Builder::build).collect(Collectors.toList()));
    }
    return builder.build();
}

From source file:org.opencms.ui.contextmenu.CmsContextMenuTreeBuilder.java

/**
 * Builds a tree from a list of available context menu items.<p>
 *
 * The root node of the returned tree has no useful data, its child nodes correspond to the top-level
 * entries of the context menu.
 *
 * @param items the available context menu items
 * @return the context menu item tree
 */
public CmsTreeNode<I_CmsContextMenuItem> buildTree(List<I_CmsContextMenuItem> items) {

    // Work on a copy so the caller's list is not reordered.
    items = Lists.newArrayList(items);

    // First sort by priority and then use a map with the id as the key to store the items,
    // eliminating items with the same id but a lower priority than another item

    Collections.sort(items, new Comparator<I_CmsContextMenuItem>() {

        public int compare(I_CmsContextMenuItem a, I_CmsContextMenuItem b) {

            return Integer.compare(a.getPriority(), b.getPriority());
        }
    });
    LinkedHashMap<String, I_CmsContextMenuItem> itemsById = Maps.newLinkedHashMap();
    for (I_CmsContextMenuItem item : items) {
        String id = item.getId();
        I_CmsContextMenuItem prevItem = itemsById.get(id);
        if (prevItem != null) {
            // Later put() overwrites: since the list is priority-sorted, the
            // higher-priority item replaces the lower-priority one.
            LOG.info("Discarding overridden context menu item " + prevItem + " because of higher priority item "
                    + item);
        }
        itemsById.put(id, item);
    }

    // Now sort by order. Since all children of a node should be processed in one iteration of the following loop,
    // this order also applies to the child order of each tree node built in the next step
    List<I_CmsContextMenuItem> uniqueItems = Lists.newArrayList(itemsById.values());
    uniqueItems = filterVisible(uniqueItems);
    if (m_context.getResources().size() == 1) {
        m_defaultActionItem = findDefaultAction(uniqueItems);
    }

    Collections.sort(uniqueItems, new Comparator<I_CmsContextMenuItem>() {

        public int compare(I_CmsContextMenuItem a, I_CmsContextMenuItem b) {

            return Float.compare(a.getOrder(), b.getOrder());
        }
    });
    Set<String> processedIds = Sets.newHashSet();
    boolean changed = true;
    Map<String, CmsTreeNode<I_CmsContextMenuItem>> treesById = Maps.newHashMap();

    // Create childless tree node for each item
    for (I_CmsContextMenuItem item : itemsById.values()) {
        CmsTreeNode<I_CmsContextMenuItem> node = new CmsTreeNode<I_CmsContextMenuItem>();
        node.setData(item);
        treesById.put(item.getId(), node);
    }
    CmsTreeNode<I_CmsContextMenuItem> root = new CmsTreeNode<I_CmsContextMenuItem>();

    // Use null as the root node, which does not have any useful data
    treesById.put(null, root);

    // Iterate through list multiple times, each time only processing those items whose parents
    // we have encountered in a previous iteration (actually, in the last iteration). We do this so that the resulting
    // tree is actually a tree and contains no cycles, even if there is a reference cycle between the context menu items via their parent ids.
    // (Items which form such a cycle will never be reached.)
    while (changed) {
        changed = false;
        Iterator<I_CmsContextMenuItem> iterator = uniqueItems.iterator();
        // Ids attached during this pass; they become eligible parents only
        // in the NEXT pass, which is what prevents cycles.
        Set<String> currentLevel = Sets.newHashSet();
        while (iterator.hasNext()) {
            I_CmsContextMenuItem currentItem = iterator.next();
            String parentId = currentItem.getParentId();
            if ((parentId == null) || processedIds.contains(parentId)) {
                changed = true;
                iterator.remove();
                currentLevel.add(currentItem.getId());
                treesById.get(parentId).addChild(treesById.get(currentItem.getId()));
            }
        }
        processedIds.addAll(currentLevel);
    }
    return root;
}

From source file:org.kutkaitis.timetable2.timetable.MonteCarlo.java

/**
 * Checks whether the given group can be scheduled at {@code lectureNumber}
 * without any of its students being in two lectures at the same time.
 *
 * @param teacher the teacher being scheduled (not inspected directly here)
 * @param teachersGroups the teacher's groups (not inspected directly here)
 * @param group the group to place; null is treated as a failed condition
 * @param lectureNumber the lecture slot to check
 * @param dayTimeTable per-teacher timetables for the day, keyed by teacher
 * @return true when no student of {@code group} is already scheduled in this
 *         slot in any other timetable; false on the first conflict found or
 *         when {@code group} is null
 */
private boolean isStudentInOneLectureAtTheTime(Teacher teacher, List<Group> teachersGroups, Group group,
        int lectureNumber, LinkedHashMap<String, LinkedHashMap> dayTimeTable) {
    boolean mandatoryConditionsMet = true;
    if (group != null) {
        List<Student> groupStudents = group.getStudents();
        // One timetable per teacher for this day.
        Collection<LinkedHashMap> teachersTimeTables = dayTimeTable.values();
        for (LinkedHashMap<String, String> teachersTimeTable : teachersTimeTables) {
            if (teachersTimeTable.isEmpty()) {
                // No lectures for this teacher today: nothing can conflict.
                mandatoryConditionsMet = true;
                continue;
            }
            String groupNameToSplit = teachersTimeTable.get(String.valueOf(lectureNumber));
            if (groupNameToSplit == null) {
                // This slot is free in this timetable.
                mandatoryConditionsMet = true;
                continue;
            }
            // Slot entries appear to be formatted "<prefix>: <groupName>" —
            // NOTE(review): index [1] assumes exactly one ':'; confirm with the writer side.
            String[] splittedGroupNames = groupNameToSplit.split(":");
            String groupName = splittedGroupNames[1].trim();
            Group groupToCheck = studentsMockDataFiller.getGroups().get(groupName);
            boolean contains = true;
            // "-----" presumably marks an empty/placeholder slot — no conflict possible.
            if (StringUtils.equals(groupName, "-----")) {
                contains = false;
            }

            if (groupToCheck != null) {
                // Conflict when any student belongs to both groups.
                contains = CollectionUtils.containsAny(groupStudents, groupToCheck.getStudents());
            }
            if (contains == false) {
                mandatoryConditionsMet = true;
            } else {
                // A student would be in two lectures at once: fail fast.
                mandatoryConditionsMet = false;
                return mandatoryConditionsMet;
            }
        }
    } else {
        mandatoryConditionsMet = false;
    }
    return mandatoryConditionsMet;
}

From source file:ubic.gemma.persistence.service.expression.designElement.CompositeSequenceServiceImpl.java

/**
 * Checks to see if each named CompositeSequence exists in any of the array designs. Found
 * sequences are collected in a LinkedHashMap keyed by name, preserving order based on
 * insertion and skipping duplicates.
 *
 * @param compositeSequenceNames the names to look up (trimmed before lookup)
 * @param arrayDesigns the array designs to search
 * @return the matching composite sequences, or null when none were found
 */
@Override
public Collection<CompositeSequence> findByNamesInArrayDesigns(Collection<String> compositeSequenceNames,
        Collection<ArrayDesign> arrayDesigns) {
    LinkedHashMap<String, CompositeSequence> compositeSequencesMap = new LinkedHashMap<>();

    for (ArrayDesign arrayDesign : arrayDesigns) {
        for (Object obj : compositeSequenceNames) {
            String name = (String) obj;
            name = StringUtils.trim(name);
            AbstractService.log.debug("entered: " + name);
            CompositeSequence cs = this.findByName(arrayDesign, name);
            if (cs == null) {
                AbstractService.log.warn("Composite sequence " + name + " does not exist.  Discarding ... ");
            } else if (!compositeSequencesMap.containsKey(cs.getName())) {
                compositeSequencesMap.put(cs.getName(), cs);
            } else {
                // Previously this fell into the "does not exist" warning even though
                // the sequence was found; it is merely a duplicate of an earlier hit.
                AbstractService.log.debug("Composite sequence " + name + " already collected. Skipping ... ");
            }
        }
    }

    if (compositeSequencesMap.isEmpty())
        return null;

    return compositeSequencesMap.values();
}

From source file:com.alibaba.wasp.plan.parser.druid.DruidDDLParser.java

/**
 * Process Create Table Statement and generate Execute Plan
 * /*from   ww  w  .  ja  va 2 s.c  om*/
 */
private void getCreateTablePlan(ParseContext context, WaspSqlCreateTableStatement waspSqlCreateTableStatement,
        MetaEventOperation metaEventOperation) throws IOException {
    /**
     * example String sql3 = "CREATE TABLE User {Required Int64 user_id;
     * Required String name; Optional String phone;} primary key(user_id),ENTITY
     * GROUP ROOT, Entity Group Key(user_id);" ; String sql4 = "CREATE TABLE
     * Photo { Required Int64 user_id columnfamily cf comment 'aaa'; Required
     * Int32 photo_id comment 'child primary key'; Required Int64 time; Required
     * String full_url; Optional String thumbnail_url; Repeated String tag; }
     * primary key(user_id, photo_id) IN TABLE user,ENTITY GROUP KEY(user_id)
     * references User;";
     */

    // Table Info
    SQLExprTableSource tableSource = waspSqlCreateTableStatement.getTableSource();
    String tableName = parseFromClause(tableSource);
    // Check Table Name is legal.
    metaEventOperation.isLegalTableName(tableName);
    // Check if the table exists
    boolean tableNotExit = metaEventOperation.checkTableNotExists(tableName, true);
    if (!tableNotExit) {
        if (waspSqlCreateTableStatement.isIfNotExiists()) {
            context.setPlan(new NotingTodoPlan());
            LOG.debug("table " + tableName + " exits , isIfNotExiists is true, ignore");
            return;
        } else {
            throw new TableExistsException(tableName + " is already exists!");
        }
    }

    // Table category.
    WaspSqlCreateTableStatement.TableCategory category = waspSqlCreateTableStatement.getCategory();
    FTable.TableType tableType = FTable.TableType.CHILD;
    if (category != null && category == WaspSqlCreateTableStatement.TableCategory.ROOT) {
        tableType = FTable.TableType.ROOT;
    }

    // Primary Key.
    List<SQLExpr> primaryKeysSQLExpr = waspSqlCreateTableStatement.getPrimaryKeys();
    // table columns.
    List<SQLTableElement> tableElementList = waspSqlCreateTableStatement.getTableElementList(); // columns info
    LinkedHashMap<String, Field> columns = new LinkedHashMap<String, Field>();
    for (SQLTableElement element : tableElementList) {
        Field field = parse(element);
        columns.put(field.getName(), field);
    }
    // Check if columns are legal.
    metaEventOperation.areLegalTableColumns(null, columns.values());
    checkFamilyLegal(columns.values(), metaEventOperation);

    // Primary keys check will be done in this following method
    LinkedHashMap<String, Field> primaryKeys = parse(primaryKeysSQLExpr, columns);

    long createTime = System.currentTimeMillis();
    long lastAccessTime = createTime;
    String owner = "me";
    FTable table = new FTable(null, tableName, tableType, owner, createTime, lastAccessTime, columns,
            primaryKeys, primaryKeys.entrySet().iterator().next().getValue());
    SQLExpr entityGroupKeySQLExpr = waspSqlCreateTableStatement.getEntityGroupKey();
    Field entityGroupKey = primaryKeys.get(parseName(entityGroupKeySQLExpr));
    if (entityGroupKey == null) {
        throw new UnsupportedException(entityGroupKeySQLExpr + " is ForeignKey, but don't in primaryKeys.");
    }
    table.setEntityGroupKey(entityGroupKey);

    if (tableType == FTable.TableType.CHILD) {
        String parentName = parseFromClause(waspSqlCreateTableStatement.getInTableName());
        table.setParentName(parentName);
        if (!parentName.equals(parseFromClause(waspSqlCreateTableStatement.getReferenceTable()))) {
            throw new UnsupportedException(" in table " + waspSqlCreateTableStatement.getInTableName()
                    + " != references table " + waspSqlCreateTableStatement.getReferenceTable());
        }

        // Check parent's EGK equals child's EGK.
        TableSchemaCacheReader reader = TableSchemaCacheReader.getInstance(configuration);
        FTable parentTable = reader.getSchema(parentName);
        if (parentTable == null) {
            parentTable = TableSchemaCacheReader.getService(reader.getConf()).getTable(tableName);
        }
        if (parentTable == null) {
            throw new TableNotFoundException("Not found parent table:" + parentName);
        }

        if (!parentTable.getEntityGroupKey().getName().equals(table.getEntityGroupKey().getName())) {
            throw new UnsupportedException(
                    "Parent" + parentName + "'s egk doesn't equals Child" + tableName + "'s egk.");
        }

        // Check child's PKS contains parent's PKS.
        for (Field parentPrimaryKey : parentTable.getPrimaryKeys().values()) {
            boolean found = table.getPrimaryKeys().containsKey(parentPrimaryKey.getName());
            if (!found) {
                throw new UnsupportedException("Child's pks must contains parent's pks.");
            }
        }
    }

    SQLPartitioningClause partitioning = waspSqlCreateTableStatement.getPartitioning();
    byte[][] splitKeys = null;
    if (partitioning != null) {
        if (table.isRootTable()) {
            if (partitioning instanceof WaspSqlPartitionByKey) {
                WaspSqlPartitionByKey partitionKey = (WaspSqlPartitionByKey) partitioning;
                byte[] start = convert(null, partitionKey.getStart());
                byte[] end = convert(null, partitionKey.getEnd());
                int partitionCount = convertToInt(partitionKey.getPartitionCount());
                splitKeys = Bytes.split(start, end, partitionCount - 3);
            } else {
                throw new UnsupportedException("Unsupported SQLPartitioningClause " + partitioning);
            }
        } else {
            throw new UnsupportedException("Partition by only supported for Root Table");
        }
    }
    CreateTablePlan createTable = new CreateTablePlan(table, splitKeys);
    context.setPlan(createTable);
    LOG.debug("CreateTablePlan " + createTable.toString());
}