Example usage for org.apache.commons.lang3.tuple Pair getKey

List of usage examples for org.apache.commons.lang3.tuple Pair getKey

Introduction

On this page you can find example usage for org.apache.commons.lang3.tuple Pair getKey.

Prototype

@Override
public final L getKey() 

Source Link

Document

Gets the key from this pair.

This method implements the Map.Entry interface returning the left element as the key.

Usage

From source file:org.verdictdb.core.querying.ExecutableNodeBase.java

/**
 * Returns the source nodes of this node, ordered by their channel number.
 *
 * @return source nodes sorted in ascending channel order
 */
public List<ExecutableNodeBase> getSources() {
    List<Pair<ExecutableNodeBase, Integer>> temp = getSourcesAndChannels();
    Collections.sort(temp, new Comparator<Pair<ExecutableNodeBase, Integer>>() {
        @Override
        public int compare(Pair<ExecutableNodeBase, Integer> o1, Pair<ExecutableNodeBase, Integer> o2) {
            // Integer.compare avoids the overflow that subtraction-based
            // comparison (o1.getRight() - o2.getRight()) can produce for
            // values of opposite sign near Integer.MIN_VALUE/MAX_VALUE.
            return Integer.compare(o1.getRight(), o2.getRight());
        }
    });

    List<ExecutableNodeBase> ss = new ArrayList<>();
    for (Pair<ExecutableNodeBase, Integer> s : temp) {
        ss.add(s.getKey());
    }

    return ss;
}

From source file:org.verdictdb.core.querying.ExecutableNodeBase.java

/**
 * Returns a snapshot of this node's (source node, channel) pairs.
 *
 * A fresh list containing fresh pairs is returned, so callers cannot
 * mutate the internal {@code sources} list through the result.
 *
 * @return a copy of the (source, channel) pairs
 */
public List<Pair<ExecutableNodeBase, Integer>> getSourcesAndChannels() {
    List<Pair<ExecutableNodeBase, Integer>> copies = new ArrayList<>(sources.size());
    for (Pair<ExecutableNodeBase, Integer> source : sources) {
        copies.add(Pair.of(source.getKey(), source.getValue()));
    }
    return copies;
}

From source file:org.verdictdb.core.querying.ola.AsyncAggExecutionNode.java

@Override
public SqlConvertible createQuery(List<ExecutionInfoToken> tokens) throws VerdictDBException {
    //    super.createQuery(tokens);

    //    System.out.println("Starts the processing of AsyncAggNode.");
    //    System.out.println(selectQuery);

    // Read the aggregation metadata produced by the upstream node; assumes the
    // first token carries it under the "aggMeta" key -- TODO confirm with callers.
    ExecutionInfoToken token = tokens.get(0);
    AggMeta sourceAggMeta = (AggMeta) token.getValue("aggMeta");

    // First, calculate the scale factor and use it to replace the scale factor placeholder
    List<Pair<UnnamedColumn, Double>> conditionToScaleFactor = composeScaleFactorForTierCombinations(
            sourceAggMeta, INNER_RAW_AGG_TABLE_ALIAS);

    // update the agg column scaling factor:
    // build the operand list for a CASE WHEN expression of the form
    // (cond1, scale1, cond2, scale2, ..., defaultScale).
    List<UnnamedColumn> scalingOperands = new ArrayList<>();
    for (Pair<UnnamedColumn, Double> condToScale : conditionToScaleFactor) {
        UnnamedColumn cond = condToScale.getKey();
        double scale = condToScale.getValue();
        scalingOperands.add(cond);
        scalingOperands.add(ConstantColumn.valueOf(scale));
    }
    scalingOperands.add(ConstantColumn.valueOf(1.0)); // default scaling factor is always 1.0
    ColumnOp scalingColumn = ColumnOp.casewhen(scalingOperands);
    // Install the scaling expression as the first operand of every aggregate column.
    for (ColumnOp aggcol : aggColumns) {
        aggcol.setOperand(0, scalingColumn);
    }

    // Restore the originally requested select list before delegating query creation.
    selectQuery = replaceWithOriginalSelectList(selectQuery, sourceAggMeta);
    return super.createQuery(tokens);
}

From source file:org.verdictdb.core.scrambling.ScrambleMetaSet.java

/**
 * Looks up the {@link ScrambleMeta} registered under the given
 * (schema name, table name) key.
 *
 * @param metakey the (schema, table) pair identifying the scrambled table
 * @return the matching meta entry, or {@code null} when none is registered
 */
private ScrambleMeta getMetaFor(Pair<String, String> metakey) {
    for (Pair<Pair<String, String>, ScrambleMeta> entry : metaSet) {
        if (entry.getKey().equals(metakey)) {
            return entry.getValue();
        }
    }
    return null;
}

From source file:org.verdictdb.core.scrambling.ScrambleMetaSet.java

/**
 * Checks whether a meta entry is registered under the given
 * (schema name, table name) key.
 *
 * @param metakey the (schema, table) pair identifying the scrambled table
 * @return {@code true} if an entry with the given key exists
 */
private boolean doesContain(Pair<String, String> metakey) {
    boolean found = false;
    for (Pair<Pair<String, String>, ScrambleMeta> entry : metaSet) {
        if (entry.getKey().equals(metakey)) {
            found = true;
            break;
        }
    }
    return found;
}

From source file:org.verdictdb.sqlreader.RelationStandardizer.java

/**
 * Rewrites the alias of a table source to a generated one
 * ({@code verdictTableAliasPrefix + itemID}) and collects the column names
 * the source exposes.
 *
 * Handles three kinds of sources:
 * - BaseTable: columns are read from the metadata store;
 * - JoinTable: recurses into each joined relation and rewrites join conditions;
 * - SelectQuery: standardizes the subquery and records its select-list columns.
 *
 * @param table the table source to set up (mutated in place)
 * @return a pair of (exposed column names, rewritten table source), or
 *         {@code null} for table kinds not handled here
 * @throws VerdictDBDbmsException if the column metadata lookup fails
 */
private Pair<List<String>, AbstractRelation> setupTableSource(AbstractRelation table)
        throws VerdictDBDbmsException {
    // in order to prevent informal table alias, we replace all table alias
    if (!(table instanceof JoinTable)) {
        if (table.getAliasName().isPresent()) {
            String alias = table.getAliasName().get();
            // strip quoting characters so later lookups use the bare alias text
            alias = alias.replace("`", "");
            alias = alias.replace("\"", "");
            // remember the mapping old alias -> generated alias for filter rewriting
            oldTableAliasMap.put(alias, verdictTableAliasPrefix + itemID);
        }
        table.setAliasName(verdictTableAliasPrefix + itemID++);
    }
    // if (!table.getAliasName().isPresent() && !(table instanceof JoinTable)) {
    //  table.setAliasName(verdictTableAliasPrefix + itemID++);
    // }
    if (table instanceof BaseTable) {
        BaseTable bt = (BaseTable) table;
        List<String> colName = new ArrayList<>();
        if (bt.getSchemaName() == null) {
            // fall back to the connection's default schema when none is given
            bt.setSchemaName(meta.getDefaultSchema());
        }
        List<Pair<String, String>> cols = meta.getColumns(bt.getSchemaName(), bt.getTableName());
        for (Pair<String, String> c : cols) {
            // pair key is the column name (value appears unused here)
            colNameAndTableAlias.put(c.getKey(), bt.getAliasName().get());
            colName.add(c.getKey());
        }
        tableInfoAndAlias.put(ImmutablePair.of(bt.getSchemaName(), bt.getTableName()),
                table.getAliasName().get());
        return new ImmutablePair<>(colName, table);

    } else if (table instanceof JoinTable) {
        List<String> joinColName = new ArrayList<>();
        for (int i = 0; i < ((JoinTable) table).getJoinList().size(); i++) {
            // recurse into each joined relation; replace it with its rewritten form
            Pair<List<String>, AbstractRelation> result = setupTableSource(
                    ((JoinTable) table).getJoinList().get(i));
            ((JoinTable) table).getJoinList().set(i, result.getValue());
            joinColName.addAll(result.getKey());
            if (i != 0) {
                // condition i-1 joins relation i to the preceding ones; rewrite its
                // column references to use the generated aliases
                ((JoinTable) table).getCondition().set(i - 1,
                        replaceFilter(((JoinTable) table).getCondition().get(i - 1)));
            }
        }
        return new ImmutablePair<>(joinColName, table);

    } else if (table instanceof SelectQuery) {
        List<String> colName = new ArrayList<>();
        // standardize the subquery with a child standardizer that shares our
        // alias/column bookkeeping, then restore the original alias name
        RelationStandardizer g = new RelationStandardizer(meta, syntax);
        g.oldTableAliasMap.putAll(oldTableAliasMap);
        g.setTableInfoAndAlias(tableInfoAndAlias);
        g.setColNameAndTableAlias(colNameAndTableAlias);
        g.setColNameAndColAlias(colNameAndColAlias);
        String aliasName = table.getAliasName().get();
        table = g.standardize((SelectQuery) table);
        table.setAliasName(aliasName);

        // Invariant: Only Aliased Column or Asterisk Column should appear in the subquery
        for (SelectItem sel : ((SelectQuery) table).getSelectList()) {
            if (sel instanceof AliasedColumn) {
                // If the aliased name of the column is replaced by ourselves, we should remember the
                // column name
                if (((AliasedColumn) sel).getColumn() instanceof BaseColumn
                        && ((AliasedColumn) sel).getAliasName().matches("^vc[0-9]+$")) {
                    colNameAndTableAlias.put(((BaseColumn) ((AliasedColumn) sel).getColumn()).getColumnName(),
                            table.getAliasName().get());
                    colNameAndTempColAlias.put(((BaseColumn) ((AliasedColumn) sel).getColumn()).getColumnName(),
                            ((AliasedColumn) sel).getAliasName());
                } else
                    colNameAndTableAlias.put(((AliasedColumn) sel).getAliasName(), table.getAliasName().get());
                colName.add(((AliasedColumn) sel).getAliasName());

            } else if (sel instanceof AsteriskColumn) {
                // put all the columns in the fromlist of subquery to the colNameAndTableAlias
                HashMap<String, String> subqueryColumnList = g.getColNameAndTableAlias();
                for (String col : subqueryColumnList.keySet()) {
                    colNameAndTableAlias.put(col, table.getAliasName().get());
                    colName.add(col);
                }
            }
        }
        return new ImmutablePair<>(colName, table);
    }
    // unrecognized relation kind: no columns to report
    return null;
}

From source file:org.wso2.carbon.uuf.internal.core.create.DependencyTreeParser.java

/**
 * Parses a textual dependency tree into two views: (1) a flattened multimap
 * from each component to all components listed beneath it, and (2) a list of
 * dependency sets grouped by tree level.
 *
 * @param dependencyTreeLines lines of the dependency tree, one node per line,
 *                            with nesting encoded by {@code countLevel}
 * @return the parse result holding both views
 */
public static Result parse(List<String> dependencyTreeLines) {
    // Flattened dependencies map.
    // key   = component name
    // value = all dependencies of the 'key'
    SetMultimap<String, String> flattenedDependencies = HashMultimap.create();
    // Leveled dependencies list.
    // index       = dependency level, index 0 == root component's dependencies
    // List.get(i) = set of dependencies in level i
    List<Set<ComponentData>> leveledDependencies = new ArrayList<>(6);

    int previousLevel = 0;
    String previousComponentName = null;
    // Stack of (ancestor component name, dependencies collected for it so far);
    // every component currently on the path from the root to the current line.
    Deque<Pair<String, List<String>>> parentNodesStack = new LinkedList<>();

    for (int i = 0; i < dependencyTreeLines.size(); i++) {
        String line = dependencyTreeLines.get(i);
        int level = countLevel(line);
        int jump = (level - previousLevel);
        ComponentData currentComponent = getComponentData(line);

        // Record the component at its level, creating the level set on first use.
        if (level < leveledDependencies.size()) {
            leveledDependencies.get(level).add(currentComponent);
        } else {
            Set<ComponentData> set = new HashSet<>();
            set.add(currentComponent);
            leveledDependencies.add(level, set);
        }

        if (i == 0) {
            // Very first leaf dependency.
            previousComponentName = currentComponent.name;
            continue;
        }
        if (jump < 0) {
            // Dependency level decreased, so remove entries from the stack.
            // Each popped ancestor's collected dependencies are now complete.
            for (int j = Math.abs(jump); j > 0; j--) {
                Pair<String, List<String>> entry = parentNodesStack.removeLast();
                flattenedDependencies.putAll(entry.getKey(), entry.getValue());
            }
        } else if (jump > 0) { // in practice a jump of exactly one level
            // Dependency level increased, so add an entry to the stack:
            // the previous line is the parent of the current one.
            parentNodesStack.add(new ImmutablePair<>(previousComponentName, new ArrayList<>(3)));
        }
        // (jump == 0): Same dependency level, no need to change the stack.

        // Add current component name to all parent nodes as a dependency.
        for (Pair<String, List<String>> entry : parentNodesStack) {
            entry.getValue().add(currentComponent.name);
        }

        previousLevel = level;
        previousComponentName = currentComponent.name;
    }
    // If there is any remaining stack elements, add them to the flattenedDependencies.
    for (Pair<String, List<String>> entry : parentNodesStack) {
        flattenedDependencies.putAll(entry.getKey(), entry.getValue());
    }

    return new Result(flattenedDependencies, leveledDependencies);
}

From source file:org.xwiki.search.solr.internal.job.IndexerJob.java

/**
 * Synchronizes the Solr index with the database by walking the diff iterator
 * and adding, updating, or deleting index entries as indicated by each
 * (document reference, action) pair.
 *
 * @param progressSize number of steps to declare to the progress manager
 * @param iterator     iterator over (document reference, action) pairs
 */
private void updateSolrIndex(int progressSize, DiffDocumentIterator<String> iterator) {
    this.progressManager.pushLevelProgress(progressSize, this);

    try {
        // Per-action counters, indexed by Action.ordinal(), for the summary log.
        long[] counter = new long[Action.values().length];

        while (iterator.hasNext()) {
            this.progressManager.startStep(this);

            Pair<DocumentReference, Action> entry = iterator.next();
            if (entry.getValue() == Action.ADD || entry.getValue() == Action.UPDATE) {
                // The database entry has not been indexed or the indexed version doesn't match the latest
                // version
                // from the database.
                this.indexer.index(entry.getKey(), true);
            } else if (entry.getValue() == Action.DELETE && getRequest().isRemoveMissing()) {
                // The index entry doesn't exist anymore in the database.
                this.indexer.delete(entry.getKey(), true);
            }

            // Count every action, including SKIPs that triggered no indexer call.
            counter[entry.getValue().ordinal()]++;

            this.progressManager.endStep(this);
        }

        this.logger.info(
                "{} documents added, {} deleted and {} updated during the synchronization of the Solr index.",
                counter[Action.ADD.ordinal()], counter[Action.DELETE.ordinal()],
                counter[Action.UPDATE.ordinal()]);
    } finally {
        // Always unwind the progress level, even if indexing throws.
        this.progressManager.popLevelProgress(this);
    }
}

From source file:petascope.util.ras.TypeRegistry.java

/**
 * Returns the mdd type for a given collection type.
 *
 * @param collectionType the collection type.
 * @return the mdd type, empty if nothing is found.
 *//*  w w w. j  a  v a2  s.co  m*/
/**
 * Returns the mdd type registered for a given collection type.
 *
 * @param collectionType the collection type to look up
 * @return the corresponding mdd type, or the empty string if none is found
 */
public String getMddTypeForCollectionType(String collectionType) {
    for (Pair<String, String> typeDefinition : setTypeDefinitions) {
        if (collectionType.equals(typeDefinition.getKey())) {
            return typeDefinition.getValue();
        }
    }
    return "";
}

From source file:petascope.util.ras.TypeRegistry.java

/**
 * Builds the registry from the collected types gathered by parsing the rasdl output
 *//*from w w w.  j av a2 s. c  om*/
/**
 * Builds the registry from the collected types gathered by parsing the rasdl output.
 *
 * For each set type definition, resolves its marray domain type, derives the
 * base type (the domain type minus its trailing dimension part), parses the
 * declared null values, and stores a {@link TypeRegistryEntry} in the registry.
 */
private void buildRegistry() {
    for (Pair<String, String> entry : setTypeDefinitions) {
        String domainType = marrayTypeDefinitions.get(entry.getValue());
        if (domainType == null) {
            // No marray definition for this set type; nothing to register.
            continue;
        }
        String[] domainTypeParts = domainType.split(",");
        if (domainTypeParts.length < 2) {
            // Malformed domain type (no dimension part); skip it.
            continue;
        }
        // The base type is the domain type without its last comma-separated part.
        String[] baseTypeParts = ArrayUtils.remove(domainTypeParts, domainTypeParts.length - 1);
        String baseType = StringUtils.join(baseTypeParts, "");
        // Guard against set types with no recorded null values (previously an NPE).
        String nullValueSpec = setTypeNullValues.get(entry.getKey());
        String[] nullParts = (nullValueSpec == null) ? new String[0] : nullValueSpec.split(",");
        ArrayList<String> nullValues = new ArrayList<String>();
        for (String i : nullParts) {
            if (i.isEmpty()) {
                continue;
            }
            //if the value that is parsed is an interval with the same limits (e.g. 5:5), add only 1
            //value. This is needed because currently there is a bug when creating types via rasql,
            //which doesn't allow single values to be specified. However, petascope needs to display single
            //values when presenting the output to the user.
            if (i.contains(":")) {
                String[] parts = i.split(":");
                // '&&' (not bitwise '&') so parts[1] is only read when two parts
                // exist; the original '&' threw ArrayIndexOutOfBoundsException
                // on inputs like "5:" where split yields a single element.
                if (parts.length == 2 && parts[0].equals(parts[1])) {
                    i = parts[0];
                }
            }
            nullValues.add(i);
        }
        typeRegistry.put(entry.getKey(), new TypeRegistryEntry(baseType, domainType, nullValues));
    }
}