Example usage for java.util LinkedHashMap size

Introduction

This page lists example usages of java.util.LinkedHashMap size().

Prototype

int size();

Document

Returns the number of key-value mappings in this map.
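
A minimal, self-contained sketch of size() in action (standard JDK behaviour; the map contents are purely illustrative):

import java.util.LinkedHashMap;

public class LinkedHashMapSizeExample {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<String, Integer>();
        System.out.println(map.size()); // 0: a new map has no mappings

        map.put("a", 1);
        map.put("b", 2);
        map.put("a", 3);                // re-putting a key replaces the value
        System.out.println(map.size()); // 2: the map still has two mappings

        map.remove("b");
        System.out.println(map.size()); // 1
    }
}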

Usage

From source file:org.jamwiki.servlets.CategoryServlet.java

/**
 * Build the category list for the current virtual wiki and add it to the ModelAndView.
 */
private void viewCategories(HttpServletRequest request, ModelAndView next, WikiPageInfo pageInfo)
        throws Exception {
    String virtualWiki = pageInfo.getVirtualWikiName();
    Pagination pagination = ServletUtil.loadPagination(request, next);
    List<Category> categoryObjects = WikiBase.getDataHandler().getAllCategories(virtualWiki, pagination);
    LinkedHashMap<String, String> categories = new LinkedHashMap<String, String>();
    for (Category category : categoryObjects) {
        String key = category.getName();
        String value = key.substring(Namespace.namespace(Namespace.CATEGORY_ID).getLabel(virtualWiki).length()
                + Namespace.SEPARATOR.length());
        categories.put(key, value);
    }
    next.addObject("categoryCount", categories.size());
    next.addObject("categories", categories);
    if (categories.isEmpty()) {
        pageInfo.addMessage(new WikiMessage("allcategories.message.none"));
    }
    pageInfo.setPageTitle(new WikiMessage("allcategories.title"));
    pageInfo.setContentJsp(JSP_CATEGORIES);
    pageInfo.setSpecial(true);
}
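
The substring call above strips the namespace prefix from each category page name before display. A stand-alone sketch of the same trimming, with the label and separator hard-coded as assumptions (in JAMWiki they come from Namespace.namespace(Namespace.CATEGORY_ID).getLabel(virtualWiki) and Namespace.SEPARATOR):

String label = "Category";   // assumed namespace label for the virtual wiki
String separator = ":";      // assumed value of Namespace.SEPARATOR
String key = "Category:Science";
// Drop "Category:" to obtain the display value stored in the map.
String value = key.substring(label.length() + separator.length());
System.out.println(value);   // prints "Science"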

From source file:eu.hydrologis.jgrass.charting.impl.JGrassXYTimeBarChart.java

/**
 * A line chart creator based on series made up of two values per row. Multiple independent
 * series are supported.
 * 
 * @param chartValues a hashmap containing as keys the names of the series and as values the
 *        double[][] representing the data. In this case the x value is assumed to be a date.
 *        Important: the data matrix has to be passed as two rows (not two columns).
 * @param timeClass the RegularTimePeriod subclass used to bin the x values.
 * @param barWidth the bar width; pass -1 to keep the plain time series dataset without
 *        wrapping it in an XYBarDataset.
 */
public JGrassXYTimeBarChart(LinkedHashMap<String, double[][]> chartValues, Class<RegularTimePeriod> timeClass,
        double barWidth) {
    try {
        chartSeries = new TimeSeries[chartValues.size()];

        constructor = timeClass.getConstructor(Date.class);

        final Iterator<String> it = chartValues.keySet().iterator();
        int count = 0;
        while (it.hasNext()) {
            final String key = it.next();
            final double[][] values = chartValues.get(key);

            chartSeries[count] = new TimeSeries(key, timeClass);
            for (int i = 0; i < values[0].length; i++) {
                // important: the data matrix has to be passed as two rows (not
                // two columns)
                chartSeries[count].add(constructor.newInstance(new Date((long) values[0][i])), values[1][i]);
            }
            count++;
        }

        lineDataset = new TimeSeriesCollection();
        for (int i = 0; i < chartSeries.length; i++) {
            lineDataset.addSeries(chartSeries[i]);
        }
        lineDataset.setXPosition(TimePeriodAnchor.MIDDLE);

        if (barWidth != -1)
            dataset = new XYBarDataset(lineDataset, barWidth);
    } catch (Exception e) {
        ChartPlugin.log("ChartPlugin", e); //$NON-NLS-1$
    }

}
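
As the javadoc stresses, each series' data matrix must be two rows (x values, then y values), not two columns; the constructor also sizes its TimeSeries array from chartValues.size(). A sketch of building a conforming input map (the timestamps and readings are made up):

LinkedHashMap<String, double[][]> chartValues = new LinkedHashMap<String, double[][]>();
chartValues.put("rainfall", new double[][] {
        { 1.0e12, 1.1e12, 1.2e12 }, // row 0: x values as epoch milliseconds, turned into Dates
        { 0.0, 4.2, 1.7 }           // row 1: y values
});
// chartValues.size() == 1, so exactly one TimeSeries is created for the chart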

From source file:com.opengamma.analytics.financial.curve.ParameterUnderlyingSensitivityCalculator.java

/**
 * Computes the sensitivity with respect to the parameters from the point sensitivities to the continuously compounded rate.
 * @param sensitivity The point sensitivity.
 * @param fixedCurves The names of the fixed curves (for which the parameter sensitivities are not computed, even if they are necessary for the instrument pricing).
 * The curves in the list may or may not be in the bundle. Not null.
 * @param bundle The curve bundle with all the curves with respect to which the sensitivity should be computed. Not null.
 * @return The sensitivity (as a DoubleMatrix1D).
 */
@Override
public DoubleMatrix1D pointToParameterSensitivity(final InterestRateCurveSensitivity sensitivity,
        final Set<String> fixedCurves, final YieldCurveBundle bundle) {
    Integer nbCurve = 0;
    LinkedHashMap<String, Integer> curveNum = new LinkedHashMap<String, Integer>();
    for (final String name : bundle.getAllNames()) { // loop over all curves (by name)
        if (!fixedCurves.contains(name)) {
            curveNum.put(name, nbCurve++);
        }
    }
    nbCurve = 0;
    int[] nbNewParameters = new int[curveNum.size()];
    // Implementation note: the number of parameters specific to each curve (the parameters
    // coming from underlying curves are subtracted below).
    int[] startCleanParameter = new int[curveNum.size()];
    // Implementation note: the start index of each curve's block in the clean parameter array.
    int[][] startDirtyParameter = new int[curveNum.size()][];
    int[][] indexOther = new int[curveNum.size()][];
    // Implementation note: the start of the different blocks of parameters. First the other curves, then the new part.
    int nbCleanParameters = 0;
    int currentDirtyStart = 0;
    for (final String name : bundle.getAllNames()) { // loop over all curves (by name)
        if (!fixedCurves.contains(name)) {
            final YieldAndDiscountCurve curve = bundle.getCurve(name);
            List<String> underlyingCurveNames = curve.getUnderlyingCurvesNames();
            startCleanParameter[nbCurve] = nbCleanParameters;
            nbNewParameters[nbCurve] = curve.getNumberOfParameters();
            List<Integer> indexOtherList = new ArrayList<Integer>();
            List<Integer> startDirtyParameterList = new ArrayList<Integer>();
            for (String u : underlyingCurveNames) {
                Integer i = curveNum.get(u);
                if (i != null) {
                    indexOtherList.add(i);
                    nbNewParameters[nbCurve] -= nbNewParameters[i];
                    startDirtyParameterList.add(currentDirtyStart);
                    currentDirtyStart += nbNewParameters[i];
                }
            }
            startDirtyParameterList.add(currentDirtyStart);
            currentDirtyStart += nbNewParameters[nbCurve];
            indexOther[nbCurve] = ArrayUtils.toPrimitive(indexOtherList.toArray(new Integer[0]));
            startDirtyParameter[nbCurve] = ArrayUtils
                    .toPrimitive(startDirtyParameterList.toArray(new Integer[0]));
            nbCleanParameters += nbNewParameters[nbCurve];
            nbCurve++;
        }
    }
    final List<Double> sensiDirtyList = new ArrayList<Double>();
    for (final String name : bundle.getAllNames()) { // loop over all curves (by name)
        if (!fixedCurves.contains(name)) {
            final YieldAndDiscountCurve curve = bundle.getCurve(name);
            List<Double> oneCurveSensitivity = pointToParameterSensitivity(
                    sensitivity.getSensitivities().get(name), curve);
            sensiDirtyList.addAll(oneCurveSensitivity);
        }
    }
    double[] sensiDirty = ArrayUtils.toPrimitive(sensiDirtyList.toArray(new Double[0]));
    double[] sensiClean = new double[nbCleanParameters];
    for (int loopcurve = 0; loopcurve < nbCurve; loopcurve++) {
        for (int loopo = 0; loopo < indexOther[loopcurve].length; loopo++) {
            for (int loops = 0; loops < nbNewParameters[indexOther[loopcurve][loopo]]; loops++) {
                sensiClean[startCleanParameter[indexOther[loopcurve][loopo]]
                        + loops] += sensiDirty[startDirtyParameter[loopcurve][loopo] + loops];
            }
        }
        for (int loops = 0; loops < nbNewParameters[loopcurve]; loops++) {
            sensiClean[startCleanParameter[loopcurve]
                    + loops] += sensiDirty[startDirtyParameter[loopcurve][indexOther[loopcurve].length]
                            + loops];
        }
    }
    return new DoubleMatrix1D(sensiClean);
}
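
The final loops fold the "dirty" sensitivity vector (one block per curve, with sub-blocks for underlying curves) into the "clean" vector (one block per curve, no duplication). A toy stand-alone illustration of that bookkeeping, using hypothetical sizes: curve 0 has 3 parameters, and curve 1 has curve 0 as underlying plus 2 parameters of its own:

double[] sensiDirty = { 1, 1, 1,     // curve 0's own block
        10, 10, 10,                  // curve 1's sub-block for underlying curve 0
        5, 5 };                      // curve 1's own parameters
double[] sensiClean = new double[5]; // 3 (curve 0) + 2 (curve 1)

// Curve 0's own block accumulates into its clean slot.
for (int i = 0; i < 3; i++) sensiClean[i] += sensiDirty[i];
// Curve 1's underlying-curve sub-block also accumulates into curve 0's clean slot.
for (int i = 0; i < 3; i++) sensiClean[i] += sensiDirty[3 + i];
// Curve 1's own parameters accumulate into its clean slot.
for (int i = 0; i < 2; i++) sensiClean[3 + i] += sensiDirty[6 + i];
// sensiClean is now { 11, 11, 11, 5, 5 }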

From source file:com.alibaba.wasp.plan.parser.druid.DruidDMLParser.java

/**
 * Process a DELETE statement and generate a DeletePlan.
 */
private void getDeletePlan(ParseContext context, SQLDeleteStatement sqlDeleteStatement,
        MetaEventOperation metaEventOperation) throws IOException {
    // DELETE FROM users WHERE id = 123;

    // Parse The FROM clause
    String wtableName = parseFromClause(sqlDeleteStatement.getTableSource());
    LOG.debug("UPDATE SQL From clause " + sqlDeleteStatement.getTableSource());
    // check if table exists and get Table info
    FTable table = metaEventOperation.checkAndGetTable(wtableName, false);

    // Parse The WHERE clause
    SQLExpr where = sqlDeleteStatement.getWhere();
    LOG.debug("UPDATE SQL where " + where);
    LinkedHashMap<String, Condition> eqConditions = new LinkedHashMap<String, Condition>();
    LinkedHashMap<String, Condition> ranges = new LinkedHashMap<String, Condition>();
    ParserUtils.parse(where, eqConditions, ranges);
    if (ranges.size() > 0) {
        throw new UnsupportedException("RANGE is not supported!");
    }

    // check if table has this columns
    metaEventOperation.checkAndGetFields(table, eqConditions.keySet());

    List<Pair<String, byte[]>> primaryKeyPairs = metaEventOperation.getPrimaryKeyPairList(table, eqConditions,
            null);
    if (primaryKeyPairs == null) {
        throw new NotMatchPrimaryKeyException("Not match primary key.");
    }

    byte[] primaryKey = RowBuilder.build().genRowkey(primaryKeyPairs);
    DeleteAction action = new DeleteAction(wtableName, primaryKey);
    if (context.isGenWholePlan()) {
        Condition entityGroupKeyCondition = ParserUtils.getCondition(table.getEntityGroupKey().getName(),
                eqConditions);
        // Get entityGroupLocation according to entity group key
        EntityGroupLocation entityGroupLocation = this.connection.locateEntityGroup(
                Bytes.toBytes(table.getTableName()),
                DruidParser.convert(table.getColumn(entityGroupKeyCondition.getFieldName()),
                        entityGroupKeyCondition.getValue()));
        action.setEntityGroupLocation(entityGroupLocation);
    }
    List<DeleteAction> actions = new ArrayList<DeleteAction>();
    actions.add(action);
    DeletePlan deletePlan = new DeletePlan(actions);
    context.setPlan(deletePlan);
    LOG.debug("DeletePlan " + deletePlan.toString());
}
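
Note that eqConditions is a LinkedHashMap rather than a plain HashMap: iteration follows insertion order, presumably so the parsed conditions keep a deterministic order when they are turned into primary-key pairs and a row key. A minimal illustration of that ordering guarantee (the column names are hypothetical):

LinkedHashMap<String, String> eq = new LinkedHashMap<String, String>();
eq.put("user_id", "123");
eq.put("region", "eu");
for (String column : eq.keySet()) {
    System.out.println(column); // always prints user_id, then region
}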

From source file:org.skb.lang.dal.DalPass2_Ast.java

public void testAtom(Token tk) {
    this.atoms.scope.push(tk);
    String atomScope = this.atoms.scope.toString();
    String scopeSep = this.atoms.scope.separator();
    LinkedHashMap<String, String> path = this.buildPathList(atomScope, scopeSep);
    ArrayList<String> keys = new ArrayList<String>(path.keySet());
    int pathSize = StringUtils.split(atomScope, scopeSep).length;

    if (path.size() == 0) {
        System.err.println("error: ID not known [" + tk.getText() + "]");
        return;
    }

    //first can be Repository or Package
    if (path.get(keys.get(0)).equals(DalConstants.Tokens.dalREPOSITORY)) {
        //in Repository, we have only tables, in there lots of fields (nothing to test) and optionally a sequence
        if (path.get(keys.get(2)).equals(DalConstants.Tokens.dalSEQUENCE)) {
            //in sequence we have many fields (level 4)
            if (path.get(keys.get(3)).equals(DalConstants.Tokens.dalFIELD)) {
                //now remove "sequence@@" and test if Atom exists
                String t = atomScope.replace("sequence" + scopeSep, "");
                if (!this.atoms.containsKey(t))
                    System.err.println("erorr in repository: field in sequence not defined for table");
            }
        }
    } else if (path.get(keys.get(0)).equals(DalConstants.Tokens.dalPACKAGE)) {
        //first check for definitions for a repository table
        if (path.get(keys.get(1)).equals(DalConstants.Tokens.dalREPOSITORY)
                && path.get(keys.get(2)).equals(DalConstants.Tokens.dalTABLE)) {
            //remove the first path entry (current package) and test for the repository, print error only for the actual repo Atom
            if (pathSize == 3
                    && !this.atoms.containsKey(keys.get(1).substring(keys.get(1).indexOf(scopeSep) + 2))) {
                System.err.println("unknown repository referenced in package");
            }
            //remove the first path entry (current package) and test for the repository table, print error only for the actual repo-table Atom
            if (pathSize == 4
                    && !this.atoms.containsKey(keys.get(2).substring(keys.get(2).indexOf(scopeSep) + 2))) {
                System.err.println("unknown repository-table referenced in package");
            }
            //check for referenced field in table for repo, error if field is not defined in repo-table
            if (pathSize == 5) {
                String[] split = StringUtils.split(atomScope, scopeSep);
                String field = StringUtils.join(new String[] { split[1], split[2], split[4] }, scopeSep);
                if (!this.atoms.containsKey(field))
                    System.err.println("unknown field for repository-table referenced in package");
            }
        }
        //next check if we are defining a package table
        if (path.get(keys.get(1)).equals(DalConstants.Tokens.dalTABLE)) {
            //in a table, we have lots of fields and optionally a sequence (s=3), but we can only check on the sequence at the end
            if (path.get(keys.get(2)).equals(DalConstants.Tokens.dalFIELD)) {
                //System.err.println(pathSize+" = table field = "+atomScope);
            }
            if (path.get(keys.get(2)).equals(DalConstants.Tokens.dalSEQUENCE)) {
                //in sequence we only care about size of 4
                if (pathSize == 4) {
                    String t = atomScope.replace("sequence" + scopeSep, "");
                    if (!this.atoms.containsKey(t))
                        System.err.println("erorr in repository: field in sequence not defined for table");
                }
            }
        }
        //next check if we are adding actions to the package
        if (path.get(keys.get(1)).equals(DalConstants.Tokens.dalACTIONS)) {
            //check for the referenced table, size=4
            if (pathSize == 4) {
                String[] split = StringUtils.split(atomScope, scopeSep);
                String field = StringUtils.join(new String[] { split[0], split[3] }, scopeSep);
                if (!this.atoms.containsKey(field))
                    System.err.println("unknown table referenced in action for package");
            }
            //check for the individual fields of the actions, if keys exist in the named table
            if (pathSize == 5) {
                String[] split = StringUtils.split(atomScope, scopeSep);
                String field = StringUtils.join(new String[] { split[0], split[3], split[4] }, scopeSep);
                if (!this.atoms.containsKey(field))
                    System.err.println("unknown key for table referenced in action for package");
            }
        }
        //last check if we are adding data to the package
        if (path.get(keys.get(1)).equals(DalConstants.Tokens.dalDATA)) {
            //first check if referenced table exists in the package
            if (pathSize == 4) {
                String[] split = StringUtils.split(atomScope, scopeSep);
                String field = StringUtils.join(new String[] { split[0], split[3] }, scopeSep);
                if (!this.atoms.containsKey(field))
                    System.err.println("unknown table referenced in data for package");
            }
            if (pathSize == 5) {
                String[] split = StringUtils.split(atomScope, scopeSep);
                String field = StringUtils.join(new String[] { split[0], split[3], split[4] }, scopeSep);
                if (!this.atoms.containsKey(field))
                    System.err.println("unknown key for table referenced in data for package");
            }
        }
    }
}

From source file:com.alibaba.wasp.plan.parser.druid.DruidDMLParser.java

/**
 * Process an UPDATE statement and generate an UpdatePlan.
 */
private void getUpdatePlan(ParseContext context, SQLUpdateStatement sqlUpdateStatement,
        MetaEventOperation metaEventOperation) throws IOException {
    // UPDATE users SET age = 24, name = 'Mike' WHERE id = 123;

    // Parse The FROM clause
    String fTableName = parseFromClause(sqlUpdateStatement.getTableSource());
    LOG.debug("UPDATE SQL From clause " + sqlUpdateStatement.getTableSource());
    // check if table exists and get Table info
    FTable table = metaEventOperation.checkAndGetTable(fTableName, false);

    // Parse The WHERE clause
    SQLExpr where = sqlUpdateStatement.getWhere();
    LOG.debug("UPDATE SQL where " + where);

    LinkedHashMap<String, Condition> conditions = new LinkedHashMap<String, Condition>();
    LinkedHashMap<String, Condition> ranges = new LinkedHashMap<String, Condition>();
    ParserUtils.parse(where, conditions, ranges);
    if (ranges.size() > 0) {
        throw new UnsupportedException("RANGE is not supported by update operation!");
    }

    Set<String> conditionColumns = ParserUtils.getColumns(conditions);
    // check if table has this columns
    metaEventOperation.checkAndGetFields(table, conditionColumns);
    // check if where clause is primary keys
    metaEventOperation.checkIsPrimaryKey(table, conditionColumns);

    List<Pair<String, byte[]>> primaryKeyPairs = metaEventOperation.getPrimaryKeyPairList(table, conditions,
            null);
    if (primaryKeyPairs == null) {
        throw new IOException("Not match primary key.");
    }

    byte[] primaryKey = RowBuilder.build().genRowkey(primaryKeyPairs);

    UpdateAction action = new UpdateAction(fTableName, primaryKey);
    // Parse Update Item
    List<SQLUpdateSetItem> updateItems = sqlUpdateStatement.getItems();
    for (SQLUpdateSetItem updateItem : updateItems) {
        String columnName = parseColumn(updateItem.getColumn());
        // check this FTable has the column and not pk
        metaEventOperation.checkFieldNotInPrimaryKeys(table, columnName);
        Field field = table.getColumn(columnName);
        // Check the input is the same as DataType
        checkType(field, updateItem.getValue());
        byte[] value = convert(field, updateItem.getValue());
        String familyName = metaEventOperation.getColumnFamily(fTableName, columnName);
        action.addEntityColumn(fTableName, familyName, columnName, field.getType(), value);
    }
    if (context.isGenWholePlan()) {
        Condition entityGroupKeyCondition = ParserUtils.getCondition(table.getEntityGroupKey().getName(),
                conditions);
        // Get entityGroupLocation according to entity group key
        EntityGroupLocation entityGroupLocation = this.connection.locateEntityGroup(
                Bytes.toBytes(table.getTableName()),
                DruidParser.convert(table.getColumn(entityGroupKeyCondition.getFieldName()),
                        entityGroupKeyCondition.getValue()));
        action.setEntityGroupLocation(entityGroupLocation);
    }
    action.setSessionId(context.getSessionId());
    List<UpdateAction> actions = new ArrayList<UpdateAction>();
    actions.add(action);
    UpdatePlan plan = new UpdatePlan(actions);
    context.setPlan(plan);
    LOG.debug("UpdatePlan " + plan);
}

From source file:org.broad.igv.track.CombinedFeatureSource.java

/**
 * Perform the actual combination operation between the constituent data
 * sources. This implementation re-runs the operation each call.
 *
 * @param chr   the chromosome to query
 * @param start the start of the query interval
 * @param end   the end of the query interval
 * @return an iterator over the combined features
 * @throws IOException
 */
@Override
public Iterator<Feature> getFeatures(String chr, int start, int end) throws IOException {

    String cmd = Globals.BEDtoolsPath + " " + this.operation.getCmd();
    LinkedHashMap<String, Integer> tempFiles = createTempFiles(chr, start, end);
    String[] fiNames = tempFiles.keySet().toArray(new String[0]);
    if (operation == Operation.MULTIINTER) {
        assert tempFiles.size() >= 2;
        cmd += " -i " + StringUtils.join(tempFiles.keySet(), " ");
    } else {
        assert tempFiles.size() == 2;
        cmd += " -a " + fiNames[0] + " -b " + fiNames[1];
    }

    //Start bedtools process
    Process pr = RuntimeUtils.startExternalProcess(cmd, null, null);

    //Read back in the data which bedtools output
    BufferedReader in = new BufferedReader(new InputStreamReader(pr.getInputStream()));
    BufferedReader err = new BufferedReader(new InputStreamReader(pr.getErrorStream()));

    List<Feature> featuresList = new ArrayList<Feature>();
    IGVBEDCodec codec = new IGVBEDCodec();

    String line;
    Feature feat;
    int numCols0 = tempFiles.get(fiNames[0]);
    int numCols1 = tempFiles.get(fiNames[1]);
    while ((line = in.readLine()) != null) {
        System.out.println(line);
        String[] tokens = line.split("\t");
        if (operation.getCmd().contains("-split")) {
            //When we split, the returned feature still has the exons
            //We don't want to plot them all a zillion times
            tokens = Arrays.copyOfRange(tokens, 0, Math.min(6, tokens.length));
        }

        if (operation == Operation.WINDOW || operation == Operation.CLOSEST) {

            String[] closest = Arrays.copyOfRange(tokens, numCols0, numCols0 + numCols1);
            //If not found, bedtools returns -1 for positions
            if (closest[1].trim().equalsIgnoreCase("-1")) {
                continue;
            }
            feat = codec.decode(closest);
        } else if (operation == Operation.MULTIINTER) {
            //We only look at regions common to ALL inputs
            //Columns: chr \t start \t \end \t # of files which contained this feature \t comma-separated list files +many more
            int numRegions = Integer.parseInt(tokens[3]);
            if (numRegions < sources.length) {
                continue;
            }
            String[] intersection = Arrays.copyOf(tokens, 3);
            feat = codec.decode(intersection);
        } else {
            feat = codec.decode(tokens);
        }
        featuresList.add(feat);
    }

    in.close();

    while ((line = err.readLine()) != null) {
        log.error(line);
    }
    err.close();

    return featuresList.iterator();
}

From source file:com.opengamma.analytics.financial.interestrate.capletstripping.SABRTermStructureModelProvider.java

/**
 * General set up for a SABRTermStructureModelProvider
 * @param knotPoints Map between parameter curve names ("alpha", "beta", "rho" and "nu") and the positions of the knot points on each of those curves 
 * @param interpolators  Map between parameter curve names ("alpha", "beta", "rho" and "nu") and the interpolator used to describe that curve 
 * @param parameterTransforms  Map between parameter curve names ("alpha", "beta", "rho" and "nu") and the parameter transform used for that curve
 * @param knownParameterTermStructures  Map between known curve names (could be "alpha", "beta", "rho" and "nu") and the known curve(s)
 */
public SABRTermStructureModelProvider(LinkedHashMap<String, double[]> knotPoints,
        final LinkedHashMap<String, Interpolator1D> interpolators,
        final LinkedHashMap<String, ParameterLimitsTransform> parameterTransforms,
        final LinkedHashMap<String, InterpolatedDoublesCurve> knownParameterTermSturctures) {

    Validate.notNull(knotPoints, "null node points");
    Validate.notNull(interpolators, "null interpolators");
    Validate.isTrue(knotPoints.size() == interpolators.size(), "size mismatch between nodes and interpolators");

    if (knownParameterTermStructures == null) {
        Validate.isTrue(knotPoints.containsKey(ALPHA) && interpolators.containsKey(ALPHA),
                "alpha curve not found");
        Validate.isTrue(knotPoints.containsKey(BETA) && interpolators.containsKey(BETA),
                "beta curve not found");
        Validate.isTrue(knotPoints.containsKey(NU) && interpolators.containsKey(NU), "nu curve not found");
        Validate.isTrue(knotPoints.containsKey(RHO) && interpolators.containsKey(RHO), "rho curve not found");
    } else {
        Validate.isTrue((knotPoints.containsKey(ALPHA) && interpolators.containsKey(ALPHA))
                ^ knownParameterTermStructures.containsKey(ALPHA), "alpha curve not found");
        Validate.isTrue((knotPoints.containsKey(BETA) && interpolators.containsKey(BETA))
                ^ knownParameterTermStructures.containsKey(BETA), "beta curve not found");
        Validate.isTrue((knotPoints.containsKey(NU) && interpolators.containsKey(NU))
                ^ knownParameterTermStructures.containsKey(NU), "nu curve not found");
        Validate.isTrue((knotPoints.containsKey(RHO) && interpolators.containsKey(RHO))
                ^ knownParameterTermStructures.containsKey(RHO), "rho curve not found");
    }

    final LinkedHashMap<String, Interpolator1D> transInterpolators = new LinkedHashMap<>();
    for (final Map.Entry<String, Interpolator1D> entry : interpolators.entrySet()) {
        final String name = entry.getKey();
        final Interpolator1D temp = new TransformedInterpolator1D(entry.getValue(),
                parameterTransforms.get(name));
        transInterpolators.put(name, temp);
    }

    _curveBuilder = new InterpolatedCurveBuildingFunction(knotPoints, transInterpolators);

    // _parameterTransforms = parameterTransforms; //TODO all the check for this

    _knownParameterTermStructures = knownParameterTermStructures;
}

From source file:com.hp.alm.ali.idea.services.EntityService.java

private String queryToString(EntityQuery query) {
    ServerStrategy cust = restService.getServerStrategy();
    EntityQuery clone = cust.preProcess(query.clone());
    StringBuffer buf = new StringBuffer();
    buf.append("fields=");
    LinkedHashMap<String, Integer> columns = clone.getColumns();
    buf.append(StringUtils.join(columns.keySet().toArray(new String[columns.size()]), ","));
    buf.append("&query=");
    buf.append(EntityQuery
            .encode("{" + filterToString(clone, project, project.getComponent(MetadataService.class)) + "}"));
    buf.append("&order-by=");
    buf.append(EntityQuery.encode("{" + orderToString(clone) + "}"));
    if (query.getPageSize() != null) {
        buf.append("&page-size=");
        buf.append(query.getPageSize());
    }
    if (query.getStartIndex() != null) {
        buf.append("&start-index=");
        buf.append(query.getStartIndex());
    }
    return buf.toString();
}

From source file:com.aliyun.odps.graph.local.LocalGraphJobRunner.java

private void processInput(TableInfo tableInfo) throws IOException, OdpsException {
    LOG.info("Processing input: " + tableInfo);

    String projName = tableInfo.getProjectName();
    if (projName == null) {
        projName = SessionState.get().getOdps().getDefaultProject();
    }
    String tblName = tableInfo.getTableName();
    String[] readCols = tableInfo.getCols();

    // Build the partition spec that the job expects from the table info
    LinkedHashMap<String, String> expectPartsHashMap = tableInfo.getPartSpec();
    PartitionSpec expectParts = null;
    if (expectPartsHashMap != null && expectPartsHashMap.size() > 0) {
        StringBuffer sb = new StringBuffer();
        for (String key : expectPartsHashMap.keySet()) {
            if (sb.length() > 0) {
                sb.append(",");
            }
            sb.append(key + "=" + expectPartsHashMap.get(key));
        }
        expectParts = new PartitionSpec(sb.toString());
    }

    // Download the table scheme and data if the table is not in the local warehouse (or download mode is ALWAYS)
    if (!wareHouse.existsTable(projName, tblName) || wareHouse.getDownloadMode() == DownloadMode.ALWAYS) {

        DownloadUtils.downloadTableSchemeAndData(odps, tableInfo, wareHouse.getLimitDownloadRecordCount(),
                wareHouse.getInputColumnSeperator());

        if (!wareHouse.existsTable(projName, tblName)) {
            throw new OdpsException("download table from remote host failure");
        }
    }

    // Read the table meta, fields and partitions from the local warehouse __scheme__ directory
    TableMeta whTblMeta = wareHouse.getTableMeta(projName, tblName);
    Column[] whReadFields = LocalRunUtils.getInputTableFields(whTblMeta, readCols);
    List<PartitionSpec> whParts = wareHouse.getPartitions(projName, tblName);

    if (whParts.size() > 0) {
        // partitioned table
        for (PartitionSpec partSpec : whParts) {
            // Skip partitions that do not match the expected partition spec
            if (!match(expectParts, partSpec)) {
                continue;
            }
            File whSrcDir = wareHouse.getPartitionDir(whTblMeta.getProjName(), whTblMeta.getTableName(),
                    partSpec);
            // add input split only when src dir has data file
            if (LocalRunUtils.listDataFiles(whSrcDir).size() > 0) {

                // Copy the partition data from the local warehouse into the job input directory
                File tempDataDir = jobDirecotry.getInputDir(
                        wareHouse.getRelativePath(whTblMeta.getProjName(), whTblMeta.getTableName(), partSpec));
                File tempSchemeDir = jobDirecotry.getInputDir(
                        wareHouse.getRelativePath(whTblMeta.getProjName(), whTblMeta.getTableName(), null));
                wareHouse.copyTable(whTblMeta.getProjName(), whTblMeta.getTableName(), partSpec, readCols,
                        tempSchemeDir, wareHouse.getLimitDownloadRecordCount(),
                        wareHouse.getInputColumnSeperator());
                for (File file : LocalRunUtils.listDataFiles(tempDataDir)) {
                    inputs.add(new InputSplit(file, whReadFields, 0L, file.length(), tableInfo));
                }
            }
        }
    } else {
        // not partitioned table
        if (tableInfo.getPartSpec() != null && tableInfo.getPartSpec().size() > 0) {
            throw new IOException(ExceptionCode.ODPS_0720121 + "table " + projName + "." + tblName
                    + " is not partitioned table");
        }

        File whSrcDir = wareHouse.getTableDir(whTblMeta.getProjName(), whTblMeta.getTableName());
        if (LocalRunUtils.listDataFiles(whSrcDir).size() > 0) {
            // Copy the table data from the local warehouse into the job input directory
            File tempDataDir = jobDirecotry.getInputDir(
                    wareHouse.getRelativePath(whTblMeta.getProjName(), whTblMeta.getTableName(), null));
            File tempSchemeDir = tempDataDir;
            wareHouse.copyTable(whTblMeta.getProjName(), whTblMeta.getTableName(), null, readCols,
                    tempSchemeDir, wareHouse.getLimitDownloadRecordCount(),
                    wareHouse.getInputColumnSeperator());
            for (File file : LocalRunUtils.listDataFiles(tempDataDir)) {
                inputs.add(new InputSplit(file, whReadFields, 0L, file.length(), tableInfo));
            }
        }
    }

}