Example usage for java.util LinkedHashMap size

List of usage examples for java.util LinkedHashMap size

Introduction

On this page you can find example usage for java.util LinkedHashMap size.

Prototype

int size();

Document

Returns the number of key-value mappings in this map.
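
Before the project excerpts below, here is a minimal, self-contained sketch of the call itself; the keys and values are made up purely for illustration.

import java.util.LinkedHashMap;

public class LinkedHashMapSizeExample {
    public static void main(String[] args) {
        // size() reports the current number of key-value mappings
        LinkedHashMap<String, String> map = new LinkedHashMap<String, String>();
        System.out.println(map.size()); // 0 for an empty map

        map.put("a", "1");
        map.put("b", "2");
        System.out.println(map.size()); // 2

        map.put("a", "updated");        // replacing a value does not change the size
        System.out.println(map.size()); // still 2

        map.remove("b");
        System.out.println(map.size()); // 1
    }
}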

Usage

From source file:org.jamwiki.servlets.ServletUtil.java

/**
 * Utility method for adding categories associated with the current topic to
 * the ModelAndView object. This method adds a hashmap of category names and
 * sort keys to the session that can then be retrieved for display during
 * rendering.
 * 
 * @param next
 *          The current ModelAndView object used to return rendering
 *          information.
 * @param virtualWiki
 *          The virtual wiki name for the topic being rendered.
 * @param topicName
 *          The name of the topic that is being rendered.
 * @throws WikiException
 *           Thrown if any error occurs during processing.
 */
protected static void loadCategoryContent(ModelAndView next, String virtualWiki, String topicName)
        throws WikiException {
    String categoryName = topicName.substring(
            NamespaceHandler.NAMESPACE_CATEGORY.length() + NamespaceHandler.NAMESPACE_SEPARATOR.length());
    next.addObject("categoryName", categoryName);
    List<Category> categoryTopics = null;
    try {
        categoryTopics = WikiBase.getDataHandler().lookupCategoryTopics(virtualWiki, topicName);
    } catch (DataAccessException e) {
        throw new WikiException(new WikiMessage("error.unknown", e.getMessage()), e);
    }
    List<Category> categoryImages = new ArrayList<Category>();
    LinkedHashMap<String, String> subCategories = new LinkedHashMap<String, String>();
    int i = 0;
    // loop through the results and split out images and sub-categories
    while (i < categoryTopics.size()) {
        Category category = categoryTopics.get(i);
        if (category.getTopicType() == Topic.TYPE_IMAGE) {
            categoryTopics.remove(i);
            categoryImages.add(category);
            continue;
        }
        if (category.getTopicType() == Topic.TYPE_CATEGORY) {
            categoryTopics.remove(i);
            String value = category.getChildTopicName().substring(NamespaceHandler.NAMESPACE_CATEGORY.length()
                    + NamespaceHandler.NAMESPACE_SEPARATOR.length());
            subCategories.put(category.getChildTopicName(), value);
            continue;
        }
        i++;
    }
    next.addObject("categoryTopics", categoryTopics);
    next.addObject("numCategoryTopics", categoryTopics.size());
    next.addObject("categoryImages", categoryImages);
    next.addObject("numCategoryImages", categoryImages.size());
    next.addObject("subCategories", subCategories);
    next.addObject("numSubCategories", subCategories.size());
}

From source file:com.cloud.utils.db.SqlGenerator.java

protected List<Pair<String, Attribute[]>> buildDeleteSqls() {
    LinkedHashMap<String, ArrayList<Attribute>> map = new LinkedHashMap<String, ArrayList<Attribute>>();
    for (Class<?> table : _tables) {
        map.put(DbUtil.getTableName(table), new ArrayList<Attribute>());
    }

    for (Attribute attr : _attributes) {
        if (attr.isId()) {
            ArrayList<Attribute> attrs = map.get(attr.table);
            assert (attrs != null) : "Null set of attributes for " + attr.table;
            attrs.add(attr);
        }
    }

    List<Pair<String, Attribute[]>> sqls = new ArrayList<Pair<String, Attribute[]>>(map.size());
    for (Map.Entry<String, ArrayList<Attribute>> entry : map.entrySet()) {
        ArrayList<Attribute> attrs = entry.getValue();
        String sql = buildDeleteSql(entry.getKey(), attrs);
        Pair<String, Attribute[]> pair = new Pair<String, Attribute[]>(sql,
                attrs.toArray(new Attribute[attrs.size()]));
        sqls.add(pair);
    }

    Collections.reverse(sqls);
    return sqls;
}
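
The method above uses map.size() to pre-size the result list so it never has to grow. A reduced, standalone sketch of that pattern using only JDK types (all names below are made up):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class PresizedListExample {
    public static void main(String[] args) {
        LinkedHashMap<String, List<Integer>> map = new LinkedHashMap<String, List<Integer>>();
        map.put("host", new ArrayList<Integer>());
        map.put("vm", new ArrayList<Integer>());
        map.get("host").add(1);

        // size() gives the exact initial capacity for the result list
        List<String> sqls = new ArrayList<String>(map.size());
        for (Map.Entry<String, List<Integer>> entry : map.entrySet()) {
            sqls.add(entry.getKey() + " has " + entry.getValue().size() + " id attribute(s)");
        }
        System.out.println(sqls);
    }
}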

From source file:com.cloud.utils.db.SqlGenerator.java

public List<Pair<String, Attribute[]>> buildInsertSqls() {
    LinkedHashMap<String, ArrayList<Attribute>> map = new LinkedHashMap<String, ArrayList<Attribute>>();
    for (Class<?> table : _tables) {
        map.put(DbUtil.getTableName(table), new ArrayList<Attribute>());
    }

    for (Attribute attr : _attributes) {
        if (attr.isInsertable()) {
            ArrayList<Attribute> attrs = map.get(attr.table);
            assert (attrs != null) : "Null set of attributes for " + attr.table;
            attrs.add(attr);
        }
    }

    List<Pair<String, Attribute[]>> sqls = new ArrayList<Pair<String, Attribute[]>>(map.size());
    for (Map.Entry<String, ArrayList<Attribute>> entry : map.entrySet()) {
        ArrayList<Attribute> attrs = entry.getValue();
        StringBuilder sql = buildInsertSql(entry.getKey(), attrs);
        Pair<String, Attribute[]> pair = new Pair<String, Attribute[]>(sql.toString(),
                attrs.toArray(new Attribute[attrs.size()]));
        sqls.add(pair);
    }

    return sqls;
}

From source file:org.apache.hadoop.hive.ql.parse.NewGroupByUtils1.java

@SuppressWarnings("unchecked")
private GroupByOperator genNewGroupByPlanGroupByOperator(QB qb, String dest, Operator inputOperatorInfo,
        Mode mode, ArrayList<GenericUDAFEvaluator> genericUDAFEvaluators,
        ArrayList<ArrayList<Integer>> tag2AggrPos, ArrayList<ArrayList<ASTNode>> tag2AggrParamAst,
        HashMap<Integer, ArrayList<Integer>> nonDistPos2TagOffs) throws SemanticException {

    RowResolver groupByInputRowResolver = opParseCtx.get(inputOperatorInfo).getRR();
    QBParseInfo parseInfo = qb.getParseInfo();
    RowResolver groupByOutputRowResolver = new RowResolver();
    groupByOutputRowResolver.setIsExprResolver(true);
    RowSchema operatorRowSchema = new RowSchema();
    operatorRowSchema.setSignature(new Vector<ColumnInfo>());
    Map<String, exprNodeDesc> colExprMap = new HashMap<String, exprNodeDesc>();

    ArrayList<exprNodeDesc> groupByKeys = new ArrayList<exprNodeDesc>();
    ArrayList<String> outputColumnNames = new ArrayList<String>();

    List<ASTNode> grpByExprs = SemanticAnalyzer.getGroupByForClause(parseInfo, dest);
    int colid = 0;
    if (qb.getParseInfo().getDestContainsGroupbyCubeOrRollupClause(dest)) {
        String colName = getColumnInternalName(colid++);
        outputColumnNames.add(colName);
        ColumnInfo info = groupByInputRowResolver.get("", NewGroupByUtils1._CUBE_ROLLUP_GROUPINGSETS_TAG_);
        exprNodeDesc grpByExprNode = new exprNodeColumnDesc(info.getType(), info.getInternalName(),
                info.getAlias(), info.getIsPartitionCol());
        groupByKeys.add(grpByExprNode);
        ColumnInfo colInfo = new ColumnInfo(colName, grpByExprNode.getTypeInfo(), "", false);

        groupByOutputRowResolver.put("", NewGroupByUtils1._CUBE_ROLLUP_GROUPINGSETS_TAG_, colInfo);

        operatorRowSchema.getSignature().add(colInfo);
        colExprMap.put(colName, grpByExprNode);
    }
    for (int i = 0; i < grpByExprs.size(); i++) {
        ASTNode grpbyExpr = grpByExprs.get(i);
        exprNodeDesc grpByExprNode = SemanticAnalyzer.genExprNodeDesc(grpbyExpr, groupByInputRowResolver, qb,
                -1, conf);
        groupByKeys.add(grpByExprNode);
        String colName = getColumnInternalName(colid++);
        outputColumnNames.add(colName);
        ColumnInfo colInfo = new ColumnInfo(colName, grpByExprNode.getTypeInfo(), "", false);
        groupByOutputRowResolver.putExpression(grpbyExpr, colInfo);
        operatorRowSchema.getSignature().add(colInfo);
        colExprMap.put(colName, grpByExprNode);
    }

    boolean containsfunctions = tag2AggrPos != null && tag2AggrPos.size() > 0;
    boolean containsnondistinctfunctions = containsfunctions && tag2AggrPos.get(0).size() > 0;

    LinkedHashMap<String, ASTNode> aggregationTrees = parseInfo.getAggregationExprsForClause(dest);

    ArrayList<ASTNode> aggregationTreesArray = new ArrayList<ASTNode>(aggregationTrees.size());
    aggregationTreesArray.addAll(aggregationTrees.values());

    HashMap<Integer, Integer> pos2tag = new HashMap<Integer, Integer>();
    for (int tag = 0; tag < tag2AggrPos.size(); tag++) {
        for (Integer pos : tag2AggrPos.get(tag)) {
            pos2tag.put(pos, tag);
        }
    }

    ArrayList<ArrayList<exprNodeDesc>> tag2AggrParamORValueExpr = new ArrayList<ArrayList<exprNodeDesc>>();
    ArrayList<aggregationDesc> aggregations = null;
    aggregations = new ArrayList<aggregationDesc>(aggregationTrees.size());
    for (int i = 0; i < aggregationTrees.size(); i++) {
        aggregations.add(null);
    }
    exprNodeDesc aggrPartExpr = null;

    if (mode == Mode.HASH) {

        if (containsfunctions) {
            String colNameAggrPart = getColumnInternalName(colid++);
            outputColumnNames.add(colNameAggrPart);

            List<TypeInfo> unionTypes = new ArrayList<TypeInfo>();

            for (int tag = 0; tag < tag2AggrParamAst.size(); tag++) {
                tag2AggrParamORValueExpr.add(new ArrayList<exprNodeDesc>());
                ArrayList<exprNodeDesc> aggParameters = new ArrayList<exprNodeDesc>();

                for (int j = 0; j < tag2AggrParamAst.get(tag).size(); j++) {
                    ASTNode paraExpr = (ASTNode) tag2AggrParamAst.get(tag).get(j);
                    exprNodeDesc exprNode = SemanticAnalyzer.genExprNodeDesc(paraExpr, groupByInputRowResolver,
                            qb, -1, conf);
                    tag2AggrParamORValueExpr.get(tag).add(exprNode);
                    aggParameters.add(exprNode);
                }

                ArrayList<String> names = new ArrayList<String>();
                ArrayList<TypeInfo> typeInfos = new ArrayList<TypeInfo>();
                if (tag == 0) {
                    if (!containsnondistinctfunctions) {
                        names.add("nondistnull");
                        typeInfos.add(TypeInfoFactory.voidTypeInfo);
                    } else {
                        int posoff = 0;
                        for (Integer pos : tag2AggrPos.get(tag)) {
                            ASTNode value = aggregationTreesArray.get(pos);
                            String aggName = value.getChild(0).getText();

                            boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
                            GenericUDAFEvaluator.Mode amode = SemanticAnalyzer.groupByDescModeToUDAFMode(mode,
                                    isDistinct);

                            GenericUDAFEvaluator genericUDAFEvaluator = genericUDAFEvaluators.get(pos);
                            assert (genericUDAFEvaluator != null);

                            ArrayList<exprNodeDesc> aggParameters1 = aggParameters;
                            ArrayList<Integer> offs = nonDistPos2TagOffs.get(pos);
                            aggParameters1 = new ArrayList<exprNodeDesc>();
                            for (Integer off : offs) {
                                aggParameters1.add(aggParameters.get(off));
                            }

                            GenericUDAFInfo udaf = SemanticAnalyzer.getGenericUDAFInfo(genericUDAFEvaluator,
                                    amode, aggParameters1);

                            aggregations.set(pos, new aggregationDesc(aggName.toLowerCase(),
                                    udaf.genericUDAFEvaluator, udaf.convertedParameters, isDistinct, amode));

                            String innername = getColumnInternalName(posoff);
                            String field = colNameAggrPart + ":" + tag + "." + innername;

                            ColumnInfo outColInfo = new ColumnInfo(field, udaf.returnType, "", false);
                            groupByOutputRowResolver.put("", _AGGRPARTTAG_ + tag + "_" + posoff, outColInfo);

                            posoff++;

                            names.add(innername);
                            typeInfos.add(udaf.returnType);
                        }
                    }
                } else {
                    for (int i = 0; i < tag2AggrParamORValueExpr.get(tag).size(); i++) {

                        String innername = getColumnInternalName(i);
                        TypeInfo innertype = tag2AggrParamORValueExpr.get(tag).get(i).getTypeInfo();

                        String field = colNameAggrPart + ":" + tag + "." + innername;
                        ColumnInfo outColInfo = new ColumnInfo(field, innertype, "", false);
                        groupByOutputRowResolver.put("", _AGGRPARTTAG_ + tag + "_" + i, outColInfo);

                        names.add(innername);
                        typeInfos.add(innertype);
                    }
                }
                unionTypes.add(TypeInfoFactory.getStructTypeInfo(names, typeInfos));
            }

            ColumnInfo outColInfo = new ColumnInfo(colNameAggrPart,
                    TypeInfoFactory.getUnionTypeInfo(unionTypes), "", false);
            groupByOutputRowResolver.put("", _GBY_AGGRPART_OUTPUT_COLNAME_, outColInfo);
            operatorRowSchema.getSignature().add(outColInfo);
        }

    } else if (mode == Mode.PARTIAL1 || mode == Mode.PARTIALS) {
        if (containsfunctions) {

            ColumnInfo aggrPartInfo = groupByInputRowResolver.get("", _GBY_AGGRPART_OUTPUT_COLNAME_);
            aggrPartExpr = new exprNodeColumnDesc(aggrPartInfo.getType(), aggrPartInfo.getInternalName(), "",
                    false);

            String colNameAggrPart = getColumnInternalName(colid++);
            outputColumnNames.add(colNameAggrPart);
            List<TypeInfo> unionTypes = new ArrayList<TypeInfo>();

            for (int tag = 0; tag < tag2AggrParamAst.size(); tag++) {
                tag2AggrParamORValueExpr.add(new ArrayList<exprNodeDesc>());
                ArrayList<exprNodeDesc> aggParameters = new ArrayList<exprNodeDesc>();

                int paramlen = (tag == 0 && mode == Mode.PARTIALS) ? tag2AggrPos.get(tag).size()
                        : tag2AggrParamAst.get(tag).size();
                for (int j = 0; j < paramlen; j++) {
                    ColumnInfo inputColInfo = groupByInputRowResolver.get("", _AGGRPARTTAG_ + tag + "_" + j);

                    exprNodeDesc exprNode = new exprNodeColumnDesc(inputColInfo.getType(),
                            inputColInfo.getInternalName(), "", false);

                    tag2AggrParamORValueExpr.get(tag).add(exprNode);
                    aggParameters.add(exprNode);
                }

                ArrayList<String> names = new ArrayList<String>();
                ArrayList<TypeInfo> typeInfos = new ArrayList<TypeInfo>();

                int posoff = 0;
                for (Integer pos : tag2AggrPos.get(tag)) {
                    ASTNode value = aggregationTreesArray.get(pos);
                    String aggName = value.getChild(0).getText();

                    boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
                    GenericUDAFEvaluator.Mode amode = SemanticAnalyzer.groupByDescModeToUDAFMode(mode,
                            isDistinct);

                    GenericUDAFEvaluator genericUDAFEvaluator = genericUDAFEvaluators.get(pos);
                    assert (genericUDAFEvaluator != null);

                    ArrayList<exprNodeDesc> aggParameters1 = aggParameters;
                    if (tag == 0 && mode == Mode.PARTIAL1) {
                        ArrayList<Integer> offs = nonDistPos2TagOffs.get(pos);
                        aggParameters1 = new ArrayList<exprNodeDesc>();
                        for (Integer off : offs) {
                            aggParameters1.add(aggParameters.get(off));
                        }
                    } else if (tag == 0) {
                        aggParameters1 = new ArrayList<exprNodeDesc>();
                        aggParameters1.add(aggParameters.get(posoff));
                    }

                    GenericUDAFInfo udaf = SemanticAnalyzer.getGenericUDAFInfo(genericUDAFEvaluator, amode,
                            aggParameters1);

                    aggregations.set(pos, new aggregationDesc(aggName.toLowerCase(), udaf.genericUDAFEvaluator,
                            udaf.convertedParameters, isDistinct, amode));

                    String innername = getColumnInternalName(posoff);
                    String field = colNameAggrPart + ":" + tag + "." + innername;

                    ColumnInfo outColInfo = new ColumnInfo(field, udaf.returnType, "", false);
                    groupByOutputRowResolver.put("", _AGGRPARTTAG_ + tag + "_" + posoff, outColInfo);

                    posoff++;

                    names.add(innername);
                    typeInfos.add(udaf.returnType);
                }

                if (names.isEmpty()) {
                    names.add("nondistnull");
                    typeInfos.add(TypeInfoFactory.voidTypeInfo);
                }

                unionTypes.add(TypeInfoFactory.getStructTypeInfo(names, typeInfos));
            }

            ColumnInfo outColInfo = new ColumnInfo(colNameAggrPart,
                    TypeInfoFactory.getUnionTypeInfo(unionTypes), "", false);
            groupByOutputRowResolver.put("", _GBY_AGGRPART_OUTPUT_COLNAME_, outColInfo);
            operatorRowSchema.getSignature().add(outColInfo);
        }

    } else if (mode == Mode.MERGEPARTIAL || mode == Mode.FINAL || mode == Mode.COMPLETE) {

        if (containsfunctions) {

            ColumnInfo aggrPartInfo = groupByInputRowResolver.get("", _GBY_AGGRPART_OUTPUT_COLNAME_);
            aggrPartExpr = new exprNodeColumnDesc(aggrPartInfo.getType(), aggrPartInfo.getInternalName(), "",
                    false);

            HashMap<Integer, String> pos2colname = new HashMap<Integer, String>();
            for (int pos = 0; pos < aggregationTreesArray.size(); pos++) {
                String colName = getColumnInternalName(colid++);
                outputColumnNames.add(colName);
                pos2colname.put(pos, colName);
            }

            HashMap<Integer, ColumnInfo> pos2valueInfo = new HashMap<Integer, ColumnInfo>();
            for (int tag = 0; tag < tag2AggrPos.size(); tag++) {
                tag2AggrParamORValueExpr.add(new ArrayList<exprNodeDesc>());
                ArrayList<exprNodeDesc> aggParameters = new ArrayList<exprNodeDesc>();

                int aggrlen = (mode == Mode.FINAL) ? tag2AggrPos.get(tag).size()
                        : ((mode == Mode.COMPLETE) ? tag2AggrParamAst.get(tag).size()
                                : ((tag == 0) ? tag2AggrPos.get(tag).size()
                                        : tag2AggrParamAst.get(tag).size()));
                for (int j = 0; j < aggrlen; j++) {
                    ColumnInfo inputColInfo = groupByInputRowResolver.get("", _AGGRPARTTAG_ + tag + "_" + j);

                    exprNodeDesc exprNode = new exprNodeColumnDesc(inputColInfo.getType(),
                            inputColInfo.getInternalName(), "", false);

                    tag2AggrParamORValueExpr.get(tag).add(exprNode);
                    aggParameters.add(exprNode);
                }

                int posoff = 0;
                for (Integer pos : tag2AggrPos.get(tag)) {
                    ASTNode value = aggregationTreesArray.get(pos);
                    String aggName = value.getChild(0).getText();

                    boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
                    GenericUDAFEvaluator.Mode amode = SemanticAnalyzer.groupByDescModeToUDAFMode(mode,
                            isDistinct);

                    GenericUDAFEvaluator genericUDAFEvaluator = genericUDAFEvaluators.get(pos);
                    assert (genericUDAFEvaluator != null);

                    ArrayList<exprNodeDesc> aggParameters1 = aggParameters;
                    if (tag == 0 && mode == Mode.COMPLETE) {
                        ArrayList<Integer> offs = nonDistPos2TagOffs.get(pos);
                        aggParameters1 = new ArrayList<exprNodeDesc>();
                        for (Integer off : offs) {
                            aggParameters1.add(aggParameters.get(off));
                        }
                    } else if (tag == 0 || mode == Mode.FINAL) {
                        aggParameters1 = new ArrayList<exprNodeDesc>();
                        aggParameters1.add(aggParameters.get(posoff));
                    }

                    GenericUDAFInfo udaf = SemanticAnalyzer.getGenericUDAFInfo(genericUDAFEvaluator, amode,
                            aggParameters1);

                    aggregations.set(pos, new aggregationDesc(aggName.toLowerCase(), udaf.genericUDAFEvaluator,
                            udaf.convertedParameters, isDistinct, amode));

                    ColumnInfo valueColInfo = new ColumnInfo(pos2colname.get(pos), udaf.returnType, "", false);
                    pos2valueInfo.put(pos, valueColInfo);

                    posoff++;

                }
            }

            for (int pos = 0; pos < aggregationTreesArray.size(); pos++) {
                groupByOutputRowResolver.putExpression(aggregationTreesArray.get(pos), pos2valueInfo.get(pos));
                operatorRowSchema.getSignature().add(pos2valueInfo.get(pos));
            }
        }

    } else if (mode == Mode.PARTIAL2) {
    }

    GroupByOperator op = (GroupByOperator) putOpInsertMap(
            OperatorFactory.getAndMakeChild(new groupByDesc(mode, outputColumnNames, groupByKeys, aggregations,
                    tag2AggrPos, tag2AggrParamORValueExpr, aggrPartExpr), operatorRowSchema, inputOperatorInfo),
            groupByOutputRowResolver);
    op.setColumnExprMap(colExprMap);
    return op;
}

From source file:pt.lsts.neptus.util.logdownload.LogsDownloaderWorkerActions.java

/**
 * Processes the serversLogPresenceList and gets the actual log files/folders for each base log folder.
 * This is done by iterating {@link LogsDownloaderWorker#getServersList()} and checking each server's
 * presence for that folder.
 * 
 * @param serversLogPresenceList Map from each base log folder to the servers on which it is present
 *            (space-separated list of server keys)
 * @return
 */
private LinkedList<LogFolderInfo> getFromServersCompleteLogList(
        LinkedHashMap<String, String> serversLogPresenceList) {
    if (serversLogPresenceList.size() == 0)
        return new LinkedList<LogFolderInfo>();

    long timeF0 = System.currentTimeMillis();

    LinkedList<LogFolderInfo> tmpLogFolders = new LinkedList<LogFolderInfo>();

    try {
        ArrayList<String> servers = worker.getServersList();
        for (String serverKey : servers) { // Let's iterate by servers first
            if (stopLogListProcessing)
                break;

            if (!worker.isServerAvailable(serverKey))
                continue;

            FtpDownloader ftpServer = null;
            try {
                ftpServer = LogsDownloaderWorkerUtil.getOrRenewFtpDownloader(serverKey,
                        worker.getFtpDownloaders(), worker.getHostFor(serverKey), worker.getPortFor(serverKey));
            } catch (Exception e) {
                e.printStackTrace();
            }
            if (ftpServer == null)
                continue;

            // String host = worker.getHostFor(serverKey); // To fill the log files host info
            String host = serverKey; // Using a key instead of host directly

            for (String logDir : serversLogPresenceList.keySet()) { // For the server go through the folders
                if (stopLogListProcessing)
                    break;

                if (!serversLogPresenceList.get(logDir).contains(serverKey))
                    continue;

                // This is needed to avoid problems with non English languages
                String isoStr = new String(logDir.getBytes(), "ISO-8859-1");

                LogFolderInfo lFolder = null;
                for (LogFolderInfo lfi : tmpLogFolders) {
                    if (lfi.getName().equals(logDir)) {
                        lFolder = lfi;
                        break;
                    }
                }
                if (lFolder == null)
                    lFolder = new LogFolderInfo(logDir);

                if (!ftpServer.isConnected())
                    ftpServer.renewClient();

                try {
                    FTPFile[] files = ftpServer.getClient().listFiles("/" + isoStr + "/");
                    for (FTPFile file : files) {
                        if (stopLogListProcessing)
                            break;

                        String name = logDir + "/" + file.getName();
                        String uriPartial = logDir + "/" + file.getName();
                        LogFileInfo logFileTmp = new LogFileInfo(name);
                        logFileTmp.setUriPartial(uriPartial);
                        logFileTmp.setSize(file.getSize());
                        logFileTmp.setFile(file);
                        logFileTmp.setHost(host);

                        // Let us see if it's a directory
                        if (file.isDirectory()) {
                            logFileTmp.setSize(-1); // Set size to -1 if directory
                            long allSize = 0;

                            // No nested directories are expected here; only two folder layers are considered, e.g. "Photos/00000"
                            LinkedHashMap<String, FTPFile> dirListing = ftpServer
                                    .listDirectory(logFileTmp.getName());
                            ArrayList<LogFileInfo> directoryContents = new ArrayList<>();
                            for (String fName : dirListing.keySet()) {
                                if (stopLogListProcessing)
                                    break;

                                FTPFile fFile = dirListing.get(fName);
                                String fURIPartial = fName;
                                LogFileInfo fLogFileTmp = new LogFileInfo(fName);
                                fLogFileTmp.setUriPartial(fURIPartial);
                                fLogFileTmp.setSize(fFile.getSize());
                                fLogFileTmp.setFile(fFile);
                                fLogFileTmp.setHost(host);

                                allSize += fLogFileTmp.getSize();
                                directoryContents.add(fLogFileTmp);
                            }
                            logFileTmp.setDirectoryContents(directoryContents);
                            logFileTmp.setSize(allSize);
                        }
                        lFolder.addFile(logFileTmp);
                        tmpLogFolders.add(lFolder);
                    }
                } catch (Exception e) {
                    System.err.println(isoStr);
                    e.printStackTrace();
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

    NeptusLog.pub().warn(".......Contacting remote systems for complete log file list "
            + (System.currentTimeMillis() - timeF0) + "ms");

    return tmpLogFolders;
}

From source file:com.aliyun.odps.local.common.WareHouse.java

/**
 * copy resource from warehouse/__resources__/ to temp/resource/
 *
 * @param projName
 * @param resourceName
 * @param resourceRootDir
 * @param limitDownloadRecordCount
 * @param inputColumnSeperator
 * @throws IOException
 * @throws OdpsException
 */
public void copyResource(String projName, String resourceName, File resourceRootDir,
        int limitDownloadRecordCount, char inputColumnSeperator) throws IOException, OdpsException {
    if (StringUtils.isBlank(projName) || StringUtils.isBlank(resourceName) || resourceRootDir == null) {
        return;
    }

    if (!resourceRootDir.exists()) {
        resourceRootDir.mkdirs();
    }

    LOG.info("Start to copy resource: " + projName + "." + resourceName + "-->"
            + resourceRootDir.getAbsolutePath());

    if (!existsResource(projName, resourceName)) {
        DownloadUtils.downloadResource(getOdps(), projName, resourceName, limitDownloadRecordCount,
                inputColumnSeperator);
    }

    File file = getReourceFile(projName, resourceName);

    // table resource
    if (file.isDirectory()) {
        File tableResourceDir = new File(resourceRootDir, resourceName);

        TableInfo refTableInfo = getReferencedTable(projName, resourceName);
        LinkedHashMap<String, String> partitions = refTableInfo.getPartSpec();

        if (partitions != null && partitions.size() > 0) {
            PartitionSpec partSpec = new PartitionSpec();
            for (String key : partitions.keySet()) {
                partSpec.set(key, partitions.get(key));
            }
            copyTable(refTableInfo.getProjectName(), refTableInfo.getTableName(), partSpec, null,
                    tableResourceDir, limitDownloadRecordCount, inputColumnSeperator);
        } else {
            copyTable(refTableInfo.getProjectName(), refTableInfo.getTableName(), null, null, tableResourceDir,
                    limitDownloadRecordCount, inputColumnSeperator);
        }

    } else {
        // not table resource
        if (!existsResource(projName, resourceName)) {

            DownloadUtils.downloadResource(getOdps(), projName, resourceName, limitDownloadRecordCount,
                    inputColumnSeperator);
        }
        FileUtils.copyFileToDirectory(file, resourceRootDir);
    }

    LOG.info("Finished copy resource: " + projName + "." + resourceName + "-->"
            + resourceRootDir.getAbsolutePath());
}

From source file:com.codesourcery.internal.installer.InstallManager.java

/**
 * Returns the wizard pages from all install modules.  This method ensures
 * that wizard pages with the same name are not returned.
 * 
 * @return Wizard pages
 */
protected IInstallWizardPage[] getModulePages() {
    // Filter duplicated named pages, maintain order
    LinkedHashMap<String, IInstallWizardPage> pages = new LinkedHashMap<String, IInstallWizardPage>();
    for (IInstallModule module : getModules()) {
        IInstallWizardPage[] modulePages = module.getInstallPages(getInstallMode());
        if (modulePages != null) {
            for (IInstallWizardPage modulePage : modulePages) {
                pages.put(modulePage.getName(), modulePage);
            }
        }
    }
    return pages.values().toArray(new IInstallWizardPage[pages.size()]);
}
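
getModulePages() relies on LinkedHashMap to drop duplicate page names while keeping insertion order, then uses size() to allocate the result array exactly. A standalone sketch of the same idea with plain strings (names are illustrative only):

import java.util.LinkedHashMap;

public class DedupeByNameExample {
    public static void main(String[] args) {
        String[] pageNames = { "welcome", "license", "welcome", "summary" };

        // Later entries with the same key overwrite earlier ones; the order of first insertion is kept.
        LinkedHashMap<String, String> byName = new LinkedHashMap<String, String>();
        for (String name : pageNames) {
            byName.put(name, name.toUpperCase());
        }

        // size() sizes the output array exactly, as in getModulePages() above.
        String[] unique = byName.keySet().toArray(new String[byName.size()]);
        System.out.println(unique.length); // 3
    }
}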

From source file:com.mozilla.telemetry.pig.eval.json.ValidateTelemetrySubmission.java

@SuppressWarnings("unchecked")
protected String validateTelemetryJson(String json) {
    try {
        Map<String, Object> jsonMap = jsonMapper.readValue(json, new TypeReference<Map<String, Object>>() {
        });
        String appVersion = getAppVersionFromTelemetryDoc(jsonMap);
        Map<String, Object> info = (Map<String, Object>) jsonMap.get(TelemetryConstants.INFO);
        if (info == null) {
            info = new LinkedHashMap<String, Object>();
            jsonMap.put(TelemetryConstants.INFO, info);
        }

        if (appVersion == null) {
            LOG.info("appVersion is null");
            info.put(TelemetryConstants.VALID_FOR_SCHEMA, "false");
            return jsonMapper.writeValueAsString(jsonMap);
        }

        Map<String, Map<String, Object>> referenceValues = getJsonSpec(appVersion);
        if (referenceValues == null) {
            LOG.info("referenceValues is null " + appVersion);
        }
        pigCounterHelper.incrCounter(ReportStats.SUBMISSIONS_EVALUATED, 1L);

        Map<String, Object> histograms = (Map<String, Object>) jsonMap.get(TelemetryConstants.HISTOGRAMS);
        if (histograms == null) {
            info.put(TelemetryConstants.VALID_FOR_SCHEMA, "false");
            return jsonMapper.writeValueAsString(jsonMap);
        }
        boolean validForSchema = true;
        for (Map.Entry<String, Object> entry : histograms.entrySet()) {
            String jKey = entry.getKey();
            String min = new String();
            String max = new String();
            String histogramType = new String();
            String bucketCount = new String();
            boolean validHistogram = true;
            Map<String, Object> bucketValues = new LinkedHashMap<String, Object>();
            LinkedHashMap<String, Object> histogramValues = (LinkedHashMap<String, Object>) entry.getValue();

            for (Map.Entry<String, Object> histogramValue : histogramValues.entrySet()) {
                try {
                    if (StringUtils.equals(histogramValue.getKey(), TelemetryConstants.RANGE)) {
                        List<Integer> range = (List<Integer>) histogramValue.getValue();
                        min = String.valueOf(range.get(0));
                        max = String.valueOf(range.get(1));
                    } else if (StringUtils.equals(histogramValue.getKey(), TelemetryConstants.HISTOGRAM_TYPE)) {
                        histogramType = String.valueOf(histogramValue.getValue());
                    } else if (StringUtils.equals(histogramValue.getKey(), TelemetryConstants.BUCKET_COUNT)) {
                        bucketCount = String.valueOf(histogramValue.getValue());
                    } else if (StringUtils.equals(histogramValue.getKey(), TelemetryConstants.VALUES)) {
                        bucketValues = (Map<String, Object>) histogramValue.getValue();
                    }
                } catch (Exception e) {
                    LOG.error(histogramValue, e);
                }
            }

            if (referenceValues.containsKey(jKey)) {
                pigCounterHelper.incrCounter(ReportStats.KNOWN_HISTOGRAMS, 1L);
                Map<String, Object> referenceHistograms = referenceValues.get(jKey);
                String referenceHistogramType = (String) referenceHistograms
                        .get(TelemetryConstants.HISTOGRAM_TYPE);

                if (!StringUtils.equals(referenceHistogramType, histogramType)) {
                    validHistogram = false;
                    pigCounterHelper.incrCounter(ReportStats.INVALID_HISTOGRAM_TYPE, 1L);
                }
                if (!StringUtils.equals((String) referenceHistograms.get(TelemetryConstants.MIN), min)) {
                    validHistogram = false;
                    pigCounterHelper.incrCounter(ReportStats.INVALID_HISTOGRAM_MIN, 1L);
                }
                if (!StringUtils.equals((String) referenceHistograms.get(TelemetryConstants.MAX), max)) {
                    validHistogram = false;
                    pigCounterHelper.incrCounter(ReportStats.INVALID_HISTOGRAM_MAX, 1L);
                }
                if (!StringUtils.equals((String) referenceHistograms.get(TelemetryConstants.BUCKET_COUNT),
                        bucketCount)) {
                    validHistogram = false;
                    pigCounterHelper.incrCounter(ReportStats.INVALID_HISTOGRAM_BUCKET_COUNT, 1L);
                }
                if (bucketValues == null || bucketValues.size() <= 0) {
                    pigCounterHelper.incrCounter(ReportStats.NO_HISTOGRAM_BUCKET_VALUES, 1L);
                    validHistogram = false;
                } else {
                    LinkedHashMap<String, Integer> invalidValues = new LinkedHashMap<String, Integer>();
                    List<Integer> referenceBucketValues = (List<Integer>) referenceHistograms
                            .get(TelemetryConstants.BUCKETS);
                    for (Map.Entry<String, Object> bucketValue : bucketValues.entrySet()) {
                        int bucketKey = -1;
                        try {
                            bucketKey = Integer.parseInt(bucketValue.getKey());
                        } catch (Exception e) {
                            bucketKey = -1;
                        }
                        if (!referenceBucketValues.contains(bucketKey)) {
                            invalidValues.put(TelemetryConstants.VALUES, bucketKey);
                        }
                    }

                    if (invalidValues.size() > 0) {
                        pigCounterHelper.incrCounter(ReportStats.INVALID_HISTOGRAM_BUCKET_VALUE, 1L);
                        validHistogram = false;
                    }
                    invalidValues = null;
                }

                if (validHistogram) {
                    histogramValues.put(TelemetryConstants.VALID, "true");
                    pigCounterHelper.incrCounter(ReportStats.VALID_HISTOGRAM, 1L);
                } else {
                    validForSchema = false;
                    histogramValues.put(TelemetryConstants.VALID, "false");
                    pigCounterHelper.incrCounter(ReportStats.INVALID_HISTOGRAM, 1L);
                }

            } else {
                pigCounterHelper.incrCounter(ReportStats.UNKNOWN_HISTOGRAMS, 1L);
            }
        }
        if (validForSchema) {
            info.put(TelemetryConstants.VALID_FOR_SCHEMA, "true");
        } else {
            info.put(TelemetryConstants.VALID_FOR_SCHEMA, "false");
        }
        return jsonMapper.writeValueAsString(jsonMap);
    } catch (JsonParseException e) {
        LOG.info("ERROR: failed to process telemetry spec jsons " + e.getMessage());
        pigCounterHelper.incrCounter(ReportStats.INVALID_JSON_STRUCTURE, 1L);
    } catch (JsonMappingException e) {
        LOG.info("ERROR: failed to process telemetry spec jsons " + e.getMessage());
        pigCounterHelper.incrCounter(ReportStats.INVALID_JSON_STRUCTURE, 1L);
    } catch (IOException e) {
        LOG.info("ERROR: failed to process telemetry spec jsons " + e.getMessage());
    }
    return null;
}

From source file:com.karus.danktitles.commands.HelpSubcommand.java

@Override
public void execute(CommandSender sender, String[] args) {

    // Methods inherited from CommandChecker
    if (!checkLength(sender, args, 1, 3))
        return;
    if (!checkSender(sender, "danktitles.help"))
        return;

    LinkedHashMap<String, MutablePair<String, String>> parsedCommands;

    // Checks if the list needs to be filtered
    if (args.length == 1 || args[1].equals("all")) {
        parsedCommands = new LinkedHashMap<>(
                commands.entrySet().stream().filter(entry -> sender.hasPermission(entry.getValue().getLeft()))
                        .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue())));
    } else {
        parsedCommands = new LinkedHashMap<>(commands.entrySet().stream().filter(
                entry -> entry.getKey().contains(args[1]) && sender.hasPermission(entry.getValue().getLeft()))
                .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue())));
    }

    if (parsedCommands.isEmpty()) {
        sender.sendMessage(ChatColor.RED + "No matches found.");
        return;
    }

    if (args.length == 3) {
        try {
            page = Integer.parseInt(args[2]);
        } catch (NumberFormatException e) {
            sender.sendMessage(ChatColor.RED + "Invalid page number!");
            return;
        }
    } else {
        page = 1;
    }

    int totalPages = (int) Math.max(1, Math.floor(parsedCommands.size() / SIZE));

    if (page <= 0 || page > totalPages) {
        sender.sendMessage(ChatColor.RED + "Invalid page number!");
        return;
    }

    sender.sendMessage(ChatColor.GOLD + "[Commands - (" + ChatColor.RED + page + "/" + totalPages
            + ChatColor.GOLD + ") ]");

    ArrayList<String> keys = new ArrayList<>(parsedCommands.keySet());

    IntStream.range(page * SIZE - SIZE, parsedCommands.size()).limit(SIZE)
            .forEach(i -> sender.sendMessage(ChatColor.GOLD + commands.get(keys.get(i)).getRight()));

}

From source file:org.apache.atlas.hive.hook.HiveHook.java

private void collect(HiveEventContext event) throws Exception {

    assert event.getHookType() == HookContext.HookType.POST_EXEC_HOOK : "Non-POST_EXEC_HOOK not supported!";

    LOG.info("Entered Atlas hook for hook type {}, operation {} , user {} as {}", event.getHookType(),
            event.getOperation(), event.getUgi().getRealUser(), event.getUgi().getShortUserName());

    HiveMetaStoreBridge dgiBridge = new HiveMetaStoreBridge(atlasProperties, hiveConf);

    switch (event.getOperation()) {
    case CREATEDATABASE:
        handleEventOutputs(dgiBridge, event, Type.DATABASE);
        break;

    case CREATETABLE:
        LinkedHashMap<Type, Referenceable> tablesCreated = handleEventOutputs(dgiBridge, event, Type.TABLE);
        if (tablesCreated != null && tablesCreated.size() > 0) {
            handleExternalTables(dgiBridge, event, tablesCreated);
        }
        break;

    case CREATETABLE_AS_SELECT:
    case CREATEVIEW:
    case ALTERVIEW_AS:
    case LOAD:
    case EXPORT:
    case IMPORT:
    case QUERY:
    case TRUNCATETABLE:
        registerProcess(dgiBridge, event);
        break;

    case ALTERTABLE_RENAME:
    case ALTERVIEW_RENAME:
        renameTable(dgiBridge, event);
        break;

    case ALTERTABLE_FILEFORMAT:
    case ALTERTABLE_CLUSTER_SORT:
    case ALTERTABLE_BUCKETNUM:
    case ALTERTABLE_PROPERTIES:
    case ALTERVIEW_PROPERTIES:
    case ALTERTABLE_SERDEPROPERTIES:
    case ALTERTABLE_SERIALIZER:
    case ALTERTABLE_ADDCOLS:
    case ALTERTABLE_REPLACECOLS:
    case ALTERTABLE_PARTCOLTYPE:
        handleEventOutputs(dgiBridge, event, Type.TABLE);
        break;

    case ALTERTABLE_RENAMECOL:
        renameColumn(dgiBridge, event);
        break;

    case ALTERTABLE_LOCATION:
        LinkedHashMap<Type, Referenceable> tablesUpdated = handleEventOutputs(dgiBridge, event, Type.TABLE);
        if (tablesUpdated != null && tablesUpdated.size() > 0) {
            //Track altered lineage in case of external tables
            handleExternalTables(dgiBridge, event, tablesUpdated);
        }
        break;
    case ALTERDATABASE:
    case ALTERDATABASE_OWNER:
        handleEventOutputs(dgiBridge, event, Type.DATABASE);
        break;

    case DROPTABLE:
    case DROPVIEW:
        deleteTable(dgiBridge, event);
        break;

    case DROPDATABASE:
        deleteDatabase(dgiBridge, event);
        break;

    default:
    }
}