Example usage for java.util Map toString

List of usage examples for java.util Map toString

Introduction

On this page you can find example usage for java.util Map toString.

Prototype

public String toString() 

Document

Returns a string representation of the object.
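For reference, a minimal sketch of the output this method typically produces: map implementations that extend AbstractMap (such as HashMap and TreeMap) render their entries as comma-separated key=value pairs enclosed in braces, in the map's iteration order. The class and variable names below are illustrative only.

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class MapToStringExample {
    public static void main(String[] args) {
        Map<String, Integer> scores = new TreeMap<>();
        scores.put("alice", 10);
        scores.put("bob", 7);

        // AbstractMap.toString() renders entries as {key=value, key=value}
        // in iteration order (sorted by key for a TreeMap).
        System.out.println(scores.toString()); // prints {alice=10, bob=7}

        // An empty map prints as {}
        System.out.println(new HashMap<String, Integer>()); // prints {}
    }
}

Several of the usage examples below rely on exactly this textual form, for instance when comparing an expected Map against a stored string in a test assertion or when logging a map's contents.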

Usage

From source file:org.marketcetera.strategy.LanguageTestBase.java

/**
 * Executes one iteration of the <code>getAllPositionsAsOf</code> test. 
 *
 * @param inDate a <code>Date</code> value
 * @param inExpectedPositions a <code>Map&lt;PositionKey&lt;Equity&gt;,BigDecimal&gt;</code> value
 * @throws Exception if an unexpected error occurs
 */
private void doAllPositionsAsOfTest(Date inDate, Map<PositionKey<Equity>, BigDecimal> inExpectedPositions)
        throws Exception {
    StrategyCoordinates strategy = getPositionsStrategy();
    setPropertiesToNull();
    AbstractRunningStrategy.setProperty("allPositionsAsOfDuringStop", "not-empty");
    if (inDate != null) {
        AbstractRunningStrategy.setProperty("date", Long.toString(inDate.getTime()));
    }
    verifyStrategyStartsAndStops(strategy.getName(), getLanguage(), strategy.getFile(), null, null, null);
    // verify expected results
    assertEquals((inExpectedPositions == null ? null : inExpectedPositions.toString()),
            AbstractRunningStrategy.getProperty("allPositionsAsOf"));
    assertNull(AbstractRunningStrategy.getProperty("allPositionsAsOfDuringStop"));
}

From source file:org.marketcetera.strategy.LanguageTestBase.java

/**
 * Executes one iteration of the <code>getAllOptionPositionsAsOf</code> test.
 *
 * @param inDate a <code>Date</code> value
 * @param inExpectedPositions a <code>Map&lt;PositionKey&lt;Option&gt;,BigDecimal&gt;</code> value
 * @throws Exception if an unexpected error occurs
 */
private void doAllOptionPositionsAsOfTest(Date inDate, Map<PositionKey<Option>, BigDecimal> inExpectedPositions)
        throws Exception {
    StrategyCoordinates strategy = getPositionsStrategy();
    setPropertiesToNull();
    AbstractRunningStrategy.setProperty("allOptionPositionsAsOfDuringStop", "not-empty");
    if (inDate != null) {
        AbstractRunningStrategy.setProperty("date", Long.toString(inDate.getTime()));
    }
    verifyStrategyStartsAndStops(strategy.getName(), getLanguage(), strategy.getFile(), null, null, null);
    // verify expected results
    assertEquals((inExpectedPositions == null ? null : inExpectedPositions.toString()),
            AbstractRunningStrategy.getProperty("allOptionPositionsAsOf"));
    assertNull(AbstractRunningStrategy.getProperty("allOptionPositionsAsOfDuringStop"));
}

From source file:org.apache.hadoop.hbase.coprocessor.TimeseriesAggregateImplementation.java

@Override
public void getAvg(RpcController controller, TimeseriesAggregateRequest request,
        RpcCallback<TimeseriesAggregateResponse> done) {
    TimeseriesAggregateResponse response = null;
    InternalScanner scanner = null;
    Map<Long, SimpleEntry<Long, S>> averages = new HashMap<Long, SimpleEntry<Long, S>>();
    boolean hasScannerRange = false;

    if (!request.hasRange()) {
        // When no time range is passed in via the request, it is assumed
        // that the scanner is timestamp-range bound.
        hasScannerRange = true;
    }

    try {
        ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
        S sumVal = null;
        T temp;
        Long kvCountVal = 0L;
        Scan scan = ProtobufUtil.toScan(request.getScan());
        scanner = env.getRegion().getScanner(scan);
        List<TimeRange> timeRanges = getAllTimeRanges(scan, request);
        byte[] colFamily = scan.getFamilies()[0];

        List<Cell> results = new ArrayList<Cell>();
        boolean hasMoreRows = false;

        do {
            results.clear();
            hasMoreRows = scanner.next(results);
            for (Cell kv : results) {
                long timestamp = 0;
                if (hasScannerRange)
                    timestamp = kv.getTimestamp();
                else
                    timestamp = getMillisTimestampFromOffset(getTimestampFromRowKeyAsMillis(kv, request),
                            Bytes.toInt(kv.getQualifier()));
                for (TimeRange t : timeRanges) {
                    if (t.withinTimeRange(timestamp)) {
                        long minTimestamp = t.getMin();
                        if (averages.containsKey(minTimestamp)) {
                            sumVal = averages.get(minTimestamp).getValue();
                            kvCountVal = averages.get(minTimestamp).getKey();
                        } else {
                            sumVal = null;
                            kvCountVal = 0L;
                        }
                        temp = ci.getValue(colFamily, kv.getQualifier(), kv);
                        if (temp != null) {
                            kvCountVal++;
                            sumVal = ci.add(sumVal, ci.castToReturnType(temp));
                            averages.put(t.getMin(), new AbstractMap.SimpleEntry<Long, S>(kvCountVal, sumVal));
                        }
                    }
                }
            }
        } while (hasMoreRows);
        if (!averages.isEmpty()) {
            TimeseriesAggregateResponse.Builder responseBuilder = TimeseriesAggregateResponse.newBuilder();

            for (Entry<Long, SimpleEntry<Long, S>> entry : averages.entrySet()) {
                TimeseriesAggregateResponseEntry.Builder valueBuilder = TimeseriesAggregateResponseEntry
                        .newBuilder();
                TimeseriesAggregateResponseMapEntry.Builder mapElementBuilder = TimeseriesAggregateResponseMapEntry
                        .newBuilder();
                ByteString first = ci.getProtoForPromotedType(entry.getValue().getValue()).toByteString();
                valueBuilder.addFirstPart(first);
                ByteBuffer bb = ByteBuffer.allocate(8).putLong(entry.getValue().getKey());
                bb.rewind();
                valueBuilder.setSecondPart(ByteString.copyFrom(bb));
                mapElementBuilder.setKey(entry.getKey());
                mapElementBuilder.setValue(valueBuilder.build());
                responseBuilder.addEntry(mapElementBuilder.build());
            }
            response = responseBuilder.build();
        }
    } catch (IOException e) {
        ResponseConverter.setControllerException(controller, e);
    } finally {
        if (scanner != null) {
            try {
                scanner.close();
            } catch (IOException ignored) {
            }
        }
    }
    log.info("Averages from this region are " + env.getRegion().getRegionNameAsString() + ": "
            + averages.toString());
    done.run(response);
}

From source file:org.apache.kylin.tool.StorageCleanupJob.java

private void cleanUnusedIntermediateHiveTable(Configuration conf) throws Exception {
    final KylinConfig config = KylinConfig.getInstanceFromEnv();
    JobEngineConfig engineConfig = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
    final CliCommandExecutor cmdExec = config.getCliCommandExecutor();
    final int uuidLength = 36;
    final String preFix = "kylin_intermediate_";
    final String uuidPattern = "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}";

    IHiveClient hiveClient = HiveClientFactory.getHiveClient();
    List<String> hiveTableNames = hiveClient.getHiveTableNames(config.getHiveDatabaseForIntermediateTable());
    Iterable<String> kylinIntermediates = Iterables.filter(hiveTableNames, new Predicate<String>() {
        @Override
        public boolean apply(@Nullable String input) {
            return input != null && input.startsWith("kylin_intermediate_");
        }
    });

    List<String> allJobs = executableManager.getAllJobIds();
    List<String> allHiveTablesNeedToBeDeleted = new ArrayList<String>();
    List<String> workingJobList = new ArrayList<String>();
    Map<String, String> segmentId2JobId = Maps.newHashMap();

    StringBuilder sb = new StringBuilder();
    for (String jobId : allJobs) {
        // only remove FINISHED and DISCARDED job intermediate table
        final ExecutableState state = executableManager.getOutput(jobId).getState();
        if (!state.isFinalState()) {
            workingJobList.add(jobId);
            sb.append(jobId).append("(").append(state).append("), ");
        }

        String segmentId = getSegmentIdFromJobId(jobId);
        if (segmentId != null) {//some jobs are not cubing jobs 
            segmentId2JobId.put(segmentId, jobId);
        }
    }
    logger.info("Working jobIDs: " + workingJobList);

    for (String line : kylinIntermediates) {
        logger.info("Checking table " + line);

        if (!line.startsWith(preFix))
            continue;

        if (force == true) {
            logger.warn("Warning: will delete all intermediate hive tables!!!!!!!!!!!!!!!!!!!!!!");
            allHiveTablesNeedToBeDeleted.add(line);
            continue;
        }

        boolean isNeedDel = true;

        if (line.length() > preFix.length() + uuidLength) {
            String uuid = line.substring(line.length() - uuidLength, line.length());
            uuid = uuid.replace("_", "-");
            final Pattern UUId_PATTERN = Pattern.compile(uuidPattern);
            if (UUId_PATTERN.matcher(uuid).matches()) {
                //Check whether it's a hive table in use
                if (isTableInUse(uuid, workingJobList)) {
                    logger.info("Skip deleting because the table is in use");
                    isNeedDel = false;
                }
            } else {
                logger.info("Skip deleting because not match pattern");
                isNeedDel = false;
            }
        } else {
            logger.info("Skip deleting because length not qualified");
            isNeedDel = false;
        }

        if (isNeedDel) {
            allHiveTablesNeedToBeDeleted.add(line);
        }
    }

    if (delete == true) {

        try {
            final String useDatabaseHql = "USE " + config.getHiveDatabaseForIntermediateTable() + ";";
            final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
            hiveCmdBuilder.addStatement(useDatabaseHql);
            for (String delHive : allHiveTablesNeedToBeDeleted) {
                hiveCmdBuilder.addStatement("drop table if exists " + delHive + "; ");
                logger.info("Remove " + delHive + " from hive tables.");
            }
            cmdExec.execute(hiveCmdBuilder.build());

            //if kylin.source.hive.keep-flat-table, some intermediate table might be kept 
            //delete external path
            for (String tableToDelete : allHiveTablesNeedToBeDeleted) {
                String uuid = tableToDelete.substring(tableToDelete.length() - uuidLength,
                        tableToDelete.length());
                String segmentId = uuid.replace("_", "-");

                if (segmentId2JobId.containsKey(segmentId)) {
                    String path = JobBuilderSupport.getJobWorkingDir(engineConfig.getHdfsWorkingDirectory(),
                            segmentId2JobId.get(segmentId)) + "/" + tableToDelete;
                    Path externalDataPath = new Path(path);
                    FileSystem fs = HadoopUtil.getWorkingFileSystem();
                    if (fs.exists(externalDataPath)) {
                        fs.delete(externalDataPath, true);
                        logger.info("Hive table {}'s external path {} deleted", tableToDelete, path);
                    } else {
                        logger.info(
                                "Hive table {}'s external path {} not exist. It's normal if kylin.source.hive.keep-flat-table set false (By default)",
                                tableToDelete, path);
                    }
                } else {
                    logger.warn("Hive table {}'s job ID not found, segmentId2JobId: {}", tableToDelete,
                            segmentId2JobId.toString());
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

    } else {
        System.out.println("------ Intermediate Hive Tables To Be Dropped ------");
        for (String hiveTable : allHiveTablesNeedToBeDeleted) {
            System.out.println(hiveTable);
        }
        System.out.println("----------------------------------------------------");
    }
}

From source file:org.encuestame.business.service.SearchService.java

public Map<String, List<GlobalSearchItem>> quickSearch(final String keyword, final Language language,
        final Integer start, final Integer limit, Integer limitByItem,
        final List<TypeSearchResult> resultsAllowed) throws EnMeNoResultsFoundException, IOException {
    log.debug("******************************");
    log.debug("keyword " + keyword);
    log.debug("language " + language);
    log.debug("start " + start);
    log.debug("limit " + limit);
    log.debug("limitByItem " + limitByItem);
    log.debug("resultsAllowed " + resultsAllowed.size());
    final Map<String, List<GlobalSearchItem>> hashset = new HashedMap();
    hashset.put("questions", ListUtils.EMPTY_LIST);
    hashset.put("Polls", ListUtils.EMPTY_LIST);
    hashset.put("Tweetpolls", ListUtils.EMPTY_LIST);
    hashset.put("profiles", ListUtils.EMPTY_LIST);
    hashset.put("tags", ListUtils.EMPTY_LIST);
    hashset.put("comments", ListUtils.EMPTY_LIST);
    limitByItem = limitByItem == null ? 0 : limitByItem;
    // TODO :See ENCUESTAME-670: to know the reason : why has been commented the following block of code.
    if (resultsAllowed.indexOf(TypeSearchResult.QUESTION) != -1) {
        List<GlobalSearchItem> questionResult = UtilConvertToSearchItems
                .convertQuestionToSearchItem(retrieveQuestionByKeyword(keyword, null));
        if (limitByItem != 0 && questionResult.size() > limitByItem) {
            questionResult = questionResult.subList(0, limitByItem);
        }
        log.debug("questionResult " + questionResult.size());
        hashset.put("questions", questionResult);
    }

    if (resultsAllowed.indexOf(TypeSearchResult.POLL) != -1) {
        List<GlobalSearchItem> polls = UtilConvertToSearchItems
                .convertPollToSearchItem(getPollDao().getPollsByQuestionKeyword(keyword, null, limitByItem, 0));
        //         if (limitByItem != 0 && polls.size() > limitByItem) {
        //            polls = polls.subList(0, limitByItem);
        //         }
        log.debug("Polls " + polls.size());
        hashset.put("Polls", polls);
    }

    if (resultsAllowed.indexOf(TypeSearchResult.TWEETPOLL) != -1) {
        List<GlobalSearchItem> tweetPolls = UtilConvertToSearchItems.convertTweetPollToSearchItem(
                getTweetPollDao().retrieveTweetPollByKeyword(keyword, start, limitByItem));
        //         if (limitByItem != 0 && tweetPolls.size() > limitByItem) {
        //            tweetPolls = tweetPolls.subList(0, limitByItem);
        //         }
        log.debug("Tweetpolls " + tweetPolls.size());
        hashset.put("Tweetpolls", tweetPolls);
    }

    if (resultsAllowed.indexOf(TypeSearchResult.PROFILE) != -1) {
        List<GlobalSearchItem> profiles = UtilConvertToSearchItems
                .convertProfileToSearchItem(getAccountDao().getPublicProfiles(keyword, limitByItem, start));
        //            if (limitByItem != 0 && profiles.size() > limitByItem) {
        //                profiles = profiles.subList(0, limitByItem);
        //            }
        log.debug("profiles " + profiles.size());
        hashset.put("profiles", profiles);
    }

    if (resultsAllowed.indexOf(TypeSearchResult.HASHTAG) != -1) {
        List<GlobalSearchItem> tags = UtilConvertToSearchItems.convertHashTagToSearchItem(
                getHashTagDao().getListHashTagsByKeyword(keyword, limitByItem, null));
        //            if (limitByItem != 0 && tags.size() > limitByItem) {
        //                tags = tags.subList(0, limitByItem);
        //            }
        log.debug("tags " + tags.size());
        hashset.put("tags", tags);
    }

    if (resultsAllowed.indexOf(TypeSearchResult.COMMENT) != -1) {
        // TODO: add comment search implementation+
        List<GlobalSearchItem> comments = UtilConvertToSearchItems.convertCommentToSearchItem(
                getCommentsOperations().getCommentsByKeyword(keyword, limitByItem, null));
        //         if (limitByItem != 0 && comments.size() > limitByItem) {
        //            comments = comments.subList(0, limitByItem);
        //         }
        log.debug("Comments " + comments.size());
        hashset.put("comments", comments);
    }
    // List<GlobalSearchItem> totalItems = new ArrayList<GlobalSearchItem>(hashset);

    //TODO: order by rated or something.

    //filter my limit
    /*if (limit != null && start != null) {
    log.debug("split to "+limit  + " starting on "+start + " to list with size "+totalItems.size());
    totalItems = totalItems.size() > limit ? totalItems
            .subList(start, limit) : totalItems;
    }
    //auto enumerate results.
    int x = 1;
    for (int i = 0; i < totalItems.size(); i++) {
    totalItems.get(i).setId(Long.valueOf(x));
    x++;
    }*/
    log.debug("total::" + hashset.toString());
    return hashset;
}

From source file:org.marketcetera.strategy.LanguageTestBase.java

/**
 * Executes one iteration of the <code>getOptionPositionsAsOf</code> test. 
 *
 * @param inOptionRoots a <code>String[]</code> value
 * @param inDate a <code>Date</code> value
 * @param inParameters a <code>Properties</code> value to use as parameters if non-null
 * @param inExpectedPositions a <code>Map&lt;PositionKey&lt;Option&gt;,BigDecimal&gt;</code> value
 * @throws Exception if an unexpected error occurs
 */
private void doOptionPositionsAsOfTest(String[] inOptionRoots, Date inDate, Properties inParameters,
        Map<PositionKey<Option>, BigDecimal> inExpectedPositions) throws Exception {
    StrategyCoordinates strategy = getPositionsStrategy();
    setPropertiesToNull();
    AbstractRunningStrategy.setProperty("optionPositionsAsOfDuringStop", "not-empty");
    if (inOptionRoots != null && inOptionRoots.length > 0) {
        StringBuilder builder = new StringBuilder();
        for (String optionRoot : inOptionRoots) {
            builder.append(optionRoot).append(',');
        }
        AbstractRunningStrategy.setProperty("optionRoots", builder.toString());
    }
    if (inDate != null) {
        AbstractRunningStrategy.setProperty("date", Long.toString(inDate.getTime()));
    }
    verifyStrategyStartsAndStops(strategy.getName(), getLanguage(), strategy.getFile(), inParameters, null,
            null);
    // verify expected results
    assertEquals((inExpectedPositions == null ? null : inExpectedPositions.toString()),
            AbstractRunningStrategy.getProperty("optionPositionsAsOf"));
    assertNull(AbstractRunningStrategy.getProperty("optionPositionsAsOfDuringStop"));
}

From source file:org.apache.hadoop.hive.ql.exec.DDLTask.java

private int alterIndex(Hive db, AlterIndexDesc alterIndex) throws HiveException {

    if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
        throw new UnsupportedOperationException("Indexes unsupported for Tez execution engine");
    }

    String baseTableName = alterIndex.getBaseTableName();
    String indexName = alterIndex.getIndexName();
    Index idx = db.getIndex(baseTableName, indexName);

    switch (alterIndex.getOp()) {
    case ADDPROPS:
        idx.getParameters().putAll(alterIndex.getProps());
        break;
    case UPDATETIMESTAMP:
        try {
            Map<String, String> props = new HashMap<String, String>();
            Map<Map<String, String>, Long> basePartTs = new HashMap<Map<String, String>, Long>();

            Table baseTbl = db.getTable(baseTableName);

            if (baseTbl.isPartitioned()) {
                List<Partition> baseParts;
                if (alterIndex.getSpec() != null) {
                    baseParts = db.getPartitions(baseTbl, alterIndex.getSpec());
                } else {
                    baseParts = db.getPartitions(baseTbl);
                }
                if (baseParts != null) {
                    for (Partition p : baseParts) {
                        Path dataLocation = p.getDataLocation();
                        FileSystem fs = dataLocation.getFileSystem(db.getConf());
                        FileStatus fss = fs.getFileStatus(dataLocation);
                        long lastModificationTime = fss.getModificationTime();

                        FileStatus[] parts = fs.listStatus(dataLocation, FileUtils.HIDDEN_FILES_PATH_FILTER);
                        if (parts != null && parts.length > 0) {
                            for (FileStatus status : parts) {
                                if (status.getModificationTime() > lastModificationTime) {
                                    lastModificationTime = status.getModificationTime();
                                }
                            }
                        }
                        basePartTs.put(p.getSpec(), lastModificationTime);
                    }
                }
            } else {
                FileSystem fs = baseTbl.getPath().getFileSystem(db.getConf());
                FileStatus fss = fs.getFileStatus(baseTbl.getPath());
                basePartTs.put(null, fss.getModificationTime());
            }
            for (Map<String, String> spec : basePartTs.keySet()) {
                if (spec != null) {
                    props.put(spec.toString(), basePartTs.get(spec).toString());
                } else {
                    props.put("base_timestamp", basePartTs.get(null).toString());
                }
            }
            idx.getParameters().putAll(props);
        } catch (HiveException e) {
            throw new HiveException("ERROR: Failed to update index timestamps");
        } catch (IOException e) {
            throw new HiveException("ERROR: Failed to look up timestamps on filesystem");
        }

        break;
    default:
        console.printError("Unsupported Alter command");
        return 1;
    }

    // set last modified by properties
    if (!updateModifiedParameters(idx.getParameters(), conf)) {
        return 1;
    }

    try {
        db.alterIndex(baseTableName, indexName, idx);
    } catch (InvalidOperationException e) {
        console.printError("Invalid alter operation: " + e.getMessage());
        LOG.info("alter index: " + stringifyException(e));
        return 1;
    } catch (HiveException e) {
        console.printError("Invalid alter operation: " + e.getMessage());
        return 1;
    }
    return 0;
}

From source file:com.MainFiles.Functions.java

public String[] getBalance(String strAccountNo, String strAgentID, String strTerminalID) throws IOException {
    String[] strBalance = null;
    try {
        String referenceNumber = this.PadZeros(12, this.generateCorelationID());
        Map<String, String> params = new HashMap<>();

        params.put("0", "0200");
        params.put("2", "0000000000000");
        params.put("3", "310000");
        params.put("4", "0");
        params.put("7", this.anyDate("MMddHHmmss"));
        params.put("11", this.anyDate("MMddHHmmss"));
        params.put("32", SOURCE_ID);
        params.put("37", referenceNumber);
        params.put("65", "");
        params.put("66", getTerminalID(strTerminalID));
        params.put("68", strTerminalID);
        params.put("88", "BALANCE ENQUIRY FOR ACCOUNT: " + strAccountNo);
        params.put("100", "BI");
        params.put("102", strAccountNo);
        params.put("103", "");
        params.put("104", strAgentID);

        boolean sentToWebLogic = false;
        HashMap ParamsFromAdapter = new HashMap();
        QueueWriter queueWriter = new QueueWriter(QUEUE_REQUEST, PROVIDER_URL);

        int trials = 0;
        do {
            sentToWebLogic = queueWriter.sendObject((HashMap) params, referenceNumber);
            trials++;
        } while (sentToWebLogic == false & trials < 3);

        if (sentToWebLogic) {

            long Start = System.currentTimeMillis();
            long Stop = Start + (Config.flexTimeOut * 1000);
            do {
                Thread.currentThread().sleep(100);
                ParamsFromAdapter = this.getWeblogicMessageFromQueue(referenceNumber);
            } while (ParamsFromAdapter.isEmpty() && System.currentTimeMillis() < Stop);

            if (ParamsFromAdapter.isEmpty()) {
                params.put("39", "999");
                params.put("48", "No response from Flex");
                this.log("No Response " + referenceNumber + ":" + params.toString(), "ERROR");

            } else {
                strBalance = ParamsFromAdapter.get("54").toString().split("\\|");
            }

        }
    } catch (Exception ex) {
        this.log("INFO : Function getBalance()  " + ex.getMessage() + "\n" + this.StackTraceWriter(ex),
                "ERROR");
    }
    return strBalance;
}

From source file:org.apache.kylin.rest.job.StorageCleanupJob.java

private void cleanUnusedIntermediateHiveTable() throws Exception {
    Configuration conf = HadoopUtil.getCurrentConfiguration();
    final KylinConfig config = KylinConfig.getInstanceFromEnv();
    JobEngineConfig engineConfig = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
    final CliCommandExecutor cmdExec = config.getCliCommandExecutor();
    final int uuidLength = 36;
    final String preFix = "kylin_intermediate_";
    final String uuidPattern = "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}";

    ISourceMetadataExplorer explr = SourceFactory.getDefaultSource().getSourceMetadataExplorer();
    List<String> hiveTableNames = explr.listTables(config.getHiveDatabaseForIntermediateTable());
    Iterable<String> kylinIntermediates = Iterables.filter(hiveTableNames, new Predicate<String>() {
        @Override
        public boolean apply(@Nullable String input) {
            return input != null && input.startsWith("kylin_intermediate_");
        }
    });

    List<String> allJobs = executableManager.getAllJobIds();
    List<String> allHiveTablesNeedToBeDeleted = new ArrayList<String>();
    List<String> workingJobList = new ArrayList<String>();
    Map<String, String> segmentId2JobId = Maps.newHashMap();

    StringBuilder sb = new StringBuilder();
    for (String jobId : allJobs) {
        // only remove FINISHED and DISCARDED job intermediate table
        final ExecutableState state = executableManager.getOutput(jobId).getState();
        if (!state.isFinalState()) {
            workingJobList.add(jobId);
            sb.append(jobId).append("(").append(state).append("), ");
        }

        try {
            String segmentId = getSegmentIdFromJobId(jobId);
            if (segmentId != null) {//some jobs are not cubing jobs 
                segmentId2JobId.put(segmentId, jobId);
            }
        } catch (Exception ex) {
            logger.warn("Failed to find segment ID from job ID " + jobId + ", ignore it");
            // some older version job metadata may fail to read, ignore it
        }
    }
    logger.info("Working jobIDs: " + workingJobList);

    for (String line : kylinIntermediates) {
        logger.info("Checking table " + line);

        if (!line.startsWith(preFix))
            continue;

        if (force == true) {
            logger.warn("Warning: will delete all intermediate hive tables!!!!!!!!!!!!!!!!!!!!!!");
            allHiveTablesNeedToBeDeleted.add(line);
            continue;
        }

        boolean isNeedDel = true;

        if (line.length() < preFix.length() + uuidLength) {
            logger.info("Skip deleting because length is not qualified");
            continue;
        }

        String uuid = line.substring(line.length() - uuidLength, line.length());
        uuid = uuid.replace("_", "-");
        final Pattern UUID_PATTERN = Pattern.compile(uuidPattern);

        if (!UUID_PATTERN.matcher(uuid).matches()) {
            logger.info("Skip deleting because pattern doesn't match");
            continue;
        }

        //Some intermediate table ends with job's uuid
        if (allJobs.contains(uuid)) {
            isNeedDel = !workingJobList.contains(uuid);
        } else if (isTableInUse(uuid, workingJobList)) {
            logger.info("Skip deleting because the table is in use");
            isNeedDel = false;
        }

        if (isNeedDel) {
            allHiveTablesNeedToBeDeleted.add(line);
        }
    }

    if (delete == true) {

        try {
            final String useDatabaseHql = "USE " + config.getHiveDatabaseForIntermediateTable() + ";";
            final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
            hiveCmdBuilder.addStatement(useDatabaseHql);
            for (String delHive : allHiveTablesNeedToBeDeleted) {
                hiveCmdBuilder.addStatement("drop table if exists " + delHive + "; ");
                logger.info("Remove " + delHive + " from hive tables.");
            }
            cmdExec.execute(hiveCmdBuilder.build());

            //if kylin.source.hive.keep-flat-table, some intermediate table might be kept 
            //delete external path
            for (String tableToDelete : allHiveTablesNeedToBeDeleted) {
                String uuid = tableToDelete.substring(tableToDelete.length() - uuidLength,
                        tableToDelete.length());
                String segmentId = uuid.replace("_", "-");

                if (segmentId2JobId.containsKey(segmentId)) {
                    String path = JobBuilderSupport.getJobWorkingDir(engineConfig.getHdfsWorkingDirectory(),
                            segmentId2JobId.get(segmentId)) + "/" + tableToDelete;
                    Path externalDataPath = new Path(path);
                    FileSystem fs = HadoopUtil.getWorkingFileSystem();
                    if (fs.exists(externalDataPath)) {
                        fs.delete(externalDataPath, true);
                        logger.info("Hive table {}'s external path {} deleted", tableToDelete, path);
                    } else {
                        logger.info(
                                "Hive table {}'s external path {} not exist. It's normal if kylin.source.hive.keep-flat-table set false (By default)",
                                tableToDelete, path);
                    }
                } else {
                    logger.warn("Hive table {}'s job ID not found, segmentId2JobId: {}", tableToDelete,
                            segmentId2JobId.toString());
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

    } else {
        System.out.println("------ Intermediate Hive Tables To Be Dropped ------");
        for (String hiveTable : allHiveTablesNeedToBeDeleted) {
            System.out.println(hiveTable);
        }
        System.out.println("----------------------------------------------------");
    }
}

From source file:com.alibaba.jstorm.daemon.supervisor.SyncProcessEvent.java

/**
 * launch a worker in distributed mode
 *
 * @param conf
 * @param sharedcontext
 * @param topologyId
 * @param supervisorId
 * @param port
 * @param workerId
 * @throws IOException
 * @pdOid 6ea369dd-5ce2-4212-864b-1f8b2ed94abb
 */
public void launchWorker(Map conf, IContext sharedcontext, String topologyId, String supervisorId, Integer port,
        String workerId, LocalAssignment assignment) throws IOException {

    // STORM-LOCAL-DIR/supervisor/stormdist/topologyId
    String stormroot = StormConfig.supervisor_stormdist_root(conf, topologyId);

    // STORM-LOCAL-DIR/supervisor/stormdist/topologyId/stormjar.jar
    String stormjar = StormConfig.stormjar_path(stormroot);

    // get supervisor conf
    Map stormConf = StormConfig.read_supervisor_topology_conf(conf, topologyId);

    Map totalConf = new HashMap();
    totalConf.putAll(conf);
    totalConf.putAll(stormConf);

    // get classpath
    // String[] param = new String[1];
    // param[0] = stormjar;
    // String classpath = JStormUtils.add_to_classpath(
    // JStormUtils.current_classpath(), param);

    // get child process parameter

    String stormhome = System.getProperty("jstorm.home");

    long memSize = assignment.getMem();
    long memMinSize = ConfigExtension.getMemMinSizePerWorker(totalConf);
    int cpuNum = assignment.getCpu();
    long memGsize = memSize / JStormUtils.SIZE_1_G;
    int gcThreadsNum = memGsize > 4 ? (int) (memGsize * 1.5) : 4;
    String childopts = getChildOpts(totalConf);

    childopts += getGcDumpParam(Common.getTopologyNameById(topologyId), totalConf);

    Map<String, String> environment = new HashMap<String, String>();

    if (ConfigExtension.getWorkerRedirectOutput(totalConf)) {
        environment.put("REDIRECT", "true");
    } else {
        environment.put("REDIRECT", "false");
    }

    environment.put("LD_LIBRARY_PATH", (String) totalConf.get(Config.JAVA_LIBRARY_PATH));

    StringBuilder commandSB = new StringBuilder();

    try {
        if (this.cgroupManager != null) {
            commandSB.append(cgroupManager.startNewWorker(totalConf, cpuNum, workerId));
        }
    } catch (Exception e) {
        LOG.error("fail to prepare cgroup to workerId: " + workerId, e);
        return;
    }

    // commandSB.append("java -server -Xdebug -Xrunjdwp:transport=dt_socket,address=8000,server=y,suspend=n ");
    commandSB.append("java -server ");
    commandSB.append(" -Xms" + memMinSize);
    commandSB.append(" -Xmx" + memSize + " ");
    if (memMinSize < (memSize / 2))
        commandSB.append(" -Xmn" + memMinSize + " ");
    else
        commandSB.append(" -Xmn" + memSize / 2 + " ");
    if (memGsize >= 2) {
        commandSB.append(" -XX:PermSize=" + memSize / 32);
    } else {
        commandSB.append(" -XX:PermSize=" + memSize / 16);
    }
    commandSB.append(" -XX:MaxPermSize=" + memSize / 16);
    commandSB.append(" -XX:ParallelGCThreads=" + gcThreadsNum);
    commandSB.append(" " + childopts);
    commandSB.append(" " + (assignment.getJvm() == null ? "" : assignment.getJvm()));

    commandSB.append(" -Djava.library.path=");
    commandSB.append((String) totalConf.get(Config.JAVA_LIBRARY_PATH));

    if (stormhome != null) {
        commandSB.append(" -Djstorm.home=");
        commandSB.append(stormhome);
    }

    String logDir = System.getProperty("jstorm.log.dir");
    if (logDir != null)
        commandSB.append(" -Djstorm.log.dir=").append(logDir);
    commandSB.append(getLogParameter(totalConf, stormhome, assignment.getTopologyName(), port));

    String classpath = getClassPath(stormjar, stormhome, totalConf);
    String workerClassPath = (String) totalConf.get(Config.TOPOLOGY_CLASSPATH);
    List<String> otherLibs = (List<String>) stormConf.get(GenericOptionsParser.TOPOLOGY_LIB_NAME);
    StringBuilder sb = new StringBuilder();
    if (otherLibs != null) {
        for (String libName : otherLibs) {
            sb.append(StormConfig.stormlib_path(stormroot, libName)).append(":");
        }
    }
    workerClassPath = workerClassPath + ":" + sb.toString();

    Map<String, String> policyReplaceMap = new HashMap<String, String>();
    String realClassPath = classpath + ":" + workerClassPath;
    policyReplaceMap.put(SandBoxMaker.CLASS_PATH_KEY, realClassPath);
    commandSB.append(sandBoxMaker.sandboxPolicy(workerId, policyReplaceMap));

    commandSB.append(" -cp ");
    // commandSB.append(workerClassPath + ":");
    commandSB.append(classpath);
    if (!ConfigExtension.isEnableTopologyClassLoader(totalConf))
        commandSB.append(":").append(workerClassPath);

    commandSB.append(" com.alibaba.jstorm.daemon.worker.Worker ");
    commandSB.append(topologyId);

    commandSB.append(" ");
    commandSB.append(supervisorId);

    commandSB.append(" ");
    commandSB.append(port);

    commandSB.append(" ");
    commandSB.append(workerId);

    commandSB.append(" ");
    commandSB.append(workerClassPath + ":" + stormjar);

    String cmd = commandSB.toString();
    cmd = cmd.replace("%ID%", port.toString());
    cmd = cmd.replace("%TOPOLOGYID%", topologyId);
    if (stormhome != null) {
        cmd = cmd.replace("%JSTORM_HOME%", stormhome);
    } else {
        cmd = cmd.replace("%JSTORM_HOME%", "./");
    }

    LOG.info("Launching worker with command: " + cmd);
    LOG.info("Environment:" + environment.toString());

    JStormUtils.launch_process(cmd, environment, true);
}