Example usage for java.util Map clear

List of usage examples for java.util Map clear

Introduction

On this page you can find example usages of java.util.Map.clear().

Prototype

void clear();

Documentation

Removes all of the mappings from this map (optional operation).
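Before the real-world usages below, here is a minimal, self-contained sketch of clear() behavior (the class and variable names are illustrative, not taken from any of the examples on this page):

import java.util.HashMap;
import java.util.Map;

public class MapClearDemo {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        counts.put("a", 1);
        counts.put("b", 2);

        // clear() removes every mapping; the map object itself stays usable
        counts.clear();

        System.out.println(counts.isEmpty()); // true
        System.out.println(counts.size());    // 0
    }
}

Note that clear() is an optional operation: unmodifiable maps (for example those returned by Collections.unmodifiableMap or Map.of) throw UnsupportedOperationException instead of emptying themselves.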

Usage

From source file:fragment.web.SupportControllerTest.java

@Test
public void testListTicketsPageAsMasterUser() throws Exception {
    Tenant otherTenant = tenantDAO.find(2L);
    User otherMasterUser = otherTenant.getOwner();
    userDAO.save(otherMasterUser);
    asUser(otherMasterUser);
    Tenant systemTenant = controller.getCurrentUser().getTenant();
    createTestTicket(3, otherTenant, otherMasterUser);
    asUser(user);
    systemTenant = controller.getCurrentUser().getTenant();
    String view = controller.listTicketsPage(systemTenant, tenant.getUuid(), "All", false, "", "", "", map,
            request);
    List<TicketStatus> listTicketStatus = new ArrayList<Ticket.TicketStatus>();
    listTicketStatus.add(TicketStatus.NEW);
    listTicketStatus.add(TicketStatus.CLOSED);
    listTicketStatus.add(TicketStatus.ESCALATED);
    listTicketStatus.add(TicketStatus.WORKING);

    List<User> users = new ArrayList<User>();
    users.add(user);
    Map<String, String> responseAttribute = new HashMap<String, String>();
    responseAttribute.put("queryLocator", "xyz");

    List<Ticket> tickets = supportService.list(0, 0, listTicketStatus, users, "", "", responseAttribute);

    Assert.assertEquals("support.tickets.list", view);
    Assert.assertTrue(map.containsKey("tickets"));
    Assert.assertTrue(map.containsValue(tickets));

    @SuppressWarnings("unchecked")
    List<String> list = (List<String>) map.get("tickets");
    Assert.assertEquals(5, list.size());

    asUser(otherMasterUser);
    responseAttribute.clear();
    view = controller.listTicketsPage(systemTenant, tenant.getUuid(), "All", false, "", "", "", map, request);
    Assert.assertEquals("support.tickets.list", view);
    Assert.assertTrue(map.containsKey("tickets"));

    @SuppressWarnings("unchecked")
    List<String> list1 = (List<String>) map.get("tickets");
    Assert.assertEquals(5, list1.size());
}

From source file:com.espertech.esper.epl.core.ResultSetProcessorRowPerGroupRollup.java

private UniformPair<EventBean[]> handleOutputLimitLastJoin(
        List<UniformPair<Set<MultiKey<EventBean>>>> viewEventsList, boolean generateSynthetic) {
    int oldEventCount = 0;
    if (prototype.isSelectRStream()) {
        rstreamEventSortArrayPair.reset();
    }

    for (Map<Object, EventBean[]> aGroupRepsView : outputLimitGroupRepsPerLevel) {
        aGroupRepsView.clear();
    }

    // outer loop is the events
    for (UniformPair<Set<MultiKey<EventBean>>> pair : viewEventsList) {
        Set<MultiKey<EventBean>> newData = pair.getFirst();
        Set<MultiKey<EventBean>> oldData = pair.getSecond();

        // apply to aggregates
        Object[] groupKeysPerLevel = new Object[prototype.getGroupByRollupDesc().getLevels().length];
        if (newData != null) {
            for (MultiKey<EventBean> aNewData : newData) {
                Object groupKeyComplete = generateGroupKey(aNewData.getArray(), true);
                for (AggregationGroupByRollupLevel level : prototype.getGroupByRollupDesc().getLevels()) {
                    Object groupKey = level.computeSubkey(groupKeyComplete);
                    groupKeysPerLevel[level.getLevelNumber()] = groupKey;
                    if (outputLimitGroupRepsPerLevel[level.getLevelNumber()].put(groupKey,
                            aNewData.getArray()) == null) {
                        if (prototype.isSelectRStream()) {
                            generateOutputBatched(false, groupKey, level, aNewData.getArray(), true,
                                    generateSynthetic, rstreamEventSortArrayPair.getEventsPerLevel(),
                                    rstreamEventSortArrayPair.getSortKeyPerLevel());
                            oldEventCount++;
                        }
                    }
                }
                aggregationService.applyEnter(aNewData.getArray(), groupKeysPerLevel, agentInstanceContext);
            }
        }
        if (oldData != null) {
            for (MultiKey<EventBean> anOldData : oldData) {
                Object groupKeyComplete = generateGroupKey(anOldData.getArray(), false);
                for (AggregationGroupByRollupLevel level : prototype.getGroupByRollupDesc().getLevels()) {
                    Object groupKey = level.computeSubkey(groupKeyComplete);
                    groupKeysPerLevel[level.getLevelNumber()] = groupKey;
                    if (outputLimitGroupRepsPerLevel[level.getLevelNumber()].put(groupKey,
                            anOldData.getArray()) == null) {
                        if (prototype.isSelectRStream()) {
                            generateOutputBatched(true, groupKey, level, anOldData.getArray(), true,
                                    generateSynthetic, rstreamEventSortArrayPair.getEventsPerLevel(),
                                    rstreamEventSortArrayPair.getSortKeyPerLevel());
                            oldEventCount++;
                        }
                    }
                }
                aggregationService.applyLeave(anOldData.getArray(), groupKeysPerLevel, agentInstanceContext);
            }
        }
    }

    return generateAndSort(outputLimitGroupRepsPerLevel, generateSynthetic, oldEventCount);
}

From source file:com.espertech.esper.epl.core.ResultSetProcessorRowPerGroupRollup.java

private UniformPair<EventBean[]> handleOutputLimitLastView(List<UniformPair<EventBean[]>> viewEventsList,
        boolean generateSynthetic) {
    int oldEventCount = 0;
    if (prototype.isSelectRStream()) {
        rstreamEventSortArrayPair.reset();
    }

    for (Map<Object, EventBean[]> aGroupRepsView : outputLimitGroupRepsPerLevel) {
        aGroupRepsView.clear();
    }

    // outer loop is the events
    for (UniformPair<EventBean[]> pair : viewEventsList) {
        EventBean[] newData = pair.getFirst();
        EventBean[] oldData = pair.getSecond();

        // apply to aggregates
        Object[] groupKeysPerLevel = new Object[prototype.getGroupByRollupDesc().getLevels().length];
        EventBean[] eventsPerStream;
        if (newData != null) {
            for (EventBean aNewData : newData) {
                eventsPerStream = new EventBean[] { aNewData };
                Object groupKeyComplete = generateGroupKey(eventsPerStream, true);
                for (AggregationGroupByRollupLevel level : prototype.getGroupByRollupDesc().getLevels()) {
                    Object groupKey = level.computeSubkey(groupKeyComplete);
                    groupKeysPerLevel[level.getLevelNumber()] = groupKey;
                    if (outputLimitGroupRepsPerLevel[level.getLevelNumber()].put(groupKey,
                            eventsPerStream) == null) {
                        if (prototype.isSelectRStream()) {
                            generateOutputBatched(false, groupKey, level, eventsPerStream, true,
                                    generateSynthetic, rstreamEventSortArrayPair.getEventsPerLevel(),
                                    rstreamEventSortArrayPair.getSortKeyPerLevel());
                            oldEventCount++;
                        }
                    }
                }
                aggregationService.applyEnter(eventsPerStream, groupKeysPerLevel, agentInstanceContext);
            }
        }
        if (oldData != null) {
            for (EventBean anOldData : oldData) {
                eventsPerStream = new EventBean[] { anOldData };
                Object groupKeyComplete = generateGroupKey(eventsPerStream, false);
                for (AggregationGroupByRollupLevel level : prototype.getGroupByRollupDesc().getLevels()) {
                    Object groupKey = level.computeSubkey(groupKeyComplete);
                    groupKeysPerLevel[level.getLevelNumber()] = groupKey;
                    if (outputLimitGroupRepsPerLevel[level.getLevelNumber()].put(groupKey,
                            eventsPerStream) == null) {
                        if (prototype.isSelectRStream()) {
                            generateOutputBatched(true, groupKey, level, eventsPerStream, true,
                                    generateSynthetic, rstreamEventSortArrayPair.getEventsPerLevel(),
                                    rstreamEventSortArrayPair.getSortKeyPerLevel());
                            oldEventCount++;
                        }
                    }
                }
                aggregationService.applyLeave(eventsPerStream, groupKeysPerLevel, agentInstanceContext);
            }
        }
    }

    return generateAndSort(outputLimitGroupRepsPerLevel, generateSynthetic, oldEventCount);
}

From source file:lineage2.gameserver.model.entity.Hero.java

/**
 * Method computeNewHeroes.
 * @param newHeroes List<StatsSet>
 * @return boolean
 */
public synchronized boolean computeNewHeroes(List<StatsSet> newHeroes) {
    if (newHeroes.size() == 0) {
        return true;
    }
    Map<Integer, StatsSet> heroes = new ConcurrentHashMap<>();
    boolean error = false;
    for (StatsSet hero : newHeroes) {
        int charId = hero.getInteger(Olympiad.CHAR_ID);
        if ((_completeHeroes != null) && _completeHeroes.containsKey(charId)) {
            StatsSet oldHero = _completeHeroes.get(charId);
            int count = oldHero.getInteger(COUNT);
            oldHero.set(COUNT, count + 1);
            oldHero.set(PLAYED, 1);
            oldHero.set(ACTIVE, 0);
            heroes.put(charId, oldHero);
        } else {
            StatsSet newHero = new StatsSet();
            newHero.set(Olympiad.CHAR_NAME, hero.getString(Olympiad.CHAR_NAME));
            newHero.set(Olympiad.CLASS_ID, hero.getInteger(Olympiad.CLASS_ID));
            newHero.set(COUNT, 1);
            newHero.set(PLAYED, 1);
            newHero.set(ACTIVE, 0);
            heroes.put(charId, newHero);
        }
        addHeroDiary(charId, HeroDiary.ACTION_HERO_GAINED, 0);
        loadDiary(charId);
    }
    _heroes.putAll(heroes);
    heroes.clear();
    updateHeroes(0);
    return error;
}

From source file:eu.brokeratcloud.fpr.model.DivaRoot.java

public void generateRule() {

    Map<String, Integer> priorities = new HashMap<String, Integer>();
    AdaptRule adaptRule = AdaptRule.INSTANCE;

    for (String r : adaptRule.involvedContext()) {
        Variable found = null;
        for (Variable v : root.getContext()) {
            if (v.getId().equals(r)) {
                found = v;
                break;
            }
        }
        if (found == null) {
            Variable v = DivaFactory.eINSTANCE.createBooleanVariable();
            v.setId(r);
            v.setName(r);
            root.getContext().add(v);
        }
    }

    List<String> propertyNames = ServiceAttribute.INSTANCE.listCommonAttributes();
    for (String name : adaptRule.allRuleNames()) {
        priorities.clear();
        for (String propName : propertyNames) {
            priorities.put(propName, adaptRule.getPriority(name, propName));
        }
        priorities.put("Failure", 4);
        priorities.put("Cost", 8);
        this.fillRule(name, adaptRule.getRule(name), priorities);
    }

}

From source file:gov.nih.nci.ncicb.tcga.dcc.dam.dao.DAMQueriesLevel2.java

private void generateFileInTransaction(final DataFileLevelTwoThree dataFile, final Writer writer)
        throws IOException {
    /*
    Set the variables for column names we'll need locally.  Local var is 6 times faster than a shared constant.
    If you change the select for HYBRIDIZATION_VALUE_QUERY please change these values and maintain column number order.
     */
    final Integer PROBE_NAME = 1;
    final Integer CHROMOSOME = 2;
    final Integer START_POSITION = 3;
    final Integer END_POSITION = 4;
    final Integer HYBRIDIZATION_REF_ID = 5;
    final Integer GROUP_COLUMN_NAME = 6;
    final Integer VALUE = 7;

    final String STRING = "-";
    // 1. gather barcodes and hyb_data_groups from database
    final List<String> hybDataGroupNames = gatherHybridizationDataGroupNames(
            dataFile.getDataSetsDP().iterator().next());
    final Map<String, Long> hybrefIdToBarcodeMap = getBarcodesForHybrefs(dataFile);

    // sort barcodes into the order we want to write them in the file (alphabetical)
    final String[] orderedBarcodes = new String[hybrefIdToBarcodeMap.size()];
    hybrefIdToBarcodeMap.keySet().toArray(orderedBarcodes);
    Arrays.sort(orderedBarcodes);

    int platformId = getPortalPlatformId(dataFile.getDataSetsDP().iterator().next());

    final boolean willHaveProbeConstants = getWillHaveProbeConstants(platformId);
    writeHeaders(writer, hybDataGroupNames, orderedBarcodes, willHaveProbeConstants);

    List<Object> queryBindValues = new ArrayList<Object>();
    String query = prepareQueryAndBindVariables(dataFile, queryBindValues, platformId);
    insertTempHybrefIds(dataFile.getHybRefIds());
    insertTempDataSetIds(dataFile.getDataSetsDP());
    final Map<String, String> currentRowValues = new HashMap<String, String>(); // keyed by "hybref_id.data_group_name"
    final String[] lastProbe = new String[] { null, null, null }; // done this way b/c used by inner class
    getJdbcTemplate().query(query, queryBindValues.toArray(), new RowCallbackHandler() {
        public void processRow(final ResultSet resultSet) throws SQLException {
            resultSet.setFetchSize(DEFAULT_FETCHSIZE);

            String currentProbe = resultSet.getString(PROBE_NAME);
            if (lastProbe[0] != null && !lastProbe[0].equals(currentProbe)) {
                // this result set is the start of a new row, so write the old one
                try {
                    writeDataRow(lastProbe, currentRowValues, orderedBarcodes, hybrefIdToBarcodeMap,
                            hybDataGroupNames, writer, willHaveProbeConstants);
                    currentRowValues.clear();
                } catch (IOException e) {
                    getLogger().logError(e);
                    throw new DataAccessException(e.getMessage(), e) {
                    };
                }
            }

            // store this value in the values map, keyed by combination of hybrefid and datagroup name
            final String key = resultSet.getLong(HYBRIDIZATION_REF_ID) + "."
                    + resultSet.getString(GROUP_COLUMN_NAME);
            currentRowValues.put(key, resultSet.getString(VALUE));
            lastProbe[0] = currentProbe;
            lastProbe[1] = resultSet.getString(CHROMOSOME);
            lastProbe[2] = resultSet.getString(START_POSITION) + STRING + resultSet.getString(END_POSITION);
        }
    });
    // write last row!
    if (lastProbe[0] != null) {
        writeDataRow(lastProbe, currentRowValues, orderedBarcodes, hybrefIdToBarcodeMap, hybDataGroupNames,
                writer, willHaveProbeConstants);
    }
}

From source file:dao.LineageDAO.java

public static JsonNode renderGraph(LineagePathInfo pathInfo, String urn, int upLevel, int downLevel,
        List<LineageNode> allSourceNodes, List<LineageNode> allTargetNodes,
        Map<Long, List<LineageNode>> addedSourceNodes, Map<Long, List<LineageNode>> addedTargetNodes,
        Map<Long, LineageNode> addedJobNodes, List<LineageNode> nodes, List<LineageEdge> edges) {
    ObjectNode resultNode = Json.newObject();
    String message = null;
    Map<String, LineageNode> addedSourceDataNodes = new HashMap<String, LineageNode>();
    Map<String, LineageNode> toBeClearedSourceDataNodes = new HashMap<String, LineageNode>();
    Map<String, List<LineageNode>> addedTargetDataNodes = new HashMap<String, List<LineageNode>>();
    Map<String, List<LineageNode>> toBeConvertedSourceDataNodes = new HashMap<String, List<LineageNode>>();
    message = "No lineage information found for this dataset";
    if (allSourceNodes.size() == 0 && allTargetNodes.size() == 0) {
        LineageNode node = new LineageNode();
        node.id = nodes.size();
        node._sort_list = new ArrayList<String>();
        node.node_type = "data";
        node.abstracted_path = pathInfo.filePath;
        node.storage_type = pathInfo.storageType;
        node._sort_list.add("abstracted_path");
        node._sort_list.add("storage_type");
        node._sort_list.add("urn");
        node.urn = urn;
        nodes.add(node);
    } else {
        message = "Found lineage information";
        for (int i = 0; i < Math.max(upLevel, downLevel); i++) {
            if (i < upLevel) {
                if (toBeConvertedSourceDataNodes.size() > 0) {
                    for (Map.Entry<String, List<LineageNode>> mapEntry : toBeConvertedSourceDataNodes
                            .entrySet()) {
                        List<LineageNode> hashedNodes = addedTargetDataNodes.get(mapEntry.getKey());
                        List<LineageNode> list = mapEntry.getValue();
                        if (list != null && list.size() > 0) {
                            if (hashedNodes == null) {
                                hashedNodes = new ArrayList<LineageNode>();
                            }
                            hashedNodes.addAll(list);
                            addedTargetDataNodes.put(mapEntry.getKey(), hashedNodes);
                        }

                    }
                    toBeConvertedSourceDataNodes.clear();
                }
                if (addedSourceDataNodes.size() > 0) {
                    toBeClearedSourceDataNodes.putAll(addedSourceDataNodes);
                    addedSourceDataNodes.clear();
                }

                for (LineageNode job : addedJobNodes.values()) {
                    if (job.level != i) {
                        continue;
                    }
                    job.id = nodes.size();
                    nodes.add(job);
                    addTargetNode(job, addedTargetNodes, addedTargetDataNodes, toBeClearedSourceDataNodes,
                            nodes, edges, true);

                    addSourceNode(job, addedSourceNodes, addedTargetDataNodes, addedSourceDataNodes,
                            toBeConvertedSourceDataNodes, nodes, edges, true);
                }
            }
            if ((i > 0) && (i < downLevel)) {
                for (LineageNode job : addedJobNodes.values()) {
                    if (job.level != -i) {
                        continue;
                    }
                    job.id = nodes.size();
                    nodes.add(job);
                    addTargetNode(job, addedTargetNodes, addedTargetDataNodes, toBeClearedSourceDataNodes,
                            nodes, edges, false);

                    addSourceNode(job, addedSourceNodes, addedTargetDataNodes, addedSourceDataNodes,
                            toBeConvertedSourceDataNodes, nodes, edges, false);
                }
            }
        }
    }
    resultNode.set("nodes", Json.toJson(nodes));
    resultNode.set("links", Json.toJson(edges));
    resultNode.put("urn", urn);
    resultNode.put("message", message);
    return resultNode;

}

From source file:ca.sqlpower.architect.ddl.LiquibaseDDLGenerator.java

/**
 * Adds a statement for creating the given foreign key relationship in
 * the target database.  Depends on the {@link #getDeferrabilityClause(SQLRelationship)}
 * method for the target database's way of describing the deferrability policy.
 */
public void addRelationship(SQLRelationship r) {
    StringBuilder sql = new StringBuilder();
    StringBuilder errorMsg = new StringBuilder();

    StringBuilder typesMismatchMsg = new StringBuilder();

    sql.append("<addForeignKeyConstraint ");
    sql.append(getTableQualifier(r.getFkTable(), "baseTableName", "baseSchemaName"));
    sql.append(" constraintName=\"");
    sql.append(getName(r));
    sql.append("\" baseColumnNames=\"");
    Map<String, SQLObject> colNameMap = new HashMap<String, SQLObject>();
    boolean firstColumn = true;

    for (ColumnMapping cm : r.getChildren(ColumnMapping.class)) {
        SQLColumn c = cm.getFkColumn();
        // make sure this is unique
        if (colNameMap.get(c.getName()) == null) {
            if (firstColumn) {
                firstColumn = false;
                sql.append(createPhysicalName(colNameMap, c));
            } else {
                sql.append(", " + createPhysicalName(colNameMap, c));
            }
            colNameMap.put(c.getName(), c);
        }
    }
    sql.append("\"");

    sql.append(" referencedTableName=\"");
    sql.append(toQualifiedName(r.getPkTable()));
    sql.append("\" referencedColumnNames=\"");
    colNameMap.clear();
    firstColumn = true;

    if (r.getChildren().isEmpty()) {
        errorMsg.append("Warning: Relationship has no columns to map:\n");
    }

    for (ColumnMapping cm : r.getChildren(ColumnMapping.class)) {
        SQLColumn c = cm.getPkColumn();
        SQLColumn fkCol = cm.getFkColumn();

        // checks the fk column and pk column are the same type,
        // generates DDLWarning if not the same.
        if (ArchitectUtils.columnTypesDiffer(c.getType(), fkCol.getType())) {
            typesMismatchMsg.append("        " + c + " -- " + fkCol + "\n");
        }
        // make sure this is unique
        if (colNameMap.get(c.getName()) == null) {
            if (firstColumn) {
                firstColumn = false;
                sql.append(createPhysicalName(colNameMap, c));
            } else {
                sql.append(", " + createPhysicalName(colNameMap, c));
            }
            colNameMap.put(c.getName(), c);
        }
    }

    sql.append("\"");

    // adds to error msg if there were types mismatch
    if (typesMismatchMsg.length() != 0) {
        errorMsg.append("Warning: Column types mismatch in the following column mapping(s):\n");
        errorMsg.append(typesMismatchMsg.toString());
    }

    if (!"NO ACTION".equals(getDeleteActionClause(r))) {
        sql.append(" onDelete=\"");
        sql.append(getDeleteActionClause(r));
        sql.append("\"");
    }
    if (!"NO ACTION".equals(getUpdateActionClause(r))) {
        sql.append(" onUpdate=\"");
        sql.append(getUpdateActionClause(r));
        sql.append("\"");
    }

    if (isDeferrable(r)) {
        sql.append(" deferrable=\"");
        sql.append(isDeferrable(r));
        sql.append("\"");

        sql.append(" initiallyDeferred=\"");
        sql.append(isInitiallyDeferred(r));
        sql.append("\"");
    }

    sql.append("/>");
    startOfStatement();
    println(sql.toString());
    endOfStatement();
    endStatement(StatementType.XMLTAG, r);
}

From source file:com.gemstone.gemfire.management.internal.cli.GfshParser.java

public String obtainHelp(String userInput, Set<String> commandNames) {
    final boolean withinShell = commandNames == null || commandNames.isEmpty();
    final String appName = withinShell ? "" : HelpUtils.EXE_PREFIX_FOR_EXTERNAL_HELP;

    StringBuilder helpText = new StringBuilder();
    try {
        if (userInput == null) {
            userInput = "";
        }

        Map<Short, List<CommandTarget>> matchingCommandsMap = findMatchingCommands(userInput, commandNames);
        List<CommandTarget> exactCommandTargets = matchingCommandsMap.get(EXACT_TARGET);
        List<CommandTarget> matchingCommandTargets = matchingCommandsMap.get(MATCHING_TARGETS);
        matchingCommandsMap.clear();

        if (exactCommandTargets.isEmpty() && matchingCommandTargets.isEmpty()) {
            // No matching commands
            helpText.append(CliStrings.GFSHPARSER__MSG__NO_MATCHING_COMMAND).append(GfshParser.LINE_SEPARATOR);
        } else {
            if (exactCommandTargets.size() == 1) {
                helpText.append(obtainCommandSpecificHelp(exactCommandTargets.get(0), withinShell));
                if (!matchingCommandTargets.isEmpty()) {
                    helpText.append(GfshParser.LINE_SEPARATOR);
                    helpText.append(CliStrings
                            .format(CliStrings.GFSHPARSER__MSG__OTHER_COMMANDS_STARTING_WITH_0_ARE, userInput));
                    for (int i = 0; i < matchingCommandTargets.size(); i++) {
                        CommandTarget commandTarget = matchingCommandTargets.get(i);
                        helpText.append(commandTarget.getCommandName());
                        if (i < matchingCommandTargets.size() - 1) {
                            helpText.append(", ");
                        }
                    }
                    helpText.append(GfshParser.LINE_SEPARATOR);
                }
            } else {
                List<CommandTarget> commandTargets = new ArrayList<CommandTarget>();
                commandTargets.addAll(exactCommandTargets);
                commandTargets.addAll(matchingCommandTargets);
                for (CommandTarget commandTarget : commandTargets) {
                    String availability = commandTarget.isAvailable() ? HelpUtils.HELP__COMMAND_AVAILABLE
                            : HelpUtils.HELP__COMMAND_NOTAVAILABLE;
                    // Many matching commands, provide one line description
                    helpText.append(commandTarget.getCommandName());
                    if (withinShell) {
                        helpText.append(" (").append(availability).append(")");
                    }
                    helpText.append(GfshParser.LINE_SEPARATOR);
                    helpText.append(Gfsh.wrapText(commandTarget.getCommandHelp(), 1))
                            .append(GfshParser.LINE_SEPARATOR);
                }
                helpText.append(GfshParser.LINE_SEPARATOR);

                if (withinShell) {
                    helpText.append(Gfsh.wrapText(CliStrings.format(
                            CliStrings.GFSHPARSER__MSG__USE_0_HELP_COMMAND_TODISPLAY_DETAILS, appName), 0))
                            .append(GfshParser.LINE_SEPARATOR);
                    helpText.append(Gfsh.wrapText(
                            CliStrings.format(CliStrings.GFSHPARSER__MSG__HELP_CAN_ALSO_BE_OBTAINED_BY_0_KEY,
                                    AbstractShell.completionKeys),
                            0));
                }
            }
        }
    } catch (IllegalArgumentException e) {
        logWrapper.warning(CliUtil.stackTraceAsString(e));
    } catch (IllegalAccessException e) {
        logWrapper.warning(CliUtil.stackTraceAsString(e));
    } catch (InvocationTargetException e) {
        logWrapper.warning(CliUtil.stackTraceAsString(e));
    }
    return helpText.toString();
}

From source file:com.streamsets.pipeline.lib.parser.log.LogCharDataParser.java

@Override
public Record parse() throws IOException, DataParserException {
    reader.resetCount();

    //In order to detect stack traces / multi-line error messages, the parser reads the next line and attempts
    //a pattern match. If it fails, the line is treated as a stack trace and associated with the previous line.
    //If the pattern matches, it is a valid log line and is saved for the next round.

    Record record = null;

    //Check if EOF encountered in the previous round
    if (previousLine.length() == 0 && previousRead == -1) {
        //EOF encountered previous round, return null
        currentOffset = -1;
        return record;
    }

    //Check if a line was read and saved from the previous round
    if (previousLine.length() > 0) {
        record = createRecordFromPreviousLine();
        //update the current offset. This is what gets returned by the produce API.
        currentOffset = reader.getPos();
        //check if the EOF was reached in the previous read and update the offset accordingly
        if (previousRead == -1) {
            currentOffset = -1;
        }
    }

    //read the next line
    currentLine.setLength(0);
    Map<String, Field> fieldsFromLogLine = new LinkedHashMap<>();
    StringBuilder stackTrace = new StringBuilder();
    int read = readAhead(fieldsFromLogLine, stackTrace);

    //Use the data from the read line if there is no saved data from the previous round.
    if (record == null && !fieldsFromLogLine.isEmpty()) {
        record = context.createRecord(readerId + "::" + currentOffset);
        //create field for the record
        Map<String, Field> map = new HashMap<>();
        if (retainOriginalText) {
            map.put(TEXT_FIELD_NAME, Field.create(currentLine.toString()));
        }
        if (isTruncated(read)) {
            map.put(TRUNCATED_FIELD_NAME, Field.create(true));
        }
        map.putAll(fieldsFromLogLine);
        record.set(Field.create(map));
        //Since there was no previously saved line, the current offset must be updated to the current reader position
        currentOffset = reader.getPos();
        if (read == -1) {
            currentOffset = -1;
        }

        //store already read line for the next iteration
        fieldsFromPrevLine.clear();
        previousLine.setLength(0);
        fieldsFromPrevLine.putAll(fieldsFromLogLine);
        previousLine.append(currentLine.toString());
        previousRead = read;

        //read ahead since there was no line from the previous round
        currentLine.setLength(0);
        fieldsFromLogLine.clear();
        stackTrace.setLength(0);
        read = readAhead(fieldsFromLogLine, stackTrace);
    }

    //check if a stack trace was found during read ahead
    if (stackTrace.length() > 0) {
        //associate it with the last field in the previously read line
        Field messageField = record.get("/" + Constants.MESSAGE);
        if (messageField != null) {
            Field originalMessage = messageField;
            Field newMessage = Field.create(originalMessage.getValueAsString() + "\n" + stackTrace.toString());
            record.set("/" + Constants.MESSAGE, newMessage);
        }
        //update the originalLine if required
        if (record.has("/" + TEXT_FIELD_NAME)) {
            Field originalLine = record.get("/" + TEXT_FIELD_NAME);
            Field newLine = Field.create(originalLine.getValueAsString() + "\n" + stackTrace.toString());
            record.set("/" + TEXT_FIELD_NAME, newLine);
        }
        //if EOF was reached while reading the stack trace, update the current offset
        if (read == -1) {
            currentOffset = -1;
        }
    }

    //store already read line for the next iteration
    fieldsFromPrevLine.clear();
    previousLine.setLength(0);
    fieldsFromPrevLine.putAll(fieldsFromLogLine);
    previousLine.append(currentLine.toString());
    previousRead = read;
    return record;
}