Example usage for java.text ParseException toString

Introduction

This page collects example usages of the java.text ParseException toString() method from open-source projects.

Prototype

public String toString() 

Document

Returns a short description of this throwable.
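
A minimal standalone sketch (not taken from the projects below) showing what toString() typically returns when a parse fails; the date pattern and input string are arbitrary choices made for illustration:

import java.text.ParseException;
import java.text.SimpleDateFormat;

public class ParseExceptionToStringDemo {
    public static void main(String[] args) {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        try {
            sdf.parse("not-a-date");
        } catch (ParseException e) {
            // Typically prints something like:
            // java.text.ParseException: Unparseable date: "not-a-date"
            System.out.println(e.toString());
        }
    }
}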

Usage

From source file:view.App.java

private void renderChart(SpreadObject obj) {
    try {
        jPanel3.removeAll();
        jPanel3.revalidate();
        chart = ChartFactory.createTimeSeriesChart(
                obj.getBaseStock().getTicker() + " : " + obj.getSecondStock().getTicker(), "date",
                "spread ratio", createDataset(obj), true, true, false);
    } catch (ParseException e) {
        System.out.println("    " + e.toString());
    }

    final XYPlot plot = chart.getXYPlot();

    XYLineAndShapeRenderer renderer = new XYLineAndShapeRenderer();
    renderer.setSeriesShape(0, new Ellipse2D.Double(-3, -3, 0, 0));
    renderer.setSeriesPaint(0, Color.RED);
    plot.setRenderer(renderer);

    axis = (DateAxis) plot.getDomainAxis();
    axis.setDateFormatOverride(new SimpleDateFormat("dd-MM-yyyy"));

    ChartPanel chartPanel = new ChartPanel(chart);
    chartPanel.setDomainZoomable(true);
    chartPanel.setRangeZoomable(false);
    Border border = BorderFactory.createCompoundBorder(BorderFactory.createEmptyBorder(4, 4, 4, 4),
            BorderFactory.createEtchedBorder());
    chartPanel.setBorder(border);

    jPanel3.add(chartPanel);
    jPanel3.revalidate();
}

From source file:org.kuali.test.runner.execution.AbstractOperationExecution.java

/**
 * @param testWrapper
 * @param cp
 * @return
 * @throws TestException 
 */
protected boolean evaluateCheckpointProperty(KualiTestWrapper testWrapper, CheckpointProperty cp)
        throws TestException {
    boolean retval = false;

    try {
        Object comparisonValue = getComparisonValue(cp);
        ComparisonOperator.Enum comparisonOperator = cp.getOperator();
        Object value = getValueForType(cp.getActualValue(), cp.getValueType());

        if (ComparisonOperator.NULL.equals(cp.getOperator())) {
            retval = ((value == null) && (comparisonValue == null));
        } else if ((value == null) || (comparisonValue == null)) {
            if (((cp.getOperator() == null) || ComparisonOperator.EQUAL_TO.equals(cp.getOperator()))
                    && (value == null) && (comparisonValue == null)) {
                retval = true;
            } else {
                throw new TestException("input value is null, comparison value = " + comparisonValue, op,
                        cp.getOnFailure());
            }
        } else {
            ValueType.Enum type = cp.getValueType();

            if (type == null) {
                type = ValueType.STRING;
            }

            ValueType.Enum inputType = getInputValueType(value);
            if (type.equals(inputType)) {
                if (ComparisonOperator.IN.equals(cp.getOperator()) && (comparisonValue instanceof List)) {
                    Iterator<Comparable> it = ((List) comparisonValue).iterator();

                    while (it.hasNext()) {
                        if (it.next().equals(value)) {
                            retval = true;
                            break;
                        }
                    }
                } else {
                    if (ValueType.STRING.equals(type)) {
                        String s1 = (String) comparisonValue;
                        String s2 = (String) value;
                        if (StringUtils.isNotBlank(s1)) {
                            comparisonValue = s1.trim();
                        }

                        if (StringUtils.isNotBlank(s2)) {
                            value = s2.trim();
                        }
                    }
                    Comparable c1 = (Comparable) comparisonValue;
                    Comparable c2 = (Comparable) value;

                    switch (comparisonOperator.intValue()) {
                    case ComparisonOperator.INT_EQUAL_TO:
                        retval = c1.equals(c2);
                        break;
                    case ComparisonOperator.INT_GREATER_THAN:
                        retval = (c1.compareTo(c2) < 0);
                        break;
                    case ComparisonOperator.INT_GREATER_THAN_OR_EQUAL:
                        retval = (c1.compareTo(c2) <= 0);
                        break;
                    case ComparisonOperator.INT_LESS_THAN:
                        retval = (c1.compareTo(c2) > 0);
                        break;
                    case ComparisonOperator.INT_LESS_THAN_OR_EQUAL:
                        retval = (c1.compareTo(c2) >= 0);
                        break;
                    case ComparisonOperator.INT_BETWEEN:

                        break;
                    case ComparisonOperator.INT_NOT_NULL:
                        retval = true;
                        break;
                    }
                }
            } else {
                throw new TestException("input type (" + inputType + ") comparison type (" + type + ") mismatch - "
                        + comparisonValue, op, cp.getOnFailure());
            }
        }
    } catch (ParseException ex) {
        throw new TestException(
                "Exception occurred while parsing data for checkpoint comparison - " + ex.toString(), op, ex);
    }

    return retval;
}

From source file:com.envirover.spl.stream.RockBlockService.java

@POST
@Produces(MediaType.TEXT_HTML)
public String postMobileOriginatedMessage(@FormParam(PARAM_IMEI) String imei,
        @FormParam(PARAM_MOMSN) String momsn, @FormParam(PARAM_TRANSMIT_TIME) String transmitTime,
        @FormParam(PARAM_IRIDIUM_LATITUDE) String iridiumLatitude,
        @FormParam(PARAM_IRIDIUM_LONGITUDE) String iridiumLongitude,
        @FormParam(PARAM_IRIDIUM_CEP) String iridiumCep, @FormParam(PARAM_DATA) String data) {

    Date time = new Date();

    try {
        //Time stamp like '17-04-03 02:11:35'
        SimpleDateFormat sdf = new SimpleDateFormat("yy-MM-dd HH:mm:ss");
        time = sdf.parse(transmitTime);
    } catch (ParseException e) {
        e.printStackTrace();
    }

    try {
        MAVLinkPacket packet = getPacket(data);

        if (packet != null) {
            MAVLinkOutputStream stream = MAVLinkOutputStreamFactory.getMAVLinkOutputStream();

            stream.writePacket(imei, time, packet);
        } else {
            logger.warning("Invalid MAVLink message received: " + data);
        }
    } catch (DecoderException e) {
        logger.severe(e.toString());
    } catch (IOException e) {
        logger.severe(e.toString());
    }

    return "";
}

From source file:gov.nasa.ensemble.common.io.RemotableFile.java

private void setModifiedTime(HttpGet get) {
    try {
        String mod = get.getLastHeader(HttpHeaders.LAST_MODIFIED).getValue();
        //DateFormat does not seem thread safe, so we need a new instance each time
        DateFormat df = new SimpleDateFormat(DATE_FORMAT_STR);
        long resp_mod_time = df.parse(mod).getTime();
        if (resp_mod_time > 0) {
            localFile.setLastModified(resp_mod_time);
        } else {
            //usually caused by unsafe thread use of DateFormat
            trace.error("Negative Time. ");
        }
    } catch (ParseException e) {
        trace.error("Error parsing date from the http response: " + e.toString());
    }
}

From source file:org.ofbiz.common.CommonServices.java

public static Map<String, Object> dbfConversionTool(DispatchContext ctx,
        Map<String, ? extends Object> context) {
    Delegator delegator = ctx.getDelegator();
    LocalDispatcher dispatcher = ctx.getDispatcher();
    GenericValue userLogin = (GenericValue) context.get("userLogin");
    String ofbizHome = System.getProperty("ofbiz.home");
    String fileLocation = (String) context.get("fileLocation");
    String fileName = (String) context.get("fileName");
    // let us create field definitions first
    // we will go for 5 fields
    String filePath = fileLocation + "/" + fileName;
    String dbfFilePath = filePath.replace(".csv", ".DBF");
    try {
        File csvFile = new File(filePath);
        BufferedReader buffReader = new BufferedReader(new FileReader(csvFile));
        String line;
        line = buffReader.readLine();
        if (line == null || line.isEmpty()) {
            System.err.println("Input file '" + csvFile + "' is empty");
            System.err.flush();
            throw new IOException("Input file '" + csvFile + "' is empty");
        }
        ArrayList procLine = preProcessEmbedCommas(line);
        line = (String) procLine.get(0);
        line = line.replaceAll("\"", "");
        if (line.endsWith(",")) {
            line = line.substring(0, (line.length() - 1));

        }

        String[] nameTokens = line.split(",");
        Map<String, Map<String, Object>> fieldNameTypeMap = FastMap.newInstance();
        int fieldSize = 0;
        for (int i = 0; i < nameTokens.length; i++) {
            String entityField = nameTokens[i];
            Map fieldTypeMap = FastMap.newInstance();
            int pos = entityField.indexOf('_');
            if (pos == -1) {
                System.err
                        .println("Input file '" + csvFile + "': entityField '" + entityField + "' missing _ ");
                System.err.flush();
                throw new IOException(
                        "Input file '" + csvFile + "': entityField '" + entityField + "' missing _ ");
            }
            String fieldName = entityField.substring(0, pos);
            String fieldType = entityField.substring(pos + 1, entityField.length());
            // if field type String like (fieldName_C%L(length)%D(no.of Decimals))

            fieldTypeMap.put("type", fieldType);

            String[] fieldTypeTokens = fieldType.split("%");
            if (fieldTypeTokens.length > 0) {
                fieldTypeMap.put("type", fieldTypeTokens[0]);
                if (fieldTypeTokens.length > 1) {
                    fieldTypeMap.put("length", fieldTypeTokens[1].replace("L", ""));
                    if (fieldTypeTokens.length > 2) {
                        fieldTypeMap.put("decimal", fieldTypeTokens[2].replace("D", ""));
                    }
                }
            }
            Debug.logInfo("fieldTypeMap================" + fieldTypeMap, module);
            fieldNameTypeMap.put(fieldName, fieldTypeMap);
            fieldSize++;
        }
        ArrayList<ArrayList<String>> fieldValuesRows = new ArrayList<ArrayList<String>>();
        int rowNum = 1;

        while ((line = buffReader.readLine()) != null) {
            rowNum++;
            ArrayList arr = preProcessEmbedCommas(line);
            line = (String) arr.get(0);
            line = line.replaceAll("\"", "");
            if (line.endsWith(",")) {
                line = line.substring(0, (line.length() - 1));
            }

            String[] valueTokens = line.split(",", -1);
            if (valueTokens.length == 0 || line.isEmpty()) {
                // for now we'll log and just skip this row. TODO can we do
                // better?
                System.out.println(
                        "row '" + rowNum + "' {" + line + "}: " + "has empty values, skipping this row...");
                System.out.flush();
                continue;
            }
            String checkForWhiteSpace = line.trim();
            if (checkForWhiteSpace.isEmpty()) {
                // for now we'll log and just skip this row. TODO can we do
                // better?
                System.out.println(
                        "row '" + rowNum + "' {" + line + "}: " + "has empty values, skipping this row...");
                System.out.flush();
                continue;
            }
            ArrayList<String> fieldValues = new ArrayList<String>();
            for (int i = 0; i < valueTokens.length; i++) {
                String tempField;
                tempField = valueTokens[i];

                fieldValues.add(tempField);
            }
            // sanity check
            if (fieldSize != fieldValues.size()) {
                System.err.println("row '" + rowNum + "' {" + line + "}: " + "fieldSize (" + fieldSize
                        + ") and fieldValues (" + fieldValues.size() + ") - size mismatch " + line.length());
                System.err.flush();
                throw new IOException("While processing File  '" + csvFile.getName() + "' :: row '" + rowNum
                        + "' {" + line + "}: " + "fieldSize (" + fieldSize + ") and fieldValues ("
                        + fieldValues.size() + ") - size mismatch " + line.length());
            }

            fieldValuesRows.add(fieldValues);
        }

        Debug.logInfo("fieldSize===========" + fieldSize, module);
        DBFField fields[] = new DBFField[fieldSize];
        int fieldCount = 0;
        Map fieldTypeSequenceMap = FastMap.newInstance();
        for (Map.Entry<String, Map<String, Object>> fieldNameType : fieldNameTypeMap.entrySet()) {
            String fieldName = fieldNameType.getKey();
            Map fieldTypeMap = (Map) fieldNameType.getValue();
            String fieldType = (String) fieldTypeMap.get("type");
            fields[fieldCount] = new DBFField();
            fields[fieldCount].setName(fieldName);
            if (UtilValidate.isNotEmpty(fieldTypeMap.get("length"))) {
                fields[fieldCount].setFieldLength(Integer.parseInt(fieldTypeMap.get("length").toString()));
            }
            if (UtilValidate.isNotEmpty(fieldTypeMap.get("decimal"))) {
                fields[fieldCount].setDecimalCount(Integer.parseInt(fieldTypeMap.get("decimal").toString()));
            }
            fieldTypeSequenceMap.put(fieldCount, fieldType);

            if ("C".equals(fieldType)) {
                fields[fieldCount].setDataType(DBFField.FIELD_TYPE_C);
            } else if ("N".equals(fieldType)) {
                fields[fieldCount].setDataType(DBFField.FIELD_TYPE_N);
            } else if ("F".equals(fieldType)) {
                fields[fieldCount].setDataType(DBFField.FIELD_TYPE_F);

            } else if ("L".equals(fieldType)) {
                fields[fieldCount].setDataType(DBFField.FIELD_TYPE_L);
            } else if ("D".equals(fieldType)) {
                fields[fieldCount].setDataType(DBFField.FIELD_TYPE_D);
            }
            fieldCount++;

        }

        DBFWriter writer = new DBFWriter();
        writer.setFields(fields);

        // now populate DBFWriter
        for (int i = 0; i < fieldValuesRows.size(); i++) {
            List<String> fieldValueList = fieldValuesRows.get(i);
            Debug.logInfo("fieldValueList===========" + fieldValueList, module);
            Object rowData[] = new Object[fieldSize];
            for (int j = 0; j < fieldValueList.size(); j++) {
                String fieldType = (String) fieldTypeSequenceMap.get(j);

                if (UtilValidate.isNotEmpty(fieldValueList.get(j))) {
                    if ("C".equals(fieldType)) {
                        rowData[j] = fieldValueList.get(j);
                    } else if ("N".equals(fieldType)) {
                        rowData[j] = new Double(fieldValueList.get(j));

                    } else if ("F".equals(fieldType)) {
                        rowData[j] = new Double(fieldValueList.get(j));
                    } else if ("L".equals(fieldType)) {
                        rowData[j] = Boolean.TRUE;
                        if ((fieldValueList.get(j)).equalsIgnoreCase("false")) {
                            rowData[j] = Boolean.FALSE;
                        }
                    } else if ("D".equals(fieldType)) {
                        Date tempDate = null;
                        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                        try {
                            tempDate = new java.sql.Date(sdf.parse(fieldValueList.get(j)).getTime());
                        } catch (ParseException e) {
                            Debug.logError(e, "Cannot parse date string: " + fieldValueList.get(j), module);
                        }
                        rowData[j] = tempDate;
                    }
                }

            }
            writer.addRecord(rowData);
        }

        FileOutputStream fos = new FileOutputStream(dbfFilePath);
        writer.write(fos);
        fos.close();

    } catch (Exception e) {
        String errMsg = "There was an error creating DBF file: " + e.toString();
        Debug.logError(errMsg, module);
        return ServiceUtil.returnError(errMsg);
    }
    Map<String, Object> result = ServiceUtil.returnSuccess();
    result.put("outputFile", dbfFilePath);
    return result;
}

From source file:org.kiji.mapreduce.lib.bulkimport.CSVBulkImporter.java

/** {@inheritDoc} */
@Override
public void produce(Text value, KijiTableContext context) throws IOException {
    // This is the header line since fieldList isn't populated
    if (mFieldMap == null) {
        List<String> fields = null;
        try {
            fields = split(value.toString());
        } catch (ParseException pe) {
            LOG.error("Unable to parse header row: {} with exception {}", value.toString(), pe.getMessage());
            throw new IOException("Unable to parse header row: " + value.toString());
        }
        initializeHeader(fields);
        // Don't actually import this line
        return;
    }

    List<String> fields = null;
    try {
        fields = split(value.toString());
    } catch (ParseException pe) {
        reject(value, context, pe.toString());
        return;
    }
    for (KijiColumnName kijiColumnName : getDestinationColumns()) {
        final EntityId eid = getEntityId(fields, context);
        String source = getSource(kijiColumnName);

        if (mFieldMap.get(source) < fields.size()) {
            String fieldValue = fields.get(mFieldMap.get(source));
            context.put(eid, kijiColumnName.getFamily(), kijiColumnName.getQualifier(), fieldValue);
        } else {
            incomplete(value, context, "Detected trailing empty field: " + source);
        }
    }

}

From source file:com.moz.fiji.mapreduce.lib.bulkimport.CSVBulkImporter.java

/** {@inheritDoc} */
@Override
public void produce(Text value, FijiTableContext context) throws IOException {
    // This is the header line since fieldList isn't populated
    if (mFieldMap == null) {
        List<String> fields = null;
        try {
            fields = split(value.toString());
        } catch (ParseException pe) {
            LOG.error("Unable to parse header row: {} with exception {}", value.toString(), pe.getMessage());
            throw new IOException("Unable to parse header row: " + value.toString());
        }
        initializeHeader(fields);
        // Don't actually import this line
        return;
    }

    List<String> fields = null;
    try {
        fields = split(value.toString());
    } catch (ParseException pe) {
        reject(value, context, pe.toString());
        return;
    }

    List<String> emptyFields = Lists.newArrayList();
    for (FijiColumnName fijiColumnName : getDestinationColumns()) {
        final EntityId eid = getEntityId(fields, context);
        String source = getSource(fijiColumnName);

        if (mFieldMap.get(source) < fields.size()) {
            String fieldValue = fields.get(mFieldMap.get(source));
            if (!fieldValue.isEmpty()) {
                String family = fijiColumnName.getFamily();
                String qualifier = fijiColumnName.getQualifier();
                if (isOverrideTimestamp()) {
                    // Override the timestamp from the imported source
                    Long timestamp = getTimestamp(fields);
                    context.put(eid, family, qualifier, timestamp, convert(fijiColumnName, fieldValue));
                } else {
                    // Use the system time as the timestamp
                    context.put(eid, family, qualifier, convert(fijiColumnName, fieldValue));
                }
            } else {
                emptyFields.add(source);
            }
        }
    }
    if (!emptyFields.isEmpty()) {
        incomplete(value, context, "Record is missing fields: " + StringUtils.join(emptyFields, ","));
    }

}

From source file:org.msec.LogQuery.java

public org.msec.LogsysRsp.QueryLogRsp queryRecords(int logLevel, Map<String, String> headerFilter,
        int maxRetNum, String startDate, String endDate, String startTime, String endTime,
        String whereCondition) {
    long timeStart = System.currentTimeMillis();
    String where = "";
    String querySql = "";
    Iterator<Map.Entry<String, String>> entries = headerFilter.entrySet().iterator();
    Map.Entry<String, String> entry;
    while (entries.hasNext()) {
        if (!where.isEmpty()) {
            where += " and ";
        } else {
            where += " where ";
        }
        entry = entries.next();
        where += entry.getKey() + " = \'" + entry.getValue() + "\'";
    }

    if (whereCondition != null && !whereCondition.isEmpty()) {
        if (!where.isEmpty()) {
            where += " and " + whereCondition;
        } else {
            where += " where " + whereCondition;
        }
    }

    List<String> tableList = null;
    String sql;
    List<String> logsHeads = new ArrayList<String>();
    List<Object> logRecords = new ArrayList<Object>();

    int totalRetNum = 0;
    int currentCnt = 0;
    int ret = 0;
    org.msec.LogsysRsp.QueryLogRsp rsp = new org.msec.LogsysRsp.QueryLogRsp();
    rsp.setRet(0);
    rsp.setErrmsg("Succeed.");

    try {
        tableList = getTableList(startDate, endDate);

        logger.info("Timecost for query process phase1: " + (System.currentTimeMillis() - timeStart) + "ms");
        timeStart = System.currentTimeMillis();

        for (int tableIndex = 0; tableIndex < tableList.size(); tableIndex++) {
            sql = "select * from " + tableList.get(tableIndex) + where;
            if (tableIndex == 0) {
                if (sql.contains("where")) {
                    sql += " and ";
                } else {
                    sql += " where ";
                }
                sql += timeColumnName + " >= unix_timestamp(\"" + startDate + " " + startTime + "\") ";
            }
            if (tableIndex == tableList.size() - 1) {
                if (sql.contains("where")) {
                    sql += " and ";
                } else {
                    sql += " where ";
                }
                sql += timeColumnName + " <= unix_timestamp(\"" + endDate + " " + endTime + "\") ";
            }

            sql += " limit 0," + (maxRetNum - totalRetNum);
            logger.info("query sql: " + sql);
            querySql += sql + "\n";

            ResultSet rs = stmt.executeQuery(sql);
            logger.info("Timecost for query process table(" + tableList.get(tableIndex) + "): "
                    + (System.currentTimeMillis() - timeStart) + "ms");
            timeStart = System.currentTimeMillis();

            rs.last();
            int rowCount = rs.getRow();
            rs.beforeFirst();

            if (logsHeads.size() == 0 || columnCount == 0) {
                ResultSetMetaData rsmd = rs.getMetaData();
                logsHeads.clear();
                columnCount = rsmd.getColumnCount();
                delColumns = new ArrayList<Integer>();

                for (int i = 0; i < columnCount; i++) {
                    if (delFields.contains(rsmd.getColumnName(i + 1).toLowerCase())) {
                        delColumns.add(Integer.valueOf(i));
                        continue;
                    }

                    logsHeads.add(rsmd.getColumnName(i + 1));
                }
            }

            while (rs.next()) {
                currentCnt++;

                List<String> logOneRecord = new ArrayList<String>();
                for (int i = 0; i < columnCount; i++) {
                    if (delColumns.contains(Integer.valueOf(i))) {
                        continue;
                    }
                    if (rs.getObject(i + 1) == null)
                        logOneRecord.add("");
                    else
                        logOneRecord.add(rs.getObject(i + 1).toString());
                }
                logRecords.add(logOneRecord);
            }
            rs.close();

            //JSON
            totalRetNum += rowCount;
            if (totalRetNum >= maxRetNum)
                break;
        }

    } catch (ParseException e) {
        e.printStackTrace();
        rsp.setRet(-1);
        rsp.setErrmsg(e.toString());
    } catch (SQLException e) {
        e.printStackTrace();
        rsp.setRet(-2);
        rsp.setErrmsg(e.toString());
    } catch (JSONException e) {
        e.printStackTrace();
        rsp.setRet(-3);
        rsp.setErrmsg(e.toString());
    }

    try {
        rsp.setRet(0);
        rsp.setLines(totalRetNum);
        rsp.setQuerySql(querySql);
        rsp.setHeads(logsHeads);
        rsp.setRecords(logRecords);
    } catch (JSONException e) {
        e.printStackTrace();
        rsp.setRet(-4);
        rsp.setErrmsg("json write error.");
    }
    return rsp;
}

From source file:org.apache.logging.log4j.core.util.datetime.FastDateParserTest.java

@Test
public void testJpLocales() {

    final Calendar cal = Calendar.getInstance(GMT);
    cal.clear();
    cal.set(2003, Calendar.FEBRUARY, 10);
    cal.set(Calendar.ERA, GregorianCalendar.BC);

    final Locale locale = LocaleUtils.toLocale("zh");
    {
        // ja_JP_JP cannot handle dates before 1868 properly

        final SimpleDateFormat sdf = new SimpleDateFormat(LONG_FORMAT, locale);
        final DateParser fdf = getInstance(LONG_FORMAT, locale);

        try {
            checkParse(locale, cal, sdf, fdf);
        } catch (final ParseException ex) {
            Assert.fail("Locale " + locale + " failed with " + LONG_FORMAT + "\n" + trimMessage(ex.toString()));
        }
    }
}

From source file:org.msec.LogQuery.java

public org.msec.LogsysRsp.CallGraphRsp callGraph(int logLevel, Map<String, String> headerFilter, int maxRetNum,
        String startDate, String endDate, String startTime, String endTime, String whereCondition) {
    long timeStart = System.currentTimeMillis();
    String where = "";
    String querySql = "";
    Iterator<Map.Entry<String, String>> entries = headerFilter.entrySet().iterator();
    Map.Entry<String, String> entry;
    while (entries.hasNext()) {
        if (!where.isEmpty()) {
            where += " and ";
        } else {
            where += " where ";
        }
        entry = entries.next();
        where += entry.getKey() + " = \'" + entry.getValue() + "\'";
    }

    if (whereCondition != null && !whereCondition.isEmpty()) {
        if (!where.isEmpty()) {
            where += " and " + whereCondition;
        } else {
            where += " where " + whereCondition;
        }
    }

    List<String> tableList = null;
    String sql;
    List<String> logsHeads = new ArrayList<String>();
    List<Object> logRecords = new ArrayList<Object>();
    Set<org.msec.LogsysRsp.CallPair> callPairSet = new HashSet<org.msec.LogsysRsp.CallPair>(128);

    int totalRetNum = 0;
    int currentCnt = 0;
    int ret = 0;
    org.msec.LogsysRsp.CallGraphRsp rsp = new org.msec.LogsysRsp.CallGraphRsp();
    rsp.setRet(0);
    rsp.setErrmsg("Succeed.");

    try {
        tableList = getTableList(startDate, endDate);

        logger.info("Timecost for query process phase1: " + (System.currentTimeMillis() - timeStart) + "ms");
        timeStart = System.currentTimeMillis();

        for (int tableIndex = 0; tableIndex < tableList.size(); tableIndex++) {
            sql = "select ServiceName,content,RPCName from " + tableList.get(tableIndex) + where;
            if (tableList.size() == 1) {
                if (sql.contains("where")) {
                    sql += " and ";
                } else {
                    sql += " where ";
                }
                sql += timeColumnName + " between unix_timestamp(\"" + startDate + " " + startTime + "\") "
                        + " and unix_timestamp(\"" + endDate + " " + endTime + "\") ";
            } else {
                if (tableIndex == 0) {
                    if (sql.contains("where")) {
                        sql += " and ";
                    } else {
                        sql += " where ";
                    }
                    sql += timeColumnName + " >= unix_timestamp(\"" + startDate + " " + startTime + "\") ";
                }
                if (tableIndex == tableList.size() - 1) {
                    if (sql.contains("where")) {
                        sql += " and ";
                    } else {
                        sql += " where ";
                    }
                    sql += timeColumnName + " <= unix_timestamp(\"" + endDate + " " + endTime + "\") ";
                }
            }

            sql += " limit 0," + (maxRetNum - totalRetNum);
            logger.info("query sql: " + sql);
            querySql += sql + "\n";

            ResultSet rs = stmt.executeQuery(sql);
            logger.info("Timecost for query process table(" + tableList.get(tableIndex) + "): "
                    + (System.currentTimeMillis() - timeStart) + "ms");
            timeStart = System.currentTimeMillis();

            rs.last();
            int rowCount = rs.getRow();
            rs.beforeFirst();

            if (logsHeads.size() == 0 || columnCount == 0) {
                ResultSetMetaData rsmd = rs.getMetaData();
                logsHeads.clear();
                columnCount = rsmd.getColumnCount();
                delColumns = new ArrayList<Integer>();

                for (int i = 0; i < columnCount; i++) {
                    if (delFields.contains(rsmd.getColumnName(i + 1).toLowerCase())) {
                        delColumns.add(Integer.valueOf(i));
                        continue;
                    }

                    logsHeads.add(rsmd.getColumnName(i + 1));
                }
            }

            while (rs.next()) {
                currentCnt++;

                String content = rs.getObject(2).toString();

                System.out.println(rs.getObject(1).toString() + " " + rs.getObject(2).toString());
                int pos = content.indexOf("Caller=");
                if (pos < 0) {
                    pos = content.indexOf("caller=");
                }
                if (pos >= 0) {
                    pos += 7;
                    int pos2 = content.indexOf(" ", pos);
                    org.msec.LogsysRsp.CallPair callPair = new org.msec.LogsysRsp.CallPair();
                    callPair.setFrom(content.substring(pos, pos2));
                    callPair.setTo(rs.getObject(1).toString());
                    callPair.setRpcname(rs.getObject(3).toString());

                    callPairSet.add(callPair);
                }
            }
            rs.close();

            //JSON
            totalRetNum += rowCount;
            if (totalRetNum >= maxRetNum)
                break;
        }

    } catch (ParseException e) {
        e.printStackTrace();
        rsp.setRet(-1);
        rsp.setErrmsg(e.toString());
    } catch (SQLException e) {
        e.printStackTrace();
        rsp.setRet(-2);
        rsp.setErrmsg(e.toString());
    } catch (JSONException e) {
        e.printStackTrace();
        rsp.setRet(-3);
        rsp.setErrmsg(e.toString());
    }

    try {
        rsp.setRet(0);

        String graph = "digraph calls {\n";
        for (org.msec.LogsysRsp.CallPair callPair : callPairSet) {
            graph += "\"" + callPair.getFrom() + "\" -> \"" + callPair.getTo() + "\"";
            graph += " [ label=\"" + callPair.getRpcname() + "\" ]\n";
        }
        graph += "}\n";
        rsp.setGraph(graph);
    } catch (JSONException e) {
        e.printStackTrace();
        rsp.setRet(-4);
        rsp.setErrmsg("json write error.");
    }
    return rsp;
}