Example usage for java.sql ResultSetMetaData getColumnType

Introduction

This page collects example usages of java.sql.ResultSetMetaData.getColumnType(int) taken from open-source projects.

Prototype

int getColumnType(int column) throws SQLException;

Document

Retrieves the designated column's SQL type, returned as one of the int constants defined in java.sql.Types.
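
Before the project examples below, here is a minimal, self-contained sketch of the typical pattern: obtain the ResultSetMetaData from a ResultSet, loop over its 1-based column indexes, and compare each getColumnType value against the constants in java.sql.Types. The connection handling and the table name (example_table) are placeholders rather than part of any example on this page.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;

public class ColumnTypeSketch {

    /** Prints each column's label together with its java.sql.Types code. */
    static void printColumnTypes(Connection conn) throws SQLException {
        try (Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT * FROM example_table")) {
            ResultSetMetaData meta = rs.getMetaData();
            for (int i = 1; i <= meta.getColumnCount(); i++) { // JDBC columns are 1-based
                int type = meta.getColumnType(i);
                String kind;
                switch (type) {
                case Types.VARCHAR:
                case Types.CHAR:
                    kind = "text";
                    break;
                case Types.INTEGER:
                case Types.BIGINT:
                case Types.NUMERIC:
                case Types.DECIMAL:
                    kind = "numeric";
                    break;
                default:
                    kind = "other";
                }
                System.out.println(meta.getColumnLabel(i) + " -> " + type + " (" + kind + ")");
            }
        }
    }
}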

Usage

From source file:org.nuclos.server.dblayer.impl.standard.StandardSqlDBAccess.java

@Override
public ResultVO executePlainQueryAsResultVO(String sql, int maxRows) throws DbException {
    return executePlainQuery(sql, maxRows, new ResultSetRunner<ResultVO>() {
        @Override
        public ResultVO perform(ResultSet rs) throws SQLException {
            ResultVO result = new ResultVO();
            ResultSetMetaData metadata = rs.getMetaData();

            Class<?>[] javaTypes = new Class<?>[metadata.getColumnCount()];
            for (int i = 0; i < metadata.getColumnCount(); i++) {
                ResultColumnVO column = new ResultColumnVO();
                column.setColumnLabel(metadata.getColumnLabel(i + 1));

                DbGenericType type = getDbGenericType(metadata.getColumnType(i + 1),
                        metadata.getColumnTypeName(i + 1));
                if (type != null) {
                    Class<?> javaType = type.getPreferredJavaType();
                    // override java type here @todo this is not the right place.
                    if (type == DbGenericType.NUMERIC) {
                        if (metadata.getScale(i + 1) == 0)
                            javaType = Integer.class;
                        else
                            javaType = Double.class;
                    }
                    column.setColumnClassName(javaType.getName());
                    javaTypes[i] = javaType;
                } else {
                    column.setColumnClassName(metadata.getColumnClassName(i + 1));
                    javaTypes[i] = Object.class;
                }
                result.addColumn(column);
            }
            while (rs.next()) {
                final Object[] values = new Object[javaTypes.length];
                for (int i = 0; i < values.length; i++) {
                    values[i] = getResultSetValue(rs, i + 1, javaTypes[i]);
                }
                result.addRow(values);
            }
            return result;
        }
    });
}

From source file:edu.ku.brc.specify.conversion.ConvertTaxonHelper.java

/** =============================================================================
 *                      Convert Taxon
 *  =============================================================================
 */
private void convertTaxonRecords() {
    txMapper = IdMapperMgr.getInstance().get("taxonname", "TaxonNameID");
    txTypMapper = IdMapperMgr.getInstance().get("TaxonomyType", "TaxonomyTypeID");
    txUnitTypMapper = IdMapperMgr.getInstance().get("TaxonomicUnitType", "TaxonomicUnitTypeID");
    mappers = new IdMapperIFace[] { txMapper, txMapper, txTypMapper, txMapper, txUnitTypMapper };

    IdHashMapper.setTblWriter(tblWriter);

    newToOldColMap.put("TaxonID", "TaxonNameID");
    newToOldColMap.put("ParentID", "ParentTaxonNameID");
    newToOldColMap.put("TaxonTreeDefID", "TaxonomyTypeID");
    newToOldColMap.put("TaxonTreeDefItemID", "TaxonomicUnitTypeID");
    newToOldColMap.put("Name", "TaxonName");
    newToOldColMap.put("FullName", "FullTaxonName");
    newToOldColMap.put("IsAccepted", "Accepted");

    oldToNewColMap.put("TaxonNameID", "TaxonID");
    oldToNewColMap.put("ParentTaxonNameID", "ParentID");
    oldToNewColMap.put("TaxonomyTypeID", "TaxonTreeDefID");
    oldToNewColMap.put("TaxonomicUnitTypeID", "TaxonTreeDefItemID");
    oldToNewColMap.put("TaxonName", "Name");
    oldToNewColMap.put("FullTaxonName", "FullName");
    oldToNewColMap.put("Accepted", "IsAccepted");

    // Ignore new fields
    // These were added for supporting the new security model and hybrids
    /*String[] ignoredFields = { "GUID", "Visibility", "VisibilitySetBy", "IsHybrid",
                            "HybridParent1ID", "HybridParent2ID", "EsaStatus", "CitesStatus", "UsfwsCode",
                            "IsisNumber", "Text1", "Text2", "NcbiTaxonNumber", "Number1", "Number2",
                            "CreatedByAgentID", "ModifiedByAgentID", "Version", "CultivarName", "LabelFormat", 
                            "COLStatus", "VisibilitySetByID"};
    */

    StringBuilder newSB = new StringBuilder();
    StringBuilder vl = new StringBuilder();
    for (int i = 0; i < cols.length; i++) {
        fieldToColHash.put(cols[i], i + 1);
        colToFieldHash.put(i + 1, cols[i]);

        if (newSB.length() > 0)
            newSB.append(", ");
        newSB.append(cols[i]);

        if (vl.length() > 0)
            vl.append(',');
        vl.append('?');
    }

    StringBuilder oldSB = new StringBuilder();
    for (int i = 0; i < oldCols.length; i++) {
        oldFieldToColHash.put(oldCols[i], i + 1);
        if (oldSB.length() > 0)
            oldSB.append(", ");
        oldSB.append("tx.");
        oldSB.append(oldCols[i]);
    }

    rankIdOldDBInx = oldFieldToColHash.get("RankID");

    String sqlStr = String.format("SELECT %s FROM taxon", newSB.toString());
    log.debug(sqlStr);

    String sql = String.format("SELECT %s %s", oldSB.toString(), taxonFromClause);
    log.debug(sql);

    String cntSQL = String.format("SELECT COUNT(*) %s", taxonFromClause);
    log.debug(cntSQL);
    int txCnt = BasicSQLUtils.getCountAsInt(oldDBConn, cntSQL);
    if (frame != null) {
        frame.setProcess(0, txCnt);
    }

    String pStr = String.format("INSERT INTO taxon (%s) VALUES (%s)", newSB.toString(), vl.toString());
    log.debug(pStr);

    try {
        stmtTx = newDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        ResultSet rs1 = stmtTx.executeQuery(sqlStr);
        ResultSetMetaData rsmd1 = rs1.getMetaData();
        colTypes = new int[rsmd1.getColumnCount()];
        colSizes = new int[rsmd1.getColumnCount()];
        for (int i = 0; i < colTypes.length; i++) {
            colTypes[i] = rsmd1.getColumnType(i + 1);
            colSizes[i] = rsmd1.getPrecision(i + 1);
        }
        rs1.close();
        stmtTx.close();

        missingParentList.clear();
        strandedFixedHash.clear();

        lastEditedByInx = oldFieldToColHash.get("LastEditedBy");
        modifiedByAgentInx = fieldToColHash.get("ModifiedByAgentID");
        stmtTx = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        pStmtTx = newDBConn.prepareStatement(pStr);

        mappers[1].setShowLogErrors(false);

        int cnt = 0;
        ResultSet rs = stmtTx.executeQuery(sql);
        ResultSetMetaData rsmd = rs.getMetaData();
        while (rs.next()) {
            processRow(rs, rsmd, null);

            cnt++;
            if (cnt % 1000 == 0) {
                log.debug(cnt);
                if (frame != null) {
                    frame.setProcess(cnt);
                }
            }
        }
        rs.close();

        if (frame != null) {
            frame.setProcess(txCnt, txCnt);
        }

        if (missingParentList.size() > 0) {
            fixStrandedTaxon(oldSB);

            tblWriter.setHasLines();

            tblWriter.startTable("Stranded Taxon (no parent): " + missingParentList.size());
            tblWriter.logHdr("Full Name", "RankID", "Sp5 RecordID", "Was Re-parented", "Description");
            for (Pair<Integer, String> p : missingParentList) {
                tblWriter.append("<TR>");
                Object[] row = BasicSQLUtils.queryForRow(oldDBConn,
                        "SELECT FullTaxonName, RankID, TaxonNameID FROM taxonname WHERE TaxonNameID = "
                                + p.first);
                for (Object obj : row) {
                    tblWriter.append("<TD>");
                    tblWriter.append(obj != null ? obj.toString() : "null");
                    tblWriter.append("</TD>");
                }
                tblWriter.append("<TD>");
                tblWriter.append(strandedFixedHash.contains(p.first) ? "Yes" : "No");
                tblWriter.append("</TD><TD>");
                tblWriter.append(p.second);
                tblWriter.append("</TD></TR>");
            }
            tblWriter.endTable();
            tblWriter.append("<BR>");

            frame.setDesc("Renumbering the tree nodes, this may take a while...");

            HashSet<Integer> ttdHash = new HashSet<Integer>();
            for (CollectionInfo colInfo : CollectionInfo.getFilteredCollectionInfoList()) {
                if (!ttdHash.contains(colInfo.getTaxonTreeDef().getId())) {
                    DataProviderSessionIFace session = null;
                    try {
                        session = DataProviderFactory.getInstance().createSession();

                        TaxonTreeDef taxonTreeDef = colInfo.getTaxonTreeDef();
                        taxonTreeDef = (TaxonTreeDef) session
                                .getData("FROM TaxonTreeDef WHERE id = " + taxonTreeDef.getId());

                        sql = "SELECT TaxonID FROM taxon WHERE RankID = 0 AND TaxonTreeDefID = "
                                + taxonTreeDef.getId();
                        log.debug(sql);
                        Integer txRootId = BasicSQLUtils.getCount(sql);
                        Taxon txRoot = (Taxon) session.getData("FROM Taxon WHERE id = " + txRootId);

                        NodeNumberer<Taxon, TaxonTreeDef, TaxonTreeDefItem> nodeNumberer = new NodeNumberer<Taxon, TaxonTreeDef, TaxonTreeDefItem>(
                                txRoot.getDefinition());
                        nodeNumberer.doInBackground();

                    } catch (Exception ex) {
                        //session.rollback();
                        ex.printStackTrace();

                    } finally {
                        if (session != null) {
                            session.close();
                        }
                    }
                    ttdHash.add(colInfo.getTaxonTreeDef().getId());
                }
            }
            frame.setDesc("Renumbering done.");
        }
        missingParentList.clear();
        strandedFixedHash.clear();

    } catch (SQLException ex) {
        ex.printStackTrace();

    } finally {
        try {
            stmtTx.close();
            pStmtTx.close();
        } catch (Exception ex) {
        }
    }

    IdHashMapper.setTblWriter(null);
}

From source file:org.apache.kylin.rest.service.QueryService.java

/**
 * @param correctedSql
 * @param sqlRequest
 * @return
 * @throws Exception
 */
private SQLResponse execute(String correctedSql, SQLRequest sqlRequest) throws Exception {
    Connection conn = null;
    Statement stat = null;
    ResultSet resultSet = null;
    Boolean isPushDown = false;

    List<List<String>> results = Lists.newArrayList();
    List<SelectedColumnMeta> columnMetas = Lists.newArrayList();

    try {
        conn = cacheService.getOLAPDataSource(sqlRequest.getProject()).getConnection();

        // special case for prepare query. 
        if (BackdoorToggles.getPrepareOnly()) {
            return getPrepareOnlySqlResponse(correctedSql, conn, isPushDown, results, columnMetas);
        }

        stat = conn.createStatement();
        processStatementAttr(stat, sqlRequest);
        resultSet = stat.executeQuery(correctedSql);

        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();

        // Fill in selected column meta
        for (int i = 1; i <= columnCount; ++i) {
            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
                    metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i),
                    metaData.isSigned(i), metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i),
                    metaData.getColumnName(i), metaData.getSchemaName(i), metaData.getCatalogName(i),
                    metaData.getTableName(i), metaData.getPrecision(i), metaData.getScale(i),
                    metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData.isReadOnly(i),
                    metaData.isWritable(i), metaData.isDefinitelyWritable(i)));
        }

        // fill in results
        while (resultSet.next()) {
            List<String> oneRow = Lists.newArrayListWithCapacity(columnCount);
            for (int i = 0; i < columnCount; i++) {
                oneRow.add((resultSet.getString(i + 1)));
            }

            results.add(oneRow);
        }
    } catch (SQLException sqlException) {
        isPushDown = PushDownUtil.doPushDownQuery(sqlRequest.getProject(), correctedSql, results, columnMetas,
                sqlException);
    } finally {
        close(resultSet, stat, conn);
    }

    return getSqlResponse(isPushDown, results, columnMetas);
}

From source file:it.greenvulcano.gvesb.datahandling.dbo.utils.StandardRowSetBuilder.java

public int build(Document doc, String id, ResultSet rs, Set<Integer> keyField,
        Map<String, FieldFormatter> fieldNameToFormatter, Map<String, FieldFormatter> fieldIdToFormatter)
        throws Exception {
    if (rs == null) {
        return 0;
    }
    int rowCounter = 0;
    Element docRoot = doc.getDocumentElement();
    ResultSetMetaData metadata = rs.getMetaData();
    FieldFormatter[] fFormatters = buildFormatterArray(metadata, fieldNameToFormatter, fieldIdToFormatter);

    boolean noKey = ((keyField == null) || keyField.isEmpty());

    //boolean isNull = false;
    Element data = null;
    Element row = null;
    Element col = null;
    Text text = null;
    String textVal = null;
    String precKey = null;
    String colKey = null;
    Map<String, String> keyAttr = new HashMap<String, String>();
    while (rs.next()) {
        if (rowCounter % 10 == 0) {
            ThreadUtils.checkInterrupted(getClass().getSimpleName(), name, logger);
        }
        row = parser.createElement(doc, AbstractDBO.ROW_NAME);

        parser.setAttribute(row, AbstractDBO.ID_NAME, id);
        for (int j = 1; j <= metadata.getColumnCount(); j++) {
            FieldFormatter fF = fFormatters[j];

            //isNull = false;
            col = parser.createElement(doc, AbstractDBO.COL_NAME);
            switch (metadata.getColumnType(j)) {
            case Types.DATE: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.DATE_TYPE);
                java.sql.Date dateVal = rs.getDate(j);
                textVal = processDateTime(col, fF, dateVal, AbstractDBO.DEFAULT_DATE_FORMAT);
            }
                break;
            case Types.TIME: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.TIME_TYPE);
                java.sql.Time dateVal = rs.getTime(j);
                textVal = processDateTime(col, fF, dateVal, AbstractDBO.DEFAULT_TIME_FORMAT);
            }
                break;
            case Types.TIMESTAMP: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.TIMESTAMP_TYPE);
                Timestamp dateVal = rs.getTimestamp(j);
                textVal = processDateTime(col, fF, dateVal, AbstractDBO.DEFAULT_DATE_FORMAT);
            }
                break;
            case Types.DOUBLE: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.FLOAT_TYPE);
                double numVal = rs.getDouble(j);
                textVal = processDouble(col, fF, numVal);
            }
                break;
            case Types.FLOAT:
            case Types.REAL: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.FLOAT_TYPE);
                float numVal = rs.getFloat(j);
                textVal = processDouble(col, fF, numVal);
            }
                break;
            case Types.BIGINT: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.BIGINT_TYPE);
                long numVal = rs.getLong(j);
                parser.setAttribute(col, AbstractDBO.NULL_NAME, "false");
                textVal = String.valueOf(numVal);
            }
                break;
            case Types.INTEGER: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.INTEGER_TYPE);
                int numVal = rs.getInt(j);
                parser.setAttribute(col, AbstractDBO.NULL_NAME, "false");
                textVal = String.valueOf(numVal);
            }
                break;
            case Types.SMALLINT:
            case Types.TINYINT: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.SMALLINT_TYPE);
                short numVal = rs.getShort(j);
                parser.setAttribute(col, AbstractDBO.NULL_NAME, "false");
                textVal = String.valueOf(numVal);
            }
                break;
            case Types.NUMERIC:
            case Types.DECIMAL: {
                BigDecimal bigdecimal = rs.getBigDecimal(j);
                boolean isNull = bigdecimal == null;
                parser.setAttribute(col, AbstractDBO.NULL_NAME, String.valueOf(isNull));
                if (isNull) {
                    if (metadata.getScale(j) > 0) {
                        parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.DECIMAL_TYPE);
                    } else {
                        parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.NUMERIC_TYPE);
                    }
                    textVal = "";
                } else {
                    if (fF != null) {
                        parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.DECIMAL_TYPE);
                        parser.setAttribute(col, AbstractDBO.FORMAT_NAME, fF.getNumberFormat());
                        parser.setAttribute(col, AbstractDBO.GRP_SEPARATOR_NAME, fF.getGroupSeparator());
                        parser.setAttribute(col, AbstractDBO.DEC_SEPARATOR_NAME, fF.getDecSeparator());
                        textVal = fF.formatNumber(bigdecimal);
                    } else if (metadata.getScale(j) > 0) {
                        parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.DECIMAL_TYPE);
                        parser.setAttribute(col, AbstractDBO.FORMAT_NAME, numberFormat);
                        parser.setAttribute(col, AbstractDBO.GRP_SEPARATOR_NAME, groupSeparator);
                        parser.setAttribute(col, AbstractDBO.DEC_SEPARATOR_NAME, decSeparator);
                        textVal = numberFormatter.format(bigdecimal);
                    } else {
                        parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.NUMERIC_TYPE);
                        textVal = bigdecimal.toString();
                    }
                }
            }
                break;
            case Types.BOOLEAN: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.BOOLEAN_TYPE);
                boolean bVal = rs.getBoolean(j);
                parser.setAttribute(col, AbstractDBO.NULL_NAME, "false");
                textVal = String.valueOf(bVal);
            }
                break;
            case Types.SQLXML: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.XML_TYPE);
                SQLXML xml = rs.getSQLXML(j);
                boolean isNull = xml == null;
                parser.setAttribute(col, AbstractDBO.NULL_NAME, String.valueOf(isNull));
                if (isNull) {
                    textVal = "";
                } else {
                    textVal = xml.getString();
                }
            }
                break;
            case Types.NCHAR:
            case Types.NVARCHAR: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.NSTRING_TYPE);
                textVal = rs.getNString(j);
                if (textVal == null) {
                    textVal = "";
                }
            }
                break;
            case Types.CHAR:
            case Types.VARCHAR: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.STRING_TYPE);
                textVal = rs.getString(j);
                boolean isNull = textVal == null;
                parser.setAttribute(col, AbstractDBO.NULL_NAME, String.valueOf(isNull));
                if (isNull) {
                    textVal = "";
                }
            }
                break;
            case Types.NCLOB: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.LONG_NSTRING_TYPE);
                NClob clob = rs.getNClob(j);
                if (clob != null) {
                    Reader is = clob.getCharacterStream();
                    StringWriter str = new StringWriter();

                    IOUtils.copy(is, str);
                    is.close();
                    textVal = str.toString();
                } else {
                    textVal = "";
                }
            }
                break;
            case Types.CLOB: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.LONG_STRING_TYPE);
                Clob clob = rs.getClob(j);
                if (clob != null) {
                    Reader is = clob.getCharacterStream();
                    StringWriter str = new StringWriter();

                    IOUtils.copy(is, str);
                    is.close();
                    textVal = str.toString();
                } else {
                    textVal = "";
                }
            }
                break;
            case Types.BLOB: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.BASE64_TYPE);
                Blob blob = rs.getBlob(j);
                boolean isNull = blob == null;
                parser.setAttribute(col, AbstractDBO.NULL_NAME, String.valueOf(isNull));
                if (isNull) {
                    textVal = "";
                } else {
                    InputStream is = blob.getBinaryStream();
                    ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    IOUtils.copy(is, baos);
                    is.close();
                    try {
                        byte[] buffer = Arrays.copyOf(baos.toByteArray(), (int) blob.length());
                        textVal = Base64.getEncoder().encodeToString(buffer);
                    } catch (SQLFeatureNotSupportedException exc) {
                        textVal = Base64.getEncoder().encodeToString(baos.toByteArray());
                    }
                }
            }
                break;
            default: {
                parser.setAttribute(col, AbstractDBO.TYPE_NAME, AbstractDBO.DEFAULT_TYPE);
                textVal = rs.getString(j);
                boolean isNull = textVal == null;
                parser.setAttribute(col, AbstractDBO.NULL_NAME, String.valueOf(isNull));
                if (isNull) {
                    textVal = "";
                }
            }
            }
            if (textVal != null) {
                text = doc.createTextNode(textVal);
                col.appendChild(text);
            }
            if (!noKey && keyField.contains(Integer.valueOf(j))) {
                if (textVal != null) {
                    if (colKey == null) {
                        colKey = textVal;
                    } else {
                        colKey += "##" + textVal;
                    }
                    keyAttr.put("key_" + j, textVal);
                }
            } else {
                row.appendChild(col);
            }
        }
        if (noKey) {
            if (data == null) {
                data = parser.createElement(doc, AbstractDBO.DATA_NAME);
                parser.setAttribute(data, AbstractDBO.ID_NAME, id);
            }
        } else if ((colKey != null) && !colKey.equals(precKey)) {
            if (data != null) {
                docRoot.appendChild(data);
            }
            data = parser.createElement(doc, AbstractDBO.DATA_NAME);
            parser.setAttribute(data, AbstractDBO.ID_NAME, id);
            for (Entry<String, String> keyAttrEntry : keyAttr.entrySet()) {
                parser.setAttribute(data, keyAttrEntry.getKey(), keyAttrEntry.getValue());
            }
            keyAttr.clear();
            precKey = colKey;
        }
        colKey = null;
        data.appendChild(row);
        rowCounter++;
    }
    if (data != null) {
        docRoot.appendChild(data);
    }

    return rowCounter;
}

From source file:solidbase.core.plugins.DumpJSON.java

public boolean execute(CommandProcessor processor, Command command, boolean skip) throws SQLException {
    if (!triggerPattern.matcher(command.getCommand()).matches())
        return false;

    if (command.isTransient()) {
        /* DUMP JSON DATE_CREATED ON | OFF */

        SQLTokenizer tokenizer = new SQLTokenizer(
                SourceReaders.forString(command.getCommand(), command.getLocation()));

        // TODO Maybe DUMP JSON CONFIG or DUMP JSON SET
        // TODO What about other configuration settings?
        tokenizer.get("DUMP");
        tokenizer.get("JSON");
        tokenizer.get("DATE_CREATED"); // FIXME This should be CREATED_DATE
        Token t = tokenizer.get("ON", "OFF");
        tokenizer.get((String) null);

        // TODO I think we should have a scope that is restricted to the current file and a scope that gets inherited when running or including another file.
        AbstractScope scope = processor.getContext().getScope();
        scope.set("solidbase.dump_json.dateCreated", t.eq("ON")); // TODO Make this a constant

        return true;
    }

    if (skip)
        return true;

    Parsed parsed = parse(command);

    AbstractScope scope = processor.getContext().getScope();
    Object object = scope.get("solidbase.dump_json.dateCreated");
    boolean dateCreated = object == null || object instanceof Boolean && (Boolean) object;

    Resource jsvResource = new FileResource(new File(parsed.fileName)); // Relative to current folder

    try {
        OutputStream out = jsvResource.getOutputStream();
        if (parsed.gzip)
            out = new BufferedOutputStream(new GZIPOutputStream(out, 65536), 65536); // TODO Ctrl-C, close the outputstream?

        JSONWriter jsonWriter = new JSONWriter(out);
        try {
            Statement statement = processor.createStatement();
            try {
                ResultSet result = statement.executeQuery(parsed.query);
                ResultSetMetaData metaData = result.getMetaData();

                // Define locals

                int columns = metaData.getColumnCount();
                int[] types = new int[columns];
                String[] names = new String[columns];
                boolean[] ignore = new boolean[columns];
                FileSpec[] fileSpecs = new FileSpec[columns];
                String schemaNames[] = new String[columns];
                String tableNames[] = new String[columns];

                // Analyze metadata

                for (int i = 0; i < columns; i++) {
                    int col = i + 1;
                    String name = metaData.getColumnName(col).toUpperCase();
                    types[i] = metaData.getColumnType(col);
                    if (types[i] == Types.DATE && parsed.dateAsTimestamp)
                        types[i] = Types.TIMESTAMP;
                    names[i] = name;
                    if (parsed.columns != null) {
                        ColumnSpec columnSpec = parsed.columns.get(name);
                        if (columnSpec != null)
                            if (columnSpec.skip)
                                ignore[i] = true;
                            else
                                fileSpecs[i] = columnSpec.toFile;
                    }
                    if (parsed.coalesce != null && parsed.coalesce.notFirst(name))
                        ignore[i] = true;
                    // TODO STRUCT serialize
                    // TODO This must be optional and not the default
                    else if (types[i] == Types.STRUCT || JDBCSupport.toTypeName(types[i]) == null)
                        ignore[i] = true;
                    tableNames[i] = StringUtils
                            .upperCase(StringUtils.defaultIfEmpty(metaData.getTableName(col), null));
                    schemaNames[i] = StringUtils
                            .upperCase(StringUtils.defaultIfEmpty(metaData.getSchemaName(col), null));
                }

                if (parsed.coalesce != null)
                    parsed.coalesce.bind(names);

                // Write header

                JSONObject properties = new JSONObject();
                properties.set("version", "1.0");
                properties.set("format", "record-stream");
                properties.set("description", "SolidBase JSON Data Dump File");
                properties.set("createdBy", new JSONObject("product", "SolidBase", "version", "2.0.0"));

                if (dateCreated) {
                    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    properties.set("createdDate", format.format(new Date()));
                }

                if (parsed.binaryFileName != null) {
                    // TODO FIXME Should be wrapped in a SourceException: solidbase.solidstack.io.FatalURISyntaxException: java.net.URISyntaxException: Illegal character in path at index 1: &{folder}/JIADHOCCH
                    Resource binResource = Resources.getResource(parsed.binaryFileName);
                    Resource resource = Resources.getResource(parsed.fileName);
                    properties.set("binaryFile", binResource.getPathFrom(resource).toString());
                }

                JSONArray fields = new JSONArray();
                properties.set("fields", fields);
                for (int i = 0; i < columns; i++)
                    if (!ignore[i]) {
                        JSONObject field = new JSONObject();
                        field.set("schemaName", schemaNames[i]);
                        field.set("tableName", tableNames[i]);
                        field.set("name", names[i]);
                        field.set("type", JDBCSupport.toTypeName(types[i])); // TODO Better error message when type is not recognized, for example Oracle's 2007 for a user type
                        FileSpec spec = fileSpecs[i];
                        if (spec != null && !spec.generator.isDynamic()) {
                            Resource fileResource = new FileResource(spec.generator.fileName);
                            field.set("file", fileResource.getPathFrom(jsvResource).toString());
                        }
                        fields.add(field);
                    }

                FileSpec binaryFile = parsed.binaryFileName != null
                        ? new FileSpec(true, parsed.binaryFileName, 0)
                        : null;

                jsonWriter.writeFormatted(properties, 120);
                jsonWriter.getWriter().write('\n');

                Counter counter = null;
                if (parsed.logRecords > 0)
                    counter = new FixedCounter(parsed.logRecords);
                else if (parsed.logSeconds > 0)
                    counter = new TimedCounter(parsed.logSeconds);

                try {
                    while (result.next()) {
                        Object[] values = new Object[columns];
                        for (int i = 0; i < values.length; i++)
                            values[i] = JDBCSupport.getValue(result, types, i);

                        if (parsed.coalesce != null)
                            parsed.coalesce.coalesce(values);

                        JSONArray array = new JSONArray();
                        for (int i = 0; i < columns; i++)
                            if (!ignore[i]) {
                                Object value = values[i];
                                if (value == null) {
                                    array.add(null);
                                    continue;
                                }

                                // TODO 2 columns can't be written to the same dynamic filename

                                FileSpec spec = fileSpecs[i];
                                if (spec != null) // The column is redirected to its own file
                                {
                                    String relFileName = null;
                                    int startIndex;
                                    if (spec.binary) {
                                        if (spec.generator.isDynamic()) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.out = fileResource.getOutputStream();
                                            spec.index = 0;
                                            relFileName = fileResource.getPathFrom(jsvResource).toString();
                                        } else if (spec.out == null) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.out = fileResource.getOutputStream();
                                        }
                                        if (value instanceof Blob) {
                                            InputStream in = ((Blob) value).getBinaryStream();
                                            startIndex = spec.index;
                                            byte[] buf = new byte[4096];
                                            for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                                spec.out.write(buf, 0, read);
                                                spec.index += read;
                                            }
                                            in.close();
                                        } else if (value instanceof byte[]) {
                                            startIndex = spec.index;
                                            spec.out.write((byte[]) value);
                                            spec.index += ((byte[]) value).length;
                                        } else
                                            throw new SourceException(names[i] + " ("
                                                    + value.getClass().getName()
                                                    + ") is not a binary column. Only binary columns like BLOB, RAW, BINARY VARYING can be written to a binary file",
                                                    command.getLocation());
                                        if (spec.generator.isDynamic()) {
                                            spec.out.close();
                                            JSONObject ref = new JSONObject();
                                            ref.set("file", relFileName);
                                            ref.set("size", spec.index - startIndex);
                                            array.add(ref);
                                        } else {
                                            JSONObject ref = new JSONObject();
                                            ref.set("index", startIndex);
                                            ref.set("length", spec.index - startIndex);
                                            array.add(ref);
                                        }
                                    } else {
                                        if (spec.generator.isDynamic()) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.writer = new DeferringWriter(spec.threshold, fileResource,
                                                    jsonWriter.getEncoding());
                                            spec.index = 0;
                                            relFileName = fileResource.getPathFrom(jsvResource).toString();
                                        } else if (spec.writer == null) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.writer = new OutputStreamWriter(fileResource.getOutputStream(),
                                                    jsonWriter.getEncoding());
                                        }
                                        if (value instanceof Blob || value instanceof byte[])
                                            throw new SourceException(names[i]
                                                    + " is a binary column. Binary columns like BLOB, RAW, BINARY VARYING cannot be written to a text file",
                                                    command.getLocation());
                                        if (value instanceof Clob) {
                                            Reader in = ((Clob) value).getCharacterStream();
                                            startIndex = spec.index;
                                            char[] buf = new char[4096];
                                            for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                                spec.writer.write(buf, 0, read);
                                                spec.index += read;
                                            }
                                            in.close();
                                        } else {
                                            String val = value.toString();
                                            startIndex = spec.index;
                                            spec.writer.write(val);
                                            spec.index += val.length();
                                        }
                                        if (spec.generator.isDynamic()) {
                                            DeferringWriter writer = (DeferringWriter) spec.writer;
                                            if (writer.isBuffered())
                                                array.add(writer.clearBuffer());
                                            else {
                                                JSONObject ref = new JSONObject();
                                                ref.set("file", relFileName);
                                                ref.set("size", spec.index - startIndex);
                                                array.add(ref);
                                            }
                                            writer.close();
                                        } else {
                                            JSONObject ref = new JSONObject();
                                            ref.set("index", startIndex);
                                            ref.set("length", spec.index - startIndex);
                                            array.add(ref);
                                        }
                                    }
                                } else if (value instanceof Clob)
                                    array.add(((Clob) value).getCharacterStream());
                                else if (binaryFile != null
                                        && (value instanceof Blob || value instanceof byte[])) {
                                    if (binaryFile.out == null) {
                                        String fileName = binaryFile.generator.generateFileName(null);
                                        Resource fileResource = new FileResource(fileName);
                                        binaryFile.out = fileResource.getOutputStream();
                                        if (parsed.binaryGzip)
                                            binaryFile.out = new BufferedOutputStream(
                                                    new GZIPOutputStream(binaryFile.out, 65536), 65536); // TODO Ctrl-C, close the outputstream?
                                    }
                                    int startIndex = binaryFile.index;
                                    if (value instanceof Blob) {
                                        InputStream in = ((Blob) value).getBinaryStream();
                                        byte[] buf = new byte[4096];
                                        for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                            binaryFile.out.write(buf, 0, read);
                                            binaryFile.index += read;
                                        }
                                        in.close();
                                    } else {
                                        binaryFile.out.write((byte[]) value);
                                        binaryFile.index += ((byte[]) value).length;
                                    }
                                    JSONObject ref = new JSONObject();
                                    ref.set("index", startIndex);
                                    ref.set("length", binaryFile.index - startIndex);
                                    array.add(ref);
                                } else
                                    array.add(value);
                            }

                        for (ListIterator<Object> i = array.iterator(); i.hasNext();) {
                            Object value = i.next();
                            if (value instanceof java.sql.Date || value instanceof java.sql.Time
                                    || value instanceof java.sql.Timestamp || value instanceof java.sql.RowId)
                                i.set(value.toString());
                        }
                        jsonWriter.write(array);
                        jsonWriter.getWriter().write('\n');

                        if (counter != null && counter.next())
                            processor.getProgressListener()
                                    .println("Exported " + counter.total() + " records.");
                    }
                    if (counter != null && counter.needFinal())
                        processor.getProgressListener().println("Exported " + counter.total() + " records.");
                } finally {
                    // Close files that have been left open
                    for (FileSpec fileSpec : fileSpecs)
                        if (fileSpec != null) {
                            if (fileSpec.out != null)
                                fileSpec.out.close();
                            if (fileSpec.writer != null)
                                fileSpec.writer.close();
                        }
                    if (binaryFile != null && binaryFile.out != null)
                        binaryFile.out.close();
                }
            } finally {
                processor.closeStatement(statement, true);
            }
        } finally {
            jsonWriter.close();
        }
    } catch (IOException e) {
        throw new SystemException(e);
    }

    return true;
}

From source file:com.openddal.test.BaseTestCase.java

/**
 * Check if the result set meta data is correct.
 *
 * @param rs the result set
 * @param columnCount the expected column count
 * @param labels the expected column labels
 * @param datatypes the expected data types
 * @param precision the expected precisions
 * @param scale the expected scales
 */
protected void assertResultSetMeta(ResultSet rs, int columnCount, String[] labels, int[] datatypes,
        int[] precision, int[] scale) throws SQLException {
    ResultSetMetaData meta = rs.getMetaData();
    int cc = meta.getColumnCount();
    if (cc != columnCount) {
        fail("result set contains " + cc + " columns not " + columnCount);
    }
    for (int i = 0; i < columnCount; i++) {
        if (labels != null) {
            String l = meta.getColumnLabel(i + 1);
            if (!labels[i].equals(l)) {
                fail("column label " + i + " is " + l + " not " + labels[i]);
            }
        }
        if (datatypes != null) {
            int t = meta.getColumnType(i + 1);
            if (datatypes[i] != t) {
                fail("column datatype " + i + " is " + t + " not " + datatypes[i] + " (prec="
                        + meta.getPrecision(i + 1) + " scale=" + meta.getScale(i + 1) + ")");
            }
            String typeName = meta.getColumnTypeName(i + 1);
            String className = meta.getColumnClassName(i + 1);
            switch (t) {
            case Types.INTEGER:
                Assert.assertEquals("INTEGER", typeName);
                Assert.assertEquals("java.lang.Integer", className);
                break;
            case Types.VARCHAR:
                Assert.assertEquals("VARCHAR", typeName);
                Assert.assertEquals("java.lang.String", className);
                break;
            case Types.SMALLINT:
                Assert.assertEquals("SMALLINT", typeName);
                Assert.assertEquals("java.lang.Short", className);
                break;
            case Types.TIMESTAMP:
                Assert.assertEquals("TIMESTAMP", typeName);
                Assert.assertEquals("java.sql.Timestamp", className);
                break;
            case Types.DECIMAL:
                Assert.assertEquals("DECIMAL", typeName);
                Assert.assertEquals("java.math.BigDecimal", className);
                break;
            default:
            }
        }
        if (precision != null) {
            int p = meta.getPrecision(i + 1);
            if (precision[i] != p) {
                fail("column precision " + i + " is " + p + " not " + precision[i]);
            }
        }
        if (scale != null) {
            int s = meta.getScale(i + 1);
            if (scale[i] != s) {
                fail("column scale " + i + " is " + s + " not " + scale[i]);
            }
        }

    }
}

From source file:org.agnitas.dao.impl.RecipientDaoImpl.java

/**
 * Load complete Subscriber-Data from DB. customerID must be set first for this method.
 *
 * @return Map with Key/Value-Pairs of customer data
 */
@Override
public CaseInsensitiveMap<Object> getCustomerDataFromDb(int companyID, int customerID) {
    String aName = null;
    String aValue = null;
    int a;
    java.sql.Timestamp aTime = null;
    Recipient cust = (Recipient) applicationContext.getBean("Recipient");

    if (cust.getCustParameters() == null) {
        cust.setCustParameters(new CaseInsensitiveMap<Object>());
    }

    String getCust = "SELECT * FROM customer_" + companyID + "_tbl WHERE customer_id=" + customerID;

    if (cust.getCustDBStructure() == null) {
        cust.loadCustDBStructure();
    }

    DataSource ds = (DataSource) this.applicationContext.getBean("dataSource");
    Connection con = DataSourceUtils.getConnection(ds);

    try {
        Statement stmt = con.createStatement();
        ResultSet rset = stmt.executeQuery(getCust);

        if (logger.isInfoEnabled()) {
            logger.info("getCustomerDataFromDb: " + getCust);
        }

        if (rset.next()) {
            ResultSetMetaData aMeta = rset.getMetaData();

            for (a = 1; a <= aMeta.getColumnCount(); a++) {
                aValue = null;
                aName = aMeta.getColumnName(a).toLowerCase();
                switch (aMeta.getColumnType(a)) {
                case java.sql.Types.TIMESTAMP:
                case java.sql.Types.TIME:
                case java.sql.Types.DATE:
                    try {
                        aTime = rset.getTimestamp(a);
                    } catch (Exception e) {
                        aTime = null;
                    }
                    if (aTime == null) {
                        cust.getCustParameters().put(aName + "_DAY_DATE", "");
                        cust.getCustParameters().put(aName + "_MONTH_DATE", "");
                        cust.getCustParameters().put(aName + "_YEAR_DATE", "");
                        cust.getCustParameters().put(aName + "_HOUR_DATE", "");
                        cust.getCustParameters().put(aName + "_MINUTE_DATE", "");
                        cust.getCustParameters().put(aName + "_SECOND_DATE", "");
                        cust.getCustParameters().put(aName, "");
                    } else {
                        GregorianCalendar aCal = new GregorianCalendar();
                        aCal.setTime(aTime);
                        cust.getCustParameters().put(aName + "_DAY_DATE",
                                Integer.toString(aCal.get(GregorianCalendar.DAY_OF_MONTH)));
                        cust.getCustParameters().put(aName + "_MONTH_DATE",
                                Integer.toString(aCal.get(GregorianCalendar.MONTH) + 1));
                        cust.getCustParameters().put(aName + "_YEAR_DATE",
                                Integer.toString(aCal.get(GregorianCalendar.YEAR)));
                        cust.getCustParameters().put(aName + "_HOUR_DATE",
                                Integer.toString(aCal.get(GregorianCalendar.HOUR_OF_DAY)));
                        cust.getCustParameters().put(aName + "_MINUTE_DATE",
                                Integer.toString(aCal.get(GregorianCalendar.MINUTE)));
                        cust.getCustParameters().put(aName + "_SECOND_DATE",
                                Integer.toString(aCal.get(GregorianCalendar.SECOND)));
                        SimpleDateFormat bdfmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                        cust.getCustParameters().put(aName, bdfmt.format(aCal.getTime()));
                    }
                    break;

                default:
                    aValue = rset.getString(a);
                    if (aValue == null) {
                        aValue = "";
                    }
                    cust.getCustParameters().put(aName, aValue);
                    break;
                }
            }
        }
        rset.close();
        stmt.close();

    } catch (Exception e) {
        logger.error("getCustomerDataFromDb: " + getCust, e);
        AgnUtils.sendExceptionMail("sql:" + getCust, e);
    }
    DataSourceUtils.releaseConnection(con, ds);
    cust.setChangeFlag(false);
    Map<String, Object> result = cust.getCustParameters();
    if (result instanceof CaseInsensitiveMap) {
        return (CaseInsensitiveMap<Object>) result;
    } else {
        return new CaseInsensitiveMap<Object>(result);
    }
}

From source file:org.jpos.qi.system.SQLQueryObject.java

@Override
public String toString() {
    try {
        Object res = DB.exec(db -> {
            StringBuilder sb = new StringBuilder("");
            for (int n = 0; n < queries.length; n++) {
                String query = queries[n];
                String title = titles[n];
                int mxrows = maxRows[n];
                sb.append(' ').append(title).append("\n\n");
                db.session().doWork(new Work() {
                    @Override
                    public void execute(Connection connection) throws SQLException {
                        PreparedStatement stmt = connection.prepareStatement(query);
                        ResultSet rs = stmt.executeQuery();
                        ResultSetMetaData md = rs.getMetaData();
                        int cols = md.getColumnCount();
                        String[] header = new String[cols];
                        int[] colsize = new int[cols];
                        for (int i = 1; i <= cols; i++) {
                            header[i - 1] = StringUtils.defaultIfEmpty(md.getColumnLabel(i),
                                    md.getColumnName(i));
                            colsize[i - 1] = header[i - 1].length();
                        }
                        int rows = 0;
                        String[][] out = new String[mxrows][cols];
                        while (rs.next() && rows < mxrows) {
                            for (int i = 1; i <= cols; i++) {
                                out[rows][i - 1] = rs.getString(i);
                                if (out[rows][i - 1] == null)
                                    out[rows][i - 1] = " ";
                                int l = out[rows][i - 1].length();
                                if (colsize[i - 1] < l)
                                    colsize[i - 1] = l;
                            }
                            rows++;
                        }
                        rs.close();
                        stmt.close();
                        StringBuilder sbSep = new StringBuilder(" ");
                        sb.append(' ');
                        for (int i = 1; i <= cols; i++) {
                            if (isNumericDataType(md.getColumnType(i)))
                                sb.append(StringUtils.leftPad(header[i - 1], colsize[i - 1]));
                            else
                                sb.append(StringUtils.rightPad(header[i - 1], colsize[i - 1]));
                            sbSep.append(StringUtils.repeat('-', colsize[i - 1]));
                            sb.append(' ');
                            sbSep.append(' ');
                        }
                        sb.append('\n');
                        sbSep.append('\n');
                        sb.append(sbSep);
                        for (int j = 0; j < rows; j++) {
                            sb.append(' ');
                            for (int i = 1; i <= cols; i++) {
                                if (isNumericDataType(md.getColumnType(i)))
                                    sb.append(StringUtils.leftPad(out[j][i - 1], colsize[i - 1]));
                                else
                                    sb.append(StringUtils.rightPad(out[j][i - 1], colsize[i - 1]));
                                sb.append(' ');
                            }
                            sb.append('\n');
                        }
                        sb.append(sbSep).append('\n');
                    }
                });
            }
            sb.append(" Last refreshed at ").append(new Date());
            return sb;
        });
        return res.toString();
    } catch (Exception e) {
        QI.getQI().getLog().error(e);
        return e.toString();
    }

}

From source file:org.alinous.plugin.mysql.MySQLDataSource.java

private List<Record> executeSelectSQL(Object connectionHandle, String sql) throws DataSourceException {
    Connection con = (Connection) connectionHandle;
    Statement stmt = null;
    List<Record> retList = new LinkedList<Record>();

    try {
        stmt = con.createStatement();
        stmt.execute(sql);

        ResultSet rs = stmt.getResultSet();
        ResultSetMetaData metaData = rs.getMetaData();
        while (rs.next()) {
            int cnt = metaData.getColumnCount();
            Record rec = new Record();
            for (int i = 0; i < cnt; i++) {
                String colName = metaData.getColumnName(i + 1);

                String labelName = metaData.getColumnLabel(i + 1);
                if (labelName != null && !labelName.equals("")) {
                    colName = labelName;
                }

                String value = rs.getString(i + 1);

                int colType = metaData.getColumnType(i + 1);
                rec.addFieldValue(colName, value, colType);
            }

            retList.add(rec);
        }

    } catch (SQLException e) {
        throw new DataSourceException(e);
    } finally {
        try {
            stmt.close();
        } catch (SQLException ignore) {
        }
    }

    return retList;
}

From source file:org.apache.ctakes.ytex.uima.mapper.DocumentMapperServiceImpl.java

public void initDocKeyMapping() {
    AbstractEntityPersister cm = (AbstractEntityPersister) this.sessionFactory.getClassMetadata(Document.class);
    // figure out which columns are already mapped
    String[] propNames = cm.getPropertyNames();
    Set<String> mappedCols = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
    for (String prop : propNames) {
        String cols[] = cm.getPropertyColumnNames(prop);
        mappedCols.addAll(Arrays.asList(cols));
    }
    // this.formattedTableName = DBUtil.formatTableName(cm.getTableName());
    this.formattedTableName = cm.getTableName();
    log.info("document table name = " + formattedTableName);
    final String query = "select * from " + formattedTableName + " where 1=2";
    Connection conn = null;
    Statement stmt = null;
    ResultSet rs = null;
    try {
        conn = dataSource.getConnection();
        stmt = conn.createStatement();
        rs = stmt.executeQuery(query);
        ResultSetMetaData rsmd = rs.getMetaData();
        int nCols = rsmd.getColumnCount();
        for (int i = 1; i <= nCols; i++) {
            String colName = rsmd.getColumnName(i);
            if (!mappedCols.contains(colName)) {
                log.info("document candidate foreign key column: " + colName);
                docTableCols.put(colName, rsmd.getColumnType(i));
            }
        }
        if (log.isDebugEnabled()) {
            log.debug("docTableCols: " + docTableCols);
        }
    } catch (SQLException e) {
        log.error("problem determining document table fields", e);
        throw new RuntimeException(e);
    } finally {
        try {
            if (rs != null)
                rs.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        try {
            if (stmt != null)
                stmt.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        try {
            if (conn != null)
                conn.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }

    }
}