Example usage for java.sql ResultSetMetaData getTableName

List of usage examples for java.sql ResultSetMetaData getTableName

Introduction

On this page you can find example usage for java.sql ResultSetMetaData getTableName.

Prototype

String getTableName(int column) throws SQLException;

Document

Gets the designated column's table name.
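
Before the full examples below, here is a minimal sketch of calling getTableName on each column of a query result. The DataSource and the customer table are assumptions made purely for illustration, and note that some drivers (PostgreSQL, for instance) may return an empty string instead of the table name:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

import javax.sql.DataSource;

public class GetTableNameExample {

    public static void printColumnTables(DataSource dataSource) throws SQLException {
        // try-with-resources closes the ResultSet, Statement and Connection
        try (Connection con = dataSource.getConnection();
                Statement stmt = con.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT id, name FROM customer")) {

            ResultSetMetaData meta = rs.getMetaData();

            // JDBC column indexes are 1-based
            for (int col = 1; col <= meta.getColumnCount(); col++) {
                System.out.println(meta.getColumnName(col) + " -> table: " + meta.getTableName(col));
            }
        }
    }
}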

Usage

From source file:org.kawanfw.sql.servlet.sql.ResultSetWriter.java

/**
 * Code extracted and modified from the O'Reilly JDBC book in French.
 * Process the ResultSet and print it on the outPutStream <br>
 * - Each row is a line of a List of column values <br>
 * 
 * @param resultSet
 *            the Result Set to process and print on the output stream
 * @param br
 *            the writer where to redirect the result set content, one Json
 *            line per rs.next();
 * 
 */
public void write(ResultSet resultSet) throws SQLException, IOException {
    try {
        if (resultSet == null) {
            throw new SQLException("ResultSet is null!");
        }

        String productName = getDatabaseProductName(resultSet);
        isTerradata = productName.equals(SqlUtil.TERADATA);
        isPostgreSQL = productName.equals(SqlUtil.POSTGRESQL);

        ResultSetMetaData meta = resultSet.getMetaData();
        int cols = meta.getColumnCount();

        @SuppressWarnings("unused")
        int row_count = 0;

        List<Integer> columnTypeList = new Vector<Integer>();
        List<String> columnTypeNameList = new Vector<String>();
        List<String> columnNameList = new Vector<String>();
        List<String> columnTableList = new Vector<String>();

        // Loop on Columns
        for (int i = 1; i <= cols; i++) {
            columnTypeList.add(meta.getColumnType(i));
            columnNameList.add(meta.getColumnName(i).toLowerCase());
            columnTypeNameList.add(meta.getColumnTypeName(i));

            if (isPostgreSQL) {
                columnTableList.add(PostgreSqlUtil.getTableName(resultSet, i));
            } else {
                columnTableList.add(meta.getTableName(i));
            }

            debug("");
            debug("meta.getColumnType(" + i + ")    : " + meta.getColumnType(i));
            debug("meta.getColumnTypeName(" + i + "): " + meta.getColumnTypeName(i));
            debug("meta.getColumnName(" + i + ")    : " + meta.getColumnName(i));
            debug("meta.getTableName(" + i + ")     : " + meta.getTableName(i));
        }

        // Ok, dump the column Map<String, Integer> == (Column name, column
        // pos starting at 0)
        Map<String, Integer> mapColumnNames = new LinkedHashMap<String, Integer>();

        for (int i = 0; i < columnNameList.size(); i++) {
            mapColumnNames.put(columnNameList.get(i), i);
        }

        String jsonString = JsonColPosition.toJson(mapColumnNames);

        debug("JsonColPosition.toJson(mapColumnNames) jsonString" + jsonString);

        boolean doEncryptResultSet = SqlConfiguratorCall.encryptResultSet(sqlConfigurator);

        // Maybe encryption asked
        if (doEncryptResultSet) {
            jsonString = JsonLineEncrypter.encrypt(jsonString, commonsConfigurator);
        }

        //br.write(jsonString + CR_LF);
        ServerSqlManager.writeLine(out, jsonString);

        // Loop result Set
        while (resultSet.next()) {
            row_count++;

            // The List of column values for one row
            List<String> values = new Vector<String>();

            for (int i = 0; i < columnTypeList.size(); i++) {
                int columnIndex = i + 1;
                int columnType = columnTypeList.get(i);
                String columnName = columnNameList.get(i);
                String columnTable = columnTableList.get(i);

                debug("");
                debug("columnIndex: " + columnIndex);
                debug("columnType : " + columnType);
                debug("columnName : " + columnName);
                debug("columnTable: " + columnTable);

                Object columnValue = null;
                String columnValueStr = null;

                if (isBinaryColumn(resultSet, columnType, columnName, columnTable)) {
                    columnValueStr = formatBinaryColumn(resultSet, columnIndex, columnType, columnName,
                            columnTable);
                } else if (isNStringColumn(columnType)) {
                    columnValueStr = resultSet.getNString(columnIndex);
                    columnValueStr = HtmlConverter.toHtml(columnValueStr);
                } else if (isClobColumn(columnType)) {
                    columnValueStr = formatClobColumn(resultSet, columnIndex);
                } else if (columnType == Types.ARRAY) {
                    columnValueStr = formatArrayColumn(resultSet, columnIndex);
                } else if (columnType == Types.ROWID) {
                    columnValueStr = formatRowIdColumn(resultSet, columnIndex);
                } else {
                    try {
                        columnValue = resultSet.getObject(columnIndex);
                        debug("columnValue: " + columnValue);

                    } catch (Exception e) {
                        // int intValue = resultSet.getInt(columnName);
                        debug("Exception     : " + e.toString());
                        debug("columnType    : " + columnType);
                        debug("columnTypeName: " + columnTypeNameList.get(i));
                        debug("columnName    : " + columnName);
                        throw new SQLException(columnType + "Type/TypeName/ColName " + columnTypeNameList.get(i)
                                + " " + columnName, e);
                    }

                    if (resultSet.wasNull()) {
                        columnValueStr = "NULL";
                    } else if (columnValue == null) {
                        columnValueStr = null;
                    } else {
                        columnValueStr = columnValue.toString();
                    }

                    debug("columnValueStr : " + columnValueStr);

                    // Case we - maybe - have an URL:
                    columnValueStr = urlFormater(resultSet, columnIndex, columnValueStr);

                    if (isCharacterType(columnType)) {
                        debugStringType(columnValueStr);
                        columnValueStr = HtmlConverter.toHtml(columnValueStr);
                    }
                }

                // Add the value to the list of values:
                values.add(columnValueStr);
            }

            jsonString = StringListTransport.toJson(values);
            debug("ResultSetLineTransport.toJson(values) jsonString" + jsonString);

            // Maybe encryption asked
            if (doEncryptResultSet) {
                jsonString = JsonLineEncrypter.encrypt(jsonString, commonsConfigurator);
            }

            //br.write(jsonString + CR_LF);
            ServerSqlManager.writeLine(out, jsonString);

        }

        // Maybe we send also the ResultSet meta data, if asked by client
        // side

        if (JoinResultSetMetaData) {
            //br.write(FileSplitSeparatorLine.RESULT_SET_GET_METADATA_SEP
            //   + CR_LF);
            ServerSqlManager.writeLine(out, FileSplitSeparatorLine.RESULT_SET_GET_METADATA_SEP);

            ResultSetMetaDataWriter resultSetMetaDataWriter = new ResultSetMetaDataWriter(out,
                    commonsConfigurator, sqlConfigurator);
            resultSetMetaDataWriter.write(resultSet);
        }

    } finally {
        // NO! resultSet.close();
        // NO! IOUtils.closeQuietly(br);
    }
}

From source file:com.streamsets.pipeline.lib.jdbc.JdbcUtil.java

public void setColumnSpecificHeaders(Record record, Set<String> knownTableNames, ResultSetMetaData metaData,
        String jdbcNameSpacePrefix) throws SQLException {
    Record.Header header = record.getHeader();
    Set<String> tableNames = new HashSet<>();
    for (int i = 1; i <= metaData.getColumnCount(); i++) {
        header.setAttribute(jdbcNameSpacePrefix + metaData.getColumnLabel(i) + ".jdbcType",
                String.valueOf(metaData.getColumnType(i)));

        // Additional headers per various types
        switch (metaData.getColumnType(i)) {
        case Types.DECIMAL:
        case Types.NUMERIC:
            header.setAttribute(jdbcNameSpacePrefix + metaData.getColumnLabel(i) + ".scale",
                    String.valueOf(metaData.getScale(i)));
            header.setAttribute(jdbcNameSpacePrefix + metaData.getColumnLabel(i) + ".precision",
                    String.valueOf(metaData.getPrecision(i)));
            break;
        }

        String tableName = metaData.getTableName(i);

        // Store the column's table name (if not empty)
        if (StringUtils.isNotEmpty(tableName)) {
            tableNames.add(tableName);
        }
    }

    if (tableNames.isEmpty()) {
        tableNames.addAll(knownTableNames);
    }

    header.setAttribute(jdbcNameSpacePrefix + "tables", Joiner.on(",").join(tableNames));
}

From source file:org.fastcatsearch.datasource.reader.DBReader.java

@Override
public SchemaSetting getAutoGeneratedSchemaSetting() {
    Map<String, String> properties = singleSourceConfig.getProperties();
    String jdbcSourceId = properties.get("jdbcSourceId");
    String dataSQL = properties.get("dataSQL");
    IRService service = ServiceManager.getInstance().getService(IRService.class);
    Connection con = null;
    PreparedStatement pst = null;
    ResultSet res = null;
    ResultSetMetaData meta = null;
    try {
        JDBCSourceInfo jdbcInfo = service.getJDBCSourceInfo(jdbcSourceId);
        if (jdbcInfo != null) {
            con = getConnection(jdbcInfo);
        }
        logger.trace("get jdbc connection : {}", con);

        if (con != null) {
            logger.trace("executing sql :{}", dataSQL);
            pst = con.prepareStatement(dataSQL);
            pst.setFetchSize(1);
            pst.setMaxRows(1);
            res = pst.executeQuery();
            res.next();
            meta = res.getMetaData();

            SchemaSetting setting = new SchemaSetting();
            PrimaryKeySetting primaryKeySetting = new PrimaryKeySetting();
            List<FieldSetting> fieldSettingList = new ArrayList<FieldSetting>();
            List<AnalyzerSetting> analyzerSetting = new ArrayList<AnalyzerSetting>();
            List<GroupIndexSetting> groupIndexSetting = new ArrayList<GroupIndexSetting>();
            List<IndexSetting> indexSetting = new ArrayList<IndexSetting>();
            List<FieldIndexSetting> fieldIndexSetting = new ArrayList<FieldIndexSetting>();

            logger.trace("columnCount:{}", meta.getColumnCount());

            String tableName = null;

            for (int inx = 0; inx < meta.getColumnCount(); inx++) {
                if (tableName == null) {
                    tableName = meta.getTableName(inx + 1);
                }
                FieldSetting field = new FieldSetting();
                Type type = null;
                int size = 0;
                switch (meta.getColumnType(inx + 1)) {
                case Types.INTEGER:
                case Types.TINYINT:
                case Types.SMALLINT:
                case Types.NUMERIC:
                    type = Type.INT;
                    break;
                case Types.BIGINT:
                    type = Type.LONG;
                    break;
                case Types.FLOAT:
                    type = Type.FLOAT;
                    break;
                case Types.DOUBLE:
                    type = Type.DOUBLE;
                    break;
                case Types.DATE:
                case Types.TIME:
                case Types.TIMESTAMP:
                    type = Type.DATETIME;
                    break;
                case Types.CHAR:
                case Types.VARCHAR:
                case Types.LONGVARCHAR:
                    type = Type.STRING;
                    break;
                default:
                    type = Type.STRING;
                    break;
                }
                field.setId(meta.getColumnLabel(inx + 1));
                field.setName(field.getId());
                field.setType(type);
                field.setSize(size);
                logger.trace("field add {}", field);
                fieldSettingList.add(field);
            }

            setting.setFieldSettingList(fieldSettingList);
            setting.setPrimaryKeySetting(primaryKeySetting);
            setting.setFieldIndexSettingList(fieldIndexSetting);
            setting.setAnalyzerSettingList(analyzerSetting);
            setting.setGroupIndexSettingList(groupIndexSetting);
            setting.setIndexSettingList(indexSetting);

            return setting;
        }
    } catch (IRException e) {
        logger.error("", e);
    } catch (SQLException e) {
        logger.error("", e);
    } finally {
        if (res != null)
            try {
                res.close();
            } catch (SQLException ignore) {
            }
        if (pst != null)
            try {
                pst.close();
            } catch (SQLException ignore) {
            }
        if (con != null)
            try {
                con.close();
            } catch (SQLException ignore) {
            }
    }
    return null;
}

From source file:solidbase.core.plugins.DumpJSON.java

public boolean execute(CommandProcessor processor, Command command, boolean skip) throws SQLException {
    if (!triggerPattern.matcher(command.getCommand()).matches())
        return false;

    if (command.isTransient()) {
        /* DUMP JSON DATE_CREATED ON | OFF */

        SQLTokenizer tokenizer = new SQLTokenizer(
                SourceReaders.forString(command.getCommand(), command.getLocation()));

        // TODO Maybe DUMP JSON CONFIG or DUMP JSON SET
        // TODO What about other configuration settings?
        tokenizer.get("DUMP");
        tokenizer.get("JSON");
        tokenizer.get("DATE_CREATED"); // FIXME This should be CREATED_DATE
        Token t = tokenizer.get("ON", "OFF");
        tokenizer.get((String) null);

        // TODO I think we should have a scope that is restricted to the current file and a scope that gets inherited when running or including another file.
        AbstractScope scope = processor.getContext().getScope();
        scope.set("solidbase.dump_json.dateCreated", t.eq("ON")); // TODO Make this a constant

        return true;
    }

    if (skip)
        return true;

    Parsed parsed = parse(command);

    AbstractScope scope = processor.getContext().getScope();
    Object object = scope.get("solidbase.dump_json.dateCreated");
    boolean dateCreated = object == null || object instanceof Boolean && (Boolean) object;

    Resource jsvResource = new FileResource(new File(parsed.fileName)); // Relative to current folder

    try {
        OutputStream out = jsvResource.getOutputStream();
        if (parsed.gzip)
            out = new BufferedOutputStream(new GZIPOutputStream(out, 65536), 65536); // TODO Ctrl-C, close the outputstream?

        JSONWriter jsonWriter = new JSONWriter(out);
        try {
            Statement statement = processor.createStatement();
            try {
                ResultSet result = statement.executeQuery(parsed.query);
                ResultSetMetaData metaData = result.getMetaData();

                // Define locals

                int columns = metaData.getColumnCount();
                int[] types = new int[columns];
                String[] names = new String[columns];
                boolean[] ignore = new boolean[columns];
                FileSpec[] fileSpecs = new FileSpec[columns];
                String schemaNames[] = new String[columns];
                String tableNames[] = new String[columns];

                // Analyze metadata

                for (int i = 0; i < columns; i++) {
                    int col = i + 1;
                    String name = metaData.getColumnName(col).toUpperCase();
                    types[i] = metaData.getColumnType(col);
                    if (types[i] == Types.DATE && parsed.dateAsTimestamp)
                        types[i] = Types.TIMESTAMP;
                    names[i] = name;
                    if (parsed.columns != null) {
                        ColumnSpec columnSpec = parsed.columns.get(name);
                        if (columnSpec != null)
                            if (columnSpec.skip)
                                ignore[i] = true;
                            else
                                fileSpecs[i] = columnSpec.toFile;
                    }
                    if (parsed.coalesce != null && parsed.coalesce.notFirst(name))
                        ignore[i] = true;
                    // TODO STRUCT serialize
                    // TODO This must be optional and not the default
                    else if (types[i] == 2002 || JDBCSupport.toTypeName(types[i]) == null)
                        ignore[i] = true;
                    tableNames[i] = StringUtils
                            .upperCase(StringUtils.defaultIfEmpty(metaData.getTableName(col), null));
                    schemaNames[i] = StringUtils
                            .upperCase(StringUtils.defaultIfEmpty(metaData.getSchemaName(col), null));
                }

                if (parsed.coalesce != null)
                    parsed.coalesce.bind(names);

                // Write header

                JSONObject properties = new JSONObject();
                properties.set("version", "1.0");
                properties.set("format", "record-stream");
                properties.set("description", "SolidBase JSON Data Dump File");
                properties.set("createdBy", new JSONObject("product", "SolidBase", "version", "2.0.0"));

                if (dateCreated) {
                    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    properties.set("createdDate", format.format(new Date()));
                }

                if (parsed.binaryFileName != null) {
                    // TODO FIXME Should be wrapped in a SourceException: solidbase.solidstack.io.FatalURISyntaxException: java.net.URISyntaxException: Illegal character in path at index 1: &{folder}/JIADHOCCH
                    Resource binResource = Resources.getResource(parsed.binaryFileName);
                    Resource resource = Resources.getResource(parsed.fileName);
                    properties.set("binaryFile", binResource.getPathFrom(resource).toString());
                }

                JSONArray fields = new JSONArray();
                properties.set("fields", fields);
                for (int i = 0; i < columns; i++)
                    if (!ignore[i]) {
                        JSONObject field = new JSONObject();
                        field.set("schemaName", schemaNames[i]);
                        field.set("tableName", tableNames[i]);
                        field.set("name", names[i]);
                        field.set("type", JDBCSupport.toTypeName(types[i])); // TODO Better error message when type is not recognized, for example Oracle's 2007 for a user type
                        FileSpec spec = fileSpecs[i];
                        if (spec != null && !spec.generator.isDynamic()) {
                            Resource fileResource = new FileResource(spec.generator.fileName);
                            field.set("file", fileResource.getPathFrom(jsvResource).toString());
                        }
                        fields.add(field);
                    }

                FileSpec binaryFile = parsed.binaryFileName != null
                        ? new FileSpec(true, parsed.binaryFileName, 0)
                        : null;

                jsonWriter.writeFormatted(properties, 120);
                jsonWriter.getWriter().write('\n');

                Counter counter = null;
                if (parsed.logRecords > 0)
                    counter = new FixedCounter(parsed.logRecords);
                else if (parsed.logSeconds > 0)
                    counter = new TimedCounter(parsed.logSeconds);

                try {
                    while (result.next()) {
                        Object[] values = new Object[columns];
                        for (int i = 0; i < values.length; i++)
                            values[i] = JDBCSupport.getValue(result, types, i);

                        if (parsed.coalesce != null)
                            parsed.coalesce.coalesce(values);

                        JSONArray array = new JSONArray();
                        for (int i = 0; i < columns; i++)
                            if (!ignore[i]) {
                                Object value = values[i];
                                if (value == null) {
                                    array.add(null);
                                    continue;
                                }

                                // TODO 2 columns can't be written to the same dynamic filename

                                FileSpec spec = fileSpecs[i];
                                if (spec != null) // The column is redirected to its own file
                                {
                                    String relFileName = null;
                                    int startIndex;
                                    if (spec.binary) {
                                        if (spec.generator.isDynamic()) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.out = fileResource.getOutputStream();
                                            spec.index = 0;
                                            relFileName = fileResource.getPathFrom(jsvResource).toString();
                                        } else if (spec.out == null) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.out = fileResource.getOutputStream();
                                        }
                                        if (value instanceof Blob) {
                                            InputStream in = ((Blob) value).getBinaryStream();
                                            startIndex = spec.index;
                                            byte[] buf = new byte[4096];
                                            for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                                spec.out.write(buf, 0, read);
                                                spec.index += read;
                                            }
                                            in.close();
                                        } else if (value instanceof byte[]) {
                                            startIndex = spec.index;
                                            spec.out.write((byte[]) value);
                                            spec.index += ((byte[]) value).length;
                                        } else
                                            throw new SourceException(names[i] + " ("
                                                    + value.getClass().getName()
                                                    + ") is not a binary column. Only binary columns like BLOB, RAW, BINARY VARYING can be written to a binary file",
                                                    command.getLocation());
                                        if (spec.generator.isDynamic()) {
                                            spec.out.close();
                                            JSONObject ref = new JSONObject();
                                            ref.set("file", relFileName);
                                            ref.set("size", spec.index - startIndex);
                                            array.add(ref);
                                        } else {
                                            JSONObject ref = new JSONObject();
                                            ref.set("index", startIndex);
                                            ref.set("length", spec.index - startIndex);
                                            array.add(ref);
                                        }
                                    } else {
                                        if (spec.generator.isDynamic()) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.writer = new DeferringWriter(spec.threshold, fileResource,
                                                    jsonWriter.getEncoding());
                                            spec.index = 0;
                                            relFileName = fileResource.getPathFrom(jsvResource).toString();
                                        } else if (spec.writer == null) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.writer = new OutputStreamWriter(fileResource.getOutputStream(),
                                                    jsonWriter.getEncoding());
                                        }
                                        if (value instanceof Blob || value instanceof byte[])
                                            throw new SourceException(names[i]
                                                    + " is a binary column. Binary columns like BLOB, RAW, BINARY VARYING cannot be written to a text file",
                                                    command.getLocation());
                                        if (value instanceof Clob) {
                                            Reader in = ((Clob) value).getCharacterStream();
                                            startIndex = spec.index;
                                            char[] buf = new char[4096];
                                            for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                                spec.writer.write(buf, 0, read);
                                                spec.index += read;
                                            }
                                            in.close();
                                        } else {
                                            String val = value.toString();
                                            startIndex = spec.index;
                                            spec.writer.write(val);
                                            spec.index += val.length();
                                        }
                                        if (spec.generator.isDynamic()) {
                                            DeferringWriter writer = (DeferringWriter) spec.writer;
                                            if (writer.isBuffered())
                                                array.add(writer.clearBuffer());
                                            else {
                                                JSONObject ref = new JSONObject();
                                                ref.set("file", relFileName);
                                                ref.set("size", spec.index - startIndex);
                                                array.add(ref);
                                            }
                                            writer.close();
                                        } else {
                                            JSONObject ref = new JSONObject();
                                            ref.set("index", startIndex);
                                            ref.set("length", spec.index - startIndex);
                                            array.add(ref);
                                        }
                                    }
                                } else if (value instanceof Clob)
                                    array.add(((Clob) value).getCharacterStream());
                                else if (binaryFile != null
                                        && (value instanceof Blob || value instanceof byte[])) {
                                    if (binaryFile.out == null) {
                                        String fileName = binaryFile.generator.generateFileName(null);
                                        Resource fileResource = new FileResource(fileName);
                                        binaryFile.out = fileResource.getOutputStream();
                                        if (parsed.binaryGzip)
                                            binaryFile.out = new BufferedOutputStream(
                                                    new GZIPOutputStream(binaryFile.out, 65536), 65536); // TODO Ctrl-C, close the outputstream?
                                    }
                                    int startIndex = binaryFile.index;
                                    if (value instanceof Blob) {
                                        InputStream in = ((Blob) value).getBinaryStream();
                                        byte[] buf = new byte[4096];
                                        for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                            binaryFile.out.write(buf, 0, read);
                                            binaryFile.index += read;
                                        }
                                        in.close();
                                    } else {
                                        binaryFile.out.write((byte[]) value);
                                        binaryFile.index += ((byte[]) value).length;
                                    }
                                    JSONObject ref = new JSONObject();
                                    ref.set("index", startIndex);
                                    ref.set("length", binaryFile.index - startIndex);
                                    array.add(ref);
                                } else
                                    array.add(value);
                            }

                        for (ListIterator<Object> i = array.iterator(); i.hasNext();) {
                            Object value = i.next();
                            if (value instanceof java.sql.Date || value instanceof java.sql.Time
                                    || value instanceof java.sql.Timestamp || value instanceof java.sql.RowId)
                                i.set(value.toString());
                        }
                        jsonWriter.write(array);
                        jsonWriter.getWriter().write('\n');

                        if (counter != null && counter.next())
                            processor.getProgressListener()
                                    .println("Exported " + counter.total() + " records.");
                    }
                    if (counter != null && counter.needFinal())
                        processor.getProgressListener().println("Exported " + counter.total() + " records.");
                } finally {
                    // Close files that have been left open
                    for (FileSpec fileSpec : fileSpecs)
                        if (fileSpec != null) {
                            if (fileSpec.out != null)
                                fileSpec.out.close();
                            if (fileSpec.writer != null)
                                fileSpec.writer.close();
                        }
                    if (binaryFile != null && binaryFile.out != null)
                        binaryFile.out.close();
                }
            } finally {
                processor.closeStatement(statement, true);
            }
        } finally {
            jsonWriter.close();
        }
    } catch (IOException e) {
        throw new SystemException(e);
    }

    return true;
}

From source file:it.fub.jardin.server.DbUtils.java

public Integer removeObjects(final Integer resultsetId, final List<BaseModelData> records, String username)
        throws HiddenException, VisibleException {

    int resCode = 0;

    Connection connection = this.dbConnectionHandler.getConn();

    String query = "", appChiavePrimaria = "";
    PreparedStatement ps = null;
    try {

        ResultSetMetaData metadata = this.dbProperties.getResultsetMetadata(connection, resultsetId);
        String tableName = metadata.getTableName(1);
        // Loop to handle multiple deletions in the same invocation
        List<BaseModelData> primaryKeyList = this.dbProperties.getPrimaryKeys(tableName);
        if (primaryKeyList.size() <= 0) {
            throw new HiddenException("La tabella non contiene chiavi primarie: impossibile operare!");
        }
        for (BaseModelData record : records) {
            query = "";
            // Prepare the query
            query = query.concat("DELETE FROM `" + tableName + "` WHERE `");

            String separator = "AND `";
            for (BaseModelData pk : primaryKeyList) {
                appChiavePrimaria = pk.get("PK_NAME");
                query = query.concat(appChiavePrimaria);
                if (record.get(appChiavePrimaria) == null) {
                    query = query.concat("` IS NULL ");
                } else {
                    query = query.concat("` = ? ");
                }

                query = query.concat(separator);
            }

            query = query.substring(0, query.length() - separator.length());

            ps = (PreparedStatement) connection.prepareStatement(query);
            int i = 1;
            // for (String property : record.getPropertyNames()) {
            for (BaseModelData pk : primaryKeyList) {
                ps.setObject(i, record.get((String) pk.get("PK_NAME")));
                i++;
            }

            // Log.debug("Query DELETE: " + ps);
            int num = ps.executeUpdate();
            if (num > 0) {
                // this.log("DELETE (" + ps.toString() + ")");
                JardinLogger.debug(username, "DELETE query (" + ps.toString() + ")");
            }
            resCode += num;
        }
    } catch (MySQLIntegrityConstraintViolationException ecv) {
        ecv.printStackTrace();

        throw new HiddenException("Errore durante l'eliminazione dei record: " + ecv.getLocalizedMessage());
    } catch (Exception e) {
        // Log.warn("Errore SQL", e);
        e.printStackTrace();
        throw new HiddenException("Errore durante l'eliminazione dei record: " + e.getLocalizedMessage());
    } finally {
        // JardinLogger.info("Objects removed");
        this.dbConnectionHandler.closeConn(connection);
    }

    return Integer.valueOf(resCode);
}

From source file:it.fub.jardin.server.DbUtils.java

public int setObjects(final Integer resultsetId, final List<BaseModelData> records, String username)
        throws HiddenException {

    int result = 0;
    Connection connection = this.dbConnectionHandler.getConn();
    final String sep = ",";

    String tableName = null;
    // String set = "";
    try {
        ResultSetMetaData metadata = this.dbProperties.getResultsetMetadata(connection, resultsetId);
        tableName = metadata.getTableName(1);
        connection.setAutoCommit(false);
        for (BaseModelData record : records) {
            String set = "";
            int columns = record.getPropertyNames().size();
            for (String property : record.getPropertyNames()) {
                set += "`" + property + "`=?" + sep;
            }
            set = set.substring(0, set.length() - sep.length());

            // String query =
            // "INSERT INTO `" + tableName + "` SET " + set
            // + " ON DUPLICATE KEY UPDATE " + set;
            String query = "INSERT INTO `" + tableName + "` SET " + set;

            PreparedStatement ps = (PreparedStatement) connection.prepareStatement(query);
            int i = 1;
            for (String property : record.getPropertyNames()) {
                Object value = record.get(property);
                if ((value != null) && (String.valueOf(value).length() > 0)) {
                    ps.setObject(i, record.get(property));
                    // ps.setObject(i + columns, record.get(property));
                } else {
                    ps.setNull(i, java.sql.Types.NULL);
                    // ps.setNull(i + columns, java.sql.Types.NULL);
                }
                i++;
            }
            // System.out.println(ps.toString());
            int num = ps.executeUpdate();
            if (num > 0) {
                String toLog = "INSERT (" + ps.toString() + ")";
                // Log.debug(toLog);
                JardinLogger.debug(username, toLog);
            }
            result += num;
        }
        connection.commit();
        connection.setAutoCommit(true);
    } catch (MySQLIntegrityConstraintViolationException ex) {
        try {
            connection.rollback();
        } catch (Exception e) {
            // TODO Auto-generated catch block
            JardinLogger.debug(username, "Errore SQL: impossibile eseguire rollback transazione");
            e.printStackTrace();
        }
        String message = ex.getLocalizedMessage();
        String newMess = "";
        // Log.warn("Errore SQL", ex);
        if (ex.getErrorCode() == 1062) {
            // updateObjects(resultsetId, records);
            newMess = newMess.concat(ex.getErrorCode() + " - Errore!!! \n PRIMARY KEY DUPLICATA :\n" + message);
        } else if (ex.getErrorCode() == 1048) {
            newMess = newMess
                    .concat(ex.getErrorCode() + " - Errore!!! \n VINCOLO DI INTEGRITA' VIOLATO :\n" + message);
        } else if (ex.getErrorCode() == 1452) {
            newMess = newMess
                    .concat(ex.getErrorCode() + " - Errore!!! \n VINCOLO DI FOREIGN KEY VIOLATO :\n" + message);
        } else {
            newMess = ex.getErrorCode() + " - Errore!!! \n Problemi sui dati da salvare :\n" + message;
        }
        JardinLogger.debug(username, "Errore SQL: " + newMess);
        throw new HiddenException(newMess);

    } catch (Exception e) {
        try {
            JardinLogger.error(username, "Errore SQL: impossibile eseguire rollback transazione");
            connection.rollback();
        } catch (Exception e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
        // Log.warn("Errore SQL", e);
        throw new HiddenException("Errore durante il salvataggio delle modifiche:\n" + e.getLocalizedMessage());
    } finally {
        // JardinLogger.info("Records setted");
        this.dbConnectionHandler.closeConn(connection);
    }
    return result;
}

From source file:it.fub.jardin.server.DbUtils.java

public Integer updateObjects(final Integer resultsetId, final List<BaseModelData> newItemList,
        final String condition, String username) throws HiddenException {

    // JardinLogger.info("Updating records...");

    int result = 0;
    Connection connection = this.dbConnectionHandler.getConn();
    final String sep = ",";
    boolean defaultPrimaryKeys = condition.equalsIgnoreCase("$-notspec-$");

    try {
        ResultSetMetaData metadata = this.dbProperties.getResultsetMetadata(connection, resultsetId);
        String tableName = metadata.getTableName(1);

        // TODO Create an object to store column->value mappings
        List<BaseModelData> PKs = this.dbProperties.getResultsetPrimaryKeys(resultsetId);

        String PKset = "";
        connection.setAutoCommit(false);
        for (BaseModelData record : newItemList) {

            boolean conditionFounded = false;
            if (defaultPrimaryKeys) {
                conditionFounded = true;

                // update request coming from the grid or detail view
                for (BaseModelData pk : PKs) {
                    PKset += (String) pk.get("PK_NAME") + "=? AND ";
                }
                PKset = PKset.substring(0, PKset.length() - 5); // Strips
                // " AND "

            } else {
                PKset = condition + "=? ";
            }

            String set = "";
            Collection<String> properties = record.getPropertyNames();
            for (String property : properties) {
                if (property.equalsIgnoreCase(condition)) {
                    conditionFounded = true;
                } else {
                    set += "`" + property + "`=? " + sep;
                }
            }

            if (!conditionFounded) {
                throw new VisibleException("condizione di UPDATE non trovata nel file");
            }

            set = set.substring(0, set.length() - sep.length());

            String query = "UPDATE `" + tableName + "` SET " + set + " WHERE " + PKset;

            PreparedStatement ps = (PreparedStatement) connection.prepareStatement(query);
            int i = 1;

            /* Set prepared statement values for changing fields */
            for (String property : properties) {
                if (!property.equalsIgnoreCase(condition)) {
                    i += this.putJavaObjectInPs(ps, i, record.get(property));
                }
            }

            /* Set prepared statement values for where condition fields */
            if (defaultPrimaryKeys) {
                for (BaseModelData pk : PKs) {
                    Object value = record.get((String) pk.get("PK_NAME"));
                    i += this.putJavaObjectInPs(ps, i, value);
                }
            } else {
                Object value = record.get(condition);
                i += this.putJavaObjectInPs(ps, i, value);
            }

            // Log.debug("Query UPDATE: " + ps);
            JardinLogger.debug(username, "UPDATE (" + ps.toString() + ")");
            int num = ps.executeUpdate();
            if (num > 0) {
                JardinLogger.debug(username, "UPDATE (" + ps.toString() + ")");
            }
            result += num;
        }
        connection.commit();
        connection.setAutoCommit(true);
        // JardinLogger.info("Records updated");
    } catch (Exception e) {
        try {
            connection.rollback();
        } catch (Exception e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
        // Log.warn("Errore SQL", e);
        e.printStackTrace();
        throw new HiddenException("Errore durante l'aggiornamento del record:\n" + e.getLocalizedMessage());
    } finally {

        this.dbConnectionHandler.closeConn(connection);
    }
    return result;
}

From source file:it.fub.jardin.server.DbUtils.java

public Integer massiveUpdate(MassiveUpdateObject muo, String username)
        throws HiddenException, VisibleException {
    // TODO Auto-generated method stub
    Connection connection = this.dbConnectionHandler.getConn();

    int result = -1;
    String tableName = null;

    ResultSetMetaData metadata;
    String transQueries = "";
    try {
        metadata = this.dbProperties.getResultsetMetadata(connection, muo.getResultsetId());
        tableName = metadata.getTableName(1);
        connection.setAutoCommit(false);

        // String[] transQueries = null;

        // int i = 0;
        for (String pkValue : muo.getPrimaryKeyValues()) {
            transQueries = "UPDATE `" + tableName + "` SET ";
            BaseModelData newValues = muo.getNewValues();
            for (String tableField : newValues.getPropertyNames()) {
                transQueries += tableField + " = '" + newValues.get(tableField) + "', ";
            }
            transQueries = transQueries.substring(0, transQueries.length() - 2);

            transQueries += " WHERE " + muo.getFieldName() + " = '" + pkValue + "'; ";
            // i++;
            Statement stmt = connection.createStatement();
            // System.out.println("query update massivo: " + transQueries);
            JardinLogger.debug(username, "query update massivo: " + transQueries);
            stmt.executeUpdate(transQueries);
        }

        connection.commit();
        connection.setAutoCommit(true);
        connection.close();

        return 1;

    } catch (SQLException e) {
        // TODO Auto-generated catch block
        try {
            connection.rollback();
        } catch (SQLException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }

        JardinLogger.debug(username, "query update massivo: " + transQueries);
        e.printStackTrace();
        throw new VisibleException("Impossibile eseguire update massivo");
    } finally {
        try {
            connection.close();
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}

From source file:uk.ac.ed.epcc.webapp.model.data.Repository.java

/**
 * Use a ResultSet to populate the MetaData information
 *
 * @param rs
 * @throws SQLException
 * @throws ConsistencyError
 */
private void setMetaData(ResultSet rs) throws SQLException, ConsistencyError {
    assert (fields == null);
    fields = new LinkedHashMap<String, FieldInfo>();
    ResultSetMetaData meta_data = rs.getMetaData();
    int md_columns = meta_data.getColumnCount();
    boolean seen_key = false;
    // Logger log = ctx.getLogger(getClass());
    for (int i = 1; i <= md_columns; i++) {
        String returned_name = meta_data.getTableName(i);
        if (returned_name.length() > 0 && !returned_name.equalsIgnoreCase(table_name)) {
            throw new ConsistencyError("Table names do not match " + getTag() + "!=" + returned_name);
        }
        // if we don't know for sure assume no nulls
        boolean can_null = (meta_data.isNullable(i) == ResultSetMetaData.columnNullable);
        String name = meta_data.getColumnName(i);
        if (!seen_key && meta_data.isAutoIncrement(i)) {
            seen_key = true;
            id_name = name;
        } else {
            // log.debug("Metadata "+name+" "+meta_data.getColumnType(i));
            int columnType = meta_data.getColumnType(i);
            int columnDisplaySize = meta_data.getColumnDisplaySize(i);
            fields.put(dbFieldtoTag(name), new FieldInfo(name, columnType, columnDisplaySize, can_null));
        }
    }
    if (use_id && !seen_key) {
        // Note we need an up-to-date mysql driver for the isAutoIncrement
        // method to work properly. otherwise default to first col and hope
        id_name = meta_data.getColumnName(1);
        fields.remove(dbFieldtoTag(id_name));
    }
    // cache the qualified form as this is used frequently
    StringBuilder sb = new StringBuilder();
    sb.append(alias_name);
    sb.append(".");
    sb.append(id_name);
    qualified_id_name = sb.toString();
}

From source file:edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * @param rsmd
 * @param origList
 * @param map
 * @throws SQLException
 */
protected void buildIndexMapFromMetaData(final ResultSetMetaData rsmd, final List<String> origList,
        final Hashtable<String, Integer> map) throws SQLException {
    map.clear();

    StringBuilder sb = new StringBuilder();
    for (int i = 1; i <= rsmd.getColumnCount(); i++) {
        sb.setLength(0);
        String tableName = rsmd.getTableName(i);
        String fieldName = rsmd.getColumnName(i);

        if (isNotEmpty(tableName)) {
            sb.append(tableName);
        } else {
            for (String fullName : origList) {
                String[] parts = split(fullName, ".");
                if (parts[1].equals(fieldName)) {
                    sb.append(parts[0]);
                    break;
                }
            }
        }
        sb.append(".");
        sb.append(fieldName);
        // log.info("["+strBuf.toString()+"] "+i);
        map.put(sb.toString(), i);
    }
}