Example usage for java.sql ResultSet getObject

Introduction

This page collects usage examples for the java.sql.ResultSet.getObject method.

Prototype

Object getObject(String columnLabel) throws SQLException;

Documentation

Gets the value of the designated column in the current row of this ResultSet object as an Object in the Java programming language.
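
Before the project examples, here is a minimal, self-contained sketch of getObject in action. The in-memory H2 database URL and the users table are placeholders chosen for illustration; any JDBC driver behaves the same way.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class GetObjectExample {
    public static void main(String[] args) throws SQLException {
        // Any JDBC driver works the same way; an in-memory H2 database
        // ("jdbc:h2:mem:demo") is assumed here purely for illustration.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement st = con.createStatement()) {
            st.executeUpdate("CREATE TABLE users (id INT, name VARCHAR(64))");
            st.executeUpdate("INSERT INTO users VALUES (1, 'alice')");

            try (ResultSet rs = st.executeQuery("SELECT id, name FROM users")) {
                while (rs.next()) {
                    // By column label: the driver returns the most appropriate Java type.
                    Object name = rs.getObject("name"); // typically a String
                    // The overload taking a 1-based column index behaves the same way.
                    Object id = rs.getObject(1); // typically an Integer
                    System.out.println(id + " -> " + name);
                }
            }
        }
    }
}

Because the plain getObject call leaves the choice of Java type to the driver, most of the examples below either cast the result or check it with instanceof.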

Usage

From source file:com.krawler.workflow.module.dao.DataObjectRowMapper.java

public Object mapRow(ResultSet rs, int rowNumber) throws SQLException {
    Map<String, Object> mappedObject = new HashMap<String, Object>();

    ResultSetMetaData rsmd = rs.getMetaData();
    int columnCount = rsmd.getColumnCount();
    for (int index = 1; index <= columnCount; index++) {
        String column = columnMap.get(index);
        if (column == null) {
            column = JdbcUtils.lookupColumnName(rsmd, index).toLowerCase();
            columnMap.put(index, column);
        }
        Object value = rs.getObject(index);
        if (formatter != null && value != null && value instanceof Date)
            value = formatter.format(value);
        mappedObject.put(column, value);
    }

    return mappedObject;
}

From source file:com.streamsets.pipeline.stage.it.AllSdcTypesIT.java

@Test
public void testType() throws Exception {
    DecimalDefaultsConfig decimalDefaultsConfig = new DecimalDefaultsConfig();
    decimalDefaultsConfig.scaleExpression = "${record:attributeOrDefault(str:concat(str:concat('jdbc.', str:toUpper(field:field())), '.scale'), 2)}";
    decimalDefaultsConfig.precisionExpression = "${record:attributeOrDefault(str:concat(str:concat('jdbc.', str:toUpper(field:field())), '.precision'), 2)}";
    HiveMetadataProcessor processor = new HiveMetadataProcessorBuilder().decimalConfig(decimalDefaultsConfig)
            .build();
    HiveMetastoreTarget hiveTarget = new HiveMetastoreTargetBuilder().build();

    Map<String, Field> map = new LinkedHashMap<>();
    map.put("col", field);
    Record record = RecordCreator.create("s", "s:1");
    record.set(Field.create(map));
    record.getHeader().setAttribute("jdbc.COL.scale", "1");
    // Scale resolves to 1 from the header attribute; precision is not set, so it falls back to the default of 2.
    try {
        processRecords(processor, hiveTarget, ImmutableList.of(record));
        if (!supported) {
            Assert.fail("Type is not supported, but yet no exception was thrown");
        }
    } catch (StageException se) {
        if (supported) {
            LOG.error("Processing exception", se);
            Assert.fail("Processing testing record unexpectedly failed: " + se.getMessage());
            throw se;
        } else {
            Assert.assertEquals(Errors.HIVE_19, se.getErrorCode());
            // No additional verification necessary
            return;
        }
    }

    assertTableExists("default.tbl");
    assertQueryResult("select * from tbl", new QueryValidator() {
        @Override
        public void validateResultSet(ResultSet rs) throws Exception {
            assertResultSetStructure(rs, new ImmutablePair("tbl.col", hiveType),
                    new ImmutablePair("tbl.dt", Types.VARCHAR));

            Assert.assertTrue("Table tbl doesn't contain any rows", rs.next());
            if (hiveValue.getClass().isArray()) { // Only supported array is a byte array
                Assert.assertArrayEquals((byte[]) hiveValue, (byte[]) rs.getObject(1));
            } else {
                Assert.assertEquals(hiveValue, rs.getObject(1));
            }
            Assert.assertFalse("Table tbl contains more then one row", rs.next());
        }
    });
}

From source file:com.streamsets.pipeline.stage.it.AvroToParquetHiveIT.java

@Test
public void testAvroToParquetToHive() throws Exception {
    // This will be a string like "1.1.0"
    String hiveVersionString = HiveVersionInfo.getShortVersion();
    LOG.info("Detected Hive version: " + hiveVersionString);

    // We're doing fairly simple string comparison, but that should be fine for now
    Assume.assumeTrue("Incompatible Hive version, skipping test",
            ensureHiveVersion.compareTo(hiveVersionString) < 0);

    String inputDirectory = "/input/";
    String outputDirectory = "/output/";

    OutputStream outputStream = getDefaultFileSystem().create(new Path(inputDirectory, "file.avro"));

    Schema avroSchema = Schema.parse("{" + "\"type\": \"record\", " + "\"name\": \"RandomRecord\", "
            + "\"fields\": [" + "{\"name\": \"value\", \"type\": " + avroType + "}" + "]" + "}");

    DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(avroSchema);
    DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(writer);
    dataFileWriter.create(avroSchema, outputStream);

    GenericRecord datum = new GenericData.Record(avroSchema);
    datum.put("value", avroValue);
    dataFileWriter.append(datum);
    dataFileWriter.close();

    AvroParquetConfig conf = new AvroParquetConfig();
    conf.inputFile = inputDirectory + "file.avro";
    conf.outputDirectory = outputDirectory;

    MapReduceConfig mapReduceConfig = new MapReduceConfig();
    mapReduceConfig.mapReduceConfDir = getConfDir();
    mapReduceConfig.mapreduceConfigs = Collections.emptyMap();
    mapReduceConfig.mapreduceUser = "";
    mapReduceConfig.kerberos = false;

    JobConfig jobConfig = new JobConfig();
    jobConfig.jobType = JobType.AVRO_PARQUET;
    jobConfig.jobConfigs = Collections.emptyMap();
    jobConfig.jobName = "SDC Test Job";
    jobConfig.avroParquetConfig = conf;

    MapReduceExecutor executor = new MapReduceExecutor(mapReduceConfig, jobConfig);
    executor.waitForCompletition = true;

    TargetRunner runner = new TargetRunner.Builder(MapReduceDExecutor.class, executor)
            .setOnRecordError(OnRecordError.TO_ERROR).build();
    runner.runInit();

    Record record = RecordCreator.create();
    record.set(Field.create(Collections.<String, Field>emptyMap()));

    runner.runWrite(ImmutableList.of(record));
    Assert.assertTrue(getDefaultFileSystem().exists(new Path(outputDirectory, "file.parquet")));

    executeUpdate(Utils.format("CREATE TABLE tbl(value {}) STORED AS PARQUET LOCATION '{}'", hiveType,
            outputDirectory));

    assertTableExists("default.tbl");
    assertQueryResult("select * from tbl", new QueryValidator() {
        @Override
        public void validateResultSet(ResultSet rs) throws Exception {
            assertResultSetStructure(rs, new ImmutablePair("tbl.value", jdbcType));

            Assert.assertTrue("Table tbl doesn't contain any rows", rs.next());
            if (jdbcValue.getClass().isArray()) { // Only supported array is a byte array
                Assert.assertArrayEquals((byte[]) jdbcValue, (byte[]) rs.getObject(1));
            } else {
                Assert.assertEquals(jdbcValue, rs.getObject(1));
            }
            Assert.assertFalse("Table tbl contains more then one row", rs.next());
        }
    });
}

From source file:org.schedoscope.metascope.util.HiveQueryExecutor.java

@Transactional
public HiveQueryResult executeQuery(String databaseName, String tableName, String fields,
        Set<MetascopeField> parameters, Map<String, String> params) {
    List<List<String>> rows = new ArrayList<List<String>>();

    HiveServerConnection hiveConn = new HiveServerConnection(config);

    hiveConn.connect();

    if (hiveConn.getConnection() == null) {
        return new HiveQueryResult("Could not connect to HiveServer2");
    }

    String where = "";
    List<String> values = new ArrayList<String>();
    if (params != null) {
        for (Entry<String, String> param : params.entrySet()) {
            if (param.getKey().equals("fqdn") || param.getKey().equals("_csrf")) {
                continue;
            }
            if (!param.getValue().isEmpty()) {
                boolean parameterExists = false;
                for (MetascopeField parameter : parameters) {
                    if (parameter.getFieldName().equals(param.getKey())) {
                        parameterExists = true;
                    }
                }
                if (!parameterExists) {
                    hiveConn.close();
                    return new HiveQueryResult("Query not allowed");
                }

                if (!where.isEmpty()) {
                    where += " AND ";
                }
                where += param.getKey() + "=?";
                values.add(param.getValue());
            }
        }
    }

    String sql = " SELECT " + fields;
    String parameterList = "";
    for (MetascopeField parameter : parameters) {
        sql += "," + parameter.getFieldName();
    }
    sql += parameterList;
    sql += " FROM " + databaseName + "." + tableName;
    sql += where.isEmpty() ? "" : " WHERE " + where;
    sql += " LIMIT 10";

    List<String> header = new ArrayList<String>();
    try {
        PreparedStatement pstmt = hiveConn.getConnection().prepareStatement(sql);
        for (int i = 1; i <= values.size(); i++) {
            pstmt.setString(i, values.get(i - 1));
        }
        ResultSet rs = pstmt.executeQuery();
        ResultSetMetaData rsmd = rs.getMetaData();

        for (int i = 1; i <= rsmd.getColumnCount(); i++) {
            header.add(rsmd.getColumnName(i));
        }

        while (rs.next()) {
            List<String> row = new ArrayList<String>();
            for (int i = 1; i <= rsmd.getColumnCount(); i++) {
                Object val = rs.getObject(i);
                String strVal = (val == null ? null : val.toString());
                row.add(strVal);
            }
            rows.add(row);
        }
    } catch (SQLException e) {
        LOG.error("Could not execute query", e);
        hiveConn.close();
        return new HiveQueryResult(e.getMessage());
    }

    hiveConn.close();
    return new HiveQueryResult(header, rows);
}

From source file:com.netspective.axiom.sql.ResultSetUtils.java

public Object[][] getResultSetRowsAsMatrix(ResultSet rs) throws SQLException {
    List<Object[]> result = new ArrayList<Object[]>();
    ResultSetMetaData rsmd = rs.getMetaData();
    int colsCount = rsmd.getColumnCount();
    while (rs.next()) {
        Object[] row = new Object[colsCount];
        for (int i = 1; i <= colsCount; i++) {
            row[i - 1] = rs.getObject(i);
        }
        result.add(row);
    }

    if (result.size() > 0)
        return result.toArray(new Object[result.size()][]);
    else
        return null;
}

From source file:com.gdcn.modules.db.jdbc.processor.CamelBeanProcessor.java

/**
 * Convert a <code>ResultSet</code> column into an object.  Simple
 * implementations could just call <code>rs.getObject(index)</code> while
 * more complex implementations could perform type manipulation to match
 * the column's type to the bean property type.
 *
 * <p>
 * This implementation calls the appropriate <code>ResultSet</code> getter
 * method for the given property type to perform the type conversion.  If
 * the property type doesn't match one of the supported
 * <code>ResultSet</code> types, <code>getObject</code> is called.
 * </p>
 *
 * @param rs The <code>ResultSet</code> currently being processed.  It is
 * positioned on a valid row before being passed into this method.
 *
 * @param index The current column index being processed.
 *
 * @param propType The bean property type that this column needs to be
 * converted into.
 *
 * @throws SQLException if a database access error occurs
 *
 * @return The object from the <code>ResultSet</code> at the given column
 * index after optional type processing or <code>null</code> if the column
 * value was SQL NULL.
 */
protected Object processColumn(ResultSet rs, int index, Class<?> propType) throws SQLException {

    if (!propType.isPrimitive() && rs.getObject(index) == null) {
        return null;
    }

    if (propType.equals(String.class)) {
        return rs.getString(index);

    } else if (propType.equals(Integer.TYPE) || propType.equals(Integer.class)) {
        return Integer.valueOf(rs.getInt(index));

    } else if (propType.equals(Boolean.TYPE) || propType.equals(Boolean.class)) {
        return Boolean.valueOf(rs.getBoolean(index));

    } else if (propType.equals(Long.TYPE) || propType.equals(Long.class)) {
        return Long.valueOf(rs.getLong(index));

    } else if (propType.equals(Double.TYPE) || propType.equals(Double.class)) {
        return Double.valueOf(rs.getDouble(index));

    } else if (propType.equals(Float.TYPE) || propType.equals(Float.class)) {
        return Float.valueOf(rs.getFloat(index));

    } else if (propType.equals(Short.TYPE) || propType.equals(Short.class)) {
        return Short.valueOf(rs.getShort(index));

    } else if (propType.equals(Byte.TYPE) || propType.equals(Byte.class)) {
        return Byte.valueOf(rs.getByte(index));

    } else if (propType.equals(Timestamp.class)) {
        return rs.getTimestamp(index);

    } else if (propType.equals(SQLXML.class)) {
        return rs.getSQLXML(index);

    } else {
        return rs.getObject(index);
    }

}

From source file:net.orpiske.ssps.common.db.MultiRsHandler.java

@Override
protected T handleRow(ResultSet rs) throws SQLException {
    T dto;

    try {
        dto = clazz.newInstance();
    } catch (InstantiationException e1) {
        throw new SQLException("Unable to instantiate DTO class: " + e1.getMessage(), e1);
    } catch (IllegalAccessException e1) {
        throw new SQLException("Illegal to instantiate DTO class: " + e1.getMessage(), e1);
    }

    ResultSetMetaData meta = rs.getMetaData();

    for (int i = 1; i <= meta.getColumnCount(); i++) {
        Object value = rs.getObject(i);
        String name = meta.getColumnName(i);

        try {
            /*
             * Convert the column name to a Java-style property name: SQL columns
             * are typically named some_thing, whereas Java properties are named
             * someThing.
             */
            String javaProperty = NameConverter.sqlToProperty(name);

            PropertyUtils.setSimpleProperty(dto, javaProperty, value);
        } catch (Exception e) {
            throw new SQLException("Unable to set property " + name + " for bean" + dto.getClass(), e);
        }
    }

    return dto;
}

From source file:BadProfessor.java

public void checkData(String sql) throws Exception {
    // 'st' is a Statement field initialized elsewhere in the class.
    ResultSet rs = st.executeQuery(sql);
    ResultSetMetaData metadata = rs.getMetaData();

    for (int i = 0; i < metadata.getColumnCount(); i++) {
        System.out.print("\t" + metadata.getColumnLabel(i + 1));
    }
    System.out.println("\n----------------------------------");

    while (rs.next()) {
        for (int i = 0; i < metadata.getColumnCount(); i++) {
            Object value = rs.getObject(i + 1);
            if (value == null) {
                System.out.print("\t       ");
            } else {
                System.out.print("\t" + value.toString().trim());
            }
        }
        System.out.println();
    }
}

From source file:com.micromux.cassandra.jdbc.CollectionsTest.java

@Test
public void testReadMap() throws Exception {
    if (LOG.isDebugEnabled())
        LOG.debug("Test: 'testReadMap'\n");

    Statement statement = con.createStatement();

    String update2 = "UPDATE testcollection SET M = {2.0: true, 4.0: false, 6.0 : true} WHERE k = 1;";
    statement.executeUpdate(update2);
    ResultSet result = statement.executeQuery("SELECT * FROM testcollection WHERE k = 1;");
    result.next();

    assertEquals(1, result.getInt("k"));

    Object myObj = result.getObject("m");
    if (LOG.isDebugEnabled())
        LOG.debug("m           = '{}'\n", myObj);
    Map<Double, Boolean> myMap = (Map<Double, Boolean>) myObj;
    assertEquals(3, myMap.size());
    assertTrue(myMap.keySet().contains(2.0));
    assertTrue(myObj instanceof HashMap);
}

From source file:com.micromux.cassandra.jdbc.CollectionsTest.java

@Test
public void testReadSet() throws Exception {
    if (LOG.isDebugEnabled())
        LOG.debug("Test: 'testReadSet'\n");

    Statement statement = con.createStatement();

    String update1 = "UPDATE testcollection SET S = {'red', 'white', 'blue'} WHERE k = 1;";
    statement.executeUpdate(update1);

    ResultSet result = statement.executeQuery("SELECT * FROM testcollection WHERE k = 1;");
    result.next();

    assertEquals(1, result.getInt("k"));

    Object myObj = result.getObject("s");
    if (LOG.isDebugEnabled())
        LOG.debug("s           = '{}'\n", myObj);
    Set<String> mySet = (Set<String>) myObj;
    assertEquals(3, mySet.size());
    assertTrue(mySet.contains("white"));
    assertTrue(myObj instanceof LinkedHashSet);
}
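
One more note: since JDBC 4.1 (Java 7), ResultSet also provides a typed overload, <T> T getObject(String columnLabel, Class<T> type), which asks the driver to perform the conversion for you instead of casting. A brief sketch, assuming the driver supports the requested conversions; the id, name, and created columns are hypothetical:

import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.LocalDate;

public class TypedGetObject {
    // Reads one row with the typed getObject overload (JDBC 4.1+).
    static void printRow(ResultSet rs) throws SQLException {
        Integer id = rs.getObject("id", Integer.class);
        String name = rs.getObject("name", String.class);
        // JDBC 4.2 drivers can map DATE columns directly to java.time.LocalDate.
        LocalDate created = rs.getObject("created", LocalDate.class);
        System.out.println(id + ", " + name + ", " + created);
    }
}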