List of usage examples for java.sql ResultSetMetaData isSigned
boolean isSigned(int column) throws SQLException;
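isSigned reports whether values in the designated column are signed numbers, using 1-based column indexes like the rest of ResultSetMetaData. The following minimal sketch prints the flag for every column of an arbitrary query; the JDBC URL, table name, and column names are placeholders and assume a suitable driver is on the classpath, so treat it as an illustration rather than part of the examples below.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class IsSignedExample {
    public static void main(String[] args) throws SQLException {
        // Placeholder URL and query; substitute your own driver, connection string and SQL.
        try (Connection conn = DriverManager.getConnection("jdbc:yourdriver://localhost/demo");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT id, amount FROM example_table")) {
            ResultSetMetaData meta = rs.getMetaData();
            for (int i = 1; i <= meta.getColumnCount(); i++) {
                // Column indexes are 1-based, as everywhere else in ResultSetMetaData.
                System.out.println(meta.getColumnName(i) + " signed: " + meta.isSigned(i));
            }
        }
    }
}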
From source file:org.apache.nifi.processors.standard.util.TestJdbcCommon.java
@Test
public void testSignedIntShouldBeInt() throws SQLException, IllegalArgumentException, IllegalAccessException {
    final ResultSetMetaData metadata = mock(ResultSetMetaData.class);
    when(metadata.getColumnCount()).thenReturn(1);
    when(metadata.getColumnType(1)).thenReturn(Types.INTEGER);
    when(metadata.isSigned(1)).thenReturn(true);
    when(metadata.getColumnName(1)).thenReturn("Col1");
    when(metadata.getTableName(1)).thenReturn("Table1");

    final ResultSet rs = mock(ResultSet.class);
    when(rs.getMetaData()).thenReturn(metadata);

    Schema schema = JdbcCommon.createSchema(rs);
    Assert.assertNotNull(schema);

    Schema.Field field = schema.getField("Col1");
    Schema fieldSchema = field.schema();
    Assert.assertEquals(2, fieldSchema.getTypes().size());

    boolean foundIntSchema = false;
    boolean foundNullSchema = false;
    for (Schema type : fieldSchema.getTypes()) {
        if (type.getType().equals(Schema.Type.INT)) {
            foundIntSchema = true;
        } else if (type.getType().equals(Schema.Type.NULL)) {
            foundNullSchema = true;
        }
    }
    assertTrue(foundIntSchema);
    assertTrue(foundNullSchema);
}
From source file:org.apache.nifi.processors.standard.util.TestJdbcCommon.java
@Test
public void testMediumUnsignedIntShouldBeInt() throws SQLException, IllegalArgumentException, IllegalAccessException {
    final ResultSetMetaData metadata = mock(ResultSetMetaData.class);
    when(metadata.getColumnCount()).thenReturn(1);
    when(metadata.getColumnType(1)).thenReturn(Types.INTEGER);
    when(metadata.getPrecision(1)).thenReturn(8);
    when(metadata.isSigned(1)).thenReturn(false);
    when(metadata.getColumnName(1)).thenReturn("Col1");
    when(metadata.getTableName(1)).thenReturn("Table1");

    final ResultSet rs = mock(ResultSet.class);
    when(rs.getMetaData()).thenReturn(metadata);

    Schema schema = JdbcCommon.createSchema(rs);
    Assert.assertNotNull(schema);

    Schema.Field field = schema.getField("Col1");
    Schema fieldSchema = field.schema();
    Assert.assertEquals(2, fieldSchema.getTypes().size());

    boolean foundIntSchema = false;
    boolean foundNullSchema = false;
    for (Schema type : fieldSchema.getTypes()) {
        if (type.getType().equals(Schema.Type.INT)) {
            foundIntSchema = true;
        } else if (type.getType().equals(Schema.Type.NULL)) {
            foundNullSchema = true;
        }
    }
    assertTrue(foundIntSchema);
    assertTrue(foundNullSchema);
}
From source file:org.apache.nifi.processors.standard.util.TestJdbcCommon.java
@Test
public void testUnsignedIntShouldBeLong() throws SQLException, IllegalArgumentException, IllegalAccessException {
    final ResultSetMetaData metadata = mock(ResultSetMetaData.class);
    when(metadata.getColumnCount()).thenReturn(1);
    when(metadata.getColumnType(1)).thenReturn(Types.INTEGER);
    when(metadata.getPrecision(1)).thenReturn(10);
    when(metadata.isSigned(1)).thenReturn(false);
    when(metadata.getColumnName(1)).thenReturn("Col1");
    when(metadata.getTableName(1)).thenReturn("Table1");

    final ResultSet rs = mock(ResultSet.class);
    when(rs.getMetaData()).thenReturn(metadata);

    Schema schema = JdbcCommon.createSchema(rs);
    Assert.assertNotNull(schema);

    Schema.Field field = schema.getField("Col1");
    Schema fieldSchema = field.schema();
    Assert.assertEquals(2, fieldSchema.getTypes().size());

    boolean foundLongSchema = false;
    boolean foundNullSchema = false;
    for (Schema type : fieldSchema.getTypes()) {
        if (type.getType().equals(Schema.Type.LONG)) {
            foundLongSchema = true;
        } else if (type.getType().equals(Schema.Type.NULL)) {
            foundNullSchema = true;
        }
    }
    assertTrue(foundLongSchema);
    assertTrue(foundNullSchema);
}
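Taken together, the three tests above pin down one rule in JdbcCommon.createSchema: a signed INTEGER column, or an unsigned one whose precision still fits in a 32-bit int, maps to Avro INT, while a wider unsigned INTEGER is widened to LONG. The snippet below is a standalone sketch of that decision, not code from the project; the MAX_DIGITS_IN_INT value of 9 is an assumption matching the NiFi source quoted later in this list.

import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import org.apache.avro.Schema;

class IntegerTypeRule {
    // Illustrative only; mirrors the rule the tests above assert against JdbcCommon.createSchema.
    static Schema.Type avroTypeForJdbcInteger(ResultSetMetaData meta, int column) throws SQLException {
        final int MAX_DIGITS_IN_INT = 9; // assumed to match the NiFi constant
        boolean fitsInInt = meta.isSigned(column)
                || (meta.getPrecision(column) > 0 && meta.getPrecision(column) <= MAX_DIGITS_IN_INT);
        // Signed (or narrow unsigned) INTEGER maps to Avro INT; a wide unsigned INTEGER needs LONG.
        return fitsInInt ? Schema.Type.INT : Schema.Type.LONG;
    }
}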
From source file:com.kylinolap.rest.service.QueryService.java
/**
 * @param sql
 * @param sqlRequest
 * @return
 * @throws Exception
 */
private SQLResponse execute(String sql, SQLRequest sqlRequest) throws Exception {
    Connection conn = null;
    Statement stat = null;
    ResultSet resultSet = null;

    List<List<String>> results = new LinkedList<List<String>>();
    List<SelectedColumnMeta> columnMetas = new LinkedList<SelectedColumnMeta>();
    try {
        conn = getOLAPDataSource(sqlRequest.getProject()).getConnection();

        if (sqlRequest instanceof PrepareSqlRequest) {
            PreparedStatement preparedState = conn.prepareStatement(sql);
            for (int i = 0; i < ((PrepareSqlRequest) sqlRequest).getParams().length; i++) {
                setParam(preparedState, i + 1, ((PrepareSqlRequest) sqlRequest).getParams()[i]);
            }
            resultSet = preparedState.executeQuery();
        } else {
            stat = conn.createStatement();
            resultSet = stat.executeQuery(sql);
        }

        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();

        // Fill in selected column meta
        for (int i = 1; i <= columnCount; ++i) {
            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
                    metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i),
                    metaData.isSigned(i), metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i),
                    metaData.getColumnName(i), metaData.getSchemaName(i), metaData.getCatalogName(i),
                    metaData.getTableName(i), metaData.getPrecision(i), metaData.getScale(i),
                    metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData.isReadOnly(i),
                    metaData.isWritable(i), metaData.isDefinitelyWritable(i)));
        }

        List<String> oneRow = new LinkedList<String>();

        // fill in results
        while (resultSet.next()) {
            for (int i = 0; i < columnCount; i++) {
                oneRow.add((resultSet.getString(i + 1)));
            }
            results.add(new LinkedList<String>(oneRow));
            oneRow.clear();
        }
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw e;
    } finally {
        close(resultSet, stat, conn);
    }

    boolean isPartialResult = false;
    String cube = "";
    long totalScanCount = 0;
    for (OLAPContext ctx : OLAPContext.getThreadLocalContexts()) {
        isPartialResult |= ctx.storageContext.isPartialResultReturned();
        cube = ctx.cubeInstance.getName();
        totalScanCount += ctx.storageContext.getTotalScanCount();
    }

    SQLResponse response = new SQLResponse(columnMetas, results, cube, 0, false, null, isPartialResult);
    response.setTotalScanCount(totalScanCount);
    return response;
}
From source file:org.apache.sqoop.manager.SqlManager.java
/**
 * Get column types for a query statement that we do not modify further.
 */
protected Map<String, Integer> getColumnTypesForRawQuery(String stmt) {
    ResultSet results;
    try {
        results = execute(stmt);
    } catch (SQLException sqlE) {
        LOG.error("Error executing statement: " + sqlE.toString(), sqlE);
        release();
        return null;
    }

    try {
        Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();

        int cols = results.getMetaData().getColumnCount();
        ResultSetMetaData metadata = results.getMetaData();
        for (int i = 1; i < cols + 1; i++) {
            int typeId = metadata.getColumnType(i);

            // If we have an unsigned int we need to make extra room by
            // plopping it into a bigint
            if (typeId == Types.INTEGER && !metadata.isSigned(i)) {
                typeId = Types.BIGINT;
            }

            String colName = metadata.getColumnName(i);
            if (colName == null || colName.equals("")) {
                colName = metadata.getColumnLabel(i);
            }

            colTypes.put(colName, Integer.valueOf(typeId));
        }

        return colTypes;
    } catch (SQLException sqlException) {
        LOG.error("Error reading from database: " + sqlException.toString());
        return null;
    } finally {
        try {
            results.close();
            getConnection().commit();
        } catch (SQLException sqlE) {
            LOG.warn("SQLException closing ResultSet: " + sqlE.toString());
        }
        release();
    }
}
From source file:org.apache.kylin.rest.service.QueryService.java
/**
 * @param correctedSql
 * @param sqlRequest
 * @return
 * @throws Exception
 */
private SQLResponse execute(String correctedSql, SQLRequest sqlRequest) throws Exception {
    Connection conn = null;
    Statement stat = null;
    ResultSet resultSet = null;
    Boolean isPushDown = false;

    List<List<String>> results = Lists.newArrayList();
    List<SelectedColumnMeta> columnMetas = Lists.newArrayList();

    try {
        conn = cacheService.getOLAPDataSource(sqlRequest.getProject()).getConnection();

        // special case for prepare query.
        if (BackdoorToggles.getPrepareOnly()) {
            return getPrepareOnlySqlResponse(correctedSql, conn, isPushDown, results, columnMetas);
        }

        stat = conn.createStatement();
        processStatementAttr(stat, sqlRequest);
        resultSet = stat.executeQuery(correctedSql);

        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();

        // Fill in selected column meta
        for (int i = 1; i <= columnCount; ++i) {
            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
                    metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i),
                    metaData.isSigned(i), metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i),
                    metaData.getColumnName(i), metaData.getSchemaName(i), metaData.getCatalogName(i),
                    metaData.getTableName(i), metaData.getPrecision(i), metaData.getScale(i),
                    metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData.isReadOnly(i),
                    metaData.isWritable(i), metaData.isDefinitelyWritable(i)));
        }

        // fill in results
        while (resultSet.next()) {
            List<String> oneRow = Lists.newArrayListWithCapacity(columnCount);
            for (int i = 0; i < columnCount; i++) {
                oneRow.add((resultSet.getString(i + 1)));
            }
            results.add(oneRow);
        }
    } catch (SQLException sqlException) {
        isPushDown = PushDownUtil.doPushDownQuery(sqlRequest.getProject(), correctedSql, results, columnMetas,
                sqlException);
    } finally {
        close(resultSet, stat, conn);
    }

    return getSqlResponse(isPushDown, results, columnMetas);
}
From source file:net.ymate.platform.persistence.jdbc.scaffold.EntityGenerator.java
/**
 * @param dbName     database (catalog) name
 * @param dbUserName database user (schema) name
 * @param tableName  table name
 * @return table metadata for the given table, or null if the table is skipped
 */
private TableMeta getTableMeta(String dbName, String dbUserName, String tableName) {
    IConnectionHolder _connHolder = null;
    Statement _statement = null;
    ResultSet _resultSet = null;
    Map<String, ColumnInfo> _tableFields = new LinkedHashMap<String, ColumnInfo>();
    List<String> _pkFields = new LinkedList<String>();
    TableMeta _meta = new TableMeta(_pkFields, _tableFields);
    try {
        _connHolder = __jdbc.getDefaultConnectionHolder();
        String _dbType = _connHolder.getDialect().getName();
        DatabaseMetaData _dbMetaData = _connHolder.getConnection().getMetaData();
        System.out.println(">>> Catalog: " + dbName);
        System.out.println(">>> Schema: " + dbUserName);
        System.out.println(">>> Table: " + tableName);
        _resultSet = _dbMetaData.getPrimaryKeys(dbName,
                _dbType.equalsIgnoreCase("oracle") ? dbUserName.toUpperCase() : dbUserName, tableName);
        if (_resultSet == null) {
            System.err.println("Database table \"" + tableName + "\" primaryKey resultSet is null, ignored");
            return null;
        } else {
            while (_resultSet.next()) {
                _pkFields.add(_resultSet.getString(4).toLowerCase());
            }
            if (_pkFields.isEmpty()) {
                System.err
                        .println("Database table \"" + tableName + "\" does not set the primary key, ignored");
                return null;
            } else {
                // System.out.println(">>> " + "COLUMN_NAME / " + "COLUMN_CLASS_NAME / " + "PRIMARY_KEY / " + "AUTO_INCREMENT / " + "SIGNED / " + "PRECISION / " + "SCALE / " + "NULLABLE / " + "DEFAULT / " + "REMARKS");
                _statement = _connHolder.getConnection().createStatement(ResultSet.TYPE_SCROLL_SENSITIVE,
                        ResultSet.CONCUR_UPDATABLE);
                _resultSet = _statement.executeQuery(
                        "SELECT * FROM ".concat(_connHolder.getDialect().wrapIdentifierQuote(tableName)));
                ResultSetMetaData _rsMetaData = _resultSet.getMetaData();
                for (int _idx = 1; _idx <= _rsMetaData.getColumnCount(); _idx++) {
                    // Look up the column's default value and remarks from the database metadata
                    ResultSet _column = _dbMetaData.getColumns(dbName,
                            _dbType.equalsIgnoreCase("oracle") ? dbUserName.toUpperCase() : dbUserName,
                            tableName, _rsMetaData.getColumnName(_idx));
                    if (_column.next()) {
                        // Record the column info, including whether the column is signed
                        _tableFields.put(_rsMetaData.getColumnName(_idx).toLowerCase(),
                                new ColumnInfo(_rsMetaData.getColumnName(_idx).toLowerCase(),
                                        _rsMetaData.getColumnClassName(_idx), _rsMetaData.isAutoIncrement(_idx),
                                        _rsMetaData.isSigned(_idx), _rsMetaData.getPrecision(_idx),
                                        _rsMetaData.getScale(_idx), _rsMetaData.isNullable(_idx),
                                        _column.getString("COLUMN_DEF"), _column.getString("REMARKS")));
                        System.out.println("--> " + _rsMetaData.getColumnName(_idx).toLowerCase() + "\t"
                                + _rsMetaData.getColumnClassName(_idx) + "\t"
                                + _pkFields.contains(_rsMetaData.getColumnName(_idx).toLowerCase()) + "\t"
                                + _rsMetaData.isAutoIncrement(_idx) + "\t" + _rsMetaData.isSigned(_idx) + "\t"
                                + _rsMetaData.getPrecision(_idx) + "\t" + _rsMetaData.getScale(_idx) + "\t"
                                + _rsMetaData.isNullable(_idx) + "\t" + _column.getString("COLUMN_DEF") + "\t"
                                + _column.getString("REMARKS"));
                    }
                    _column.close();
                }
            }
        }
    } catch (Exception e) {
        if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
        }
        throw new RuntimeException(e);
    } finally {
        if (_statement != null) {
            try {
                _statement.close();
            } catch (SQLException e) {
                _LOG.warn("", e);
            }
        }
        if (_resultSet != null) {
            try {
                _resultSet.close();
            } catch (SQLException e) {
                _LOG.warn("", e);
            }
        }
        if (_connHolder != null) {
            _connHolder.release();
        }
    }
    return _meta;
}
From source file:org.apache.nifi.processors.standard.util.JdbcCommon.java
/**
 * Creates an Avro schema from a result set. If the table/record name is known a priori and provided, use that as a
 * fallback for the record name if it cannot be retrieved from the result set, and finally fall back to a default value.
 *
 * @param rs The result set to convert to Avro
 * @param recordName The a priori record name to use if it cannot be determined from the result set.
 * @return A Schema object representing the result set converted to an Avro record
 * @throws SQLException if any error occurs during conversion
 */
public static Schema createSchema(final ResultSet rs, String recordName, boolean convertNames)
        throws SQLException {
    final ResultSetMetaData meta = rs.getMetaData();
    final int nrOfColumns = meta.getColumnCount();
    String tableName = StringUtils.isEmpty(recordName) ? "NiFi_ExecuteSQL_Record" : recordName;
    if (nrOfColumns > 0) {
        String tableNameFromMeta = meta.getTableName(1);
        if (!StringUtils.isBlank(tableNameFromMeta)) {
            tableName = tableNameFromMeta;
        }
    }

    if (convertNames) {
        tableName = normalizeNameForAvro(tableName);
    }

    final FieldAssembler<Schema> builder = SchemaBuilder.record(tableName).namespace("any.data").fields();

    /**
     * Some missing Avro types - Decimal, Date types. May need some additional work.
     */
    for (int i = 1; i <= nrOfColumns; i++) {
        /**
         * As per the JDBC 4 specs, getColumnLabel will have the alias for the column; if not, it will have the column name.
         * So it may be a better option to check for the column label first and, in case it is null in some implementation,
         * check for the alias. Postgres is the one that has null column names for calculated fields.
         */
        String nameOrLabel = StringUtils.isNotEmpty(meta.getColumnLabel(i)) ? meta.getColumnLabel(i)
                : meta.getColumnName(i);
        String columnName = convertNames ? normalizeNameForAvro(nameOrLabel) : nameOrLabel;
        switch (meta.getColumnType(i)) {
        case CHAR:
        case LONGNVARCHAR:
        case LONGVARCHAR:
        case NCHAR:
        case NVARCHAR:
        case VARCHAR:
        case CLOB:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                    .noDefault();
            break;

        case BIT:
        case BOOLEAN:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().booleanType().endUnion()
                    .noDefault();
            break;

        case INTEGER:
            if (meta.isSigned(i) || (meta.getPrecision(i) > 0 && meta.getPrecision(i) <= MAX_DIGITS_IN_INT)) {
                builder.name(columnName).type().unionOf().nullBuilder().endNull().and().intType().endUnion()
                        .noDefault();
            } else {
                builder.name(columnName).type().unionOf().nullBuilder().endNull().and().longType().endUnion()
                        .noDefault();
            }
            break;

        case SMALLINT:
        case TINYINT:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().intType().endUnion()
                    .noDefault();
            break;

        case BIGINT:
            // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19), but Avro won't handle that.
            // If the precision > 19 (or is negative), use a string for the type, otherwise use a long.
            // The object(s) will be converted to strings as necessary.
            int precision = meta.getPrecision(i);
            if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                        .noDefault();
            } else {
                builder.name(columnName).type().unionOf().nullBuilder().endNull().and().longType().endUnion()
                        .noDefault();
            }
            break;

        // java.sql.RowId is an interface and seems to be database
        // implementation specific, so convert it to String
        case ROWID:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                    .noDefault();
            break;

        case FLOAT:
        case REAL:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().floatType().endUnion()
                    .noDefault();
            break;

        case DOUBLE:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().doubleType().endUnion()
                    .noDefault();
            break;

        // Did not find a directly suitable type, needs to be clarified
        case DECIMAL:
        case NUMERIC:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                    .noDefault();
            break;

        // Did not find a directly suitable type, needs to be clarified
        case DATE:
        case TIME:
        case TIMESTAMP:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                    .noDefault();
            break;

        case BINARY:
        case VARBINARY:
        case LONGVARBINARY:
        case ARRAY:
        case BLOB:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().bytesType().endUnion()
                    .noDefault();
            break;

        default:
            throw new IllegalArgumentException("createSchema: Unknown SQL type " + meta.getColumnType(i) + " / "
                    + meta.getColumnTypeName(i) + " (table: " + tableName + ", column: " + columnName
                    + ") cannot be converted to Avro type");
        }
    }

    return builder.endRecord();
}
From source file:be.dataminded.nifi.plugins.util.JdbcCommon.java
/**
 * Creates an Avro schema from a result set. If the table/record name is known a priori and provided, use that as a
 * fallback for the record name if it cannot be retrieved from the result set, and finally fall back to a default value.
 *
 * @param rs The result set to convert to Avro
 * @param recordName The a priori record name to use if it cannot be determined from the result set.
 * @return A Schema object representing the result set converted to an Avro record
 * @throws SQLException if any error occurs during conversion
 */
public static Schema createSchema(final ResultSet rs, String recordName, boolean convertNames)
        throws SQLException {
    final ResultSetMetaData meta = rs.getMetaData();
    final int nrOfColumns = meta.getColumnCount();
    String tableName = StringUtils.isEmpty(recordName) ? "NiFi_ExecuteSQL_Record" : recordName;
    if (nrOfColumns > 0) {
        String tableNameFromMeta = meta.getTableName(1);
        if (!StringUtils.isBlank(tableNameFromMeta)) {
            tableName = tableNameFromMeta;
        }
    }

    if (convertNames) {
        tableName = normalizeNameForAvro(tableName);
    }

    final FieldAssembler<Schema> builder = SchemaBuilder.record(tableName).namespace("any.data").fields();

    /**
     * Some missing Avro types - Decimal, Date types. May need some additional work.
     */
    for (int i = 1; i <= nrOfColumns; i++) {
        /**
         * As per the JDBC 4 specs, getColumnLabel will have the alias for the column; if not, it will have the column name.
         * So it may be a better option to check for the column label first and, in case it is null in some implementation,
         * check for the alias. Postgres is the one that has null column names for calculated fields.
         */
        String nameOrLabel = StringUtils.isNotEmpty(meta.getColumnLabel(i)) ? meta.getColumnLabel(i)
                : meta.getColumnName(i);
        String columnName = convertNames ? normalizeNameForAvro(nameOrLabel) : nameOrLabel;
        switch (meta.getColumnType(i)) {
        case CHAR:
        case LONGNVARCHAR:
        case LONGVARCHAR:
        case NCHAR:
        case NVARCHAR:
        case VARCHAR:
        case CLOB:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                    .noDefault();
            break;

        case BIT:
        case BOOLEAN:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().booleanType().endUnion()
                    .noDefault();
            break;

        case INTEGER:
            if (meta.isSigned(i)) {
                builder.name(columnName).type().unionOf().nullBuilder().endNull().and().intType().endUnion()
                        .noDefault();
            } else {
                builder.name(columnName).type().unionOf().nullBuilder().endNull().and().longType().endUnion()
                        .noDefault();
            }
            break;

        case SMALLINT:
        case TINYINT:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().intType().endUnion()
                    .noDefault();
            break;

        case BIGINT:
            // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19), but Avro won't handle that.
            // If the precision > 19 (or is negative), use a string for the type, otherwise use a long.
            // The object(s) will be converted to strings as necessary.
            int precision = meta.getPrecision(i);
            if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                        .noDefault();
            } else {
                builder.name(columnName).type().unionOf().nullBuilder().endNull().and().longType().endUnion()
                        .noDefault();
            }
            break;

        // java.sql.RowId is an interface and seems to be database
        // implementation specific, so convert it to String
        case ROWID:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                    .noDefault();
            break;

        case FLOAT:
        case REAL:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().floatType().endUnion()
                    .noDefault();
            break;

        case DOUBLE:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().doubleType().endUnion()
                    .noDefault();
            break;

        // Did not find a directly suitable type, needs to be clarified
        case DECIMAL:
        case NUMERIC:
            int scale = meta.getScale(i);
            if (scale == 0) {
                if (meta.getPrecision(i) < 10) {
                    builder.name(columnName).type().unionOf().nullBuilder().endNull().and().intType().endUnion()
                            .noDefault();
                } else {
                    builder.name(columnName).type().unionOf().nullBuilder().endNull().and().longType()
                            .endUnion().noDefault();
                }
            } else {
                builder.name(columnName).type().unionOf().nullBuilder().endNull().and().doubleType().endUnion()
                        .noDefault();
            }
            break;

        // Did not find a directly suitable type, needs to be clarified
        case DATE:
        case TIME:
        case TIMESTAMP:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                    .noDefault();
            break;

        case BINARY:
        case VARBINARY:
        case LONGVARBINARY:
        case ARRAY:
        case BLOB:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().bytesType().endUnion()
                    .noDefault();
            break;

        default:
            throw new IllegalArgumentException("createSchema: Unknown SQL type " + meta.getColumnType(i)
                    + " cannot be converted to Avro type");
        }
    }

    return builder.endRecord();
}
From source file:org.apache.nifi.util.hive.HiveJdbcCommon.java
/**
 * Creates an Avro schema from a result set. If the table/record name is known a priori and provided, use that as a
 * fallback for the record name if it cannot be retrieved from the result set, and finally fall back to a default value.
 *
 * @param rs The result set to convert to Avro
 * @param recordName The a priori record name to use if it cannot be determined from the result set.
 * @return A Schema object representing the result set converted to an Avro record
 * @throws SQLException if any error occurs during conversion
 */
public static Schema createSchema(final ResultSet rs, String recordName) throws SQLException {
    final ResultSetMetaData meta = rs.getMetaData();
    final int nrOfColumns = meta.getColumnCount();
    String tableName = StringUtils.isEmpty(recordName) ? "NiFi_SelectHiveQL_Record" : recordName;

    try {
        if (nrOfColumns > 0) {
            // Hive JDBC doesn't support getTableName, instead it returns table.column for column name. Grab the table name from the first column
            String firstColumnNameFromMeta = meta.getColumnName(1);
            int tableNameDelimiter = firstColumnNameFromMeta.lastIndexOf(".");
            if (tableNameDelimiter > -1) {
                String tableNameFromMeta = firstColumnNameFromMeta.substring(0, tableNameDelimiter);
                if (!StringUtils.isBlank(tableNameFromMeta)) {
                    tableName = tableNameFromMeta;
                }
            }
        }
    } catch (SQLException se) {
        // Not all drivers support getTableName, so just use the previously-set default
    }

    final FieldAssembler<Schema> builder = SchemaBuilder.record(tableName).namespace("any.data").fields();

    /**
     * Some missing Avro types - Decimal, Date types. May need some additional work.
     */
    for (int i = 1; i <= nrOfColumns; i++) {
        String columnNameFromMeta = meta.getColumnName(i);
        // Hive returns table.column for column name. Grab the column name as the string after the last period
        int columnNameDelimiter = columnNameFromMeta.lastIndexOf(".");
        String columnName = columnNameFromMeta.substring(columnNameDelimiter + 1);
        switch (meta.getColumnType(i)) {
        case CHAR:
        case LONGNVARCHAR:
        case LONGVARCHAR:
        case NCHAR:
        case NVARCHAR:
        case VARCHAR:
        case ARRAY:
        case STRUCT:
        case JAVA_OBJECT:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                    .noDefault();
            break;

        case BIT:
        case BOOLEAN:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().booleanType().endUnion()
                    .noDefault();
            break;

        case INTEGER:
            // Default to signed type unless otherwise noted. Some JDBC drivers don't implement isSigned()
            boolean signedType = true;
            try {
                signedType = meta.isSigned(i);
            } catch (SQLException se) {
                // Use signed types as default
            }
            if (signedType) {
                builder.name(columnName).type().unionOf().nullBuilder().endNull().and().intType().endUnion()
                        .noDefault();
            } else {
                builder.name(columnName).type().unionOf().nullBuilder().endNull().and().longType().endUnion()
                        .noDefault();
            }
            break;

        case SMALLINT:
        case TINYINT:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().intType().endUnion()
                    .noDefault();
            break;

        case BIGINT:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().longType().endUnion()
                    .noDefault();
            break;

        // java.sql.RowId is an interface and seems to be database
        // implementation specific, so convert it to String
        case ROWID:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                    .noDefault();
            break;

        case FLOAT:
        case REAL:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().floatType().endUnion()
                    .noDefault();
            break;

        case DOUBLE:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().doubleType().endUnion()
                    .noDefault();
            break;

        // Did not find a directly suitable type, needs to be clarified
        case DECIMAL:
        case NUMERIC:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                    .noDefault();
            break;

        // Did not find a directly suitable type, needs to be clarified
        case DATE:
        case TIME:
        case TIMESTAMP:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion()
                    .noDefault();
            break;

        case BINARY:
        case VARBINARY:
        case LONGVARBINARY:
        case BLOB:
        case CLOB:
            builder.name(columnName).type().unionOf().nullBuilder().endNull().and().bytesType().endUnion()
                    .noDefault();
            break;

        default:
            throw new IllegalArgumentException("createSchema: Unknown SQL type " + meta.getColumnType(i)
                    + " cannot be converted to Avro type");
        }
    }

    return builder.endRecord();
}
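The Hive example above wraps isSigned in a try/catch because some JDBC drivers do not implement it. If that guard is needed in more than one place, it can be factored into a small helper; the class and method below are a hypothetical sketch for illustration, not part of any of the projects quoted in this list.

import java.sql.ResultSetMetaData;
import java.sql.SQLException;

class SignedMetadataGuard {
    // Hypothetical helper: ask the driver, and fall back to a caller-supplied default if the call fails.
    static boolean isSignedOrDefault(ResultSetMetaData meta, int column, boolean defaultValue) {
        try {
            return meta.isSigned(column);
        } catch (SQLException e) {
            // Some drivers throw instead of answering; treating columns as signed is the common fallback.
            return defaultValue;
        }
    }
}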