List of usage examples for java.sql.Types.LONGNVARCHAR
java.sql.Types.LONGNVARCHAR is an int constant (a JDBC type code) that identifies the generic SQL type LONGNVARCHAR, a long variable-length national character string. The examples below show how open-source projects branch on this type code when mapping, converting, or reading JDBC columns.
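Before the project examples, here is a minimal sketch of the two places the LONGNVARCHAR type code typically appears in application code: binding a NULL parameter with PreparedStatement.setNull, and checking the type a driver reports for a column through ResultSetMetaData. The table notes and its columns are hypothetical; adapt the SQL to your own schema.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;

public class LongNVarcharSketch {

    // Assumes a hypothetical table notes(id INT, body LONG NVARCHAR) already exists.
    static void writeAndInspect(Connection con) throws SQLException {
        try (PreparedStatement ps =
                con.prepareStatement("INSERT INTO notes (id, body) VALUES (?, ?)")) {
            ps.setInt(1, 1);
            // Bind a NULL for the long national-character column using its JDBC type code.
            ps.setNull(2, Types.LONGNVARCHAR);
            ps.executeUpdate();
        }

        try (PreparedStatement ps = con.prepareStatement("SELECT body FROM notes");
             ResultSet rs = ps.executeQuery()) {
            ResultSetMetaData md = rs.getMetaData();
            // A driver may report long national-character columns as Types.LONGNVARCHAR.
            if (md.getColumnType(1) == Types.LONGNVARCHAR && rs.next()) {
                // getNString preserves national character set semantics.
                System.out.println("body = " + rs.getNString(1));
            }
        }
    }
}

As the project examples below show, most real code treats LONGNVARCHAR alongside VARCHAR, NVARCHAR, and LONGVARCHAR as a plain string/text type.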
From source file:org.apache.tajo.storage.jdbc.JdbcMetadataProviderBase.java
private TypeDesc convertDataType(ResultSet res) throws SQLException {
    final int typeId = res.getInt("DATA_TYPE");
    switch (typeId) {
    case Types.BOOLEAN:
        return new TypeDesc(newSimpleDataType(Type.BOOLEAN));
    case Types.TINYINT:
    case Types.SMALLINT:
    case Types.INTEGER:
        return new TypeDesc(newSimpleDataType(Type.INT4));
    case Types.DISTINCT: // sequence for postgresql
    case Types.BIGINT:
        return new TypeDesc(newSimpleDataType(Type.INT8));
    case Types.FLOAT:
        return new TypeDesc(newSimpleDataType(Type.FLOAT4));
    case Types.NUMERIC:
    case Types.DECIMAL:
    case Types.DOUBLE:
        return new TypeDesc(newSimpleDataType(Type.FLOAT8));
    case Types.DATE:
        return new TypeDesc(newSimpleDataType(Type.DATE));
    case Types.TIME:
        return new TypeDesc(newSimpleDataType(Type.TIME));
    case Types.TIMESTAMP:
        return new TypeDesc(newSimpleDataType(Type.TIMESTAMP));
    case Types.CHAR:
    case Types.NCHAR:
    case Types.VARCHAR:
    case Types.NVARCHAR:
    case Types.CLOB:
    case Types.NCLOB:
    case Types.LONGVARCHAR:
    case Types.LONGNVARCHAR:
        return new TypeDesc(newSimpleDataType(Type.TEXT));
    case Types.BINARY:
    case Types.VARBINARY:
    case Types.BLOB:
        return new TypeDesc(newSimpleDataType(Type.BLOB));
    default:
        throw SQLExceptionUtil.toSQLException(new UnsupportedDataTypeException(typeId + ""));
    }
}
From source file:com.alibaba.otter.node.etl.common.db.utils.SqlUtils.java
public static String encoding(String source, int sqlType, String sourceEncoding, String targetEncoding) {
    switch (sqlType) {
    case Types.CHAR:
    case Types.VARCHAR:
    case Types.LONGVARCHAR:
    case Types.NCHAR:
    case Types.NVARCHAR:
    case Types.LONGNVARCHAR:
    case Types.CLOB:
    case Types.NCLOB:
        if (false == StringUtils.isEmpty(source)) {
            String fromEncoding = StringUtils.isBlank(sourceEncoding) ? "UTF-8" : sourceEncoding;
            String toEncoding = StringUtils.isBlank(targetEncoding) ? "UTF-8" : targetEncoding;
            // if (false == StringUtils.equalsIgnoreCase(fromEncoding, toEncoding)) {
            try {
                return new String(source.getBytes(fromEncoding), toEncoding);
            } catch (UnsupportedEncodingException e) {
                throw new IllegalArgumentException(e.getMessage(), e);
            }
            // }
        }
    }
    return source;
}
From source file:org.jumpmind.db.platform.mssql.MsSqlDdlReader.java
protected Integer mapUnknownJdbcTypeForColumn(Map<String, Object> values) {
    String typeName = (String) values.get("TYPE_NAME");
    int size = -1;
    String columnSize = (String) values.get("COLUMN_SIZE");
    if (isNotBlank(columnSize)) {
        size = Integer.parseInt(columnSize);
    }
    if (typeName != null && typeName.toLowerCase().startsWith("text")) {
        return Types.LONGVARCHAR;
    } else if (typeName != null && typeName.toLowerCase().startsWith("ntext")) {
        return Types.CLOB;
    } else if (typeName != null && typeName.toUpperCase().contains(TypeMap.GEOMETRY)) {
        return Types.VARCHAR;
    } else if (typeName != null && typeName.toUpperCase().contains("VARCHAR") && size > 8000) {
        return Types.LONGVARCHAR;
    } else if (typeName != null && typeName.toUpperCase().contains("NVARCHAR") && size > 8000) {
        return Types.LONGNVARCHAR;
    } else if (typeName != null && typeName.toUpperCase().equals("SQL_VARIANT")) {
        return Types.BINARY;
    } else {
        return super.mapUnknownJdbcTypeForColumn(values);
    }
}
From source file:madgik.exareme.master.queryProcessor.analyzer.stat.ExternalStat.java
@Override
public Map<String, Table> extractStats() throws Exception {
    DatabaseMetaData dbmd = con.getMetaData(); // database metadata object
    // listing tables and columns
    String catalog = null;
    String schemaPattern = sch;
    String tableNamePattern = tblName;
    String columnNamePattern = "%";
    if (con.getClass().getName().contains("postgresql")) {
        // tableNamePattern="\""+tableNamePattern+"\"";
        schemaPattern = "public";
    }
    // ResultSet resultTables = dbmd.getTables(catalog, "public", tableNamePattern, types);
    ResultSet resultColumns = dbmd.getColumns(catalog, schemaPattern, tableNamePattern, columnNamePattern);
    if (con.getClass().getName().contains("postgresql")) {
        tableNamePattern = "\"" + tableNamePattern + "\"";
    } else if (con.getClass().getName().contains("oracle")) {
        tableNamePattern = schemaPattern + "." + tableNamePattern;
    }
    log.debug("Starting extracting stats");
    // while (resultTables.next()) {
    Map<String, Column> columnMap = new HashMap<String, Column>();
    // StringEscapeUtils.escapeJava(resultTables.getString(3));
    log.debug("Analyzing table " + tblName);
    int toupleSize = 0; // in bytes
    // tableNamePattern = tableName;
    int columnCount = resultColumns.getMetaData().getColumnCount();
    Statement st = con.createStatement();
    ResultSet rs = st.executeQuery("select count(*) from " + tableNamePattern);
    int count = 0;
    if (rs.next()) {
        count = rs.getInt(1);
    } else {
        log.error("could not get count for table " + tableNamePattern);
    }
    rs.close();
    st.close();
    ResultSet pkrs = dbmd.getExportedKeys("", "", tblName);
    String pkey = "DEFAULT_KEY";
    while (pkrs.next()) {
        pkey = pkrs.getString("PKCOLUMN_NAME");
        break;
    }
    pkrs.close();
    if (count == 0) {
        log.debug("Empty table");
        Table t = new Table(tblName, columnCount, toupleSize, columnMap, count, pkey);
        schema.put(tblName, t);
        return schema;
    }
    while (resultColumns.next()) {
        String columnName = StringEscapeUtils.escapeJava(resultColumns.getString(4));
        try {
            String colNamePattern = columnName;
            if (con.getClass().getName().contains("postgresql")) {
                colNamePattern = "\"" + columnName + "\"";
            }
            int columnType = resultColumns.getInt(5);
            // computing column's size in bytes
            int columnSize = computeColumnSize(colNamePattern, columnType, tableNamePattern);
            toupleSize += columnSize;
            // execute queries for numberOfDiffValues, minVal, maxVal
            // Map<String, Integer> diffValFreqMap = new HashMap<String, Integer>();
            // computing column's min and max values
            String minVal = "0";
            String maxVal = "0";
            if (columnType != Types.BLOB) {
                MinMax mm = computeMinMax(tableNamePattern, colNamePattern);
                minVal = mm.getMin();
                maxVal = mm.getMax();
            }
            Map<String, Integer> diffValFreqMap = new HashMap<String, Integer>(); // only for equidepth!
            // for (ValFreq k : freqs) {
            //     diffValFreqMap.put(k.getVal(), k.getFreq());
            // }
            // add min max diff vals in the sampling values
            int minOcc = 1;
            int maxOcc = 1;
            int diffVals = 0;
            boolean equidepth = false;
            if (equidepth) {
                // diffValFreqMap is used only in equidepth, do not compute it if we have primitive
                diffValFreqMap = computeDistinctValuesFrequency(tableNamePattern, colNamePattern);
                String minValChar = minVal;
                String maxValChar = maxVal;
                if (columnType == Types.VARCHAR || columnType == Types.CHAR
                        || columnType == Types.LONGNVARCHAR || columnType == Types.DATE) {
                    minValChar = "\'" + minVal + "\'";
                    maxValChar = "\'" + maxVal + "\'";
                }
                try {
                    minOcc = computeValOccurences(tableNamePattern, colNamePattern, minValChar);
                } catch (Exception e) {
                    log.error("Could not compute value occurences for column:" + colNamePattern
                            + " and value:" + minValChar);
                }
                if (equidepth && !diffValFreqMap.containsKey(minVal))
                    diffValFreqMap.put(minVal, minOcc);
                try {
                    maxOcc = computeValOccurences(tableNamePattern, colNamePattern, maxValChar);
                } catch (Exception e) {
                    log.error("Could not compute value occurences for column:" + colNamePattern
                            + " and value:" + maxValChar);
                }
                if (diffValFreqMap.containsKey(maxVal))
                    diffValFreqMap.put(maxVal, maxOcc);
                diffVals = diffValFreqMap.size();
            } else {
                diffVals = computeDiffVals(tableNamePattern, colNamePattern, columnType);
            }
            if (diffVals == 0) {
                // all values are null!
                continue;
            }
            Column c = new Column(columnName, columnType, columnSize, diffVals, minVal, maxVal, diffValFreqMap);
            columnMap.put(columnName, c);
        } catch (Exception ex) {
            log.error("could not analyze column " + columnName + ":" + ex.getMessage());
        }
    }
    Table t = new Table(tblName, columnCount, toupleSize, columnMap, count, pkey);
    schema.put(tblName, t);
    // }
    // resultTables.close();
    resultColumns.close();
    return schema;
}
From source file:org.fao.geonet.arcgis.ArcSDEJdbcConnection.java
@Override
public Map<String, String> retrieveMetadata(AtomicBoolean cancelMonitor, String arcSDEVersion) throws Exception {
    Map<String, String> results = new HashMap<>();
    ArcSDEVersionFactory arcSDEVersionFactory = new ArcSDEVersionFactory();
    String metadataTable = arcSDEVersionFactory.getTableName(arcSDEVersion);
    String columnName = arcSDEVersionFactory.getMetadataColumnName(arcSDEVersion);
    String sqlQuery = "SELECT " + columnName + ", UUID FROM " + metadataTable;
    getJdbcTemplate().query(sqlQuery, new RowCallbackHandler() {
        @Override
        public void processRow(ResultSet rs) throws SQLException {
            // Cancel processing
            if (cancelMonitor.get()) {
                Log.warning(ARCSDE_LOG_MODULE_NAME,
                        "Cancelling metadata retrieve using " + "ArcSDE connection (via JDBC)");
                rs.getStatement().cancel();
                results.clear();
            }
            String document = "";
            int colId = rs.findColumn(columnName);
            int colIdUuid = rs.findColumn("UUID");
            // very simple type check:
            if (rs.getObject(colId) != null) {
                if (rs.getMetaData().getColumnType(colId) == Types.BLOB) {
                    Blob blob = rs.getBlob(columnName);
                    byte[] bdata = blob.getBytes(1, (int) blob.length());
                    document = new String(bdata);
                } else if (rs.getMetaData().getColumnType(colId) == Types.LONGVARBINARY) {
                    byte[] byteData = rs.getBytes(colId);
                    document = new String(byteData);
                } else if (rs.getMetaData().getColumnType(colId) == Types.LONGNVARCHAR
                        || rs.getMetaData().getColumnType(colId) == Types.LONGVARCHAR
                        || rs.getMetaData().getColumnType(colId) == Types.VARCHAR
                        || rs.getMetaData().getColumnType(colId) == Types.SQLXML) {
                    document = rs.getString(colId);
                } else {
                    throw new SQLException("Trying to harvest from a column with an invalid datatype: "
                            + rs.getMetaData().getColumnTypeName(colId));
                }
                String uuid = rs.getString(colIdUuid);
                results.put(uuid, document);
            }
        }
    });
    Log.info(ARCSDE_LOG_MODULE_NAME,
            "Finished retrieving metadata, found: #" + results.size() + " metadata records");
    return results;
}
From source file:com.xpfriend.fixture.cast.temp.TypeConverter.java
private static Class<?> getJavaType(int sqltype, int precision, int scale) {
    switch (sqltype) {
    case Types.BIGINT:
        return Long.class;
    case Types.BIT:
        return Boolean.class;
    case Types.BOOLEAN:
        return Boolean.class;
    case Types.CHAR:
        return String.class;
    case Types.DECIMAL:
        return getNumericType(precision, scale);
    case Types.DOUBLE:
        return Double.class;
    case Types.FLOAT:
        return Double.class;
    case Types.INTEGER:
        return Integer.class;
    case Types.LONGVARCHAR:
        return String.class;
    case Types.NUMERIC:
        return getNumericType(precision, scale);
    case Types.REAL:
        return Float.class;
    case Types.SMALLINT:
        return Short.class;
    case Types.DATE:
        return java.sql.Timestamp.class;
    case Types.TIME:
        return java.sql.Time.class;
    case Types.TIMESTAMP:
        return java.sql.Timestamp.class;
    case Types.TINYINT:
        return Byte.class;
    case Types.VARCHAR:
        return String.class;
    case Types.BLOB:
        return byte[].class;
    case Types.LONGVARBINARY:
        return byte[].class;
    case Types.CLOB:
        return String.class;
    case Types.BINARY:
        return byte[].class;
    case Types.VARBINARY:
        return byte[].class;
    case Types.NVARCHAR:
        return String.class;
    case Types.NCHAR:
        return String.class;
    case Types.LONGNVARCHAR:
        return String.class;
    case -155:
        return java.sql.Timestamp.class;
    default:
        return Object.class;
    }
}
From source file:org.apache.sqoop.manager.ConnManager.java
/**
 * Resolve a database-specific type to Avro data type.
 * @param sqlType sql type
 * @return avro type
 */
public Type toAvroType(int sqlType) {
    switch (sqlType) {
    case Types.TINYINT:
    case Types.SMALLINT:
    case Types.INTEGER:
        return Type.INT;
    case Types.BIGINT:
        return Type.LONG;
    case Types.BIT:
    case Types.BOOLEAN:
        return Type.BOOLEAN;
    case Types.REAL:
        return Type.FLOAT;
    case Types.FLOAT:
    case Types.DOUBLE:
        return Type.DOUBLE;
    case Types.NUMERIC:
    case Types.DECIMAL:
        return Type.STRING;
    case Types.CHAR:
    case Types.VARCHAR:
    case Types.LONGVARCHAR:
    case Types.LONGNVARCHAR:
    case Types.NVARCHAR:
    case Types.NCHAR:
        return Type.STRING;
    case Types.DATE:
    case Types.TIME:
    case Types.TIMESTAMP:
        return Type.STRING;
    case Types.BLOB:
    case Types.BINARY:
    case Types.VARBINARY:
    case Types.LONGVARBINARY:
        return Type.BYTES;
    default:
        throw new IllegalArgumentException("Cannot convert SQL type " + sqlType);
    }
}
From source file:com.opencsv.ResultSetHelperService.java
private String getColumnValue(ResultSet rs, int colType, int colIndex, boolean trim,
        String dateFormatString, String timestampFormatString) throws SQLException, IOException {
    String value = "";
    switch (colType) {
    case Types.BIT:
    case Types.JAVA_OBJECT:
        // Once Java 7 is the minimum supported version.
        // value = Objects.toString(rs.getObject(colIndex), "");
        value = ObjectUtils.toString(rs.getObject(colIndex), "");
        break;
    case Types.BOOLEAN:
        // Once Java 7 is the minimum supported version.
        // value = Objects.toString(rs.getBoolean(colIndex));
        value = ObjectUtils.toString(rs.getBoolean(colIndex));
        break;
    case Types.NCLOB: // todo : use rs.getNClob
    case Types.CLOB:
        Clob c = rs.getClob(colIndex);
        if (c != null) {
            StrBuilder sb = new StrBuilder();
            sb.readFrom(c.getCharacterStream());
            value = sb.toString();
        }
        break;
    case Types.BIGINT:
        // Once Java 7 is the minimum supported version.
        // value = Objects.toString(rs.getLong(colIndex));
        value = ObjectUtils.toString(rs.getLong(colIndex));
        break;
    case Types.DECIMAL:
    case Types.REAL:
    case Types.NUMERIC:
        // Once Java 7 is the minimum supported version.
        // value = Objects.toString(rs.getBigDecimal(colIndex), "");
        value = ObjectUtils.toString(rs.getBigDecimal(colIndex), "");
        break;
    case Types.DOUBLE:
        // Once Java 7 is the minimum supported version.
        // value = Objects.toString(rs.getDouble(colIndex));
        value = ObjectUtils.toString(rs.getDouble(colIndex));
        break;
    case Types.FLOAT:
        // Once Java 7 is the minimum supported version.
        // value = Objects.toString(rs.getFloat(colIndex));
        value = ObjectUtils.toString(rs.getFloat(colIndex));
        break;
    case Types.INTEGER:
    case Types.TINYINT:
    case Types.SMALLINT:
        // Once Java 7 is the minimum supported version.
        // value = Objects.toString(rs.getInt(colIndex));
        value = ObjectUtils.toString(rs.getInt(colIndex));
        break;
    case Types.DATE:
        java.sql.Date date = rs.getDate(colIndex);
        if (date != null) {
            SimpleDateFormat df = new SimpleDateFormat(dateFormatString);
            value = df.format(date);
        }
        break;
    case Types.TIME:
        // Once Java 7 is the minimum supported version.
        // value = Objects.toString(rs.getTime(colIndex), "");
        value = ObjectUtils.toString(rs.getTime(colIndex), "");
        break;
    case Types.TIMESTAMP:
        value = handleTimestamp(rs.getTimestamp(colIndex), timestampFormatString);
        break;
    case Types.NVARCHAR: // todo : use rs.getNString
    case Types.NCHAR: // todo : use rs.getNString
    case Types.LONGNVARCHAR: // todo : use rs.getNString
    case Types.LONGVARCHAR:
    case Types.VARCHAR:
    case Types.CHAR:
        String columnValue = rs.getString(colIndex);
        if (trim && columnValue != null) {
            value = columnValue.trim();
        } else {
            value = columnValue;
        }
        break;
    default:
        value = "";
    }
    if (rs.wasNull() || value == null) {
        value = "";
    }
    return value;
}
From source file:org.dashbuilder.dataprovider.backend.sql.JDBCUtils.java
public static ColumnType calculateType(int sqlDataType) {
    switch (sqlDataType) {
    // Category-like columns.
    case Types.CHAR:
    case Types.VARCHAR:
    case Types.NCHAR:
    case Types.NVARCHAR:
    case Types.BIT:
    case Types.BOOLEAN: {
        return ColumnType.LABEL;
    }
    // Text-like columns.
    case Types.LONGVARCHAR:
    case Types.LONGNVARCHAR: {
        return ColumnType.TEXT;
    }
    // Number-like columns.
    case Types.TINYINT:
    case Types.BIGINT:
    case Types.INTEGER:
    case Types.DECIMAL:
    case Types.DOUBLE:
    case Types.FLOAT:
    case Types.NUMERIC:
    case Types.REAL:
    case Types.SMALLINT: {
        return ColumnType.NUMBER;
    }
    // Date-like columns.
    case Types.DATE:
    case Types.TIME:
    case Types.TIMESTAMP: {
        return ColumnType.DATE;
    }
    /*case Types.BINARY:
    case Types.VARBINARY:
    case Types.LONGVARBINARY:
    case Types.NULL:
    case Types.OTHER:
    case Types.JAVA_OBJECT:
    case Types.DISTINCT:
    case Types.STRUCT:
    case Types.ARRAY:
    case Types.BLOB:
    case Types.CLOB:
    case Types.REF:
    case Types.ROWID:
    case Types.SQLXML:
    case Types.DATALINK:*/
    // Unsupported (see above) types are treated as text values.
    default: {
        return ColumnType.TEXT;
    }
    }
}
From source file:org.dashbuilder.dataprovider.sql.JDBCUtils.java
public static ColumnType calculateType(int sqlDataType) {
    switch (sqlDataType) {
    // Category-like columns.
    case Types.CHAR:
    case Types.VARCHAR:
    case Types.NCHAR:
    case Types.NVARCHAR:
    case Types.BIT:
    case Types.BOOLEAN: {
        return ColumnType.LABEL;
    }
    // Text-like columns.
    case Types.LONGVARCHAR:
    case Types.LONGNVARCHAR: {
        return ColumnType.TEXT;
    }
    // Number-like columns.
    case Types.TINYINT:
    case Types.BIGINT:
    case Types.INTEGER:
    case Types.DECIMAL:
    case Types.DOUBLE:
    case Types.FLOAT:
    case Types.NUMERIC:
    case Types.REAL:
    case Types.SMALLINT: {
        return ColumnType.NUMBER;
    }
    // Date-like columns.
    case Types.DATE:
    case Types.TIME:
    case Types.TIMESTAMP: {
        return ColumnType.DATE;
    }
    // Unsupported
    default: {
        return null;
    }
    }
}