List of usage examples for java.sql.Types.DATE
Field definition: public static final int DATE
The constant in the Java programming language, sometimes referred to as a type code, that identifies the generic SQL type DATE.
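For orientation, here is a minimal, self-contained sketch of the typical use of this constant: comparing it against the type code reported by ResultSetMetaData. It assumes an in-memory H2 database (jdbc:h2:mem:demo) with the H2 driver on the classpath; the table and column names are illustrative only.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;
import java.sql.Types;

public class DateColumnCheck {
    public static void main(String[] args) throws Exception {
        // Assumes the H2 driver is on the classpath; URL and schema are illustrative.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement st = con.createStatement()) {
            st.execute("create table events (id int, event_date date)");
            try (ResultSet rs = st.executeQuery("select * from events")) {
                ResultSetMetaData md = rs.getMetaData();
                for (int col = 1; col <= md.getColumnCount(); col++) {
                    // Types.DATE is the generic type code a driver reports for SQL DATE columns.
                    if (md.getColumnType(col) == Types.DATE) {
                        System.out.println(md.getColumnName(col) + " is a DATE column");
                    }
                }
            }
        }
    }
}

The same comparison works against the DATA_TYPE column returned by DatabaseMetaData.getColumns, as several of the examples below show.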
From source file:madgik.exareme.master.queryProcessor.analyzer.stat.ExternalStat.java
@Override
public Map<String, Table> extractStats() throws Exception {
    DatabaseMetaData dbmd = con.getMetaData(); // database metadata object
    // listing tables and columns
    String catalog = null;
    String schemaPattern = sch;
    String tableNamePattern = tblName;
    String columnNamePattern = "%";
    if (con.getClass().getName().contains("postgresql")) {
        // tableNamePattern = "\"" + tableNamePattern + "\"";
        schemaPattern = "public";
    }
    // ResultSet resultTables = dbmd.getTables(catalog, "public", tableNamePattern, types);
    ResultSet resultColumns = dbmd.getColumns(catalog, schemaPattern, tableNamePattern, columnNamePattern);
    if (con.getClass().getName().contains("postgresql")) {
        tableNamePattern = "\"" + tableNamePattern + "\"";
    } else if (con.getClass().getName().contains("oracle")) {
        tableNamePattern = schemaPattern + "." + tableNamePattern;
    }
    log.debug("Starting extracting stats");

    // while (resultTables.next()) {
    Map<String, Column> columnMap = new HashMap<String, Column>();
    // StringEscapeUtils.escapeJava(resultTables.getString(3));
    log.debug("Analyzing table " + tblName);
    int toupleSize = 0; // in bytes
    int columnCount = resultColumns.getMetaData().getColumnCount();

    Statement st = con.createStatement();
    ResultSet rs = st.executeQuery("select count(*) from " + tableNamePattern);
    int count = 0;
    if (rs.next()) {
        count = rs.getInt(1);
    } else {
        log.error("could not get count for table " + tableNamePattern);
    }
    rs.close();
    st.close();

    ResultSet pkrs = dbmd.getExportedKeys("", "", tblName);
    String pkey = "DEFAULT_KEY";
    while (pkrs.next()) {
        pkey = pkrs.getString("PKCOLUMN_NAME");
        break;
    }
    pkrs.close();

    if (count == 0) {
        log.debug("Empty table");
        Table t = new Table(tblName, columnCount, toupleSize, columnMap, count, pkey);
        schema.put(tblName, t);
        return schema;
    }

    while (resultColumns.next()) {
        String columnName = StringEscapeUtils.escapeJava(resultColumns.getString(4));
        try {
            String colNamePattern = columnName;
            if (con.getClass().getName().contains("postgresql")) {
                colNamePattern = "\"" + columnName + "\"";
            }
            int columnType = resultColumns.getInt(5);

            // computing column's size in bytes
            int columnSize = computeColumnSize(colNamePattern, columnType, tableNamePattern);
            toupleSize += columnSize;

            // execute queries for numberOfDiffValues, minVal, maxVal
            // computing column's min and max values
            String minVal = "0";
            String maxVal = "0";
            if (columnType != Types.BLOB) {
                MinMax mm = computeMinMax(tableNamePattern, colNamePattern);
                minVal = mm.getMin();
                maxVal = mm.getMax();
            }

            Map<String, Integer> diffValFreqMap = new HashMap<String, Integer>(); // only for equidepth!
            // for (ValFreq k : freqs) {
            //     diffValFreqMap.put(k.getVal(), k.getFreq());
            // }

            // add min/max diff vals in the sampling values
            int minOcc = 1;
            int maxOcc = 1;
            int diffVals = 0;
            boolean equidepth = false;
            if (equidepth) {
                // diffValFreqMap is used only in equidepth, do not compute it otherwise
                diffValFreqMap = computeDistinctValuesFrequency(tableNamePattern, colNamePattern);
                String minValChar = minVal;
                String maxValChar = maxVal;
                if (columnType == Types.VARCHAR || columnType == Types.CHAR
                        || columnType == Types.LONGNVARCHAR || columnType == Types.DATE) {
                    minValChar = "'" + minVal + "'";
                    maxValChar = "'" + maxVal + "'";
                }
                try {
                    minOcc = computeValOccurences(tableNamePattern, colNamePattern, minValChar);
                } catch (Exception e) {
                    log.error("Could not compute value occurences for column:" + colNamePattern
                            + " and value:" + minValChar);
                }
                if (equidepth && !diffValFreqMap.containsKey(minVal))
                    diffValFreqMap.put(minVal, minOcc);
                try {
                    maxOcc = computeValOccurences(tableNamePattern, colNamePattern, maxValChar);
                } catch (Exception e) {
                    log.error("Could not compute value occurences for column:" + colNamePattern
                            + " and value:" + maxValChar);
                }
                if (diffValFreqMap.containsKey(maxVal))
                    diffValFreqMap.put(maxVal, maxOcc);
                diffVals = diffValFreqMap.size();
            } else {
                diffVals = computeDiffVals(tableNamePattern, colNamePattern, columnType);
            }

            if (diffVals == 0) {
                // all values are null!
                continue;
            }

            Column c = new Column(columnName, columnType, columnSize, diffVals, minVal, maxVal, diffValFreqMap);
            columnMap.put(columnName, c);
        } catch (Exception ex) {
            log.error("could not analyze column " + columnName + ":" + ex.getMessage());
        }
    }

    Table t = new Table(tblName, columnCount, toupleSize, columnMap, count, pkey);
    schema.put(tblName, t);
    // }
    // resultTables.close();
    resultColumns.close();
    return schema;
}
From source file:com.opencsv.ResultSetHelperService.java
private String getColumnValue(ResultSet rs, int colType, int colIndex, boolean trim,
        String dateFormatString, String timestampFormatString) throws SQLException, IOException {
    String value = "";

    switch (colType) {
    case Types.BIT:
    case Types.JAVA_OBJECT:
        // Once Java 7 is the minimum supported version, the ObjectUtils.toString calls
        // below can be replaced with Objects.toString.
        value = ObjectUtils.toString(rs.getObject(colIndex), "");
        break;
    case Types.BOOLEAN:
        value = ObjectUtils.toString(rs.getBoolean(colIndex));
        break;
    case Types.NCLOB: // todo : use rs.getNClob
    case Types.CLOB:
        Clob c = rs.getClob(colIndex);
        if (c != null) {
            StrBuilder sb = new StrBuilder();
            sb.readFrom(c.getCharacterStream());
            value = sb.toString();
        }
        break;
    case Types.BIGINT:
        value = ObjectUtils.toString(rs.getLong(colIndex));
        break;
    case Types.DECIMAL:
    case Types.REAL:
    case Types.NUMERIC:
        value = ObjectUtils.toString(rs.getBigDecimal(colIndex), "");
        break;
    case Types.DOUBLE:
        value = ObjectUtils.toString(rs.getDouble(colIndex));
        break;
    case Types.FLOAT:
        value = ObjectUtils.toString(rs.getFloat(colIndex));
        break;
    case Types.INTEGER:
    case Types.TINYINT:
    case Types.SMALLINT:
        value = ObjectUtils.toString(rs.getInt(colIndex));
        break;
    case Types.DATE:
        java.sql.Date date = rs.getDate(colIndex);
        if (date != null) {
            SimpleDateFormat df = new SimpleDateFormat(dateFormatString);
            value = df.format(date);
        }
        break;
    case Types.TIME:
        value = ObjectUtils.toString(rs.getTime(colIndex), "");
        break;
    case Types.TIMESTAMP:
        value = handleTimestamp(rs.getTimestamp(colIndex), timestampFormatString);
        break;
    case Types.NVARCHAR: // todo : use rs.getNString
    case Types.NCHAR: // todo : use rs.getNString
    case Types.LONGNVARCHAR: // todo : use rs.getNString
    case Types.LONGVARCHAR:
    case Types.VARCHAR:
    case Types.CHAR:
        String columnValue = rs.getString(colIndex);
        if (trim && columnValue != null) {
            value = columnValue.trim();
        } else {
            value = columnValue;
        }
        break;
    default:
        value = "";
    }

    if (rs.wasNull() || value == null) {
        value = "";
    }
    return value;
}
From source file:co.nubetech.apache.hadoop.DateSplitter.java
/** Parse the long-valued timestamp into the appropriate SQL date type. */
private Date longToDate(long val, int sqlDataType) {
    switch (sqlDataType) {
    case Types.DATE:
        return new java.sql.Date(val);
    case Types.TIME:
        return new java.sql.Time(val);
    case Types.TIMESTAMP:
        return new java.sql.Timestamp(val);
    default: // Shouldn't ever hit this case.
        return null;
    }
}
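Note that all three java.sql temporal constructors used here take milliseconds since the Unix epoch, so a caller only needs to carry one long plus the column's type code. A hypothetical call (the method is private in DateSplitter, so this assumes a local copy of it):

long millis = System.currentTimeMillis();
java.util.Date d = longToDate(millis, java.sql.Types.DATE); // yields a java.sql.Date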
From source file:org.batoo.jpa.jdbc.adapter.HsqlAdaptor.java
/**
 * {@inheritDoc}
 */
@Override
protected String getColumnType(AbstractColumn cd, int sqlType) {
    switch (sqlType) {
    case Types.BLOB:
    case Types.CLOB:
        return "VARBINARY(" + cd.getLength() + ")";
    case Types.VARCHAR:
        return "VARCHAR(" + cd.getLength() + ")";
    case Types.TIME:
        return "TIME";
    case Types.DATE:
        return "DATE";
    case Types.TIMESTAMP:
        return "TIMESTAMP";
    case Types.CHAR:
        return "CHAR";
    case Types.BOOLEAN:
        return "BOOLEAN";
    case Types.TINYINT:
    case Types.SMALLINT:
        return "SMALLINT";
    case Types.INTEGER:
        return "INTEGER";
    case Types.BIGINT:
        return "BIGINT";
    case Types.FLOAT:
        return "FLOAT" + (cd.getPrecision() > 0 ? "(" + cd.getPrecision() + ")" : "");
    case Types.DOUBLE:
        return "DOUBLE" + (cd.getPrecision() > 0 ? "(" + cd.getPrecision() + ")" : "");
    case Types.DECIMAL:
        return "DECIMAL" + (cd.getPrecision() > 0
                ? "(" + cd.getPrecision() + (cd.getScale() > 0 ? "," + cd.getScale() : "") + ")"
                : "");
    }
    throw new IllegalArgumentException("Unhandled sql type: " + sqlType);
}
From source file:org.jumpmind.symmetric.service.impl.AbstractDataExtractorServiceTest.java
protected void save(TestExtract obj) {
    String updateSql = String.format(
            "update %s set varchar_value=?, longvarchar_value=?, timestamp_value=?, date_value=?, bit_value=?, bigint_value=?, decimal_value=? where id=?",
            TEST_TABLE);
    String insertSql = String.format(
            "insert into %s (varchar_value, longvarchar_value, timestamp_value, date_value, bit_value, bigint_value, decimal_value, id) values(?,?,?,?,?,?,?,?)",
            TEST_TABLE);
    if (0 == getSqlTemplate().update(updateSql,
            new Object[] { obj.getVarcharValue(), obj.getLongVarcharValue(), obj.getTimestampValue(),
                    obj.getDateValue(), obj.isBitValue(), obj.getBigIntValue(), obj.getDecimalValue(),
                    obj.getId() },
            new int[] { Types.VARCHAR, Types.VARCHAR, Types.TIMESTAMP, Types.DATE, Types.BIT,
                    Types.NUMERIC, Types.NUMERIC, Types.NUMERIC })) {
        getSqlTemplate().update(insertSql,
                new Object[] { obj.getVarcharValue(), obj.getLongVarcharValue(), obj.getTimestampValue(),
                        obj.getDateValue(), obj.isBitValue(), obj.getBigIntValue(), obj.getDecimalValue(),
                        obj.getId() },
                new int[] { Types.VARCHAR, Types.VARCHAR, Types.TIMESTAMP, Types.DATE, Types.BIT,
                        Types.NUMERIC, Types.NUMERIC, Types.NUMERIC });
    }
}
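The int[] arrays here map each bind value to a java.sql.Types code, so the template binds date_value as a SQL DATE. The same type code is what plain JDBC needs when a date parameter is null; a minimal sketch, assuming an open connection con and the hypothetical events table from the first example above:

try (PreparedStatement ps = con.prepareStatement(
        "insert into events (id, event_date) values (?, ?)")) {
    ps.setInt(1, 42);
    java.sql.Date eventDate = null; // e.g. not yet known
    if (eventDate != null) {
        ps.setDate(2, eventDate);
    } else {
        // setNull takes the java.sql.Types code so the driver knows the target SQL type
        ps.setNull(2, java.sql.Types.DATE);
    }
    ps.executeUpdate();
}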
From source file:com.btobits.automator.ant.sql.task.SQLCompareTask.java
private void verify() throws Exception {
    final LinkedList<SQLCompareTask.VerifyCell> rows = impl.getVerifySqls();
    for (final SQLCompareTask.VerifyCell sqlRow : rows) {
        sqlRow.validate();
        switch (getColumnType(sqlRow)) {
        case Types.DOUBLE: {
            final Double val = (Double) getRowValue(sqlRow);
            if (val == null) {
                errors.add("Error compare row, row not exist [" + sqlRow.toString() + "].");
            }
            final DoubleValidator doubleValidator = new DoubleValidator();
            if (doubleValidator.isValid(sqlRow.getValue())) {
                final Double dbValue = doubleValidator.validate(sqlRow.getValue());
                if (!dbValue.equals(val)) {
                    errors.add("Error compare row [" + sqlRow.getRow() + "], field [" + sqlRow.getField()
                            + "], value [" + sqlRow.getValue() + " != " + val + "]");
                }
            } else {
                errors.add("Error cast field [" + sqlRow.toString() + "] to Double value.");
            }
        }
            break;
        case Types.FLOAT: {
            final Float val = (Float) getRowValue(sqlRow);
            if (val == null) {
                errors.add("Error compare row, row not exist [" + sqlRow.toString() + "].");
            }
            final FloatValidator validator = new FloatValidator();
            if (validator.isValid(sqlRow.getValue())) {
                final Float dbValue = validator.validate(sqlRow.getValue());
                if (!dbValue.equals(val)) {
                    errors.add("Error compare row [" + sqlRow.getRow() + "], field [" + sqlRow.getField()
                            + "], value [" + sqlRow.getValue() + " != " + val + "]");
                }
            } else {
                errors.add("Error cast field [" + sqlRow.toString() + "] to Float value.");
            }
        }
            break;
        case Types.DECIMAL: {
            final BigDecimal val = (BigDecimal) getRowValue(sqlRow);
            if (val == null) {
                errors.add("Error compare row, row not exist [" + sqlRow.toString() + "].");
            }
            final BigDecimalValidator validator = new BigDecimalValidator();
            if (validator.isValid(sqlRow.getValue())) {
                BigDecimal dbValue = validator.validate(sqlRow.getValue());
                dbValue = dbValue.setScale(val.scale());
                if (!dbValue.equals(val)) {
                    errors.add("Error compare row [" + sqlRow.getRow() + "], field [" + sqlRow.getField()
                            + "], value [" + sqlRow.getValue() + " != " + val + "]");
                }
            } else {
                errors.add("Error cast field [" + sqlRow.toString() + "] to Decimal value.");
            }
        }
            break;
        case Types.DATE: {
            final Date val = (Date) getDateRowValue(sqlRow);
            if (val == null) {
                errors.add("Error compare row, row not exist [" + sqlRow.toString() + "].");
            }
            final DateValidator validator = DateValidator.getInstance();
            if (validator.isValid(sqlRow.getValue(), "yyyy-MM-dd")) {
                final Date dbValue = validator.validate(sqlRow.getValue(), "yyyy-MM-dd");
                if (!dbValue.equals(val)) {
                    errors.add("Error compare row [" + sqlRow.getRow() + "], field [" + sqlRow.getField()
                            + "], value [" + sqlRow.getValue() + " != " + val + "]");
                }
            } else {
                errors.add("Error cast field [" + sqlRow.toString() + "] to Date value.");
            }
        }
            break;
        case Types.TIME: {
            final Date val = (Date) getTimeRowValue(sqlRow);
            if (val == null) {
                errors.add("Error compare row, row not exist [" + sqlRow.toString() + "].");
            }
            final TimeValidator validator = TimeValidator.getInstance();
            if (validator.isValid(sqlRow.getValue(), "HH:mm:ss")) {
                final Calendar dbValue = validator.validate(sqlRow.getValue(), "HH:mm:ss");
                final Calendar dbVal = Calendar.getInstance();
                dbVal.setTime(val);
                if (validator.compareHours(dbValue, dbVal) != 0
                        || validator.compareMinutes(dbValue, dbVal) != 0
                        || validator.compareSeconds(dbValue, dbVal) != 0) {
                    errors.add("Error compare row [" + sqlRow.getRow() + "], field [" + sqlRow.getField()
                            + "], value [" + sqlRow.getValue() + " != " + val + "]");
                }
            } else {
                errors.add("Error cast field [" + sqlRow.toString() + "] to Time value.");
            }
        }
            break;
        case Types.TIMESTAMP: {
            final Date val = getDateTimeRowValue(sqlRow);
            if (val == null) {
                errors.add("Error compare row, row not exist [" + sqlRow.toString() + "].");
            }
            final CalendarValidator validatorDate = CalendarValidator.getInstance();
            final TimeValidator validatorTime = TimeValidator.getInstance();
            if (validatorDate.isValid(sqlRow.getValue(), "yyyy-MM-dd HH:mm:ss")) {
                final Calendar dbValue = validatorDate.validate(sqlRow.getValue(), "yyyy-MM-dd HH:mm:ss");
                final Calendar dbVal = Calendar.getInstance();
                dbVal.setTimeInMillis(val.getTime());
                if (validatorDate.compareDates(dbVal, dbValue) != 0
                        || validatorTime.compareHours(dbValue, dbVal) != 0
                        || validatorTime.compareMinutes(dbValue, dbVal) != 0
                        || validatorTime.compareSeconds(dbValue, dbVal) != 0) {
                    errors.add("Error compare row [" + sqlRow.getRow() + "], field [" + sqlRow.getField()
                            + "], value [" + sqlRow.getValue() + " != " + val + "]");
                }
            } else {
                errors.add("Error cast field [" + sqlRow.toString() + "] to Timestamp value.");
            }
        }
            break;
        default: {
            final String dbValue = getStringRowValue(sqlRow);
            if (dbValue == null) {
                errors.add("Error compare row, row not exist [" + sqlRow.toString() + "].");
            } else if (!StringUtils.equals(sqlRow.getValue(), dbValue)) {
                errors.add("Error compare row [" + sqlRow.getRow() + "], field [" + sqlRow.getField()
                        + "], value [" + sqlRow.getValue() + " != " + dbValue + "]");
            }
        }
        }
    }
}
From source file:solidbase.core.plugins.DumpJSON.java
public boolean execute(CommandProcessor processor, Command command, boolean skip) throws SQLException {
    if (!triggerPattern.matcher(command.getCommand()).matches())
        return false;

    if (command.isTransient()) {
        /* DUMP JSON DATE_CREATED ON | OFF */
        SQLTokenizer tokenizer = new SQLTokenizer(
                SourceReaders.forString(command.getCommand(), command.getLocation()));
        // TODO Maybe DUMP JSON CONFIG or DUMP JSON SET
        // TODO What about other configuration settings?
        tokenizer.get("DUMP");
        tokenizer.get("JSON");
        tokenizer.get("DATE_CREATED"); // FIXME This should be CREATED_DATE
        Token t = tokenizer.get("ON", "OFF");
        tokenizer.get((String) null);
        // TODO I think we should have a scope that is restricted to the current file and a scope
        // that gets inherited when running or including another file.
        AbstractScope scope = processor.getContext().getScope();
        scope.set("solidbase.dump_json.dateCreated", t.eq("ON")); // TODO Make this a constant
        return true;
    }

    if (skip)
        return true;

    Parsed parsed = parse(command);

    AbstractScope scope = processor.getContext().getScope();
    Object object = scope.get("solidbase.dump_json.dateCreated");
    boolean dateCreated = object == null || object instanceof Boolean && (Boolean) object;

    Resource jsvResource = new FileResource(new File(parsed.fileName)); // Relative to current folder

    try {
        OutputStream out = jsvResource.getOutputStream();
        if (parsed.gzip)
            out = new BufferedOutputStream(new GZIPOutputStream(out, 65536), 65536); // TODO Ctrl-C, close the outputstream?

        JSONWriter jsonWriter = new JSONWriter(out);
        try {
            Statement statement = processor.createStatement();
            try {
                ResultSet result = statement.executeQuery(parsed.query);
                ResultSetMetaData metaData = result.getMetaData();

                // Define locals
                int columns = metaData.getColumnCount();
                int[] types = new int[columns];
                String[] names = new String[columns];
                boolean[] ignore = new boolean[columns];
                FileSpec[] fileSpecs = new FileSpec[columns];
                String[] schemaNames = new String[columns];
                String[] tableNames = new String[columns];

                // Analyze metadata
                for (int i = 0; i < columns; i++) {
                    int col = i + 1;
                    String name = metaData.getColumnName(col).toUpperCase();
                    types[i] = metaData.getColumnType(col);
                    if (types[i] == Types.DATE && parsed.dateAsTimestamp)
                        types[i] = Types.TIMESTAMP;
                    names[i] = name;
                    if (parsed.columns != null) {
                        ColumnSpec columnSpec = parsed.columns.get(name);
                        if (columnSpec != null)
                            if (columnSpec.skip)
                                ignore[i] = true;
                            else
                                fileSpecs[i] = columnSpec.toFile;
                    }
                    if (parsed.coalesce != null && parsed.coalesce.notFirst(name))
                        ignore[i] = true;
                    // TODO STRUCT serialize
                    // TODO This must be optional and not the default
                    else if (types[i] == 2002 || JDBCSupport.toTypeName(types[i]) == null)
                        ignore[i] = true;
                    tableNames[i] = StringUtils
                            .upperCase(StringUtils.defaultIfEmpty(metaData.getTableName(col), null));
                    schemaNames[i] = StringUtils
                            .upperCase(StringUtils.defaultIfEmpty(metaData.getSchemaName(col), null));
                }

                if (parsed.coalesce != null)
                    parsed.coalesce.bind(names);

                // Write header
                JSONObject properties = new JSONObject();
                properties.set("version", "1.0");
                properties.set("format", "record-stream");
                properties.set("description", "SolidBase JSON Data Dump File");
                properties.set("createdBy", new JSONObject("product", "SolidBase", "version", "2.0.0"));
                if (dateCreated) {
                    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    properties.set("createdDate", format.format(new Date()));
                }
                if (parsed.binaryFileName != null) {
                    // TODO FIXME Should be wrapped in a SourceException:
                    // solidbase.solidstack.io.FatalURISyntaxException: java.net.URISyntaxException:
                    // Illegal character in path at index 1: &{folder}/JIADHOCCH
                    Resource binResource = Resources.getResource(parsed.binaryFileName);
                    Resource resource = Resources.getResource(parsed.fileName);
                    properties.set("binaryFile", binResource.getPathFrom(resource).toString());
                }

                JSONArray fields = new JSONArray();
                properties.set("fields", fields);
                for (int i = 0; i < columns; i++)
                    if (!ignore[i]) {
                        JSONObject field = new JSONObject();
                        field.set("schemaName", schemaNames[i]);
                        field.set("tableName", tableNames[i]);
                        field.set("name", names[i]);
                        field.set("type", JDBCSupport.toTypeName(types[i])); // TODO Better error message when type is not recognized, for example Oracle's 2007 for a user type
                        FileSpec spec = fileSpecs[i];
                        if (spec != null && !spec.generator.isDynamic()) {
                            Resource fileResource = new FileResource(spec.generator.fileName);
                            field.set("file", fileResource.getPathFrom(jsvResource).toString());
                        }
                        fields.add(field);
                    }

                FileSpec binaryFile = parsed.binaryFileName != null
                        ? new FileSpec(true, parsed.binaryFileName, 0)
                        : null;

                jsonWriter.writeFormatted(properties, 120);
                jsonWriter.getWriter().write('\n');

                Counter counter = null;
                if (parsed.logRecords > 0)
                    counter = new FixedCounter(parsed.logRecords);
                else if (parsed.logSeconds > 0)
                    counter = new TimedCounter(parsed.logSeconds);

                try {
                    while (result.next()) {
                        Object[] values = new Object[columns];
                        for (int i = 0; i < values.length; i++)
                            values[i] = JDBCSupport.getValue(result, types, i);

                        if (parsed.coalesce != null)
                            parsed.coalesce.coalesce(values);

                        JSONArray array = new JSONArray();
                        for (int i = 0; i < columns; i++)
                            if (!ignore[i]) {
                                Object value = values[i];
                                if (value == null) {
                                    array.add(null);
                                    continue;
                                }

                                // TODO 2 columns can't be written to the same dynamic filename
                                FileSpec spec = fileSpecs[i];
                                if (spec != null) { // The column is redirected to its own file
                                    String relFileName = null;
                                    int startIndex;
                                    if (spec.binary) {
                                        if (spec.generator.isDynamic()) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.out = fileResource.getOutputStream();
                                            spec.index = 0;
                                            relFileName = fileResource.getPathFrom(jsvResource).toString();
                                        } else if (spec.out == null) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.out = fileResource.getOutputStream();
                                        }
                                        if (value instanceof Blob) {
                                            InputStream in = ((Blob) value).getBinaryStream();
                                            startIndex = spec.index;
                                            byte[] buf = new byte[4096];
                                            for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                                spec.out.write(buf, 0, read);
                                                spec.index += read;
                                            }
                                            in.close();
                                        } else if (value instanceof byte[]) {
                                            startIndex = spec.index;
                                            spec.out.write((byte[]) value);
                                            spec.index += ((byte[]) value).length;
                                        } else
                                            throw new SourceException(names[i] + " (" + value.getClass().getName()
                                                    + ") is not a binary column. Only binary columns like BLOB, RAW, BINARY VARYING can be written to a binary file",
                                                    command.getLocation());
                                        if (spec.generator.isDynamic()) {
                                            spec.out.close();
                                            JSONObject ref = new JSONObject();
                                            ref.set("file", relFileName);
                                            ref.set("size", spec.index - startIndex);
                                            array.add(ref);
                                        } else {
                                            JSONObject ref = new JSONObject();
                                            ref.set("index", startIndex);
                                            ref.set("length", spec.index - startIndex);
                                            array.add(ref);
                                        }
                                    } else {
                                        if (spec.generator.isDynamic()) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.writer = new DeferringWriter(spec.threshold, fileResource,
                                                    jsonWriter.getEncoding());
                                            spec.index = 0;
                                            relFileName = fileResource.getPathFrom(jsvResource).toString();
                                        } else if (spec.writer == null) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.writer = new OutputStreamWriter(fileResource.getOutputStream(),
                                                    jsonWriter.getEncoding());
                                        }
                                        if (value instanceof Blob || value instanceof byte[])
                                            throw new SourceException(names[i]
                                                    + " is a binary column. Binary columns like BLOB, RAW, BINARY VARYING cannot be written to a text file",
                                                    command.getLocation());
                                        if (value instanceof Clob) {
                                            Reader in = ((Clob) value).getCharacterStream();
                                            startIndex = spec.index;
                                            char[] buf = new char[4096];
                                            for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                                spec.writer.write(buf, 0, read);
                                                spec.index += read;
                                            }
                                            in.close();
                                        } else {
                                            String val = value.toString();
                                            startIndex = spec.index;
                                            spec.writer.write(val);
                                            spec.index += val.length();
                                        }
                                        if (spec.generator.isDynamic()) {
                                            DeferringWriter writer = (DeferringWriter) spec.writer;
                                            if (writer.isBuffered())
                                                array.add(writer.clearBuffer());
                                            else {
                                                JSONObject ref = new JSONObject();
                                                ref.set("file", relFileName);
                                                ref.set("size", spec.index - startIndex);
                                                array.add(ref);
                                            }
                                            writer.close();
                                        } else {
                                            JSONObject ref = new JSONObject();
                                            ref.set("index", startIndex);
                                            ref.set("length", spec.index - startIndex);
                                            array.add(ref);
                                        }
                                    }
                                } else if (value instanceof Clob)
                                    array.add(((Clob) value).getCharacterStream());
                                else if (binaryFile != null && (value instanceof Blob || value instanceof byte[])) {
                                    if (binaryFile.out == null) {
                                        String fileName = binaryFile.generator.generateFileName(null);
                                        Resource fileResource = new FileResource(fileName);
                                        binaryFile.out = fileResource.getOutputStream();
                                        if (parsed.binaryGzip)
                                            binaryFile.out = new BufferedOutputStream(
                                                    new GZIPOutputStream(binaryFile.out, 65536), 65536); // TODO Ctrl-C, close the outputstream?
                                    }
                                    int startIndex = binaryFile.index;
                                    if (value instanceof Blob) {
                                        InputStream in = ((Blob) value).getBinaryStream();
                                        byte[] buf = new byte[4096];
                                        for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                            binaryFile.out.write(buf, 0, read);
                                            binaryFile.index += read;
                                        }
                                        in.close();
                                    } else {
                                        binaryFile.out.write((byte[]) value);
                                        binaryFile.index += ((byte[]) value).length;
                                    }
                                    JSONObject ref = new JSONObject();
                                    ref.set("index", startIndex);
                                    ref.set("length", binaryFile.index - startIndex);
                                    array.add(ref);
                                } else
                                    array.add(value);
                            }

                        for (ListIterator<Object> i = array.iterator(); i.hasNext();) {
                            Object value = i.next();
                            if (value instanceof java.sql.Date || value instanceof java.sql.Time
                                    || value instanceof java.sql.Timestamp || value instanceof java.sql.RowId)
                                i.set(value.toString());
                        }

                        jsonWriter.write(array);
                        jsonWriter.getWriter().write('\n');

                        if (counter != null && counter.next())
                            processor.getProgressListener().println("Exported " + counter.total() + " records.");
                    }
                    if (counter != null && counter.needFinal())
                        processor.getProgressListener().println("Exported " + counter.total() + " records.");
                } finally {
                    // Close files that have been left open
                    for (FileSpec fileSpec : fileSpecs)
                        if (fileSpec != null) {
                            if (fileSpec.out != null)
                                fileSpec.out.close();
                            if (fileSpec.writer != null)
                                fileSpec.writer.close();
                        }
                    if (binaryFile != null && binaryFile.out != null)
                        binaryFile.out.close();
                }
            } finally {
                processor.closeStatement(statement, true);
            }
        } finally {
            jsonWriter.close();
        }
    } catch (IOException e) {
        throw new SystemException(e);
    }
    return true;
}
From source file:org.executequery.gui.browser.ColumnData.java
/**
 * Returns whether this column is a date type or an extension of one,
 * i.e. Types.DATE, Types.TIME or Types.TIMESTAMP.
 *
 * @return true | false
 */
public boolean isDateDataType() {
    return sqlType == Types.DATE || sqlType == Types.TIME || sqlType == Types.TIMESTAMP;
}
From source file:org.netflux.core.FieldMetadata.java
/**
 * Sets the <code>type</code> of the field that this metadata describes. The currently supported types are:
 * string ({@link java.sql.Types#CHAR}, {@link java.sql.Types#VARCHAR}), date ({@link java.sql.Types#DATE},
 * {@link java.sql.Types#TIMESTAMP}), numeric ({@link java.sql.Types#SMALLINT}, {@link java.sql.Types#INTEGER},
 * {@link java.sql.Types#BIGINT}, {@link java.sql.Types#DECIMAL}, {@link java.sql.Types#FLOAT},
 * {@link java.sql.Types#DOUBLE}) and boolean ({@link java.sql.Types#BOOLEAN}). If the supplied
 * <code>type</code> is not one of the above, an <code>IllegalArgumentException</code> will be thrown.
 *
 * @param type the <code>type</code> of the field that this metadata describes.
 * @throws IllegalArgumentException if the supplied <code>type</code> is not included in the supported types.
 */
public void setType(int type) {
    switch (type) {
    case Types.CHAR:
    case Types.VARCHAR:
    case Types.DATE:
    case Types.TIMESTAMP:
    case Types.SMALLINT:
    case Types.INTEGER:
    case Types.BIGINT:
    case Types.DECIMAL:
    case Types.FLOAT:
    case Types.DOUBLE:
    case Types.BOOLEAN:
        this.type = type;
        break;
    default:
        if (FieldMetadata.log.isInfoEnabled()) {
            FieldMetadata.log.info(FieldMetadata.messages.getString("exception.unsupported.type"));
        }
        throw new IllegalArgumentException();
    }
}
From source file:org.apache.sqoop.manager.ConnManager.java
/**
 * Resolve a database-specific type to the Avro data type.
 * @param sqlType sql type
 * @return avro type
 */
public Type toAvroType(int sqlType) {
    switch (sqlType) {
    case Types.TINYINT:
    case Types.SMALLINT:
    case Types.INTEGER:
        return Type.INT;
    case Types.BIGINT:
        return Type.LONG;
    case Types.BIT:
    case Types.BOOLEAN:
        return Type.BOOLEAN;
    case Types.REAL:
        return Type.FLOAT;
    case Types.FLOAT:
    case Types.DOUBLE:
        return Type.DOUBLE;
    case Types.NUMERIC:
    case Types.DECIMAL:
        return Type.STRING;
    case Types.CHAR:
    case Types.VARCHAR:
    case Types.LONGVARCHAR:
    case Types.LONGNVARCHAR:
    case Types.NVARCHAR:
    case Types.NCHAR:
        return Type.STRING;
    case Types.DATE:
    case Types.TIME:
    case Types.TIMESTAMP:
        return Type.STRING;
    case Types.BLOB:
    case Types.BINARY:
    case Types.VARBINARY:
    case Types.LONGVARBINARY:
        return Type.BYTES;
    default:
        throw new IllegalArgumentException("Cannot convert SQL type " + sqlType);
    }
}