List of usage examples for java.sql Types DATE
public final static int DATE = 91
The constant in the Java programming language, sometimes referred to as a type code, that identifies the generic SQL type DATE.
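Before the full examples below, here is a minimal sketch of how this type code is typically consumed: comparing ResultSetMetaData.getColumnType against Types.DATE before reading a column. The in-memory H2 URL and the events table are assumptions for illustration only (the H2 driver must be on the classpath); any JDBC driver behaves the same way.

import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;
import java.sql.Types;

public class DateTypeCodeDemo {
    public static void main(String[] args) throws Exception {
        // The JDBC URL and the "events" table are placeholders for this sketch.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement stmt = con.createStatement()) {
            stmt.execute("CREATE TABLE events (id INT, created DATE)");
            stmt.execute("INSERT INTO events VALUES (1, DATE '2020-01-01')");
            try (ResultSet rs = stmt.executeQuery("SELECT * FROM events")) {
                ResultSetMetaData md = rs.getMetaData();
                while (rs.next()) {
                    for (int i = 1; i <= md.getColumnCount(); i++) {
                        // Types.DATE is the generic type code reported for SQL DATE columns.
                        if (md.getColumnType(i) == Types.DATE) {
                            Date d = rs.getDate(i);
                            System.out.println(md.getColumnLabel(i) + " is a DATE: " + d);
                        }
                    }
                }
            }
        }
    }
}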
From source file:org.jfree.data.jdbc.JDBCXYDataset.java
/**
 * ExecuteQuery will attempt to execute the query passed to it against the
 * provided database connection. If the connection is null then no action is
 * taken.
 *
 * The results from the query are extracted and cached locally, thus
 * applying an upper limit on how many rows can be retrieved successfully.
 *
 * @param query the query to be executed.
 * @param con the connection the query is to be executed against.
 *
 * @throws SQLException if there is a problem executing the query.
 */
public void executeQuery(Connection con, String query) throws SQLException {
    if (con == null) {
        throw new SQLException("There is no database to execute the query.");
    }

    ResultSet resultSet = null;
    Statement statement = null;
    try {
        statement = con.createStatement();
        resultSet = statement.executeQuery(query);
        ResultSetMetaData metaData = resultSet.getMetaData();

        int numberOfColumns = metaData.getColumnCount();
        int numberOfValidColumns = 0;
        int[] columnTypes = new int[numberOfColumns];
        for (int column = 0; column < numberOfColumns; column++) {
            try {
                int type = metaData.getColumnType(column + 1);
                switch (type) {
                case Types.NUMERIC:
                case Types.REAL:
                case Types.INTEGER:
                case Types.DOUBLE:
                case Types.FLOAT:
                case Types.DECIMAL:
                case Types.BIT:
                case Types.DATE:
                case Types.TIME:
                case Types.TIMESTAMP:
                case Types.BIGINT:
                case Types.SMALLINT:
                    ++numberOfValidColumns;
                    columnTypes[column] = type;
                    break;
                default:
                    columnTypes[column] = Types.NULL;
                    break;
                }
            } catch (SQLException e) {
                columnTypes[column] = Types.NULL;
                throw e;
            }
        }

        if (numberOfValidColumns <= 1) {
            throw new SQLException("Not enough valid columns were generated by query.");
        }

        /// First column is X data
        this.columnNames = new String[numberOfValidColumns - 1];
        /// Get the column names and cache them.
        int currentColumn = 0;
        for (int column = 1; column < numberOfColumns; column++) {
            if (columnTypes[column] != Types.NULL) {
                this.columnNames[currentColumn] = metaData.getColumnLabel(column + 1);
                ++currentColumn;
            }
        }

        // Might need to add, to free memory from any previous result sets
        if (this.rows != null) {
            for (int column = 0; column < this.rows.size(); column++) {
                ArrayList row = (ArrayList) this.rows.get(column);
                row.clear();
            }
            this.rows.clear();
        }

        // Are we working with a time series.
        switch (columnTypes[0]) {
        case Types.DATE:
        case Types.TIME:
        case Types.TIMESTAMP:
            this.isTimeSeries = true;
            break;
        default:
            this.isTimeSeries = false;
            break;
        }

        // Get all rows.
        // rows = new ArrayList();
        while (resultSet.next()) {
            ArrayList newRow = new ArrayList();
            for (int column = 0; column < numberOfColumns; column++) {
                Object xObject = resultSet.getObject(column + 1);
                switch (columnTypes[column]) {
                case Types.NUMERIC:
                case Types.REAL:
                case Types.INTEGER:
                case Types.DOUBLE:
                case Types.FLOAT:
                case Types.DECIMAL:
                case Types.BIGINT:
                case Types.SMALLINT:
                    newRow.add(xObject);
                    break;
                case Types.DATE:
                case Types.TIME:
                case Types.TIMESTAMP:
                    newRow.add(new Long(((Date) xObject).getTime()));
                    break;
                case Types.NULL:
                    break;
                default:
                    System.err.println("Unknown data");
                    columnTypes[column] = Types.NULL;
                    break;
                }
            }
            this.rows.add(newRow);
        }

        /// a kludge to make everything work when no rows returned
        if (this.rows.size() == 0) {
            ArrayList newRow = new ArrayList();
            for (int column = 0; column < numberOfColumns; column++) {
                if (columnTypes[column] != Types.NULL) {
                    newRow.add(new Integer(0));
                }
            }
            this.rows.add(newRow);
        }

        /// Determine max and min values.
        if (this.rows.size() < 1) {
            this.maxValue = 0.0;
            this.minValue = 0.0;
        } else {
            ArrayList row = (ArrayList) this.rows.get(0);
            this.maxValue = Double.NEGATIVE_INFINITY;
            this.minValue = Double.POSITIVE_INFINITY;
            for (int rowNum = 0; rowNum < this.rows.size(); ++rowNum) {
                row = (ArrayList) this.rows.get(rowNum);
                for (int column = 1; column < numberOfColumns; column++) {
                    Object testValue = row.get(column);
                    if (testValue != null) {
                        double test = ((Number) testValue).doubleValue();
                        if (test < this.minValue) {
                            this.minValue = test;
                        }
                        if (test > this.maxValue) {
                            this.maxValue = test;
                        }
                    }
                }
            }
        }

        fireDatasetChanged(new DatasetChangeInfo()); // TODO: fill in real change info
    } finally {
        if (resultSet != null) {
            try {
                resultSet.close();
            } catch (Exception e) {
                // TODO: is this a good idea?
            }
        }
        if (statement != null) {
            try {
                statement.close();
            } catch (Exception e) {
                // TODO: is this a good idea?
            }
        }
    }
}
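A hedged sketch of how the method above might be driven. The connection URL and the orders query are hypothetical placeholders; any result whose first column is DATE, TIME or TIMESTAMP makes the dataset a time series.

import java.sql.Connection;
import java.sql.DriverManager;
import org.jfree.data.jdbc.JDBCXYDataset;

public class JDBCXYDatasetUsage {
    public static void main(String[] args) throws Exception {
        // Placeholder connection; substitute any real JDBC source.
        Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
        JDBCXYDataset dataset = new JDBCXYDataset(con);
        // First column (created, a DATE) becomes the X axis as epoch milliseconds.
        dataset.executeQuery(con, "SELECT created, amount FROM orders");
        System.out.println("series: " + dataset.getSeriesCount());
        con.close();
    }
}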
From source file:com.flexive.core.storage.GenericDivisionExporter.java
/**
 * Dump a generic table to XML
 *
 * @param tableName    name of the table
 * @param stmt         an open statement
 * @param out          output stream
 * @param sb           an available and valid StringBuilder
 * @param xmlTag       name of the xml tag to write per row
 * @param idColumn     (optional) id column to sort results
 * @param onlyBinaries process binary fields (else these will be ignored)
 * @throws SQLException on errors
 * @throws IOException  on errors
 */
private void dumpTable(String tableName, Statement stmt, OutputStream out, StringBuilder sb, String xmlTag,
        String idColumn, boolean onlyBinaries) throws SQLException, IOException {
    ResultSet rs = stmt.executeQuery("SELECT * FROM " + tableName
            + (StringUtils.isEmpty(idColumn) ? "" : " ORDER BY " + idColumn + " ASC"));
    final ResultSetMetaData md = rs.getMetaData();
    String value, att;
    boolean hasSubTags;
    while (rs.next()) {
        hasSubTags = false;
        if (!onlyBinaries) {
            sb.setLength(0);
            sb.append(" <").append(xmlTag);
        }
        for (int i = 1; i <= md.getColumnCount(); i++) {
            value = null;
            att = md.getColumnName(i).toLowerCase();
            switch (md.getColumnType(i)) {
            case java.sql.Types.DECIMAL:
            case java.sql.Types.NUMERIC:
            case java.sql.Types.BIGINT:
                if (!onlyBinaries) {
                    value = String.valueOf(rs.getBigDecimal(i));
                    if (rs.wasNull())
                        value = null;
                }
                break;
            case java.sql.Types.INTEGER:
            case java.sql.Types.SMALLINT:
            case java.sql.Types.TINYINT:
                if (!onlyBinaries) {
                    value = String.valueOf(rs.getLong(i));
                    if (rs.wasNull())
                        value = null;
                }
                break;
            case java.sql.Types.DOUBLE:
            case java.sql.Types.FLOAT:
            case java.sql.Types.REAL:
                if (!onlyBinaries) {
                    value = String.valueOf(rs.getDouble(i));
                    if (rs.wasNull())
                        value = null;
                }
                break;
            case java.sql.Types.TIMESTAMP:
            case java.sql.Types.DATE:
                if (!onlyBinaries) {
                    final Timestamp ts = rs.getTimestamp(i);
                    if (rs.wasNull())
                        value = null;
                    else
                        value = FxFormatUtils.getDateTimeFormat().format(ts);
                }
                break;
            case java.sql.Types.BIT:
            case java.sql.Types.CHAR:
            case java.sql.Types.BOOLEAN:
                if (!onlyBinaries) {
                    value = rs.getBoolean(i) ? "1" : "0";
                    if (rs.wasNull())
                        value = null;
                }
                break;
            case java.sql.Types.CLOB:
            case java.sql.Types.BLOB:
            case java.sql.Types.LONGVARBINARY:
            case java.sql.Types.LONGVARCHAR:
            case java.sql.Types.VARBINARY:
            case java.sql.Types.VARCHAR:
            case java.sql.Types.BINARY:
            case SQL_LONGNVARCHAR:
            case SQL_NCHAR:
            case SQL_NCLOB:
            case SQL_NVARCHAR:
                hasSubTags = true;
                break;
            default:
                LOG.warn("Unhandled type [" + md.getColumnType(i) + "] for [" + tableName + "." + att + "]");
            }
            if (value != null && !onlyBinaries)
                sb.append(' ').append(att).append("=\"").append(value).append("\"");
        }
        if (hasSubTags) {
            if (!onlyBinaries)
                sb.append(">\n");
            for (int i = 1; i <= md.getColumnCount(); i++) {
                switch (md.getColumnType(i)) {
                case java.sql.Types.VARBINARY:
                case java.sql.Types.LONGVARBINARY:
                case java.sql.Types.BLOB:
                case java.sql.Types.BINARY:
                    if (idColumn == null)
                        throw new IllegalArgumentException("Id column required to process binaries!");
                    String binFile = FOLDER_BINARY + "/BIN_" + String.valueOf(rs.getLong(idColumn)) + "_" + i
                            + ".blob";
                    att = md.getColumnName(i).toLowerCase();
                    if (onlyBinaries) {
                        if (!(out instanceof ZipOutputStream))
                            throw new IllegalArgumentException(
                                    "out has to be a ZipOutputStream to store binaries!");
                        ZipOutputStream zip = (ZipOutputStream) out;
                        InputStream in = rs.getBinaryStream(i);
                        if (rs.wasNull())
                            break;
                        ZipEntry ze = new ZipEntry(binFile);
                        zip.putNextEntry(ze);
                        byte[] buffer = new byte[4096];
                        int read;
                        while ((read = in.read(buffer)) != -1)
                            zip.write(buffer, 0, read);
                        in.close();
                        zip.closeEntry();
                        zip.flush();
                    } else {
                        InputStream in = rs.getBinaryStream(i); // need to fetch to see if it is empty
                        if (rs.wasNull())
                            break;
                        in.close();
                        sb.append(" <").append(att).append(">").append(binFile).append("</").append(att)
                                .append(">\n");
                    }
                    break;
                case java.sql.Types.CLOB:
                case SQL_LONGNVARCHAR:
                case SQL_NCHAR:
                case SQL_NCLOB:
                case SQL_NVARCHAR:
                case java.sql.Types.LONGVARCHAR:
                case java.sql.Types.VARCHAR:
                    if (!onlyBinaries) {
                        value = rs.getString(i);
                        if (rs.wasNull())
                            break;
                        att = md.getColumnName(i).toLowerCase();
                        sb.append(" <").append(att).append('>');
                        escape(sb, value);
                        sb.append("</").append(att).append(">\n");
                    }
                    break;
                }
            }
            if (!onlyBinaries)
                sb.append(" </").append(xmlTag).append(">\n");
        } else {
            if (!onlyBinaries)
                sb.append("/>\n");
        }
        if (!onlyBinaries)
            write(out, sb);
    }
}
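The Types.TIMESTAMP/Types.DATE branch above combines getTimestamp with wasNull so that SQL NULL never reaches the formatter. A standalone sketch of that null-safe pattern, with SimpleDateFormat standing in for the flexive-specific FxFormatUtils helper:

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;

public class DateColumnFormatter {
    // Null-safe formatting of a DATE or TIMESTAMP column, mirroring the pattern above.
    static String formatDateColumn(ResultSet rs, int column) throws SQLException {
        Timestamp ts = rs.getTimestamp(column); // works for both DATE and TIMESTAMP columns
        if (rs.wasNull()) {
            return null; // emit nothing for SQL NULL
        }
        return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(ts);
    }
}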
From source file:org.apache.hadoop.sqoop.manager.SqlManager.java
public String toJavaType(int sqlType) {
    // mappings from http://java.sun.com/j2se/1.3/docs/guide/jdbc/getstart/mapping.html
    if (sqlType == Types.INTEGER) {
        return "Integer";
    } else if (sqlType == Types.VARCHAR) {
        return "String";
    } else if (sqlType == Types.CHAR) {
        return "String";
    } else if (sqlType == Types.LONGVARCHAR) {
        return "String";
    } else if (sqlType == Types.NUMERIC) {
        return "java.math.BigDecimal";
    } else if (sqlType == Types.DECIMAL) {
        return "java.math.BigDecimal";
    } else if (sqlType == Types.BIT) {
        return "Boolean";
    } else if (sqlType == Types.BOOLEAN) {
        return "Boolean";
    } else if (sqlType == Types.TINYINT) {
        return "Integer";
    } else if (sqlType == Types.SMALLINT) {
        return "Integer";
    } else if (sqlType == Types.BIGINT) {
        return "Long";
    } else if (sqlType == Types.REAL) {
        return "Float";
    } else if (sqlType == Types.FLOAT) {
        return "Double";
    } else if (sqlType == Types.DOUBLE) {
        return "Double";
    } else if (sqlType == Types.DATE) {
        return "java.sql.Date";
    } else if (sqlType == Types.TIME) {
        return "java.sql.Time";
    } else if (sqlType == Types.TIMESTAMP) {
        return "java.sql.Timestamp";
    } else {
        // TODO(aaron): Support BINARY, VARBINARY, LONGVARBINARY, DISTINCT, CLOB, BLOB, ARRAY,
        // STRUCT, REF, JAVA_OBJECT.
        return null;
    }
}
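For Types.DATE this mapping yields "java.sql.Date". SqlManager is abstract in Sqoop, so the sketch below inlines just the temporal branch of the mapping to make it runnable in isolation; the full method also covers numeric and string types as shown above.

import java.sql.Types;

public class TemporalTypeMapping {
    // Temporal subset of the toJavaType mapping above.
    static String toJavaType(int sqlType) {
        if (sqlType == Types.DATE) {
            return "java.sql.Date";
        } else if (sqlType == Types.TIME) {
            return "java.sql.Time";
        } else if (sqlType == Types.TIMESTAMP) {
            return "java.sql.Timestamp";
        }
        return null; // non-temporal types omitted in this sketch
    }

    public static void main(String[] args) {
        System.out.println(toJavaType(Types.DATE));      // java.sql.Date
        System.out.println(toJavaType(Types.TIMESTAMP)); // java.sql.Timestamp
    }
}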
From source file:org.sakaiproject.webservices.SakaiReport.java
protected String toCsvString(ResultSet rs, boolean includeHeaderRow) throws IOException, SQLException {
    StringWriter stringWriter = new StringWriter();
    CsvWriter writer = new CsvWriter(stringWriter, ',');
    writer.setRecordDelimiter('\n');
    writer.setForceQualifier(true);
    ResultSetMetaData rsmd = rs.getMetaData();
    int numColumns = rsmd.getColumnCount();

    if (includeHeaderRow) {
        String[] row = new String[numColumns];
        for (int i = 1; i < numColumns + 1; i++) {
            row[i - 1] = rsmd.getColumnLabel(i);
        }
        writer.writeRecord(row);
    }

    while (rs.next()) {
        String[] row = new String[numColumns];
        for (int i = 1; i < numColumns + 1; i++) {
            String column_name = rsmd.getColumnName(i);
            LOG.debug("Column Name=" + column_name + ",type=" + rsmd.getColumnType(i));
            switch (rsmd.getColumnType(i)) {
            case Types.BIGINT:
                row[i - 1] = String.valueOf(rs.getInt(i));
                break;
            case Types.BOOLEAN:
                row[i - 1] = String.valueOf(rs.getBoolean(i));
                break;
            case Types.BLOB:
                row[i - 1] = rs.getBlob(i).toString();
                break;
            case Types.DOUBLE:
                row[i - 1] = String.valueOf(rs.getDouble(i));
                break;
            case Types.FLOAT:
                row[i - 1] = String.valueOf(rs.getFloat(i));
                break;
            case Types.INTEGER:
                row[i - 1] = String.valueOf(rs.getInt(i));
                break;
            case Types.LONGVARCHAR:
                row[i - 1] = rs.getString(i);
                break;
            case Types.NVARCHAR:
                row[i - 1] = rs.getNString(i);
                break;
            case Types.VARCHAR:
                row[i - 1] = rs.getString(i);
                break;
            case Types.TINYINT:
                row[i - 1] = String.valueOf(rs.getInt(i));
                break;
            case Types.SMALLINT:
                row[i - 1] = String.valueOf(rs.getInt(i));
                break;
            case Types.DATE:
                row[i - 1] = rs.getDate(i).toString();
                break;
            case Types.TIMESTAMP:
                row[i - 1] = rs.getTimestamp(i).toString();
                break;
            default:
                row[i - 1] = rs.getString(i);
                break;
            }
            LOG.debug("value: " + row[i - 1]);
        }
        writer.writeRecord(row);
        // writer.endRecord();
    }
    LOG.debug("csv output:" + stringWriter.toString());
    return stringWriter.toString();
}
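Note that the Types.DATE branch above calls rs.getDate(i).toString() directly, which throws a NullPointerException when the column is SQL NULL (getDate returns null). A null-safe variant of that cell conversion might look like the sketch below; java.sql.Date.toString() always prints yyyy-MM-dd.

import java.sql.Date;
import java.sql.ResultSet;
import java.sql.SQLException;

public class CsvDateCell {
    // Null-safe conversion of a DATE column to a CSV cell value.
    static String dateCell(ResultSet rs, int column) throws SQLException {
        Date d = rs.getDate(column); // null for SQL NULL
        return d == null ? "" : d.toString(); // yyyy-MM-dd
    }
}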
From source file:org.apache.sqoop.hcat.HCatalogExportTest.java
public void testDateTypesToBigInt() throws Exception {
    final int TOTAL_RECORDS = 1 * 10;
    long offset = TimeZone.getDefault().getRawOffset();
    String table = getTableName().toUpperCase();
    ColumnGenerator[] cols = new ColumnGenerator[] {
            HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0), "date", Types.DATE,
                    HCatFieldSchema.Type.BIGINT, 0, 0, 0 - offset, new Date(70, 0, 1), KeyType.NOT_A_KEY),
            HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1), "time", Types.TIME,
                    HCatFieldSchema.Type.BIGINT, 0, 0, 36672000L - offset, new Time(10, 11, 12),
                    KeyType.NOT_A_KEY),
            HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2), "timestamp", Types.TIMESTAMP,
                    HCatFieldSchema.Type.BIGINT, 0, 0, 36672000L - offset,
                    new Timestamp(70, 0, 1, 10, 11, 12, 0), KeyType.NOT_A_KEY), };
    List<String> addlArgsArray = new ArrayList<String>();
    addlArgsArray.add("--map-column-hive");
    addlArgsArray.add("COL0=bigint,COL1=bigint,COL2=bigint");
    runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
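The expected bigint values in this test (0 - offset and 36672000L - offset) are simply getTime() of the locally-constructed temporal values: the deprecated Date/Timestamp constructors interpret their arguments in the default time zone, so the epoch value is shifted by the zone's raw offset. A small sketch reproducing the arithmetic (assuming no DST applies on 1970-01-01 in the default zone):

import java.sql.Date;
import java.sql.Timestamp;
import java.util.TimeZone;

public class DateToBigIntSketch {
    public static void main(String[] args) {
        long offset = TimeZone.getDefault().getRawOffset();
        Date date = new Date(70, 0, 1);                        // 1970-01-01, local time
        Timestamp ts = new Timestamp(70, 0, 1, 10, 11, 12, 0); // 1970-01-01 10:11:12, local time
        // 36672000 ms = 10h 11m 12s after local midnight.
        System.out.println(date.getTime() == 0 - offset);       // true
        System.out.println(ts.getTime() == 36672000L - offset); // true
    }
}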
From source file:com.panet.imeta.trans.steps.luciddbbulkloader.LucidDBBulkLoader.java
private void createBulkLoadConfigFile(String bcpFilename) throws KettleException {
    File bcpFile = new File(bcpFilename);
    FileWriter writer = null;

    try {
        writer = new FileWriter(bcpFile);

        // The first line is the version number, usually 9.0
        //
        writer.write("9.0" + Const.CR);

        // The second line contains the number of columns...
        //
        writer.write(meta.getFieldTable().length + Const.CR);

        RowMetaInterface targetFieldMeta = meta.getRequiredFields();

        data.bulkFormatMeta = new ValueMetaInterface[meta.getFieldTable().length];

        // The next block lists the columns from 1..N where N is the number of columns...
        //
        for (int i = 0; i < meta.getFieldTable().length; i++) {
            ValueMetaInterface field = getInputRowMeta().getValueMeta(data.keynrs[i]);

            // Col 1 : the column number (i+1)
            //
            writer.write(Integer.toString(i + 1) + " ");

            // Col 2 : the data type
            //
            String dataType = null;
            switch (field.getType()) {
            case ValueMetaInterface.TYPE_STRING:
                dataType = "SQLVARCHAR";
                break;
            case ValueMetaInterface.TYPE_BIGNUMBER:
                dataType = "SQLREAL";
                break;
            case ValueMetaInterface.TYPE_NUMBER:
                dataType = "SQLFLT8";
                break;
            case ValueMetaInterface.TYPE_INTEGER:
                dataType = "SQLBIGINT";
                break;
            case ValueMetaInterface.TYPE_DATE:
                // Use the actual datatypes in the target table to
                // determine how to create the control file column
                // definition for date/time fields.
                if (targetFieldMeta.getValueMetaList().get(i).getOriginalColumnType() == Types.DATE) {
                    data.bulkFormatMeta[i] = data.bulkDateMeta;
                    dataType = "SQLDATE";
                } else {
                    data.bulkFormatMeta[i] = data.bulkTimestampMeta;
                    dataType = "SQLTIMESTAMP";
                }
                break;
            // REVIEW jvs 13-Dec-2008: enable boolean support?
            case ValueMetaInterface.TYPE_BOOLEAN:
                dataType = "SQLCHAR";
                break;
            default:
                dataType = "SQLVARCHAR";
                break;
            }
            writer.write(dataType + " ");

            // Col 3 : an ignored column (start position or something like that)
            //
            writer.write("0 ");

            // Col 4 : the data length, just put the length metadata in here
            //
            if (field.getLength() == -1) {
                writer.write("1000 ");
            } else {
                writer.write("" + field.getLength() + " ");
            }

            // Col 5 : The separator is also ignored, we're going to put a tab in here, like in the sample
            //
            writer.write("\"\\t\" "); // "\t"

            // Col 6 : the column number again...
            //
            writer.write(Integer.toString(i + 1) + " ");

            // Col 7 : The identifier
            //
            writer.write(meta.getFieldTable()[i] + " ");

            // Col 8 : Collation / Format : leave it empty/default at the time being
            //
            writer.write("\"\" "); // ""

            // Newline to finish
            //
            writer.write(Const.CR);
        }
    } catch (Exception e) {
        throw new KettleException("Unable to create BCP control file", e);
    } finally {
        // That's it, close shop
        //
        if (writer != null) {
            try {
                writer.close();
            } catch (IOException e) {
                throw new KettleException("Unable to close BCP file '" + bcpFilename + "'", e);
            }
        }
    }
}
From source file:com.squid.core.domain.operators.ExtendedType.java
/**
 * Compute a type "order" that can be used to compare types and promote types.
 * The order is a couple (x,y), where x represents a family type (string, date, numbers)
 * and y an order within that family. A type should be promoted to the higher order.
 * @return
 */
public int[] computeTypeOrder() {
    switch (getDataType()) {
    case Types.BIT:
        return new int[] { NUMBER_ORDER, 0 };
    case Types.BOOLEAN:
        return new int[] { NUMBER_ORDER, 1 };
    case Types.TINYINT:
        return new int[] { NUMBER_ORDER, 2 };
    case Types.SMALLINT:
        return new int[] { NUMBER_ORDER, 3 };
    case Types.INTEGER:
        return new int[] { NUMBER_ORDER, 4 };
    case Types.BIGINT:
        return new int[] { NUMBER_ORDER, 5 };
    ///////////////////////////
    case Types.REAL:
        return new int[] { NUMBER_ORDER, 6 };
    case Types.DOUBLE:
    case Types.FLOAT:
        return new int[] { NUMBER_ORDER, 7 };
    case Types.DECIMAL:
        return new int[] { NUMBER_ORDER, 8 };
    case Types.NUMERIC:
        return new int[] { NUMBER_ORDER, 9 };
    //////////////////////////
    case Types.CHAR:
        return new int[] { STRING_ORDER, 0 };
    case Types.VARCHAR:
        return new int[] { STRING_ORDER, 1 };
    case Types.LONGVARCHAR:
        return new int[] { STRING_ORDER, 2 };
    case Types.CLOB:
        return new int[] { STRING_ORDER, 3 };
    ///////////////////////////
    case Types.TIME:
        return new int[] { DATE_ORDER, 1 };
    case Types.DATE:
        return new int[] { DATE_ORDER, 2 };
    case Types.TIMESTAMP:
        return new int[] { DATE_ORDER, 3 };
    case CustomTypes.INTERVAL:
        return new int[] { DATE_ORDER, 4 };
    ///////////////////////////
    default:
        return new int[] { UNKNOWN_ORDER, 0 };
    }
}
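A sketch of how such (family, rank) pairs support promotion. ExtendedType needs a full squid domain setup, so this standalone version inlines just the temporal branch; the DATE_ORDER constant here is illustrative, mirroring the family constants used above.

import java.sql.Types;

public class TypePromotionSketch {
    static final int DATE_ORDER = 2; // illustrative family constant

    // Rank of a temporal type within the DATE family, as in computeTypeOrder above.
    static int[] order(int sqlType) {
        switch (sqlType) {
        case Types.TIME:      return new int[] { DATE_ORDER, 1 };
        case Types.DATE:      return new int[] { DATE_ORDER, 2 };
        case Types.TIMESTAMP: return new int[] { DATE_ORDER, 3 };
        default:              return new int[] { -1, 0 };
        }
    }

    public static void main(String[] args) {
        int[] a = order(Types.DATE), b = order(Types.TIMESTAMP);
        // Same family, higher rank wins: DATE combined with TIMESTAMP promotes to TIMESTAMP.
        System.out.println(a[0] == b[0] && b[1] > a[1]); // true
    }
}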
From source file:com.jaspersoft.jrx.query.PlSqlQueryExecuter.java
private void createStatement() throws JRException {
    String queryString = getQueryString();
    if (connection != null && queryString != null && queryString.trim().length() > 0) {
        try {
            isStoredProcedure = isOracleStoredProcedure(queryString);
            if (isStoredProcedure) {
                statement = connection.prepareCall(queryString);
            } else {
                statement = connection.prepareStatement(queryString);
            }

            int fetchSize = JRPropertiesUtil.getInstance(getJasperReportsContext()).getIntegerProperty(
                    dataset.getPropertiesMap(), JRJdbcQueryExecuterFactory.PROPERTY_JDBC_FETCH_SIZE, 0);
            if (fetchSize > 0) {
                statement.setFetchSize(fetchSize);
            }

            List<String> parameterNames = getCollectedParameterNames();
            if (!parameterNames.isEmpty()) {
                for (int i = 0; i < parameterNames.size(); i++) {
                    String parameterName = (String) parameterNames.get(i);
                    JRValueParameter parameter = getValueParameter(parameterName);
                    Class<?> clazz = parameter.getValueClass();
                    Object parameterValue = parameter.getValue();
                    if (clazz.equals(java.lang.Object.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.JAVA_OBJECT);
                        } else {
                            statement.setObject(i + 1, parameterValue);
                        }
                    } else if (clazz.equals(java.lang.Boolean.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.BIT);
                        } else {
                            statement.setBoolean(i + 1, ((Boolean) parameterValue).booleanValue());
                        }
                    } else if (clazz.equals(java.lang.Byte.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.TINYINT);
                        } else {
                            statement.setByte(i + 1, ((Byte) parameterValue).byteValue());
                        }
                    } else if (clazz.equals(java.lang.Double.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.DOUBLE);
                        } else {
                            statement.setDouble(i + 1, ((Double) parameterValue).doubleValue());
                        }
                    } else if (clazz.equals(java.lang.Float.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.FLOAT);
                        } else {
                            statement.setFloat(i + 1, ((Float) parameterValue).floatValue());
                        }
                    } else if (clazz.equals(java.lang.Integer.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.INTEGER);
                        } else {
                            statement.setInt(i + 1, ((Integer) parameterValue).intValue());
                        }
                    } else if (clazz.equals(java.lang.Long.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.BIGINT);
                        } else {
                            statement.setLong(i + 1, ((Long) parameterValue).longValue());
                        }
                    } else if (clazz.equals(java.lang.Short.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.SMALLINT);
                        } else {
                            statement.setShort(i + 1, ((Short) parameterValue).shortValue());
                        }
                    } else if (clazz.equals(java.math.BigDecimal.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.DECIMAL);
                        } else {
                            statement.setBigDecimal(i + 1, (BigDecimal) parameterValue);
                        }
                    } else if (clazz.equals(java.lang.String.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.VARCHAR);
                        } else {
                            statement.setString(i + 1, parameterValue.toString());
                        }
                    } else if (clazz.equals(java.util.Date.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.DATE);
                        } else {
                            statement.setDate(i + 1,
                                    new java.sql.Date(((java.util.Date) parameterValue).getTime()));
                        }
                    } else if (clazz.equals(java.sql.Timestamp.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.TIMESTAMP);
                        } else {
                            statement.setTimestamp(i + 1, (java.sql.Timestamp) parameterValue);
                        }
                    } else if (clazz.equals(java.sql.Time.class)) {
                        if (parameterValue == null) {
                            statement.setNull(i + 1, Types.TIME);
                        } else {
                            statement.setTime(i + 1, (java.sql.Time) parameterValue);
                        }
                    } else if (clazz.equals(java.sql.ResultSet.class)) {
                        if (!isStoredProcedure) {
                            throw new JRException("OUT parameter used in non-stored procedure call : "
                                    + parameterName + " class " + clazz.getName());
                        } else if (cursorParameter > 0) {
                            throw new JRException("A stored procedure can have at most one cursor parameter : "
                                    + parameterName + " class " + clazz.getName());
                        }
                        ((CallableStatement) statement).registerOutParameter(i + 1, ORACLE_CURSOR_TYPE);
                        cursorParameter = i + 1;
                    } else {
                        throw new JRException("Parameter type not supported in query : " + parameterName
                                + " class " + clazz.getName());
                    }
                }
            }
        } catch (SQLException e) {
            throw new JRException(
                    "Error preparing statement for executing the report query : " + "\n\n" + queryString + "\n\n",
                    e);
        }
    }
}
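The java.util.Date branch above shows the standard null-handling convention: SQL NULL must be sent via setNull with the Types.DATE type code, and non-null values converted to java.sql.Date. A minimal standalone sketch of that binding pattern:

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;

public class DateParameterBinding {
    // Null-safe binding of a java.util.Date parameter, mirroring the branch above.
    static void bindDate(PreparedStatement stmt, int index, java.util.Date value) throws SQLException {
        if (value == null) {
            stmt.setNull(index, Types.DATE); // SQL NULL needs an explicit type code
        } else {
            stmt.setDate(index, new java.sql.Date(value.getTime()));
        }
    }
}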
From source file:org.easyrec.store.dao.plugin.impl.LogEntryDAOMysqlImpl.java
protected LogEntryDAOMysqlImpl(DataSource dataSource, SqlScriptService sqlScriptService,
        PluginRegistry pluginRegistry) {
    super(sqlScriptService);
    setDataSource(dataSource);

    startEntry = new SqlUpdate(dataSource,
            "INSERT INTO plugin_log(tenantId, pluginId, pluginVersion, startDate, assocTypeId, "
                    + "configuration) VALUES (?, ?, ?, ?, ?, ?)",
            new int[] { Types.INTEGER, Types.VARCHAR, Types.VARCHAR, Types.TIMESTAMP, Types.VARCHAR,
                    Types.BLOB });
    startEntry.compile();

    endEntry = new SqlUpdate(dataSource,
            "INSERT INTO plugin_log(tenantId, pluginId, pluginVersion, startDate, assocTypeId, configuration, "
                    + "endDate, statistics) VALUES (?, ?, ?, ?, ?, ?, ?, ?) "
                    + "ON DUPLICATE KEY UPDATE endDate = ?, statistics = ?",
            new int[] { Types.INTEGER, Types.VARCHAR, Types.VARCHAR, Types.TIMESTAMP, Types.VARCHAR, Types.BLOB,
                    Types.TIMESTAMP, Types.BLOB, Types.TIMESTAMP, Types.BLOB });
    endEntry.compile();

    endAllEntries = new SqlUpdate(dataSource,
            "UPDATE plugin_log SET endDate = ?, statistics = ? WHERE endDate IS NULL",
            new int[] { Types.TIMESTAMP, Types.BLOB });
    endAllEntries.compile();

    getRunningTenants = new MappingSqlQuery<Integer>(dataSource,
            "SELECT tenantId FROM plugin_log WHERE endDate IS NULL") {
        @Override
        protected Integer mapRow(ResultSet rs, int rowNum) throws SQLException {
            return rs.getInt("tenantId");
        }
    };
    getRunningTenants.compile();

    getLogEntries = new GetLogEntriesStatement(dataSource, pluginRegistry,
            "SELECT * FROM plugin_log ORDER BY endDate DESC, id DESC LIMIT ?, ?");
    getLogEntries.declareParameter(new SqlParameter("offset", Types.INTEGER));
    getLogEntries.declareParameter(new SqlParameter("limit", Types.INTEGER));
    getLogEntries.compile();

    getLogEntriesForTenant = new GetLogEntriesStatement(dataSource, pluginRegistry,
            "SELECT * FROM plugin_log WHERE tenantId = ? ORDER BY startDate DESC, id DESC LIMIT ?, ?");
    getLogEntriesForTenant.declareParameter(new SqlParameter("tenantId", Types.INTEGER));
    getLogEntriesForTenant.declareParameter(new SqlParameter("offset", Types.INTEGER));
    getLogEntriesForTenant.declareParameter(new SqlParameter("limit", Types.INTEGER));
    getLogEntriesForTenant.compile();

    getLogEntriesWithAssocType = new GetLogEntriesStatement(dataSource, pluginRegistry,
            "SELECT * FROM plugin_log WHERE assocTypeId = ? ORDER BY startDate DESC, id DESC LIMIT ?, ?");
    getLogEntriesWithAssocType.declareParameter(new SqlParameter("assocTypeId", Types.INTEGER));
    getLogEntriesWithAssocType.declareParameter(new SqlParameter("offset", Types.INTEGER));
    getLogEntriesWithAssocType.declareParameter(new SqlParameter("limit", Types.INTEGER));
    getLogEntriesWithAssocType.compile();

    getLogEntriesForTenantWithAssocType = new GetLogEntriesStatement(dataSource, pluginRegistry,
            "SELECT * FROM plugin_log WHERE tenantId = ? AND assocTypeId = ? ORDER BY startDate DESC, id DESC LIMIT ?, ?");
    getLogEntriesForTenantWithAssocType.declareParameter(new SqlParameter("tenantId", Types.INTEGER));
    getLogEntriesForTenantWithAssocType.declareParameter(new SqlParameter("assocTypeId", Types.INTEGER));
    getLogEntriesForTenantWithAssocType.declareParameter(new SqlParameter("offset", Types.INTEGER));
    getLogEntriesForTenantWithAssocType.declareParameter(new SqlParameter("limit", Types.INTEGER));
    getLogEntriesForTenantWithAssocType.compile();

    getNumberOfLogEntries = new SqlFunction<Integer>(dataSource,
            "SELECT count(*) AS entry_count FROM plugin_log");
    getNumberOfLogEntries.compile();

    getNumberOfLogEntriesForTenant = new SqlFunction<Integer>(dataSource,
            "SELECT count(*) AS entry_count FROM plugin_log WHERE tenantId = ?");
    getNumberOfLogEntriesForTenant.setResultType(Integer.class);
    getNumberOfLogEntriesForTenant.declareParameter(new SqlParameter("tenantId", Types.INTEGER));
    getNumberOfLogEntriesForTenant.compile();

    deleteLogEntries = new SqlUpdate(dataSource, "TRUNCATE plugin_log");
    deleteLogEntries.compile();

    getComputationDurationForDate = new SqlFunction<Integer>(dataSource,
            "SELECT sum(timestampdiff(second, startDate, endDate)) AS sum_seconds FROM plugin_log WHERE endDate BETWEEN ? AND ?");
    getComputationDurationForDate.setResultType(Integer.class);
    getComputationDurationForDate.declareParameter(new SqlParameter("start", Types.DATE));
    getComputationDurationForDate.declareParameter(new SqlParameter("end", Types.DATE));
    getComputationDurationForDate.compile();

    deleteLogEntryStatement = new SqlUpdate(dataSource,
            "DELETE FROM plugin_log WHERE tenantId = ? AND pluginId = ? AND pluginVersion = ? AND startDate = ? AND assocTypeId = ?");
    deleteLogEntryStatement.declareParameter(new SqlParameter("tenantId", Types.INTEGER));
    deleteLogEntryStatement.declareParameter(new SqlParameter("pluginId", Types.VARCHAR));
    deleteLogEntryStatement.declareParameter(new SqlParameter("pluginVersion", Types.VARCHAR));
    deleteLogEntryStatement.declareParameter(new SqlParameter("startDate", Types.TIMESTAMP));
    deleteLogEntryStatement.declareParameter(new SqlParameter("assocTypeId", Types.VARCHAR));
    deleteLogEntryStatement.compile();
}
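Here, Types.DATE appears in Spring JDBC parameter declarations: getComputationDurationForDate declares its two window parameters with the DATE type code so Spring binds them via setObject with that type. A hedged sketch of building and invoking such a compiled SqlFunction in isolation (the query text mirrors the one above):

import java.sql.Types;
import javax.sql.DataSource;
import org.springframework.jdbc.core.SqlParameter;
import org.springframework.jdbc.object.SqlFunction;

public class ComputationDurationQuery {
    // Mirrors getComputationDurationForDate above: a compiled SqlFunction whose
    // two window parameters are declared with the Types.DATE type code.
    static SqlFunction<Integer> build(DataSource dataSource) {
        SqlFunction<Integer> fn = new SqlFunction<Integer>(dataSource,
                "SELECT sum(timestampdiff(second, startDate, endDate)) FROM plugin_log WHERE endDate BETWEEN ? AND ?");
        fn.setResultType(Integer.class);
        fn.declareParameter(new SqlParameter("start", Types.DATE));
        fn.declareParameter(new SqlParameter("end", Types.DATE));
        fn.compile();
        return fn;
    }

    static Integer run(SqlFunction<Integer> fn, java.sql.Date start, java.sql.Date end) {
        return fn.runGeneric(new Object[] { start, end });
    }
}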
From source file:org.pentaho.di.trans.steps.luciddbbulkloader.LucidDBBulkLoader.java
private void createBulkLoadConfigFile(String bcpFilename) throws KettleException {
    File bcpFile = new File(bcpFilename);
    FileWriter writer = null;

    try {
        writer = new FileWriter(bcpFile);

        // The first line is the version number, usually 9.0
        //
        writer.write("9.0" + Const.CR);

        // The second line contains the number of columns...
        //
        writer.write(meta.getFieldTable().length + Const.CR);

        RowMetaInterface targetFieldMeta = meta.getRequiredFields(this);

        data.bulkFormatMeta = new ValueMetaInterface[meta.getFieldTable().length];

        // The next block lists the columns from 1..N where N is the number of columns...
        //
        for (int i = 0; i < meta.getFieldTable().length; i++) {
            ValueMetaInterface field = getInputRowMeta().getValueMeta(data.keynrs[i]);

            // Col 1 : the column number (i+1)
            //
            writer.write(Integer.toString(i + 1) + " ");

            // Col 2 : the data type
            //
            String dataType = null;
            switch (field.getType()) {
            case ValueMetaInterface.TYPE_STRING:
                dataType = "SQLVARCHAR";
                break;
            case ValueMetaInterface.TYPE_BIGNUMBER:
                dataType = "SQLREAL";
                break;
            case ValueMetaInterface.TYPE_NUMBER:
                dataType = "SQLFLT8";
                break;
            case ValueMetaInterface.TYPE_INTEGER:
                dataType = "SQLBIGINT";
                break;
            case ValueMetaInterface.TYPE_DATE:
                // Use the actual datatypes in the target table to
                // determine how to create the control file column
                // definition for date/time fields.
                if (targetFieldMeta.getValueMetaList().get(i).getOriginalColumnType() == Types.DATE) {
                    data.bulkFormatMeta[i] = data.bulkDateMeta;
                    dataType = "SQLDATE";
                } else {
                    data.bulkFormatMeta[i] = data.bulkTimestampMeta;
                    dataType = "SQLTIMESTAMP";
                }
                break;
            // REVIEW jvs 13-Dec-2008: enable boolean support?
            case ValueMetaInterface.TYPE_BOOLEAN:
                dataType = "SQLCHAR";
                break;
            default:
                dataType = "SQLVARCHAR";
                break;
            }
            writer.write(dataType + " ");

            // Col 3 : an ignored column (start position or something like that)
            //
            writer.write("0 ");

            // Col 4 : the data length, just put the length metadata in here
            //
            if (field.getLength() == -1) {
                writer.write("1000 ");
            } else {
                writer.write("" + field.getLength() + " ");
            }

            // Col 5 : The separator is also ignored, we're going to put a tab in here, like in the sample
            //
            writer.write("\"\\t\" "); // "\t"

            // Col 6 : the column number again...
            //
            writer.write(Integer.toString(i + 1) + " ");

            // Col 7 : The identifier
            //
            writer.write(meta.getFieldTable()[i] + " ");

            // Col 8 : Collation / Format : leave it empty/default at the time being
            //
            writer.write("\"\" "); // ""

            // Newline to finish
            //
            writer.write(Const.CR);
        }
    } catch (Exception e) {
        throw new KettleException("Unable to create BCP control file", e);
    } finally {
        // That's it, close shop
        //
        if (writer != null) {
            try {
                writer.close();
            } catch (IOException e) {
                throw new KettleException("Unable to close BCP file '" + bcpFilename + "'", e);
            }
        }
    }
}