Usage examples for java.sql.PreparedStatement.setObject
void setObject(int parameterIndex, Object x) throws SQLException;
Sets the value of the designated parameter using the given object.
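With the two-argument overload, the JDBC driver infers the SQL type from the Java type of the argument (String, Integer, BigDecimal, java.sql.Date, and so on). Before the examples from real projects below, here is a minimal stand-alone sketch; the USERS table, its columns, and the H2 in-memory URL are illustrative assumptions only, not taken from any of the source files.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;

public class SetObjectBasics {
    public static void main(String[] args) throws SQLException {
        // Illustrative in-memory H2 database; assumes the H2 driver is on the classpath.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement ddl = con.createStatement()) {
            ddl.execute("CREATE TABLE USERS (NAME VARCHAR(50), AGE INT)");
            try (PreparedStatement ps = con.prepareStatement(
                    "INSERT INTO USERS (NAME, AGE) VALUES (?, ?)")) {
                ps.setObject(1, "Alice");             // bound as VARCHAR
                ps.setObject(2, Integer.valueOf(30)); // bound as INTEGER
                ps.executeUpdate();
            }
        }
    }
}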
From source file:org.efaps.admin.user.Person.java
/**
 * The method reads directly from the database all stored roles for this
 * person. The found roles are returned as an instance of {@link java.util.Set}.
 *
 * @param _jaasSystem JAAS system for which the roles are searched in eFaps
 *                    (if value is <code>null</code>, all roles independent from the
 *                    related JAAS system are returned)
 * @return set of all found roles for given JAAS system
 * @throws EFapsException on error
 */
public Set<Role> getRolesFromDB(final JAASSystem _jaasSystem)
    throws EFapsException
{
    final Set<Role> ret = new HashSet<Role>();
    ConnectionResource rsrc = null;
    try {
        final List<Long> roleIds = new ArrayList<Long>();
        rsrc = Context.getThreadContext().getConnectionResource();
        PreparedStatement stmt = null;
        try {
            if (_jaasSystem == null) {
                stmt = rsrc.getConnection().prepareStatement(SQL_ROLE);
            } else {
                stmt = rsrc.getConnection().prepareStatement(SQL_ROLEJAASKEY);
                stmt.setObject(2, _jaasSystem.getId());
            }
            stmt.setObject(1, getId());

            final ResultSet resultset = stmt.executeQuery();
            while (resultset.next()) {
                roleIds.add(resultset.getLong(1));
            }
            resultset.close();
        } catch (final SQLException e) {
            throw new EFapsException(getClass(), "getRolesFromDB.SQLException", e, getName());
        } finally {
            try {
                if (stmt != null) {
                    stmt.close();
                }
            } catch (final SQLException e) {
                throw new EFapsException(getClass(), "getRolesFromDB.SQLException", e, getName());
            }
        }
        rsrc.commit();

        final Set<UUID> roleUUIDs = AppAccessHandler.getLoginRoles();
        for (final Long roleId : roleIds) {
            final Role role = Role.get(roleId);
            if (!AppAccessHandler.excludeMode()
                    || AppAccessHandler.excludeMode() && roleUUIDs.contains(role.getUUID())) {
                ret.add(role);
            }
        }
    } finally {
        if (rsrc != null && rsrc.isOpened()) {
            rsrc.abort();
        }
    }
    return ret;
}
From source file:org.efaps.admin.user.Person.java
/**
 * The method reads directly from the database all stored companies for this
 * person. The found companies are returned as an instance of {@link Set}.
 *
 * @param _jaasSystem JAAS system for which the companies are read from eFaps
 *                    (if value is <code>null</code>, all companies independent from
 *                    the related JAAS system are returned)
 * @return set of all found companies for given JAAS system
 * @throws EFapsException on error
 */
public Set<Company> getCompaniesFromDB(final JAASSystem _jaasSystem)
    throws EFapsException
{
    final Set<Company> ret = new HashSet<Company>();
    ConnectionResource rsrc = null;
    try {
        final List<Long> companyIds = new ArrayList<Long>();
        rsrc = Context.getThreadContext().getConnectionResource();
        PreparedStatement stmt = null;
        try {
            if (_jaasSystem == null) {
                stmt = rsrc.getConnection().prepareStatement(SQL_COMPANY);
            } else {
                stmt = rsrc.getConnection().prepareStatement(SQL_COMPANYJAASKEY);
                stmt.setObject(2, _jaasSystem.getId());
            }
            stmt.setObject(1, getId());

            final ResultSet resultset = stmt.executeQuery();
            while (resultset.next()) {
                companyIds.add(resultset.getLong(1));
            }
            resultset.close();
        } catch (final SQLException e) {
            throw new EFapsException(getClass(), "getCompaniesFromDB.SQLException", e, getName());
        } finally {
            try {
                if (stmt != null) {
                    stmt.close();
                }
            } catch (final SQLException e) {
                throw new EFapsException(getClass(), "getCompaniesFromDB.SQLException", e, getName());
            }
        }
        rsrc.commit();

        for (final Long companyId : companyIds) {
            final Company company = Company.get(companyId);
            ret.add(company);
        }
    } finally {
        if (rsrc != null && rsrc.isOpened()) {
            rsrc.abort();
        }
    }
    return ret;
}
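The two eFaps methods above share one pattern: prepare a statement whose parameter count depends on whether a JAAS system is given, bind the ids with setObject, collect the result ids, and only then resolve them to domain objects. A minimal sketch of that binding pattern with try-with-resources follows; the DataSource, the SQL text, and the table name are assumptions for illustration and are not part of the eFaps API.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.sql.DataSource;

public class RoleIdLookup {

    // Hypothetical query; the real eFaps SQL_ROLEJAASKEY constant is not shown in the excerpt.
    private static final String SQL =
            "SELECT ROLEID FROM PERSON_ROLE WHERE PERSONID = ? AND JAASSYSID = ?";

    /** Returns the role ids linked to a person, binding both parameters via setObject. */
    public static List<Long> roleIds(DataSource ds, long personId, long jaasSystemId) throws SQLException {
        List<Long> ids = new ArrayList<>();
        try (Connection con = ds.getConnection();
             PreparedStatement stmt = con.prepareStatement(SQL)) {
            stmt.setObject(1, personId);   // Long is mapped to BIGINT by the driver
            stmt.setObject(2, jaasSystemId);
            try (ResultSet rs = stmt.executeQuery()) {
                while (rs.next()) {
                    ids.add(rs.getLong(1));
                }
            }
        }
        return ids;
    }
}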
From source file:org.apache.geronimo.security.realm.providers.GenericHttpHeaderSqlLoginmodule.java
public boolean login() throws LoginException {
    Map<String, String> headerMap = null;
    loginSucceeded = false;
    Connection conn = null;
    ResultSet result = null;
    PreparedStatement statement = null;

    Callback[] callbacks = new Callback[1];
    callbacks[0] = new RequestCallback();
    try {
        callbackHandler.handle(callbacks);
    } catch (IOException ioe) {
        throw (LoginException) new LoginException().initCause(ioe);
    } catch (UnsupportedCallbackException uce) {
        throw (LoginException) new LoginException().initCause(uce);
    }
    httpRequest = ((RequestCallback) callbacks[0]).getRequest();

    String[] headers = headerNames.split(",");
    try {
        headerMap = matchHeaders(httpRequest, headers);
    } catch (HeaderMismatchException e) {
        throw (LoginException) new LoginException("Header mismatch error").initCause(e);
    }
    if (headerMap.isEmpty()) {
        throw new FailedLoginException();
    }

    if (authenticationAuthority.equalsIgnoreCase("Siteminder")) {
        HeaderHandler headerHandler = new SiteminderHeaderHandler();
        username = headerHandler.getUser(headerMap);
    } else if (authenticationAuthority.equalsIgnoreCase("Datapower")) {
        /* To be Done */
    }
    if (username == null || username.equals("")) {
        username = null;
        throw new FailedLoginException();
    }

    if (dataSource != null) {
        try {
            conn = dataSource.getConnection();
            try {
                statement = conn.prepareStatement(groupSelect);
                int count = countParameters(groupSelect);
                for (int i = 0; i < count; i++) {
                    statement.setObject(i + 1, username);
                }
                result = statement.executeQuery();
                while (result.next()) {
                    String userName = result.getString(1);
                    String groupName = result.getString(2);
                    if (userName.equals(username))
                        groups.add(groupName);
                }
                if (groups.isEmpty()) {
                    log.error("No roles associated with user " + username);
                    loginSucceeded = false;
                    throw new FailedLoginException();
                } else
                    loginSucceeded = true;
            } finally {
                // guard against NPE when an earlier statement failed before these were assigned
                if (result != null) {
                    result.close();
                }
                if (statement != null) {
                    statement.close();
                }
                conn.close();
            }
        } catch (LoginException e) {
            // Clear out the private state
            username = null;
            groups.clear();
            throw e;
        } catch (SQLException sqle) {
            // Clear out the private state
            username = null;
            groups.clear();
            throw (LoginException) new LoginException("SQL error").initCause(sqle);
        } catch (Exception e) {
            // Clear out the private state
            username = null;
            groups.clear();
            throw (LoginException) new LoginException("Could not access datasource").initCause(e);
        }
    }
    return loginSucceeded;
}
From source file:com.streamsets.pipeline.lib.jdbc.JdbcMultiRowRecordWriter.java
@SuppressWarnings("unchecked")
private void processPartition(Connection connection, Multimap<Long, Record> partitions, Long partitionKey,
        List<OnRecordErrorException> errorRecords) throws SQLException, OnRecordErrorException {
    Collection<Record> partition = partitions.get(partitionKey);
    // Fetch the base insert query for this partition.
    SortedMap<String, String> columnsToParameters = getFilteredColumnsToParameters(getColumnsToParameters(),
            partition.iterator().next());

    // put all the records in a queue for consumption
    LinkedList<Record> queue = new LinkedList<>(partition);

    // compute number of rows per batch
    if (columnsToParameters.isEmpty()) {
        throw new OnRecordErrorException(Errors.JDBCDEST_22);
    }
    int maxRowsPerBatch = maxPrepStmtParameters / columnsToParameters.size();

    PreparedStatement statement = null;

    // parameters are indexed starting with 1
    int paramIdx = 1;
    int rowCount = 0;
    while (!queue.isEmpty()) {
        // we're at the start of a batch.
        if (statement == null) {
            // instantiate the new statement
            statement = generatePreparedStatement(columnsToParameters,
                    // the next batch will have either the max number of records, or however many are left.
                    Math.min(maxRowsPerBatch, queue.size()), getTableName(), connection);
        }

        // process the next record into the current statement
        Record record = queue.removeFirst();
        for (String column : columnsToParameters.keySet()) {
            Field field = record.get(getColumnsToFields().get(column));
            Field.Type fieldType = field.getType();
            Object value = field.getValue();

            try {
                switch (fieldType) {
                case LIST:
                    List<Object> unpackedList = unpackList((List<Field>) value);
                    Array array = connection.createArrayOf(getSQLTypeName(fieldType), unpackedList.toArray());
                    statement.setArray(paramIdx, array);
                    break;
                case DATE:
                case DATETIME:
                    // Java Date types are not accepted by JDBC drivers, so we need to convert to java.sql.Date
                    java.util.Date date = field.getValueAsDatetime();
                    statement.setObject(paramIdx, new java.sql.Date(date.getTime()));
                    break;
                default:
                    statement.setObject(paramIdx, value, getColumnType(column));
                    break;
                }
            } catch (SQLException e) {
                LOG.error(Errors.JDBCDEST_23.getMessage(), column, fieldType.toString(), e);
                throw new OnRecordErrorException(record, Errors.JDBCDEST_23, column, fieldType.toString());
            }
            ++paramIdx;
        }
        rowCount++;

        // check if we've filled up the current batch
        if (rowCount == maxRowsPerBatch) {
            // time to execute the current batch
            statement.addBatch();
            statement.executeBatch();
            statement.close();
            statement = null;

            // reset our counters
            rowCount = 0;
            paramIdx = 1;
        }
    }

    // check if there are any records left. this should occur whenever there isn't *exactly* maxRowsPerBatch
    // records in this partition.
    if (statement != null) {
        statement.addBatch();
        statement.executeBatch();
        statement.close();
    }
}
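The default branch above uses the three-argument overload, setObject(index, value, targetSqlType), which asks the driver to convert the value to the given java.sql.Types code before sending it. A minimal sketch of that overload follows; the MEASUREMENTS table and the H2 URL are illustrative assumptions, not part of the StreamSets code.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;

public class TypedSetObject {
    public static void main(String[] args) throws SQLException {
        // Assumes the H2 driver is on the classpath.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement ddl = con.createStatement()) {
            ddl.execute("CREATE TABLE MEASUREMENTS (LABEL VARCHAR(50), READING DECIMAL(10,2))");
            try (PreparedStatement ps = con.prepareStatement(
                    "INSERT INTO MEASUREMENTS (LABEL, READING) VALUES (?, ?)")) {
                ps.setObject(1, "probe-1", Types.VARCHAR);
                // The String "42.5" is converted by the driver to DECIMAL before binding.
                ps.setObject(2, "42.5", Types.DECIMAL);
                ps.executeUpdate();
            }
        }
    }
}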
From source file:edu.ku.brc.specify.conversion.BasicSQLUtils.java
/**
 * Binds a value to a PreparedStatement parameter using the setter that matches
 * the given SQL type.
 *
 * @param pStmt the prepared statement
 * @param type the java.sql.Types code of the target column
 * @param colInx the 1-based parameter index
 * @param data the value to bind (may be null)
 * @throws SQLException on error
 */
public static void setData(final PreparedStatement pStmt, final int type, final int colInx, final Object data)
        throws SQLException {
    if (data == null) {
        pStmt.setObject(colInx, null);
        return;
    }

    boolean isStr = data instanceof String;
    switch (type) {
    case java.sql.Types.TINYINT:
    case java.sql.Types.SMALLINT:
    case java.sql.Types.INTEGER:
        if (isStr) {
            pStmt.setString(colInx, (String) data);
        } else {
            pStmt.setInt(colInx, (Integer) data);
        }
        break;

    case java.sql.Types.FLOAT:
        if (isStr) {
            pStmt.setString(colInx, (String) data);
        } else {
            pStmt.setFloat(colInx, (Float) data);
        }
        break;

    case java.sql.Types.VARCHAR:
    case java.sql.Types.CHAR:
    case java.sql.Types.LONGVARCHAR:
    case java.sql.Types.LONGNVARCHAR:
    case java.sql.Types.NCHAR:
        pStmt.setString(colInx, (String) data);
        break;

    case java.sql.Types.REAL:
    case java.sql.Types.DOUBLE:
        if (isStr) {
            pStmt.setString(colInx, (String) data);
        } else {
            pStmt.setDouble(colInx, (Double) data);
        }
        break;

    case java.sql.Types.DATE:
        if (isStr) {
            pStmt.setString(colInx, (String) data);
        } else {
            pStmt.setDate(colInx, (java.sql.Date) data);
        }
        break;

    case java.sql.Types.TIMESTAMP:
        if (isStr) {
            pStmt.setString(colInx, (String) data);
        } else {
            pStmt.setTimestamp(colInx, (Timestamp) data);
        }
        break;

    case java.sql.Types.BOOLEAN:
        if (isStr) {
            String val = (String) data;
            pStmt.setBoolean(colInx, val.equalsIgnoreCase("true"));
        } else {
            pStmt.setBoolean(colInx, (Boolean) data);
        }
        break;

    case java.sql.Types.BIT:
        if (data instanceof Boolean) {
            pStmt.setBoolean(colInx, (Boolean) data);
        } else {
            pStmt.setBoolean(colInx, ((Integer) data) != 0);
        }
        break;

    default:
        throw new RuntimeException(String.format("Missing case for SQL Type %d for Column: %d Data[%s]",
                type, colInx, data.getClass().getSimpleName()));
    }
}
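Much of this per-type dispatch can also be expressed with the three-argument setObject overload, which delegates the conversion to the driver. The following is a minimal sketch of that alternative, not the Specify code; the SimpleBinder class and bind method are hypothetical names introduced for illustration.

import java.sql.PreparedStatement;
import java.sql.SQLException;

public final class SimpleBinder {

    /**
     * Binds a value and lets the driver convert it to the target SQL type.
     * Uses setNull for null values, which some drivers require instead of
     * the untyped setObject(idx, null).
     */
    public static void bind(PreparedStatement ps, int colInx, int sqlType, Object data) throws SQLException {
        if (data == null) {
            ps.setNull(colInx, sqlType);
        } else {
            // e.g. sqlType = java.sql.Types.INTEGER with a String "42" asks the driver to parse it
            ps.setObject(colInx, data, sqlType);
        }
    }

    private SimpleBinder() {
    }
}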
From source file:edu.ku.brc.specify.toycode.mexconabio.CopyFromGBIF.java
public void process() {
    boolean doQueryForCollNum = true;

    String pSQL = "INSERT INTO raw (old_id,data_provider_id,data_resource_id,resource_access_point_id, institution_code, collection_code, "
            + "catalogue_number, scientific_name, author, rank, kingdom, phylum, class, order_rank, family, genus, species, subspecies, latitude, longitude, "
            + "lat_long_precision, max_altitude, min_altitude, altitude_precision, min_depth, max_depth, depth_precision, continent_ocean, country, state_province, county, collector_name, "
            + "locality,year, month, day, basis_of_record, identifier_name, identification_date,unit_qualifier, created, modified, deleted, collector_num) "
            + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";

    String gbifSQLBase = "SELECT r.id, r.data_provider_id, r.data_resource_id, r.resource_access_point_id, r.institution_code, r.collection_code, "
            + "r.catalogue_number, r.scientific_name, r.author, r.rank, r.kingdom, r.phylum, r.class, r.order_rank, r.family, r.genus, r.species, r.subspecies, "
            + "r.latitude, r.longitude, r.lat_long_precision, r.max_altitude, r.min_altitude, r.altitude_precision, r.min_depth, r.max_depth, r.depth_precision, "
            + "r.continent_ocean, r.country, r.state_province, r.county, r.collector_name, r.locality, r.year, r.month, r.day, r.basis_of_record, r.identifier_name, "
            + "r.identification_date, r.unit_qualifier, r.created, r.modified, r.deleted";

    String gbifSQL;
    if (doQueryForCollNum) {
        gbifSQL = gbifSQLBase + " FROM raw_occurrence_record r";
    } else {
        gbifSQL = gbifSQLBase
                + ", i.identifier FROM raw_occurrence_record r, identifier_record i WHERE r.id = i.occurrence_id AND i.identifier_type = 3";
    }

    BasicSQLUtils.update(srcDBConn, "DELETE FROM raw WHERE id > 0");

    long totalRecs = BasicSQLUtils.getCount(dbConn, "SELECT COUNT(*) FROM raw_occurrence_record");
    long procRecs = 0;
    long startTime = System.currentTimeMillis();
    int secsThreshold = 0;

    PrintWriter pw = null;
    final double HRS = 1000.0 * 60.0 * 60.0;

    Statement gStmt = null;
    PreparedStatement pStmt = null;
    PreparedStatement stmt = null;

    try {
        pw = new PrintWriter("gbif.log");

        pStmt = srcDBConn.prepareStatement(pSQL);
        stmt = dbConn2.prepareStatement(
                "SELECT identifier FROM identifier_record WHERE occurrence_id = ? AND identifier_type = 3");
        //stmt.setFetchSize(Integer.MIN_VALUE);

        System.out.println("Total Records: " + totalRecs);
        pw.println("Total Records: " + totalRecs);

        gStmt = dbConn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        gStmt.setFetchSize(Integer.MIN_VALUE);

        String fullSQL = gbifSQL;
        System.out.println(fullSQL);

        ResultSet gRS = gStmt.executeQuery(fullSQL);
        ResultSetMetaData rsmd = gRS.getMetaData();
        int lastColInx = rsmd.getColumnCount() + (doQueryForCollNum ? 1 : 0);

        while (gRS.next()) {
            int id = gRS.getInt(1);
            pStmt.setObject(1, id);

            for (int i = 2; i <= rsmd.getColumnCount(); i++) {
                Object obj = gRS.getObject(i);
                pStmt.setObject(i, obj);
            }

            String collNum = null;
            if (doQueryForCollNum) {
                //String tmpSQL = String.format("SELECT identifier FROM identifier_record WHERE occurrence_id = %d AND identifier_type = 3", id);
                //System.out.println(tmpSQL);
                stmt.setInt(1, id);
                ResultSet rs = stmt.executeQuery();
                if (rs.next()) {
                    collNum = rs.getString(1);
                }
                rs.close();
            } else {
                collNum = gRS.getString(lastColInx - 1);
            }

            if (StringUtils.isNotEmpty(collNum)) {
                if (collNum.length() < 256) {
                    pStmt.setString(lastColInx, collNum);
                } else {
                    pStmt.setString(lastColInx, collNum.substring(0, 255));
                }
            } else {
                pStmt.setObject(lastColInx, null);
            }

            try {
                pStmt.executeUpdate();
            } catch (Exception ex) {
                System.err.println("For ID[" + gRS.getObject(1) + "]");
                ex.printStackTrace();
                pw.print("For ID[" + gRS.getObject(1) + "] " + ex.getMessage());
                pw.flush();
            }

            procRecs++;
            if (procRecs % 10000 == 0) {
                long endTime = System.currentTimeMillis();
                long elapsedTime = endTime - startTime;

                double avergeTime = (double) elapsedTime / (double) procRecs;
                double hrsLeft = (((double) elapsedTime / (double) procRecs) * (double) totalRecs - procRecs) / HRS;

                int seconds = (int) (elapsedTime / 60000.0);
                if (secsThreshold != seconds) {
                    secsThreshold = seconds;

                    String msg = String.format(
                            "Elapsed %8.2f hr.mn Ave Time: %5.2f Percent: %6.3f Hours Left: %8.2f ",
                            ((double) (elapsedTime)) / HRS, avergeTime,
                            100.0 * ((double) procRecs / (double) totalRecs), hrsLeft);
                    System.out.println(msg);
                    pw.println(msg);
                    pw.flush();
                }
            }
        }

    } catch (Exception ex) {
        ex.printStackTrace();

    } finally {
        try {
            if (gStmt != null) {
                gStmt.close();
            }
            if (pStmt != null) {
                pStmt.close();
            }
            if (stmt != null) {
                stmt.close();
            }
            pw.close();
        } catch (Exception ex) {
        }
    }
    System.out.println("Done transferring.");
    pw.println("Done transferring.");

    /*
    int count = 0;
    boolean cont = true;
    while (cont) {
        long start = System.currentTimeMillis();

        Statement gStmt = null;
        PreparedStatement pStmt = null;
        try {
            gStmt = dbConn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);

            String fullSQL = gbifSQL + String.format(" AND r.id >= %d AND r.id < %d", count, count + recordStep);
            //System.out.println(fullSQL);

            int cnt = 0;
            ResultSet rs = gStmt.executeQuery(fullSQL);
            ResultSetMetaData rsmd = rs.getMetaData();

            //System.out.println("Done with query.");

            pStmt = srcDBConn.prepareStatement(pSQL);
            count += recordStep;

            while (rs.next()) {
                Integer id = rs.getInt(1);
                pStmt.setInt(1, id);
                for (int i = 2; i <= rsmd.getColumnCount(); i++) {
                    Object obj = rs.getObject(i);
                    pStmt.setObject(i, obj);
                }
                pStmt.executeUpdate();
                cnt++;
                procRecs++;
            }
            rs.close();

            if (count == 0) {
                break;
            }

        } catch (Exception ex) {
            ex.printStackTrace();

        } finally {
            try {
                if (gStmt != null) {
                    gStmt.close();
                }
                if (pStmt != null) {
                    pStmt.close();
                }
            } catch (Exception ex) {
            }
        }

        long endTime = System.currentTimeMillis();
        long deltaTime = endTime - start;
        long elapsedTime = endTime - startTime;

        double avergeTime = (double) elapsedTime / (double) procRecs;
        double hrsLeft = (((double) procRecs / (double) elapsedTime) * (double) totalRecs) / 3600000.0;

        int seconds = (int) (elapsedTime / 60000.0);
        if (secsThreshold != seconds) {
            secsThreshold = seconds;
            System.out.println(String.format(
                    "Elapsed %8.2f hr.mn Time: %5.2f Ave Time: %5.2f Percent: %6.3f Hours Left: Elapsed %8.2f ",
                    ((double) (elapsedTime)) / 3600000.0,
                    ((double) (deltaTime)) / 1000.0,
                    avergeTime,
                    100.0 * ((double) procRecs / (double) totalRecs),
                    hrsLeft));
        }
    }
    System.out.println("Done transferring.");
    */

    /*
    Statement uStmt = null;
    try {
        uStmt = srcDBConn.createStatement();
        int rv = uStmt.executeUpdate("ALTER TABLE raw ADD FULLTEXT(catalogue_number, genus, species, subspecies, collector_num)");
        System.out.println("Indexing rv = " + rv);

    } catch (Exception ex) {
        ex.printStackTrace();

    } finally {
        try {
            if (uStmt != null) {
                uStmt.close();
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
    System.out.println("Done Indexing.");
    */
}
From source file:org.cloudgraph.rdb.service.GraphQuery.java
private int countResults(Connection con, Query query, PlasmaType type) {
    int result = 0;
    Object[] params = new Object[0];
    StringBuilder sqlQuery = new StringBuilder();

    AliasMap aliasMap = new AliasMap(type);

    // construct a filter adding to alias map
    RDBFilterAssembler filterAssembler = null;
    Where where = query.findWhereClause();
    if (where != null) {
        filterAssembler = new RDBFilterAssembler(where, type, aliasMap);
        params = filterAssembler.getParams();
        if (log.isDebugEnabled()) {
            log.debug("filter: " + filterAssembler.getFilter());
        }
    }

    sqlQuery.append("SELECT COUNT(*)");

    // construct a FROM clause from alias map
    sqlQuery.append(" FROM ");
    Iterator<PlasmaType> it = aliasMap.getTypes();
    int count = 0;
    while (it.hasNext()) {
        PlasmaType aliasType = it.next();
        String alias = aliasMap.getAlias(aliasType);
        if (count > 0)
            sqlQuery.append(", ");
        sqlQuery.append(this.statementUtil.getQualifiedPhysicalName(aliasType));
        sqlQuery.append(" ");
        sqlQuery.append(alias);
        count++;
    }

    if (filterAssembler != null) {
        sqlQuery.append(" ");
        sqlQuery.append(filterAssembler.getFilter());
    }

    if (query.getStartRange() != null && query.getEndRange() != null)
        log.warn("query range (start: " + query.getStartRange() + ", end: " + query.getEndRange()
                + ") ignored for count operation");

    PreparedStatement statement = null;
    ResultSet rs = null;
    try {
        if (log.isDebugEnabled()) {
            log.debug("queryString: " + sqlQuery.toString());
            log.debug("executing...");
        }

        statement = con.prepareStatement(sqlQuery.toString(), ResultSet.TYPE_FORWARD_ONLY,
                /* ResultSet.TYPE_SCROLL_INSENSITIVE, */
                ResultSet.CONCUR_READ_ONLY);

        // set params
        // note params are pre-converted
        // to string in filter assembly
        if (filterAssembler != null) {
            params = filterAssembler.getParams();
            if (params != null)
                for (int i = 0; i < params.length; i++)
                    statement.setObject(i + 1, params[i]);
        }

        if (log.isDebugEnabled()) {
            if (params == null || params.length == 0) {
                log.debug("executing: " + sqlQuery.toString());
            } else {
                StringBuilder paramBuf = new StringBuilder();
                paramBuf.append(" [");
                for (int p = 0; p < params.length; p++) {
                    if (p > 0)
                        paramBuf.append(", ");
                    paramBuf.append(String.valueOf(params[p]));
                }
                paramBuf.append("]");
                log.debug("executing: " + sqlQuery.toString() + " " + paramBuf.toString());
            }
        }

        statement.execute();
        rs = statement.getResultSet();
        rs.next();
        result = rs.getInt(1);
    } catch (Throwable t) {
        StringBuffer buf = this.generateErrorDetail(t, sqlQuery.toString(), filterAssembler);
        log.error(buf.toString());
        throw new DataAccessException(t);
    } finally {
        try {
            if (rs != null)
                rs.close();
            if (statement != null)
                statement.close();
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
        }
    }
    return result;
}
From source file:com.alibaba.wasp.jdbc.TestPreparedStatement.java
public void testDataTypes() throws SQLException {
    conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    conn.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE);
    Statement stat = conn.createStatement();
    PreparedStatement prep;
    ResultSet rs;
    trace("Create tables");
    stat.execute("CREATE TABLE T_INT(ID INT PRIMARY KEY,VALUE INT)");
    stat.execute("CREATE TABLE T_VARCHAR(ID INT PRIMARY KEY,VALUE VARCHAR(255))");
    stat.execute("CREATE TABLE T_DECIMAL_0(ID INT PRIMARY KEY,VALUE DECIMAL(30,0))");
    stat.execute("CREATE TABLE T_DECIMAL_10(ID INT PRIMARY KEY,VALUE DECIMAL(20,10))");
    stat.execute("CREATE TABLE T_DATETIME(ID INT PRIMARY KEY,VALUE DATETIME)");

    prep = conn.prepareStatement("INSERT INTO T_INT VALUES(?,?)", ResultSet.TYPE_FORWARD_ONLY,
            ResultSet.CONCUR_READ_ONLY);
    prep.setInt(1, 1);
    prep.setInt(2, 0);
    prep.executeUpdate();
    prep.setInt(1, 2);
    prep.setInt(2, -1);
    prep.executeUpdate();
    prep.setInt(1, 3);
    prep.setInt(2, 3);
    prep.executeUpdate();
    prep.setInt(1, 4);
    prep.setNull(2, Types.INTEGER);
    prep.executeUpdate();
    prep.setInt(1, 5);
    prep.setBigDecimal(2, new BigDecimal("0"));
    prep.executeUpdate();
    prep.setInt(1, 6);
    prep.setString(2, "-1");
    prep.executeUpdate();
    prep.setInt(1, 7);
    prep.setObject(2, new Integer(3));
    prep.executeUpdate();
    prep.setObject(1, "8"); // should throw an exception
    prep.setObject(2, null); // some databases don't allow calling setObject with null (no data type)
    prep.executeUpdate();
    prep.setInt(1, 9);
    prep.setObject(2, -4, Types.VARCHAR);
    prep.executeUpdate();
    prep.setInt(1, 10);
    prep.setObject(2, "5", Types.INTEGER);
    prep.executeUpdate();
    prep.setInt(1, 11);
    prep.setObject(2, null, Types.INTEGER);
    prep.executeUpdate();
    prep.setInt(1, 12);
    prep.setBoolean(2, true);
    prep.executeUpdate();
    prep.setInt(1, 13);
    prep.setBoolean(2, false);
    prep.executeUpdate();
    prep.setInt(1, 14);
    prep.setByte(2, (byte) -20);
    prep.executeUpdate();
    prep.setInt(1, 15);
    prep.setByte(2, (byte) 100);
    prep.executeUpdate();
    prep.setInt(1, 16);
    prep.setShort(2, (short) 30000);
    prep.executeUpdate();
    prep.setInt(1, 17);
    prep.setShort(2, (short) (-30000));
    prep.executeUpdate();
    prep.setInt(1, 18);
    prep.setLong(2, Integer.MAX_VALUE);
    prep.executeUpdate();
    prep.setInt(1, 19);
    prep.setLong(2, Integer.MIN_VALUE);
    prep.executeUpdate();

    assertTrue(stat.execute("SELECT * FROM T_INT ORDER BY ID"));
    rs = stat.getResultSet();
    assertResultSetOrdered(rs,
            new String[][] { { "1", "0" }, { "2", "-1" }, { "3", "3" }, { "4", null }, { "5", "0" },
                    { "6", "-1" }, { "7", "3" }, { "8", null }, { "9", "-4" }, { "10", "5" }, { "11", null },
                    { "12", "1" }, { "13", "0" }, { "14", "-20" }, { "15", "100" }, { "16", "30000" },
                    { "17", "-30000" }, { "18", "" + Integer.MAX_VALUE }, { "19", "" + Integer.MIN_VALUE }, });

    prep = conn.prepareStatement("INSERT INTO T_DECIMAL_0 VALUES(?,?)");
    prep.setInt(1, 1);
    prep.setLong(2, Long.MAX_VALUE);
    prep.executeUpdate();
    prep.setInt(1, 2);
    prep.setLong(2, Long.MIN_VALUE);
    prep.executeUpdate();
    prep.setInt(1, 3);
    prep.setFloat(2, 10);
    prep.executeUpdate();
    prep.setInt(1, 4);
    prep.setFloat(2, -20);
    prep.executeUpdate();
    prep.setInt(1, 5);
    prep.setFloat(2, 30);
    prep.executeUpdate();
    prep.setInt(1, 6);
    prep.setFloat(2, -40);
    prep.executeUpdate();
    rs = stat.executeQuery("SELECT VALUE FROM T_DECIMAL_0 ORDER BY ID");
    checkBigDecimal(rs, new String[] { "" + Long.MAX_VALUE, "" + Long.MIN_VALUE, "10", "-20", "30", "-40" });
}
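As the comment in the test notes, passing a plain null to setObject gives the driver no type information, and some databases reject it. The portable alternatives are setNull or the typed setObject overload. A minimal sketch, assuming an H2 in-memory database and an illustrative T_INT table (not the Wasp test fixture):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;

public class NullBinding {
    public static void main(String[] args) throws SQLException {
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement ddl = con.createStatement()) {
            ddl.execute("CREATE TABLE T_INT(ID INT PRIMARY KEY, VALUE INT)");
            try (PreparedStatement ps = con.prepareStatement("INSERT INTO T_INT VALUES(?, ?)")) {
                ps.setInt(1, 1);
                ps.setNull(2, Types.INTEGER);         // portable: explicit SQL type for the NULL
                ps.executeUpdate();

                ps.setInt(1, 2);
                ps.setObject(2, null, Types.INTEGER); // equivalent typed null via setObject
                ps.executeUpdate();
            }
        }
    }
}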
From source file:org.nuclos.server.masterdata.ejb3.MetaDataFacadeBean.java
/**
 * @return Script (with results if selected)
 */
@Override
@RolesAllowed("Login")
public MasterDataMetaVO transferTable(String url, String user, String password, String schema, String table,
        String sEntity) {
    MasterDataMetaVO metaNew = null;
    Connection connect = null;
    try {
        DependantMasterDataMap dependMap = new DependantMasterDataMapImpl();
        List<String> lstFields = new ArrayList<String>();

        connect = DriverManager.getConnection(url, user, password);
        DatabaseMetaData dbmeta = connect.getMetaData();
        ResultSet rsCols = dbmeta.getColumns(null, schema.toUpperCase(), table, "%");
        while (rsCols.next()) {
            String colName = rsCols.getString("COLUMN_NAME");
            int colsize = rsCols.getInt("COLUMN_SIZE");
            int postsize = rsCols.getInt("DECIMAL_DIGITS");
            int columsType = rsCols.getInt("DATA_TYPE");

            String sJavaType = getBestJavaType(columsType);
            if (postsize > 0)
                sJavaType = "java.lang.Double";

            MasterDataMetaVO metaFieldVO = masterDataFacade.getMetaData(NuclosEntity.ENTITYFIELD.getEntityName());
            MasterDataVO mdFieldVO = new MasterDataVO(metaFieldVO, false);

            mdFieldVO.setField("foreignentityfield", null);
            mdFieldVO.setField("unique", Boolean.FALSE);
            mdFieldVO.setField("logbook", Boolean.FALSE);
            mdFieldVO.setField("entity", NuclosEntity.ENTITYFIELD.getEntityName());
            mdFieldVO.setField("formatinput", null);
            mdFieldVO.setField("entityId", null);
            mdFieldVO.setField("datascale", colsize);
            mdFieldVO.setField("label", org.apache.commons.lang.StringUtils.capitalize(colName.toLowerCase()));
            mdFieldVO.setField("nullable", Boolean.TRUE);
            mdFieldVO.setField("dataprecision", postsize);
            mdFieldVO.setField("dbfield", colName.toLowerCase());
            mdFieldVO.setField("description", org.apache.commons.lang.StringUtils.capitalize(colName.toLowerCase()));
            mdFieldVO.setField("name", colName.toLowerCase());
            mdFieldVO.setField("entityfieldDefault", null);
            mdFieldVO.setField("foreignentity", null);
            mdFieldVO.setField("formatoutput", null);
            mdFieldVO.setField("datatype", sJavaType);
            mdFieldVO.setField("searchable", Boolean.FALSE);
            mdFieldVO.setField("foreignentity", null);
            mdFieldVO.setField("foreignentityfield", null);

            final String entity = NuclosEntity.ENTITYFIELD.getEntityName();
            dependMap.addData(entity, DalSupportForMD.getEntityObjectVO(entity, mdFieldVO));
            lstFields.add(colName);
        }
        rsCols.close();

        metaNew = masterDataFacade.getMetaData(sEntity);

        String sqlSelect = "select * from " + schema + "." + table;
        Statement stmt = connect.createStatement();
        ResultSet rsSelect = stmt.executeQuery(sqlSelect);
        while (rsSelect.next()) {
            List<Object> lstValues = new ArrayList<Object>();
            for (String sColname : lstFields) {
                lstValues.add(rsSelect.getObject(sColname));
            }

            StringBuffer sb = new StringBuffer();
            sb.append("insert into " + metaNew.getDBEntity());
            sb.append(" values(?");
            for (int i = 0; i < lstValues.size(); i++) {
                sb.append(",?");
            }
            sb.append(",?,?,?,?,?)");

            int col = 1;
            PreparedStatement pst = dataSource.getConnection().prepareStatement(sb.toString());
            pst.setInt(col++, dataBaseHelper.getNextIdAsInteger(SpringDataBaseHelper.DEFAULT_SEQUENCE));
            for (Object object : lstValues) {
                pst.setObject(col++, object);
            }
            pst.setDate(col++, new java.sql.Date(System.currentTimeMillis()));
            pst.setString(col++, "Wizard");
            pst.setDate(col++, new java.sql.Date(System.currentTimeMillis()));
            pst.setString(col++, "Wizard");
            pst.setInt(col++, 1);
            pst.executeUpdate();
            pst.close();
        }
        rsSelect.close();
        stmt.close();
    } catch (SQLException e) {
        LOG.info("transferTable: " + e, e);
    } finally {
        if (connect != null)
            try {
                connect.close();
            } catch (SQLException e) {
                // do nothing here
                LOG.info("transferTable: " + e);
            }
    }
    return metaNew;
}
From source file:nl.b3p.catalog.arcgis.ArcSDE9xJDBCHelper.java
@Override
public void saveMetadata(ArcSDEJDBCDataset dataset, String metadata) throws Exception {
    Connection c = getConnection();
    PreparedStatement ps = null;
    try {
        c.setAutoCommit(false);

        // do not use DbUtils here; setBinaryStream() does not work with the
        // setObject() call that DbUtils makes internally
        String sql = "update " + getTableName(TABLE_USERMETADATA) + " set xml = ? where name = ? and owner = ?";
        sql += databaseNameSQL(dataset);

        ps = c.prepareStatement(sql);
        byte[] xml = metadata.getBytes(ENCODING);
        ps.setBinaryStream(1, new ByteArrayInputStream(xml), xml.length);
        ps.setString(2, dataset.getName());
        ps.setString(3, dataset.getOwner());
        if (dataset.getDatabaseName() != null) {
            ps.setString(4, dataset.getDatabaseName());
        }
        int rowsAffected = ps.executeUpdate();
        ps.close();
        ps = null;

        if (rowsAffected > 1) {
            throw new Exception("Updating metadata should affect maximum one row; got rows affected count of "
                    + rowsAffected);
        }

        if (rowsAffected == 0) {
            // try to insert new row
            QueryRunner runner = new QueryRunner();

            // determine highest id
            Object id = runner.query(c, "select coalesce(max(id)+1,1) from " + getTableName(TABLE_USERMETADATA),
                    new ScalarHandler());

            Integer datasetType = determineDatasetType(c, dataset);

            // setBinaryStream needed again, so no DbUtils
            ps = c.prepareStatement("insert into " + getTableName(TABLE_USERMETADATA)
                    + " (id, databasename, owner, name, datasettype, xml) values(?,?,?,?,?,?)");
            ps.setObject(1, id);
            ps.setObject(2, dataset.getDatabaseName());
            ps.setString(3, dataset.getOwner());
            ps.setString(4, dataset.getName());
            ps.setObject(5, datasetType);
            ps.setBinaryStream(6, new ByteArrayInputStream(xml), xml.length);
            ps.executeUpdate();
            ps.close();
            ps = null;
        }

        DbUtils.commitAndClose(c);
    } catch (Exception e) {
        DbUtils.rollbackAndCloseQuietly(c);
        throw e;
    } finally {
        DbUtils.closeQuietly(ps);
    }
}