List of usage examples for java.sql PreparedStatement setBigDecimal
void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException;
Sets the designated parameter to the given java.math.BigDecimal value.
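Before the project examples below, here is a minimal, self-contained sketch of the call itself; the table name, column names, and values are hypothetical and only illustrate the 1-based parameter index:

import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

// Minimal sketch (hypothetical schema): bind an exact decimal value to the first placeholder.
static void updateBalance(Connection connection) throws SQLException {
    String sql = "UPDATE account SET balance = ? WHERE id = ?";
    try (PreparedStatement ps = connection.prepareStatement(sql)) {
        ps.setBigDecimal(1, new BigDecimal("1234.56")); // parameterIndex is 1-based; BigDecimal keeps exact decimal precision
        ps.setLong(2, 42L);
        ps.executeUpdate();
    }
}

Several of the real-world examples that follow guard against null values by calling setNull with Types.DECIMAL or Types.NUMERIC rather than passing null to setBigDecimal.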
From source file:Logica.Usuario.java
@Override
public ArrayList<informeDescargos> generarInformePorRA(String area, BigDecimal id) throws RemoteException {
    EntityManagerFactory emf = Persistence.createEntityManagerFactory("Biot_ServerPU");
    Connection con = null;
    PreparedStatement ps = null;
    ResultSet rs = null;
    String statement = "select D.CINTERNO, \"-\", item.DESCRIPCION, item.CANTIDAD, sum(D.CANTIDAD), D.FECHA\n"
            + "from item, DESCARGO d\n"
            + "where item.CINTERNO = D.CINTERNO and D.ID_usuario = ?\n"
            + "group by D.CINTERNO, item.DESCRIPCION, item.CANTIDAD, D.FECHA";
    informeDescargos fila = null;
    System.out.println(statement);
    ArrayList<informeDescargos> listado = new ArrayList<>();
    try {
        con = Conexion.conexion.getConnection();
        ps = con.prepareStatement(statement);
        ps.setBigDecimal(1, id);
        rs = ps.executeQuery();
        while (rs.next()) {
            GregorianCalendar c = new GregorianCalendar();
            c.setTime(rs.getDate(6));
            fila = new informeDescargos(rs.getString(1), rs.getString(2), rs.getString(3), rs.getFloat(4),
                    rs.getFloat(5), c);
            listado.add(fila);
        }
    } catch (SQLException ex) {
        Logger.getLogger(Usuario.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        try {
            if (ps != null) {
                ps.close();
            }
            if (rs != null) {
                rs.close();
            }
            if (con != null) {
                con.close();
            }
        } catch (SQLException ex) {
            System.out.println("Error cerrando conexion");
        }
    }
    return listado;
}
From source file:com.buckwa.dao.impl.excise4.Form24DaoImpl.java
@Override
public void create(final Form24 form24) {
    KeyHolder keyHolder = new GeneratedKeyHolder();
    final StringBuilder sql = new StringBuilder();
    sql.append("INSERT INTO `form24`(`form24_id`,`industry_id`,`factory_id`,`create_date`,`create_by`,`update_date`,`update_by`,step,industry_name,tax_no,factory_name) ")
            .append(" VALUES ( NULL,?,?,?,?,?,?,?,?,?,?)");
    logger.info("SQL : " + sql.toString());
    String user = "";
    try {
        user = BuckWaUtils.getUserNameFromContext();
    } catch (BuckWaException e) {
        e.printStackTrace();
    }
    final String userName = user;
    jdbcTemplate.update(new PreparedStatementCreator() {
        public PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
            Timestamp currentDate = new Timestamp(System.currentTimeMillis());
            final Industry industry = form24.getIndustry();
            final Factory factory = form24.getFactory();
            PreparedStatement ps = connection.prepareStatement(sql.toString(), Statement.RETURN_GENERATED_KEYS);
            ps.setLong(1, industry.getIndustryId());
            if (factory.getFactoryId() != null) {
                ps.setLong(2, factory.getFactoryId());
            } else {
                ps.setNull(2, java.sql.Types.BIGINT);
            }
            ps.setTimestamp(3, currentDate);
            ps.setString(4, userName);
            ps.setTimestamp(5, currentDate);
            ps.setString(6, userName);
            ps.setString(7, form24.getStep());
            ps.setString(8, industry.getIndustryName());
            ps.setString(9, industry.getTaxNo());
            ps.setString(10, factory.getFactoryName());
            return ps;
        }
    }, keyHolder);
    final Long returnidform24 = keyHolder.getKey().longValue();
    form24.setForm24Id(returnidform24);
    logger.info("returnidform24 : " + returnidform24);
    //ID PRODUCT
    List<Product> products = form24.getProductList();
    if (products != null) {
        final StringBuilder psql = new StringBuilder();
        psql.append("INSERT INTO `form24_product`(`form24_id`,`seq`,`productName`,`size`,`bandColor`,`backgroudColor`,`licenseNo`,`grossnumber200`,`grossnumber400`,`corkScrap`,`totalScrap`,`create_date`,`create_by`, `update_date`,`update_by`,`product_id`) ")
                .append("VALUES ( ?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,NULL)");
        logger.info("SQL : " + psql.toString());
        for (final Product p : products) {
            jdbcTemplate.update(new PreparedStatementCreator() {
                public PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
                    Timestamp currentDate = new Timestamp(System.currentTimeMillis());
                    PreparedStatement ps = connection.prepareStatement(psql.toString(),
                            Statement.RETURN_GENERATED_KEYS);
                    ps.setLong(1, returnidform24);
                    ps.setString(2, p.getSeq());
                    ps.setString(3, p.getProductName());
                    ps.setString(4, p.getSize());
                    ps.setString(5, p.getBandColor());
                    ps.setString(6, p.getBackgroudColor());
                    ps.setString(7, p.getLicenseNo());
                    ps.setBigDecimal(8, p.getGrossnumber200());
                    ps.setBigDecimal(9, p.getGrossnumber400());
                    ps.setBigDecimal(10, p.getCorkScrap());
                    ps.setBigDecimal(11, p.getTotalScrap());
                    ps.setTimestamp(12, currentDate);
                    ps.setString(13, userName);
                    ps.setTimestamp(14, currentDate);
                    ps.setString(15, userName);
                    return ps;
                }
            }, keyHolder);
            long returnidproduct = keyHolder.getKey().longValue();
            p.setProcuctId(returnidproduct);
            logger.info("returnidproduct : " + returnidproduct);
        }
    }
}
From source file:com.streamsets.pipeline.stage.origin.jdbc.cdc.oracle.OracleCDCSource.java
private void generateRecords(Offset startingOffset, PreparedStatement selectChanges) {
    // When this is called the first time, Logminer was started either from SCN or from a start date, so we just keep
    // track of the start date etc.
    LOG.info("Attempting to generate records");
    boolean error;
    StringBuilder query = new StringBuilder();
    BigDecimal lastCommitSCN = new BigDecimal(startingOffset.scn);
    int sequenceNumber = startingOffset.sequence;
    LocalDateTime startTime = adjustStartTime(startingOffset.timestamp);
    String lastTxnId = startingOffset.txnId;
    LocalDateTime endTime = getEndTimeForStartTime(startTime);
    ResultSet resultSet = null;
    while (!getContext().isStopped()) {
        error = false;
        generationStarted = true;
        try {
            recordQueue.put(new RecordOffset(dummyRecord,
                    new Offset(version, startTime, lastCommitSCN.toPlainString(), sequenceNumber, lastTxnId)));
            selectChanges = getSelectChangesStatement();
            if (!useLocalBuffering) {
                selectChanges.setBigDecimal(1, lastCommitSCN);
                selectChanges.setInt(2, sequenceNumber);
                selectChanges.setBigDecimal(3, lastCommitSCN);
                if (shouldTrackDDL) {
                    selectChanges.setBigDecimal(4, lastCommitSCN);
                }
            }
            selectChanges.setFetchSize(configBean.jdbcFetchSize);
            resultSet = selectChanges.executeQuery();
            while (resultSet.next() && !getContext().isStopped()) {
                String queryFragment = resultSet.getString(5);
                BigDecimal scnDecimal = resultSet.getBigDecimal(1);
                String scn = scnDecimal.toPlainString();
                String xidUsn = String.valueOf(resultSet.getLong(10));
                String xidSlt = String.valueOf(resultSet.getString(11));
                String xidSqn = String.valueOf(resultSet.getString(12));
                String xid = xidUsn + "." + xidSlt + "." + xidSqn;
                // Query Fragment is not null -> we need to process
                // Query Fragment is null AND the query string buffered from previous rows due to CSF == 0 is null,
                // nothing to do, go to next row
                // Query Fragment is null, but there is previously buffered data in the query, go ahead and process.
                if (queryFragment != null) {
                    query.append(queryFragment);
                } else if (queryFragment == null && query.length() == 0) {
                    LOG.debug(READ_NULL_QUERY_FROM_ORACLE, scn, xid);
                    continue;
                }
                // CSF is 1 if the query is incomplete, so read the next row before parsing
                // CSF being 0 means query is complete, generate the record
                if (resultSet.getInt(9) == 0) {
                    if (query.length() == 0) {
                        LOG.debug(READ_NULL_QUERY_FROM_ORACLE, scn, xid);
                        continue;
                    }
                    String queryString = query.toString();
                    query.setLength(0);
                    String username = resultSet.getString(2);
                    short op = resultSet.getShort(3);
                    String timestamp = resultSet.getString(4);
                    LocalDateTime tsDate = Timestamp.valueOf(timestamp).toLocalDateTime();
                    delay.getValue().put("delay", getDelay(tsDate));
                    String table = resultSet.getString(6);
                    BigDecimal commitSCN = resultSet.getBigDecimal(7);
                    int seq = resultSet.getInt(8);
                    String rsId = resultSet.getString(13);
                    Object ssn = resultSet.getObject(14);
                    String schema = String.valueOf(resultSet.getString(15));
                    int rollback = resultSet.getInt(16);
                    String rowId = resultSet.getString(17);
                    SchemaAndTable schemaAndTable = new SchemaAndTable(schema, table);
                    TransactionIdKey key = new TransactionIdKey(xid);
                    bufferedRecordsLock.lock();
                    try {
                        if (useLocalBuffering && bufferedRecords.containsKey(key) && bufferedRecords.get(key)
                                .contains(new RecordSequence(null, null, 0, 0, rsId, ssn, null))) {
                            continue;
                        }
                    } finally {
                        bufferedRecordsLock.unlock();
                    }
                    Offset offset = null;
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(
                                "Commit SCN = {}, SCN = {}, Operation = {}, Txn Id = {}, Timestamp = {}, Row Id = {}, Redo SQL = {}",
                                commitSCN, scn, op, xid, tsDate, rowId, queryString);
                    }
                    if (op != DDL_CODE && op != COMMIT_CODE && op != ROLLBACK_CODE) {
                        if (!useLocalBuffering) {
                            offset = new Offset(version, tsDate, commitSCN.toPlainString(), seq, xid);
                        }
                        Map<String, String> attributes = new HashMap<>();
                        attributes.put(SCN, scn);
                        attributes.put(USER, username);
                        attributes.put(TIMESTAMP_HEADER, timestamp);
                        attributes.put(TABLE, table);
                        attributes.put(SEQ, String.valueOf(seq));
                        attributes.put(XID, xid);
                        attributes.put(RS_ID, rsId);
                        attributes.put(SSN, ssn.toString());
                        attributes.put(SCHEMA, schema);
                        attributes.put(ROLLBACK, String.valueOf(rollback));
                        attributes.put(ROWID_KEY, rowId);
                        if (!useLocalBuffering || getContext().isPreview()) {
                            if (commitSCN.compareTo(lastCommitSCN) < 0
                                    || (commitSCN.compareTo(lastCommitSCN) == 0 && seq < sequenceNumber)) {
                                continue;
                            }
                            lastCommitSCN = commitSCN;
                            sequenceNumber = seq;
                            if (configBean.keepOriginalQuery) {
                                attributes.put(QUERY_KEY, queryString);
                            }
                            try {
                                Record record = generateRecord(queryString, attributes, op);
                                if (record != null && record.getEscapedFieldPaths().size() > 0) {
                                    recordQueue.put(new RecordOffset(record, offset));
                                }
                            } catch (UnparseableSQLException ex) {
                                LOG.error("Parsing failed", ex);
                                unparseable.offer(queryString);
                            }
                        } else {
                            bufferedRecordsLock.lock();
                            try {
                                HashQueue<RecordSequence> records = bufferedRecords.computeIfAbsent(key, x -> {
                                    x.setTxnStartTime(tsDate);
                                    return createTransactionBuffer(key.txnId);
                                });
                                int nextSeq = records.isEmpty() ? 1 : records.tail().seq + 1;
                                RecordSequence node = new RecordSequence(attributes, queryString, nextSeq, op, rsId, ssn, tsDate);
                                records.add(node);
                            } finally {
                                bufferedRecordsLock.unlock();
                            }
                        }
                    } else if (!getContext().isPreview() && useLocalBuffering
                            && (op == COMMIT_CODE || op == ROLLBACK_CODE)) {
                        // so this commit was previously processed or it is a rollback, so don't care.
                        if (op == ROLLBACK_CODE || scnDecimal.compareTo(lastCommitSCN) < 0) {
                            bufferedRecordsLock.lock();
                            try {
                                bufferedRecords.remove(key);
                            } finally {
                                bufferedRecordsLock.unlock();
                            }
                        } else {
                            bufferedRecordsLock.lock();
                            try {
                                HashQueue<RecordSequence> records = bufferedRecords.getOrDefault(key, EMPTY_LINKED_HASHSET);
                                if (lastCommitSCN.equals(scnDecimal) && xid.equals(lastTxnId)) {
                                    removeProcessedRecords(records, sequenceNumber);
                                }
                                int bufferedRecordsToBeRemoved = records.size();
                                LOG.debug(FOUND_RECORDS_IN_TRANSACTION, bufferedRecordsToBeRemoved, xid);
                                lastCommitSCN = scnDecimal;
                                lastTxnId = xid;
                                sequenceNumber = addRecordsToQueue(tsDate, scn, xid);
                            } finally {
                                bufferedRecordsLock.unlock();
                            }
                        }
                    } else {
                        offset = new Offset(version, tsDate, scn, 0, xid);
                        boolean sendSchema = false;
                        // Commit/rollback in Preview will also end up here, so don't really do any of the following in preview
                        // Don't bother with DDL events here.
                        if (!getContext().isPreview()) {
                            // Event is sent on every DDL, but schema is not always sent.
                            // Schema sending logic:
                            // CREATE/ALTER: Schema is sent if the schema after the ALTER is newer than the cached schema
                            // (which we would have sent as an event earlier, at the last alter)
                            // DROP/TRUNCATE: Schema is not sent, since they don't change schema.
                            DDL_EVENT type = getDdlType(queryString);
                            if (type == DDL_EVENT.ALTER || type == DDL_EVENT.CREATE) {
                                sendSchema = refreshSchema(scnDecimal, new SchemaAndTable(schema, table));
                            }
                            recordQueue.put(new RecordOffset(createEventRecord(type, queryString, schemaAndTable,
                                    offset.toString(), sendSchema, timestamp), offset));
                        }
                    }
                    query.setLength(0);
                }
            }
        } catch (SQLException ex) {
            error = true;
            // force a restart from the same timestamp.
            if (ex.getErrorCode() == MISSING_LOG_FILE) {
                LOG.warn("SQL Exception while retrieving records", ex);
                addToStageExceptionsQueue(new StageException(JDBC_86, ex));
            } else if (ex.getErrorCode() != RESULTSET_CLOSED_AS_LOGMINER_SESSION_CLOSED) {
                LOG.warn("SQL Exception while retrieving records", ex);
            } else if (ex.getErrorCode() == QUERY_TIMEOUT) {
                LOG.warn("LogMiner select query timed out");
            } else if (ex.getErrorCode() == LOGMINER_START_MUST_BE_CALLED) {
                LOG.warn("Last LogMiner session did not start successfully. Will retry", ex);
            } else {
                LOG.error("Error while reading data", ex);
                addToStageExceptionsQueue(new StageException(JDBC_52, ex));
            }
        } catch (StageException e) {
            LOG.error("Error while reading data", e);
            error = true;
            addToStageExceptionsQueue(e);
        } catch (InterruptedException ex) {
            LOG.error("Interrupted while waiting to add data");
            Thread.currentThread().interrupt();
        } catch (Exception ex) {
            LOG.error("Error while reading data", ex);
            error = true;
            addToStageExceptionsQueue(new StageException(JDBC_52, ex));
        } finally {
            // If an incomplete batch is seen, it means we are going to move the window forward
            // Ending this session and starting a new one helps reduce PGA memory usage.
            try {
                if (resultSet != null && !resultSet.isClosed()) {
                    resultSet.close();
                }
                if (selectChanges != null && !selectChanges.isClosed()) {
                    selectChanges.close();
                }
            } catch (SQLException ex) {
                LOG.warn("Error while attempting to close SQL statements", ex);
            }
            try {
                endLogMnr.execute();
            } catch (SQLException ex) {
                LOG.warn("Error while trying to close logminer session", ex);
            }
            try {
                if (error) {
                    resetConnectionsQuietly();
                } else {
                    discardOldUncommitted(startTime);
                    startTime = adjustStartTime(endTime);
                    endTime = getEndTimeForStartTime(startTime);
                }
                startLogMinerUsingGivenDates(startTime.format(dateTimeColumnHandler.dateFormatter),
                        endTime.format(dateTimeColumnHandler.dateFormatter));
            } catch (SQLException ex) {
                LOG.error("Error while attempting to start LogMiner", ex);
                addToStageExceptionsQueue(new StageException(JDBC_52, ex));
            } catch (StageException ex) {
                LOG.error("Error while attempting to start logminer for redo log dictionary", ex);
                addToStageExceptionsQueue(ex);
            }
        }
    }
}
From source file:gemlite.core.internal.db.AsyncEventHelper.java
/**
 * Set column value at given index in a prepared statement. The implementation
 * tries using the matching underlying type to minimize any data type
 * conversions, and avoid creating wrapper Java objects (e.g. {@link Integer}
 * for primitive int).
 *
 * @param type
 *          the SQL type of the column as specified by JDBC {@link Types} class
 * @param ps
 *          the prepared statement where the column value has to be set
 * @param row
 *          the source row as a {@link ResultSet} from where the value has to be extracted
 * @param rowPosition
 *          the 1-based position of the column in the provided <code>row</code>
 * @param paramIndex
 *          the 1-based position of the column in the target prepared statement
 *          (provided <code>ps</code> argument)
 * @param sync
 *          the {@link DBSynchronizer} object, if any; it is used to store
 *          whether the current driver is JDBC4 compliant to enable performing
 *          BLOB/CLOB operations {@link PreparedStatement#setBinaryStream},
 *          {@link PreparedStatement#setCharacterStream}
 *
 * @throws SQLException
 *           in case of an exception in setting parameters
 */
public final void setColumnInPrepStatement(String type, Object val, PreparedStatement ps,
        final DBSynchronizer sync, int paramIndex) throws SQLException {
    switch (type) {
        case JavaTypes.STRING:
            if (val == null || StringUtils.isEmpty(val.toString()))
                ps.setNull(paramIndex, Types.VARCHAR);
            else {
                final String realVal = (String) val;
                ps.setString(paramIndex, realVal);
            }
            break;
        case JavaTypes.INT1:
        case JavaTypes.INT2:
        case JavaTypes.INT3:
            if (val == null || StringUtils.isEmpty(val.toString()))
                ps.setNull(paramIndex, Types.INTEGER);
            else {
                final int realVal = (int) val;
                ps.setInt(paramIndex, realVal);
            }
            break;
        case JavaTypes.DOUBLE1:
        case JavaTypes.DOUBLE2:
            if (val == null || StringUtils.isEmpty(val.toString()))
                ps.setNull(paramIndex, Types.DOUBLE);
            else {
                final double realVal = (double) val;
                ps.setDouble(paramIndex, realVal);
            }
            break;
        case JavaTypes.FLOAT1:
        case JavaTypes.FLOAT2:
            if (val == null || StringUtils.isEmpty(val.toString()))
                ps.setNull(paramIndex, Types.FLOAT);
            else {
                final float realVal = (float) val;
                ps.setDouble(paramIndex, realVal);
            }
            break;
        case JavaTypes.BOOLEAN1:
        case JavaTypes.BOOLEAN2:
            if (val == null || StringUtils.isEmpty(val.toString()))
                ps.setNull(paramIndex, Types.BOOLEAN);
            else {
                final boolean realVal = (boolean) val;
                ps.setBoolean(paramIndex, realVal);
            }
            break;
        case JavaTypes.DATE_SQL:
            if (val == null || StringUtils.isEmpty(val.toString()))
                ps.setNull(paramIndex, Types.DATE);
            else {
                final Date realVal = (Date) val;
                ps.setDate(paramIndex, realVal);
            }
            break;
        case JavaTypes.DATE_UTIL:
            if (val == null || StringUtils.isEmpty(val.toString()))
                ps.setNull(paramIndex, Types.DATE);
            else {
                final java.util.Date realVal = (java.util.Date) val;
                ps.setDate(paramIndex, new Date(realVal.getTime()));
            }
            break;
        case JavaTypes.BIGDECIMAL:
            if (val == null || StringUtils.isEmpty(val.toString()))
                ps.setNull(paramIndex, Types.DECIMAL);
            else {
                final BigDecimal realVal = (BigDecimal) val;
                ps.setBigDecimal(paramIndex, realVal);
            }
            break;
        case JavaTypes.TIME:
            if (val == null || StringUtils.isEmpty(val.toString()))
                ps.setNull(paramIndex, Types.TIME);
            else {
                final Time realVal = (Time) val;
                ps.setTime(paramIndex, realVal);
            }
            break;
        case JavaTypes.TIMESTAMP:
            if (val == null || StringUtils.isEmpty(val.toString()))
                ps.setNull(paramIndex, Types.TIMESTAMP);
            else {
                final Timestamp realVal = (Timestamp) val;
                ps.setTimestamp(paramIndex, realVal);
            }
            break;
        case JavaTypes.OBJECT:
            if (val == null || StringUtils.isEmpty(val.toString()))
                ps.setNull(paramIndex, Types.JAVA_OBJECT);
            else {
                final Object realVal = (Object) val;
                ps.setObject(paramIndex, realVal);
            }
            break;
        default:
            throw new UnsupportedOperationException("java.sql.Type = " + type + " not supported");
    }
}
From source file:com.flexive.core.storage.GenericDivisionImporter.java
/**
 * Import data from a zip archive to a database table
 *
 * @param stmt               statement to use
 * @param zip                zip archive containing the zip entry
 * @param ze                 zip entry within the archive
 * @param xpath              xpath containing the entries to import
 * @param table              name of the table
 * @param executeInsertPhase execute the insert phase?
 * @param executeUpdatePhase execute the update phase?
 * @param updateColumns      columns that should be set to <code>null</code> in a first pass (insert)
 *                           and updated to the provided values in a second pass (update),
 *                           columns that should be used in the where clause have to be prefixed
 *                           with "KEY:", to assign a default value use the expression "columnname:default value",
 *                           if the default value is "@", it will be a negative counter starting at 0, decreasing.
 *                           If the default value starts with "%", it will be set to the column following the "%"
 *                           character in the first pass
 * @throws Exception on errors
 */
protected void importTable(Statement stmt, final ZipFile zip, final ZipEntry ze, final String xpath,
        final String table, final boolean executeInsertPhase, final boolean executeUpdatePhase,
        final String... updateColumns) throws Exception {
    //analyze the table
    final ResultSet rs = stmt.executeQuery("SELECT * FROM " + table + " WHERE 1=2");
    StringBuilder sbInsert = new StringBuilder(500);
    StringBuilder sbUpdate = updateColumns.length > 0 ? new StringBuilder(500) : null;
    if (rs == null)
        throw new IllegalArgumentException("Can not analyze table [" + table + "]!");
    sbInsert.append("INSERT INTO ").append(table).append(" (");
    final ResultSetMetaData md = rs.getMetaData();
    final Map<String, ColumnInfo> updateClauseColumns = updateColumns.length > 0
            ? new HashMap<String, ColumnInfo>(md.getColumnCount()) : null;
    final Map<String, ColumnInfo> updateSetColumns = updateColumns.length > 0
            ? new LinkedHashMap<String, ColumnInfo>(md.getColumnCount()) : null;
    final Map<String, String> presetColumns = updateColumns.length > 0 ? new HashMap<String, String>(10) : null;
    //preset to a referenced column (%column syntax)
    final Map<String, String> presetRefColumns = updateColumns.length > 0 ? new HashMap<String, String>(10) : null;
    final Map<String, Integer> counters = updateColumns.length > 0 ? new HashMap<String, Integer>(10) : null;
    final Map<String, ColumnInfo> insertColumns = new HashMap<String, ColumnInfo>(
            md.getColumnCount() + (counters != null ? counters.size() : 0));
    int insertIndex = 1;
    int updateSetIndex = 1;
    int updateClauseIndex = 1;
    boolean first = true;
    for (int i = 0; i < md.getColumnCount(); i++) {
        final String currCol = md.getColumnName(i + 1).toLowerCase();
        if (updateColumns.length > 0) {
            boolean abort = false;
            for (String col : updateColumns) {
                if (col.indexOf(':') > 0 && !col.startsWith("KEY:")) {
                    String value = col.substring(col.indexOf(':') + 1);
                    col = col.substring(0, col.indexOf(':'));
                    if ("@".equals(value)) {
                        if (currCol.equalsIgnoreCase(col)) {
                            counters.put(col, 0);
                            insertColumns.put(col, new ColumnInfo(md.getColumnType(i + 1), insertIndex++));
                            sbInsert.append(',').append(currCol);
                        }
                    } else if (value.startsWith("%")) {
                        if (currCol.equalsIgnoreCase(col)) {
                            presetRefColumns.put(col, value.substring(1));
                            insertColumns.put(col, new ColumnInfo(md.getColumnType(i + 1), insertIndex++));
                            sbInsert.append(',').append(currCol);
                            // System.out.println("==> adding presetRefColumn "+col+" with value of "+value.substring(1));
                        }
                    } else if (!presetColumns.containsKey(col))
                        presetColumns.put(col, value);
                }
                if (currCol.equalsIgnoreCase(col)) {
                    abort = true;
                    updateSetColumns.put(currCol, new ColumnInfo(md.getColumnType(i + 1), updateSetIndex++));
                    break;
                }
            }
            if (abort)
                continue;
        }
        if (first) {
            first = false;
        } else
            sbInsert.append(',');
        sbInsert.append(currCol);
        insertColumns.put(currCol, new ColumnInfo(md.getColumnType(i + 1), insertIndex++));
    }
    if (updateColumns.length > 0 && executeUpdatePhase) {
        sbUpdate.append("UPDATE ").append(table).append(" SET ");
        int counter = 0;
        for (String updateColumn : updateSetColumns.keySet()) {
            if (counter++ > 0)
                sbUpdate.append(',');
            sbUpdate.append(updateColumn).append("=?");
        }
        sbUpdate.append(" WHERE ");
        boolean hasKeyColumn = false;
        for (String col : updateColumns) {
            if (!col.startsWith("KEY:"))
                continue;
            hasKeyColumn = true;
            String keyCol = col.substring(4);
            for (int i = 0; i < md.getColumnCount(); i++) {
                if (!md.getColumnName(i + 1).equalsIgnoreCase(keyCol))
                    continue;
                updateClauseColumns.put(keyCol, new ColumnInfo(md.getColumnType(i + 1), updateClauseIndex++));
                sbUpdate.append(keyCol).append("=? AND ");
                break;
            }
        }
        if (!hasKeyColumn)
            throw new IllegalArgumentException("Update columns require a KEY!");
        sbUpdate.delete(sbUpdate.length() - 5, sbUpdate.length()); //remove trailing " AND "
        //"shift" clause indices
        for (String col : updateClauseColumns.keySet()) {
            GenericDivisionImporter.ColumnInfo ci = updateClauseColumns.get(col);
            ci.index += (updateSetIndex - 1);
        }
    }
    if (presetColumns != null) {
        for (String key : presetColumns.keySet())
            sbInsert.append(',').append(key);
    }
    sbInsert.append(")VALUES(");
    for (int i = 0; i < insertColumns.size(); i++) {
        if (i > 0)
            sbInsert.append(',');
        sbInsert.append('?');
    }
    if (presetColumns != null) {
        for (String key : presetColumns.keySet())
            sbInsert.append(',').append(presetColumns.get(key));
    }
    sbInsert.append(')');
    if (DBG) {
        LOG.info("Insert statement:\n" + sbInsert.toString());
        if (updateColumns.length > 0)
            LOG.info("Update statement:\n" + sbUpdate.toString());
    }
    //build a map containing all nodes that require attributes
    //this allows for matching simple xpath queries like "flatstorages/storage[@name='FX_FLAT_STORAGE']/data"
    final Map<String, List<String>> queryAttributes = new HashMap<String, List<String>>(5);
    for (String pElem : xpath.split("/")) {
        if (!(pElem.indexOf('@') > 0 && pElem.indexOf('[') > 0))
            continue;
        List<String> att = new ArrayList<String>(5);
        for (String pAtt : pElem.split("@")) {
            if (!(pAtt.indexOf('=') > 0))
                continue;
            att.add(pAtt.substring(0, pAtt.indexOf('=')));
        }
        queryAttributes.put(pElem.substring(0, pElem.indexOf('[')), att);
    }
    final PreparedStatement psInsert = stmt.getConnection().prepareStatement(sbInsert.toString());
    final PreparedStatement psUpdate = updateColumns.length > 0 && executeUpdatePhase
            ? stmt.getConnection().prepareStatement(sbUpdate.toString()) : null;
    try {
        final SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
        final DefaultHandler handler = new DefaultHandler() {
            private String currentElement = null;
            private Map<String, String> data = new HashMap<String, String>(10);
            private StringBuilder sbData = new StringBuilder(10000);
            boolean inTag = false;
            boolean inElement = false;
            int counter;
            List<String> path = new ArrayList<String>(10);
            StringBuilder currPath = new StringBuilder(100);
            boolean insertMode = true;

            /**
             * {@inheritDoc}
             */
            @Override
            public void startDocument() throws SAXException {
                counter = 0;
                inTag = false;
                inElement = false;
                path.clear();
                currPath.setLength(0);
                sbData.setLength(0);
                data.clear();
                currentElement = null;
            }

            /**
             * {@inheritDoc}
             */
            @Override
            public void processingInstruction(String target, String data) throws SAXException {
                if (target != null && target.startsWith("fx_")) {
                    if (target.equals("fx_mode"))
                        insertMode = "insert".equals(data);
                } else
                    super.processingInstruction(target, data);
            }

            /**
             * {@inheritDoc}
             */
            @Override
            public void endDocument() throws SAXException {
                if (insertMode)
                    LOG.info("Imported [" + counter + "] entries into [" + table + "] for xpath [" + xpath + "]");
                else
                    LOG.info("Updated [" + counter + "] entries in [" + table + "] for xpath [" + xpath + "]");
            }

            /**
             * {@inheritDoc}
             */
            @Override
            public void startElement(String uri, String localName, String qName, Attributes attributes)
                    throws SAXException {
                pushPath(qName, attributes);
                if (currPath.toString().equals(xpath)) {
                    inTag = true;
                    data.clear();
                    for (int i = 0; i < attributes.getLength(); i++) {
                        String name = attributes.getLocalName(i);
                        if (StringUtils.isEmpty(name))
                            name = attributes.getQName(i);
                        data.put(name, attributes.getValue(i));
                    }
                } else {
                    currentElement = qName;
                }
                inElement = true;
                sbData.setLength(0);
            }

            /**
             * Push a path element from the stack
             *
             * @param qName element name to push
             * @param att   attributes
             */
            private void pushPath(String qName, Attributes att) {
                if (att.getLength() > 0 && queryAttributes.containsKey(qName)) {
                    String curr = qName + "[";
                    boolean first = true;
                    final List<String> attList = queryAttributes.get(qName);
                    for (int i = 0; i < att.getLength(); i++) {
                        if (!attList.contains(att.getQName(i)))
                            continue;
                        if (first)
                            first = false;
                        else
                            curr += ',';
                        curr += "@" + att.getQName(i) + "='" + att.getValue(i) + "'";
                    }
                    curr += ']';
                    path.add(curr);
                } else
                    path.add(qName);
                buildPath();
            }

            /**
             * Pop the top path element from the stack
             */
            private void popPath() {
                path.remove(path.size() - 1);
                buildPath();
            }

            /**
             * Rebuild the current path
             */
            private synchronized void buildPath() {
                currPath.setLength(0);
                for (String s : path)
                    currPath.append(s).append('/');
                if (currPath.length() > 1)
                    currPath.delete(currPath.length() - 1, currPath.length());
                // System.out.println("currPath: " + currPath);
            }

            /**
             * {@inheritDoc}
             */
            @Override
            public void endElement(String uri, String localName, String qName) throws SAXException {
                if (currPath.toString().equals(xpath)) {
                    if (DBG)
                        LOG.info("Insert [" + xpath + "]: [" + data + "]");
                    inTag = false;
                    try {
                        if (insertMode) {
                            if (executeInsertPhase) {
                                processColumnSet(insertColumns, psInsert);
                                counter += psInsert.executeUpdate();
                            }
                        } else {
                            if (executeUpdatePhase) {
                                if (processColumnSet(updateSetColumns, psUpdate)) {
                                    processColumnSet(updateClauseColumns, psUpdate);
                                    counter += psUpdate.executeUpdate();
                                }
                            }
                        }
                    } catch (SQLException e) {
                        throw new SAXException(e);
                    } catch (ParseException e) {
                        throw new SAXException(e);
                    }
                } else {
                    if (inTag) {
                        data.put(currentElement, sbData.toString());
                    }
                    currentElement = null;
                }
                popPath();
                inElement = false;
                sbData.setLength(0);
            }

            /**
             * Process a column set
             *
             * @param columns the columns to process
             * @param ps      prepared statement to use
             * @return if data other than <code>null</code> has been set
             * @throws SQLException   on errors
             * @throws ParseException on date/time conversion errors
             */
            private boolean processColumnSet(Map<String, ColumnInfo> columns, PreparedStatement ps)
                    throws SQLException, ParseException {
                boolean dataSet = false;
                for (String col : columns.keySet()) {
                    ColumnInfo ci = columns.get(col);
                    String value = data.get(col);
                    if (insertMode && counters != null && counters.get(col) != null) {
                        final int newVal = counters.get(col) - 1;
                        value = String.valueOf(newVal);
                        counters.put(col, newVal);
                        // System.out.println("new value for " + col + ": " + newVal);
                    }
                    if (insertMode && presetRefColumns != null && presetRefColumns.get(col) != null) {
                        value = data.get(presetRefColumns.get(col));
                        // System.out.println("Set presetRefColumn for "+col+" to ["+value+"] from column ["+presetRefColumns.get(col)+"]");
                    }
                    if (value == null)
                        ps.setNull(ci.index, ci.columnType);
                    else {
                        dataSet = true;
                        switch (ci.columnType) {
                            case Types.BIGINT:
                            case Types.NUMERIC:
                                if (DBG)
                                    LOG.info("BigInt " + ci.index + "->" + new BigDecimal(value));
                                ps.setBigDecimal(ci.index, new BigDecimal(value));
                                break;
                            case java.sql.Types.DOUBLE:
                                if (DBG)
                                    LOG.info("Double " + ci.index + "->" + Double.parseDouble(value));
                                ps.setDouble(ci.index, Double.parseDouble(value));
                                break;
                            case java.sql.Types.FLOAT:
                            case java.sql.Types.REAL:
                                if (DBG)
                                    LOG.info("Float " + ci.index + "->" + Float.parseFloat(value));
                                ps.setFloat(ci.index, Float.parseFloat(value));
                                break;
                            case java.sql.Types.TIMESTAMP:
                            case java.sql.Types.DATE:
                                if (DBG)
                                    LOG.info("Timestamp/Date " + ci.index + "->"
                                            + FxFormatUtils.getDateTimeFormat().parse(value));
                                ps.setTimestamp(ci.index,
                                        new Timestamp(FxFormatUtils.getDateTimeFormat().parse(value).getTime()));
                                break;
                            case Types.TINYINT:
                            case Types.SMALLINT:
                                if (DBG)
                                    LOG.info("Integer " + ci.index + "->" + Integer.valueOf(value));
                                ps.setInt(ci.index, Integer.valueOf(value));
                                break;
                            case Types.INTEGER:
                            case Types.DECIMAL:
                                try {
                                    if (DBG)
                                        LOG.info("Long " + ci.index + "->" + Long.valueOf(value));
                                    ps.setLong(ci.index, Long.valueOf(value));
                                } catch (NumberFormatException e) {
                                    //Fallback (temporary) for H2 if the reported long is a big decimal (tree...)
                                    ps.setBigDecimal(ci.index, new BigDecimal(value));
                                }
                                break;
                            case Types.BIT:
                            case Types.CHAR:
                            case Types.BOOLEAN:
                                if (DBG)
                                    LOG.info("Boolean " + ci.index + "->" + value);
                                if ("1".equals(value) || "true".equals(value))
                                    ps.setBoolean(ci.index, true);
                                else
                                    ps.setBoolean(ci.index, false);
                                break;
                            case Types.LONGVARBINARY:
                            case Types.VARBINARY:
                            case Types.BLOB:
                            case Types.BINARY:
                                ZipEntry bin = zip.getEntry(value);
                                if (bin == null) {
                                    LOG.error("Failed to lookup binary [" + value + "]!");
                                    ps.setNull(ci.index, ci.columnType);
                                    break;
                                }
                                try {
                                    ps.setBinaryStream(ci.index, zip.getInputStream(bin), (int) bin.getSize());
                                } catch (IOException e) {
                                    LOG.error("IOException importing binary [" + value + "]: " + e.getMessage(), e);
                                }
                                break;
                            case Types.CLOB:
                            case Types.LONGVARCHAR:
                            case Types.VARCHAR:
                            case SQL_LONGNVARCHAR:
                            case SQL_NCHAR:
                            case SQL_NCLOB:
                            case SQL_NVARCHAR:
                                if (DBG)
                                    LOG.info("String " + ci.index + "->" + value);
                                ps.setString(ci.index, value);
                                break;
                            default:
                                LOG.warn("Unhandled type [" + ci.columnType + "] for column [" + col + "]");
                        }
                    }
                }
                return dataSet;
            }

            /**
             * {@inheritDoc}
             */
            @Override
            public void characters(char[] ch, int start, int length) throws SAXException {
                if (inElement)
                    sbData.append(ch, start, length);
            }
        };
        handler.processingInstruction("fx_mode", "insert");
        parser.parse(zip.getInputStream(ze), handler);
        if (updateColumns.length > 0 && executeUpdatePhase) {
            handler.processingInstruction("fx_mode", "update");
            parser.parse(zip.getInputStream(ze), handler);
        }
    } finally {
        Database.closeObjects(GenericDivisionImporter.class, psInsert, psUpdate);
    }
}
From source file:com.mirth.connect.donkey.server.data.jdbc.JdbcDao.java
@Override
public void storeMetaData(ConnectorMessage connectorMessage, List<MetaDataColumn> metaDataColumns) {
    logger.debug(connectorMessage.getChannelId() + "/" + connectorMessage.getMessageId() + "/"
            + connectorMessage.getMetaDataId() + ": updating custom meta data");

    PreparedStatement statement = null;

    try {
        List<String> metaDataColumnNames = new ArrayList<String>();
        Map<String, Object> metaDataMap = connectorMessage.getMetaDataMap();

        for (MetaDataColumn metaDataColumn : metaDataColumns) {
            Object value = metaDataMap.get(metaDataColumn.getName());
            if (value != null) {
                metaDataColumnNames.add(metaDataColumn.getName());
            }
        }

        // Don't do anything if all values were null
        if (!metaDataColumnNames.isEmpty()) {
            Map<String, Object> values = new HashMap<String, Object>();
            values.put("localChannelId", getLocalChannelId(connectorMessage.getChannelId()));
            values.put("metaDataColumnPlaceholders", quoteChar
                    + StringUtils.join(metaDataColumnNames, quoteChar + " = ?, " + quoteChar) + quoteChar + " = ?");

            statement = connection.prepareStatement(querySource.getQuery("storeMetaData", values));

            int n = 1;
            for (MetaDataColumn metaDataColumn : metaDataColumns) {
                Object value = metaDataMap.get(metaDataColumn.getName());
                if (value != null) {
                    // @formatter:off
                    switch (metaDataColumn.getType()) {
                        case STRING: statement.setString(n, (String) value); break;
                        case NUMBER: statement.setBigDecimal(n, (BigDecimal) value); break;
                        case BOOLEAN: statement.setBoolean(n, (Boolean) value); break;
                        case TIMESTAMP: statement.setTimestamp(n, new Timestamp(((Calendar) value).getTimeInMillis())); break;
                    }
                    // @formatter:on
                    n++;
                }
            }

            statement.setLong(n++, connectorMessage.getMessageId());
            statement.setInt(n, connectorMessage.getMetaDataId());
            statement.executeUpdate();
        }
    } catch (Exception e) {
        throw new DonkeyDaoException("Failed to update connector message meta data", e);
    } finally {
        close(statement);
    }
}
From source file:com.mirth.connect.donkey.server.data.jdbc.JdbcDao.java
@Override
public void insertMetaData(ConnectorMessage connectorMessage, List<MetaDataColumn> metaDataColumns) {
    logger.debug(connectorMessage.getChannelId() + "/" + connectorMessage.getMessageId() + "/"
            + connectorMessage.getMetaDataId() + ": inserting custom meta data");

    PreparedStatement statement = null;

    try {
        List<String> metaDataColumnNames = new ArrayList<String>();
        Map<String, Object> metaDataMap = connectorMessage.getMetaDataMap();

        for (MetaDataColumn metaDataColumn : metaDataColumns) {
            Object value = metaDataMap.get(metaDataColumn.getName());
            if (value != null) {
                metaDataColumnNames.add(metaDataColumn.getName());
            }
        }

        // Don't do anything if all values were null
        if (!metaDataColumnNames.isEmpty()) {
            Map<String, Object> values = new HashMap<String, Object>();
            values.put("localChannelId", getLocalChannelId(connectorMessage.getChannelId()));
            values.put("metaDataColumnNames", quoteChar
                    + StringUtils.join(metaDataColumnNames, quoteChar + "," + quoteChar) + quoteChar);
            values.put("metaDataColumnPlaceholders",
                    "?" + StringUtils.repeat(", ?", metaDataColumnNames.size() - 1));

            statement = connection.prepareStatement(querySource.getQuery("insertMetaData", values));
            statement.setInt(1, connectorMessage.getMetaDataId());
            statement.setLong(2, connectorMessage.getMessageId());

            int n = 3;
            for (MetaDataColumn metaDataColumn : metaDataColumns) {
                Object value = metaDataMap.get(metaDataColumn.getName());
                if (value != null) {
                    // @formatter:off
                    switch (metaDataColumn.getType()) {
                        case STRING: statement.setString(n, (String) value); break;
                        case NUMBER: statement.setBigDecimal(n, (BigDecimal) value); break;
                        case BOOLEAN: statement.setBoolean(n, (Boolean) value); break;
                        case TIMESTAMP: statement.setTimestamp(n, new Timestamp(((Calendar) value).getTimeInMillis())); break;
                    }
                    // @formatter:on
                    n++;
                }
            }

            statement.executeUpdate();
        }
    } catch (Exception e) {
        throw new DonkeyDaoException("Failed to insert connector message meta data", e);
    } finally {
        close(statement);
    }
}
From source file:net.sourceforge.squirrel_sql.fw.datasetviewer.cellcomponent.DataTypeBigDecimal.java
/**
 * When updating the database, insert the appropriate datatype into the
 * prepared statment at the given variable position.
 */
public void setPreparedStatementValue(PreparedStatement pstmt, Object value, int position)
        throws java.sql.SQLException {
    if (value == null) {
        pstmt.setNull(position, _colDef.getSqlType());
    } else {
        pstmt.setBigDecimal(position, (BigDecimal) value);
    }
}
From source file:nl.strohalm.cyclos.utils.hibernate.AmountType.java
public void nullSafeSet(final PreparedStatement ps, final Object object, final int index)
        throws HibernateException, SQLException {
    final Amount amount = (Amount) object;
    BigDecimal value = null;
    Amount.Type type = null;
    if (amount != null) {
        value = amount.getValue();
        type = amount.getType();
    }
    if (value == null) {
        ps.setNull(index, Types.NUMERIC);
    } else {
        ps.setBigDecimal(index, value);
    }
    if (type == null) {
        ps.setNull(index + 1, Types.CHAR);
    } else {
        ps.setString(index + 1, type.getValue());
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Binding " + value + " to parameter: " + (index));
        LOG.debug("Binding " + (type == null ? null : type.getValue()) + " to parameter: " + (index + 1));
    }
}
From source file:nl.strohalm.cyclos.utils.JDBCWrapper.java
/**
 * Set the given positional parameters on a prepared statement, guessing the argument types
 */
private static void setParameters(final PreparedStatement ps, final Object... parameters) throws SQLException {
    if (ps == null || ArrayUtils.isEmpty(parameters)) {
        return;
    }
    for (int i = 0; i < parameters.length; i++) {
        final Object object = parameters[i];
        final int index = i + 1;
        if (object instanceof Number) {
            ps.setBigDecimal(index, CoercionHelper.coerce(BigDecimal.class, object));
        } else if ((object instanceof Calendar) || (object instanceof Date)) {
            final Calendar cal = CoercionHelper.coerce(Calendar.class, object);
            ps.setTimestamp(index, new Timestamp(cal.getTimeInMillis()));
        } else if (object instanceof Boolean) {
            ps.setBoolean(index, (Boolean) object);
        } else {
            ps.setString(index, CoercionHelper.coerce(String.class, object));
        }
    }
}