List of usage examples for java.sql.SQLException.getLocalizedMessage()
public String getLocalizedMessage()
Returns a localized description of this exception. SQLException does not override Throwable.getLocalizedMessage(), so unless a driver supplies a subclass that localizes the text, the call returns the same string as getMessage(); it is still the conventional accessor to use when the message is shown to an end user or written to a log, as the examples below do.
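Before the collected examples, here is a minimal, self-contained sketch of the pattern they all share: execute a statement, catch SQLException, and report getLocalizedMessage() together with the SQL state and vendor error code. The JDBC URL, table name, and driver in this sketch are placeholders for illustration, not taken from any of the projects listed below.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class LocalizedMessageDemo {
    public static void main(String[] args) {
        // Placeholder URL: any JDBC driver on the classpath will do (H2 in-memory shown here).
        String url = "jdbc:h2:mem:demo";
        try (Connection con = DriverManager.getConnection(url);
                Statement stmt = con.createStatement()) {
            // Fails because the table does not exist, so the catch block below runs.
            stmt.executeUpdate("UPDATE missing_table SET x = 1");
        } catch (SQLException e) {
            // getLocalizedMessage() returns the same text as getMessage() unless the driver
            // ships a subclass that localizes it; it is the usual choice for user-facing
            // error reporting.
            System.err.println("Execute failed: " + e.getLocalizedMessage()
                    + " (SQLState=" + e.getSQLState() + ", vendor code=" + e.getErrorCode() + ")");
            // SQLExceptions may be chained; report every exception in the chain.
            for (SQLException next = e.getNextException(); next != null; next = next.getNextException()) {
                System.err.println("  chained: " + next.getLocalizedMessage());
            }
        }
    }
}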
From source file:org.pentaho.platform.plugin.action.sql.SQLExecute.java
protected boolean runSqlQuery(final SQLConnection conn, String rawQuery, final boolean live) {
    SqlExecuteAction sqlExecuteAction = (SqlExecuteAction) getActionDefinition();
    boolean executed = false;
    boolean continueOnException = sqlExecuteAction.getContinueOnException().getBooleanValue(false);
    String[] columnHeaders = new String[] {
        Messages.getInstance().getString("SQLExecute.USER_AFFECTED_ROWS_COLUMN_NAME"), //$NON-NLS-1$
        Messages.getInstance().getString("SQLExecute.USER_AFFECTED_ROW_STATUS") //$NON-NLS-1$
    };
    MemoryMetaData metaData = new MemoryMetaData(new String[][] { columnHeaders }, null);
    metaData.setColumnTypes(new String[] { "int", "string" }); //$NON-NLS-1$ //$NON-NLS-2$
    MemoryResultSet affectedRowsResultSet = new MemoryResultSet(metaData);
    String successMsg = Messages.getInstance().getString("SQLExecute.USER_SUCCESS"); //$NON-NLS-1$
    String failMsg = Messages.getInstance().getString("SQLExecute.USER_FAILED"); //$NON-NLS-1$
    try {
        if (conn == null) {
            error(Messages.getInstance().getErrorString("SQLBaseComponent.ERROR_0007_NO_CONNECTION")); //$NON-NLS-1$
            return false;
        }
        if (!conn.initialized()) {
            error(Messages.getInstance().getErrorString("SQLBaseComponent.ERROR_0007_NO_CONNECTION")); //$NON-NLS-1$
            return false;
        }
        if (sqlExecuteAction.getForceSingleStatement().getBooleanValue(false)) {
            // Forces original execution path.
            //
            // This execution path should be used if the query
            // has a semi-colon in the text of the SQL statement.
            //
            // This is a legitimate condition if there is (for example)
            // a statement with a where-clause that has a semi-colon.
            //
            // e.g.: UPDATE sometable SET somecolumn='val1;val2' WHERE somecolumn='val3;val4'
            //
            // In this case, using StringTokenizer on semi-colon will result in multiple un-executable
            // statements - the whole thing will fail.
            //
            // This is (arguably) unlikely, but it is possible. That's why I've chosen to make sure
            // that there is a mechanism for instating the old behavior.
            //
            String query = applyInputsToFormat(rawQuery);
            if (ComponentBase.debug) {
                debug(Messages.getInstance().getString("SQLBaseComponent.DEBUG_RUNNING_QUERY", query)); //$NON-NLS-1$
            }
            int affectedRows = conn.execute(query);
            executed = true;
            affectedRowsResultSet.addRow(new Object[] { new Integer(affectedRows), successMsg });
        } else {
            //
            // Multiple statement execute support provided by contribution from Melanie Crouch
            //
            rawQuery = SQLExecute.removeLineTerminators(rawQuery.trim()).toString();
            // tokenize the rawQuery passed into method to find if there are multiple updates to be executed.
            StringTokenizer st = new StringTokenizer(rawQuery,
                    sqlExecuteAction.getMultiStatementSeparator().getStringValue(";")); //$NON-NLS-1$
            while (st.hasMoreTokens()) {
                // set rawQuery equal to the nextToken.
                rawQuery = st.nextToken();
                String query = applyInputsToFormat(rawQuery.trim());
                if (ComponentBase.debug) {
                    debug(Messages.getInstance().getString("SQLBaseComponent.DEBUG_RUNNING_QUERY", query)); //$NON-NLS-1$
                }
                try {
                    int affectedRows = conn.execute(query);
                    // Normally, we'd check to see if the execution resulted in
                    // some updated rows.
                    affectedRowsResultSet.addRow(new Object[] { new Integer(affectedRows), successMsg });
                    executed = true;
                    debug(Messages.getInstance().getString("SQLBaseComponent.DEBUG_UPDATED_QUERY", query)); //$NON-NLS-1$
                } catch (SQLException e) {
                    error(Messages.getInstance().getErrorString("SQLBaseComponent.ERROR_0006_EXECUTE_FAILED", //$NON-NLS-1$
                            getActionName() + " : " + e.getLocalizedMessage())); //$NON-NLS-1$
                    executed = continueOnException;
                    if (!continueOnException) {
                        break;
                    }
                    addErrorCode(affectedRowsResultSet, e, failMsg);
                }
            } // end while tokenizer
        }
        if (getResultOutputName() != null) {
            setOutputValue(this.getResultOutputName(), affectedRowsResultSet);
        }
    } catch (SQLException e) {
        error(Messages.getInstance().getErrorString("SQLBaseComponent.ERROR_0006_EXECUTE_FAILED", //$NON-NLS-1$
                getActionName() + " : " + e.getLocalizedMessage())); //$NON-NLS-1$
        executed = continueOnException;
        addErrorCode(affectedRowsResultSet, e, e.getLocalizedMessage());
    } finally {
        // moved finally after last catch so one connection could be used to execute multiple updates.
        //
        // No matter what, make sure the connection
        // gets closed. Otherwise, the connection can
        // (ok, will) get stranded eating up resources
        // on the server. This is important.
        //
        if (connectionOwner) {
            conn.close();
        }
    }
    return executed;
}
From source file:com.flexive.core.stream.BinaryUploadProtocol.java
/**
 * {@inheritDoc}
 */
@Override
public synchronized boolean receiveStream(ByteBuffer buffer) throws IOException {
    if (!buffer.hasRemaining()) {
        //this can only happen on remote clients
        if (LOG.isDebugEnabled())
            LOG.debug("aborting (empty)");
        return false;
    }
    if (!rcvStarted) {
        rcvStarted = true;
        if (LOG.isDebugEnabled())
            LOG.debug("(internal serverside) receive start");
        try {
            pout = getContentStorage().receiveTransitBinary(division, handle, mimeType, expectedLength, timeToLive);
        } catch (SQLException e) {
            LOG.error("SQL Error trying to receive binary stream: " + e.getMessage(), e);
        } catch (FxNotFoundException e) {
            LOG.error("Failed to lookup content storage for division #" + division + ": " + e.getLocalizedMessage());
        }
    }
    if (LOG.isDebugEnabled() && count + buffer.remaining() > expectedLength) {
        LOG.debug("poss. overflow: pos=" + buffer.position() + " lim=" + buffer.limit() + " cap=" + buffer.capacity());
        LOG.debug("Curr count: " + count + " count+rem=" + (count + buffer.remaining()
                + " delta:" + ((count + buffer.remaining()) - expectedLength)));
    }
    count += buffer.remaining();
    pout.write(buffer.array(), buffer.position(), buffer.remaining());
    buffer.clear();
    if (expectedLength > 0 && count >= expectedLength) {
        if (LOG.isDebugEnabled())
            LOG.debug("aborting");
        return false;
    }
    return true;
}
From source file:no.sintef.jarfter.PostgresqlInteractor.java
private void loginDB() {
    JSONParser parser = new JSONParser();
    String endpoint;
    String username;
    String password;
    try {
        Object object = parser.parse(new FileReader("/usr/local/var/jarfter_config.json"));
        JSONObject jsonObject = (JSONObject) object;
        endpoint = (String) jsonObject.get("db_endpoint");
        username = (String) jsonObject.get("db_username");
        password = (String) jsonObject.get("db_password");
        if (empty(endpoint) || empty(username) || empty(password)) {
            throw new IOException(
                    "Json file did not contain one of the following: endpoint, username, password ("
                            + empty(endpoint) + "," + empty(username) + "," + empty(password) + ")");
        }
        conn = DriverManager.getConnection(endpoint, username, password);
    } catch (SQLException sqle) {
        String sqleMessage = sqle.getLocalizedMessage();
        log("loginDB - Found SQLException: " + sqle.getErrorCode() + " - " + sqleMessage);
        error(sqle);
        if (sqleMessage.contains("password authentication failed")) {
            throw new JarfterException(JarfterException.Error.DATABASE_LOGIN_ERROR);
        } else if (sqleMessage.contains("FATAL: database") && sqleMessage.contains("does not exist")) {
            throw new JarfterException(JarfterException.Error.SQL_NO_DATABASE);
        } else if (sqleMessage.contains("Connection refused. Check that the hostname and port are correct")) {
            throw new JarfterException(JarfterException.Error.SQL_NO_ENDPOINT);
        } else {
            throw new JarfterException(sqle.getClass().getName(), sqle.getLocalizedMessage());
        }
    } catch (IOException ioe) {
        log("loginDB - Found IOException");
        error(ioe);
        throw new JarfterException(JarfterException.Error.DATABASE_JSON_ERROR);
    } catch (ParseException pex) {
        log("loginDB - Found ParseException");
        error(pex);
        throw new JarfterException(JarfterException.Error.DATABASE_JSON_ERROR);
    }
}
From source file:edu.fullerton.ldvservlet.Upload.java
/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    long startTime = System.currentTimeMillis();
    if (!ServletFileUpload.isMultipartContent(request)) {
        throw new ServletException("This action requires a multipart form with a file attached.");
    }
    ServletSupport servletSupport;
    servletSupport = new ServletSupport();
    servletSupport.init(request, viewerConfig, false);
    // Create a factory for disk-based file items
    DiskFileItemFactory factory = new DiskFileItemFactory();
    // Configure a repository (to ensure a secure temp location is used)
    ServletContext servletContext = this.getServletConfig().getServletContext();
    File repository = (File) servletContext.getAttribute("javax.servlet.context.tempdir");
    factory.setRepository(repository);
    // Create a new file upload handler
    ServletFileUpload upload = new ServletFileUpload(factory);
    ImageTable imageTable;
    String viewServletPath = request.getContextPath() + "/view";
    try {
        imageTable = new ImageTable(servletSupport.getDb());
    } catch (SQLException ex) {
        String ermsg = "Image upload: can't access the Image table: " + ex.getClass().getSimpleName()
                + " " + ex.getLocalizedMessage();
        throw new ServletException(ermsg);
    }
    try {
        HashMap<String, String> params = new HashMap<>();
        ArrayList<Integer> uploadedIds = new ArrayList<>();
        Page vpage = servletSupport.getVpage();
        vpage.setTitle("Image upload");
        try {
            servletSupport.addStandardHeader(version);
            servletSupport.addNavBar();
        } catch (WebUtilException ex) {
            throw new ServerException("Adding nav bar after upload", ex);
        }
        // Parse the request
        List<FileItem> items = upload.parseRequest(request);
        int cnt = items.size();
        for (FileItem item : items) {
            if (item.isFormField()) {
                String name = item.getFieldName();
                String value = item.getString();
                if (!value.isEmpty()) {
                    params.put(name, value);
                }
            }
        }
        for (FileItem item : items) {
            if (!item.isFormField()) {
                int imgId = addFile(item, params, vpage, servletSupport.getVuser().getCn(), imageTable);
                if (imgId != 0) {
                    uploadedIds.add(imgId);
                }
            }
        }
        if (!uploadedIds.isEmpty()) {
            showImages(vpage, uploadedIds, imageTable, viewServletPath);
        }
        servletSupport.showPage(response);
    } catch (FileUploadException ex) {
        Logger.getLogger(Upload.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:fr.sanofi.fcl4transmart.controllers.listeners.clinicalData.LoadClinicalDataListener.java
@Override
public void handleEvent(Event event) {
    loadDataUI.openLoadingShell();
    new Thread() {
        public void run() {
            String jobPath;
            try {
                String[] splited = loadDataUI.getTopNode().split("\\\\", -1);
                if (splited[0].compareTo("") != 0) {
                    loadDataUI.setMessage("A study node has to begin by the character '\\'");
                    loadDataUI.setIsLoading(false);
                    return;
                }
                try {
                    Class.forName("oracle.jdbc.driver.OracleDriver");
                    String connectionString = "jdbc:oracle:thin:@" + PreferencesHandler.getDbServer() + ":"
                            + PreferencesHandler.getDbPort() + ":" + PreferencesHandler.getDbName();
                    Connection con = DriverManager.getConnection(connectionString,
                            PreferencesHandler.getMetadataUser(), PreferencesHandler.getMetadataPwd());
                    Statement stmt = con.createStatement();
                    ResultSet rs = stmt.executeQuery("select * from table_access where c_name='" + splited[1] + "'");
                    if (!rs.next()) { //have to add a top node
                        stmt.executeQuery("insert into table_access(" + "c_table_cd," + "c_table_name,"
                                + "c_protected_access," + "c_hlevel," + "c_fullname," + "c_name," + "c_synonym_cd,"
                                + "c_visualattributes," + "c_totalnum," + "c_facttablecolumn," + "c_dimtablename,"
                                + "c_columnname," + "c_columndatatype," + "c_operator," + "c_dimcode," + "c_tooltip,"
                                + "c_status_cd) values(" + "'" + splited[1] + "'," + "'i2b2'," + "'N'," + "0," + "'\\"
                                + splited[1] + "\\'," + "'" + splited[1] + "'," + "'N'," + "'CA'," + "0,"
                                + "'concept_cd'," + "'concept_dimension'," + "'concept_path'," + "'T'," + "'LIKE',"
                                + "'\\" + splited[1] + "\\'," + "'\\" + splited[1] + "\\'," + "'A')");
                        stmt.executeQuery("insert into i2b2 values(0, '\\" + splited[1] + "\\', '" + splited[1]
                                + "','N','CA',0,null, null, 'CONCEPT_CD','CONCEPT_DIMENSION','CONCEPT_PATH', 'T', 'LIKE','\\"
                                + splited[1] + "\\', null, '\\" + splited[1]
                                + "\\', sysdate, null, null, null, null, null, '@', null, null, null)");
                    }
                    con.close();
                } catch (SQLException e) {
                    e.printStackTrace();
                    loadDataUI.setMessage("SQL exception: " + e.getLocalizedMessage());
                    loadDataUI.setIsLoading(false);
                    return;
                } catch (ClassNotFoundException e) {
                    e.printStackTrace();
                    loadDataUI.setMessage("Class Not Found exception");
                    loadDataUI.setIsLoading(false);
                    return;
                }
                //initiate kettle environment
                GlobalMessages.setLocale(EnvUtil.createLocale("en-US"));
                KettleEnvironment.init(false);
                //find the kettle job to initiate the loading
                URL jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/create_clinical_data.kjb");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobPath = jobUrl.getPath();
                //create a new job from the kettle file
                JobMeta jobMeta = new JobMeta(jobPath, null);
                Job job = new Job(null, jobMeta);
                //find the other files needed for job and put them in the cache
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/validate_clinical_data_params.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/get_data_filenames.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/load_lt_clinical_data.kjb");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/map_data_to_std_format.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/run_i2b2_load_clinical_data.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/set_data_filename.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                job.getJobMeta().setParameterValue("DATA_LOCATION", dataType.getPath().getAbsolutePath());
                job.getJobMeta().setParameterValue("COLUMN_MAP_FILE", ((ClinicalData) dataType).getCMF().getName());
                File sort = new File(dataType.getStudy().getPath().getParentFile().getAbsolutePath()
                        + File.separator + ".sort");
                if (!sort.exists()) {
                    FileUtils.forceMkdir(sort);
                }
                job.getJobMeta().setParameterValue("SORT_DIR", sort.getAbsolutePath());
                job.getJobMeta().setParameterValue("STUDY_ID", dataType.getStudy().toString());
                job.getJobMeta().setParameterValue("TOP_NODE", loadDataUI.getTopNode());
                if (((ClinicalData) dataType).getWMF() != null) {
                    job.getJobMeta().setParameterValue("WORD_MAP_FILE", ((ClinicalData) dataType).getWMF().getName());
                }
                job.getJobMeta().setParameterValue("LOAD_TYPE", "I");
                job.getJobMeta().setParameterValue("TM_CZ_DB_SERVER", PreferencesHandler.getDbServer());
                job.getJobMeta().setParameterValue("TM_CZ_DB_NAME", PreferencesHandler.getDbName());
                job.getJobMeta().setParameterValue("TM_CZ_DB_PORT", PreferencesHandler.getDbPort());
                job.getJobMeta().setParameterValue("TM_CZ_DB_USER", PreferencesHandler.getTm_czUser());
                job.getJobMeta().setParameterValue("TM_CZ_DB_PWD", PreferencesHandler.getTm_czPwd());
                job.getJobMeta().setParameterValue("TM_LZ_DB_SERVER", PreferencesHandler.getDbServer());
                job.getJobMeta().setParameterValue("TM_LZ_DB_NAME", PreferencesHandler.getDbName());
                job.getJobMeta().setParameterValue("TM_LZ_DB_PORT", PreferencesHandler.getDbPort());
                job.getJobMeta().setParameterValue("TM_LZ_DB_USER", PreferencesHandler.getTm_lzUser());
                job.getJobMeta().setParameterValue("TM_LZ_DB_PWD", PreferencesHandler.getTm_lzPwd());
                job.start();
                job.waitUntilFinished();
                @SuppressWarnings("unused")
                Result result = job.getResult();
                Log4jBufferAppender appender = CentralLogStore.getAppender();
                String logText = appender.getBuffer(job.getLogChannelId(), false).toString();
                Pattern pattern = Pattern.compile(
                        ".*Finished job entry \\[run i2b2_load_clinical_data\\] \\(result=\\[true\\]\\).*",
                        Pattern.DOTALL);
                Matcher matcher = pattern.matcher(logText);
                if (matcher.matches()) {
                    String connectionString = "jdbc:oracle:thin:@" + PreferencesHandler.getDbServer() + ":"
                            + PreferencesHandler.getDbPort() + ":" + PreferencesHandler.getDbName();
                    Connection con = DriverManager.getConnection(connectionString,
                            PreferencesHandler.getTm_czUser(), PreferencesHandler.getTm_czPwd());
                    Statement stmt = con.createStatement();
                    //remove rows for study before adding new ones
                    ResultSet rs = stmt.executeQuery(
                            "select max(JOB_ID) from CZ_JOB_AUDIT where STEP_DESC='Start i2b2_load_clinical_data'");
                    int jobId;
                    if (rs.next()) {
                        jobId = rs.getInt("max(JOB_ID)");
                    } else {
                        con.close();
                        loadDataUI.setMessage("Job identifier does not exist");
                        loadDataUI.setIsLoading(false);
                        return;
                    }
                    logText += "\nOracle job id:\n" + String.valueOf(jobId);
                    con.close();
                }
                writeLog(logText);
                CentralLogStore.discardLines(job.getLogChannelId(), false);
            } catch (Exception e1) {
                e1.printStackTrace();
                loadDataUI.setMessage("Kettle exception: " + e1.getLocalizedMessage());
                loadDataUI.setIsLoading(false);
                return;
            }
            loadDataUI.setIsLoading(false);
        }
    }.start();
    this.loadDataUI.waitForThread();
    this.loadDataUI.displayMessage("Loading process is over.\n Please check monitoring step.");
    WorkPart.updateSteps();
    WorkPart.updateFiles();
}
From source file:org.deegree.commons.jdbc.ConnectionManager.java
/**
 * Returns a connection from the connection pool with the given id.
 *
 * @param id
 *            id of the connection pool
 * @return connection from the corresponding connection pool, null, if not available
 */
public Connection get(String id) {
    ConnectionPool pool = idToPools.get(id);
    if (pool == null) {
        throw new RuntimeException("Connection not configured.");
    }
    Connection conn = null;
    try {
        conn = pool.getConnection();
        return conn;
    } catch (SQLException e) {
        LOG.warn("JDBC connection {} is not available.", id);
        throw new RuntimeException(e.getLocalizedMessage(), e);
    }
}
From source file:no.sintef.jarfter.PostgresqlInteractor.java
public int addFilesEntry(String fileid, String filename, String rawDataFilename) throws JarfterException {
    checkConnection();
    File rawDataFile = null;
    FileInputStream rawDataStream = null;
    try {
        rawDataFile = new File(rawDataFilename);
        rawDataStream = new FileInputStream(rawDataFile); // this constructor throws exception if file not exist
    } catch (FileNotFoundException nofile) {
        log("addFilesEntry - Did not find rawDataFile");
        error(nofile);
        throw new JarfterException(JarfterException.Error.IO_NO_TEMP_RAWDATA);
    }
    int rowsUpdated;
    try {
        PreparedStatement pst = conn
                .prepareStatement("INSERT INTO files (fileid, filename, file) VALUES (?, ?, ?)");
        pst.setString(1, fileid);
        pst.setString(2, filename);
        pst.setBinaryStream(3, rawDataStream, (int) rawDataFile.length());
        rowsUpdated = pst.executeUpdate();
        pst.close();
        log("addFilesEntry - End of method");
        return rowsUpdated;
    } catch (SQLException sqle) {
        log("addFilesEntry - got SQLException...");
        error(sqle);
        if (sqle.getLocalizedMessage().contains("duplicate key value")) {
            throw new JarfterException(JarfterException.Error.SQL_DUPLICATED_KEY);
        }
        throw new JarfterException(JarfterException.Error.SQL_UNKNOWN_ERROR, sqle.getLocalizedMessage());
    }
}
From source file:org.opencms.db.generic.CmsSqlManager.java
/**
 * Attempts to close the connection, statement and result set after a statement has been executed.<p>
 *
 * @param dbc the current database context
 * @param con the JDBC connection
 * @param stmnt the statement
 * @param res the result set
 */
public void closeAll(CmsDbContext dbc, Connection con, Statement stmnt, ResultSet res) {
    // NOTE: we have to close Connections/Statements that way, because a dbcp PoolablePreparedStatement
    // is not a DelegatedStatement; for that reason its not removed from the trace of the connection when it is closed.
    // So, the connection tries to close it again when the connection is closed itself;
    // as a result there is an error that forces the connection to be destroyed and not pooled
    if (dbc == null) {
        LOG.error(Messages.get().getBundle().key(Messages.LOG_NULL_DB_CONTEXT_0));
    }
    try {
        // first, close the result set
        if (res != null) {
            res.close();
        }
    } catch (SQLException e) {
        LOG.debug(e.getLocalizedMessage(), e);
    } finally {
        res = null;
    }
    try {
        // close the statement
        if (stmnt != null) {
            stmnt.close();
        }
    } catch (SQLException e) {
        LOG.debug(e.getLocalizedMessage(), e);
    } finally {
        stmnt = null;
    }
    try {
        // close the connection
        if ((con != null) && !con.isClosed()) {
            con.close();
        }
    } catch (SQLException e) {
        LOG.debug(e.getLocalizedMessage(), e);
    } finally {
        con = null;
    }
}
From source file:no.sintef.jarfter.PostgresqlInteractor.java
public int addTransformationEntry(String uri, String name, String metadata, String clojureFileName,
        String jarFileName) throws JarfterException {
    checkConnection();
    File jarFile = null;
    FileInputStream jarFileInputStream = null;
    try {
        jarFile = new File(jarFileName);
        // if jarFile does not exist, the next line will throw exception
        jarFileInputStream = new FileInputStream(jarFileName);
    } catch (FileNotFoundException nofile) {
        log("addTransformationEntry - Did not find jarFile");
        error(nofile);
        throw new JarfterException(JarfterException.Error.IO_NO_TEMP_JAR);
    }
    int rowsUpdated;
    try {
        PreparedStatement pst = conn.prepareStatement(
                "INSERT INTO transformations (uri, name, metadata, clojure, executable) VALUES (?, ?, ?, ?, ?)");
        pst.setString(1, uri);
        pst.setString(2, name);
        pst.setString(3, metadata);
        pst.setString(4, fileToString(clojureFileName));
        pst.setBinaryStream(5, jarFileInputStream, (int) jarFile.length());
        rowsUpdated = pst.executeUpdate();
        pst.close();
        jarFileInputStream.close();
    } catch (SQLException sqle) {
        log("addTransformationEntry - got SQLException...");
        error(sqle);
        if (sqle.getLocalizedMessage().contains("duplicate key value")) {
            throw new JarfterException(JarfterException.Error.SQL_DUPLICATED_KEY);
        }
        throw new JarfterException(JarfterException.Error.SQL_UNKNOWN_ERROR, sqle.getLocalizedMessage());
    } catch (FileNotFoundException nofile) {
        log("addTransformationEntry - Did not find jarFile");
        error(nofile);
        throw new JarfterException(JarfterException.Error.IO_NO_TEMP_CLJ);
    } catch (IOException ioe) {
        log("addTransformationEntry - got IOException from jarFileInputStream.close()");
        error(ioe);
        throw new JarfterException(JarfterException.Error.UNKNOWN_ERROR, ioe.getLocalizedMessage());
    }
    log("addTransformationEntry - End of method");
    return rowsUpdated;
}
From source file:fr.sanofi.fcl4transmart.controllers.listeners.geneExpression.LoadGeneExpressionDataListener.java
@Override
public void handleEvent(Event event) {
    this.topNode = this.loadDataUI.getTopNode();
    this.path = this.dataType.getPath().getAbsolutePath();
    this.sortName = this.dataType.getStudy().getPath().getParentFile().getAbsolutePath() + File.separator + ".sort";
    loadDataUI.openLoadingShell();
    new Thread() {
        public void run() {
            try {
                String[] splited = topNode.split("\\\\", -1);
                if (splited[0].compareTo("") != 0) {
                    loadDataUI.setMessage("A study node has to begin by the character '\\'");
                    loadDataUI.setIsLoading(false);
                    return;
                }
                try {
                    Class.forName("oracle.jdbc.driver.OracleDriver");
                    String connectionString = "jdbc:oracle:thin:@" + PreferencesHandler.getDbServer() + ":"
                            + PreferencesHandler.getDbPort() + ":" + PreferencesHandler.getDbName();
                    Connection con = DriverManager.getConnection(connectionString,
                            PreferencesHandler.getMetadataUser(), PreferencesHandler.getMetadataPwd());
                    Statement stmt = con.createStatement();
                    ResultSet rs = stmt.executeQuery("select * from table_access where c_name='" + splited[1] + "'");
                    if (!rs.next()) { //have to add a top node
                        stmt.executeQuery("insert into table_access(" + "c_table_cd," + "c_table_name,"
                                + "c_protected_access," + "c_hlevel," + "c_fullname," + "c_name," + "c_synonym_cd,"
                                + "c_visualattributes," + "c_totalnum," + "c_facttablecolumn," + "c_dimtablename,"
                                + "c_columnname," + "c_columndatatype," + "c_operator," + "c_dimcode," + "c_tooltip,"
                                + "c_status_cd) values(" + "'" + splited[1] + "'," + "'i2b2'," + "'N'," + "0," + "'\\"
                                + splited[1] + "\\'," + "'" + splited[1] + "'," + "'N'," + "'CA'," + "0,"
                                + "'concept_cd'," + "'concept_dimension'," + "'concept_path'," + "'T'," + "'LIKE',"
                                + "'\\" + splited[1] + "\\'," + "'\\" + splited[1] + "\\'," + "'A')");
                        stmt.executeQuery("insert into i2b2 values(0, '\\" + splited[1] + "\\', '" + splited[1]
                                + "','N','CA',0,null, null, 'CONCEPT_CD','CONCEPT_DIMENSION','CONCEPT_PATH', 'T', 'LIKE','\\"
                                + splited[1] + "\\', null, '\\" + splited[1]
                                + "\\', sysdate, null, null, null, null, null, '@', null, null, null)");
                    }
                    con.close();
                } catch (SQLException e) {
                    e.printStackTrace();
                    loadDataUI.displayMessage("SQL error: " + e.getLocalizedMessage());
                    loadDataUI.setIsLoading(false);
                    return;
                } catch (ClassNotFoundException e) {
                    loadDataUI.displayMessage("Java error: Class not found exception");
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                    loadDataUI.setIsLoading(false);
                    return;
                }
                //initiate kettle environment
                KettleEnvironment.init(false);
                //find the kettle job to initiate the loading
                URL jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/load_gene_expression_data.kjb");
                jobUrl = FileLocator.toFileURL(jobUrl);
                String jobPath = jobUrl.getPath();
                //create a new job from the kettle file
                JobMeta jobMeta = new JobMeta(jobPath, null);
                Job job = new Job(null, jobMeta);
                //find the other files needed for this job and put them in the cache
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/validate_gene_expression_params.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/validate_gene_expression_columns.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/check_gene_expression_filenames.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/load_all_gene_expression_files_for_study.kjb");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/run_i2b2_process_mrna_data.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/load_subject_sample_map_to_lt.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/get_list_of_gene_expression_filenames.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/load_gene_expression_one_study.kjb");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/set_gene_expression_filename.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/validate_gene_expression_columns.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/load_gene_expression_data_to_lz.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                jobUrl = new URL("platform:/plugin/fr.sanofi.fcl4transmart/jobs_kettle/pivot_gene_file.ktr");
                jobUrl = FileLocator.toFileURL(jobUrl);
                job.getJobMeta().setParameterValue("DATA_FILE_PREFIX", ((GeneExpressionData) dataType).getRawFile().getName());
                job.getJobMeta().setParameterValue("DATA_LOCATION", path);
                job.getJobMeta().setParameterValue("MAP_FILENAME", ((GeneExpressionData) dataType).getStsmf().getName());
                job.getJobMeta().setParameterValue("DATA_TYPE", "R");
                job.getJobMeta().setParameterValue("FilePivot_LOCATION", "");
                job.getJobMeta().setParameterValue("LOAD_TYPE", "I");
                job.getJobMeta().setParameterValue("LOG_BASE", "2");
                job.getJobMeta().setParameterValue("SAMPLE_REMAP_FILENAME", "NOSAMPLEREMAP");
                job.getJobMeta().setParameterValue("SAMPLE_SUFFIX", ".rma-Signal");
                job.getJobMeta().setParameterValue("SECURITY_REQUIRED", "N");
                job.getJobMeta().setParameterValue("SOURCE_CD", "STD");
                File sort = new File(sortName);
                if (!sort.exists()) {
                    FileUtils.forceMkdir(sort);
                }
                path = sort.getAbsolutePath();
                job.getJobMeta().setParameterValue("SORT_DIR", path);
                job.getJobMeta().setParameterValue("STUDY_ID", dataType.getStudy().toString());
                job.getJobMeta().setParameterValue("TOP_NODE", topNode);
                //job.getJobMeta().setParameterValue("JAVA_HOME", "/usr/local/jdk1.6.0_31");
                job.getJobMeta().setParameterValue("TM_CZ_DB_SERVER", PreferencesHandler.getDbServer());
                job.getJobMeta().setParameterValue("TM_CZ_DB_NAME", PreferencesHandler.getDbName());
                job.getJobMeta().setParameterValue("TM_CZ_DB_PORT", PreferencesHandler.getDbPort());
                job.getJobMeta().setParameterValue("TM_CZ_DB_USER", PreferencesHandler.getTm_czUser());
                job.getJobMeta().setParameterValue("TM_CZ_DB_PWD", PreferencesHandler.getTm_czPwd());
                job.getJobMeta().setParameterValue("TM_LZ_DB_SERVER", PreferencesHandler.getDbServer());
                job.getJobMeta().setParameterValue("TM_LZ_DB_NAME", PreferencesHandler.getDbName());
                job.getJobMeta().setParameterValue("TM_LZ_DB_PORT", PreferencesHandler.getDbPort());
                job.getJobMeta().setParameterValue("TM_LZ_DB_USER", PreferencesHandler.getTm_lzUser());
                job.getJobMeta().setParameterValue("TM_LZ_DB_PWD", PreferencesHandler.getTm_lzPwd());
                job.start();
                job.waitUntilFinished();
                @SuppressWarnings("unused")
                Result result = job.getResult();
                loadDataUI.displayMessage("Loading process is over.\n Please check monitoring step.");
                Log4jBufferAppender appender = CentralLogStore.getAppender();
                String logText = appender.getBuffer(job.getLogChannelId(), false).toString();
                Pattern pattern = Pattern.compile(".*Finished job entry \\[run i2b2_process_mrna_data\\].*", Pattern.DOTALL);
                Matcher matcher = pattern.matcher(logText);
                if (matcher.matches()) {
                    String connectionString = "jdbc:oracle:thin:@" + PreferencesHandler.getDbServer() + ":"
                            + PreferencesHandler.getDbPort() + ":" + PreferencesHandler.getDbName();
                    Connection con = DriverManager.getConnection(connectionString,
                            PreferencesHandler.getTm_czUser(), PreferencesHandler.getTm_czPwd());
                    Statement stmt = con.createStatement();
                    //remove rows for this study before adding new ones
                    ResultSet rs = stmt.executeQuery(
                            "select max(JOB_ID) from CZ_JOB_AUDIT where STEP_DESC='Starting i2b2_process_mrna_data'");
                    int jobId;
                    if (rs.next()) {
                        jobId = rs.getInt("max(JOB_ID)");
                    } else {
                        con.close();
                        loadDataUI.setIsLoading(false);
                        return;
                    }
                    logText += "\nOracle job id:\n" + String.valueOf(jobId);
                    con.close();
                }
                writeLog(logText);
                CentralLogStore.discardLines(job.getLogChannelId(), false);
            } catch (Exception e1) {
                //this.write(e1.getMessage());
                loadDataUI.displayMessage("Error: " + e1.getLocalizedMessage());
                loadDataUI.setIsLoading(false);
                e1.printStackTrace();
            }
            loadDataUI.setIsLoading(false);
        }
    }.start();
    this.loadDataUI.waitForThread();
    WorkPart.updateSteps();
    WorkPart.updateFiles();
}