List of usage examples for java.sql Statement executeQuery
ResultSet executeQuery(String sql) throws SQLException;
Executes the given SQL statement and returns a single ResultSet object containing the data produced by the query.
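Before the longer, real-world examples below, here is a minimal sketch of the typical executeQuery pattern. The JDBC URL and the query are hypothetical placeholders (it assumes an H2 in-memory database on the classpath); substitute your own driver, URL, and SQL.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ExecuteQueryExample {
    public static void main(String[] args) throws SQLException {
        // Hypothetical in-memory database URL; replace with your own data source.
        String url = "jdbc:h2:mem:demo";
        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement();
             // executeQuery returns a ResultSet positioned before the first row.
             ResultSet rs = stmt.executeQuery("SELECT 1 AS col1")) {
            while (rs.next()) {
                System.out.println(rs.getInt("col1"));
            }
        }
    }
}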
From source file: com.vertica.hivestoragehandler.VerticaOutputFormat.java
/**
 * Optionally called at the end of a job to optimize any newly created and
 * loaded tables. Useful for new tables with more than 100k records.
 *
 * @param conf
 * @throws Exception
 */
public static void optimize(Configuration conf) throws Exception {
    VerticaConfiguration vtconfig = new VerticaConfiguration(conf);
    Connection conn = vtconfig.getConnection(true);

    // TODO: consider more tables and skip tables with non-temp projections
    VerticaRelation vTable = new VerticaRelation(vtconfig.getOutputTableName());
    Statement stmt = conn.createStatement();
    ResultSet rs = null;
    HashSet<String> tablesWithTemp = new HashSet<String>();

    // for now just add the single output table
    tablesWithTemp.add(vTable.getQualifiedName().toString());

    // map from table name to set of projection names
    HashMap<String, Collection<String>> tableProj = new HashMap<String, Collection<String>>();
    rs = stmt.executeQuery("select projection_schema, anchor_table_name, projection_name from projections;");
    while (rs.next()) {
        String ptable = rs.getString(1) + "." + rs.getString(2);
        if (!tableProj.containsKey(ptable)) {
            tableProj.put(ptable, new HashSet<String>());
        }
        tableProj.get(ptable).add(rs.getString(3));
    }

    for (String table : tablesWithTemp) {
        if (!tableProj.containsKey(table)) {
            throw new RuntimeException("Cannot optimize table with no data: " + table);
        }
    }

    String designName = (new Integer(conn.hashCode())).toString();
    stmt.execute("select dbd_create_workspace('" + designName + "')");
    stmt.execute("select dbd_create_design('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_add_design_tables('" + designName + "', '"
            + vTable.getQualifiedName().toString() + "')");
    stmt.execute("select dbd_populate_design('" + designName + "', '" + designName + "')");

    // Execute
    stmt.execute("select dbd_create_deployment('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_add_deployment_design('" + designName + "', '" + designName + "', '"
            + designName + "')");
    stmt.execute("select dbd_add_deployment_drop('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_execute_deployment('" + designName + "', '" + designName + "')");

    // Cleanup
    stmt.execute("select dbd_drop_deployment('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_remove_design('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_drop_design('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_drop_workspace('" + designName + "')");
}
From source file:com.datatorrent.contrib.hive.HiveMockTest.java
public static void hiveInitializePOJODatabase(HiveStore hiveStore) throws SQLException {
    hiveStore.connect();
    Statement stmt = hiveStore.getConnection().createStatement();

    // show tables
    String sql = "show tables";
    LOG.debug(sql);
    ResultSet res = stmt.executeQuery(sql);
    if (res.next()) {
        LOG.debug("tables are {}", res.getString(1));
    }

    stmt.execute("DROP TABLE " + tablepojo);
    stmt.execute("CREATE TABLE " + tablepojo + " (col1 int) PARTITIONED BY(dt STRING) ROW FORMAT DELIMITED "
            + "FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\n' \n"
            + "STORED AS TEXTFILE ");
    hiveStore.disconnect();
}
From source file:com.datatorrent.contrib.hive.HiveMockTest.java
public static void hiveInitializeMapDatabase(HiveStore hiveStore) throws SQLException {
    hiveStore.connect();
    Statement stmt = hiveStore.getConnection().createStatement();

    // show tables
    String sql = "show tables";
    LOG.debug(sql);
    ResultSet res = stmt.executeQuery(sql);
    if (res.next()) {
        LOG.debug(res.getString(1));
    }

    stmt.execute("DROP TABLE " + tablemap);
    stmt.execute("CREATE TABLE IF NOT EXISTS " + tablemap
            + " (col1 map<string,int>) PARTITIONED BY(dt STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' \n"
            + "MAP KEYS TERMINATED BY '" + delimiterMap + "' \n"
            + "STORED AS TEXTFILE ");
    hiveStore.disconnect();
}
From source file:com.datatorrent.contrib.hive.HiveMockTest.java
public static void hiveInitializeDatabase(HiveStore hiveStore) throws SQLException {
    hiveStore.connect();
    Statement stmt = hiveStore.getConnection().createStatement();

    // show tables
    String sql = "show tables";
    LOG.debug(sql);
    ResultSet res = stmt.executeQuery(sql);
    if (res.next()) {
        LOG.debug("tables are {}", res.getString(1));
    }

    stmt.execute("DROP TABLE " + tablename);
    stmt.execute("CREATE TABLE IF NOT EXISTS " + tablename + " (col1 String) PARTITIONED BY(dt STRING) "
            + "ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\n' \n"
            + "STORED AS TEXTFILE ");
    /*ResultSet res = stmt.execute("CREATE TABLE IF NOT EXISTS temp4 (col1 map<string,int>,col2 map<string,int>,col3 map<string,int>,col4 map<String,timestamp>, col5 map<string,double>,col6 map<string,double>,col7 map<string,int>,col8 map<string,int>) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' \n"
            + "COLLECTION ITEMS TERMINATED BY '\n' \n"
            + "MAP KEYS TERMINATED BY ':' \n"
            + "LINES TERMINATED BY '\n' "
            + "STORED AS TEXTFILE");*/
    hiveStore.disconnect();
}
From source file:com.hp.test.framework.generatejellytess.GenerateJellyTests.java
public static void genjellyTests(String Parent_model_gid, String Feature_name) {
    Statement statement = null;
    try {
        statement = connection.createStatement();
        String sql_query = "SELECT DR.GID as MODEL_GID,DSR.GID AS SCENARIO_GID,DT.GID AS TEST_GID,DR.MODEL_PATH,DT.TESTCASE,DT.RESULTS FROM DM_TESTCASE DT, DM_MODELXML_REF DR, DM_SCENARIO_REF DSR where DT.TEMPLATE_GID=DSR.GID and DSR.MODEL_GID_REF=DR.GID AND DR.MASTER_GID="
                + Integer.parseInt(Parent_model_gid) + " ORDER BY DR.GID";
        ResultSet rs = statement.executeQuery(sql_query);

        String GID_temp = "";
        String Test_case = "";
        String Model_xml_path = "";
        int results = 0;

        CreateReadableTestCase.fw = new BufferedWriter(new OutputStreamWriter(
                new FileOutputStream(alm_test_location + Feature_name + ".csv"), "UTF-8"));
        CreateReadableTestCase.fw.write("TestCaseNumber,StepName,Step Description,ExpectedResults\n");

        while (rs.next()) {
            GID_temp = Parent_model_gid + "_" + rs.getString("MODEL_GID") + "_" + rs.getString("SCENARIO_GID")
                    + "_" + rs.getString("TEST_GID");
            // rs.getInt("GID");
            log.info("Generating Jelly Testcase for MasterModel GID::" + Parent_model_gid
                    + " MODEL XML GID::" + rs.getString("MODEL_GID")
                    + " TEST SCENARIO GID::" + rs.getString("SCENARIO_GID")
                    + " TESTCASE GID::" + rs.getString("TEST_GID"));
            Test_case = rs.getString("TESTCASE");
            results = rs.getInt("RESULTS");
            Model_xml_path = rs.getString("MODEL_PATH");
            maptestcasesapi(GID_temp, Test_case, results, Model_xml_path);
            // break;
        }

        CreateReadableTestCase.fw.close();
        CreateReadableTestCase.fw = null;
        statement.setQueryTimeout(30); // set timeout to 30 sec.
        if (statement != null) {
            statement.close();
        }
    } catch (SQLException e) {
        log.error("Error in getting records from DM_TESTCASE to Execute API or to generate Jelly tests");
    } catch (IOException e) {
        log.error("exception in IO" + e.getMessage());
    } catch (ClassNotFoundException e) {
        log.error("" + e.getMessage());
    }
}
From source file:edu.ucsb.nceas.MCTestCase.java
protected static void dbQuery(String sqlStatement, String methodName) throws SQLException {
    DBConnectionPool connPool = DBConnectionPool.getInstance();
    DBConnection dbconn = DBConnectionPool.getDBConnection(methodName);
    int serialNumber = dbconn.getCheckOutSerialNumber();
    Statement statement = dbconn.createStatement();
    debug("Executing against db: " + sqlStatement);
    statement.executeQuery(sqlStatement);
    statement.close();
    DBConnectionPool.returnDBConnection(dbconn, serialNumber);
}
From source file:database.HashTablesTools.java
public static int countFilesWhichAreAlreadyIndexedInSequenceDB(String tableName, String tableFailureName,
        Map<String, List<MMcifFileInfos>> indexPDBFileInFolder) {
    Connection connection = HashTablesTools.getConnection(tableName, tableFailureName);

    // build all hash
    //System.out.println("starting hash list");
    ResultSet resultFindEntryFailureDb = null;
    try {
        Statement stmt = connection.createStatement();
        String findEntry = "SELECT * from " + tableFailureName;
        resultFindEntryFailureDb = stmt.executeQuery(findEntry);
    } catch (SQLException e) {
        e.printStackTrace();
    }

    ResultSet resultFindEntrySequenceDb = null;
    try {
        Statement stmt = connection.createStatement();
        String findEntry = "SELECT * from " + tableName;
        resultFindEntrySequenceDb = stmt.executeQuery(findEntry);
    } catch (SQLException e) {
        e.printStackTrace();
    }

    Statement stmt = null;
    ResultSet resultFindEntry = null;
    int countOfHashFoundInFailureDB = 0;
    int countOfHashFoundInSequenceDB = 0;

    for (Map.Entry<String, List<MMcifFileInfos>> entry : indexPDBFileInFolder.entrySet()) {
        A: for (MMcifFileInfos fileInfos : entry.getValue()) {
            try {
                stmt = connection.createStatement();
                String findEntry = "SELECT * from " + tableFailureName + " WHERE pdbfilehash = '"
                        + fileInfos.getHash() + "'";
                resultFindEntry = stmt.executeQuery(findEntry);
                if (resultFindEntry.next()) {
                    countOfHashFoundInFailureDB += 1;
                    stmt.close();
                    continue A;
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }

            // if here not found in failureDB
            try {
                stmt = connection.createStatement();
                String findEntry = "SELECT * from " + tableName + " WHERE pdbfilehash = '"
                        + fileInfos.getHash() + "'";
                resultFindEntry = stmt.executeQuery(findEntry);
                if (resultFindEntry.next()) {
                    countOfHashFoundInSequenceDB += 1;
                    stmt.close();
                    continue A;
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
    System.out.println("countOfHashFoundInSequenceDB = " + countOfHashFoundInSequenceDB);
    System.out.println("countOfHashFoundInFailureDB = " + countOfHashFoundInFailureDB);
    return countOfHashFoundInSequenceDB + countOfHashFoundInFailureDB;
}
From source file:database.HashTablesTools.java
private static boolean fileAlreadyParsedDetectedByHash(Connection connection, String tableName,
        String tableFailureName, String pdbFileHash) throws SQLException {
    Statement stmt = connection.createStatement();
    String findEntry = "SELECT * from " + tableFailureName + " WHERE pdbfilehash = '" + pdbFileHash + "'";
    ResultSet resultFindEntry = stmt.executeQuery(findEntry);

    int foundEntriesCount = 0;
    String fourLetterCodeFromDB = null;
    if (resultFindEntry.next()) {
        foundEntriesCount += 1;
        fourLetterCodeFromDB = resultFindEntry.getString(2);
    }
    if (foundEntriesCount != 0) {
        //System.out.println("Found in failure table " + fourLetterCodeFromDB + " " + foundEntriesCount);
        return true;
    }

    stmt = connection.createStatement();
    findEntry = "SELECT * from " + tableName + " WHERE pdbfilehash = '" + pdbFileHash + "'";
    resultFindEntry = stmt.executeQuery(findEntry);

    foundEntriesCount = 0;
    fourLetterCodeFromDB = null;
    String chainIdFromDB = null;
    if (resultFindEntry.next()) {
        foundEntriesCount += 1;
        fourLetterCodeFromDB = resultFindEntry.getString(2);
        chainIdFromDB = resultFindEntry.getString(3);
    }
    if (foundEntriesCount != 0) {
        //System.out.println("duplicate entry " + fourLetterCodeFromDB + " " + chainIdFromDB + " " + foundEntriesCount);
        return true;
    }
    return false;
}
From source file:database.HashTablesTools.java
public static int countFilesWhichAreAlreadyIndexedInSequenceDB_old(String tableName, String tableFailureName,
        Map<String, List<MMcifFileInfos>> indexPDBFileInFolder) {
    Connection connection = HashTablesTools.getConnection(tableName, tableFailureName);

    // build all hash
    //System.out.println("starting hash list");
    List<String> filesHash = new ArrayList<>();
    for (Map.Entry<String, List<MMcifFileInfos>> entry : indexPDBFileInFolder.entrySet()) {
        for (MMcifFileInfos fileInfos : entry.getValue()) {
            filesHash.add(fileInfos.getHash());
        }
    }
    System.out.println("finished hash list " + filesHash.size());

    ResultSet resultFindEntryFailureDb = null;
    try {
        Statement stmt = connection.createStatement();
        String findEntry = "SELECT * from " + tableFailureName;
        resultFindEntryFailureDb = stmt.executeQuery(findEntry);
    } catch (SQLException e) {
        e.printStackTrace();
    }

    ResultSet resultFindEntrySequenceDb = null;
    try {
        Statement stmt = connection.createStatement();
        String findEntry = "SELECT * from " + tableName;
        resultFindEntrySequenceDb = stmt.executeQuery(findEntry);
    } catch (SQLException e) {
        e.printStackTrace();
    }

    int countOfFilesAlreadyFoundInFailureHashDb = 0;
    int countOfFilesAlreadyFoundInSequenceDb = 0;
    int totalcountOfFilesAlreadyFoundInFailureHashDb = 0;
    int totalcountOfFilesAlreadyFoundInSequenceDb = 0;
    Set<String> uniqueHashFailure = new HashSet<>();
    Set<String> totaluniqueHashFailure = new HashSet<>();

    try {
        System.out.println("starting hgo through failure db");
        while (resultFindEntryFailureDb.next()) {
            String hash = resultFindEntryFailureDb.getString(1);
            if (filesHash.contains(hash)) {
                // then it is found
                uniqueHashFailure.add(hash);
            }
            totaluniqueHashFailure.add(hash);
        }
        countOfFilesAlreadyFoundInFailureHashDb = uniqueHashFailure.size();
        totalcountOfFilesAlreadyFoundInFailureHashDb = totaluniqueHashFailure.size();

        System.out.println("starting hgo through sequence db");
        Set<String> uniqueHashIndexed = new HashSet<>();
        Set<String> totaluniqueHashIndexed = new HashSet<>();
        while (resultFindEntrySequenceDb.next()) {
            String hash = resultFindEntrySequenceDb.getString(1);
            if (filesHash.contains(hash)) {
                // then it is found
                uniqueHashIndexed.add(hash);
            }
            totaluniqueHashIndexed.add(hash);
        }
        countOfFilesAlreadyFoundInSequenceDb = uniqueHashIndexed.size();
        totalcountOfFilesAlreadyFoundInSequenceDb = totaluniqueHashIndexed.size();
    } catch (SQLException e) {
        e.printStackTrace();
    }

    System.out.println("countOfFilesAlreadyFoundInFailureHashDb = " + countOfFilesAlreadyFoundInFailureHashDb);
    System.out.println(
            "totalcountOfFilesAlreadyFoundInFailureHashDb = " + totalcountOfFilesAlreadyFoundInFailureHashDb);
    System.out.println("countOfFilesAlreadyFoundInSequenceDb = " + countOfFilesAlreadyFoundInSequenceDb);
    System.out.println(
            "totalcountOfFilesAlreadyFoundInSequenceDb = " + totalcountOfFilesAlreadyFoundInSequenceDb);
    return countOfFilesAlreadyFoundInFailureHashDb + countOfFilesAlreadyFoundInSequenceDb;
}
From source file:org.silverpeas.dbbuilder.DBBuilder.java
private static List<String> checkDBStatus() {
    List<String> packagesIntoDB = new ArrayList<String>();
    Connection connection = null;
    Statement stmt = null;
    ResultSet rs = null;
    try {
        connection = ConnectionFactory.getConnection();
        stmt = connection.createStatement();
        rs = stmt.executeQuery("select SR_PACKAGE, SR_VERSION from SR_PACKAGES order by SR_PACKAGE");
        while (rs.next()) {
            String srPackage = rs.getString("SR_PACKAGE");
            String srVersion = rs.getString("SR_VERSION");
            console.printMessage('\t' + srPackage + " v. " + srVersion);
            packagesIntoDB.add(srPackage);
        }
    } catch (SQLException sqlex) {
    } finally {
        DbUtils.closeQuietly(rs);
        DbUtils.closeQuietly(stmt);
        DbUtils.closeQuietly(connection);
    }
    return packagesIntoDB;
}