List of usage examples for java.util.Hashtable.keySet()
Method signature: Set keySet()
From source file: org.apache.torque.util.BasePeer.java
/** * Create a new PreparedStatement. It builds a string representation * of a query and a list of PreparedStatement parameters. *//*from ww w .j a v a2 s. c om*/ public static void createPreparedStatement(Criteria criteria, StringBuffer queryString, List params) throws TorqueException { DB db = Torque.getDB(criteria.getDbName()); DatabaseMap dbMap = Torque.getDatabaseMap(criteria.getDbName()); Query query = new Query(); StringStack selectModifiers = query.getSelectModifiers(); StringStack selectClause = query.getSelectClause(); StringStack fromClause = query.getFromClause(); StringStack whereClause = query.getWhereClause(); StringStack orderByClause = query.getOrderByClause(); StringStack orderBy = criteria.getOrderByColumns(); boolean ignoreCase = criteria.isIgnoreCase(); StringStack select = criteria.getSelectColumns(); Hashtable aliases = criteria.getAsColumns(); StringStack modifiers = criteria.getSelectModifiers(); for (int i = 0; i < modifiers.size(); i++) { selectModifiers.add(modifiers.get(i)); } for (int i = 0; i < select.size(); i++) { String columnName = select.get(i); if (columnName.indexOf('.') == -1) { throwMalformedColumnNameException("select", columnName); } String tableName = null; selectClause.add(columnName); int parenPos = columnName.indexOf('('); if (parenPos == -1) { tableName = columnName.substring(0, columnName.indexOf('.')); } else { tableName = columnName.substring(parenPos + 1, columnName.indexOf('.')); // functions may contain qualifiers so only take the last // word as the table name. int lastSpace = tableName.lastIndexOf(' '); if (lastSpace != -1) { tableName = tableName.substring(lastSpace + 1); } } String tableName2 = criteria.getTableForAlias(tableName); if (tableName2 != null) { fromClause.add(new StringBuffer(tableName.length() + tableName2.length() + 1).append(tableName2) .append(' ').append(tableName).toString()); } else { fromClause.add(tableName); } } Iterator it = aliases.keySet().iterator(); while (it.hasNext()) { String key = (String) it.next(); selectClause.add((String) aliases.get(key) + " AS " + key); } Iterator critKeys = criteria.keySet().iterator(); while (critKeys.hasNext()) { String key = (String) critKeys.next(); Criteria.Criterion criterion = (Criteria.Criterion) criteria.getCriterion(key); Criteria.Criterion[] someCriteria = criterion.getAttachedCriterion(); String table = null; for (int i = 0; i < someCriteria.length; i++) { String tableName = someCriteria[i].getTable(); table = criteria.getTableForAlias(tableName); if (table != null) { fromClause.add(new StringBuffer(tableName.length() + table.length() + 1).append(table) .append(' ').append(tableName).toString()); } else { fromClause.add(tableName); table = tableName; } boolean ignorCase = ((criteria.isIgnoreCase() || someCriteria[i].isIgnoreCase()) && (dbMap .getTable(table).getColumn(someCriteria[i].getColumn()).getType() instanceof String)); someCriteria[i].setIgnoreCase(ignorCase); } criterion.setDB(db); StringBuffer sb = new StringBuffer(); criterion.appendPsTo(sb, params); whereClause.add(sb.toString()); } List join = criteria.getJoinL(); if (join != null) { for (int i = 0; i < join.size(); i++) { String join1 = (String) join.get(i); String join2 = (String) criteria.getJoinR().get(i); if (join1.indexOf('.') == -1) { throwMalformedColumnNameException("join", join1); } if (join2.indexOf('.') == -1) { throwMalformedColumnNameException("join", join2); } String tableName = join1.substring(0, join1.indexOf('.')); String table = criteria.getTableForAlias(tableName); if (table != null) 
{ fromClause.add(new StringBuffer(tableName.length() + table.length() + 1).append(table) .append(' ').append(tableName).toString()); } else { fromClause.add(tableName); } int dot = join2.indexOf('.'); tableName = join2.substring(0, dot); table = criteria.getTableForAlias(tableName); if (table != null) { fromClause.add(new StringBuffer(tableName.length() + table.length() + 1).append(table) .append(' ').append(tableName).toString()); } else { fromClause.add(tableName); table = tableName; } boolean ignorCase = (criteria.isIgnoreCase() && (dbMap.getTable(table) .getColumn(join2.substring(dot + 1, join2.length())).getType() instanceof String)); whereClause.add(SqlExpression.buildInnerJoin(join1, join2, ignorCase, db)); } } if (orderBy != null && orderBy.size() > 0) { // Check for each String/Character column and apply // toUpperCase(). for (int i = 0; i < orderBy.size(); i++) { String orderByColumn = orderBy.get(i); if (orderByColumn.indexOf('.') == -1) { throwMalformedColumnNameException("order by", orderByColumn); } String table = orderByColumn.substring(0, orderByColumn.indexOf('.')); // See if there's a space (between the column list and sort // order in ORDER BY table.column DESC). int spacePos = orderByColumn.indexOf(' '); String columnName; if (spacePos == -1) { columnName = orderByColumn.substring(orderByColumn.indexOf('.') + 1); } else { columnName = orderByColumn.substring(orderByColumn.indexOf('.') + 1, spacePos); } ColumnMap column = dbMap.getTable(table).getColumn(columnName); if (column.getType() instanceof String) { if (spacePos == -1) { orderByClause.add(db.ignoreCaseInOrderBy(orderByColumn)); } else { orderByClause.add(db.ignoreCaseInOrderBy(orderByColumn.substring(0, spacePos)) + orderByColumn.substring(spacePos)); } selectClause.add(db.ignoreCaseInOrderBy(table + '.' + columnName)); } else { orderByClause.add(orderByColumn); } } } // Limit the number of rows returned. int limit = criteria.getLimit(); int offset = criteria.getOffset(); String limitString = null; if (offset > 0 && db.supportsNativeOffset()) { switch (db.getLimitStyle()) { case DB.LIMIT_STYLE_MYSQL: limitString = new StringBuffer().append(offset).append(", ").append(limit).toString(); break; case DB.LIMIT_STYLE_POSTGRES: limitString = new StringBuffer().append(limit).append(", ").append(offset).toString(); break; } // Now set the criteria's limit and offset to return the // full resultset since the results are limited on the // server. criteria.setLimit(-1); criteria.setOffset(0); } else if (limit > 0 && db.supportsNativeLimit()) { limitString = String.valueOf(limit); // Now set the criteria's limit to return the full // resultset since the results are limited on the server. criteria.setLimit(-1); } if (limitString != null) { switch (db.getLimitStyle()) { case DB.LIMIT_STYLE_ORACLE: whereClause.add("rownum <= " + limitString); break; /* Don't have a Sybase install to validate this against. (dlr) case DB.LIMIT_STYLE_SYBASE: query.setRowcount(limitString); break; */ default: query.setLimit(limitString); } } String sql = query.toString(); category.debug(sql); queryString.append(sql); }
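The keySet() usage in the example above is the loop over the raw-typed aliases Hashtable: an explicit Iterator over keySet() with a cast on each key and a second cast on the looked-up value. Below is a minimal, self-contained sketch of just that pattern; the class name, alias names, and column expressions are invented for illustration and are not part of Torque's API.

import java.util.Hashtable;
import java.util.Iterator;

public class AliasSelectExample {
    public static void main(String[] args) {
        // Raw-typed Hashtable, as in the Torque code: alias -> column expression
        Hashtable aliases = new Hashtable();
        aliases.put("total", "SUM(invoice.amount)");
        aliases.put("cname", "customer.name");

        // Iterate the keys; with a raw Hashtable both the key and the value need casts
        Iterator it = aliases.keySet().iterator();
        while (it.hasNext()) {
            String key = (String) it.next();
            String column = (String) aliases.get(key);
            System.out.println(column + " AS " + key); // e.g. "customer.name AS cname"
        }
    }
}

On a modern JDK the same loop is usually written with generics and entrySet(), which avoids the casts and the extra get() per key.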
From source file: ca.queensu.cs.sail.mailboxmina2.main.modules.ThreadsModule.java
/** * This heuristic creates associations based on the subject and a time * window Default Time window is 1 month *//*from w w w. j a va 2 s .c om*/ private void heuristicSubject(List<MM2Message> messages, Connection connection) { // This is the msg_id ==> Date Hashtable<String, Date> msg_id_to_date = new Hashtable<String, Date>(); // This is (original) subject ==> msg_id Hashtable<String, String> subject_to_msg_id = new Hashtable<String, String>(); // This is msg_id ==> (processed) subject Hashtable<String, String> msg_id_to_subject = new Hashtable<String, String>(); // This is child ==> parent Hashtable<String, String> msg_id_to_msg_id = new Hashtable<String, String>(); // Capture the most commong reply patterns // Fw: Re: Aw: Wg: Pattern reply_pattern = Pattern.compile( "^(\\[.*?\\] )?(([rR][eE]:)|([aA][wW]:)|([fF][wW]:)|([wW][gG]:)|([fF][wW][dD]:)|([wW][tT][rR]:)|([aA]ntwort:))(.*?)$"); try { for (MM2Message msg : messages) { String msg_id = msg.getHeaderEntry("msg_id"); msg_id_to_date.put(msg_id, msg.getMsg_date()); // We assume the subject to be at least "" String raw_subject = msg.getSubject(); // Determine whether the subject describes a reply or an original posting Matcher matcher = reply_pattern.matcher(raw_subject); if (matcher.matches()) { String stripped_subject = matcher.group(matcher.groupCount()); Main.getLogger().debug(5, this, "I think message is a reply and the original subject is: " + stripped_subject.trim()); // Store the information in the forward table msg_id_to_subject.put(msg_id, stripped_subject.trim()); } else { // We think that this is not a reply - hence it must be an original posting ;-) subject_to_msg_id.put(raw_subject, msg_id); Main.getLogger().debug(5, this, "I think message is an original posting: " + raw_subject.trim()); } } // Now we need to find parent relations by subject. // Still we will apply a sliding window approach using a given offset // to make sure, we don't capture events of people re-using old subject names for (String child_msg_id : msg_id_to_subject.keySet()) { String origSubj = msg_id_to_subject.get(child_msg_id); String parent_msg_id = subject_to_msg_id.get(origSubj); // If we found an entry in the table if (parent_msg_id != null) { // Check if the potential parent is (OFFSET) older than child Date d1 = msg_id_to_date.get(parent_msg_id); Date d2 = DateUtils.addMonths(msg_id_to_date.get(child_msg_id), OFFSET); if (d1.compareTo(d2) >= 0) { Main.getLogger().debug(5, this, "I know that message " + child_msg_id + " has the parent " + parent_msg_id); msg_id_to_msg_id.put(child_msg_id, parent_msg_id); } } } Main.getLogger().debug(5, "original posting subjects resolved = " + subject_to_msg_id.size()); Main.getLogger().debug(5, "subjects resolved replys = " + msg_id_to_subject.size()); // Store the parents and roots into the database Main.getLogger().log("The heuristic could resolve " + msg_id_to_msg_id.size() + " parent relations!"); Main.getLogger().log("Storing associations found by in-reply-to heuristic in the database..."); storeParents(msg_id_to_msg_id, connection); } catch (Exception e) { Main.getLogger().error("Error storing messages for heuristic in-reply!", e); } }
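The heuristic above joins two Hashtables by looping over keySet() of one table (child message id to stripped subject) and probing the other (original subject to message id). A condensed sketch of just that keySet() join, with made-up message ids and subjects:

import java.util.Hashtable;

public class SubjectJoinExample {
    public static void main(String[] args) {
        // child message id -> stripped subject (replies)
        Hashtable<String, String> msgIdToSubject = new Hashtable<>();
        msgIdToSubject.put("msg-2", "release plan");

        // original subject -> message id (original postings)
        Hashtable<String, String> subjectToMsgId = new Hashtable<>();
        subjectToMsgId.put("release plan", "msg-1");

        // child id -> parent id, filled by joining the two tables on the subject
        Hashtable<String, String> msgIdToMsgId = new Hashtable<>();
        for (String childId : msgIdToSubject.keySet()) {
            String parentId = subjectToMsgId.get(msgIdToSubject.get(childId));
            if (parentId != null) {
                msgIdToMsgId.put(childId, parentId);
            }
        }
        System.out.println(msgIdToMsgId); // {msg-2=msg-1}
    }
}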
From source file: hu.sztaki.lpds.pgportal.services.asm.ASMService.java
/** * /*from w w w . ja v a2s. co m*/ * Gets and returns detailed informations about a workflow (e.g. statuses of the current workflow instance, overall * statistics) * * @param userID * - ID of the user * @param workflowID * - ID of the workflow * @return WorkflowInstanceBean object that contains information * @throws ASM_NoValidRuntimeIDException * -it's thrown if there is No valid runtime ID */ public WorkflowInstanceBean getDetails(String userID, String workflowID) throws ASM_NoValidRuntimeIDException { String runtimeID = (String) PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID) .getAllRuntimeInstance().keys().nextElement(); if (runtimeID != null) { if (PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID).getRuntime(runtimeID) .getJobsStatus().isEmpty()) { Hashtable prp = new Hashtable(); prp.put("url", PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID).getWfsID()); ServiceType st = InformationBase.getI().getService("wfs", "portal", prp, new Vector()); try { PortalWfsClient pc = (PortalWfsClient) Class.forName(st.getClientObject()).newInstance(); pc.setServiceURL(st.getServiceUrl()); pc.setServiceID(st.getServiceID()); ComDataBean cmb = new ComDataBean(); cmb.setPortalID(PropertyLoader.getInstance().getProperty("service.url")); cmb.setUserID(userID); cmb.setWorkflowID(workflowID); cmb.setWorkflowRuntimeID(runtimeID); int getmax = 2500; long cnt = 0; int retCnt = getmax; while (retCnt == getmax) { cmb.setSize(cnt); Vector<JobInstanceBean> retVector = new Vector<JobInstanceBean>(); retVector = pc.getWorkflowInstanceJobs(cmb); // //System.out.println("wspgrade doInstanceDetails retVector.size() : " + retVector.size()); for (int i = 0; i < retVector.size(); i++) { JobInstanceBean tmp = retVector.get(i); // //System.out.println("wspgrade doInstanceDetails tmp : " + tmp.getJobID() +", "+ // tmp.getPID() +", "+ tmp.getStatus() +", "+ tmp.getResource()); PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID) .getRuntime(runtimeID).addJobbStatus(tmp.getJobID(), "" + tmp.getPID(), "" + tmp.getStatus(), tmp.getResource(), -1); } // retCnt = retVector.size(); cnt++; } } catch (Exception e) { e.printStackTrace(); } } WorkflowInstanceBean workflowinstance = new WorkflowInstanceBean(); // first key : jobname, second key : status code , second value : instancenumber UserData userdata = PortalCacheService.getInstance().getUser(userID); WorkflowData workflowdata = userdata.getWorkflow(workflowID); WorkflowRunTime runtimedata = workflowdata.getRuntime(runtimeID); Hashtable<String, Hashtable<String, String>> jobinstances = runtimedata.getCollectionJobsStatus(); Iterator jobiterator = jobinstances.keySet().iterator(); while (jobiterator.hasNext()) { String jobname = (String) jobiterator.next(); RunningJobDetailsBean jobinstance = new RunningJobDetailsBean(); jobinstance.setName(jobname); Hashtable<String, String> statuses = jobinstances.get(jobname); Iterator overviewstatusit = statuses.keySet().iterator(); while (overviewstatusit.hasNext()) { String status = overviewstatusit.next().toString(); // Integer status = Integer.parseInt((String)overviewstatusit.next().toString()); OverviewJobStatusBean overview = new OverviewJobStatusBean(); overview.setStatuscode(status); overview.setNumberofinstances(statuses.get(status)); jobinstance.getStatisticsBean().getOverviewedstatuses().add(overview); } // first key : pid value : JobStatusData Hashtable<String, JobStatusData> runtimeinstances = new Hashtable<String, JobStatusData>(); // 
runtimedata.getJobStatus("" + jobname).putAll(runtimeinstances); runtimeinstances.putAll(runtimedata.getJobStatus("" + jobname)); Iterator instanceiterator = runtimeinstances.keySet().iterator(); while (instanceiterator.hasNext()) { ASMJobInstanceBean instance = new ASMJobInstanceBean(); String instanceID = (String) instanceiterator.next(); String pid = runtimeinstances.get(instanceID).getPid(); String resource = runtimeinstances.get(instanceID).getResource(); String status = Integer.toString(runtimeinstances.get(instanceID).getStatus()); String stdout = this.getStdOutFile(userID, workflowID, jobname, pid, runtimeID); String stderr = this.getStdErrFile(userID, workflowID, jobname, pid, runtimeID); String systemlog = this.getSystemLogFile(userID, workflowID, jobname, pid, runtimeID); instance.setId(instanceID); instance.setStatus(status); instance.setErrorText(stderr); instance.setOutputText(stdout); instance.setLogbookText(systemlog); instance.setUsedResource(resource); jobinstance.getInstances().add(instance); } workflowinstance.getJobs().add(jobinstance); } return workflowinstance; } else { throw new ASM_NoValidRuntimeIDException(); } }
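The instance-detail code above walks a nested Hashtable (job name -> status code -> instance count) with an Iterator over keySet() at each level. A stripped-down sketch of that nested traversal, using hypothetical job names and status codes rather than gUSE/WS-PGRADE data:

import java.util.Hashtable;
import java.util.Iterator;

public class NestedStatusExample {
    public static void main(String[] args) {
        // job name -> (status code -> number of instances)
        Hashtable<String, Hashtable<String, String>> jobStatuses = new Hashtable<>();
        Hashtable<String, String> counts = new Hashtable<>();
        counts.put("RUNNING", "3");
        counts.put("FINISHED", "7");
        jobStatuses.put("align", counts);

        // Outer loop: one entry per job
        Iterator<String> jobs = jobStatuses.keySet().iterator();
        while (jobs.hasNext()) {
            String job = jobs.next();
            // Inner loop: one entry per status code of that job
            Iterator<String> statuses = jobStatuses.get(job).keySet().iterator();
            while (statuses.hasNext()) {
                String status = statuses.next();
                System.out.println(job + ": " + status + " x" + jobStatuses.get(job).get(status));
            }
        }
    }
}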
From source file: edu.uga.cs.fluxbuster.features.FeatureCalculator.java
/** * Calculates the previous cluster ratio feature for each cluster generated * on a specific run date and within the a specific window * * @param log_date the run date/*from w w w . j av a 2 s. c om*/ * @param window the number of days previous to use in feature calculation * @return a table of results, the keys of the table are cluster ids and the * values are lists of two elements. The first element is the * last_growth_ratio_prev_clusters value and the second element is the * last_growth_prefix_ratio_prev_clusters value * @throws SQLException if there is and error calculating the feature */ public Hashtable<Integer, List<Double>> calculatePrevClusterRatios(Date log_date, int window) throws SQLException { Hashtable<Integer, List<Double>> retval = new Hashtable<Integer, List<Double>>(); ArrayList<Date> prevDates = getPrevDates(log_date, window); String query1 = properties.getProperty(PREVCLUSTER_QUERY1KEY); String query2 = properties.getProperty(PREVCLUSTER_QUERY2KEY); String logDateStr = df.format(log_date); String completequery = new String(); StringBuffer addQueryBuff = new StringBuffer(); for (int i = 0; i < prevDates.size(); i++) { String prevDateStr = df.format(prevDates.get(i)); StringBuffer querybuf = new StringBuffer(); Formatter formatter = new Formatter(querybuf); formatter.format(query1, logDateStr, logDateStr, prevDateStr, prevDateStr, prevDateStr); addQueryBuff.append(querybuf.toString()); if (i < prevDates.size() - 1) { addQueryBuff.append(" UNION "); } formatter.close(); } if (addQueryBuff.length() > 0) { StringBuffer querybuf = new StringBuffer(); Formatter formatter = new Formatter(querybuf); formatter.format(query2, logDateStr, logDateStr, addQueryBuff.toString()); completequery = querybuf.toString(); formatter.close(); } if (completequery.length() > 0) { ResultSet rs = null; try { rs = dbi.executeQueryWithResult(completequery); while (rs.next()) { ArrayList<Double> temp = new ArrayList<Double>(); temp.add(rs.getDouble(3)); temp.add(rs.getDouble(4)); retval.put(rs.getInt(1), temp); } } catch (Exception e) { if (log.isErrorEnabled()) { log.error(e); } } finally { if (rs != null && !rs.isClosed()) { rs.close(); } } Hashtable<Integer, Double> queryPerDomain = getQueriesPerDomain(log_date); for (Integer clusterid : retval.keySet()) { List<Double> values = retval.get(clusterid); values.set(0, values.get(0) / queryPerDomain.get(clusterid)); values.set(1, values.get(1) / queryPerDomain.get(clusterid)); } } return retval; }
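The final loop above normalizes each cluster's two ratio values in place by iterating retval.keySet() and dividing by a per-cluster count looked up in a second Hashtable under the same key. A minimal sketch of that keySet()-driven normalization with invented numbers:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.List;

public class RatioNormalizeExample {
    public static void main(String[] args) {
        // cluster id -> [growth ratio, prefix ratio] (raw sums)
        Hashtable<Integer, List<Double>> ratios = new Hashtable<>();
        ratios.put(1, new ArrayList<>(Arrays.asList(10.0, 4.0)));

        // cluster id -> queries per domain, used as the normalizer
        Hashtable<Integer, Double> queriesPerDomain = new Hashtable<>();
        queriesPerDomain.put(1, 2.0);

        // The same key drives the lookup in both tables
        for (Integer clusterId : ratios.keySet()) {
            List<Double> values = ratios.get(clusterId);
            double norm = queriesPerDomain.get(clusterId);
            values.set(0, values.get(0) / norm);
            values.set(1, values.get(1) / norm);
        }
        System.out.println(ratios); // {1=[5.0, 2.0]}
    }
}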
From source file: unalcol.termites.boxplots.SucessfulRatesGlobal.java
private static CategoryDataset createDataset(ArrayList<Double> Pf) { DefaultCategoryDataset defaultcategorydataset = new DefaultCategoryDataset(); String sDirectorio = "..\\results\\"; File f = new File(sDirectorio); String extension;//from w ww. ja v a 2s . c o m File[] files = f.listFiles(); Hashtable<String, String> Pop = new Hashtable<>(); PrintWriter escribir; Scanner sc = null; double sucessfulExp = 0.0; Hashtable<String, List> info = new Hashtable(); //String[] aMode = {"levywalk", "lwphevap", "hybrid", "hybrid3", "hybrid4"}; info.put("levywalk", new ArrayList()); info.put("lwphevap", new ArrayList()); info.put("hybrid", new ArrayList()); //info.put("hybrid3", new ArrayList()); //info.put("hybrid4", new ArrayList()); info.put("sequential", new ArrayList()); for (File file : files) { extension = ""; int i = file.getName().lastIndexOf('.'); int p = Math.max(file.getName().lastIndexOf('/'), file.getName().lastIndexOf('\\')); if (i > p) { extension = file.getName().substring(i + 1); } // System.out.println(file.getName() + "extension" + extension); if (file.isFile() && extension.equals("csv") && file.getName().startsWith("dataCollected") && file.getName().contains("mazeon")) { System.out.println(file.getName()); System.out.println("get: " + file.getName()); String[] filenamep = file.getName().split(Pattern.quote("+")); System.out.println("file" + filenamep[8]); int popsize = Integer.valueOf(filenamep[3]); double pf = Double.valueOf(filenamep[5]); String mode = filenamep[7]; int maxIter = -1; //if (!filenamep[8].isEmpty()) { maxIter = Integer.valueOf(filenamep[9]); //} System.out.println("psize:" + popsize); System.out.println("pf:" + pf); System.out.println("mode:" + mode); System.out.println("maxIter:" + maxIter); //String[] aMode = {"random", "levywalk", "sandc", "sandclw"}; //String[] aMode = {"lwphclwevap", "lwsandc2", "lwsandc", "lwphevap2", "lwphevap"}; // String[] aMode = {"levywalk", "lwphevap", "hybrid"}; //String[] aMode = {"levywalk", "lwphevap", "hybrid", "hybrid3", "hybrid4", "sequential"}; String[] aMode = { "levywalk", "lwphevap", "hybrid", "sequential" }; if (isInMode(aMode, mode)) { final List list = new ArrayList(); try { sc = new Scanner(file); } catch (FileNotFoundException ex) { Logger.getLogger(DataCollectedLatexConsolidatorSASOMessagesSend1.class.getName()) .log(Level.SEVERE, null, ex); } int roundNumber = 0; double globalInfoCollected = 0; String[] data = null; while (sc.hasNext()) { String line = sc.nextLine(); data = line.split(","); //System.out.println("data"); roundNumber = Integer.valueOf(data[0]); globalInfoCollected = Double.valueOf(data[4]); if (globalInfoCollected >= 90 && Pf.contains(pf)) { info.get(mode).add(roundNumber); break; } } } } } for (String key : info.keySet()) { System.out.println(key + ":" + info.get(key).size() / 30 * 100.0); defaultcategorydataset.addValue(info.get(key).size() / 30.0 * 100.0, "", getTechniqueName(key)); } return defaultcategorydataset; }
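After parsing the CSV files, the code above turns each mode's list of successful rounds into a percentage by iterating info.keySet(). Leaving the JFreeChart dataset aside, the keySet() aggregation reduces to something like the following sketch (mode names and round numbers are illustrative; the 30-run denominator is taken from the code above):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.List;

public class SuccessRateExample {
    public static void main(String[] args) {
        // mode -> rounds in which a run reached the success threshold
        Hashtable<String, List<Integer>> info = new Hashtable<>();
        info.put("levywalk", new ArrayList<>(Arrays.asList(120, 98, 143)));
        info.put("hybrid", new ArrayList<>(Arrays.asList(80)));

        // 30 experiments per mode, as in the original code
        for (String mode : info.keySet()) {
            double successRate = info.get(mode).size() / 30.0 * 100.0;
            System.out.println(mode + ": " + successRate + "%");
        }
    }
}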
From source file: edu.ku.brc.af.ui.forms.BaseBusRules.java
/** * @param skipTableNames// w ww. ja v a2s. c om * @param idColName * @param dataClassObj * @return */ protected String[] gatherTableFieldsForDelete(final String[] skipTableNames, final String idColName, final Class<?> dataClassObj) { boolean debug = false; int fieldCnt = 0; Hashtable<String, Vector<String>> fieldHash = new Hashtable<String, Vector<String>>(); HashSet<String> skipHash = new HashSet<String>(); if (skipTableNames != null) { for (String name : skipTableNames) { skipHash.add(name); } } for (DBTableInfo ti : DBTableIdMgr.getInstance().getTables()) { String tblName = ti.getName(); if (!skipHash.contains(tblName)) { if (dataClassObj != null) { for (DBRelationshipInfo ri : ti.getRelationships()) { if (ri.getDataClass() == dataClassObj) { String colName = ri.getColName(); if (StringUtils.isNotEmpty(colName) /*&& !colName.equals(idColName)*/ //I am pretty sure the following condition reproduces the logic in revision prior to 11305, //if skipHash.contains test is removed above. //&& (!skipHash.contains(tblName) || (skipHash.contains(tblName) && !colName.equals(idColName))) ) { Vector<String> fieldList = fieldHash.get(tblName); if (fieldList == null) { fieldList = new Vector<String>(); fieldHash.put(tblName, fieldList); } fieldList.add(ri.getColName()); fieldCnt++; } } } } } } if (debug) { System.out.println("Fields to be checked:"); for (String tableName : fieldHash.keySet()) { System.out.println(" Table:" + tableName + " "); for (String fName : fieldHash.get(tableName)) { System.out.println(" Field:" + fName); } } } int inx = 0; String[] tableFieldNamePairs = new String[fieldCnt * 2]; for (String tableName : fieldHash.keySet()) { for (String fName : fieldHash.get(tableName)) { ///System.out.println("["+tableName+"]["+fName+"]"); tableFieldNamePairs[inx++] = tableName; tableFieldNamePairs[inx++] = fName; } } return tableFieldNamePairs; }
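The method above iterates fieldHash.keySet() to flatten a table-name-to-column-list Hashtable into a flat array of alternating table/field names. A compact sketch of that flattening step with hypothetical table and column names:

import java.util.Arrays;
import java.util.Hashtable;
import java.util.Vector;

public class FlattenPairsExample {
    public static void main(String[] args) {
        // table name -> foreign-key columns that reference the record being deleted
        Hashtable<String, Vector<String>> fieldHash = new Hashtable<>();
        fieldHash.put("collectionobject", new Vector<>(Arrays.asList("AgentID", "CatalogerID")));
        fieldHash.put("loan", new Vector<>(Arrays.asList("AgentID")));

        // Count the fields first so the pair array can be sized exactly
        int fieldCnt = 0;
        for (String table : fieldHash.keySet()) {
            fieldCnt += fieldHash.get(table).size();
        }

        // Flatten into alternating table/field entries
        String[] pairs = new String[fieldCnt * 2];
        int inx = 0;
        for (String table : fieldHash.keySet()) {
            for (String field : fieldHash.get(table)) {
                pairs[inx++] = table;
                pairs[inx++] = field;
            }
        }
        System.out.println(Arrays.toString(pairs));
    }
}

The two passes over keySet() see the same iteration order because the Hashtable is not modified in between.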
From source file: org.apache.torque.util.BasePeer.java
/** * Method to create an SQL query based on values in a Criteria. Note that * final manipulation of the limit and offset are performed when the query * is actually executed.//from w w w .j a va2s. c o m * * @param criteria A Criteria. * @exception TorqueException Trouble creating the query string. */ static Query createQuery(Criteria criteria) throws TorqueException { Query query = new Query(); DB db = Torque.getDB(criteria.getDbName()); DatabaseMap dbMap = Torque.getDatabaseMap(criteria.getDbName()); StringStack selectModifiers = query.getSelectModifiers(); StringStack selectClause = query.getSelectClause(); StringStack fromClause = query.getFromClause(); StringStack whereClause = query.getWhereClause(); StringStack orderByClause = query.getOrderByClause(); StringStack groupByClause = query.getGroupByClause(); StringStack orderBy = criteria.getOrderByColumns(); StringStack groupBy = criteria.getGroupByColumns(); boolean ignoreCase = criteria.isIgnoreCase(); StringStack select = criteria.getSelectColumns(); Hashtable aliases = criteria.getAsColumns(); StringStack modifiers = criteria.getSelectModifiers(); for (int i = 0; i < modifiers.size(); i++) { selectModifiers.add(modifiers.get(i)); } for (int i = 0; i < select.size(); i++) { String columnName = select.get(i); if (columnName.indexOf('.') == -1 && columnName.indexOf('*') == -1 && !columnName.equalsIgnoreCase("NULL")) { throwMalformedColumnNameException("select", columnName); } String tableName = null; selectClause.add(columnName); if (!columnName.equalsIgnoreCase("NULL")) { int parenPos = columnName.indexOf('('); if (parenPos == -1) { tableName = columnName.substring(0, columnName.lastIndexOf('.')); } else if (columnName.indexOf('.') > -1) { tableName = columnName.substring(parenPos + 1, columnName.lastIndexOf('.')); // functions may contain qualifiers so only take the last // word as the table name. 
int lastSpace = tableName.lastIndexOf(' '); if (lastSpace != -1) { tableName = tableName.substring(lastSpace + 1); } } String tableName2 = criteria.getTableForAlias(tableName); if (tableName2 != null) { fromClause.add(new StringBuffer(tableName.length() + tableName2.length() + 1).append(tableName2) .append(' ').append(tableName).toString()); } else { fromClause.add(tableName); } } } Iterator it = aliases.keySet().iterator(); while (it.hasNext()) { String key = (String) it.next(); selectClause.add((String) aliases.get(key) + " AS " + key); } Iterator critKeys = criteria.keySet().iterator(); while (critKeys.hasNext()) { String key = (String) critKeys.next(); Criteria.Criterion criterion = (Criteria.Criterion) criteria.getCriterion(key); Criteria.Criterion[] someCriteria = criterion.getAttachedCriterion(); String table = null; for (int i = 0; i < someCriteria.length; i++) { String tableName = someCriteria[i].getTable(); table = criteria.getTableForAlias(tableName); if (table != null) { fromClause.add(new StringBuffer(tableName.length() + table.length() + 1).append(table) .append(' ').append(tableName).toString()); } else { fromClause.add(tableName); table = tableName; } boolean ignorCase = ((criteria.isIgnoreCase() || someCriteria[i].isIgnoreCase()) && (dbMap .getTable(table).getColumn(someCriteria[i].getColumn()).getType() instanceof String)); someCriteria[i].setIgnoreCase(ignorCase); } criterion.setDB(db); whereClause.add(criterion.toString()); } List join = criteria.getJoinL(); if (join != null) { for (int i = 0; i < join.size(); i++) { String join1 = (String) join.get(i); String join2 = (String) criteria.getJoinR().get(i); if (join1.indexOf('.') == -1) { throwMalformedColumnNameException("join", join1); } if (join2.indexOf('.') == -1) { throwMalformedColumnNameException("join", join2); } String tableName = join1.substring(0, join1.indexOf('.')); String table = criteria.getTableForAlias(tableName); if (table != null) { fromClause.add(new StringBuffer(tableName.length() + table.length() + 1).append(table) .append(' ').append(tableName).toString()); } else { fromClause.add(tableName); } int dot = join2.indexOf('.'); tableName = join2.substring(0, dot); table = criteria.getTableForAlias(tableName); if (table != null) { fromClause.add(new StringBuffer(tableName.length() + table.length() + 1).append(table) .append(' ').append(tableName).toString()); } else { fromClause.add(tableName); table = tableName; } boolean ignorCase = (criteria.isIgnoreCase() && (dbMap.getTable(table) .getColumn(join2.substring(dot + 1, join2.length())).getType() instanceof String)); whereClause.add(SqlExpression.buildInnerJoin(join1, join2, ignorCase, db)); } } // need to allow for multiple group bys if (groupBy != null && groupBy.size() > 0) { for (int i = 0; i < groupBy.size(); i++) { String groupByColumn = groupBy.get(i); if (groupByColumn.indexOf('.') == -1) { throwMalformedColumnNameException("group by", groupByColumn); } groupByClause.add(groupByColumn); } } Criteria.Criterion having = criteria.getHaving(); if (having != null) { //String groupByString = null; query.setHaving(having.toString()); } if (orderBy != null && orderBy.size() > 0) { // Check for each String/Character column and apply // toUpperCase(). 
for (int i = 0; i < orderBy.size(); i++) { String orderByColumn = orderBy.get(i); if (orderByColumn.indexOf('.') == -1) { throwMalformedColumnNameException("order by", orderByColumn); } String tableName = orderByColumn.substring(0, orderByColumn.lastIndexOf('.')); String table = criteria.getTableForAlias(tableName); if (table == null) { table = tableName; } // See if there's a space (between the column list and sort // order in ORDER BY table.column DESC). int spacePos = orderByColumn.indexOf(' '); String columnName; if (spacePos == -1) { columnName = orderByColumn.substring(orderByColumn.lastIndexOf('.') + 1); } else { columnName = orderByColumn.substring(orderByColumn.lastIndexOf('.') + 1, spacePos); } ColumnMap column = dbMap.getTable(table).getColumn(columnName); if (column.getType() instanceof String) { if (spacePos == -1) { orderByClause.add(db.ignoreCaseInOrderBy(orderByColumn)); } else { orderByClause.add(db.ignoreCaseInOrderBy(orderByColumn.substring(0, spacePos)) + orderByColumn.substring(spacePos)); } selectClause.add(db.ignoreCaseInOrderBy(table + '.' + columnName)); } else { orderByClause.add(orderByColumn); } } } // Limit the number of rows returned. int limit = criteria.getLimit(); int offset = criteria.getOffset(); String limitString = null; if (offset > 0 && db.supportsNativeOffset()) { switch (db.getLimitStyle()) { case DB.LIMIT_STYLE_MYSQL: limitString = new StringBuffer().append(offset).append(", ").append(limit).toString(); break; case DB.LIMIT_STYLE_POSTGRES: limitString = new StringBuffer().append(limit).append(", ").append(offset).toString(); break; } // The following is now done in createQueryString() to enable this // method to be used as part of Criteria.toString() without altering // the criteria itself. The commented code is retained here to // make it easier to understand how the criteria is built into a // query. // Now set the criteria's limit and offset to return the // full resultset since the results are limited on the // server. //criteria.setLimit(-1); //criteria.setOffset(0); } else if (limit > 0 && db.supportsNativeLimit()) { limitString = String.valueOf(limit); // The following is now done in createQueryString() to enable this // method to be used as part of Criteria.toString() without altering // the criteria itself. The commented code is retained here to // make it easier to understand how the criteria is built into a // query. // Now set the criteria's limit to return the full // resultset since the results are limited on the server. //criteria.setLimit(-1); } if (limitString != null) { switch (db.getLimitStyle()) { case DB.LIMIT_STYLE_ORACLE: whereClause.add("rownum <= " + limitString); break; default: query.setLimit(limitString); } } return query; }
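createQuery() repeats the same alias handling as createPreparedStatement(): an Iterator over aliases.keySet() followed by a get() per key. Where the extra lookup matters, the conventional alternative is to iterate entrySet() instead. A small sketch of that variant (this is not how Torque itself writes it; the alias data is invented):

import java.util.Hashtable;
import java.util.Map;

public class EntrySetVariantExample {
    public static void main(String[] args) {
        Hashtable<String, String> aliases = new Hashtable<>();
        aliases.put("cname", "customer.name");

        // Each entry carries key and value together, so no second lookup is needed
        for (Map.Entry<String, String> entry : aliases.entrySet()) {
            System.out.println(entry.getValue() + " AS " + entry.getKey());
        }
    }
}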
From source file: com.flexive.tests.browser.AdmContentTest.java
/** * fill the content in// ww w.ja v a 2s .c o m * @param contents key-value pairs representing the content */ private void fillInContent(Hashtable<String, Object> contents) { String htmlSrc = selenium.getHTMLSource("<div ", "</div>", "<input "); final String MARK = "<div class=\"display\" title=\"\" style=\"\">"; String[] items1 = htmlSrc.split(MARK); final String MARK_ = "<div class=display title= style=>"; String[] items2 = htmlSrc.split(MARK_); String[] items = new String[items1.length + items2.length]; System.arraycopy(items1, 0, items, 0, items1.length); System.arraycopy(items2, 0, items, items1.length, items2.length); String tmpS; String name; Hashtable<String, Hashtable<String, String>> inputs = new Hashtable<String, Hashtable<String, String>>(); Hashtable<String, String> params; int begin; int b, e; for (int i = 1; i < items.length; i++) { tmpS = items[i]; begin = tmpS.indexOf("</div>"); name = tmpS.substring(0, begin).trim(); begin = tmpS.indexOf("<input ", begin); params = new Hashtable<String, String>(); if (begin > 0) { begin += 7; b = tmpS.indexOf("id=\"", begin) + 4; if (b > 0) { e = tmpS.indexOf("\"", b); } else { b = tmpS.indexOf("id=", begin) + 3; e = tmpS.indexOf(" ", b); } params.put("id", tmpS.substring(b, e)); b = tmpS.indexOf("type=\"", begin) + 6; if (b > 0) { e = tmpS.indexOf("\"", b); } else { b = tmpS.indexOf("type=", begin) + 5; e = tmpS.indexOf(" ", b); } params.put("type", tmpS.substring(b, e).toLowerCase()); inputs.put(name, params); } } String type; String id; Object value; for (String curName : contents.keySet()) { params = inputs.get(curName); if (params != null) { type = params.get("type"); id = params.get("id"); value = contents.get(curName); if (type.equals("text")) { selenium.type(id, value.toString()); } else if (type.equals("checkbox")) { setCheckboxState(id, (Boolean) value); } } else { // TODO throw not found... } } }
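fillInContent() walks contents.keySet() and, for each field name, looks up the parsed input metadata in a second Hashtable before deciding how to fill the form. Ignoring the Selenium plumbing, the keySet() dispatch looks roughly like the sketch below; the field names, ids, and types are invented for illustration.

import java.util.Hashtable;

public class FormFillExample {
    public static void main(String[] args) {
        // field label -> value the test wants to enter
        Hashtable<String, Object> contents = new Hashtable<>();
        contents.put("Title", "My content");
        contents.put("Published", Boolean.TRUE);

        // field label -> (attribute -> value) parsed from the page's <input> tags
        Hashtable<String, Hashtable<String, String>> inputs = new Hashtable<>();
        Hashtable<String, String> titleParams = new Hashtable<>();
        titleParams.put("id", "frm:title");
        titleParams.put("type", "text");
        inputs.put("Title", titleParams);

        for (String name : contents.keySet()) {
            Hashtable<String, String> params = inputs.get(name);
            if (params == null) {
                System.out.println("no input found for " + name);
                continue;
            }
            // Dispatch on the input type, as the test does with selenium.type()/setCheckboxState()
            if ("text".equals(params.get("type"))) {
                System.out.println("type '" + contents.get(name) + "' into " + params.get("id"));
            } else if ("checkbox".equals(params.get("type"))) {
                System.out.println("set checkbox " + params.get("id") + " to " + contents.get(name));
            }
        }
    }
}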
From source file: edu.ku.brc.af.ui.weblink.WebLinkButton.java
/** * @return/*from w w w. j a v a2 s. c o m*/ */ private CustomDialog createPromptDlg(final Hashtable<String, String> backupHash) { if (webLinkDef != null) { // Start by getting the data needed to build the URL // so first see if we need to prompt for data. int promptCnt = webLinkDef.getPromptCount(); if (promptCnt > 0 || backupHash.size() > 0) { textFieldHash.clear(); promptCnt += backupHash != null ? backupHash.size() : 0; String rowDef = createDuplicateJGoodiesDef("p", "4px", promptCnt); //$NON-NLS-1$ //$NON-NLS-2$ PanelBuilder pb = new PanelBuilder(new FormLayout("p,2px,f:p:g", rowDef)); //$NON-NLS-1$ CellConstraints cc = new CellConstraints(); DocumentAdaptor dla = new DocumentAdaptor() { @Override protected void changed(DocumentEvent e) { super.changed(e); boolean enableOK = true; for (JTextField tf : textFieldHash.values()) { if (tf.getText().length() == 0) { enableOK = false; break; } } promptDialog.getOkBtn().setEnabled(enableOK); } }; int y = 1; for (WebLinkDefArg arg : webLinkDef.getArgs()) { if (arg.isPrompt() && valueHash.get(arg.getName()) == null) { JTextField txtField = createTextField(15); txtField.getDocument().addDocumentListener(dla); textFieldHash.put(arg.getName(), txtField); String label = arg.getTitle(); if (StringUtils.isEmpty(label)) { label = arg.getName(); } pb.add(createFormLabel(label), cc.xy(1, y)); pb.add(txtField, cc.xy(3, y)); y += 2; } } if (backupHash != null) { for (String name : backupHash.keySet()) { JTextField txtField = createTextField(15); txtField.getDocument().addDocumentListener(dla); textFieldHash.put(name, txtField); pb.add(createLabel(backupHash.get(name), SwingConstants.RIGHT), cc.xy(1, y)); pb.add(txtField, cc.xy(3, y)); y += 2; } } pb.setDefaultDialogBorder(); return new CustomDialog((Frame) getTopWindow(), getResourceString("WBLK_PROMPT_DATA"), true, CustomDialog.OKCANCELHELP, pb.getPanel()); } } return null; }
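The prompt dialog above creates one labeled text field per entry in backupHash, iterating keySet() and fetching the label text with get(). Leaving out the Swing/JGoodies layout code, the core loop amounts to the following sketch; the argument names and labels are illustrative, and a String stands in for the JTextField.

import java.util.Hashtable;

public class PromptRowsExample {
    public static void main(String[] args) {
        // argument name -> label to show next to its text field
        Hashtable<String, String> backupHash = new Hashtable<>();
        backupHash.put("catalogNumber", "Catalog Number");
        backupHash.put("taxon", "Taxon");

        // field name -> the input component created for it
        Hashtable<String, String> textFieldHash = new Hashtable<>();
        for (String name : backupHash.keySet()) {
            textFieldHash.put(name, "<text field for " + name + ">");
            System.out.println("row: label='" + backupHash.get(name) + "' field=" + name);
        }
    }
}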
From source file: edu.uga.cs.fluxbuster.features.FeatureCalculator.java
/** * Calculates the cluster novelty feature for each cluster generated * on a specific run date./*from w ww . j a va 2 s .c om*/ * * @param log_date the run date * @param window the number of days previous to use in feature calculation * @return a table of values where the keys are cluster ids and the values * are the feature values * @throws SQLException if there is an error calculating the feature values */ public Map<Integer, Double> calculateNoveltyFeature(Date log_date, int window) throws SQLException { HashMap<Integer, Double> retval = new HashMap<Integer, Double>(); ArrayList<Date> prevDates = getPrevDates(log_date, window); if (prevDates.size() > 0) { StringBuffer querybuf = new StringBuffer(); Formatter formatter = new Formatter(querybuf); String curdatestr = df.format(log_date); formatter.format(properties.getProperty(NOVELTY_QUERY1_1KEY), curdatestr, curdatestr, curdatestr, curdatestr); for (Date prevDate : prevDates) { formatter.format(" " + properties.getProperty(NOVELTY_QUERY1_2KEY) + " ", df.format(prevDate)); } formatter.format(properties.getProperty(NOVELTY_QUERY1_3KEY), curdatestr, curdatestr); ResultSet rs2 = null; Hashtable<Integer, Hashtable<String, Long>> new_resolved_ips = new Hashtable<Integer, Hashtable<String, Long>>(); try { rs2 = dbi.executeQueryWithResult(querybuf.toString()); while (rs2.next()) { int cluster_id = rs2.getInt(2); if (!new_resolved_ips.containsKey(cluster_id)) { new_resolved_ips.put(cluster_id, new Hashtable<String, Long>()); } String secondLevelDomainName = rs2.getString(1); long newips = rs2.getLong(3); Hashtable<String, Long> clustertable = new_resolved_ips.get(cluster_id); clustertable.put(secondLevelDomainName, newips); } } catch (Exception e) { if (log.isErrorEnabled()) { log.error(e); } } finally { if (rs2 != null && !rs2.isClosed()) { rs2.close(); } formatter.close(); } Hashtable<String, List<Integer>> numDays = new Hashtable<String, List<Integer>>(); for (Date prevDate : prevDates) { String prevDateStr = df.format(prevDate); querybuf = new StringBuffer(); formatter = new Formatter(querybuf); formatter.format(properties.getProperty(NOVELTY_QUERY2KEY), curdatestr, prevDateStr, curdatestr, prevDateStr); ResultSet rs3 = null; try { rs3 = dbi.executeQueryWithResult(querybuf.toString()); while (rs3.next()) { String sldn = rs3.getString(1); if (!numDays.containsKey(sldn)) { numDays.put(sldn, new ArrayList<Integer>()); } Date pd = rs3.getDate(2); DateTime start = new DateTime(pd.getTime()); DateTime end = new DateTime(log_date.getTime()); Days d = Days.daysBetween(start, end); int diffDays = d.getDays(); numDays.get(sldn).add(diffDays); } } catch (Exception e) { if (log.isErrorEnabled()) { log.error(e); } } finally { if (rs3 != null && !rs3.isClosed()) { rs3.close(); } formatter.close(); } } Hashtable<Integer, List<Float>> clusterValues = new Hashtable<Integer, List<Float>>(); for (int clusterID : new_resolved_ips.keySet()) { clusterValues.put(clusterID, new ArrayList<Float>()); Hashtable<String, Long> sldnValues = new_resolved_ips.get(clusterID); for (String sldn : sldnValues.keySet()) { if (numDays.keySet().contains(sldn)) { long newIPCount = sldnValues.get(sldn); float f = ((float) newIPCount) / Collections.max(numDays.get(sldn)); clusterValues.get(clusterID).add(f); } } } for (int clusterID : clusterValues.keySet()) { if (clusterValues.get(clusterID) == null) { //I dont think it is possible for this to ever be true retval.put(clusterID, null); } else { double sum = 0; for (double d : clusterValues.get(clusterID)) { sum += d; } double val = 
0; if (clusterValues.get(clusterID).size() > 0) { val = sum / clusterValues.get(clusterID).size(); } retval.put(clusterID, val); } } } return retval; }
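The novelty calculation finishes with two nested keySet() loops: the outer one per cluster, the inner one per second-level domain, using numDays.keySet().contains(...) as a membership test before dividing by the maximum day gap, then averaging per cluster. A reduced sketch of that shape with made-up cluster ids, domains, and counts; note that containsKey() expresses the same membership test directly.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Hashtable;
import java.util.List;

public class NoveltyAverageExample {
    public static void main(String[] args) {
        // cluster id -> (domain -> count of newly resolved IPs)
        Hashtable<Integer, Hashtable<String, Long>> newResolvedIps = new Hashtable<>();
        Hashtable<String, Long> domains = new Hashtable<>();
        domains.put("example.com", 6L);
        domains.put("example.net", 3L);
        newResolvedIps.put(1, domains);

        // domain -> day gaps to the previous runs it appeared in
        Hashtable<String, List<Integer>> numDays = new Hashtable<>();
        numDays.put("example.com", new ArrayList<>(Arrays.asList(1, 3)));

        for (Integer clusterId : newResolvedIps.keySet()) {
            List<Float> perDomain = new ArrayList<>();
            Hashtable<String, Long> counts = newResolvedIps.get(clusterId);
            for (String domain : counts.keySet()) {
                // keySet().contains(...) in the original; containsKey(...) is equivalent
                if (numDays.containsKey(domain)) {
                    long newIps = counts.get(domain);
                    int maxGap = Collections.max(numDays.get(domain));
                    perDomain.add((float) newIps / maxGap);
                }
            }
            double sum = 0;
            for (float f : perDomain) {
                sum += f;
            }
            double novelty = perDomain.isEmpty() ? 0 : sum / perDomain.size();
            System.out.println("cluster " + clusterId + " novelty = " + novelty); // 6/3 = 2.0
        }
    }
}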