List of usage examples for java.util.Vector.size()
public synchronized int size()
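size() returns the number of components currently held in the vector. As a quick orientation before the scraped examples below, here is a minimal, self-contained sketch (the class and variable names are illustrative, not taken from any of the source files listed):

import java.util.Vector;

public class VectorSizeDemo {
    public static void main(String[] args) {
        Vector<String> names = new Vector<String>();
        System.out.println(names.size());   // 0 -> a newly created Vector is empty

        names.add("alpha");
        names.add("beta");
        System.out.println(names.size());   // 2 -> size grows as elements are added

        names.remove("alpha");
        System.out.println(names.size());   // 1 -> size shrinks as elements are removed

        // size() is also the usual loop bound when indexing a Vector,
        // as most of the examples below do with columnNames.size().
        for (int i = 0; i < names.size(); i++) {
            System.out.println(names.elementAt(i));
        }
    }
}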
From source file:dao.GeoTargetQuery.java
/**
 * This method lists the results of the geotarget query for the given IP address.
 * @param conn the connection
 * @param ipAddress the IP address used to geotarget the request
 * @return List the list of matching user pages
 * @throws BaseDaoException - when an error occurs
 **/
public List run(Connection conn, String ipAddress) throws BaseDaoException {
    if (RegexStrUtil.isNull(ipAddress) || conn == null) {
        return null;
    }

    List ip = GlobalConst.httputil.parseIP4(ipAddress);
    StringBuffer iptable = new StringBuffer("ip4_");
    iptable.append(ip.get(0));
    int b = new Integer((String) ip.get(1)).intValue();
    int c = new Integer((String) ip.get(2)).intValue();
    String sb = "select state from cityByCountry as geo, " + iptable.toString() + " as iptable where b=" + b
            + " and c=" + c + " and geo.country=iptable.country and geo.city=iptable.city"
            + " and (geo.state like '%New York%' or geo.state like '%California%') limit 1";

    try {
        PreparedStatement stmt = conn.prepareStatement(sb);
        if (stmt == null) {
            return null;
        }
        ResultSet rs = stmt.executeQuery();
        Vector columnNames = null;
        Userpage userpage = null;
        List pendingList = new ArrayList();
        if (rs != null) {
            columnNames = dbutils.getColumnNames(rs);
        } else {
            return null;
        }
        while (rs.next()) {
            userpage = (Userpage) eop.newObject(DbConstants.USER_PAGE);
            for (int j = 0; j < columnNames.size(); j++) {
                userpage.setValue((String) columnNames.elementAt(j),
                        (String) rs.getString((String) columnNames.elementAt(j)));
            }
            pendingList.add(userpage);
        }
        return pendingList;
    } catch (Exception e) {
        throw new BaseDaoException("Error occurred while executing geotarget run query ", e);
    }
}
From source file:dao.VisitTrafficDailySearchTagCountQuery.java
/**
 * This method lists the results for the search text against today's visit traffic.
 * @param conn the connection
 * @param sString the search text
 * @return List the list of matching keyword results
 * @throws BaseDaoException - when an error occurs
 **/
public List run(Connection conn, String sString) throws BaseDaoException {
    if ((RegexStrUtil.isNull(sString) || conn == null)) {
        return null;
    }

    ResultSet rs = null;
    StringBuffer sb = new StringBuffer("select count(*) from visittraffic where ");
    ArrayList columns = new ArrayList();
    columns.add("url");
    sb.append(sqlSearch.getConstraint(columns, sString));
    sb.append(" and YEAR(visitdate)=YEAR(CURDATE()) and MONTH(visitdate)=MONTH(CURDATE()) and DAY(visitdate)=DAY(CURDATE())");
    logger.info("search query string" + sb.toString());

    try {
        PreparedStatement stmt = conn.prepareStatement(sb.toString());
        rs = stmt.executeQuery();
        Vector columnNames = null;
        Yourkeywords tag = null;
        List pendingList = new ArrayList();
        if (rs != null) {
            columnNames = dbutils.getColumnNames(rs);
        } else {
            return null;
        }
        while (rs.next()) {
            tag = (Yourkeywords) eop.newObject(DbConstants.YOURKEYWORDS);
            for (int j = 0; j < columnNames.size(); j++) {
                tag.setValue((String) columnNames.elementAt(j),
                        (String) rs.getString((String) columnNames.elementAt(j)));
            }
            pendingList.add(tag);
        }
        return pendingList;
    } catch (Exception e) {
        throw new BaseDaoException("Error occurred while executing search in tag run query " + sb.toString(), e);
    }
}
From source file:edu.umn.cs.spatialHadoop.operations.Repartition.java
public static CellInfo[] packInRectangles(Path[] files, Path outFile, OperationsParams params,
        Rectangle fileMBR) throws IOException {
    final Vector<Point> sample = new Vector<Point>();

    float sample_ratio = params.getFloat(SpatialSite.SAMPLE_RATIO, 0.01f);
    long sample_size = params.getLong(SpatialSite.SAMPLE_SIZE, 100 * 1024 * 1024);

    LOG.info("Reading a sample of " + (int) Math.round(sample_ratio * 100) + "%");
    ResultCollector<Point> resultCollector = new ResultCollector<Point>() {
        @Override
        public void collect(Point value) {
            sample.add(value.clone());
        }
    };

    OperationsParams params2 = new OperationsParams(params);
    params2.setFloat("ratio", sample_ratio);
    params2.setLong("size", sample_size);
    params2.setClass("outshape", Point.class, TextSerializable.class);
    Sampler.sample(files, resultCollector, params2);
    LOG.info("Finished reading a sample of size: " + sample.size() + " records");
    long inFileSize = Sampler.sizeOfLastProcessedFile;

    // Compute an approximate MBR to determine the desired number of rows
    // and columns
    Rectangle approxMBR;
    if (fileMBR == null) {
        approxMBR = new Rectangle(Double.MAX_VALUE, Double.MAX_VALUE, -Double.MAX_VALUE, -Double.MAX_VALUE);
        for (Point pt : sample)
            approxMBR.expand(pt);
    } else {
        approxMBR = fileMBR;
    }
    GridInfo gridInfo = new GridInfo(approxMBR.x1, approxMBR.y1, approxMBR.x2, approxMBR.y2);
    FileSystem outFs = outFile.getFileSystem(params);
    @SuppressWarnings("deprecation")
    long blocksize = outFs.getDefaultBlockSize();
    gridInfo.calculateCellDimensions(Math.max(1, (int) ((inFileSize + blocksize / 2) / blocksize)));
    if (fileMBR == null)
        gridInfo.set(-Double.MAX_VALUE, -Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE);
    else
        gridInfo.set(fileMBR);
    Rectangle[] rectangles = RTree.packInRectangles(gridInfo, sample.toArray(new Point[sample.size()]));
    CellInfo[] cellsInfo = new CellInfo[rectangles.length];
    for (int i = 0; i < rectangles.length; i++)
        cellsInfo[i] = new CellInfo(i + 1, rectangles[i]);

    return cellsInfo;
}
From source file:edu.ku.brc.specify.conversion.CollectionInfo.java
/**
 * @param oldDBConn the connection to the old database
 * @param doSkipCheck whether to skip the USYS data consistency check
 * @return the list of CollectionInfo objects for the old database
 */
public static Vector<CollectionInfo> getCollectionInfoList(final Connection oldDBConn,
        final boolean doSkipCheck) {
    //collectionInfoList.clear();

    if (collectionInfoList.isEmpty()) {
        String hostTaxonID = "SELECT Count(tn.TaxonomicUnitTypeID) FROM habitat h "
                + "INNER JOIN taxonname tn ON h.HostTaxonID = tn.TaxonNameID WHERE tn.TaxonomyTypeId = ";

        /*String sql = "SELECT cot.CollectionObjectTypeID, cot.CollectionObjectTypeName, csd.CatalogSeriesDefinitionID, csd.CatalogSeriesID FROM collectionobjecttype cot " +
                     "INNER JOIN catalogseriesdefinition csd on " +
                     "csd.ObjectTypeId = cot.CollectionObjectTypeId WHERE cot.Category = 'Biological' ORDER BY cot.CollectionObjectTypeID, csd.CatalogSeriesID"; */

        String catSeriesSQL = "SELECT SeriesName, CatalogSeriesPrefix, Remarks, LastEditedBy FROM catalogseries WHERE CatalogSeriesID = ";

        String cntTaxonName = "SELECT COUNT(TaxonNameID) FROM taxonname WHERE TaxonName IS NOT NULL AND TaxonomyTypeId = ";

        String colObjCountPerCatSeriesSQL = "SELECT COUNT(cc.CatalogSeriesID) " + //, cc.CatalogSeriesID, cs.SeriesName " +
                "FROM collectionobjectcatalog cc INNER JOIN catalogseries cs ON cc.CatalogSeriesID = cs.CatalogSeriesID "
                + "WHERE cs.CatalogSeriesID = %d GROUP BY cs.CatalogSeriesID";

        String colObjDetCountPerCatSeriesSQL = "SELECT COUNT(cc.CatalogSeriesID) "
                + "FROM determination d INNER JOIN collectionobject co ON d.BiologicalObjectID = co.CollectionObjectID "
                + "INNER JOIN collectionobjectcatalog cc ON co.CollectionObjectID = cc.CollectionObjectCatalogID "
                + "WHERE cc.CatalogSeriesID = %d AND d.TaxonNameID IS NOT NULL GROUP BY cc.CatalogSeriesID";

        String colObjCatSeriesSQL = "SELECT cot.CollectionObjectTypeID, cot.CollectionObjectTypeName, csd.CatalogSeriesDefinitionID, csd.CatalogSeriesID FROM collectionobjecttype cot "
                + "INNER JOIN catalogseriesdefinition csd on csd.ObjectTypeId = cot.CollectionObjectTypeId "
                + "WHERE cot.Category = 'Biological' ORDER BY cot.CollectionObjectTypeID, csd.CatalogSeriesID";

        Statement stmt = null;
        try {
            pw = new PrintWriter(String.format("sql_%d.log", pwPassCnt++));

            log.debug(colObjCatSeriesSQL);
            logSQL("ColObj Cat Series", colObjCatSeriesSQL);

            HashSet<Integer> taxonTypeIdHash = new HashSet<Integer>();

            stmt = oldDBConn.createStatement();
            ResultSet rs = stmt.executeQuery(colObjCatSeriesSQL);
            while (rs.next()) {
                CollectionInfo info = new CollectionInfo(oldDBConn);

                Integer colObjTypeID = rs.getInt(1);
                pw.println(String.format(
                        "%d ================================================================================",
                        colObjTypeID));
                System.err.println("ColObjType: " + colObjTypeID);

                info.setColObjTypeId(colObjTypeID);
                info.setColObjTypeName(rs.getString(2));
                info.setCatSeriesDefId(rs.getInt(3));
                info.setCatSeriesId(rs.getInt(4));

                String sql = String.format(colObjCountPerCatSeriesSQL, info.getCatSeriesId());
                log.debug(sql);
                logSQL("ColObj Count Per Cat Series", sql);
                int colObjCnt = BasicSQLUtils.getCountAsInt(oldDBConn, sql);
                info.setColObjCnt(colObjCnt);

                sql = String.format(colObjDetCountPerCatSeriesSQL, info.getCatSeriesId());
                log.debug(sql);
                logSQL("ColObj Count Determinations Per Cat Series", sql);
                info.setColObjDetTaxCnt(BasicSQLUtils.getCountAsInt(oldDBConn, sql));

                sql = catSeriesSQL + info.getCatSeriesId();
                log.debug(sql);
                logSQL("Cat Series", sql);
                Vector<Object[]> rows = BasicSQLUtils.query(oldDBConn, sql);
                if (rows != null && rows.size() == 1) {
                    Object[] row = rows.get(0);
                    info.setCatSeriesName((String) row[0]);
                    info.setCatSeriesPrefix((String) row[1]);
                    info.setCatSeriesRemarks((String) row[2]);
                    info.setCatSeriesLastEditedBy((String) row[3]);
                } else {
                    log.error("Error getting CollectionInfo for CollectionObjectTypeID: " + rs.getInt(1)
                            + " number of CatalogSeries: " + rows.size());
                }

                if (!doSkipCheck) {
                    String detSQLStr = "SELECT ct.TaxonomyTypeID, (select distinct relatedsubtypevalues FROM usysmetacontrol c "
                            + "LEFT JOIN usysmetafieldsetsubtype fst ON fst.fieldsetsubtypeid = c.fieldsetsubtypeid "
                            + "WHERE objectid = 10290 AND ct.taxonomytypeid = c.relatedsubtypevalues) AS DeterminationTaxonType "
                            + "FROM collectiontaxonomytypes ct WHERE ct.biologicalobjecttypeid = "
                            + info.getColObjTypeId();
                    log.debug(detSQLStr);
                    logSQL("Checking USYS data", detSQLStr);

                    Vector<Object[]> detRows = BasicSQLUtils.query(oldDBConn, detSQLStr);
                    for (Object[] row : detRows) {
                        Integer txnTypeId = (Integer) row[0];
                        String detTxnTypes = (String) row[1];
                        if (detTxnTypes == null) {
                            detTxnTypes = Integer.toString(txnTypeId);
                        }

                        if (StringUtils.isNotEmpty(detTxnTypes)) {
                            String txNameSQL = "SELECT TaxonomyTypeName FROM taxonomytype WHERE TaxonomyTypeID = ";
                            logSQL("Getting Taxon Type Name", txNameSQL + txnTypeId);

                            if (StringUtils.contains(detTxnTypes, ',')) {
                                StringBuilder sb = new StringBuilder();
                                String[] toks = StringUtils.split(detTxnTypes, ',');
                                String dtName = BasicSQLUtils.querySingleObj(oldDBConn, txNameSQL + txnTypeId);

                                sb.append(String.format(
                                        "Warning - There are %d DeterminationTaxonTypes for TaxonObjectType %d (%s) they are:\n",
                                        toks.length, txnTypeId, dtName));
                                for (String id : toks) {
                                    logSQL("Getting Taxon Type Name", txNameSQL + id);
                                    String name = BasicSQLUtils.querySingleObj(oldDBConn, txNameSQL + id);
                                    sb.append(id);
                                    sb.append(" - ");
                                    sb.append(name);
                                    sb.append("\n");
                                }
                                sb.append("\nThis database will need to be fixed by hand before it can be converted.");
                                UIRegistry.showError(sb.toString());
                                System.exit(0);
                                askForFix = true;
                                return null;

                            } else if (StringUtils.isNumeric(detTxnTypes.trim())) {
                                Integer txnType = Integer.parseInt(detTxnTypes);
                                if (!txnType.equals(txnTypeId)) {
                                    String tName = BasicSQLUtils.querySingleObj(oldDBConn, txNameSQL + txnType);
                                    String dtName = BasicSQLUtils.querySingleObj(oldDBConn, txNameSQL + txnTypeId);

                                    StringBuilder sb = new StringBuilder();
                                    sb.append(String.format(
                                            "Warning - The TaxonObjectType %d (%s) in the DeterminationTaxonTypes field\ndoesn't match the actual TaxonObjectType %d (%s)",
                                            txnType, tName, txnTypeId, dtName));
                                    UIRegistry.showError(sb.toString());
                                    askForFix = true;
                                    return null;
                                }
                            }
                        }
                    }
                }

                /*info.setDeterminationTaxonType(detTxnTypeStr);
                for (Integer id : info.getDetTaxonTypeIdList())
                {
                    log.debug("ID: "+id);
                }*/

                // This represents a mapping from what would be the Discipline (Biological Object Type) to the Taxonomic Root
                sql = String.format(
                        "SELECT tt.TaxonomyTypeID, tt.TaxonomyTypeName, tt.KingdomID, tn.TaxonNameID, tn.TaxonName, tu.TaxonomicUnitTypeID FROM taxonomytype AS tt "
                                + "INNER JOIN taxonomicunittype AS tu ON tt.TaxonomyTypeID = tu.TaxonomyTypeID "
                                + "INNER JOIN taxonname AS tn ON tu.TaxonomyTypeID = tn.TaxonomyTypeID "
                                + "INNER JOIN collectiontaxonomytypes AS ct ON tn.TaxonomyTypeID = ct.TaxonomyTypeID "
                                + "WHERE tu.RankID = 0 AND tn.RankID = 0 AND ct.BiologicalObjectTypeID = %d "
                                + "ORDER BY ct.BiologicalObjectTypeID ASC",
                        info.getColObjTypeId());
                log.debug(sql);
                logSQL("Taxon -> Coll Taxon Types", sql);

                rows = BasicSQLUtils.query(oldDBConn, sql);
                if (rows != null) {
                    Object[] row = rows.get(0);
                    int taxonomyTypeID = (Integer) row[0];
                    info.setTaxonomyTypeId(taxonomyTypeID);
                    info.setTaxonomyTypeName((String) row[1]);
                    info.setKingdomId((Integer) row[2]);
                    info.setTaxonNameId((Integer) row[3]);
                    info.setTaxonName((String) row[4]);
                    info.setTaxonomicUnitTypeID((Integer) row[5]);
                    info.setTaxonNameCnt(BasicSQLUtils.getCountAsInt(oldDBConn, cntTaxonName + taxonomyTypeID));

                    log.debug("TaxonomyTypeName: " + info.getTaxonomyTypeName() + " TaxonName: "
                            + info.getTaxonName() + " TaxonomyTypeId: " + info.getTaxonomyTypeId());

                    sql = hostTaxonID + taxonomyTypeID;
                    log.debug(sql);
                    Vector<Object> ttNames = BasicSQLUtils.querySingleCol(oldDBConn, sql);
                    if (ttNames != null && ttNames.size() > 0 && ((Long) ttNames.get(0)) > 0) {
                        info.setSrcHostTaxonCnt((Long) ttNames.get(0));
                    } else {
                        info.setSrcHostTaxonCnt(0);
                    }

                    taxonTypeIdHash.add(taxonomyTypeID);

                } else {
                    log.error("Error getting CollectionInfo for CollectionObjectTypeID: " + rs.getInt(1));
                }

                collectionInfoList.add(info);
                //System.out.println(info.toString());
            }
            rs.close();

            // Here we figure out whether a Taxon Tree that is used by HostTaxonID is associated with a Collection.
            String sql = "SELECT DISTINCT tt.TaxonomyTypeID, tt.TaxonomyTypeName FROM habitat AS h "
                    + "INNER JOIN taxonname AS tn ON h.HostTaxonID = tn.TaxonNameID "
                    + "INNER JOIN taxonomytype AS tt ON tn.TaxonomyTypeID = tt.TaxonomyTypeID";
            logSQL("Check for HostID", sql);

            Vector<Integer> txTypeIds = BasicSQLUtils.queryForInts(oldDBConn, sql);
            HashSet<Integer> txnTypeIdHashSet = new HashSet<Integer>();
            for (Integer txTypId : txTypeIds) {
                Boolean hasColInfo = false;
                for (CollectionInfo colInfo : collectionInfoList) {
                    if (colInfo.getTaxonomyTypeId().equals(txTypId)) {
                        hasColInfo = true;
                    }
                }

                if (!hasColInfo) {
                    txnTypeIdHashSet.add(txTypId);
                }
            }

            // These TaxonTypeIds do not have CollectionInfo
            for (Iterator<Integer> iter = txnTypeIdHashSet.iterator(); iter.hasNext();) {
                Integer taxonomyTypeID = iter.next();
                System.out.println(taxonomyTypeID);

                sql = "SELECT tt.TaxonomyTypeName, tn.TaxonName, tt.KingdomID, tn.TaxonNameID, tn.TaxonomicUnitTypeID FROM taxonomytype AS tt "
                        + "INNER JOIN taxonomicunittype AS tut ON tt.TaxonomyTypeID = tut.TaxonomyTypeID "
                        + "INNER JOIN taxonname AS tn ON tt.TaxonomyTypeID = tn.TaxonomyTypeID AND tut.TaxonomicUnitTypeID = tn.TaxonomicUnitTypeID "
                        + "WHERE tt.TaxonomyTypeID = " + taxonomyTypeID + " AND tn.RankID = 0";
                log.debug(sql);
                logSQL("Get TaxonTypeName etc from TaxonomyTypeID and RankID = 0", sql);

                Vector<Object[]> rows = BasicSQLUtils.query(oldDBConn, sql);
                if (rows.size() != 1) {
                    String msg = "There should only be '1' TaxonTypeName for TaxonomyTypeID:" + taxonomyTypeID;
                    log.error(msg);
                    UIRegistry.showError(msg);
                    continue;
                }

                CollectionInfo colInfo = new CollectionInfo(oldDBConn);

                String taxonTypeName = (String) rows.get(0)[0];
                String taxonRootName = (String) rows.get(0)[1];
                if (StringUtils.isEmpty(taxonRootName)) {
                    taxonRootName = taxonTypeName;
                }

                //colInfo.setColObjTypeId();
                colInfo.setColObjTypeName(taxonRootName);
                //colInfo.setCatSeriesDefId(rs.getInt(3));
                //colInfo.setCatSeriesId(rs.getInt(4));
                colInfo.setCatSeriesName(taxonRootName);
                colInfo.setCatSeriesPrefix("");
                colInfo.setCatSeriesRemarks("");
                colInfo.setCatSeriesLastEditedBy("");
                colInfo.setColObjCnt(1);
                colInfo.setColObjDetTaxCnt(1);

                colInfo.setTaxonomyTypeId(taxonomyTypeID);
                colInfo.setTaxonomyTypeName(taxonTypeName);
                colInfo.setKingdomId((Integer) rows.get(0)[2]);
                colInfo.setTaxonNameId((Integer) rows.get(0)[3]);
                colInfo.setTaxonName(taxonRootName);
                colInfo.setTaxonomicUnitTypeID((Integer) rows.get(0)[4]);
                colInfo.setTaxonNameCnt(BasicSQLUtils.getCountAsInt(oldDBConn, cntTaxonName + taxonomyTypeID));
                colInfo.setSrcHostTaxonCnt(0);

                collectionInfoList.add(colInfo);
            }

            // Do All
            /*String sqlAllTx = "SELECT cot.CollectionObjectTypeID, cot.CollectionObjectTypeName, tt.TaxonomyTypeID, tt.TaxonomyTypeName, tt.KingdomID, tn.TaxonNameID, tn.TaxonName, tn.TaxonomicUnitTypeID " +
                              "FROM collectionobjecttype AS cot " +
                              "INNER JOIN collectiontaxonomytypes as ctt ON cot.CollectionObjectTypeID = ctt.BiologicalObjectTypeID " +
                              "INNER JOIN taxonomytype as tt ON ctt.TaxonomyTypeID = tt.TaxonomyTypeID " +
                              "INNER JOIN taxonname as tn ON tt.TaxonomyTypeID = tn.TaxonomyTypeID " +
                              "WHERE cot.Category = 'Biological' AND tn.ParentTaxonNameID IS NULL";
            log.debug(sqlAllTx);
            Vector<Object[]> rows = BasicSQLUtils.query(oldDBConn, sqlAllTx);
            for (Object[] row : rows)
            {
                int taxonomyTypeID = (Integer)row[2];
                if (taxonTypeIdHash.get(taxonomyTypeID) == null)
                {
                    CollectionInfo info = new CollectionInfo(oldDBConn);
                    info.setColObjTypeId((Integer)row[0]);
                    info.setColObjTypeName((String)row[1]);
                    info.setCatSeriesDefId(null);
                    info.setCatSeriesId(null);
                    info.setCatSeriesName("");
                    info.setCatSeriesPrefix("");
                    info.setCatSeriesRemarks("");
                    info.setCatSeriesLastEditedBy("");
                    info.setTaxonomyTypeId(taxonomyTypeID);
                    info.setTaxonomyTypeName((String)row[3]);
                    info.setKingdomId((Integer)row[4]);
                    info.setTaxonNameId((Integer)row[5]);
                    info.setTaxonName((String)row[6]);
                    info.setTaxonomicUnitTypeID((Integer)row[7]);
                    info.setTaxonNameCnt(BasicSQLUtils.getCountAsInt(oldDBConn, cntTaxonName + taxonomyTypeID));
                    Vector<Object> ttNames = BasicSQLUtils.querySingleCol(oldDBConn, hostTaxonID + taxonomyTypeID);
                    if (ttNames != null && ttNames.size() > 0 && ((Long)ttNames.get(0)) > 0)
                    {
                        info.setSrcHostTaxonCnt((Long)ttNames.get(0));
                    } else
                    {
                        info.setSrcHostTaxonCnt(0);
                    }
                    taxonTypeIdHash.put(taxonomyTypeID, true);
                    collectionInfoList.add(info);
                }
            }*/

            dump();

        } catch (Exception ex) {
            ex.printStackTrace();
            try {
                if (stmt != null) {
                    stmt.close();
                }
            } catch (Exception e) {
            }
        } finally {
            if (pw != null)
                pw.close();
        }
    }

    Collections.sort(collectionInfoList);

    return collectionInfoList;
}
From source file:edu.ku.brc.specify.tasks.RecordSetTask.java
/**
 * @param tableIds
 * @param additionalRS
 * @param msgIfNoRecordsets
 * @return
 */
public static RecordSetIFace askForRecordSet(final Vector<Integer> tableIds,
        final Vector<RecordSetIFace> additionalRS, final boolean msgIfNoRecordsets) {
    UsageTracker.incrUsageCount("RS.ASKRS");

    ChooseRecordSetDlg dlg = new ChooseRecordSetDlg(tableIds);
    if (additionalRS != null && additionalRS.size() > 0) {
        dlg.addAdditionalObjectsAsRecordSets(additionalRS);
    }

    if (dlg.hasRecordSets()) {
        // Commenting out because user want a chance to say
        // yes or cancel even when there is one item
        /*if (dlg.getRecordSets().size() == 1)
        {
            return dlg.getRecordSets().get(0);
        }*/
        // else
        UIHelper.centerAndShow(dlg); // modal (waits for answer here)
        return dlg.isCancelled() ? null : dlg.getSelectedRecordSet();
    }
    // else
    if (msgIfNoRecordsets) {
        UIRegistry.displayLocalizedStatusBarText("RecordSetTask.NoRecordsets");
    }
    return null;
}
From source file:Main.java
public static void cosineSimilarityCW() {
    Iterator<Integer> ids = CommentWordCount.keySet().iterator();
    while (ids.hasNext()) {
        int com_id = ids.next();
        Set<String> words1;
        words1 = CommentWordCount.get(com_id).keySet();

        Iterator<Integer> com_iter = CommentWordCount.keySet().iterator();
        while (com_iter.hasNext()) {
            int id = com_iter.next();
            if (com_id < id) {
                Set<String> words2;
                words2 = CommentWordCount.get(id).keySet();

                Vector<Integer> vecA = new Vector<Integer>();
                Vector<Integer> vecB = new Vector<Integer>();

                // Build the union of the words used by both comments.
                Iterator<String> w1 = words1.iterator();
                Iterator<String> w2 = words2.iterator();
                HashSet<String> imp = new HashSet<String>();
                while (w1.hasNext()) {
                    String s = w1.next();
                    imp.add(s);
                }
                while (w2.hasNext()) {
                    String s = w2.next();
                    imp.add(s);
                }

                // Build the two term-frequency vectors over the combined vocabulary.
                for (String s : imp) {
                    if (CommentWordCount.get(com_id).containsKey(s)) {
                        vecA.add(CommentWordCount.get(com_id).get(s));
                    } else
                        vecA.add(0);
                    if (CommentWordCount.get(id).containsKey(s)) {
                        vecB.add(CommentWordCount.get(id).get(s));
                    } else
                        vecB.add(0);
                }
                //System.out.println("Size : A" + vecA.size() + " Size: B" + vecB.size() + "maxLen:" + maxlength);

                // Cosine similarity, converted to an angle in degrees; smaller angles mean more similar comments.
                double similarity;
                int product = 0;
                double sumA = 0;
                double sumB = 0;
                for (int i = 0; i < vecA.size(); i++) {
                    product += vecA.elementAt(i) * vecB.elementAt(i);
                    sumA += vecA.elementAt(i) * vecA.elementAt(i);
                    sumB += vecB.elementAt(i) * vecB.elementAt(i);
                }
                sumA = Math.sqrt(sumA);
                sumB = Math.sqrt(sumB);
                similarity = product / (sumA * sumB);
                similarity = Math.acos(similarity) * 180 / Math.PI;
                //System.out.println("Result " + com_id + " " + id + " :" + similarity);

                if (similarity < 75) {
                    //System.out.println("Result " + com_id + " " + id);
                    if (Topic.containsKey(com_id)) {
                        int val = Topic.get(com_id);
                        val++;
                        Topic.put(com_id, val);
                    } else
                        Topic.put(com_id, 1);
                    if (Topic.containsKey(id)) {
                        int val = Topic.get(id);
                        val++;
                        Topic.put(id, val);
                    } else
                        Topic.put(id, 1);
                }
            }
        }
    }
}
From source file:com.mirth.connect.connectors.file.filesystems.SftpConnection.java
@Override
public List<FileInfo> listFiles(String fromDir, String filenamePattern, boolean isRegex, boolean ignoreDot)
        throws Exception {
    lastDir = fromDir;
    FilenameFilter filenameFilter;

    if (isRegex) {
        filenameFilter = new RegexFilenameFilter(filenamePattern);
    } else {
        filenameFilter = new WildcardFileFilter(filenamePattern.trim().split("\\s*,\\s*"));
    }

    cwd(fromDir);

    @SuppressWarnings("unchecked")
    Vector<ChannelSftp.LsEntry> entries = client.ls(".");
    List<FileInfo> files = new ArrayList<FileInfo>(entries.size());

    for (Iterator<ChannelSftp.LsEntry> iter = entries.iterator(); iter.hasNext();) {
        ChannelSftp.LsEntry entry = iter.next();

        if (!entry.getAttrs().isDir() && !entry.getAttrs().isLink()) {
            if (((filenameFilter == null) || filenameFilter.accept(null, entry.getFilename()))
                    && !(ignoreDot && entry.getFilename().startsWith("."))) {
                files.add(new SftpFileInfo(fromDir, entry));
            }
        }
    }

    return files;
}
From source file:dao.CollSearchQuery.java
/**
 * This method lists all the results for the search text.
 * @param conn the connection
 * @param stext the search text
 * @return HashSet the set of collabrums that match the search text
 * @throws BaseDaoException - when an error occurs
 **/
public HashSet run(Connection conn, String stext) throws BaseDaoException {
    if ((RegexStrUtil.isNull(stext) || conn == null)) {
        return null;
    }

    ResultSet rs = null;
    StringBuffer sb = new StringBuffer(
            "select distinct collabrumid, name, LEFT(coldesc, 160) as info, hits from collabrum where ");
    ArrayList columns = new ArrayList();
    columns.add("coldesc");
    columns.add("name");
    columns.add("keywords");
    sb.append(sqlSearch.getConstraint(columns, stext));
    sb.append(" order by hits DESC");

    try {
        PreparedStatement stmt = conn.prepareStatement(sb.toString());
        rs = stmt.executeQuery();
        Vector columnNames = null;
        Collabrum collabrum = null;
        HashSet pendingSet = new HashSet();
        if (rs != null) {
            columnNames = dbutils.getColumnNames(rs);
        } else {
            return null;
        }
        while (rs.next()) {
            collabrum = (Collabrum) eop.newObject(DbConstants.COLLABRUM);
            for (int j = 0; j < columnNames.size(); j++) {
                collabrum.setValue((String) columnNames.elementAt(j),
                        (String) rs.getString((String) columnNames.elementAt(j)));
            }
            pendingSet.add(collabrum);
        }
        return pendingSet;
    } catch (Exception e) {
        throw new BaseDaoException("Error occurred while executing search in collabrum run query ", e);
    }
}
From source file:org.esgf.globusonline.GOauthView3Controller.java
private String[] constructEndpointInfos(Vector<EndpointInfo> endpoints) {
    int numEndpoints = endpoints.size();
    String[] endPointNames = new String[numEndpoints];

    for (int i = 0; i < numEndpoints; i++) {
        // we encode this in a string as follows
        // EPNAME^^HOSTS^^MYPROXYSERVER^^ISGLOBUSCONNECT
        endPointNames[i] = endpoints.get(i).getEPName() + "^^" + endpoints.get(i).getHosts() + "^^"
                + endpoints.get(i).getMyproxyServer() + "^^" + endpoints.get(i).isGlobusConnect();
    }
    return endPointNames;
}
From source file:dao.DirectoryFileSearchQuery.java
/**
 * This method lists all the results for the search text from the directories.
 * @param conn the connection
 * @param sString the search text for the directory name
 * @param fileName the file title to match
 * @param dirName the directory name to match
 * @return HashSet the set of matching directory entries
 * @throws BaseDaoException - when an error occurs
 **/
public HashSet run(Connection conn, String sString, String fileName, String dirName) throws BaseDaoException {
    //if ((RegexStrUtil.isNull(sString) || conn == null)) {
    if ((conn == null) || RegexStrUtil.isNull(fileName)) {
        return null;
    }

    //StringBuffer sb = new StringBuffer("select distinct d1.dirname, d2.directoryid, d2.entryid, d2.btitle from directory d1, dirblob d2 where");
    StringBuffer sb = new StringBuffer(
            "select distinct d1.dirname, d2.directoryid, d2.entryid, d2.btitle from directory d1, dirblob d2 where");

    if (!RegexStrUtil.isNull(sString)) {
        ArrayList columns = new ArrayList();
        columns.add("dirname");
        // set the sqlConstraint as " and "
        String sqlConstraint = " and ";
        sb.append(sqlSearch.getConstraint(columns, sString, sqlConstraint));
    } else {
        sb.append("dirpath IS NULL");
    }

    // directory name can be null
    if (!RegexStrUtil.isNull(dirName)) {
        sb.append(" and dirname like '%");
        sb.append(dirName);
        sb.append("%'");
    }

    sb.append(" and d1.directoryid=d2.directoryid and ");
    sb.append("d2.btitle like '%");
    sb.append(fileName);
    sb.append("%'");
    logger.info("sb.toString() = " + sb.toString());

    try {
        PreparedStatement stmt = conn.prepareStatement(sb.toString());
        ResultSet rs = stmt.executeQuery();
        Vector columnNames = null;
        Directory directory = null;
        HashSet pendingSet = new HashSet();
        if (rs != null) {
            columnNames = dbutils.getColumnNames(rs);
        }
        while (rs.next()) {
            directory = (Directory) eop.newObject(DbConstants.DIRECTORY);
            for (int j = 0; j < columnNames.size(); j++) {
                if (((String) (columnNames.elementAt(j))).equalsIgnoreCase(DbConstants.ENTRY_DATE)) {
                    try {
                        directory.setValue(DbConstants.ENTRY_DATE,
                                GlobalConst.dncalendar.getDisplayDate(rs.getTimestamp(DbConstants.ENTRY_DATE)));
                    } catch (ParseException e) {
                        throw new BaseDaoException("could not parse the date for entrydate in directory "
                                + rs.getTimestamp(DbConstants.ENTRY_DATE), e);
                    }
                } else {
                    directory.setValue((String) columnNames.elementAt(j),
                            (String) rs.getString((String) columnNames.elementAt(j)));
                }
            }
            pendingSet.add(directory);
        }
        return pendingSet;
    } catch (Exception e) {
        throw new BaseDaoException("Error occurred while executing search directory run query " + sb.toString(), e);
    }
}