List of usage examples for java.util.HashSet.add
public boolean add(E e)
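Before the project examples below, a minimal self-contained sketch of the method's contract may help: add returns true if the set did not already contain the element, and false if it did (in which case the set is left unchanged). The class name HashSetAddDemo is illustrative only and does not come from any of the projects listed here.

    // Minimal sketch of the add(E e) contract: the boolean return value
    // reports whether the call actually changed the set.
    import java.util.HashSet;

    public class HashSetAddDemo {
        public static void main(String[] args) {
            HashSet<String> set = new HashSet<>();
            System.out.println(set.add("alpha")); // true  - "alpha" was not present
            System.out.println(set.add("beta"));  // true  - "beta" was not present
            System.out.println(set.add("alpha")); // false - duplicate, set unchanged
            System.out.println(set.size());       // 2
        }
    }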
From source file:com.termmed.utils.FileHelper.java
/**
 * Complete file from array.
 *
 * @param filesArray the files array
 * @param fileName the file name
 * @return the string
 */
public static String completeFileFromArray(HashSet<String> filesArray, String fileName) {
    HashSet<File> fileSet = new HashSet<File>();
    for (String sfile : filesArray) {
        File file = new File(sfile);
        if (file.exists()) {
            fileSet.add(file);
        }
    }
    if (fileSet.size() > 0) {
        File outputFile = new File(fileName);
        CommonUtils.concatFile(fileSet, outputFile);
        return outputFile.getAbsolutePath();
    }
    return null;
}
From source file:com.addthis.hydra.job.spawn.JobAlertUtil.java
/**
 * Count the total byte sizes of files along a certain path via mesh
 *
 * @param jobId The job to check
 * @param dirPath The path to check within the jobId, e.g. split/{{now-1}}/importantfiles/*.gz
 * @return A long representing the total size in bytes of files along the specified path
 */
public static long getTotalBytesFromMesh(MeshyClient meshyClient, String jobId, String dirPath) {
    String meshLookupString = "/job*/" + jobId + "/*/gold/" + expandDateMacro(dirPath);
    if (meshyClient != null) {
        try {
            Collection<FileReference> fileRefs = meshyClient.listFiles(new String[] { meshLookupString });
            HashSet<String> fileRefKeysUsed = new HashSet<>();
            long totalBytes = 0;
            for (FileReference fileRef : fileRefs) {
                // Use StreamSourceMeshy to generate a canonical path key. In particular,
                // strip off any multi-minion prefixes if appropriate.
                String meshFileKey = StreamFileUtil.getCanonicalFileReferenceCacheKey(fileRef.name, pathOff,
                        sortToken, pathTokenOffset);
                if (!fileRefKeysUsed.contains(meshFileKey)) {
                    totalBytes += fileRef.size;
                    fileRefKeysUsed.add(meshFileKey);
                }
            }
            return totalBytes;
        } catch (IOException e) {
            log.warn("Job alert mesh look up failed", e);
        }
    } else {
        log.warn("Received mesh lookup request job={} dirPath={} while meshy client was not instantiated; returning zero",
                jobId, dirPath);
    }
    return 0;
}
From source file:com.projity.field.Select.java
public static String toConfigurationXMLOptions(LinkedHashMap map, String keyPrefix) {
    // MapIterator i = map.i();
    Iterator i = map.keySet().iterator();
    StringBuffer buf = new StringBuffer();
    HashSet duplicateSet = new HashSet(); // don't allow duplicate keys
    while (i.hasNext()) {
        String key = (String) i.next(); // notion of key and value is switched
        String value = (String) map.get(key);
        int dupCount = 2;
        String newKey = key;
        while (duplicateSet.contains(newKey)) {
            newKey = key + "-" + dupCount++;
        }
        key = newKey;
        duplicateSet.add(key);
        if (key == null || key.length() == 0)
            continue;
        if (value == null || value.length() == 0)
            continue;
        key = keyPrefix + key;
        // String key = "<html>" + keyPrefix + ": " + "<b>" + i.getValue() + "</b></html>";
        buf.append(SelectOption.toConfigurationXML(key, value));
    }
    return buf.toString();
}
From source file:com.jaspersoft.jasperserver.jrsh.operation.PackageScanClassResolver.java
/**
 * Scans the given package and retrieves operation types.
 *
 * @param basePackage package to scan
 * @return operation classes
 */
public static Set<Class<? extends Operation>> findOperationClasses(String basePackage) {
    HashSet<Class<? extends Operation>> operationTypes = new HashSet<>();
    MetadataScannerConfig config = readConfig();
    List<String> externalPackagesToScan = config.getPackagesToScan();
    List<String> classes = config.getClasses();
    FilterBuilder filter = new FilterBuilder().includePackage(basePackage);
    //
    // Discover external operation types from configuration file
    //
    if (classes != null) {
        for (String aClass : classes) {
            try {
                Class clz = Class.forName(aClass);
                if (!Modifier.isAbstract(clz.getModifiers()) && Operation.class.isAssignableFrom(clz)) {
                    operationTypes.add(clz);
                }
            } catch (ClassNotFoundException ignored) {
            }
        }
    }
    //
    // Prepare package filter to avoid unnecessary CP scanning
    //
    if (externalPackagesToScan != null) {
        for (String aPackage : externalPackagesToScan) {
            aPackage = chomp(aPackage, ".*");
            filter.includePackage(aPackage);
        }
    }
    //
    // Retrieve internal operation types
    //
    Reflections ref = new Reflections(new SubTypesScanner(), filter);
    for (val subType : ref.getSubTypesOf(Operation.class)) {
        if (!Modifier.isAbstract(subType.getModifiers())) {
            operationTypes.add(subType);
        }
    }
    return operationTypes;
}
From source file:com.termmed.utils.FileHelper.java
/**
 * Gets the modules.
 *
 * @param tmpFile the tmp file
 * @return the modules
 * @throws IOException Signals that an I/O exception has occurred.
 */
public static HashSet<String> getModules(String tmpFile) throws IOException {
    BufferedReader br = getReader(tmpFile);
    br.readLine();
    String line;
    String[] spl;
    HashSet<String> modules = new HashSet<String>();
    while ((line = br.readLine()) != null) {
        spl = line.split("\t", -1);
        if (!modules.contains(spl[3])) {
            modules.add(spl[3]);
        }
    }
    br.close();
    return modules;
}
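A side note on the pattern above: because add returns false when the element is already present, the explicit contains check is not strictly required. A minimal sketch of an equivalent loop body, assuming the same tab-separated input format, could look like this:

    // Equivalent loop relying on add()'s boolean return value;
    // HashSet.add is a no-op (returning false) when the element already exists.
    while ((line = br.readLine()) != null) {
        spl = line.split("\t", -1);
        modules.add(spl[3]); // duplicates are silently ignored
    }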
From source file:edu.msu.cme.rdp.readseq.utils.RmDupSeqs.java
public static void filterDuplicates(String inFile, String outFile, int length, boolean debug) throws IOException {
    HashMap<String, String> idSet = new HashMap<String, String>();
    IndexedSeqReader reader = new IndexedSeqReader(new File(inFile));
    BufferedWriter outWriter = new BufferedWriter(new FileWriter(new File(outFile)));
    Set<String> allseqIDset = reader.getSeqIdSet();
    Sequence seq;
    if (debug) {
        System.out.println("ID\tdescription" + "\tcontained_by_ID\tdescription");
    }
    for (String id : allseqIDset) {
        seq = reader.readSeq(id);
        boolean dup = false;
        HashSet<String> tempdupSet = new HashSet<String>();
        for (String exID : idSet.keySet()) {
            String exSeq = idSet.get(exID);
            if (exSeq.length() >= seq.getSeqString().length()) {
                if (exSeq.contains(seq.getSeqString())) {
                    dup = true;
                    if (debug) {
                        Sequence temp = reader.readSeq(exID);
                        System.out.println(id + "\t" + seq.getDesc() + "\t" + exID + "\t" + temp.getDesc());
                    }
                    break;
                }
            } else if (seq.getSeqString().contains(exSeq)) {
                tempdupSet.add(exID);
            }
        }
        if (!dup) {
            idSet.put(id, seq.getSeqString());
        }
        for (String dupid : tempdupSet) {
            idSet.remove(dupid);
            if (debug) {
                Sequence temp = reader.readSeq(dupid);
                System.out.println(dupid + "\t" + temp.getDesc() + "\t" + id + "\t" + seq.getDesc());
            }
        }
    }
    // get the unique seq
    for (String id : idSet.keySet()) {
        seq = reader.readSeq(id);
        if (seq.getSeqString().length() >= length) {
            outWriter.write(">" + id + "\t" + seq.getDesc() + "\n" + seq.getSeqString() + "\n");
        }
    }
    reader.close();
    outWriter.close();
}
From source file:bookChapter.theoretical.AnalyzeTheoreticalMSMSCalculation.java
/**
 * This method loads all sequences into memory.
 *
 * @param databaseName
 * @return
 */
private static HashSet<DBEntry> getDBEntries(String databaseName) throws IOException {
    HashSet<DBEntry> dbEntries = new HashSet<DBEntry>();
    DBLoader loader = DBLoaderLoader.loadDB(new File(databaseName));
    Protein protein = null;
    // get a crossLinkerName object
    while ((protein = loader.nextProtein()) != null) {
        String sequence = protein.getSequence().getSequence();
        String descrp = protein.getHeader().getDescription(),
                acc = protein.getHeader().getAccession();
        Peptide tmpPep = new Peptide(sequence, new ArrayList<ModificationMatch>());
        double tmpPepMass = tmpPep.getMass();
        DBEntry dbEntry = new DBEntry(tmpPep, descrp, acc, tmpPepMass);
        dbEntries.add(dbEntry);
    }
    return dbEntries;
}
From source file:edu.wpi.margrave.SQSReader.java
protected static Formula handleStatementCondition(JSONObject obj, Formula theCondition, MVocab vocab)
        throws JSONException, MGEBadIdentifierName, MGEUnknownIdentifier, MGEManagerException {
    // Condition block is a conjunctive list of conditions. All must apply.
    // Each condition is a conjunctive list of value sets.
    // Each value set is a disjunctive list of values.
    // Keys in the condition block are functions to apply.
    // Keys in a condition are attribute names. Values are values.

    // Condition block
    // Function : {}
    // Function : {}
    // ...
    JSONArray conditionNames = obj.names();
    for (int iCondition = 0; iCondition < conditionNames.length(); iCondition++) {
        String cFunction = (String) conditionNames.get(iCondition);
        JSONObject condition = (JSONObject) obj.get(cFunction);
        //MEnvironment.errorStream.println(cFunction + ": " + condition);

        // condition is a key:value pair or multiple such pairs. The value may be an array.
        // Each sub-condition must be met.
        HashSet<Formula> thisCondition = new HashSet<Formula>();

        JSONArray subConditionNames = condition.names();
        for (int iSubCondition = 0; iSubCondition < subConditionNames.length(); iSubCondition++) {
            String cSubKey = (String) subConditionNames.get(iSubCondition);
            Object subcondition = condition.get(cSubKey);

            // Subcondition: is it an array or a single value?
            if (subcondition instanceof JSONArray) {
                JSONArray subarr = (JSONArray) subcondition;
                HashSet<Formula> valuedisj = new HashSet<Formula>();
                for (int iValue = 0; iValue < subarr.length(); iValue++) {
                    //MEnvironment.errorStream.println(cFunction + "(" + cSubKey + ", " + subarr.get(iValue) + ")");
                    Formula theatom = makeSQSAtom(vocab, "c", "Condition",
                            cFunction + "<" + cSubKey + "><" + subarr.get(iValue) + ">");
                    valuedisj.add(theatom);
                }
                thisCondition.add(MFormulaManager.makeDisjunction(valuedisj));
            } else {
                //MEnvironment.errorStream.println(cFunction + "(" + cSubKey + ", " + subcondition + ")");
                Formula theatom = makeSQSAtom(vocab, "c", "Condition",
                        cFunction + "<" + cSubKey + "><" + subcondition + ">");
                thisCondition.add(theatom);
            }
        }
        theCondition = MFormulaManager.makeAnd(theCondition, MFormulaManager.makeConjunction(thisCondition));
    }
    return theCondition;
}
From source file:gr.scify.newsum.Utils.java
/**
 * Counts the number of different Sources the Summary refers to
 *
 * @param Summary The summary of interest
 * @return The number of different sources that the summary comes from
 */
public static int countDiffArticles(String[] Summary) {
    // Init string set for sources
    HashSet<String> hsSources = new HashSet<String>();
    // Get first entry, i.e. links and labels
    String sAllLinksAndLabels = Summary[0];
    // if only one link
    if (!sAllLinksAndLabels.contains(NewSumServiceClient.getSecondLevelSeparator())) {
        return 1;
    } else {
        // For every pair
        for (String sTmps : sAllLinksAndLabels.split(NewSumServiceClient.getSecondLevelSeparator())) {
            // Get the link (1st field out of 2)
            hsSources.add(sTmps.split(NewSumServiceClient.getThirdLevelSeparator())[0]);
        }
    }
    // Return unique sources
    return hsSources.size();
}
From source file:com.clustercontrol.repository.util.FacilityTreeCache.java
private static HashSet<String> getAuthorizedRoleIdSet(FacilityInfo facilityInfo, FacilityTreeItem parentTreeItem,
        HashMap<String, ArrayList<String>> objectRoleMap) {
    HashSet<String> roleIdSet = new HashSet<String>();

    // The administrator and Hinemos module roles are always authorized
    roleIdSet.add(RoleIdConstant.ADMINISTRATORS);
    roleIdSet.add(RoleIdConstant.HINEMOS_MODULE);

    // For scope facilities, the owner role is also authorized
    if (facilityInfo.getFacilityType() == FacilityConstant.TYPE_SCOPE) {
        roleIdSet.add(facilityInfo.getOwnerRoleId());
    }

    // Add roles granted object privileges on this facility
    ArrayList<String> roleIdList = objectRoleMap.get(facilityInfo.getFacilityId());
    if (roleIdList != null) {
        roleIdSet.addAll(roleIdList);
    }

    // Inherit authorized roles from the parent scope, if any
    if (parentTreeItem != null && parentTreeItem.getData().getFacilityType() == FacilityConstant.TYPE_SCOPE
            && parentTreeItem.getAuthorizedRoleIdSet() != null) {
        roleIdSet.addAll(parentTreeItem.getAuthorizedRoleIdSet());
    }
    return roleIdSet;
}