Usage examples for java.util.HashSet.size()
public int size()
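Returns the number of elements in this set (its cardinality). Before the real-world listings below, a minimal self-contained sketch; the class and variable names are illustrative only:

import java.util.HashSet;

public class HashSetSizeDemo {
    public static void main(String[] args) {
        HashSet<String> colors = new HashSet<String>();
        colors.add("red");
        colors.add("green");
        colors.add("red"); // duplicate; a set stores it only once
        System.out.println(colors.size()); // prints 2

        // size() is often used to pre-size the array passed to toArray(...),
        // a pattern that recurs in the listings below:
        String[] asArray = colors.toArray(new String[colors.size()]);
        System.out.println(asArray.length); // prints 2
    }
}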
From source file:org.compass.core.lucene.engine.store.DefaultLuceneSearchEngineStore.java
public String[] internalCalcSubIndexes(String[] subIndexes, String[] aliases, Class[] types, boolean poly) {
    if (aliases == null && types == null) {
        return calcSubIndexes(subIndexes, aliases);
    }
    HashSet<String> aliasesSet = new HashSet<String>();
    if (aliases != null) {
        for (String alias : aliases) {
            ResourceMapping resourceMapping = mapping.getRootMappingByAlias(alias);
            if (resourceMapping == null) {
                throw new IllegalArgumentException("No root mapping found for alias [" + alias + "]");
            }
            aliasesSet.add(resourceMapping.getAlias());
            if (poly) {
                aliasesSet.addAll(Arrays.asList(resourceMapping.getExtendingAliases()));
            }
        }
    }
    if (types != null) {
        for (Class type : types) {
            ResourceMapping resourceMapping = mapping.getRootMappingByClass(type);
            if (resourceMapping == null) {
                throw new IllegalArgumentException("No root mapping found for class [" + type + "]");
            }
            aliasesSet.add(resourceMapping.getAlias());
            if (poly) {
                aliasesSet.addAll(Arrays.asList(resourceMapping.getExtendingAliases()));
            }
        }
    }
    return calcSubIndexes(subIndexes, aliasesSet.toArray(new String[aliasesSet.size()]));
}
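A note on the last line above: passing a pre-sized array, new String[aliasesSet.size()], is the classic toArray idiom used throughout these listings. Passing an empty array is an equally valid alternative, since toArray allocates a correctly sized array whenever the one given is too small. A small sketch of both forms, with illustrative names:

import java.util.Arrays;
import java.util.HashSet;

public class ToArrayIdioms {
    public static void main(String[] args) {
        HashSet<String> aliases = new HashSet<String>(Arrays.asList("a", "b", "c"));
        // Pre-sized idiom, matching the example above:
        String[] sized = aliases.toArray(new String[aliases.size()]);
        // Empty-array alternative; toArray allocates the right size itself:
        String[] viaEmpty = aliases.toArray(new String[0]);
        System.out.println(sized.length + " " + viaEmpty.length); // prints "3 3"
    }
}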
From source file:fastcall.FastCallSNP.java
private void updateTaxaBamPathMap(String bamListFileS) {
    Table t = new Table(bamListFileS);
    String[] existingBam = new String[t.getRowNumber()];
    for (int i = 0; i < t.getRowNumber(); i++) {
        existingBam[i] = t.content[i][0];
    }
    Arrays.sort(existingBam);
    HashSet<String> existingTaxaSet = new HashSet<String>();
    HashMap<String, String[]> updatedTaxaBamMap = new HashMap<String, String[]>();
    ArrayList<String> pathList = new ArrayList<String>();
    int cnt = 0;
    for (int i = 0; i < taxaNames.length; i++) {
        String[] bamNames = taxaBamPathMap.get(taxaNames[i]);
        ArrayList<String> bamList = new ArrayList<String>();
        for (int j = 0; j < bamNames.length; j++) {
            int index = Arrays.binarySearch(existingBam, bamNames[j]);
            if (index < 0)
                continue;
            bamList.add(bamNames[j]);
            existingTaxaSet.add(taxaNames[i]);
            pathList.add(bamNames[j]);
        }
        if (bamList.isEmpty())
            continue;
        bamNames = bamList.toArray(new String[bamList.size()]);
        Arrays.sort(bamNames);
        updatedTaxaBamMap.put(taxaNames[i], bamNames);
        cnt += bamNames.length;
    }
    String[] updatedTaxaNames = existingTaxaSet.toArray(new String[existingTaxaSet.size()]);
    Arrays.sort(updatedTaxaNames);
    taxaNames = updatedTaxaNames;
    taxaBamPathMap = updatedTaxaBamMap;
    this.bamPaths = pathList.toArray(new String[pathList.size()]);
    Arrays.sort(bamPaths);
    System.out.println("Actual taxa number:\t" + String.valueOf(taxaNames.length));
    System.out.println("Actual bam file number:\t" + String.valueOf(cnt));
    System.out.println();
}
From source file:com.todoroo.astrid.actfm.sync.ActFmSyncService.java
public int fetchUsers() throws JSONException, IOException {
    if (!checkForToken())
        return 0;
    JSONObject result = actFmInvoker.invoke("user_list", "token", token);
    JSONArray users = result.getJSONArray("list");
    HashSet<Long> ids = new HashSet<Long>();
    if (users.length() > 0)
        Preferences.setBoolean(R.string.p_show_friends_view, true);
    for (int i = 0; i < users.length(); i++) {
        JSONObject userObject = users.getJSONObject(i);
        ids.add(userObject.optLong("id"));
        actFmDataService.saveUserData(userObject);
    }
    Long[] idsArray = ids.toArray(new Long[ids.size()]);
    actFmDataService.userDao.deleteWhere(Criterion.not(User.REMOTE_ID.in(idsArray)));
    return result.optInt("time", 0);
}
From source file:com.chap.memo.memoNodes.MemoNode.java
private StepState doStep(boolean preamble, MemoQuery query, MemoNode toCompare, ArrayList<UUID> results,
        HashSet<UUID> seenNodes, ArrayList<MemoNode> patterns, int topX, HashMap<String, String> arguments) {
    MemoNode step = query.node;
    //System.out.println("checking node:" + toCompare.getStringValue() + "/" + query.value + "(" + preamble + ")");
    if (!query.match(toCompare))
        return new StepState(false, "Node doesn't match.", query, toCompare);
    if (seenNodes.contains(toCompare.getId()))
        return new StepState(true, "Loop/Multipath detected", query, toCompare);
    if (preamble) {
        for (MemoNode pattern : patterns) {
            StepState res = doStep(false, MemoQuery.parseQuery(pattern.getChildren().get(0), arguments),
                    toCompare, null, new HashSet<UUID>(), null, 0, arguments);
            if (res.matched) {
                results.add(toCompare.getId());
                return new StepState(true, "Node matches pattern! Added to result, no need to search deeper.",
                        query, toCompare);
            }
        }
    }
    seenNodes.add(toCompare.getId());
    List<MemoNode> nextPats = step.getChildren();
    int toMatchNo = nextPats.size();
    if (toMatchNo == 0)
        return new StepState(true, "End of pattern", query, toCompare);
    List<UUID> children = toCompare.getChildIds();
    if (!preamble && children.size() < toMatchNo)
        return new StepState(false, "Too little children for pattern", query, toCompare);
    ArrayList<MemoQuery> queries = new ArrayList<MemoQuery>(toMatchNo);
    HashSet<MemoQuery> foundQueries = new HashSet<MemoQuery>(toMatchNo);
    for (MemoNode nextPat : nextPats) {
        queries.add(MemoQuery.parseQuery(nextPat, arguments));
    }
    MemoQuery[] queryArray = { new MemoQuery() };
    queryArray = queries.toArray(queryArray);
    Arrays.sort(queryArray);
    for (UUID uuid : children) {
        MemoNode child = new MemoNode(uuid);
        for (MemoQuery iQuery : queryArray) {
            if (foundQueries.contains(iQuery))
                continue;
            StepState res = doStep(preamble, iQuery, child, results, seenNodes, patterns, topX, arguments);
            if (topX > 0 && results.size() >= topX)
                return new StepState(true, "TopX results reached, returning!", query, toCompare);
            if (preamble || !res.isMatched())
                continue;
            // Return on fully matched pattern
            foundQueries.add(iQuery);
            if (foundQueries.size() == queryArray.length)
                return new StepState(true, "Pattern fully matched", query, toCompare);
        }
    }
    if (preamble)
        return new StepState(true, "preamble return.", query, toCompare);
    return new StepState(false, "Pattern didn't match entirely.", query, toCompare);
}
From source file:org.hyperic.hq.measurement.server.session.AvailabilityManagerImpl.java
@SuppressWarnings("unchecked") /**//from w ww . j a va 2s .c o m * get AvailabilityDataRLEs for the given DataPoints' Measurement IDs, with endData within the last 7 days. * If several AvailabilityDataRLEs exist for the same Measurement, they are listed in ascending order. * @param outOfOrderAvail * @param updateList * @return */ private Map<Integer, TreeSet<AvailabilityDataRLE>> createCurrAvails(final List<DataPoint> outOfOrderAvail, final List<DataPoint> updateList) { Map<Integer, TreeSet<AvailabilityDataRLE>> currAvails = null; final StopWatch watch = new StopWatch(); try { if (outOfOrderAvail.size() == 0 && updateList.size() == 0) { currAvails = Collections.EMPTY_MAP; } long now = TimingVoodoo.roundDownTime(System.currentTimeMillis(), 60000); HashSet<Integer> mids = getMidsWithinAllowedDataWindow(updateList, now); mids.addAll(getMidsWithinAllowedDataWindow(outOfOrderAvail, now)); if (mids.size() <= 0) { currAvails = Collections.EMPTY_MAP; } Integer[] mIds = (Integer[]) mids.toArray(new Integer[0]); currAvails = availabilityDataDAO.getHistoricalAvailMap(mIds, now - MAX_DATA_BACKLOG_TIME, false); return currAvails; } finally { if (log.isDebugEnabled()) { log.debug("AvailabilityInserter setCurrAvails: " + watch + ", size=" + currAvails.size()); } } }
From source file:org.openanzo.jdbc.container.query.FindInferred.java
/**
 * Find all statements in container that match the provided parameters
 *
 * @param container
 *            source of data
 * @param subj
 *            Subject resource to match, or wildcard if null
 * @param prop
 *            Predicate uri to match, or wildcard if null
 * @param obj
 *            Object value to match, or wildcard if null
 * @param contexts
 *            Context values to match, or wildcard if null
 * @return Iterable set of quads containing results of find operation
 * @throws AnzoException
 *             if there was an error finding statements
 */
@SuppressWarnings("unchecked")
public static Iterable<Quad> findStatements(RDBQuadStore container, Resource subj, URI prop, Value obj,
        URI... contexts) throws AnzoException {
    Long namedGraphId = null;
    String graphTable = null;
    int metadataGraph = -1;
    if (contexts != null && contexts.length == 1) {
        URI context = (URI) StatementUtils.convertUriToAny(contexts[0]);
        if (context != null) {
            namedGraphId = container.getNodeLayout().fetchId(context, container.getConnection());
            if (namedGraphId == null) {
                return Collections.EMPTY_LIST; // required node is not even in db
            }
            metadataGraph = UriGenerator.isMetadataGraphUri(context) ? 1 : 0;
        }
    } else {
        HashSet<Value> set = new HashSet<Value>();
        boolean includeAllNamedGraphs = false, includeAllMetadataGraphs = false;
        Map<Value, Long> graphIds = null;
        if (contexts != null && contexts.length > 0) {
            for (Resource context : contexts) {
                if (context.equals(GRAPHS.ALL_GRAPHS))
                    includeAllNamedGraphs = includeAllMetadataGraphs = true;
                else if (context.equals(GRAPHS.ALL_NAMEDGRAPHS))
                    includeAllNamedGraphs = true;
                else if (context.equals(GRAPHS.ALL_METADATAGRAPHS))
                    includeAllMetadataGraphs = true;
                else
                    set.add(context);
            }
            if (set.size() > 0) {
                graphIds = container.getNodeLayout().resolveStoredNodes(set, false, container.getConnection(), -1);
                if (graphIds.size() < set.size()) {
                    set.removeAll(graphIds.keySet());
                    log.debug(LogUtils.RDB_MARKER, "findStatementsInferred",
                            new UnknownGraphException(StringUtils.join(set.iterator(), ", ")));
                    if (graphIds.size() == 0 && !includeAllNamedGraphs && !includeAllMetadataGraphs) {
                        return Collections.EMPTY_LIST; // required node is not even in db
                    }
                }
                if (graphIds.size() > 0) {
                    graphTable = SQLQueryConstants.defaultGraphsTempTable;
                    //container.populateValidGraphs(graphIds, includeAllNamedGraphs, includeAllMetadataGraphs, graphTable);
                    //container.populateValidGraphs(graphIds, includeAllNamedGraphs, graphTable);
                }
            } else if (includeAllNamedGraphs || includeAllMetadataGraphs) {
                metadataGraph = includeAllNamedGraphs ? (includeAllMetadataGraphs ? -1 : 0) : 1;
            }
        }
    }
    return findStatements(container, subj, prop, obj, namedGraphId, metadataGraph, graphTable);
}
From source file:org.openanzo.jdbc.container.query.FindInferred.java
/**
 * Find all statements in container that match the provided parameters including inferred statements
 *
 * @param container
 *            source of data
 * @param subj
 *            Subject resource to match, or wildcard if null
 * @param prop
 *            Predicate uri to match, or wildcard if null
 * @param obj
 *            Object value to match, or wildcard if null
 * @param contexts
 *            Context values to match, or wildcard if null
 * @param ontology
 *            URI of ontology that contains inferred property and object definitions
 * @return Iterable set of quads containing results of find operation
 * @throws AnzoException
 *             if there was an error finding statements
 */
public static Iterable<Quad> findStatementsInferred(RDBQuadStore container, Resource subj, URI prop, Value obj,
        Resource[] contexts, Resource ontology) throws AnzoException {
    //try {
    Long namedGraphId = null;
    String graphTable = null;
    int metadataGraph = -1;
    if (contexts != null && contexts.length == 1) {
        Resource context = (Resource) StatementUtils.convertUriToAny(contexts[0]);
        if (context != null) {
            namedGraphId = container.getNodeLayout().fetchId(context, container.getConnection());
            if (namedGraphId == null) {
                return Collections.<Quad>emptySet(); // required node is not even in db
            }
            metadataGraph = context.toString().startsWith(NAMESPACES.METADATAGRAPH_PREFIX) ? 1 : 0;
        }
    } else {
        HashSet<Value> set = new HashSet<Value>();
        boolean includeAllNamedGraphs = false, includeAllMetadataGraphs = false;
        Map<Value, Long> graphIds = null;
        if (contexts != null && contexts.length > 0) {
            for (Resource context : contexts) {
                if (context.equals(GRAPHS.ALL_GRAPHS))
                    includeAllNamedGraphs = includeAllMetadataGraphs = true;
                else if (context.equals(GRAPHS.ALL_NAMEDGRAPHS))
                    includeAllNamedGraphs = true;
                else if (context.equals(GRAPHS.ALL_METADATAGRAPHS))
                    includeAllMetadataGraphs = true;
                else
                    set.add(context);
            }
            if (set.size() > 0) {
                graphIds = container.getNodeLayout().resolveStoredNodes(set, false, container.getConnection(), -1);
                if (graphIds.size() < set.size()) {
                    set.removeAll(graphIds.keySet());
                    log.debug(LogUtils.DATASOURCE_MARKER, "findStatementsInferred",
                            new UnknownGraphException(StringUtils.join(set.iterator(), ", ")));
                    if (graphIds.size() == 0 && !includeAllNamedGraphs && !includeAllMetadataGraphs) {
                        return Collections.<Quad>emptyList(); // required node is not even in db
                    }
                }
                if (graphIds.size() > 0) {
                    graphTable = SQLQueryConstants.defaultGraphsTempTable;
                    //container.populateValidGraphs(graphIds, includeAllNamedGraphs, includeAllMetadataGraphs, graphTable);
                    //container.populateValidGraphs(graphIds, includeAllNamedGraphs, graphTable);
                }
            } else if (includeAllNamedGraphs || includeAllMetadataGraphs) {
                metadataGraph = includeAllNamedGraphs ? (includeAllMetadataGraphs ? -1 : 0) : 1;
            }
        }
    }
    return findStatementsInferred(container, subj, prop, obj, namedGraphId, metadataGraph, graphTable, ontology);
    /*} catch (SQLException e) {
        throw new AnzoException(ExceptionConstants.ERROR_TAGS.CORE_ERROR | ExceptionConstants.ERROR_TAGS.RDB_ERROR,
                ExceptionConstants.CLIENT.FAILED_CONTAINER_FIND_STATEMENTS, e);
    }*/
}
From source file:com.webcohesion.enunciate.modules.docs.DocsModule.java
private TreeSet<Artifact> findDocumentationArtifacts() {
    HashSet<String> explicitArtifacts = new HashSet<String>();
    TreeSet<Artifact> artifacts = new TreeSet<Artifact>();
    for (ExplicitDownloadConfig download : getExplicitDownloads()) {
        if (download.getArtifact() != null) {
            explicitArtifacts.add(download.getArtifact());
        } else if (download.getFile() != null) {
            File downloadFile = resolveFile(download.getFile());
            debug("File %s to be added as an extra download.", downloadFile.getAbsolutePath());
            SpecifiedArtifact artifact = new SpecifiedArtifact(getName(), downloadFile.getName(), downloadFile);
            if (download.getName() != null) {
                artifact.setName(download.getName());
            }
            if (download.getDescription() != null) {
                artifact.setDescription(download.getDescription());
            }
            artifact.setShowLink(!"false".equals(download.getShowLink()));
            artifacts.add(artifact);
        }
    }
    for (Artifact artifact : this.enunciate.getArtifacts()) {
        if (artifact.isPublic() || explicitArtifacts.contains(artifact.getId())) {
            artifacts.add(artifact);
            debug("Artifact %s to be added as an extra download.", artifact.getId());
            explicitArtifacts.remove(artifact.getId());
        }
    }
    if (explicitArtifacts.size() > 0) {
        for (String artifactId : explicitArtifacts) {
            warn("WARNING: Unknown artifact '%s'. Will not be available for download.", artifactId);
        }
    }
    return artifacts;
}
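The explicitArtifacts.size() > 0 test above is a common emptiness check; when the count itself isn't needed, isEmpty() expresses the same condition more directly. A small sketch with illustrative names:

import java.util.HashSet;

public class EmptinessCheck {
    public static void main(String[] args) {
        HashSet<String> unresolved = new HashSet<String>();
        unresolved.add("unknown-artifact");
        if (!unresolved.isEmpty()) { // equivalent to unresolved.size() > 0
            System.out.println(unresolved.size() + " unresolved artifact id(s)");
        }
    }
}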
From source file:nl.umcg.westrah.binarymetaanalyzer.BinaryMetaAnalysis.java
private void createProbeIndex(String outdir) throws IOException {
    HashSet<String> confineToTheseProbes = null;
    HashSet<String> probePreselection = null;
    if (settings.getSNPProbeSelection() != null) {
        System.out.println("Getting Probes from SNP/Probe selection file: " + settings.getSNPProbeSelection());
        probePreselection = new HashSet<String>();
        TextFile tf = new TextFile(settings.getSNPProbeSelection(), TextFile.R);
        String[] elems = tf.readLineElems(TextFile.tab);
        while (elems != null) {
            if (elems.length >= 2) {
                String probe = elems[1];
                probePreselection.add(probe);
            }
            elems = tf.readLineElems(TextFile.tab);
        }
        tf.close();
        System.out.println("Found " + probePreselection.size() + " unique probes in SNP/Probe selection file.");
        if (probePreselection.isEmpty()) {
            System.err.println("Error: SNP/Probe selection file defined, but no Probes found.");
            System.exit(-1);
        }
    }
    if (settings.getProbeselection() != null) {
        System.out.println("Selecting Probes from file: " + settings.getProbeselection());
        confineToTheseProbes = new HashSet<String>();
        TextFile tf = new TextFile(settings.getProbeselection(), TextFile.R);
        if (probePreselection == null) {
            confineToTheseProbes.addAll(tf.readAsArrayList());
        } else {
            ArrayList<String> confineTMP = tf.readAsArrayList();
            for (String p : confineTMP) {
                if (probePreselection.contains(p)) {
                    confineToTheseProbes.add(p);
                }
            }
        }
        tf.close();
        System.out.println(confineToTheseProbes.size() + " Probes loaded.");
    } else if (probePreselection != null) {
        confineToTheseProbes = probePreselection;
    }
    System.out.println("");

    // TODO: write probe list of probes that we didn't find in the annotation
    probeIndex = new Integer[traitList.length][datasets.length];
    for (int d = 0; d < datasets.length; d++) {
        String[] probes = datasets[d].getProbeList();
        int platformId = probeAnnotation.getPlatformId(datasets[d].getPlatform());
        HashMap<String, MetaQTL4MetaTrait> traitHashForPlatform = probeAnnotation.getTraitHashForPlatform(platformId);
        // System.out.println(probeAnnotation.getTraitHashPerPlatform().size());
        // System.out.println(datasets[d].getName() + "\t" + platformId + "\t" + datasets[d].getPlatform() + "\t" + traitHashForPlatform.size());
        for (int p = 0; p < probes.length; p++) {
            MetaQTL4MetaTrait t = traitHashForPlatform.get(probes[p]);
            if (t != null) {
                int index = traitMap.get(t);
                // if (confineToTheseProbes == null || confineToTheseProbes.contains(probes[p]) || confineToTheseProbes.contains(t.getMetaTraitName())) {
                if (confineToTheseProbes == null || confineToTheseProbes.contains(t.getMetaTraitName())) {
                    // TODO: was there a reason we selected specific platform probes/identifiers?
                    probeIndex[index][d] = p;
                }
            }
            // else {
            //     probeIndex[index][d] = -1;
            // }
        }
    }
    System.out.println("");

    TextFile out = new TextFile(outdir + "probeindex.txt", TextFile.W);
    String header = "metaID";
    for (int d = 0; d < datasets.length; d++) {
        header += "\t" + datasets[d].getName() + "-pid\t" + datasets[d].getName() + "-probename";
    }
    out.writeln(header);
    for (int p = 0; p < probeIndex.length; p++) {
        String lnout = "" + traitList[p].getMetaTraitId();
        for (int d = 0; d < datasets.length; d++) {
            Integer pid = probeIndex[p][d];
            String probeName = null;
            if (pid != null) {
                probeName = datasets[d].getProbeList()[pid];
            }
            lnout += "\t" + pid + "\t" + probeName;
        }
        out.writeln(lnout);
    }
    out.close();
}
From source file:gov.noaa.pfel.erddap.dataset.EDDTableFromHttpGet.java
/**
 * The constructor for subclasses.
 */
public EDDTableFromHttpGet(String tClassName, String tDatasetID, String tAccessibleTo,
        String tGraphsAccessibleTo, StringArray tOnChange, String tFgdcFile, String tIso19115File,
        String tSosOfferingPrefix, String tDefaultDataQuery, String tDefaultGraphQuery,
        Attributes tAddGlobalAttributes, Object[][] tDataVariables, int tReloadEveryNMinutes,
        int tUpdateEveryNMillis, String tFileDir, String tFileNameRegex, boolean tRecursive, String tPathRegex,
        String tMetadataFrom, String tCharset, int tColumnNamesRow, int tFirstDataRow, String tPreExtractRegex,
        String tPostExtractRegex, String tExtractRegex, String tColumnNameForExtract,
        String tSortedColumnSourceName, String tSortFilesBySourceNames, boolean tSourceNeedsExpandedFP_EQ,
        boolean tFileTableInMemory, boolean tAccessibleViaFiles, boolean tRemoveMVRows) throws Throwable {

    super(tClassName, tDatasetID, tAccessibleTo, tGraphsAccessibleTo, tOnChange, tFgdcFile, tIso19115File,
            tSosOfferingPrefix, tDefaultDataQuery, tDefaultGraphQuery, tAddGlobalAttributes, tDataVariables,
            tReloadEveryNMinutes, tUpdateEveryNMillis, tFileDir, tFileNameRegex, tRecursive, tPathRegex,
            tMetadataFrom, tCharset, tColumnNamesRow, tFirstDataRow, tPreExtractRegex, tPostExtractRegex,
            tExtractRegex, tColumnNameForExtract, tSortedColumnSourceName, tSortFilesBySourceNames,
            tSourceNeedsExpandedFP_EQ, tFileTableInMemory, tAccessibleViaFiles, tRemoveMVRows);

    // Get/remove keys from global metadata.
    String attName = "HttpGetKeys";
    String keyAr[] = StringArray.arrayFromCSV( // may be length=0
            combinedGlobalAttributes.getString(attName));
    sourceGlobalAttributes.remove(attName);
    addGlobalAttributes.remove(attName);
    combinedGlobalAttributes.remove(attName);

    // Create temporary hashset.
    HashSet<String> tKeys = new HashSet<String>();
    for (int i = 0; i < keyAr.length; i++) {
        if (String2.isSomething(keyAr[i]))
            tKeys.add(String2.canonical(keyAr[i]));
    }
    if (tKeys.size() == 0)
        throw new SimpleException(
                String2.ERROR + " in constructor: " + attName + " global attribute wasn't specified.");

    // Then swap into place atomically.
    keys = tKeys;

    // No column sourceName can be COLUMN.
    // ...
}