List of usage examples for java.util HashSet size
Method signature: `public int size()` — returns the number of elements currently contained in the `HashSet`.
From source file:org.jboss.as.test.integration.management.api.web.ConnectorTestCase.java
@Test public void testDefaultConnectorList() throws Exception { // only http connector present as a default HashSet<String> connNames = getConnectorList(); assertTrue("HTTP connector missing.", connNames.contains("http")); assertTrue(connNames.size() == 1);//from www . j a v a2 s. co m }
From source file:com.ryan.ryanreader.reddit.prepared.RedditPreparedPost.java
/**
 * Dispatches a post context-menu selection to the matching behaviour:
 * voting/saving/hiding via the Reddit API, reporting (behind a confirmation
 * dialog), opening the link externally, listing selftext links in a chooser
 * dialog, saving the post image to external storage via the cache manager,
 * sharing the link or comments URL, copying the URL to the clipboard,
 * navigating to the subreddit / user profile / properties / comments, or
 * opening the reply screen.
 * NOTE(review): the collapsed single-line formatting below is preserved
 * verbatim from the original source listing.
 *
 * @param post           the post the menu was opened for
 * @param fragmentParent hosting fragment; also cast to PostSelectionListener
 *                       for the comment/link navigation actions
 * @param action         the menu action chosen by the user
 */
public static void onActionMenuItemSelected(final RedditPreparedPost post, final Fragment fragmentParent, final Action action) { final Activity activity = fragmentParent.getSupportActivity(); switch (action) { case UPVOTE:/*from w w w . j a v a 2 s .c o m*/ post.action(activity, RedditAPI.RedditAction.UPVOTE); break; case DOWNVOTE: post.action(activity, RedditAPI.RedditAction.DOWNVOTE); break; case UNVOTE: post.action(activity, RedditAPI.RedditAction.UNVOTE); break; case SAVE: post.action(activity, RedditAPI.RedditAction.SAVE); break; case UNSAVE: post.action(activity, RedditAPI.RedditAction.UNSAVE); break; case HIDE: post.action(activity, RedditAPI.RedditAction.HIDE); break; case UNHIDE: post.action(activity, RedditAPI.RedditAction.UNHIDE); break; case REPORT: new AlertDialog.Builder(activity).setTitle(R.string.action_report) .setMessage(R.string.action_report_sure) .setPositiveButton(R.string.action_report, new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, final int which) { post.action(activity, RedditAPI.RedditAction.REPORT); // TODO update the view to show the result // TODO don't forget, this also hides } }).setNegativeButton(R.string.dialog_cancel, null).show(); break; case EXTERNAL: { final Intent intent = new Intent(Intent.ACTION_VIEW); intent.setData(Uri.parse(post.url)); activity.startActivity(intent); break; } case SELFTEXT_LINKS: { final HashSet<String> linksInComment = LinkHandler .computeAllLinks(StringEscapeUtils.unescapeHtml4(post.src.selftext)); if (linksInComment.isEmpty()) { General.quickToast(activity, R.string.error_toast_no_urls_in_self); } else { final String[] linksArr = linksInComment.toArray(new String[linksInComment.size()]); final AlertDialog.Builder builder = new AlertDialog.Builder(activity); builder.setItems(linksArr, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { LinkHandler.onLinkClicked(activity, linksArr[which], false, post.src); 
dialog.dismiss(); } }); final AlertDialog alert = builder.create(); alert.setTitle(R.string.action_selftext_links); alert.setCanceledOnTouchOutside(true); alert.show(); } break; } case SAVE_IMAGE: { final RedditAccount anon = RedditAccountManager.getAnon(); CacheManager.getInstance(activity) .makeRequest(new CacheRequest(General.uriFromString(post.imageUrl), anon, null, Constants.Priority.IMAGE_VIEW, 0, CacheRequest.DownloadType.IF_NECESSARY, Constants.FileType.IMAGE, false, false, false, activity) { @Override protected void onCallbackException(Throwable t) { BugReportActivity.handleGlobalError(context, t); } @Override protected void onDownloadNecessary() { General.quickToast(context, R.string.download_downloading); } @Override protected void onDownloadStarted() { } @Override protected void onFailure(RequestFailureType type, Throwable t, StatusLine status, String readableMessage) { final RRError error = General.getGeneralErrorForFailure(context, type, t, status); General.showResultDialog(activity, error); } @Override protected void onProgress(long bytesRead, long totalBytes) { } @Override protected void onSuccess(CacheManager.ReadableCacheFile cacheFile, long timestamp, UUID session, boolean fromCache, String mimetype) { File dst = new File( Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), General.uriFromString(post.imageUrl).getPath()); if (dst.exists()) { int count = 0; while (dst.exists()) { count++; dst = new File( Environment.getExternalStoragePublicDirectory( Environment.DIRECTORY_PICTURES), count + "_" + General.uriFromString(post.imageUrl).getPath().substring(1)); } } try { General.copyFile(cacheFile.getInputStream(), dst); } catch (IOException e) { notifyFailure(RequestFailureType.STORAGE, e, null, "Could not copy file"); return; } activity.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.parse("file://" + dst.getAbsolutePath()))); General.quickToast(context, 
context.getString(R.string.action_save_image_success) + " " + dst.getAbsolutePath()); } }); break; } case SHARE: { final Intent mailer = new Intent(Intent.ACTION_SEND); mailer.setType("text/plain"); mailer.putExtra(Intent.EXTRA_SUBJECT, post.title); mailer.putExtra(Intent.EXTRA_TEXT, post.url); activity.startActivity(Intent.createChooser(mailer, activity.getString(R.string.action_share))); break; } case SHARE_COMMENTS: { final Intent mailer = new Intent(Intent.ACTION_SEND); mailer.setType("text/plain"); mailer.putExtra(Intent.EXTRA_SUBJECT, "Comments for " + post.title); mailer.putExtra(Intent.EXTRA_TEXT, Constants.Reddit.getUri(Constants.Reddit.PATH_COMMENTS + post.idAlone).toString()); activity.startActivity( Intent.createChooser(mailer, activity.getString(R.string.action_share_comments))); break; } case COPY: { ClipboardManager manager = (ClipboardManager) activity.getSystemService(Context.CLIPBOARD_SERVICE); manager.setText(post.url); break; } case GOTO_SUBREDDIT: { final RedditSubreddit subreddit = new RedditSubreddit("/r/" + post.src.subreddit, "/r/" + post.src.subreddit, true); final Intent intent = new Intent(activity, PostListingActivity.class); intent.putExtra("subreddit", subreddit); activity.startActivityForResult(intent, 1); break; } case USER_PROFILE: UserProfileDialog.newInstance(post.src.author).show(activity); break; case PROPERTIES: PostPropertiesDialog.newInstance(post.src).show(activity); break; case COMMENTS: ((RedditPostView.PostSelectionListener) fragmentParent).onPostCommentsSelected(post); break; case LINK: ((RedditPostView.PostSelectionListener) fragmentParent).onPostSelected(post); break; case COMMENTS_SWITCH: if (!(activity instanceof MainActivity)) activity.finish(); ((RedditPostView.PostSelectionListener) fragmentParent).onPostCommentsSelected(post); break; case LINK_SWITCH: if (!(activity instanceof MainActivity)) activity.finish(); ((RedditPostView.PostSelectionListener) fragmentParent).onPostSelected(post); break; case ACTION_MENU: 
showActionMenu(activity, fragmentParent, post); break; case REPLY: final Intent intent = new Intent(activity, CommentReplyActivity.class); intent.putExtra("parentIdAndType", post.idAndType); activity.startActivity(intent); break; } }
From source file:org.alfresco.web.forms.xforms.SchemaUtil.java
public static XSModel parseSchema(final Document schemaDocument, final boolean failOnError) throws FormBuilderException { try {// w w w .ja va 2 s .c o m // Get DOM Implementation using DOM Registry System.setProperty(DOMImplementationRegistry.PROPERTY, "org.apache.xerces.dom.DOMXSImplementationSourceImpl"); final DOMImplementationRegistry registry = DOMImplementationRegistry.newInstance(); final DOMImplementationLS lsImpl = (DOMImplementationLS) registry .getDOMImplementation("XML 1.0 LS 3.0"); if (lsImpl == null) { throw new FormBuilderException("unable to create DOMImplementationLS using " + registry); } final LSInput in = lsImpl.createLSInput(); in.setStringData(XMLUtil.toString(schemaDocument)); final XSImplementation xsImpl = (XSImplementation) registry.getDOMImplementation("XS-Loader"); final XSLoader schemaLoader = xsImpl.createXSLoader(null); final DOMConfiguration config = (DOMConfiguration) schemaLoader.getConfig(); final LinkedList<DOMError> errors = new LinkedList<DOMError>(); config.setParameter("error-handler", new DOMErrorHandler() { public boolean handleError(final DOMError domError) { errors.add(domError); return true; } }); final XSModel result = schemaLoader.load(in); if (failOnError && errors.size() != 0) { final HashSet<String> messages = new HashSet<String>(); StringBuilder message = null; for (DOMError e : errors) { message = new StringBuilder(); final DOMLocator dl = e.getLocation(); if (dl != null) { message.append("at line ").append(dl.getLineNumber()).append(" column ") .append(dl.getColumnNumber()); if (dl.getRelatedNode() != null) { message.append(" node ").append(dl.getRelatedNode().getNodeName()); } message.append(": ").append(e.getMessage()); } messages.add(message.toString()); } message = new StringBuilder(); message.append(messages.size() > 1 ? 
"errors" : "error").append(" parsing schema: \n"); for (final String s : messages) { message.append(s).append("\n"); } throw new FormBuilderException(message.toString()); } if (result == null) { throw new FormBuilderException("invalid schema"); } return result; } catch (ClassNotFoundException x) { throw new FormBuilderException(x); } catch (InstantiationException x) { throw new FormBuilderException(x); } catch (IllegalAccessException x) { throw new FormBuilderException(x); } }
From source file:com.redhat.rhn.manager.monitoring.ModifyFilterCommand.java
/** * Update the probe states for which the filter matches to <code>values</code>. * The values must be one of the constants in {@link MonitoringConstants#PROBE_STATES} * @param values the new probe state values on which the filter matches */// w w w. j a v a 2 s .c om public void updateStates(String[] values) { if (values != null) { HashSet valueSet = new HashSet(Arrays.asList(values)); if (PROBE_STATE_SET.equals(valueSet)) { values = null; } valueSet.removeAll(PROBE_STATE_SET); if (valueSet.size() > 0) { throw new IllegalArgumentException( "The state values must be one of " + PROBE_STATE_SET + ", but also contained " + valueSet); } } updateCriteria(MatchType.STATE, values); }
From source file:com.netflix.simianarmy.aws.janitor.crawler.edda.EddaInstanceJanitorCrawler.java
/**
 * Backfills missing resource owners from the "owner" tag of the AMI each
 * instance was launched from. Collects the imageIds of all owner-less
 * resources, queries the region's Edda endpoint once for those (private)
 * images, builds an imageId -&gt; owner map from the returned JSON, and
 * copies the owner email onto each matching resource in place. Resources
 * whose AMI has no owner tag, or any Edda query failure (logged), leave
 * the resources unchanged.
 *
 * @param region    the region whose Edda endpoint is queried
 * @param resources instance resources to patch in place
 */
private void refreshOwnerByImage(String region, List<Resource> resources) { HashSet<String> imageIds = new HashSet<String>(); for (Resource resource : resources) { if (resource.getOwnerEmail() == null) { imageIds.add(resource.getAdditionalField("imageId")); }//www .j a v a2s . com } if (imageIds.size() > 0) { HashMap<String, String> imageToOwner = new HashMap<String, String>(); String url = eddaClient.getBaseUrl(region) + "/aws/images/"; url += StringUtils.join(imageIds, ','); url += ";tags.key=owner;public=false;_expand:(imageId,tags:(owner))"; JsonNode imageJsonNode = null; try { imageJsonNode = eddaClient.getJsonNodeFromUrl(url); } catch (Exception e) { LOGGER.error(String.format("Failed to get Json node from edda for AMIs in region %s.", region), e); } if (imageJsonNode == null) { return; } for (Iterator<JsonNode> it = imageJsonNode.getElements(); it.hasNext();) { JsonNode image = it.next(); String imageId = image.get("imageId").getTextValue(); JsonNode tags = image.get("tags"); for (Iterator<JsonNode> tagIt = tags.getElements(); tagIt.hasNext();) { JsonNode tag = tagIt.next(); if (tag.get("owner") != null) { imageToOwner.put(imageId, tag.get("owner").getTextValue()); break; } } } if (imageToOwner.size() > 0) { for (Resource resource : resources) { if (resource.getOwnerEmail() == null && imageToOwner.get(resource.getAdditionalField("imageId")) != null) { resource.setOwnerEmail(imageToOwner.get(resource.getAdditionalField("imageId"))); LOGGER.info( String.format("Found owner %s for instance %s in AMI %s", resource.getOwnerEmail(), resource.getId(), resource.getAdditionalField("imageId"))); } } } } }
From source file:EndmemberExtraction.java
/**
 * Runs iterative ORASIS endmember extraction over the image data and
 * produces per-pixel exemplar labels. The raw exemplar ids returned by
 * iterativeORASIS are compacted into a dense 0..count-1 range (pixels left
 * at -1 stay unlabeled), the label matrix is written to exemplarMat.txt and
 * rendered to ExemplarImg.png under {@code filepath}.
 * NOTE(review): the compacted count excludes -1, but the image is rendered
 * with uniqueExemplars.size(), which still includes -1 whenever any pixel
 * is unlabeled — confirm this off-by-one in the colour count is intended.
 *
 * @return the dense exemplar label for each data point (-1 = unlabeled)
 */
public static int[] assignInitialLabels(double[][] data, int nData, int nDim, int imgDim1, int imgDim2, double minThresholdAngle, double maxThresholdAngle, double stepSize, double minThresholdAbundance, String filepath) {/*w w w .  j a va2 s .co m*/ int[] exemplarLabel = new int[nData]; int[][] exemplarMat; iterativeORASIS(data, nData, nDim, minThresholdAngle, maxThresholdAngle, stepSize, minThresholdAbundance, exemplarLabel); HashSet<Integer> uniqueExemplars = new HashSet<>(); for (int i = 0; i < nData; i++) { uniqueExemplars.add(exemplarLabel[i]); } //System.out.println("Unique exemplars:"); HashMap<Integer, Integer> exemplars = new HashMap<>(); int count = 0; for (int i : uniqueExemplars) { if (i != -1) { exemplars.put(i, count); //System.out.print(i+"\t"); count++; } } System.out.println("Exemplar Count:" + count); //System.out.println(); for (int i = 0; i < nData; i++) { if (exemplarLabel[i] != -1) { exemplarLabel[i] = exemplars.get(exemplarLabel[i]); } } exemplarMat = reshape(exemplarLabel, imgDim1, imgDim2); IO.writeData(exemplarMat, imgDim1, imgDim2, filepath + "exemplarMat.txt"); /* for(int i=0;i<imgDim1;i++){ System.out.print(i+" : "); for(int j=0;j<imgDim2;j++){ System.out.print(exemplarMat[i][j]+"\t"); } System.out.println(); } */ //Display exemplar classification image File exemplarImg = new File(filepath + "ExemplarImg.png"); ImageProc.imagesc(exemplarImg, exemplarMat, uniqueExemplars.size(), imgDim1, imgDim2); return exemplarLabel; }
From source file:com.bah.bahdit.main.plugins.imageindex.ImageIndex.java
/** * Gets tags from the query and returns the hashes * associated with those tags./*from www .j a v a2 s . c o m*/ * * @param tagScanner - batch scanner to the tag table * @param query - the query specified by the user * @return - hashes found that satisfy the query */ private HashSet<String> getTag(BatchScanner tagScanner, String query) { List<Range> ranges = new ArrayList<Range>(); for (String s : query.split(" ")) { ranges.add(new Range(s)); } tagScanner.setRanges(ranges); HashSet<String> hashRanges = new HashSet<String>(); for (Entry<Key, Value> e : tagScanner) { hashRanges.add(new String(e.getValue().get())); } if (hashRanges.size() == 0) { String[] suggestions = { query }; try { for (String q : query.split("\\s")) { suggestions = (String[]) ArrayUtils.addAll(suggestions, tagSpellChecker.suggestSimilar(q, 1)); } } catch (IOException e) { log.warn(e.getMessage()); } for (Entry<Key, Value> e : tagScanner) { hashRanges.add(new String(e.getValue().get())); } if (hashRanges.size() == 0) return null; } return hashRanges; }
From source file:org.docrj.smartcard.reader.AppViewActivity.java
/**
 * Removes the currently viewed app (mAppPos) from the app list while
 * keeping the selected-app index consistent: mSelectedAppPos is reset to 0
 * when the deleted app was the selected one, or shifted down by one when
 * the deleted app preceded it. Any group the app belonged to that is left
 * empty by the removal is dropped — but only when the app was in more than
 * the single default other/payment group. Preferences are persisted and
 * the activity finishes.
 */
private void deleteApp() { // adjust selected position for app select mode if (mSelectedAppPos == mAppPos) { mSelectedAppPos = 0;/* w w w .  j av  a2s .  c om*/ } else if (mSelectedAppPos > mAppPos) { mSelectedAppPos--; } // remove app from list SmartcardApp app = mApps.remove(mAppPos); HashSet<String> groups = app.getGroups(); // only bother to adjust groups if app was assigned to // more than the default other/payment group if (groups.size() > 1) { for (String group : groups) { if (Util.isGroupEmpty(group, mApps)) { removeGroup(group); } } } writePrefs(); finish(); }
From source file:de.uni_tuebingen.ub.ixTheo.handler.component.FacetPrefixSortComponent.java
/**
 * Actually run the (facet part of the) query. Facet request parameters are
 * de-duplicated, standard facet counts — and pivot facets, if requested —
 * are computed, and when facet.sort=prefix the entries of facet.field are
 * re-scored against the query terms using the keyword chain metric, sorted
 * by that score, truncated according to facet.offset/facet.limit, and
 * written back, together with a "&lt;field&gt;-count" entry holding the
 * total number of scored entries.
 * NOTE(review): deDupe is declared as HashSet but assigned a LinkedHashSet
 * — the insertion-order behaviour relies on the runtime type; a Set or
 * LinkedHashSet declaration would state the intent. Collapsed single-line
 * formatting below is preserved verbatim from the source listing.
 *
 * @param rb the response builder holding request params and results
 * @throws IOException if facet computation fails
 */
@Override public void process(ResponseBuilder rb) throws IOException { if (rb.doFacets) { final ModifiableSolrParams params = new ModifiableSolrParams(); final SolrParams origParams = rb.req.getParams(); final Iterator<String> iter = origParams.getParameterNamesIterator(); setCollator(origParams.get("lang")); while (iter.hasNext()) { final String paramName = iter.next(); // Deduplicate the list with LinkedHashSet, but _only_ for facet // params. if (!paramName.startsWith(FacetParams.FACET)) { params.add(paramName, origParams.getParams(paramName)); continue; } final HashSet<String> deDupe = new LinkedHashSet<>(Arrays.asList(origParams.getParams(paramName))); params.add(paramName, deDupe.toArray(new String[deDupe.size()])); } final SimplePrefixSortFacets facets = new SimplePrefixSortFacets(rb.req, rb.getResults().docSet, params, rb); final NamedList<Object> counts = org.apache.solr.handler.component.FacetComponent .getFacetCounts(facets); final String[] pivots = params.getParams(FacetParams.FACET_PIVOT); if (pivots != null && pivots.length > 0) { PivotFacetProcessor pivotProcessor = new PivotFacetProcessor(rb.req, rb.getResults().docSet, params, rb); SimpleOrderedMap<List<NamedList<Object>>> v = pivotProcessor.process(pivots); if (v != null) { counts.add(PIVOT_KEY, v); } } // Check whether we have to reorder out results // according to prefix final String sort = params.get(FacetParams.FACET_SORT); if (FacetPrefixSortParams.FACET_SORT_PREFIX.equals(sort)) { // Determine a score relative to the original query // Determine the query and make it compatible with our metric // class // by splitting the single terms String[] queryTerms = params.getParams(CommonParams.Q); final Collection<String> queryTermsCollection = new ArrayList<>(); for (String s : queryTerms) { // Split at whitespace except we have a quoted term Matcher matcher = WHITE_SPACES_WITH_QUOTES_SPLITTING_PATTERN.matcher(s); while (matcher.find()) { 
queryTermsCollection.add(matcher.group().replaceAll("^\"|\"$", "")); } } // In some contexts, i.e. in KWC that are derived from ordinary // keywords or if // wildcards occur, also add all the query terms as a single // phrase term // with stripped wildcards StringBuilder sb = new StringBuilder(); for (String s : queryTermsCollection) { s = s.replace("*", ""); sb.append(s); sb.append(" "); } queryTermsCollection.add(sb.toString().trim()); final ArrayList<String> queryList = new ArrayList<>(queryTermsCollection); final String facetfield = params.get(FacetParams.FACET_FIELD); // Get the current facet entry and make it compatible with our // metric class // "facet_fields" itself contains a NamedList with the // facet.field as key final NamedList<Object> facetFieldsNamedList = (NamedList<Object>) counts.get("facet_fields"); final NamedList<Object> facetFields = (NamedList<Object>) facetFieldsNamedList.get(facetfield); final List<Entry<Entry<String, Object>, Double>> facetPrefixListScored = new ArrayList<>(); for (final Entry<String, Object> entry : facetFields) { final String facetTerms = entry.getKey(); // Split up each KWC and calculate the scoring ArrayList<String> facetList = new ArrayList<>( Arrays.asList(facetTerms.split("(?<!" 
+ Pattern.quote("\\") + ")/"))); // For usability reasons sort the result facets according to // the order of the search facetList = KeywordSort.sortToReferenceChain(queryList, facetList); final double score = KeywordChainMetric.calculateSimilarityScore(queryList, facetList); // Collect the result in a sorted list and throw away // garbage if (score > 0) { String facetTermsSorted = StringUtils.join(facetList, "/"); Map.Entry<String, Object> sortedEntry = new AbstractMap.SimpleEntry<>(facetTermsSorted, entry.getValue()); facetPrefixListScored.add(new AbstractMap.SimpleEntry<>(sortedEntry, score)); } } Collections.sort(facetPrefixListScored, ENTRY_COMPARATOR); // Extract all the values wrap it back to NamedList again and // replace in the original structure facetFieldsNamedList.clear(); NamedList<Object> facetNamedListSorted = new NamedList<>(); // We had to disable all limits and offsets sort according // Handle this accordingly now int offset = (params.getInt(FacetParams.FACET_OFFSET) != null) ? params.getInt(FacetParams.FACET_OFFSET) : 0; int limit = (params.getInt(FacetParams.FACET_LIMIT) != null) ? params.getInt(FacetParams.FACET_LIMIT) : 100; // Strip uneeded elements int s = facetPrefixListScored.size(); int off = (offset < s) ? offset : 0; limit = (limit < 0) ? s : limit; // Handle a negative limit // param, i.e. unlimited results int lim = (offset + limit <= s) ? 
(offset + limit) : s; final List<Entry<Entry<String, Object>, Double>> facetPrefixListScoredTruncated = facetPrefixListScored .subList(off, lim); for (Entry<Entry<String, Object>, Double> e : facetPrefixListScoredTruncated) { facetNamedListSorted.add(e.getKey().getKey(), e.getKey().getValue()); } facetFieldsNamedList.add(facetfield, facetNamedListSorted); NamedList<Object> countList = new NamedList<>(); countList.add("count", facetPrefixListScored.size()); facetFieldsNamedList.add(facetfield + "-count", countList); counts.remove("facet_fields"); counts.add("facet_fields", facetFieldsNamedList); } rb.rsp.add("facet_counts", counts); } }
From source file:org.jlinda.core.coregistration.estimation.utils.MathUtils.java
/** * Determines the unique values of v. The values are returned in no particular order. * * @param v/*from w ww . j a va 2s .c o m*/ * @return the unique values of v in no particular order. */ public static int[] uniqueValues(int[] v) { // form the values into a set, which automatically removes duplicates HashSet<Integer> uniqueValues = new HashSet<Integer>(); for (int i = 0; i < v.length; i++) { uniqueValues.add(v[i]); } // convert the set back into an array int[] vUnique = new int[uniqueValues.size()]; int i = 0; for (Integer uniqueValue : uniqueValues) { vUnique[i++] = uniqueValue; } return vUnique; }