List of usage examples for java.util.HashSet.size()
public int size()
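size() returns the number of elements currently held in the set (its cardinality). Because a HashSet stores each value at most once, size() is a convenient way to count distinct items, which is the pattern most of the examples below rely on. A minimal, self-contained sketch (the sample values are illustrative only):

import java.util.HashSet;

public class HashSetSizeExample {
    public static void main(String[] args) {
        HashSet<String> names = new HashSet<String>();
        names.add("alice");
        names.add("bob");
        names.add("alice"); // duplicate, not stored a second time

        // Prints 2: size() reports the number of distinct elements.
        System.out.println(names.size());
    }
}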
From source file:eu.planets_project.tb.gui.backing.service.ServiceInspector.java
public int getNumberOfExperiments() {
    // Single Services:
    if (this.srb != null) {
        if (srb.getServiceRecord() != null) {
            return srb.getServiceRecord().getExperimentIds().size();
        } else {
            return 0;
        }
    }
    // Lists:
    HashSet<Long> uniques = new HashSet<Long>();
    if (this.srbs != null) {
        for (ServiceRecordBean srb : srbs) {
            if (srb.getServiceRecord() != null) {
                for (Long id : srb.getServiceRecord().getExperimentIds()) {
                    uniques.add(id);
                }
            }
        }
    }
    return uniques.size();
}
From source file:net.semanticmetadata.lire.imageanalysis.bovw.LocalFeatureHistogramBuilderKmeansPlusPlus.java
/**
 * Uses an existing index, where each and every document should have a set of local features. A number of
 * random images (numDocsForVocabulary) is selected and clustered to get a vocabulary of visual words
 * (the cluster means). For all images a histogram on the visual words is created and added to the documents.
 * Pre-existing histograms are deleted, so this method can be used for re-indexing.
 *
 * @throws java.io.IOException
 */
public void index() throws IOException {
    df.setMaximumFractionDigits(3);
    // find the documents for building the vocabulary:
    HashSet<Integer> docIDs = selectVocabularyDocs();
    System.out.println("Using " + docIDs.size() + " documents to build the vocabulary.");
    KMeansPlusPlusClusterer kpp = new KMeansPlusPlusClusterer(numClusters, 15);
    // fill the KMeans object:
    LinkedList<DoublePoint> features = new LinkedList<DoublePoint>();
    // Needed for check whether the document is deleted.
    Bits liveDocs = MultiFields.getLiveDocs(reader);
    for (Iterator<Integer> iterator = docIDs.iterator(); iterator.hasNext();) {
        int nextDoc = iterator.next();
        if (reader.hasDeletions() && !liveDocs.get(nextDoc))
            continue; // if it is deleted, just ignore it.
        Document d = reader.document(nextDoc);
        // features.clear();
        IndexableField[] fields = d.getFields(localFeatureFieldName);
        String file = d.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        for (int j = 0; j < fields.length; j++) {
            LireFeature f = getFeatureInstance();
            f.setByteArrayRepresentation(fields[j].binaryValue().bytes, fields[j].binaryValue().offset,
                    fields[j].binaryValue().length);
            // copy the data over to new array ...
            double[] feat = new double[f.getDoubleHistogram().length];
            System.arraycopy(f.getDoubleHistogram(), 0, feat, 0, feat.length);
            features.add(new DoublePoint(f.getDoubleHistogram()));
        }
    }
    if (features.size() < numClusters) {
        // this cannot work. You need more data points than clusters.
        throw new UnsupportedOperationException("Only " + features.size() + " features found to cluster in "
                + numClusters + ". Try to use less clusters or more images.");
    }
    // do the clustering:
    System.out.println("Number of local features: " + df.format(features.size()));
    System.out.println("Starting clustering ...");
    List<CentroidCluster<DoublePoint>> clusterList = kpp.cluster(features);
    // TODO: Serializing clusters to a file on the disk ...
    System.out.println("Clustering finished, " + clusterList.size() + " clusters found");
    clusters = new LinkedList<double[]>();
    for (Iterator<CentroidCluster<DoublePoint>> iterator = clusterList.iterator(); iterator.hasNext();) {
        CentroidCluster<DoublePoint> centroidCluster = iterator.next();
        clusters.add(centroidCluster.getCenter().getPoint());
    }
    System.out.println("Creating histograms ...");
    int[] tmpHist = new int[numClusters];
    IndexWriter iw = LuceneUtils.createIndexWriter(((DirectoryReader) reader).directory(), true,
            LuceneUtils.AnalyzerType.WhitespaceAnalyzer, 256d);
    // careful: copy reader to RAM for faster access when reading ...
    // reader = IndexReader.open(new RAMDirectory(reader.directory()), true);
    LireFeature f = getFeatureInstance();
    for (int i = 0; i < reader.maxDoc(); i++) {
        try {
            if (reader.hasDeletions() && !liveDocs.get(i))
                continue;
            for (int j = 0; j < tmpHist.length; j++) {
                tmpHist[j] = 0;
            }
            Document d = reader.document(i);
            IndexableField[] fields = d.getFields(localFeatureFieldName);
            // remove the fields if they are already there ...
            d.removeField(visualWordsFieldName);
            d.removeField(localFeatureHistFieldName);
            // find the appropriate cluster for each feature:
            for (int j = 0; j < fields.length; j++) {
                f.setByteArrayRepresentation(fields[j].binaryValue().bytes, fields[j].binaryValue().offset,
                        fields[j].binaryValue().length);
                tmpHist[clusterForFeature(f, clusters)]++;
            }
            // System.out.println(Arrays.toString(tmpHist));
            d.add(new StoredField(localFeatureHistFieldName, SerializationUtils.toByteArray(normalize(tmpHist))));
            quantize(tmpHist);
            d.add(new TextField(visualWordsFieldName, arrayToVisualWordString(tmpHist), Field.Store.YES));
            // remove local features to save some space if requested:
            if (DELETE_LOCAL_FEATURES) {
                d.removeFields(localFeatureFieldName);
            }
            // now write the new one. we use the identifier to update ;)
            iw.updateDocument(new Term(DocumentBuilder.FIELD_NAME_IDENTIFIER,
                    d.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0]), d);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    iw.commit();
    // this one does the "old" commit(), it removes the deleted local features.
    iw.forceMerge(1);
    iw.close();
    System.out.println("Finished.");
}
From source file:org.jlinda.core.coregistration.estimation.utils.MathUtils.java
/**
 * Determines the unique values of v. The values are returned in no particular order.
 *
 * @param v
 * @return the unique values of v in no particular order.
 */
public static double[] uniqueValues(double[] v) {
    // form the values into a set, which automatically removes duplicates
    HashSet<Double> uniqueValues = new HashSet<Double>();
    for (int i = 0; i < v.length; i++) {
        uniqueValues.add(v[i]);
    }
    // convert the set back into an array
    double[] vUnique = new double[uniqueValues.size()];
    int i = 0;
    for (Double uniqueValue : uniqueValues) {
        vUnique[i++] = uniqueValue;
    }
    return vUnique;
}
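A minimal usage sketch of the helper above; the input values are illustrative only, and it is assumed the MathUtils class from this file is on the classpath:

double[] samples = { 1.5, 2.0, 2.0, 3.25, 1.5 };
double[] distinct = MathUtils.uniqueValues(samples);
// Prints 3: the HashSet inside uniqueValues() keeps one copy of each value,
// so its size() determines the length of the result (order is unspecified).
System.out.println(distinct.length);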
From source file:com.att.ajsc.csilogging.util.UtilLib.java
public static String getInput(String pathinfoArr[], int arrLength, String componentType, String pathInfo) {
    Set<String> endpointSet = null;
    /*
     * if (componentType.equalsIgnoreCase("rest")) { endpointSet =
     * DME2Helper.restletEndpointSet; } else { endpointSet =
     * DME2Helper.serviceEndpointSet; }
     */
    HashSet<String> setBasedArrLenth = new HashSet<String>();
    HashMap setBasedCharMap = new HashMap();
    HashSet<String> setBasedValues = new HashSet<String>();
    AntPathMatcher pathMatcher = new AntPathMatcher();
    String inputBasedonLength[];
    int globalvalue = 0;
    for (String s : endpointSet) {
        int dif = StringUtils.getLevenshteinDistance(pathInfo, s);
        if (globalvalue == 0 || globalvalue > dif) {
            globalvalue = dif;
            setBasedCharMap.put(globalvalue, s);
        }
        inputBasedonLength = s.split("\\/");
        int i = inputBasedonLength.length;
        if (arrLength == i) {
            setBasedArrLenth.add(s);
        }
    }
    String inputBasedOnValues[];
    for (String s1 : setBasedArrLenth) {
        inputBasedOnValues = s1.split("\\/");
        int j = 1;
        while (compareValues(pathinfoArr[j], inputBasedOnValues[j])) {
            j++;
            if (j >= arrLength) {
                break;
            }
        }
        if (j == arrLength) {
            setBasedValues.add(s1);
        }
    }
    String input = "";
    if (setBasedValues.size() == 1) {
        for (String s2 : setBasedValues) {
            input = s2;
        }
    } else {
        for (String s2 : setBasedValues) {
            if (pathMatcher.match(pathInfo, s2)) {
                input = s2;
            }
        }
    }
    if (input.isEmpty()) {
        input = (String) setBasedCharMap.get(globalvalue);
    }
    return "/" + componentType + input;
}
From source file:org.lol.reddit.reddit.prepared.RedditPreparedPost.java
public static void onActionMenuItemSelected(final RedditPreparedPost post, final Activity activity,
        final Action action) {

    switch (action) {

        case UPVOTE:
            post.action(activity, RedditAPI.RedditAction.UPVOTE);
            break;
        case DOWNVOTE:
            post.action(activity, RedditAPI.RedditAction.DOWNVOTE);
            break;
        case UNVOTE:
            post.action(activity, RedditAPI.RedditAction.UNVOTE);
            break;
        case SAVE:
            post.action(activity, RedditAPI.RedditAction.SAVE);
            break;
        case UNSAVE:
            post.action(activity, RedditAPI.RedditAction.UNSAVE);
            break;
        case HIDE:
            post.action(activity, RedditAPI.RedditAction.HIDE);
            break;
        case UNHIDE:
            post.action(activity, RedditAPI.RedditAction.UNHIDE);
            break;

        case REPORT:
            new AlertDialog.Builder(activity).setTitle(R.string.action_report)
                    .setMessage(R.string.action_report_sure)
                    .setPositiveButton(R.string.action_report, new DialogInterface.OnClickListener() {
                        public void onClick(final DialogInterface dialog, final int which) {
                            post.action(activity, RedditAPI.RedditAction.REPORT);
                            // TODO update the view to show the result
                            // TODO don't forget, this also hides
                        }
                    }).setNegativeButton(R.string.dialog_cancel, null).show();
            break;

        case EXTERNAL: {
            final Intent intent = new Intent(Intent.ACTION_VIEW);
            String url = (activity instanceof WebViewActivity) ? ((WebViewActivity) activity).getCurrentUrl()
                    : post.url;
            intent.setData(Uri.parse(url));
            activity.startActivity(intent);
            break;
        }

        case SELFTEXT_LINKS: {
            final HashSet<String> linksInComment = LinkHandler
                    .computeAllLinks(StringEscapeUtils.unescapeHtml4(post.src.selftext));

            if (linksInComment.isEmpty()) {
                General.quickToast(activity, R.string.error_toast_no_urls_in_self);
            } else {
                final String[] linksArr = linksInComment.toArray(new String[linksInComment.size()]);

                final AlertDialog.Builder builder = new AlertDialog.Builder(activity);
                builder.setItems(linksArr, new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int which) {
                        LinkHandler.onLinkClicked(activity, linksArr[which], false, post.src);
                        dialog.dismiss();
                    }
                });

                final AlertDialog alert = builder.create();
                alert.setTitle(R.string.action_selftext_links);
                alert.setCanceledOnTouchOutside(true);
                alert.show();
            }

            break;
        }

        case SAVE_IMAGE: {
            final RedditAccount anon = RedditAccountManager.getAnon();

            CacheManager.getInstance(activity)
                    .makeRequest(new CacheRequest(General.uriFromString(post.imageUrl), anon, null,
                            Constants.Priority.IMAGE_VIEW, 0, CacheRequest.DownloadType.IF_NECESSARY,
                            Constants.FileType.IMAGE, false, false, false, activity) {

                        @Override
                        protected void onCallbackException(Throwable t) {
                            BugReportActivity.handleGlobalError(context, t);
                        }

                        @Override
                        protected void onDownloadNecessary() {
                            General.quickToast(context, R.string.download_downloading);
                        }

                        @Override
                        protected void onDownloadStarted() {
                        }

                        @Override
                        protected void onFailure(RequestFailureType type, Throwable t, StatusLine status,
                                String readableMessage) {
                            final RRError error = General.getGeneralErrorForFailure(context, type, t, status,
                                    url.toString());
                            General.showResultDialog(activity, error);
                        }

                        @Override
                        protected void onProgress(long bytesRead, long totalBytes) {
                        }

                        @Override
                        protected void onSuccess(CacheManager.ReadableCacheFile cacheFile, long timestamp,
                                UUID session, boolean fromCache, String mimetype) {

                            File dst = new File(
                                    Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
                                    General.uriFromString(post.imageUrl).getPath());

                            if (dst.exists()) {
                                int count = 0;
                                while (dst.exists()) {
                                    count++;
                                    dst = new File(
                                            Environment.getExternalStoragePublicDirectory(
                                                    Environment.DIRECTORY_PICTURES),
                                            count + "_"
                                                    + General.uriFromString(post.imageUrl).getPath().substring(1));
                                }
                            }

                            try {
                                final InputStream cacheFileInputStream = cacheFile.getInputStream();
                                if (cacheFileInputStream == null) {
                                    notifyFailure(RequestFailureType.CACHE_MISS, null, null,
                                            "Could not find cached image");
                                    return;
                                }
                                General.copyFile(cacheFileInputStream, dst);
                            } catch (IOException e) {
                                notifyFailure(RequestFailureType.STORAGE, e, null, "Could not copy file");
                                return;
                            }

                            activity.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE,
                                    Uri.parse("file://" + dst.getAbsolutePath())));

                            General.quickToast(context,
                                    context.getString(R.string.action_save_image_success) + " "
                                            + dst.getAbsolutePath());
                        }
                    });

            break;
        }

        case SHARE: {
            final Intent mailer = new Intent(Intent.ACTION_SEND);
            mailer.setType("text/plain");
            mailer.putExtra(Intent.EXTRA_SUBJECT, post.title);
            mailer.putExtra(Intent.EXTRA_TEXT, post.url);
            activity.startActivity(Intent.createChooser(mailer, activity.getString(R.string.action_share)));
            break;
        }

        case SHARE_COMMENTS: {
            final Intent mailer = new Intent(Intent.ACTION_SEND);
            mailer.setType("text/plain");
            mailer.putExtra(Intent.EXTRA_SUBJECT, "Comments for " + post.title);
            mailer.putExtra(Intent.EXTRA_TEXT,
                    Constants.Reddit.getUri(Constants.Reddit.PATH_COMMENTS + post.idAlone).toString());
            activity.startActivity(
                    Intent.createChooser(mailer, activity.getString(R.string.action_share_comments)));
            break;
        }

        case COPY: {
            ClipboardManager manager = (ClipboardManager) activity.getSystemService(Context.CLIPBOARD_SERVICE);
            manager.setText(post.url);
            break;
        }

        case GOTO_SUBREDDIT: {
            try {
                final Intent intent = new Intent(activity, PostListingActivity.class);
                intent.setData(SubredditPostListURL.getSubreddit(post.src.subreddit).generateJsonUri());
                activity.startActivityForResult(intent, 1);
            } catch (RedditSubreddit.InvalidSubredditNameException e) {
                Toast.makeText(activity, R.string.invalid_subreddit_name, Toast.LENGTH_LONG).show();
            }
            break;
        }

        case USER_PROFILE:
            LinkHandler.onLinkClicked(activity, new UserProfileURL(post.src.author).toString());
            break;

        case PROPERTIES:
            PostPropertiesDialog.newInstance(post.src).show(activity);
            break;

        case COMMENTS:
            ((RedditPostView.PostSelectionListener) activity).onPostCommentsSelected(post);
            break;

        case LINK:
            ((RedditPostView.PostSelectionListener) activity).onPostSelected(post);
            break;

        case COMMENTS_SWITCH:
            if (!(activity instanceof MainActivity))
                activity.finish();
            ((RedditPostView.PostSelectionListener) activity).onPostCommentsSelected(post);
            break;

        case LINK_SWITCH:
            if (!(activity instanceof MainActivity))
                activity.finish();
            ((RedditPostView.PostSelectionListener) activity).onPostSelected(post);
            break;

        case ACTION_MENU:
            showActionMenu(activity, post);
            break;

        case REPLY:
            final Intent intent = new Intent(activity, CommentReplyActivity.class);
            intent.putExtra("parentIdAndType", post.idAndType);
            activity.startActivity(intent);
            break;
    }
}
From source file:org.apache.hadoop.hbase.master.GroupAssignmentManager.java
/**
 * Balance regions which are in the same group.
 *
 * @param group
 *            the group you want to balance
 */
public static void balanceGroup(String group) {
    HashSet<HServerInfo> servers = groupServers.get(group);
    if (servers == null || servers.size() < 2)
        return;
    HServerInfo[] infos = new HServerInfo[servers.size()];
    servers.toArray(infos);
    HashMap<HRegionInfo, HServerInfo> map = CheckMeta.getServerRegions(infos, true);
    if (map == null || map.size() < 2)
        return;
    doBalance(servers, map);
}
From source file:com.adobe.cq.wcm.core.components.internal.servlets.ClientLibraryCategoriesDataSourceServlet.java
private List<Resource> getCategoryResourceList(@Nonnull SlingHttpServletRequest request,
        LibraryType libraryType) {
    List<Resource> categoryResourceList = new ArrayList<>();
    HashSet<String> clientLibraryCategories = new HashSet<String>();
    for (ClientLibrary library : htmlLibraryManager.getLibraries().values()) {
        for (String category : library.getCategories()) {
            clientLibraryCategories.add(category);
        }
    }
    if (libraryType != null) {
        Collection<ClientLibrary> clientLibraries = htmlLibraryManager.getLibraries(
                clientLibraryCategories.toArray(new String[clientLibraryCategories.size()]), libraryType, true,
                true);
        clientLibraryCategories.clear();
        for (ClientLibrary library : clientLibraries) {
            for (String category : library.getCategories()) {
                clientLibraryCategories.add(category);
            }
        }
    }
    for (String category : clientLibraryCategories) {
        categoryResourceList.add(new CategoryResource(category, request.getResourceResolver()));
    }
    return categoryResourceList;
}
From source file:org.mozilla.gecko.favicons.LoadFaviconTask.java
private HttpResponse tryDownloadRecurse(URI faviconURI, HashSet<String> visited)
        throws URISyntaxException, IOException {
    if (visited.size() == MAX_REDIRECTS_TO_FOLLOW) {
        return null;
    }

    HttpGet request = new HttpGet(faviconURI);
    HttpResponse response = sHttpClient.execute(request);
    if (response == null) {
        return null;
    }

    if (response.getStatusLine() != null) {
        // Was the response a failure?
        int status = response.getStatusLine().getStatusCode();

        // Handle HTTP status codes requesting a redirect.
        if (status >= 300 && status < 400) {
            Header header = response.getFirstHeader("Location");

            // Handle mad webservers.
            if (header == null) {
                return null;
            }

            String newURI = header.getValue();
            if (newURI == null || newURI.equals(faviconURI.toString())) {
                return null;
            }

            if (visited.contains(newURI)) {
                // Already been redirected here - abort.
                return null;
            }
            visited.add(newURI);

            // Sometimes newURI is a value like "/fb/images/favicon.ico" (with no host). In which case, ignore... See #231
            URI uri = new URI(newURI);
            if (uri.getHost() != null) {
                return tryDownloadRecurse(uri, visited);
            }
        }

        if (status >= 400) {
            return null;
        }
    }
    return response;
}
From source file:de.thkwalter.et.ortskurve.OrtskurveController.java
/**
 * This method validates the entered measuring points. If the entered measuring points are not valid, an
 * exception is thrown.
 *
 * @param messpunkte the measuring points
 */
private void messpunkteValidieren(Vector2D[] messpunkte) {
    // All measuring points are added to a HashSet.
    HashSet<Vector2D> messpunktSet = new HashSet<Vector2D>();
    for (Vector2D messpunkt : messpunkte) {
        messpunktSet.add(messpunkt);
    }

    // If the number of points in the HashSet is smaller than the number of entered measuring points, some
    // measuring points were entered more than once.
    if (messpunktSet.size() < messpunkte.length) {
        // The error message for the developer is created and logged.
        String fehlermeldung = "Es wurden " + (messpunkte.length - messpunktSet.size()) + " Messpunkte doppelt "
                + " eingegeben!";
        OrtskurveController.logger.severe(fehlermeldung);

        // The string for the error message is declared.
        String jsfMeldung = "";

        // If exactly one measuring point was entered twice, ...
        if (messpunkte.length - messpunktSet.size() == 1) {
            // The string for the error message is set.
            jsfMeldung = "Sie haben einen Messpunkt doppelt eingegeben! Entfernen Sie bitte den doppelt eingegebenen "
                    + "Messpunkt.";
        }
        // If several measuring points were entered more than once, ...
        else {
            // The string for the error message is set.
            jsfMeldung = "Sie haben " + (messpunkte.length - messpunktSet.size()) + " Messpunkte "
                    + "doppelt eingegeben! Entfernen Sie bitte die doppelt eingegebenen Messpunkte.";
        }

        // The exception is created and thrown.
        throw new ApplicationRuntimeException(jsfMeldung);
    }
}
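The check above works because HashSet.size() counts distinct elements only, so the difference between the array length and the set size is the number of duplicate entries. A minimal, self-contained sketch of that idea; the sample coordinates are illustrative, and commons-math3's Vector2D (which provides value-based equals/hashCode) is assumed:

import java.util.Arrays;
import java.util.HashSet;
import org.apache.commons.math3.geometry.euclidean.twod.Vector2D;

public class DuplicatePointCheck {
    public static void main(String[] args) {
        // Two of the three points are equal.
        Vector2D[] messpunkte = { new Vector2D(1.0, 2.0), new Vector2D(1.0, 2.0), new Vector2D(3.0, 4.0) };
        HashSet<Vector2D> messpunktSet = new HashSet<Vector2D>(Arrays.asList(messpunkte));

        // Prints 1: size() reports distinct points, so the difference is the duplicate count.
        System.out.println(messpunkte.length - messpunktSet.size());
    }
}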
From source file:com.helpinput.spring.refresher.SessiontRefresher.java
@SuppressWarnings("unchecked")
ManagedList<Object> getManageList(DefaultListableBeanFactory dlbf, PropertyValue oldPropertyValue) {
    Set<String> oldClasses = null;
    if (oldPropertyValue != null) {
        Object value = oldPropertyValue.getValue();
        if (value != null && value instanceof ManagedList) {
            ManagedList<Object> real = (ManagedList<Object>) value;
            oldClasses = new HashSet<>(real.size() >>> 1);
            ClassLoader parentClassLoader = ClassUtils.getDefaultClassLoader();
            for (Object object : real) {
                TypedStringValue typedStringValue = (TypedStringValue) object;
                String className = typedStringValue.getValue();
                try {
                    parentClassLoader.loadClass(className);
                    oldClasses.add(className);
                } catch (ClassNotFoundException e) {
                }
            }
        }
    }

    int oldClassSize = (Utils.hasLength(oldClasses) ? oldClasses.size() : 0);

    Map<String, Object> beans = dlbf.getBeansWithAnnotation(Entity.class);
    HashSet<String> totalClasses = new HashSet<>(beans.size() + oldClassSize);

    if (oldClassSize > 0) {
        totalClasses.addAll(oldClasses);
    }

    for (Object entity : beans.values()) {
        String clzName = entity.getClass().getName();
        if (!totalClasses.contains(clzName)) {
            totalClasses.add(clzName);
        }
    }

    ManagedList<Object> list = new ManagedList<>(totalClasses.size());
    for (String clzName : totalClasses) {
        TypedStringValue typedStringValue = new TypedStringValue(clzName);
        list.add(typedStringValue);
    }
    return list;
}