List of usage examples for java.util HashSet contains
public boolean contains(Object o)
From source file:com.thoughtworks.go.plugin.access.configrepo.contract.CRParameter.java
/**
 * Checks that this parameter's name is unique within the given set of already-seen names.
 * On success the name is recorded in {@code keys} for subsequent uniqueness checks.
 *
 * @param keys names seen so far; mutated by this call
 * @return an error message when the name was already present, or {@code null} when unique
 */
public String validateNameUniqueness(HashSet<String> keys) {
    String name = this.getName();
    // Set.add returns false when the element was already present — one lookup
    // instead of the contains()+add() pair.
    if (!keys.add(name)) {
        return String.format("Param name '%s' is not unique.", name);
    }
    return null;
}
From source file:com.thoughtworks.go.plugin.configrepo.contract.CREnvironmentVariable.java
/**
 * Checks that this environment variable's name is unique within the given set of
 * already-seen names. On success the name is recorded in {@code keys}.
 *
 * @param keys names seen so far; mutated by this call
 * @return an error message when the name was already present, or {@code null} when unique
 */
public String validateNameUniqueness(HashSet<String> keys) {
    String name = this.getName();
    // Set.add returns false when the element was already present — one lookup
    // instead of the contains()+add() pair.
    if (!keys.add(name)) {
        return String.format("Environment variable %s defined more than once", name);
    }
    return null;
}
From source file:com.google.gwt.emultest.java.util.HashSetTest.java
/** Verifies that contains() reports an element immediately after add(). */
public void testAddWatch() {
    String element = "watch";
    HashSet<String> set = new HashSet<String>();
    set.add(element);
    assertTrue(set.contains(element));
}
From source file:edu.harvard.i2b2.explorer.serviceClient.PDOQueryClient.java
public static String getlldString(ArrayList<TimelineRow> tlrows, String patientRefId, int minPatient, int maxPatient, boolean bDisplayAll, boolean writeFile, boolean displayDemographics, MainComposite explorer, String filter) { try {//from ww w. ja va 2 s . c om HashSet<String> conceptPaths = new HashSet<String>(); // HashSet<String> providerPaths = new HashSet<String>(); // HashSet<String> visitPaths = new HashSet<String>(); ArrayList<PDOItem> items = new ArrayList<PDOItem>(); for (int i = 0; i < tlrows.size(); i++) { for (int j = 0; j < tlrows.get(i).pdoItems.size(); j++) { PDOItem pdoItem = tlrows.get(i).pdoItems.get(j); String path = pdoItem.fullPath; if (conceptPaths.contains(path)) { //continue; } conceptPaths.add(path); // for(int k=0; k<pdoItem.valDisplayProperties.size(); k++) // { items.add(pdoItem); // } } } PDORequestMessageModel pdoFactory = new PDORequestMessageModel(); String pid = null; if (patientRefId.equalsIgnoreCase("All")) { pid = "-1"; } else { pid = patientRefId; } String xmlStr = pdoFactory.requestXmlMessage(items, pid, new Integer(minPatient), new Integer(maxPatient), false, filter); // explorer.lastRequestMessage(xmlStr); String result = null;// sendPDOQueryRequestREST(xmlStr); if (System.getProperty("webServiceMethod").equals("SOAP")) { result = PDOQueryClient.sendPDOQueryRequestSOAP(xmlStr); } else { result = PDOQueryClient.sendPDOQueryRequestREST(xmlStr); } if (result == null || result.equalsIgnoreCase("memory error")) { return result; } // explorer.lastResponseMessage(result); return new TimelineFactory().generateTimelineData(result, tlrows, writeFile, bDisplayAll, displayDemographics, explorer); } /* * catch(org.apache.axis2.AxisFault e) { e.printStackTrace(); * java.awt.EventQueue.invokeLater(new Runnable() { public void run() { * JOptionPane.showMessageDialog(null, * "Trouble with connection to the remote server, " + * "this is often a network error, please try again", "Network Error", * JOptionPane.INFORMATION_MESSAGE); } }); * * 
return null; } */ catch (Exception e) { e.printStackTrace(); return null; } }
From source file:com.thoughtworks.go.plugin.configrepo.contract.CREnvironment.java
/**
 * Checks that this environment's name is unique within the given set of already-seen names.
 * On success the name is recorded in {@code keys}.
 *
 * @param keys names seen so far; mutated by this call
 * @return an error message when the name was already present, or {@code null} when unique
 */
public String validateNameUniqueness(HashSet<String> keys) {
    String name = this.getName();
    // Set.add returns false when the element was already present — one lookup
    // instead of the contains()+add() pair.
    if (!keys.add(name)) {
        return String.format("Environment %s is defined more than once", name);
    }
    return null;
}
From source file:com.espertech.esper.epl.join.base.JoinSetComposerPrototypeFactory.java
/**
 * Builds the join tuple composer prototype for a statement's join plan.
 *
 * @param statementName name of the statement the composer is built for
 * @param statementId id of the statement the composer is built for
 * @param outerJoinDescList list of descriptors for outer join criteria
 * @param optionalFilterNode filter tree for analysis to build indexes for fast access
 * @param streamTypes types of streams
 * @param streamNames names of streams
 * @param streamJoinAnalysisResult join analysis result forwarded to the prototype
 * @param queryPlanLogging whether to log the generated query plan
 * @param annotations statement annotations (consulted for a query-plan hook)
 * @param historicalViewableDesc describes which streams are historical and their dependencies
 * @param exprEvaluatorContext expression evaluation context
 * @param selectsRemoveStream whether the select clause references the remove stream
 * @param hasAggregations whether the statement aggregates
 * @return composer implementation
 * @throws ExprValidationException is thrown to indicate that validation of view use in joins failed
 */
public static JoinSetComposerPrototype makeComposerPrototype(String statementName, String statementId,
        OuterJoinDesc[] outerJoinDescList, ExprNode optionalFilterNode, EventType[] streamTypes,
        String[] streamNames, StreamJoinAnalysisResult streamJoinAnalysisResult, boolean queryPlanLogging,
        Annotation[] annotations, HistoricalViewableDesc historicalViewableDesc,
        ExprEvaluatorContext exprEvaluatorContext, boolean selectsRemoveStream, boolean hasAggregations)
        throws ExprValidationException {
    // Determine if there is a historical stream, and what dependencies exist
    DependencyGraph historicalDependencyGraph = new DependencyGraph(streamTypes.length, false);
    for (int i = 0; i < streamTypes.length; i++) {
        if (historicalViewableDesc.getHistorical()[i]) {
            SortedSet<Integer> streamsThisStreamDependsOn = historicalViewableDesc
                    .getDependenciesPerHistorical()[i];
            historicalDependencyGraph.addDependency(i, streamsThisStreamDependsOn);
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("Dependency graph: " + historicalDependencyGraph);
    }

    // Handle a join with a database or other historical data source for 2 streams
    // (dedicated two-stream composer; the general planner below handles the rest).
    if ((historicalViewableDesc.isHasHistorical()) && (streamTypes.length == 2)) {
        return makeComposerHistorical2Stream(outerJoinDescList, optionalFilterNode, streamTypes,
                historicalViewableDesc, queryPlanLogging, exprEvaluatorContext);
    }

    boolean isOuterJoins = !OuterJoinDesc.consistsOfAllInnerJoins(outerJoinDescList);

    // Query graph for graph relationships between streams/historicals.
    // For outer joins the query graph will just contain outer join relationships.
    QueryGraph queryGraph = new QueryGraph(streamTypes.length);
    if (outerJoinDescList.length > 0) {
        OuterJoinAnalyzer.analyze(outerJoinDescList, queryGraph);
        if (log.isDebugEnabled()) {
            log.debug(".makeComposer After outer join queryGraph=\n" + queryGraph);
        }
    }

    // Let the query graph reflect the where-clause
    if (optionalFilterNode != null) {
        // Analyze relationships between streams using the optional filter expression.
        // Relationships are properties in AND and EQUALS nodes of joins.
        FilterExprAnalyzer.analyze(optionalFilterNode, queryGraph, isOuterJoins);
        if (log.isDebugEnabled()) {
            log.debug(".makeComposer After filter expression queryGraph=\n" + queryGraph);
        }

        // Add navigation entries based on key and index property equivalency (a=b, b=c follows a=c)
        QueryGraph.fillEquivalentNav(streamTypes, queryGraph);
        if (log.isDebugEnabled()) {
            log.debug(".makeComposer After fill equiv. nav. queryGraph=\n" + queryGraph);
        }
    }

    // Historical index lists, populated by the planner per historical stream
    HistoricalStreamIndexList[] historicalStreamIndexLists = new HistoricalStreamIndexList[streamTypes.length];

    QueryPlan queryPlan = QueryPlanBuilder.getPlan(streamTypes, outerJoinDescList, queryGraph, streamNames,
            historicalViewableDesc, historicalDependencyGraph, historicalStreamIndexLists,
            streamJoinAnalysisResult, queryPlanLogging, annotations, exprEvaluatorContext);

    // remove unused indexes - consider all streams or all unidirectional
    // First collect the names of every index any plan node references ...
    HashSet<String> usedIndexes = new HashSet<String>();
    QueryPlanIndex[] indexSpecs = queryPlan.getIndexSpecs();
    for (int streamNum = 0; streamNum < queryPlan.getExecNodeSpecs().length; streamNum++) {
        QueryPlanNode planNode = queryPlan.getExecNodeSpecs()[streamNum];
        if (planNode != null) {
            planNode.addIndexes(usedIndexes);
        }
    }
    // ... then drop every index item not referenced (keys copied to an array first, so the
    // backing map can be mutated safely while iterating the names).
    for (QueryPlanIndex indexSpec : indexSpecs) {
        if (indexSpec == null) {
            continue;
        }
        Map<String, QueryPlanIndexItem> items = indexSpec.getItems();
        String[] indexNames = items.keySet().toArray(new String[items.size()]);
        for (String indexName : indexNames) {
            if (!usedIndexes.contains(indexName)) {
                items.remove(indexName);
            }
        }
    }

    if (queryPlanLogging && queryPlanLog.isInfoEnabled()) {
        queryPlanLog.info("Query plan: " + queryPlan.toQueryPlan());
        QueryPlanIndexHook hook = QueryPlanIndexHookUtil.getHook(annotations);
        if (hook != null) {
            hook.join(queryPlan);
        }
    }

    boolean joinRemoveStream = selectsRemoveStream || hasAggregations;
    return new JoinSetComposerPrototypeImpl(statementName, statementId, outerJoinDescList,
            optionalFilterNode, streamTypes, streamNames, streamJoinAnalysisResult, annotations,
            historicalViewableDesc, exprEvaluatorContext, indexSpecs, queryPlan,
            historicalStreamIndexLists, joinRemoveStream, isOuterJoins);
}
From source file:org.magnum.mobilecloud.video.VideoSvcCtrl.java
@PreAuthorize("hasRole(USER)") @RequestMapping(method = RequestMethod.POST, value = VideoSvcApi.VIDEO_SVC_PATH + "/{id}/unlike") public @ResponseBody void unlikeVideo(@PathVariable("id") long id, Principal principal, HttpServletResponse response) {// w w w . j a v a 2 s. c o m Video v = videoRepo.findOne(id); if (v != null) { HashSet<String> likers = v.getLikers(); if (likers.contains(principal.getName())) { likers.remove(principal.getName()); videoRepo.save(v); response.setStatus(HttpServletResponse.SC_OK); } else response.setStatus(HttpServletResponse.SC_BAD_REQUEST); } else response.setStatus(HttpServletResponse.SC_NOT_FOUND); }
From source file:org.magnum.mobilecloud.video.VideoSvcCtrl.java
@PreAuthorize("hasRole(USER)") @RequestMapping(method = RequestMethod.POST, value = VideoSvcApi.VIDEO_SVC_PATH + "/{id}/like") public @ResponseBody void likeVideo(@PathVariable("id") long id, Principal principal, HttpServletResponse response) {// w w w .j a v a 2s .c o m Video v = videoRepo.findOne(id); if (v != null) { HashSet<String> likers = v.getLikers(); if (likers.contains(principal.getName())) response.setStatus(HttpServletResponse.SC_BAD_REQUEST); else { likers.add(principal.getName()); videoRepo.save(v); response.setStatus(HttpServletResponse.SC_OK); } } else response.setStatus(HttpServletResponse.SC_NOT_FOUND); }
From source file:com.alibaba.cobar.manager.web.PermissionInterceptor.java
/**
 * Decides whether the current request is allowed for the session's user.
 * Sessions without a user are let through (handled elsewhere); otherwise the request's
 * servlet path must appear in the URI set registered for the user's role.
 */
private boolean permissioned(HttpServletRequest request) {
    UserDO user = (UserDO) request.getSession().getAttribute("user");
    if (user == null) {
        return true;
    }
    String userType = user.getUser_role().equals(ConstantDefine.SYSTEM_ADMIN)
            ? ConstantDefine.SYSTEM_ADMIN
            : ConstantDefine.CLUSTER_ADMIN;
    HashSet<String> allowed = (HashSet<String>) nonMatchURIMap.get(userType);
    return allowed.contains(request.getServletPath().trim());
}
From source file:mobisocial.musubi.util.OGUtil.java
public static OGData getOrGuess(String url) { DefaultHttpClient hc = new DefaultHttpClient(); HttpResponse res;/* ww w . j a v a2 s.c o m*/ try { HttpGet hg = new HttpGet(url); res = hc.execute(hg); } catch (Exception e) { Log.e(TAG, "unable to fetch page to get og tags", e); return null; } String location = url; //TODO: if some kind of redirect magic happened, then //make the location match that OGData og = new OGData(); HttpEntity he = res.getEntity(); Header content_type = he.getContentType(); //TODO: check the content directly if they forget the type header if (content_type == null || content_type.getValue() == null) { Log.e(TAG, "page missing content type ..abandoning: " + url); return null; } og.mMimeType = content_type.getValue(); //just make a thumbnail if the shared item is an image if (og.mMimeType.startsWith("image/")) { Bitmap b; try { b = BitmapFactory.decodeStream(he.getContent()); } catch (Exception e) { return null; } //TODO: scaling int w = b.getWidth(); int h = b.getHeight(); if (w > h) { h = h * 200 / w; w = 200; } else { w = w * 200 / h; h = 200; } Bitmap b2 = Bitmap.createScaledBitmap(b, w, h, true); b.recycle(); b = b2; ByteArrayOutputStream baos = new ByteArrayOutputStream(); b.compress(CompressFormat.PNG, 100, baos); og.mImage = baos.toByteArray(); b.recycle(); return og; } //if its not html, we can't extract more details, the caller //should rely on what they already know. 
if (!og.mMimeType.startsWith("text/html") && !og.mMimeType.startsWith("application/xhtml")) { Log.e(TAG, "shared content is not a known type for meta data processing " + og.mMimeType); return og; } String html; try { html = IOUtils.toString(he.getContent()); } catch (Exception e) { Log.e(TAG, "failed to read html content", e); return og; } Matcher m = sTitleRegex.matcher(html); if (m.find()) { og.mTitle = StringEscapeUtils.unescapeHtml4(m.group(1)); } m = sMetaRegex.matcher(html); int offset = 0; String raw_description = null; while (m.find(offset)) { try { String meta_tag = m.group(); Matcher mp = sPropertyOfMeta.matcher(meta_tag); if (!mp.find()) continue; String type = mp.group(1); type = type.substring(1, type.length() - 1); Matcher md = sContentOfMeta.matcher(meta_tag); if (!md.find()) continue; String data = md.group(1); //remove quotes data = data.substring(1, data.length() - 1); data = StringEscapeUtils.unescapeHtml4(data); if (type.equalsIgnoreCase("og:title")) { og.mTitle = data; } else if (type.equalsIgnoreCase("og:image")) { HttpResponse resi; try { HttpGet hgi = new HttpGet(data); resi = hc.execute(hgi); } catch (Exception e) { Log.e(TAG, "unable to fetch og image url", e); continue; } HttpEntity hei = resi.getEntity(); if (!hei.getContentType().getValue().startsWith("image/")) { Log.e(TAG, "image og tag points to non image data" + hei.getContentType().getValue()); } try { Bitmap b; try { b = BitmapFactory.decodeStream(hei.getContent()); } catch (Exception e) { return null; } //TODO: scaling int w = b.getWidth(); int h = b.getHeight(); if (w > h) { h = h * Math.min(200, w) / w; w = Math.min(200, w); } else { w = w * Math.min(200, h) / h; h = Math.min(200, h); } Bitmap b2 = Bitmap.createScaledBitmap(b, w, h, true); b.recycle(); b = b2; ByteArrayOutputStream baos = new ByteArrayOutputStream(); b.compress(CompressFormat.PNG, 100, baos); b.recycle(); og.mImage = baos.toByteArray(); } catch (Exception e) { Log.e(TAG, "failed to fetch image for og", e); 
continue; } } else if (type.equalsIgnoreCase("description")) { raw_description = data; } else if (type.equalsIgnoreCase("og:description")) { og.mDescription = data; } else if (type.equalsIgnoreCase("og:url")) { og.mUrl = data; } } finally { offset = m.end(); } } HashSet<String> already_fetched = new HashSet<String>(); if (og.mImage == null) { int max_area = 0; m = sImageRegex.matcher(html); int img_offset = 0; while (m.find(img_offset)) { try { String img_tag = m.group(); Matcher ms = sSrcOfImage.matcher(img_tag); if (!ms.find()) continue; String img_src = ms.group(1); img_src = img_src.substring(1, img_src.length() - 1); img_src = StringEscapeUtils.unescapeHtml4(img_src); //don't fetch an image twice (like little 1x1 images) if (already_fetched.contains(img_src)) continue; already_fetched.add(img_src); HttpResponse resi; try { HttpGet hgi = new HttpGet(new URL(new URL(location), img_src).toString()); resi = hc.execute(hgi); } catch (Exception e) { Log.e(TAG, "unable to fetch image url for biggest image search" + img_src, e); continue; } HttpEntity hei = resi.getEntity(); if (hei == null) { Log.w(TAG, "image missing en ..trying entity response: " + url); continue; } Header content_type_image = hei.getContentType(); if (content_type_image == null || content_type_image.getValue() == null) { Log.w(TAG, "image missing content type ..trying anyway: " + url); } if (!content_type_image.getValue().startsWith("image/")) { Log.w(TAG, "image tag points to non image data " + hei.getContentType().getValue() + " " + img_src); } try { Bitmap b; try { b = BitmapFactory.decodeStream(hei.getContent()); } catch (Exception e) { return null; } //TODO: scaling int w = b.getWidth(); int h = b.getHeight(); if (w * h <= max_area) { continue; } if (w < 32 || h < 32) { //skip dinky crap continue; } if (w > h) { h = h * Math.min(200, w) / w; w = Math.min(200, w); } else { w = w * Math.min(200, h) / h; h = Math.min(200, h); } Bitmap b2 = Bitmap.createScaledBitmap(b, w, h, true); b.recycle(); b 
= b2; ByteArrayOutputStream baos = new ByteArrayOutputStream(); b.compress(CompressFormat.PNG, 100, baos); og.mImage = baos.toByteArray(); b.recycle(); max_area = w * h; } catch (Exception e) { Log.e(TAG, "failed to fetch image for og", e); continue; } } finally { img_offset = m.end(); } } } if (og.mDescription == null) og.mDescription = raw_description; return og; }