List of usage examples for java.util.HashSet.contains
public boolean contains(Object o)
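Before the per-project examples below, a minimal, self-contained sketch of the call itself (the class name and set contents are hypothetical, chosen only for illustration):

import java.util.HashSet;

public class ContainsDemo {
    public static void main(String[] args) {
        HashSet<String> seen = new HashSet<>();
        seen.add("alpha");
        seen.add("beta");

        // contains(Object o) returns true only if an equal element is present
        System.out.println(seen.contains("alpha")); // true
        System.out.println(seen.contains("gamma")); // false

        // membership is decided by hashCode()/equals(), not by object identity
        System.out.println(seen.contains(new String("beta"))); // true
    }
}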
From source file:net.sf.taverna.raven.plugins.PluginManager.java
private void enablePluginAndDeps(Plugin plugin, HashSet<Plugin> alreadyEnabled) {
    if (alreadyEnabled.contains(plugin)) {
        return;
    }
    alreadyEnabled.add(plugin);
    // Also enable dependencies
    pluginDepLoop: for (PluginId pluginDep : plugin.getPluginDependencies()) {
        for (Plugin updatedDep : plugins) {
            if (satisfiesDependency(updatedDep, pluginDep)) {
                enablePluginAndDeps(updatedDep, alreadyEnabled);
                continue pluginDepLoop;
            }
        }
        logger.warn("Plugin " + plugin + " depends on unknown plugin " + pluginDep);
        // TODO: Set disabled? (but without invoking the event handler again!)
    }
    if (!plugins.contains(plugin)) {
        return;
    }
    for (Artifact artifact : plugin.getProfile().getArtifacts()) {
        profile.addArtifact(artifact);
        if (plugin.getProfile().getSystemArtifacts().contains(artifact)) {
            try {
                Bootstrap.addSystemArtifact(artifact.getGroupId(), artifact.getArtifactId(),
                        artifact.getVersion());
            } catch (MalformedURLException e) {
                logger.error("Error composing url for artifact " + artifact, e);
            }
        }
    }
}
From source file:chanupdater.ChanUpdater.java
private void doUpdates() throws SQLException, IOException, FileNotFoundException, LdvTableException {
    if (verbose > 1) {
        System.out.println("Starting update process.");
    }
    ArrayList<ChanListSummary> chanLists;
    HashSet<ChanInfo> del = new HashSet<>();
    totalAdds = 0;
    totalDels = 0;

    for (ChanListSummary cls : cLists) {
        cls.printSummary();
        String server = cls.getServer();
        String cTyp = cls.getcType();
        if (verbose > 2) {
            System.out.format("Check %1$s for type:%2$s ", server, cTyp);
        }
        TreeMap<String, HashSet<ChanInfo>> chanSets = cls.getChanSets();

        for (Entry<String, HashSet<ChanInfo>> ent : chanSets.entrySet()) {
            del.clear();
            HashSet<ChanInfo> newChans = ent.getValue();
            String ifo = ent.getKey();
            if (verbose > 1) {
                System.out.format("Server: %1$s, cType: %2$s, IFO: %3$s, count: %4$,d\n",
                        cls.getServer(), cls.getcType(), ifo, newChans.size());
            }
            String namePat = ifo + ":%";
            TreeSet<ChanInfo> oldSet = chnTbl.getAsSet(server, namePat, cTyp, newChans.size());

            for (ChanInfo old : oldSet) {
                boolean gotit = newChans.contains(old);
                if (gotit) {
                    // it's in both old and new
                    newChans.remove(old);
                } else {
                    if (old.isAvailable()) {
                        // only in the old set: mark it for deletion
                        del.add(old);
                    }
                }
            }
            totalAdds += newChans.size();
            totalDels += del.size();

            if ((newChans.size() > 0 || del.size() > 0)) {
                if (verbose > 1) {
                    System.out.format(" add: %1$d, del %2$d\n", newChans.size(), del.size());
                }
                for (ChanInfo ci : newChans) {
                    if (verbose > 2) {
                        System.out.print("Add: ");
                        ci.print();
                    }
                    chnTbl.insertNewBulk(ci);
                }
                if (newChans.size() > 0) {
                    chnTbl.insertNewBulk(null); // complete the bulk insert
                }
                if (doDeletes) {
                    for (ChanInfo ci : del) {
                        if (verbose > 2) {
                            System.out.print("Del: ");
                            ci.print();
                        }
                        chnTbl.setAvailable(ci.getId(), false);
                    }
                }
            } else if (verbose > 1) {
                System.out.println(" no updates.");
            }
        }
        if (verbose > 0 && totalAdds + totalDels > 0) {
            System.out.format("Total additions: %1$,d, total removals: %2$,d, Server: %3$s, type: %4$s%n",
                    totalAdds, totalDels, cls.getServer(), cls.getcType());
        } else if (verbose > 1 && totalAdds + totalDels == 0) {
            // use format() so that %n is actually expanded
            System.out.format("No changes to channel table.%n");
        }
    }
}
From source file:maui.main.MauiTopicExtractor.java
/**
 * Collects the file names.
 */
public HashSet<String> collectStems() throws Exception {
    HashSet<String> stems = new HashSet<String>();
    try {
        File dir = new File(inputDirectoryName);
        for (String file : dir.list()) {
            if (file.endsWith(".txt")) {
                String stem = file.substring(0, file.length() - 4);
                if (!stems.contains(stem)) {
                    stems.add(stem);
                }
            }
        }
    } catch (Exception e) {
        throw new Exception("Problem reading directory " + inputDirectoryName);
    }
    return stems;
}
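The contains-then-add guard above is a common idiom, and since HashSet rejects duplicates anyway and add(E) reports whether the element was absent, the check can be folded into the add call. A minimal sketch of that variant (the file names are made up for illustration):

import java.util.HashSet;

public class StemDemo {
    public static void main(String[] args) {
        HashSet<String> stems = new HashSet<>();
        for (String file : new String[] { "doc1.txt", "doc2.txt", "doc1.txt" }) {
            if (file.endsWith(".txt")) {
                String stem = file.substring(0, file.length() - 4);
                if (stems.add(stem)) {
                    // add() returned true, so this stem had not been seen before
                    System.out.println("new stem: " + stem);
                }
            }
        }
    }
}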
From source file:es.caib.seycon.ng.servei.AutoritzacioServiceImpl.java
private Collection getCodiGrupsFillsGrup(String codiGrup) {
    LinkedList l_grupsUsuari = new LinkedList();
    l_grupsUsuari.add(codiGrup);
    HashSet grupsFills = new HashSet();
    String codiGrupAnalitzat = null;
    while ((codiGrupAnalitzat = (String) l_grupsUsuari.poll()) != null) {
        if (!grupsFills.contains(codiGrupAnalitzat)) { // if we have not analysed it yet
            grupsFills.add(codiGrupAnalitzat);
            Collection fills = getGrupEntityDao().findSubGrupsByCodi(codiGrupAnalitzat);
            if (fills != null)
                for (Iterator git = fills.iterator(); git.hasNext();) {
                    GrupEntity fg = (GrupEntity) git.next();
                    if (!grupsFills.contains(fg.getCodi())) // if not already analysed
                        l_grupsUsuari.add(fg.getCodi());
                }
        }
    }
    return grupsFills;
}
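The method above uses a HashSet of visited group codes so that a breadth-first walk over the group hierarchy never revisits a node. A stripped-down sketch of the same pattern over a hypothetical child-lookup map (names and data are invented for illustration):

import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;

public class GroupWalk {
    // Collects the root plus every transitive child, visiting each code once.
    static HashSet<String> collectDescendants(String root, Map<String, List<String>> children) {
        HashSet<String> visited = new HashSet<>();
        Queue<String> pending = new ArrayDeque<>();
        pending.add(root);
        String current;
        while ((current = pending.poll()) != null) {
            if (visited.contains(current)) {
                continue; // already analysed
            }
            visited.add(current);
            for (String child : children.getOrDefault(current, List.of())) {
                if (!visited.contains(child)) {
                    pending.add(child);
                }
            }
        }
        return visited;
    }

    public static void main(String[] args) {
        Map<String, List<String>> children = Map.of(
                "root", List.of("a", "b"),
                "a", List.of("b", "c"));
        System.out.println(collectDescendants("root", children)); // root, a, b, c in some order
    }
}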
From source file:com.tremolosecurity.proxy.filter.PostProcess.java
protected void setHeadersCookies(HttpFilterRequest req, UrlHolder holder, HttpRequestBase method,
        String finalURL) throws Exception {
    Iterator<String> names;
    names = req.getHeaderNames();

    String cookieName = null;
    URL url = new URL(finalURL);

    while (names.hasNext()) {
        String name = names.next();

        if (name.equalsIgnoreCase("Cookie")) {
            cookieName = name;
            continue;
        }

        if (logger.isDebugEnabled()) {
            logger.debug("Header : " + name);
        }

        Attribute attrib = req.getHeader(name);
        Iterator<String> attrVals = attrib.getValues().iterator();
        while (attrVals.hasNext()) {
            String val = attrVals.next();

            if (name.equalsIgnoreCase("Content-Type")) {
                continue;
            } else if (name.equalsIgnoreCase("If-Range")) {
                continue;
            } else if (name.equalsIgnoreCase("Range")) {
                continue;
            } else if (name.equalsIgnoreCase("If-None-Match")) {
                continue;
            }

            if (name.equalsIgnoreCase("HOST")) {
                if (holder.isOverrideHost()) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("Final URL : '" + finalURL + "'");
                    }
                    val = url.getHost();
                    if (url.getPort() != -1) {
                        StringBuffer b = new StringBuffer();
                        b.append(val).append(":").append(url.getPort());
                        val = b.toString();
                    }
                }
            } else if (name.equalsIgnoreCase("Referer")) {
                if (holder.isOverrideReferer()) {
                    URL origRef = new URL(val);
                    StringBuffer newRef = new StringBuffer();
                    newRef.append(url.getProtocol()).append("://").append(url.getHost());
                    if (url.getPort() != -1) {
                        newRef.append(':').append(url.getPort());
                    }
                    newRef.append(origRef.getPath());
                    if (origRef.getQuery() != null) {
                        newRef.append('?').append(origRef.getQuery());
                    }
                    if (logger.isDebugEnabled()) {
                        logger.debug("Final Ref : '" + newRef.toString() + "'");
                    }
                    val = newRef.toString();
                }
            }

            if (this.addHeader(name)) {
                if (logger.isDebugEnabled()) {
                    logger.debug("Header Added - '" + name + "'='" + val + "'");
                }
                method.addHeader(new BasicHeader(attrib.getName(), val));
            }
        }
    }

    HashMap<String, Attribute> fromResults = (HashMap<String, Attribute>) req
            .getAttribute(AzSys.AUTO_IDM_HTTP_HEADERS);
    if (fromResults != null) {
        names = fromResults.keySet().iterator();
        while (names.hasNext()) {
            String name = names.next();
            method.removeHeaders(name);
            Attribute attrib = fromResults.get(name);
            Iterator<String> attrVals = attrib.getValues().iterator();
            while (attrVals.hasNext()) {
                String val = attrVals.next();
                if (logger.isDebugEnabled()) {
                    logger.debug("Header Added - '" + name + "'='" + val + "'");
                }
                method.addHeader(new BasicHeader(name, val));
            }
        }
    }

    String sessionCookieName = "";
    if (holder.getApp().getCookieConfig() != null) {
        sessionCookieName = holder.getApp().getCookieConfig().getSessionCookieName();
    }

    HashSet<String> toRemove = new HashSet<String>();
    toRemove.add(sessionCookieName);
    toRemove.add("autoIdmSessionCookieName");
    toRemove.add("autoIdmAppName");
    toRemove.add("JSESSIONID");

    names = req.getCookieNames().iterator();

    StringBuffer cookieHeader = new StringBuffer();
    boolean isFirst = true;

    while (names.hasNext()) {
        String name = names.next();
        if (toRemove.contains(name)) {
            continue;
        }

        ArrayList<Cookie> cookies = req.getCookies(name);
        Iterator<Cookie> itc = cookies.iterator();
        while (itc.hasNext()) {
            Cookie cookie = itc.next();

            String cookieFinalName;
            if (cookie.getName().startsWith("JSESSIONID")) {
                String host = cookie.getName().substring(cookie.getName().indexOf('-') + 1);
                host = host.replaceAll("[|]", " ");
                if (!holder.getApp().getName().equalsIgnoreCase(host)) {
                    continue;
                }
                cookieFinalName = "JSESSIONID";
            } else {
                cookieFinalName = cookie.getName();
            }

            String val = cookie.getValue();
            if (logger.isDebugEnabled()) {
                logger.debug("Cookie Added - '" + name + "'='" + val + "'");
            }
            cookieHeader.append(cookieFinalName).append('=').append(val).append("; ");
        }
    }

    if (cookieHeader.length() > 0) {
        if (cookieName == null) {
            cookieName = "Cookie";
        }
        method.addHeader(new BasicHeader(cookieName, cookieHeader.toString()));
    }
}
From source file:com.floreantpos.model.Ticket.java
public double calculateDiscountFromType(TicketDiscount coupon, double subtotal) {
    List<TicketItem> ticketItems = getTicketItems();
    double discount = 0;
    int type = coupon.getType();
    double couponValue = coupon.getValue();

    switch (type) {
    case Discount.FIXED_PER_ORDER:
        discount += couponValue;
        break;

    case Discount.FIXED_PER_CATEGORY:
        HashSet<Integer> categoryIds = new HashSet<Integer>();
        for (TicketItem item : ticketItems) {
            Integer itemId = item.getItemId();
            if (!categoryIds.contains(itemId)) {
                discount += couponValue;
                categoryIds.add(itemId);
            }
        }
        break;

    case Discount.FIXED_PER_ITEM:
        for (TicketItem item : ticketItems) {
            discount += (couponValue * item.getItemCount());
        }
        break;

    case Discount.PERCENTAGE_PER_ORDER:
        discount += ((subtotal * couponValue) / 100.0);
        break;

    case Discount.PERCENTAGE_PER_CATEGORY:
        categoryIds = new HashSet<Integer>();
        for (TicketItem item : ticketItems) {
            Integer itemId = item.getItemId();
            if (!categoryIds.contains(itemId)) {
                discount += ((item.getUnitPrice() * couponValue) / 100.0);
                categoryIds.add(itemId);
            }
        }
        break;

    case Discount.PERCENTAGE_PER_ITEM:
        for (TicketItem item : ticketItems) {
            discount += ((item.getSubtotalAmountWithoutModifiers() * couponValue) / 100.0);
        }
        break;

    case Discount.FREE_AMOUNT:
        discount += couponValue;
        break;
    }

    return discount;
}
From source file:amie.keys.CSAKey.java
public HashSet<HashSet<Integer>> buidPropertyGraph(int property) {
    HashSet<HashSet<Integer>> propertyPowerSets = new HashSet<>();
    for (HashSet<Integer> nonKeyInt : nonKeysInt) {
        if (nonKeyInt.contains(property)) {
            HashSet<Integer> remainingSet = new HashSet<>(nonKeyInt);
            remainingSet.addAll(nonKeyInt);
            remainingSet.remove(property);
            propertyPowerSets.addAll(powerSet(remainingSet));
        }
    }
    return propertyPowerSets;
}
From source file:com.ibm.bi.dml.runtime.matrix.mapred.MapperBase.java
public void configure(JobConf job) {
    super.configure(job);

    //get the indexes that this matrix file represents,
    //since one matrix file can occur multiple times in a statement
    try {
        representativeMatrixes = MRJobConfiguration.getInputMatrixIndexesInMapper(job);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }

    //get input converter information
    inputConverter = MRJobConfiguration.getInputConverter(job, representativeMatrixes.get(0));

    DataGenMRInstruction[] allDataGenIns;
    MRInstruction[] allMapperIns;
    ReblockInstruction[] allReblockIns;
    CSVReblockInstruction[] allCSVReblockIns;

    try {
        allDataGenIns = MRJobConfiguration.getDataGenInstructions(job);

        //parse the instructions on the matrices that this file represents
        allMapperIns = MRJobConfiguration.getInstructionsInMapper(job);

        //parse the reblock instructions on the matrices that this file represents
        allReblockIns = MRJobConfiguration.getReblockInstructions(job);

        allCSVReblockIns = MRJobConfiguration.getCSVReblockInstructions(job);
    } catch (DMLUnsupportedOperationException e) {
        throw new RuntimeException(e);
    } catch (DMLRuntimeException e) {
        throw new RuntimeException(e);
    }

    //get all the output indexes
    byte[] outputs = MRJobConfiguration.getOutputIndexesInMapper(job);

    //get the dimension of all the representative matrices
    rlens = new long[representativeMatrixes.size()];
    clens = new long[representativeMatrixes.size()];
    for (int i = 0; i < representativeMatrixes.size(); i++) {
        rlens[i] = MRJobConfiguration.getNumRows(job, representativeMatrixes.get(i));
        clens[i] = MRJobConfiguration.getNumColumns(job, representativeMatrixes.get(i));
    }

    //get the block sizes of the representative matrices
    brlens = new int[representativeMatrixes.size()];
    bclens = new int[representativeMatrixes.size()];
    for (int i = 0; i < representativeMatrixes.size(); i++) {
        brlens[i] = MRJobConfiguration.getNumRowsPerBlock(job, representativeMatrixes.get(i));
        bclens[i] = MRJobConfiguration.getNumColumnsPerBlock(job, representativeMatrixes.get(i));
    }

    rbounds = new long[representativeMatrixes.size()];
    cbounds = new long[representativeMatrixes.size()];
    lastblockrlens = new int[representativeMatrixes.size()];
    lastblockclens = new int[representativeMatrixes.size()];

    //calculate upper boundaries for key value pairs
    if (valueClass.equals(MatrixBlock.class)) {
        for (int i = 0; i < representativeMatrixes.size(); i++) {
            rbounds[i] = (long) Math.ceil((double) rlens[i] / (double) brlens[i]);
            cbounds[i] = (long) Math.ceil((double) clens[i] / (double) bclens[i]);

            lastblockrlens[i] = (int) (rlens[i] % brlens[i]);
            lastblockclens[i] = (int) (clens[i] % bclens[i]);
            if (lastblockrlens[i] == 0)
                lastblockrlens[i] = brlens[i];
            if (lastblockclens[i] == 0)
                lastblockclens[i] = bclens[i];

            /* what is this for????
               // DRB: the row indexes need to be fixed
               rbounds[i] = rlens[i]; */
        }
    } else {
        for (int i = 0; i < representativeMatrixes.size(); i++) {
            rbounds[i] = rlens[i];
            cbounds[i] = clens[i];
            lastblockrlens[i] = 1;
            lastblockclens[i] = 1;
        }
    }

    //load data from distributed cache (if required, reuse if jvm_reuse)
    try {
        setupDistCacheFiles(job);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }

    //collect unary instructions for each representative matrix
    HashSet<Byte> set = new HashSet<Byte>();
    for (int i = 0; i < representativeMatrixes.size(); i++) {
        set.clear();
        set.add(representativeMatrixes.get(i));

        //collect the relevant datagen instructions for this representative matrix
        ArrayList<DataGenMRInstruction> dataGensForThisMatrix = new ArrayList<DataGenMRInstruction>();
        if (allDataGenIns != null) {
            for (DataGenMRInstruction ins : allDataGenIns) {
                if (set.contains(ins.getInput())) {
                    dataGensForThisMatrix.add(ins);
                    set.add(ins.output);
                }
            }
        }
        if (dataGensForThisMatrix.size() > 1)
            throw new RuntimeException("only expects at most one rand instruction per input");
        if (dataGensForThisMatrix.isEmpty())
            dataGen_instructions.add(null);
        else
            dataGen_instructions.add(dataGensForThisMatrix.get(0));

        //collect the relevant instructions for this representative matrix
        ArrayList<MRInstruction> opsForThisMatrix = new ArrayList<MRInstruction>();
        if (allMapperIns != null) {
            for (MRInstruction ins : allMapperIns) {
                try {
                    boolean toAdd = false;
                    for (byte input : ins.getInputIndexes())
                        if (set.contains(input)) {
                            toAdd = true;
                            break;
                        }
                    if (toAdd) {
                        opsForThisMatrix.add(ins);
                        set.add(ins.output);
                    }
                } catch (DMLRuntimeException e) {
                    throw new RuntimeException(e);
                }
            }
        }
        mapper_instructions.add(opsForThisMatrix);

        //collect the relevant reblock instructions for this representative matrix
        ArrayList<ReblockInstruction> reblocksForThisMatrix = new ArrayList<ReblockInstruction>();
        if (allReblockIns != null) {
            for (ReblockInstruction ins : allReblockIns) {
                if (set.contains(ins.input)) {
                    reblocksForThisMatrix.add(ins);
                    set.add(ins.output);
                }
            }
        }
        reblock_instructions.add(reblocksForThisMatrix);

        //collect the relevant CSV reblock instructions for this representative matrix
        ArrayList<CSVReblockInstruction> csvReblocksForThisMatrix = new ArrayList<CSVReblockInstruction>();
        if (allCSVReblockIns != null) {
            for (CSVReblockInstruction ins : allCSVReblockIns) {
                if (set.contains(ins.input)) {
                    csvReblocksForThisMatrix.add(ins);
                    set.add(ins.output);
                }
            }
        }
        csv_reblock_instructions.add(csvReblocksForThisMatrix);

        //collect the output indexes for this representative matrix
        ArrayList<Byte> outsForThisMatrix = new ArrayList<Byte>();
        for (byte output : outputs) {
            if (set.contains(output))
                outsForThisMatrix.add(output);
        }
        outputIndexes.add(outsForThisMatrix);
    }
}
From source file:edu.cornell.mannlib.vitro.webapp.edit.n3editing.configuration.generators.ManageLabelsForIndividualGenerator.java
private List<HashMap<String, String>> getAvailableLocales(List<HashMap<String, String>> allLocales,
        HashSet<String> existingLabelsLanguageNames) {
    List<HashMap<String, String>> availableLocales = new ArrayList<HashMap<String, String>>();
    for (HashMap<String, String> localeInfo : allLocales) {
        String languageName = (String) localeInfo.get("label");
        //If this language label is NOT among the labels sorted by language, then it is available
        //for selection when creating a new label.
        //The assumption here is we don't want to allow the user to add a new label when a label
        //already exists in that language.
        //Use equals() for the string comparison rather than reference comparison with !=
        if (!"untyped".equals(languageName) && !existingLabelsLanguageNames.contains(languageName)) {
            availableLocales.add(localeInfo);
        }
    }
    //Sort the list by language label and return
    Collections.sort(availableLocales, new Comparator<HashMap<String, String>>() {
        public int compare(HashMap<String, String> h1, HashMap<String, String> h2) {
            String languageName1 = (String) h1.get("label");
            String languageName2 = (String) h2.get("label");
            return languageName1.compareTo(languageName2);
        }
    });
    return availableLocales;
}
From source file:net.sf.taverna.raven.plugins.PluginManager.java
private void updatePluginAndDeps(Plugin plugin, HashSet<Plugin> alreadyUpdated) {
    if (alreadyUpdated.contains(plugin)) {
        return;
    }
    alreadyUpdated.add(plugin);
    // Also update any available dependencies
    for (PluginId pluginDep : plugin.getPluginDependencies()) {
        for (Plugin updatedDep : updatedPlugins) {
            if (satisfiesDependency(updatedDep, pluginDep)) {
                updatePluginAndDeps(updatedDep, alreadyUpdated);
            }
        }
    }
    if (isUpdateAvailable(plugin)) {
        synchronized (updatedPlugins) {
            Plugin newPlugin = getUpdate(plugin);
            updatedPlugins.remove(newPlugin);
            newPlugin.setEnabled(true); // enable newly updated plugin

            // Remove the old plugin
            if (updatedPlugins.contains(plugin))
                updatedPlugins.remove(plugin);
            if (plugins.contains(plugin)) {
                if (plugin.isEnabled()) {
                    disablePlugin(plugin);
                }
                int index = plugins.indexOf(plugin);
                plugins.remove(plugin);
                plugin.removePluginListener(this);
            }

            // Add the new plugin
            if (!plugins.contains(newPlugin)) {
                plugins.add(newPlugin);
                sortPlugins();
                for (String repositoryURL : newPlugin.getRepositories()) {
                    try {
                        // T2-338 - always add to the end - do not use
                        // prependRemoteRepository
                        repository.addRemoteRepository(new URL(repositoryURL));
                    } catch (MalformedURLException e) {
                        logger.warn("Invalid remote repository URL - " + repositoryURL);
                    }
                }
                for (Artifact artifact : newPlugin.getProfile().getArtifacts()) {
                    repository.addArtifact(artifact);
                    if (newPlugin.getProfile().getSystemArtifacts().contains(artifact)) {
                        profile.addSystemArtifact(artifact);
                    }
                }
                if (!checkPluginCompatibility(newPlugin)) {
                    if (newPlugin.isEnabled()) {
                        newPlugin.setEnabled(false);
                        firePluginIncompatibleEvent(
                                new PluginManagerEvent(this, newPlugin, plugins.indexOf(newPlugin)));
                    }
                }
                repository.update();
                if (newPlugin.isEnabled()) {
                    enablePlugin(newPlugin);
                }
                // Notify interested parties
                firePluginUpdatedEvent(new PluginManagerEvent(this, newPlugin, plugins.indexOf(newPlugin)));
                newPlugin.addPluginListener(this);
            }
            savePlugins();
        }
    }
}