List of usage examples for java.util HashSet size
public int size()
From source file:org.cloudgraph.hbase.io.GraphEdgeWriter.java
@Override public void merge(PlasmaNode dataNode, HashSet<PlasmaDataObject> oldValues, List<PlasmaEdge> currentEdges) throws IOException { if (this.collectionBaseType == null) if (oldValues.size() > 0) fetchMetadata();//from ww w . j a v a 2 s .co m else findMetadata(); HashMap<String, PlasmaDataObject> oldEdgeMap = new HashMap<>(oldValues.size()); for (PlasmaDataObject dataObject : oldValues) { if (!dataObject.getDataGraph().getChangeSummary().isCreated(dataObject)) oldEdgeMap.put(dataObject.getUUIDAsString(), dataObject); } Map<String, PlasmaDataObject> currentEdgeMap = toMap(dataNode, currentEdges); // remove old values not found in current Iterator<String> oldIter = oldEdgeMap.keySet().iterator(); while (oldIter.hasNext()) { String key = oldIter.next(); PlasmaDataObject oldValue = oldEdgeMap.get(key); if (!currentEdgeMap.containsKey(key)) { this.remove(oldValue); } } // add new/current values not found in old Iterator<String> newIter = currentEdgeMap.keySet().iterator(); while (newIter.hasNext()) { String key = newIter.next(); PlasmaDataObject newValue = currentEdgeMap.get(key); if (!oldEdgeMap.containsKey(key)) { this.add(newValue); } } }
From source file:org.eclipse.swt.examples.graphics.GraphicsExample.java
/**
 * Builds the category/tab selection tree: one root item per distinct tab
 * category (sorted), each with one child item per tab, and wires the
 * selection listener that switches the displayed tab.
 */
void createTabList(Composite parent) {
    tabList = new Tree(parent, SWT.SINGLE | SWT.H_SCROLL | SWT.V_SCROLL | SWT.BORDER);
    Arrays.sort(tabs, (a, b) -> a.getText().compareTo(b.getText()));

    // Collect the distinct category names and sort them alphabetically.
    HashSet<String> categorySet = new HashSet<>();
    for (GraphicsTab graphicsTab : tabs) {
        categorySet.add(graphicsTab.getCategory());
    }
    String[] sortedCategories = categorySet.toArray(new String[categorySet.size()]);
    Arrays.sort(sortedCategories);

    // One root tree item per category.
    for (String category : sortedCategories) {
        TreeItem categoryItem = new TreeItem(tabList, SWT.NONE);
        categoryItem.setText(category);
    }

    // Attach each tab beneath its category item, recording display order.
    tabs_in_order = new ArrayList<>();
    for (TreeItem categoryItem : tabList.getItems()) {
        for (GraphicsTab graphicsTab : tabs) {
            if (categoryItem.getText().equals(graphicsTab.getCategory())) {
                TreeItem leaf = new TreeItem(categoryItem, SWT.NONE);
                leaf.setText(graphicsTab.getText());
                leaf.setData(graphicsTab);
                tabs_in_order.add(graphicsTab);
            }
        }
    }

    tabList.addListener(SWT.Selection, event -> {
        TreeItem selected = (TreeItem) event.item;
        if (selected == null) {
            return;
        }
        GraphicsTab chosen = (GraphicsTab) selected.getData();
        // No-op when the already-active tab is re-selected.
        if (chosen == tab) {
            return;
        }
        setTab((GraphicsTab) selected.getData());
    });
}
From source file:InlineSchemaValidator.java
/**
 * Returns an iterator over all prefixes bound to the given namespace URI.
 * <p>
 * The reserved {@code xml} and {@code xmlns} namespaces each map to their
 * single constant prefix; otherwise the registered prefix set (if any) is
 * consulted. Returns an empty iterator when nothing is bound.
 *
 * @param namespaceURI the namespace URI to look up; must not be {@code null}
 * @return an iterator of prefix strings, possibly empty; iterators for the
 *         reserved namespaces do not support {@code remove()}
 * @throws IllegalArgumentException if {@code namespaceURI} is {@code null}
 */
public Iterator getPrefixes(String namespaceURI) {
    if (namespaceURI == null) {
        throw new IllegalArgumentException("Namespace URI cannot be null.");
    }
    // Collections.singletonList replaces the original hand-rolled one-element
    // anonymous Iterators; its iterator has the same contract (one element,
    // remove() throws UnsupportedOperationException).
    if (XMLConstants.XML_NS_URI.equals(namespaceURI)) {
        return Collections.singletonList(XMLConstants.XML_NS_PREFIX).iterator();
    }
    if (XMLConstants.XMLNS_ATTRIBUTE_NS_URI.equals(namespaceURI)) {
        return Collections.singletonList(XMLConstants.XMLNS_ATTRIBUTE).iterator();
    }
    if (fURIToPrefixMappings != null) {
        HashSet prefixes = (HashSet) fURIToPrefixMappings.get(namespaceURI);
        if (prefixes != null && !prefixes.isEmpty()) {
            return prefixes.iterator();
        }
    }
    return Collections.EMPTY_LIST.iterator();
}
From source file:com.amalto.workbench.compare.ResourceCompareInput.java
@Override public Object getAdapter(Class adapter) { if (IFile.class.equals(adapter)) { IProgressMonitor pm = new NullProgressMonitor(); // flush changes in any dirty viewer flushViewers(pm);/*from www. j ava 2 s . c o m*/ IFile[] files = (IFile[]) getAdapter(IFile[].class); if (files != null && files.length > 0) { return files[0]; // can only return one: limitation on IDE.saveAllEditors; see #64617 } return null; } if (IFile[].class.equals(adapter)) { HashSet collector = new HashSet(); collectDirtyResources(fRoot, collector); return collector.toArray(new IFile[collector.size()]); } return super.getAdapter(adapter); }
From source file:edu.isi.wings.catalog.component.api.impl.kb.ComponentCreationKB.java
/**
 * Adds a new component to the catalog knowledge base.
 * <p>
 * Rejects the component (returns false) when input and output role names are
 * not mutually unique, or when any role object fails to be created. On success
 * the component object, its input/output role triples, optional documentation,
 * requirements, location, rules, and the isConcrete flag are written, and the
 * addition is mirrored to the external catalog when one is configured.
 *
 * @param comp      the component to register
 * @param pholderid parent holder class id; when non-null a new holder class is
 *                  created beneath it, otherwise the existing holder is fetched
 * @return true when the component was fully registered, false on duplicate
 *         role names or role creation failure (NOTE: partial KB writes made
 *         before a mid-way failure are not rolled back here)
 */
@Override
public boolean addComponent(Component comp, String pholderid) {
    // Check for uniqueness of the role names passed in
    HashSet<String> unique = new HashSet<String>();
    for (ComponentRole role : comp.getInputs())
        unique.add(role.getRoleName());
    for (ComponentRole role : comp.getOutputs())
        unique.add(role.getRoleName());
    // If there are some duplicate role ids, return false
    if (unique.size() < (comp.getInputs().size() + comp.getOutputs().size()))
        return false;
    String cid = comp.getID();
    String cholderid = this.getComponentHolderId(cid);
    // If parent holder passed in, create a holder as subclass of parent holder
    // Else assume that current holder already exists and fetch that
    KBObject cls;
    if (pholderid != null)
        cls = writerkb.createClass(cholderid, pholderid);
    else
        cls = kb.getConcept(cholderid);
    KBObject cobj = this.writerkb.createObjectOfClass(cid, cls);
    KBObject inProp = kb.getProperty(this.pcns + "hasInput");
    KBObject outProp = kb.getProperty(this.pcns + "hasOutput");
    KBObject isConcreteProp = kb.getProperty(this.pcns + "isConcrete");
    // Create one role object per input and link it via hasInput.
    for (ComponentRole role : comp.getInputs()) {
        role.setID(cid + "_" + role.getRoleName()); // HACK: role id is <compid>_<rolename/argid>
        KBObject roleobj = this.createRole(role);
        if (roleobj == null)
            return false;
        this.writerkb.addTriple(cobj, inProp, roleobj);
    }
    // Same for outputs, linked via hasOutput.
    for (ComponentRole role : comp.getOutputs()) {
        role.setID(cid + "_" + role.getRoleName());
        KBObject roleobj = this.createRole(role);
        if (roleobj == null)
            return false;
        this.writerkb.addTriple(cobj, outProp, roleobj);
    }
    // Optional component metadata, written only when present.
    if (comp.getDocumentation() != null)
        this.setComponentDocumentation(cobj, comp.getDocumentation());
    if (comp.getComponentRequirement() != null)
        this.setComponentRequirements(cobj, comp.getComponentRequirement(), this.kb, this.writerkb);
    if (comp.getLocation() != null)
        this.setComponentLocation(cid, comp.getLocation());
    if (comp.getRulesText() != null) {
        this.setComponentRules(cid, comp.getRulesText());
    }
    // Record whether this is a concrete component (vs. an abstract type).
    KBObject isConcreteVal = this.writerkb.createLiteral(comp.getType() == Component.CONCRETE);
    this.writerkb.setPropertyValue(cobj, isConcreteProp, isConcreteVal);
    // Mirror the addition to the external catalog when one is configured.
    if (this.externalCatalog != null)
        this.externalCatalog.addComponent(comp, pholderid);
    return true;
}
From source file:org.eclipse.ecr.core.api.repository.cache.DocumentModelCacheUpdater.java
/**
 * Applies a batch of operation events to the cache, then notifies registered
 * listeners once with the full set of changed documents and changed subtrees.
 * Failures in individual events or individual listeners are logged and do not
 * stop processing of the rest.
 */
@Override
public void handleEvents(OperationEvent[] events, boolean urgent) {
    HashSet<DocumentModel> changedDocs = new HashSet<DocumentModel>(events.length);
    HashSet<DocumentModel> changedTrees = new HashSet<DocumentModel>(events.length);
    for (OperationEvent event : events) {
        try {
            handleEvent(cache, changedDocs, changedTrees, event);
        } catch (Exception e) {
            log.error("Exception handling event", e);
        }
    }
    if (listeners.isEmpty()) {
        return;
    }
    if (!changedDocs.isEmpty()) {
        DocumentModel[] docs = changedDocs.toArray(new DocumentModel[changedDocs.size()]);
        for (DocumentModelCacheListener listener : listeners) {
            try {
                listener.documentsChanged(docs, urgent);
            } catch (Throwable error) {
                log.error("An error while trying fire listener for document modifications", error);
            }
        }
    }
    if (!changedTrees.isEmpty()) {
        DocumentModel[] trees = changedTrees.toArray(new DocumentModel[changedTrees.size()]);
        for (DocumentModelCacheListener listener : listeners) {
            try {
                listener.subreeChanged(trees, urgent);
            } catch (Throwable error) {
                log.error("An error while trying fire listener for document modifications", error);
            }
        }
    }
}
From source file:chanupdater.ChanUpdater.java
private void doUpdates() throws SQLException, IOException, FileNotFoundException, LdvTableException { if (verbose > 1) { System.out.println("Starting update process."); }//from w ww .j av a2s. c om ArrayList<ChanListSummary> chanLists; HashSet<ChanInfo> del = new HashSet<>(); totalAdds = 0; totalDels = 0; for (ChanListSummary cls : cLists) { cls.printSummary(); String server = cls.getServer(); String cTyp = cls.getcType(); if (verbose > 2) { System.out.format("Check %1$s for type:%2$s ", server, cTyp); } TreeMap<String, HashSet<ChanInfo>> chanSets = cls.getChanSets(); for (Entry<String, HashSet<ChanInfo>> ent : chanSets.entrySet()) { del.clear(); HashSet<ChanInfo> newChans = ent.getValue(); String ifo = ent.getKey(); if (verbose > 1) { System.out.format("Server: %1$s, cType: %2$s, IFO: %3$s, count: %4$,d\n", cls.getServer(), cls.getcType(), ifo, newChans.size()); } String namePat = ifo + ":%"; TreeSet<ChanInfo> oldSet = chnTbl.getAsSet(server, namePat, cTyp, newChans.size()); for (ChanInfo old : oldSet) { boolean gotit = newChans.contains(old); if (gotit) { // it's in both newChans.remove(old); } else { if (old.isAvailable()) { // only in old add it to be deleted set del.add(old); } } } totalAdds += newChans.size(); totalDels += del.size(); if ((newChans.size() > 0 || del.size() > 0)) { if (verbose > 1) { System.out.format(" add: %1$d, del %2$d\n", newChans.size(), del.size()); } for (ChanInfo ci : newChans) { if (verbose > 2) { System.out.print("Add: "); ci.print(); } chnTbl.insertNewBulk(ci); } if (newChans.size() > 0) { chnTbl.insertNewBulk(null); // complete the bulk insert } if (doDeletes) { for (ChanInfo ci : del) { if (verbose > 2) { System.out.print("Del: "); ci.print(); } chnTbl.setAvailable(ci.getId(), false); } } } else if (verbose > 1) { System.out.println(" no updates."); } } if (verbose > 0 && totalAdds + totalDels > 0) { System.out.format("Total additions: %1$,d, total removals: %2$,d, " + "Server: %3$s, type: %4$s%n", totalAdds, totalDels, 
cls.getServer(), cls.getcType()); } else if (verbose > 1 && totalAdds + totalDels == 0) { System.out.println("No changes to channel table. %n"); } } }
From source file:org.openmainframe.ade.ext.output.ExtJaxbAnalyzedIntervalV2XmlStorer.java
/**
 * Returns the XSL resources for this storer, lazily combining and
 * de-duplicating this class's resource list on first use and caching the
 * result in {@code s_xslResources}.
 */
@Override
protected String[] getXSLResources() {
    if (s_xslResources == null) {
        // De-duplicate via a set, then materialize as the cached array.
        HashSet<String> combined = new HashSet<String>(Arrays.asList(s_thisXSLResources));
        s_xslResources = combined.toArray(new String[combined.size()]);
    }
    return s_xslResources;
}
From source file:org.apache.hadoop.hdfs.server.namenode.ha.TestRetryCacheWithHA.java
@SuppressWarnings("unchecked") private void listCachePools(HashSet<String> poolNames, int active) throws Exception { HashSet<String> tmpNames = (HashSet<String>) poolNames.clone(); RemoteIterator<CachePoolEntry> pools = dfs.listCachePools(); int poolCount = poolNames.size(); for (int i = 0; i < poolCount; i++) { CachePoolEntry pool = pools.next(); String pollName = pool.getInfo().getPoolName(); assertTrue("The pool name should be expected", tmpNames.remove(pollName)); if (i % 2 == 0) { int standby = active; active = (standby == 0) ? 1 : 0; cluster.shutdownNameNode(standby); cluster.waitActive(active);//from w ww .j av a2s. c o m cluster.restartNameNode(standby, false); } } assertTrue("All pools must be found", tmpNames.isEmpty()); }
From source file:emp.cloud.pigutils.EmbeddedPigRunner.java
static int executeScript(Configuration hadoopConfig, PigProgressNotificationListener listener, String taskName, String script) throws Throwable { boolean verbose = false; boolean gruntCalled = false; String logFileName = null;//from w w w .j a va 2s. c o m try { Properties properties = new Properties(); PropertiesUtil.loadDefaultProperties(properties); properties.putAll(ConfigurationUtil.toProperties(hadoopConfig)); HashSet<String> optimizerRules = new HashSet<String>(); ExecType execType = ExecType.MAPREDUCE; if (properties.getProperty("aggregate.warning") == null) { // by default warning aggregation is on properties.setProperty("aggregate.warning", "" + true); } if (properties.getProperty("opt.multiquery") == null) { // by default multiquery optimization is on properties.setProperty("opt.multiquery", "" + true); } if (properties.getProperty("stop.on.failure") == null) { // by default we keep going on error on the backend properties.setProperty("stop.on.failure", "" + false); } // set up client side system properties in UDF context UDFContext.getUDFContext().setClientSystemProps(properties); // create the context with the parameter PigContext pigContext = new PigContext(execType, properties); // create the static script state object ScriptState scriptState = ScriptState.start("", pigContext); if (listener != null) { scriptState.registerListener(listener); } if (!Boolean.valueOf(properties.getProperty(PROP_FILT_SIMPL_OPT, "false"))) { // turn off if the user has not explicitly turned on this // optimization optimizerRules.add("FilterLogicExpressionSimplifier"); } if (optimizerRules.size() > 0) { pigContext.getProperties().setProperty("pig.optimizer.rules", ObjectSerializer.serialize(optimizerRules)); } if (properties.get("udf.import.list") != null) PigContext.initializeImportList((String) properties.get("udf.import.list")); PigContext.setClassLoader(pigContext.createCl(null)); pigContext.getProperties().setProperty(PigContext.JOB_NAME, taskName); Grunt grunt = 
null; BufferedReader in; scriptState.setScript(script); in = new BufferedReader(new StringReader(script)); grunt = new Grunt(in, pigContext); gruntCalled = true; int results[] = grunt.exec(); return getReturnCodeForStats(results); // } catch (Exception e) { // if (e instanceof PigException) { // PigException pe = (PigException) e; // int rc = (pe.retriable()) ? ReturnCode.RETRIABLE_EXCEPTION // : ReturnCode.PIG_EXCEPTION; // PigStatsUtil.setErrorCode(pe.getErrorCode()); // } // PigStatsUtil.setErrorMessage(e.getMessage()); // // if (!gruntCalled) { // LogUtils.writeLog(e, logFileName, log, verbose, // "Error before Pig is launched"); // } // FileLocalizer.deleteTempFiles(); // // if (!gruntCalled) { // LogUtils.writeLog(e, logFileName, log, verbose, // "Error before Pig is launched"); // } // throw e; // } catch (Throwable e) { // PigStatsUtil.setErrorMessage(e.getMessage()); // throw new IllegalStateException(e); } finally { // clear temp files FileLocalizer.deleteTempFiles(); } }