List of usage examples for java.util.HashSet#contains(Object)
public boolean contains(Object o)
From source file:com.haulmont.cuba.core.global.MetadataTools.java
/**
 * Depth-first traversal of an entity graph, invoking the visitor on every
 * attribute of every reachable entity. The {@code visited} set guards against
 * cycles and repeated visits.
 */
protected void internalTraverseAttributes(Entity entity, EntityAttributeVisitor visitor, HashSet<Object> visited) {
    if (visited.contains(entity))
        return;
    visited.add(entity);
    for (MetaProperty property : entity.getMetaClass().getProperties()) {
        if (visitor.skip(property))
            continue;
        visitor.visit(entity, property);
        // Only reference attributes can be descended into.
        if (!property.getRange().isClass())
            continue;
        // Never trigger lazy loading: descend only into already-loaded references.
        if (!persistentAttributesLoadChecker.isLoaded(entity, property.getName()))
            continue;
        Object value = entity.getValue(property.getName());
        if (value == null)
            continue;
        if (value instanceof Collection) {
            for (Object item : (Collection) value) {
                internalTraverseAttributes((Entity) item, visitor, visited);
            }
        } else {
            internalTraverseAttributes((Entity) value, visitor, visited);
        }
    }
}
From source file:eionet.cr.api.feeds.SubjectsRDFWriter.java
/** * * @param subjects/* w ww .ja va 2 s. com*/ * @param out * @throws IOException */ public void write(List<SubjectDTO> subjects, OutputStream out) throws IOException { // if no subjects, write empty rdf:RDF tag if (subjects == null || subjects.isEmpty()) { out.write("<rdf:RDF/>".getBytes()); return; } // start rdf:RDF element out.write(("<rdf:RDF" + getAttributes() + ">").getBytes()); // loop over subjects for (SubjectDTO subject : subjects) { // initialize subject processor if not initialized yet if (subjectProcessor != null) { subjectProcessor.process(subject); } // continuing has only point if subject has at least one predicate if (subject.getPredicateCount() > 0) { String subjectUri = subject.getUri(); if (StringUtils.isBlank(subjectUri)) { LOGGER.error("Subject URI must not be blank (subject hash = " + subject.getUriHash() + ")"); continue; } // start rdf:Description tag StringBuffer buf = new StringBuffer("\n\t<rdf:Description rdf:about=\""); buf.append(StringEscapeUtils.escapeXml(subjectUri)).append("\">"); // loop over this subject's predicates for (Entry<String, Collection<ObjectDTO>> entry : subject.getPredicates().entrySet()) { String predicate = entry.getKey(); Collection<ObjectDTO> objects = entry.getValue(); // continue only if predicate has at least one object if (objects != null && !objects.isEmpty()) { // get namespace URI for this predicate String nsUrl = extractNamespace(predicate); if (nsUrl == null || nsUrl.trim().length() == 0) { throw new CRRuntimeException("Could not extract namespace URL from " + predicate); } // include only predicates from supplied namespaces if (namespaces.containsKey(nsUrl)) { // extract predicate's local name String localName = StringUtils.substringAfterLast(predicate, nsUrl); if (localName == null || localName.trim().length() == 0) { throw new CRRuntimeException("Could not extract local name from " + predicate); } // hash-set for remembering already written object values HashSet<String> alreadyWritten = new 
HashSet<String>(); // loop over this predicate's objects for (ObjectDTO object : entry.getValue()) { // skip literal values of rdf:type if (object.isLiteral() && predicate.equals(Predicates.RDF_TYPE)) { continue; } String objectValue = object.getValue(); boolean isDerivedObject = object.getDerivSourceHash() != 0; // include only non-blank and non-derived objects // that have not been written yet if (!StringUtils.isBlank(objectValue) && !alreadyWritten.contains(objectValue) && (includeDerivedValues || !isDerivedObject)) { // start predicate tag buf.append("\n\t\t<").append(namespaces.get(nsUrl)).append(":") .append(localName); // prepare escaped-for-XML object value String escapedValue = StringEscapeUtils.escapeXml(objectValue); // write object value, depending on whether it is literal or not // (close the predicate tag too) if (!object.isLiteral() && URLUtil.isURL(objectValue)) { buf.append(" rdf:resource=\"").append(escapedValue).append("\"/>"); } else { buf.append(">").append(escapedValue).append("</") .append(namespaces.get(nsUrl)).append(":").append(localName) .append(">"); } alreadyWritten.add(objectValue); } } } } } // close rdf:Description tag buf.append("\n\t</rdf:Description>"); out.write(buf.toString().getBytes()); } } // close rdf:RDF tag out.write("</rdf:RDF>\n".getBytes()); }
From source file:com.ibm.bi.dml.api.DMLScript.java
/**
 * Performs a best-effort check of the local and Hadoop security configuration
 * and logs a warning if map/reduce tasks may run as a different OS user than
 * the submitting user, then validates that the configured local and HDFS
 * working directories are trustworthy.
 *
 * @param config DML configuration holding the local/HDFS working directories
 * @throws IOException if the Hadoop configuration cannot be read
 * @throws DMLRuntimeException if a configured working directory is not trustworthy
 */
private static void checkSecuritySetup(DMLConfig config) throws IOException, DMLRuntimeException {
    //analyze local configuration
    String userName = System.getProperty("user.name");
    HashSet<String> groupNames = new HashSet<String>();
    try {
        //check existence, for backwards compatibility to < hadoop 0.21
        if (UserGroupInformation.class.getMethod("getCurrentUser") != null) {
            String[] groups = UserGroupInformation.getCurrentUser().getGroupNames();
            for (String g : groups)
                groupNames.add(g);
        }
    } catch (Exception ex) {
        // deliberately ignored: on Hadoop versions without getCurrentUser() the
        // group list simply stays empty and the check below degrades gracefully
    }

    //analyze hadoop configuration
    JobConf job = ConfigurationManager.getCachedJobConf();
    boolean localMode = InfrastructureAnalyzer.isLocalMode(job);
    String taskController = job.get("mapred.task.tracker.task-controller",
            "org.apache.hadoop.mapred.DefaultTaskController");
    String ttGroupName = job.get("mapreduce.tasktracker.group", "null");
    String perm = job.get(MRConfigurationNames.DFS_PERMISSIONS, "null"); //note: job.get("dfs.permissions.supergroup",null);
    URI fsURI = FileSystem.getDefaultUri(job);

    //determine security states
    boolean flagDiffUser = !(taskController.equals("org.apache.hadoop.mapred.LinuxTaskController") //runs map/reduce tasks as the current user
            || localMode // run in the same JVM anyway
            || groupNames.contains(ttGroupName)); //user in task tracker group
    boolean flagLocalFS = fsURI == null || fsURI.getScheme().equals("file");
    boolean flagSecurity = perm.equals("yes");
    LOG.debug("SystemML security check: " + "local.user.name = " + userName + ", " + "local.user.groups = "
            + ProgramConverter.serializeStringCollection(groupNames) + ", " + "mapred.job.tracker = "
            + job.get("mapred.job.tracker") + ", " + "mapred.task.tracker.task-controller = " + taskController
            + "," + "mapreduce.tasktracker.group = " + ttGroupName + ", " + "fs.default.name = "
            + ((fsURI != null) ? fsURI.getScheme() : "null") + ", " + MRConfigurationNames.DFS_PERMISSIONS
            + " = " + perm);

    //print warning if permission issues possible
    if (flagDiffUser && (flagLocalFS || flagSecurity)) {
        LOG.warn("Cannot run map/reduce tasks as user '" + userName + "'. Using tasktracker group '"
                + ttGroupName + "'.");
    }

    //validate external filenames working directories
    String localtmpdir = config.getTextValue(DMLConfig.LOCAL_TMP_DIR);
    String hdfstmpdir = config.getTextValue(DMLConfig.SCRATCH_SPACE);
    if (!LocalFileUtils.validateExternalFilename(localtmpdir, false))
        throw new DMLRuntimeException("Invalid (non-trustworthy) local working directory.");
    if (!LocalFileUtils.validateExternalFilename(hdfstmpdir, true))
        throw new DMLRuntimeException("Invalid (non-trustworthy) hdfs working directory.");
}
From source file:edu.cornell.mannlib.vitro.webapp.dao.jena.PropertyDaoJena.java
/**
 * Recursively collects every direct and transitive subproperty URI of the
 * given property into {@code subtree}. The membership check doubles as a
 * cycle guard, so cyclic property hierarchies terminate.
 */
private void getAllSubPropertyURIs(String propertyURI, HashSet<String> subtree) {
    for (String uri : getSubPropertyURIs(propertyURI)) {
        if (subtree.contains(uri)) {
            continue; // already collected — avoids infinite recursion on cycles
        }
        subtree.add(uri);
        getAllSubPropertyURIs(uri, subtree);
    }
}
From source file:de.uni_potsdam.hpi.bpt.promnicat.persistenceApi.orientdbObj.index.IndexIntersection.java
/**
 * Load the intersecting referenced objects from the specified indices.
 * First load the database ids from all indices, intersect them, and load
 * the remaining ids.
 *
 * @return the resulting {@link IndexCollectionElement}s, one per db id that
 *         occurs in every index
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public Collection<IndexCollectionElement<V>> load() {
    //load dbIds only and sort them by result set size
    TreeList rawResults = new TreeList(); //no generics possible
    int maxSize = 0;
    for (AbstractIndex index : indices) {
        ResultSet<V> oneResultSet = new ResultSet<V>(index.loadIdsOnly(), index.getName());
        rawResults.add(oneResultSet);
        maxSize = Math.max(maxSize, oneResultSet.getSize());
    }

    // create a list of intersecting dbIds
    // start with the smallest result set and intersect with the second smallest,
    // intersect this result with the third smallest a.s.o.
    // (presumably ResultSet sorts by size inside the TreeList — TODO confirm)
    HashSet<String> intersectingDbIds = new HashSet<String>(maxSize);
    for (Object r : rawResults) {
        ResultSet<V> aResult = (ResultSet<V>) r;
        if (intersectingDbIds.isEmpty()) {
            // first iteration: seed with the whole first result set
            intersectingDbIds.addAll(aResult.getDbIds());
        } else {
            intersectingDbIds.retainAll(aResult.getDbIds());
        }
        if (intersectingDbIds.isEmpty()) {
            // intersection already empty — no point scanning further indices
            break;
        }
    }

    //create Map of IndexElements each, i.e. group by referenced id.
    //Every group is stored in an IndexCollectionElement
    HashMap<String, IndexCollectionElement<V>> finalElements = new HashMap<String, IndexCollectionElement<V>>(
            indices.size());
    for (Object r : rawResults) {
        ResultSet<V> aResult = (ResultSet<V>) r;
        for (IndexElement indexElement : aResult.getList()) {
            String currentString = indexElement.getDbId();
            if (intersectingDbIds.contains(currentString)) {
                if (!finalElements.containsKey(currentString)) {
                    finalElements.put(currentString, new IndexCollectionElement<V>(currentString));
                }
                finalElements.get(currentString).addIndexElements(indexElement);
            }
        }
    }

    //load pojos
    for (IndexCollectionElement<V> collectionElement : finalElements.values()) {
        collectionElement.loadPojo(papi);
    }

    return finalElements.values();
}
From source file:edu.cornell.mannlib.vitro.webapp.dao.jena.PropertyDaoJena.java
private void getAllSuperPropertyURIs(String propertyURI, HashSet<String> subtree) { List<String> directSuperproperties = getSuperPropertyURIs(propertyURI, true); Iterator<String> it = directSuperproperties.iterator(); while (it.hasNext()) { String uri = it.next();//from ww w. ja v a2 s . c om if (!subtree.contains(uri)) { subtree.add(uri); getAllSuperPropertyURIs(uri, subtree); } } }
From source file:afest.datastructures.tree.decision.erts.grower.AERTGrower.java
/** * Return k random attributes (non-constant) picked without replacement unless less then k attributes are non-constant. * @param constantAttributes attributes that are constant. * @param attributeList list of all attributes present in each point in the set. * @return k random attributes (non-constant) picked without replacement unless less then k attributes are non-constant. *//* w ww .j av a2 s . c o m*/ private ArrayList<R> getKRandomAttributes(ArrayList<R> constantAttributes, ArrayList<R> attributeList) { ArrayList<R> kRandomAttributes = new ArrayList<R>(); HashSet<R> pickedAttributes = new HashSet<R>(constantAttributes); for (int k = 0; k < fK; k++) { // If all non-constant attributes have been picked and k is not reached yet, start resampling the non-constant attributes. if (pickedAttributes.size() == attributeList.size()) { pickedAttributes.clear(); pickedAttributes.addAll(constantAttributes); } // Count the number of attributes that are available for a pick int numNotPicked = attributeList.size() - pickedAttributes.size(); // get a random attribute int randomAttribute = fRandom.nextInt(numNotPicked); int count = 0; for (R aR : attributeList) { // If the attribute is not picked if (!pickedAttributes.contains(aR)) { // verify if it is the one corresponding to the random pick if (count == randomAttribute) { kRandomAttributes.add(aR); pickedAttributes.add(aR); break; } else // increase the count { count++; } } } } return kRandomAttributes; }
From source file:es.caib.seycon.ng.servei.AutoritzacioServiceImpl.java
/**
 * Returns the user's role authorizations enriched with descriptive metadata
 * (description, domain type, scope, ambit, inheritance) taken from the
 * authorization catalog, de-duplicated and sorted.
 *
 * @param codiUsuari the user code whose authorizations are fetched
 * @return sorted, de-duplicated authorizations; an empty collection when the
 *         user has none
 * @throws Exception propagated from the underlying authorization lookup
 */
protected Collection<AutoritzacioRol> handleGetDescriptionUserAuthorizations(String codiUsuari)
        throws Exception {
    Collection autoritzacionsRolUsuari = handleGetUserAuthorizations(codiUsuari);
    // IMPORTANT: the map key is role name + authorization code + description
    // [domain value], which is what makes entries unique.
    HashMap<String, AutoritzacioRol> autoritzacionsSenseRepeticions = new HashMap();
    // Enrich each authorization with additional catalog information.
    if (autoritzacionsRolUsuari != null) {
        for (Iterator it = autoritzacionsRolUsuari.iterator(); it.hasNext();) {
            AutoritzacioRol auto = (AutoritzacioRol) it.next();
            AutoritzacioSEU autoSEU = (AutoritzacioSEU) getAuthorizations().get(auto.getAutoritzacio());
            if (autoSEU != null) {
                // Format the domain values for display.
                String valorDominiUsuari = ""; //$NON-NLS-1$
                if (auto.getValorDominiRolUsuari() != null && auto.getValorDominiRolUsuari().size() > 0) {
                    HashSet valors = new HashSet();
                    for (Iterator vit = auto.getValorDominiRolUsuari().iterator(); vit.hasNext();) {
                        ValorDomini vd = (ValorDomini) vit.next();
                        valors.add(vd.getValor());
                    }
                    // A lone "*" wildcard means "all values" — shown as nothing.
                    // NOTE(review): empty statement + else is intentional here.
                    if (valors.size() == 1 && valors.contains("*")) //$NON-NLS-1$
                        ;
                    else
                        valorDominiUsuari = " " + valors.toString(); //$NON-NLS-1$
                }
                auto.setDescripcio(autoSEU.getDescripcio() //$NON-NLS-1$
                        + valorDominiUsuari);
                auto.setTipusDomini(autoSEU.getTipusDomini());
                auto.setScope(autoSEU.getScope());
                auto.setAmbit(autoSEU.getAmbit());
                auto.setHereta(autoSEU.getHereta()); // comma-separated
                autoritzacionsSenseRepeticions
                        .put(auto.getRol().getNom() + auto.getAutoritzacio() + auto.getDescripcio(), auto);
            }
        }
        // Sort the de-duplicated authorizations before returning.
        LinkedList autosOrdenades = new LinkedList(autoritzacionsSenseRepeticions.values());
        Collections.sort(autosOrdenades, new ComparaAutos());
        return autosOrdenades;
    }
    return autoritzacionsSenseRepeticions.values();
}
From source file:freenet.client.ArchiveManager.java
private void handleZIPArchive(ArchiveStoreContext ctx, FreenetURI key, InputStream data, String element, ArchiveExtractCallback callback, MutableBoolean gotElement, boolean throwAtExit, ClientContext context) throws ArchiveFailureException, ArchiveRestartException { if (logMINOR) Logger.minor(this, "Handling a ZIP Archive"); ZipInputStream zis = null;/*ww w. j av a2 s. c o m*/ try { zis = new ZipInputStream(data); // MINOR: Assumes the first entry in the zip is a directory. ZipEntry entry; byte[] buf = new byte[32768]; HashSet<String> names = new HashSet<String>(); boolean gotMetadata = false; outerZIP: while (true) { entry = zis.getNextEntry(); if (entry == null) break; if (entry.isDirectory()) continue; String name = stripLeadingSlashes(entry.getName()); if (names.contains(name)) { Logger.error(this, "Duplicate key " + name + " in archive " + key); continue; } long size = entry.getSize(); if (name.equals(".metadata")) gotMetadata = true; if (size > maxArchivedFileSize && !name.equals(element)) { addErrorElement(ctx, key, name, "File too big: " + maxArchivedFileSize + " greater than current archived file size limit " + maxArchivedFileSize, true); } else { // Read the element long realLen = 0; Bucket output = tempBucketFactory.makeBucket(size); OutputStream out = output.getOutputStream(); try { int readBytes; while ((readBytes = zis.read(buf)) > 0) { out.write(buf, 0, readBytes); readBytes += realLen; if (readBytes > maxArchivedFileSize) { addErrorElement(ctx, key, name, "File too big: " + maxArchivedFileSize + " greater than current archived file size limit " + maxArchivedFileSize, true); out.close(); out = null; output.free(); continue outerZIP; } } } finally { if (out != null) out.close(); } if (size <= maxArchivedFileSize) { addStoreElement(ctx, key, name, output, gotElement, element, callback, context); names.add(name); trimStoredData(); } else { // We are here because they asked for this file. 
callback.gotBucket(output, context); gotElement.value = true; addErrorElement( ctx, key, name, "File too big: " + size + " greater than current archived file size limit " + maxArchivedFileSize, true); } } } // If no metadata, generate some if (!gotMetadata) { generateMetadata(ctx, key, names, gotElement, element, callback, context); trimStoredData(); } if (throwAtExit) throw new ArchiveRestartException("Archive changed on re-fetch"); if ((!gotElement.value) && element != null) callback.notInArchive(context); } catch (IOException e) { throw new ArchiveFailureException("Error reading archive: " + e.getMessage(), e); } finally { if (zis != null) { try { zis.close(); } catch (IOException e) { Logger.error(this, "Failed to close stream: " + e, e); } } } }
From source file:net.countercraft.movecraft.async.rotation.RotationTask.java
private boolean checkChests(Material mBlock, MovecraftLocation newLoc, HashSet<MovecraftLocation> existingBlockSet) { Material testMaterial;//from ww w . ja v a2 s . c o m MovecraftLocation aroundNewLoc; aroundNewLoc = newLoc.translate(1, 0, 0); testMaterial = getCraft().getW().getBlockAt(aroundNewLoc.getX(), aroundNewLoc.getY(), aroundNewLoc.getZ()) .getType(); if (testMaterial.equals(mBlock)) { if (!existingBlockSet.contains(aroundNewLoc)) { return false; } } aroundNewLoc = newLoc.translate(-1, 0, 0); testMaterial = getCraft().getW().getBlockAt(aroundNewLoc.getX(), aroundNewLoc.getY(), aroundNewLoc.getZ()) .getType(); if (testMaterial.equals(mBlock)) { if (!existingBlockSet.contains(aroundNewLoc)) { return false; } } aroundNewLoc = newLoc.translate(0, 0, 1); testMaterial = getCraft().getW().getBlockAt(aroundNewLoc.getX(), aroundNewLoc.getY(), aroundNewLoc.getZ()) .getType(); if (testMaterial.equals(mBlock)) { if (!existingBlockSet.contains(aroundNewLoc)) { return false; } } aroundNewLoc = newLoc.translate(0, 0, -1); testMaterial = getCraft().getW().getBlockAt(aroundNewLoc.getX(), aroundNewLoc.getY(), aroundNewLoc.getZ()) .getType(); if (testMaterial.equals(mBlock)) { if (!existingBlockSet.contains(aroundNewLoc)) { return false; } } return true; }