List of usage examples for java.util.HashSet.size()
public int size()

Returns the number of elements in this set (its cardinality).
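Before the examples below, a minimal standalone sketch of the contract: a HashSet stores each element at most once, so size() reports the number of distinct elements, not the number of add() calls.

import java.util.HashSet;

public class HashSetSizeDemo {
    public static void main(String[] args) {
        HashSet<String> set = new HashSet<>();
        set.add("a");
        set.add("b");
        set.add("a"); // duplicate; the set is unchanged
        System.out.println(set.size()); // prints 2, not 3
    }
}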
From source file:com.evolveum.liferay.usercreatehook.service.CustomUserLocalServiceImpl.java
protected static void checkPasswordValidity(String password) throws UserPasswordException {
    // size() of the token set gives the number of unique characters in the password
    HashSet<String> tmp = new HashSet<String>(
            com.evolveum.liferay.usercreatehook.password.StringPolicyUtils.stringTokenizer(password));
    boolean containsUpperCase = password.matches(".*\\p{javaUpperCase}.*");
    boolean containsLowerCase = password.matches(".*\\p{javaLowerCase}.*");
    boolean containsNumber = password.matches(".*\\d.*");
    boolean containsSpecial = password.matches(".*[\\_\\.\\!\\@\\$\\*\\=\\-\\?].*");
    if (WSConfig.getMidpointPasswordMinUniqueChars() > tmp.size()
            || WSConfig.getMidpointPasswordMinLenght() > password.length()
            || !containsUpperCase || !containsLowerCase || !containsNumber || !containsSpecial) {
        throw new UserPasswordException(UserPasswordException.PASSWORD_TOO_TRIVIAL);
    }
}
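The core size() idea in this example, as an isolated sketch (countUniqueChars is a hypothetical stand-in for StringPolicyUtils.stringTokenizer feeding a HashSet): the set collapses repeated characters, so its size() is the count of unique characters.

import java.util.HashSet;

public class UniqueCharCount {
    // Hypothetical helper: the size() of a HashSet built from the
    // password's characters is its number of unique characters.
    static int countUniqueChars(String password) {
        HashSet<Character> chars = new HashSet<>();
        for (char c : password.toCharArray()) {
            chars.add(c);
        }
        return chars.size();
    }

    public static void main(String[] args) {
        System.out.println(countUniqueChars("aabbcc")); // prints 3
    }
}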
From source file:org.apache.hadoop.hdfs.server.namenode.ValidateNamespaceDirPolicy.java
static void validatePolicy(Configuration conf, int policy, Collection<URI> configuredLocations,
        String configName, Map<URI, NNStorageLocation> result) throws IOException {
    /* DFS name node directory policy:
       0 - No enforcement
       1 - Enforce that there should be at least two copies and they must be on different devices
       2 - Enforce that there should be at least two copies on different devices and at least one
           must be on an NFS/remote device */
    // convert uri's for directory names
    List<NNStorageLocation> locations = new ArrayList<NNStorageLocation>();
    String shared = conf.get(configName + ".shared");
    URI sharedLocation = shared == null ? null : Util.stringAsURI(shared);
    for (URI name : configuredLocations) {
        FLOG.info("Conf validation - checking location: " + name);
        NNStorageLocation desc = checkLocation(name, conf, sharedLocation);
        locations.add(desc);
        result.put(desc.location, desc);
        FLOG.info("Conf validation - checked location: " + desc);
    }
    switch (policy) {
    case 0:
        // No check needed.
        break;
    case 1:
    case 2:
        boolean foundRemote = false;
        HashSet<String> mountPoints = new HashSet<String>();
        // Check that there should be at least two copies
        if (locations.size() < 2) {
            throw new IOException("Configuration parameter " + configName
                    + " violated DFS name node directory policy:"
                    + " There should be at least two copies.");
        }
        for (NNStorageLocation location : locations) {
            foundRemote |= (location.type == StorageLocationType.REMOTE
                    || location.type == StorageLocationType.SHARED);
            mountPoints.add(location.mountPoint);
        }
        // Check that there should be at least two directories on different mount points
        if (mountPoints.size() < 2) {
            throw new IOException("Configuration parameter " + configName
                    + " violated DFS name node directory policy:"
                    + " There must be at least two copies on different devices");
        }
        // If policy is 2, check that at least one directory is on NFS device
        if (policy == 2 && !foundRemote) {
            throw new IOException("Configuration parameter " + configName
                    + " violated DFS name node directory policy:"
                    + " There must be at least one copy on an NFS/remote device");
        }
        break;
    default:
        throw new IOException("Unexpected configuration parameters: dfs.name.dir.policy = "
                + policy + ", must be between 0 and 2.");
    }
}
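The device check above reduces to a small reusable pattern; here is a sketch under assumed inputs (requireTwoDevices and the mount-point strings are hypothetical): building a HashSet from the mount points collapses duplicates, so size() < 2 means every copy sits on the same device.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class DistinctMountPoints {
    // Hypothetical check: require copies on at least two distinct devices.
    static void requireTwoDevices(List<String> mountPoints) {
        HashSet<String> distinct = new HashSet<>(mountPoints);
        if (distinct.size() < 2) { // duplicates collapsed; size() counts devices
            throw new IllegalStateException("Copies must be on at least two devices");
        }
    }

    public static void main(String[] args) {
        requireTwoDevices(Arrays.asList("/mnt/a", "/mnt/b")); // passes
        requireTwoDevices(Arrays.asList("/mnt/a", "/mnt/a")); // throws
    }
}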
From source file:org.haplo.javascript.Runtime.java
/**
 * Initialize the shared JavaScript environment. Loads libraries and removes
 * methods of escaping the sandbox.
 */
public static void initializeSharedEnvironment(String frameworkRoot, boolean pluginDebuggingEnabled)
        throws java.io.IOException {
    // Don't allow this to be called twice
    if (sharedScope != null) {
        return;
    }
    long startTime = System.currentTimeMillis();
    final Context cx = Runtime.enterContext();
    try {
        final ScriptableObject scope = cx.initStandardObjects(null,
                false /* don't seal the standard objects yet */);
        if (!scope.has("JSON", scope)) {
            throw new RuntimeException(
                    "Expecting built-in JSON support in Rhino, check version is at least 1.7R3");
        }
        if (standardTemplateLoader == null) {
            throw new RuntimeException("StandardTemplateLoader for Runtime hasn't been set.");
        }
        String standardTemplateJSON = standardTemplateLoader.standardTemplateJSON();
        scope.put("$STANDARDTEMPLATES", scope, standardTemplateJSON);

        // Define the HaploTemplate host object now, so the JS code can use it to parse templates
        // TODO: Convert all standard templates from Handlebars, move HaploTemplate host object
        // declaration back with the others, remove $HaploTemplate from whitelist
        defineSealedHostClass(scope, HaploTemplate.class);

        // Load the library code
        FileReader bootScriptsFile = new FileReader(frameworkRoot + "/lib/javascript/bootscripts.txt");
        LineNumberReader bootScripts = new LineNumberReader(bootScriptsFile);
        String scriptFilename = null;
        while ((scriptFilename = bootScripts.readLine()) != null) {
            FileReader script = new FileReader(frameworkRoot + "/" + scriptFilename);
            cx.evaluateReader(scope, script, scriptFilename, 1, null /* no security domain */);
            script.close();
        }
        bootScriptsFile.close();

        // Insert plugin debugging flag
        if (pluginDebuggingEnabled) {
            Scriptable o = (Scriptable) scope.get("O", scope);
            o.put("PLUGIN_DEBUGGING_ENABLED", o, true);
        }

        // Load the list of allowed globals
        FileReader globalsWhitelistFile = new FileReader(
                frameworkRoot + "/lib/javascript/globalswhitelist.txt");
        HashSet<String> globalsWhitelist = new HashSet<String>();
        LineNumberReader whitelist = new LineNumberReader(globalsWhitelistFile);
        String globalName = null;
        while ((globalName = whitelist.readLine()) != null) {
            String g = globalName.trim();
            if (g.length() > 0) {
                globalsWhitelist.add(g);
            }
        }
        globalsWhitelistFile.close();

        // Remove all the globals which aren't allowed, using a whitelist.
        // getAllIds() is the form which includes the DONTENUM hidden properties.
        for (Object propertyName : scope.getAllIds()) {
            if (propertyName instanceof String) // ConsString is checked
            {
                // Delete any property which isn't in the whitelist
                if (!(globalsWhitelist.contains(propertyName))) {
                    scope.delete((String) propertyName); // ConsString is checked
                }
            } else {
                // Not expecting any other type of property name in the global namespace
                throw new RuntimeException(
                        "Not expecting global JavaScript scope to contain a property which isn't a String");
            }
        }
        // Run through the globals again, just to check nothing escaped
        for (Object propertyName : scope.getAllIds()) {
            if (!(globalsWhitelist.contains(propertyName))) {
                throw new RuntimeException("JavaScript global was not destroyed: " + propertyName.toString());
            }
        }
        // Run through the whitelist, and make sure that everything in it exists
        for (String propertyName : globalsWhitelist) {
            if (!scope.has(propertyName, scope)) {
                // The whitelist should only contain non-host objects created by the JavaScript source files.
                throw new RuntimeException(
                        "JavaScript global specified in whitelist does not exist: " + propertyName);
            }
        }
        // And make sure java has gone, to check yet again that everything expected has been removed
        if (scope.get("java", scope) != Scriptable.NOT_FOUND) {
            throw new RuntimeException("JavaScript global 'java' escaped destruction");
        }

        // Seal the scope and everything within it, so nothing else can be added and nothing can be changed.
        // Asking initStandardObjects() to seal the standard library doesn't actually work, as it will leave
        // some bits unsealed so that decodeURI.prototype.pants = 43; works, and can pass information between
        // runtimes. This recursive object sealer does actually work. It can't seal the main host object
        // class, so that's added to the scope next, with the (working) seal option set to true.
        HashSet<Object> sealedObjects = new HashSet<Object>();
        recursiveSealObjects(scope, scope, sealedObjects, false /* don't seal the root object yet */);
        if (sealedObjects.size() == 0) {
            throw new RuntimeException("Didn't seal any JavaScript globals");
        }

        // Add the host object classes. The sealed option works perfectly, so no need to use a special seal function.
        defineSealedHostClass(scope, KHost.class);
        defineSealedHostClass(scope, KPlatformGenericInterface.class);
        defineSealedHostClass(scope, KObjRef.class);
        defineSealedHostClass(scope, KScriptable.class);
        defineSealedHostClass(scope, KLabelList.class);
        defineSealedHostClass(scope, KLabelChanges.class);
        defineSealedHostClass(scope, KLabelStatements.class);
        defineSealedHostClass(scope, KDateTime.class);
        defineSealedHostClass(scope, KObject.class);
        defineSealedHostClass(scope, KText.class);
        defineSealedHostClass(scope, KQueryClause.class);
        defineSealedHostClass(scope, KQueryResults.class);
        defineSealedHostClass(scope, KPluginAppGlobalStore.class);
        defineSealedHostClass(scope, KPluginResponse.class);
        defineSealedHostClass(scope, KTemplatePartialAutoLoader.class);
        defineSealedHostClass(scope, KAuditEntry.class);
        defineSealedHostClass(scope, KAuditEntryQuery.class);
        defineSealedHostClass(scope, KUser.class);
        defineSealedHostClass(scope, KUserData.class);
        defineSealedHostClass(scope, KWorkUnit.class);
        defineSealedHostClass(scope, KWorkUnitQuery.class);
        defineSealedHostClass(scope, KEmailTemplate.class);
        defineSealedHostClass(scope, KBinaryData.class);
        defineSealedHostClass(scope, KBinaryDataInMemory.class, true /* map inheritance */);
        defineSealedHostClass(scope, KBinaryDataStaticFile.class, true /* map inheritance */);
        defineSealedHostClass(scope, KBinaryDataTempFile.class, true /* map inheritance */);
        defineSealedHostClass(scope, KUploadedFile.class, true /* map inheritance */);
        defineSealedHostClass(scope, KStoredFile.class);
        defineSealedHostClass(scope, KJob.class);
        defineSealedHostClass(scope, KFilePipelineResult.class);
        defineSealedHostClass(scope, KSessionStore.class);
        defineSealedHostClass(scope, KKeychainCredential.class);
        // HaploTemplate created earlier as required by some of the setup
        defineSealedHostClass(scope, HaploTemplateDeferredRender.class);
        defineSealedHostClass(scope, JSFunctionThis.class);
        defineSealedHostClass(scope, GenericDeferredRender.class);
        defineSealedHostClass(scope, KSecurityRandom.class);
        defineSealedHostClass(scope, KSecurityBCrypt.class);
        defineSealedHostClass(scope, KSecurityDigest.class);
        defineSealedHostClass(scope, KSecurityHMAC.class);
        defineSealedHostClass(scope, JdNamespace.class);
        defineSealedHostClass(scope, JdTable.class);
        defineSealedHostClass(scope, JdDynamicTable.class, true /* map inheritance */);
        defineSealedHostClass(scope, JdSelectClause.class);
        defineSealedHostClass(scope, JdSelect.class, true /* map inheritance */);
        defineSealedHostClass(scope, KGenerateTable.class, true /* map inheritance */);
        defineSealedHostClass(scope, KGenerateXLS.class, true /* map inheritance */);
        defineSealedHostClass(scope, KRefKeyDictionary.class);
        defineSealedHostClass(scope, KRefKeyDictionaryHierarchical.class, true /* map inheritance */);
        defineSealedHostClass(scope, KRefSet.class);
        defineSealedHostClass(scope, KCheckingLookupObject.class);
        defineSealedHostClass(scope, WorkUnitTags.class);
        defineSealedHostClass(scope, GetterDictionaryBase.class);
        defineSealedHostClass(scope, InterRuntimeSignal.class);
        defineSealedHostClass(scope, KRequestContinuation.class);
        defineSealedHostClass(scope, JsBigDecimal.class);
        defineSealedHostClass(scope, JsDecimalFormat.class);
        defineSealedHostClass(scope, XmlDocument.class);
        defineSealedHostClass(scope, XmlCursor.class);
        defineSealedHostClass(scope, KCollaborationService.class);
        defineSealedHostClass(scope, KCollaborationFolder.class);
        defineSealedHostClass(scope, KCollaborationItemList.class);
        defineSealedHostClass(scope, KCollaborationItem.class);
        defineSealedHostClass(scope, KAuthenticationService.class);
        defineSealedHostClass(scope, KMessageBusPlatformSupport.class);
        defineSealedHostClass(scope, KUUIDPlatformSupport.class);
        defineSealedHostClass(scope, StdReporting.class);
        defineSealedHostClass(scope, StdWebPublisher.class);
        defineSealedHostClass(scope, StdWebPublisher.RenderedAttributeListView.class);
        defineSealedHostClass(scope, StdWebPublisher.ValueView.class);

        // Seal the root now everything has been added
        scope.sealObject();

        // Check JavaScript TimeZone
        checkJavaScriptTimeZoneIsGMT();

        // Templating integration
        JSPlatformIntegration.parserConfiguration = new TemplateParserConfiguration();
        JSPlatformIntegration.includedTemplateRenderer = new TemplateIncludedRenderer();
        JSPlatformIntegration.platformFunctionRenderer = new TemplateFunctionRenderer();

        sharedScope = scope;
    } finally {
        cx.exit();
    }
    initializeSharedEnvironmentTimeTaken = System.currentTimeMillis() - startTime;
}
From source file:och.comp.ops.BillingOps.java
/** Re-reads which chat accounts are blocked and rebuilds the blocked-accounts cache; returns the (unblocked, blocked) counts. */
public static Pair<Integer, Integer> reinitAccsBlocked(Props props, MainDb db, Cache cache) {
    try {
        UniversalQueries universal = db.universal;
        log.info("reinitAccsBlockedCache...");
        HashSet<ChatAccount> blockedAccs = new HashSet<>();
        HashSet<ChatAccount> unblockedAccs = new HashSet<>();
        List<ChatAccount> accs = universal.select(new GetAllChatAccounts());
        HashSet<String> blockedUids = new HashSet<>();
        List<UserBalance> blockedUsers = universal.select(new GetAllBlockedUsers());
        for (UserBalance user : blockedUsers) {
            List<String> blocked = db.chats.getOwnerAccs(user.userId);
            blockedUids.addAll(blocked);
        }
        for (ChatAccount acc : accs) {
            if (blockedUids.contains(acc.uid))
                blockedAccs.add(acc);
            else
                unblockedAccs.add(acc);
        }
        // size() of each partition gives the counts for logging and the result pair
        int unblockedCount = unblockedAccs.size();
        int blockedCount = blockedAccs.size();
        log.info("found blocked accs: " + blockedCount + ", unblocked accs: " + unblockedCount);
        Map<Long, ServerRow> servers = getServersMap(universal);
        // send reqs
        sendBlockedReqs(props, blockedAccs, servers, true);
        sendBlockedReqs(props, unblockedAccs, servers, false);
        // update cache
        for (ChatAccount acc : blockedAccs) {
            cache.tryPutCache(getBlockedAccFlag(acc.uid), "true");
        }
        for (ChatAccount acc : unblockedAccs) {
            cache.tryRemoveCache(getBlockedAccFlag(acc.uid));
        }
        log.info("done");
        return new Pair<>(unblockedCount, blockedCount);
    } catch (Exception e) {
        log.error("can't reinitAccsBlockedCache: " + e);
        return null;
    }
}
From source file:org.aksw.gerbil.tools.DatasetAnalyzer.java
private void analyzeAsC2W(DatasetConfiguration config) throws GerbilException {
    C2WDataset dataset = (C2WDataset) config.getDataset(ExperimentType.C2W);
    if (dataset == null) {
        return;
    }
    output.print("C2W dataset: " + config.toString());
    output.print(" size=" + dataset.getSize());
    List<HashSet<Tag>> goldStandard = dataset.getC2WGoldStandardList();
    // Sum the size() of every tag set, then divide by the document count for the average
    double averageAnnotation = 0;
    for (HashSet<Tag> annotations : goldStandard) {
        averageAnnotation += annotations.size();
    }
    output.println(" Tags=" + averageAnnotation);
    output.println(" avg.Tags=" + (averageAnnotation / dataset.getSize()));
}
From source file:org.nuxeo.ecm.webengine.app.WebEngineModule.java
private static Class<?>[] readWebTypes(WebLoader loader, InputStream in) throws Exception {
    HashSet<Class<?>> types = new HashSet<Class<?>>();
    BufferedReader reader = null;
    try {
        reader = new BufferedReader(new InputStreamReader(in));
        String line;
        while ((line = reader.readLine()) != null) {
            line = line.trim();
            if (line.length() == 0 || line.startsWith("#")) {
                continue; // skip blank lines and comments
            }
            int p = line.indexOf('|');
            if (p > -1) {
                line = line.substring(0, p);
            }
            Class<?> cl = loader.loadClass(line);
            types.add(cl);
        }
    } finally {
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
                // ignore failures while closing
            }
        }
    }
    // size() the destination array exactly so toArray() fills it in place
    return types.toArray(new Class<?>[types.size()]);
}
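The closing line uses a common idiom worth a standalone sketch: passing toArray() an array pre-sized with size() lets the collection copy into it directly rather than allocating a replacement array. (On modern JVMs a zero-length array works just as well; this simply shows the pattern the example uses.)

import java.util.HashSet;

public class ToArraySizing {
    public static void main(String[] args) {
        HashSet<String> names = new HashSet<>();
        names.add("alpha");
        names.add("beta");
        // Pre-size the destination with size() so toArray fills it in place
        String[] arr = names.toArray(new String[names.size()]);
        System.out.println(arr.length); // prints 2
    }
}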
From source file:org.aksw.gerbil.tools.DatasetAnalyzer.java
private void analyzeAsD2W(DatasetConfiguration config) throws GerbilException {
    D2WDataset dataset = (D2WDataset) config.getDataset(ExperimentType.D2W);
    if (dataset == null) {
        return;
    }
    output.print("D2W dataset: " + config.getName());
    output.print(" size=" + dataset.getSize());
    List<HashSet<Annotation>> goldStandard = dataset.getD2WGoldStandardList();
    // Sum the size() of every annotation set, then divide by the document count for the average
    double averageAnnotation = 0;
    for (HashSet<Annotation> annotations : goldStandard) {
        averageAnnotation += annotations.size();
    }
    output.println(" Annotations=" + averageAnnotation);
    output.println(" avg.Annotations=" + (averageAnnotation / dataset.getSize()));
}
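Both analyzer methods share the same computation; as an isolated sketch (the List-of-HashSet shape mirrors the gold-standard lists above): summing size() over a list of sets and dividing by the list length yields the average set cardinality.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

public class AverageSetSize {
    static double averageSize(List<HashSet<String>> sets) {
        double total = 0;
        for (HashSet<String> s : sets) {
            total += s.size(); // add each set's cardinality
        }
        return total / sets.size();
    }

    public static void main(String[] args) {
        List<HashSet<String>> sets = new ArrayList<>();
        sets.add(new HashSet<>(List.of("a", "b")));
        sets.add(new HashSet<>(List.of("c")));
        System.out.println(averageSize(sets)); // prints 1.5
    }
}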
From source file:disko.flow.analyzers.ConsoleOutputAnalyzer.java
public void process(AnalysisContext<TextDocument> ctx, Ports ports) throws InterruptedException {
    log.debug("ConsoleOutputAnalyzer starts");
    HashSet<InputPort<?>> closedPorts = new HashSet<InputPort<?>>();
    // Keep draining until every input port has delivered its end-of-stream marker;
    // closedPorts.size() tracks how many ports have finished
    while (closedPorts.size() < ports.getInputCount()) {
        for (InputPort<?> inputPort : ports.getInputPorts()) {
            if (closedPorts.contains(inputPort))
                continue;
            Object data = inputPort.take();
            if (inputPort.isEOS(data)) {
                closedPorts.add(inputPort);
                continue;
            }
            if (data instanceof EntityMaintainer) {
                EntityMaintainer em = (EntityMaintainer) data;
                System.out.println(inputPort.getChannel() + ": '" + em.getConvertedSentence() + "'\n" + em);
            } else if (data instanceof SentenceInterpretation) {
                SentenceInterpretation s = (SentenceInterpretation) data;
                System.out.println(s.getSentence());
                for (RelOccurrence occ : s.getRelOccs())
                    System.out.println(occ.getRelation().toString(ctx.getGraph()));
                System.out.println();
            } else {
                System.out.println(inputPort.getChannel() + ":\n'" + data + "'\n");
            }
        }
    }
    log.debug("ConsoleOutputAnalyzer ends");
}
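The loop's termination condition is a reusable pattern; here is a minimal sketch under assumed inputs (the "EOS" strings stand in for the framework's end-of-stream markers): add each finished source to a HashSet and stop once size() reaches the source count. A set makes the count idempotent when a source reports completion more than once.

import java.util.HashSet;
import java.util.List;

public class DrainUntilAllClosed {
    public static void main(String[] args) {
        List<String> events = List.of("a:data", "a:EOS", "b:data", "a:EOS", "b:EOS");
        int sourceCount = 2;
        HashSet<String> closed = new HashSet<>();
        for (String event : events) {
            String[] parts = event.split(":");
            if (parts[1].equals("EOS")) {
                closed.add(parts[0]); // re-adding the same source is a no-op
            }
            if (closed.size() == sourceCount) {
                System.out.println("all sources finished");
                break;
            }
        }
    }
}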
From source file:amie.keys.CombinationsExplorationNew.java
private static HashSet<Node> createChildren(GraphNew newGraph, HashSet<Node> parents, Rule conditionRule) {
    HashSet<Node> allChildren = new HashSet<>();
    for (Node parent1 : parents) {
        for (Node parent2 : parents) {
            if (parent1 != parent2) {
                // Union of the two parent sets; a valid child must be exactly one element larger
                HashSet<Integer> newSet = new HashSet<>();
                newSet.addAll(parent1.set);
                newSet.addAll(parent2.set);
                HashSet<Integer> newSet2 = new HashSet<>(newSet);
                if ((newSet.size() == parent1.set.size() + 1)
                        && (getSupport(newSet2, conditionRule, support))) {
                    Node child = new Node(newSet);
                    HashSet<Node> children1 = newGraph.graph.get(parent1);
                    children1.add(child);
                    newGraph.nodes.put(child, child);
                    newGraph.graph.put(parent1, children1);
                    HashSet<Node> children2 = newGraph.graph.get(parent2);
                    children2.add(child);
                    newGraph.graph.put(parent2, children2);
                    allChildren.add(child);
                }
            }
        }
    }
    return allChildren;
}
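The size() comparison drives the level-wise candidate generation: two k-element parents combine into a (k+1)-element child only when their union is exactly one element larger than a parent, i.e. they overlap in all but one element. A minimal sketch of just that test:

import java.util.HashSet;
import java.util.List;

public class CandidateUnion {
    // True if p1 and p2 (assumed the same size) differ in exactly one element
    static boolean formsChild(HashSet<Integer> p1, HashSet<Integer> p2) {
        HashSet<Integer> union = new HashSet<>(p1);
        union.addAll(p2);
        return union.size() == p1.size() + 1;
    }

    public static void main(String[] args) {
        HashSet<Integer> a = new HashSet<>(List.of(1, 2));
        HashSet<Integer> b = new HashSet<>(List.of(1, 3));
        HashSet<Integer> c = new HashSet<>(List.of(3, 4));
        System.out.println(formsChild(a, b)); // true: union is {1, 2, 3}
        System.out.println(formsChild(a, c)); // false: union is {1, 2, 3, 4}
    }
}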
From source file:opengovcrawler.DB.java
static boolean UpdateGroupOfRemovedMinitries(HashSet<String> ogReadMins) throws SQLException {
    Statement stmt = connection.createStatement();
    ResultSet rs = stmt.executeQuery("SELECT title FROM ORGANIZATION_LKP;");
    HashSet<String> dbMinTitles = new HashSet<>();
    while (rs.next()) {
        dbMinTitles.add(rs.getString(1));
    }
    // A size mismatch between the titles in the database and the titles just read
    // means some ministries were removed from the source
    if (dbMinTitles.size() != ogReadMins.size()) {
        ArrayList<String> deprecatedMins = new ArrayList<String>(
                CollectionUtils.subtract(dbMinTitles, ogReadMins));
        String updMinGroup = "UPDATE ORGANIZATION_LKP SET group_title = ? WHERE title = ?";
        PreparedStatement prepUpdUrlsSt = connection.prepareStatement(updMinGroup);
        for (String minName : deprecatedMins) {
            prepUpdUrlsSt.setString(1, "Deprecated");
            prepUpdUrlsSt.setString(2, minName);
            prepUpdUrlsSt.executeUpdate();
        }
        prepUpdUrlsSt.close();
    }
    return false;
}
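As a closing sketch of the comparison above (the set names are hypothetical): when one set is known to contain the other, comparing the two size() values is a cheap way to detect removals before computing the actual difference.

import java.util.HashSet;
import java.util.List;

public class DetectRemovals {
    public static void main(String[] args) {
        HashSet<String> inDb = new HashSet<>(List.of("A", "B", "C"));
        HashSet<String> justRead = new HashSet<>(List.of("A", "B"));
        // Cheap pre-check: equal sizes would mean nothing was removed
        if (inDb.size() != justRead.size()) {
            HashSet<String> removed = new HashSet<>(inDb);
            removed.removeAll(justRead);
            System.out.println("deprecated: " + removed); // prints [C]
        }
    }
}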