Usage examples for the java.util Set method contains(Object)
boolean contains(Object o);
From source file:com.github.anba.test262.environment.Environments.java
/**
 * Creates a new Rhino environment provider.
 *
 * @param configuration
 *            configuration holding the Rhino language version, the default
 *            compiler name and the enabled/disabled feature ids
 * @return a provider that creates {@link RhinoEnv} test environments
 */
public static <T extends GlobalObject> EnvironmentProvider<T> rhino(final Configuration configuration) {
    final int version = configuration.getInt("rhino.version", Context.VERSION_DEFAULT);
    final String compiler = configuration.getString("rhino.compiler.default");
    // feature ids to force-enable resp. force-disable; entries are filtered
    // for non-empty strings and converted to integers below
    List<?> enabledFeatures = configuration.getList("rhino.features.enabled", emptyList());
    List<?> disabledFeatures = configuration.getList("rhino.features.disabled", emptyList());
    final Set<Integer> enabled = intoCollection(filterMap(enabledFeatures, notEmptyString, toInteger),
            new HashSet<Integer>());
    final Set<Integer> disabled = intoCollection(filterMap(disabledFeatures, notEmptyString, toInteger),
            new HashSet<Integer>());
    /**
     * Context factory that applies the configured language version and the
     * feature overrides to every {@link Context} it creates.
     *
     * @see ContextFactory#initGlobal(ContextFactory)
     */
    final ContextFactory factory = new ContextFactory() {
        @Override
        protected boolean hasFeature(Context cx, int featureIndex) {
            // explicit configuration overrides win over Rhino's defaults
            if (enabled.contains(featureIndex)) {
                return true;
            } else if (disabled.contains(featureIndex)) {
                return false;
            }
            return super.hasFeature(cx, featureIndex);
        }

        @Override
        protected Context makeContext() {
            Context context = super.makeContext();
            context.setLanguageVersion(version);
            return context;
        }
    };
    EnvironmentProvider<RhinoGlobalObject> provider = new EnvironmentProvider<RhinoGlobalObject>() {
        @Override
        public RhinoEnv<RhinoGlobalObject> environment(final String testsuite, final String sourceName,
                final Test262Info info) {
            // per-testsuite settings (strict-mode support, charset, lib dir)
            Configuration c = configuration.subset(testsuite);
            final boolean strictSupported = c.getBoolean("strict", false);
            final String encoding = c.getString("encoding", "UTF-8");
            final String libpath = c.getString("lib_path");
            final Context cx = factory.enterContext();
            // holder so the environment and the global object below can
            // reference each other despite being created one after the other
            final AtomicReference<RhinoGlobalObject> $global = new AtomicReference<>();
            final RhinoEnv<RhinoGlobalObject> environment = new RhinoEnv<RhinoGlobalObject>() {
                @Override
                public RhinoGlobalObject global() {
                    return $global.get();
                }

                @Override
                protected String getEvaluator() {
                    return compiler;
                }

                @Override
                protected String getCharsetName() {
                    return encoding;
                }

                @Override
                public void exit() {
                    // balances the factory.enterContext() call above
                    Context.exit();
                }
            };
            @SuppressWarnings({ "serial" })
            final RhinoGlobalObject global = new RhinoGlobalObject() {
                {
                    // instance initializer: populate the standard objects
                    // on this global (sealed = false)
                    cx.initStandardObjects(this, false);
                }

                @Override
                protected boolean isStrictSupported() {
                    return strictSupported;
                }

                @Override
                protected String getDescription() {
                    return info.getDescription();
                }

                @Override
                protected void failure(String message) {
                    failWith(message, sourceName);
                }

                @Override
                protected void include(Path path) throws IOException {
                    // resolve the input file against the library path
                    Path file = Paths.get(libpath).resolve(path);
                    InputStream source = Files.newInputStream(file);
                    environment.eval(file.getFileName().toString(), source);
                }
            };
            $global.set(global);
            return environment;
        }
    };
    // safe: T is only produced, and RhinoGlobalObject is the concrete global
    // type this provider creates
    @SuppressWarnings("unchecked")
    EnvironmentProvider<T> p = (EnvironmentProvider<T>) provider;
    return p;
}
From source file:com.perl5.lang.perl.util.PerlPackageUtil.java
public static void processParentClassesSubs(PerlNamespaceDefinition childClass, Set<String> processedSubsNames, Set<PerlNamespaceDefinition> recursionMap, Processor<PerlSubBase> processor) { if (childClass == null || recursionMap.contains(childClass)) { return;// w ww . j av a2 s. co m } recursionMap.add(childClass); for (PerlNamespaceDefinition parentNamespace : childClass.getParentNamespaceDefinitions()) { for (PsiElement subDefinitionBase : PerlPsiUtil.collectNamespaceMembers(parentNamespace, PerlSubBaseStub.class, PerlSubBase.class)) { String subName = ((PerlSubBase) subDefinitionBase).getSubName(); if (subDefinitionBase.isValid() && ((PerlSubBase) subDefinitionBase).isMethod() && !processedSubsNames.contains(subName)) { processedSubsNames.add(subName); processor.process(((PerlSubBase) subDefinitionBase)); } } processParentClassesSubs(parentNamespace, processedSubsNames, recursionMap, processor); } }
From source file:com.mindquarry.desktop.workspace.ConflictHelper.java
/** * Finds all conflicts where a locally replaced folder conflicts with a * remote modification of (in) that folder. *///from w ww . ja v a 2 s . c om public static List<Conflict> findLocalContainerReplacedConflicts(List<Status> remoteAndLocalChanges) { List<Conflict> conflicts = new ArrayList<Conflict>(); Iterator<Status> iter = remoteAndLocalChanges.iterator(); // remember any replaced dirs we have already handled to // avoid an endless recursion of the main while loop Set<Status> handledReplacedDirs = new HashSet<Status>(); while (iter.hasNext()) { Status status = iter.next(); // REPLACED DIRECTORIES (locally) if (status.getNodeKind() == NodeKind.dir && status.getTextStatus() == StatusKind.replaced && !handledReplacedDirs.contains(status)) { // conflict if there is a modification inside the directory // remotely Status conflictParent = status; handledReplacedDirs.add(conflictParent); List<Status> localChildren = new ArrayList<Status>(); List<Status> remoteChildren = new ArrayList<Status>(); // find all children while (iter.hasNext()) { status = iter.next(); if (FileHelper.isParent(conflictParent.getPath(), status.getPath())) { if (status.getRepositoryTextStatus() == StatusKind.added || status.getRepositoryTextStatus() == StatusKind.replaced || status.getRepositoryTextStatus() == StatusKind.modified || status.getRepositoryTextStatus() == StatusKind.deleted) { remoteChildren.add(status); } else { localChildren.add(status); } // TODO: some might have to be added to both local and // remote iter.remove(); } else { // no more children found, this conflict is done break; } } if (remoteChildren.size() > 0) { // also remove the deleted folder status object remoteAndLocalChanges.remove(conflictParent); conflicts.add(new ReplaceConflict(conflictParent, localChildren, remoteChildren)); } // reset global iterator for next conflict search iter = remoteAndLocalChanges.iterator(); } } return conflicts; }
From source file:com.siemens.sw360.importer.ComponentImportUtils.java
/**
 * Splits the incoming CSV records into the ones that describe a new release
 * (collected into {@code relevantCSVRecords}) and the ones to skip, while
 * gathering attachment download URLs and attachment contents on the way.
 *
 * All collection parameters are output parameters and are mutated in place;
 * {@code knownReleaseIdentifiers} additionally acts as an input filter.
 */
private static void filterRelevantCSVRecordsAndGetAttachmentContents(
        Iterable<ComponentCSVRecord> compCSVRecords, Map<String, String> componentNameToId,
        Set<String> knownReleaseIdentifiers, List<ComponentCSVRecord> relevantCSVRecords,
        HashMap<String, List<String>> releaseIdentifierToDownloadURL,
        List<AttachmentContent> attachmentContentsToUpdate) {
    for (ComponentCSVRecord csvRecord : compCSVRecords) {
        String releaseIdentifier = csvRecord.getReleaseIdentifier();
        // guard: already imported, or the record carries no release at all
        if (knownReleaseIdentifiers.contains(releaseIdentifier) || !csvRecord.isSetRelease()) {
            log.debug("skipping existing release " + releaseIdentifier);
            continue;
        }
        String componentId = componentNameToId.get(csvRecord.getComponentName());
        // guard: release references a component we could not resolve
        if (isNullOrEmpty(componentId)) {
            log.error("Broken component: " + csvRecord);
            continue;
        }
        if (csvRecord.isSetAttachmentContent()) {
            List<AttachmentContent> contents = csvRecord.getAttachmentContents();
            releaseIdentifierToDownloadURL.put(releaseIdentifier,
                    CommonUtils.getAttachmentURLsFromAttachmentContents(contents));
            attachmentContentsToUpdate.addAll(contents);
        }
        relevantCSVRecords.add(csvRecord);
        knownReleaseIdentifiers.add(releaseIdentifier);
    }
}
From source file:io.hops.hopsworks.common.util.HopsUtils.java
/**
 * Validate user defined properties against a list of blacklisted Spark
 * properties.
 *
 * @param sparkProps raw user defined properties (parsed by parseSparkProperties)
 * @param sparkDir spark installation directory, location of the blacklist file
 * @return the parsed user properties when none of them is blacklisted
 * @throws IOException if the blacklist cannot be read
 * @throws IllegalArgumentException if any user property is blacklisted
 */
public static Map<String, String> validateUserProperties(String sparkProps, String sparkDir) throws IOException {
    Map<String, String> parsedProperties = parseSparkProperties(sparkProps);
    Set<String> blacklist = readBlacklistedSparkProperties(sparkDir);
    // reject the submission as soon as any user key appears on the blacklist
    java.util.Optional<String> offending = parsedProperties.keySet().stream()
            .filter(blacklist::contains).findFirst();
    if (offending.isPresent()) {
        throw new IllegalArgumentException("User defined property <" + offending.get() + "> is blacklisted!");
    }
    return parsedProperties;
}
From source file:com.ikanow.aleph2.analytics.spark.utils.SparkTechnologyUtils.java
/**
 * Builds a map of streaming spark inputs: one Kafka direct stream per
 * non-excluded job input, keyed by the input's name.
 *
 * @param context analytics context of the current job
 * @param maybe_test_spec optional test spec used when transforming the inputs
 * @param streaming_context the Spark streaming context to attach streams to
 * @param exclude_names input names to skip
 * @return multimap from input name to its JSON-decoded record stream
 */
public static Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>> buildStreamingSparkInputs(
        final IAnalyticsContext context, final Optional<ProcessingTestSpecBean> maybe_test_spec,
        final JavaStreamingContext streaming_context, final Set<String> exclude_names) {
    // NOTE(review): getJob() is unwrapped with get() — assumes a job is
    // always present when this is called; confirm with callers
    final AnalyticThreadJobBean job = context.getJob().get();
    final Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>> mutable_builder = HashMultimap
            .create();
    transformInputBean(Optionals.ofNullable(job.inputs()).stream(), maybe_test_spec)
            .filter(job_input -> !exclude_names.contains(job_input.name())).forEach(job_input -> {
                // one direct Kafka stream over all topics of this input
                final List<String> topics = context.getInputTopics(context.getBucket(), job, job_input);
                final JavaPairInputDStream<String, String> k_stream = KafkaUtils.createDirectStream(
                        streaming_context, String.class, String.class, StringDecoder.class, StringDecoder.class,
                        com.ikanow.aleph2.distributed_services.utils.KafkaUtils.getProperties(),
                        ImmutableSet.<String>builder().addAll(topics).build());
                // wrap each Kafka value as a JSON batch record; the 0L slot
                // is the Tuple2<Long, IBatchRecord> sequence component
                mutable_builder.put(job_input.name(), k_stream.mapToPair(t2 -> Tuples._2T(t2._1(),
                        Tuples._2T(0L, new BatchRecordUtils.JsonBatchRecord(_mapper.readTree(t2._2()))))));
            });
    return mutable_builder;
}
From source file:com.mindquarry.desktop.workspace.ConflictHelper.java
/**
 * Finds all conflicts where a remote folder delete conflicts with locally
 * added or modified files in that directory.
 *
 * NOTE(review): the inner child-scan assumes that the status entries of a
 * directory's children directly follow the directory in the list order
 * (the scan stops at the first non-child) — confirm against the caller
 * that the list is path-sorted.
 *
 * @param remoteAndLocalChanges combined status list; matched child entries
 *            and conflicting parents are removed from it as a side effect
 * @return the list of detected delete-with-modification conflicts
 */
public static List<Conflict> findRemoteContainerDeleteConflicts(List<Status> remoteAndLocalChanges) {
    List<Conflict> conflicts = new ArrayList<Conflict>();
    Iterator<Status> iter = remoteAndLocalChanges.iterator();
    // remember any deleted dirs we have already handled to
    // avoid an endless recursion of the main while loop
    Set<Status> handledDeletedDirs = new HashSet<Status>();
    while (iter.hasNext()) {
        Status status = iter.next();
        // DELETED DIRECTORIES (remotely)
        if (status.getNodeKind() == NodeKind.dir && status.getRepositoryTextStatus() == StatusKind.deleted
                && !handledDeletedDirs.contains(status)) {
            // conflict if there is a child that is added or removed locally
            Status conflictParent = status;
            handledDeletedDirs.add(conflictParent);
            List<Status> localModList = new ArrayList<Status>();
            // find all children; locally changed ones are collected, and
            // every child is removed from the main list
            while (iter.hasNext()) {
                status = iter.next();
                if (FileHelper.isParent(conflictParent.getPath(), status.getPath())) {
                    if (status.getTextStatus() == StatusKind.added
                            || status.getTextStatus() == StatusKind.replaced
                            || status.getTextStatus() == StatusKind.modified) {
                        localModList.add(status);
                    }
                    iter.remove();
                } else {
                    // no more children found, this conflict is done
                    break;
                }
            }
            // only a conflict if something actually changed locally
            if (localModList.size() > 0) {
                // also remove the deleted folder status object
                remoteAndLocalChanges.remove(conflictParent);
                conflicts.add(new DeleteWithModificationConflict(false, conflictParent, localModList));
            }
            // reset global iterator for next conflict search: the list was
            // structurally modified above, so the old iterator is stale
            iter = remoteAndLocalChanges.iterator();
        }
    }
    return conflicts;
}
From source file:acromusashi.stream.ml.clustering.kmeans.KmeansCalculator.java
/** * ???//from w w w .j a v a 2s .c o m * * @param centroidNum * @param allDistance ? * @return */ protected static Map<Integer, Integer> createCentroidMappings(int centroidNum, List<CentroidMapping> allDistance) { Set<Integer> baseSet = new HashSet<>(); Set<Integer> targetSet = new HashSet<>(); Map<Integer, Integer> resultMapping = new TreeMap<>(); int mappingNum = 0; // ????? for (CentroidMapping targetDistance : allDistance) { // ????????? if (baseSet.contains(targetDistance.getBaseIndex()) || targetSet.contains(targetDistance.getTargetIndex())) { continue; } baseSet.add(targetDistance.getBaseIndex()); targetSet.add(targetDistance.getTargetIndex()); resultMapping.put(targetDistance.getBaseIndex(), targetDistance.getTargetIndex()); mappingNum++; // ???????? if (mappingNum >= centroidNum) { break; } } return resultMapping; }
From source file:com.egt.ejb.toolkit.ToolKitUtils.java
/**
 * Translates a label word by word via the BundlePalabras word dictionary,
 * then looks the re-joined phrase up in BundleEtiquetas.
 *
 * @param label the label to translate; blank input yields ""
 * @return the translated, whitespace-normalized label
 */
public static String translabel(String label) {
    if (StringUtils.isBlank(label)) {
        return "";
    }
    String key;
    Set<String> keySet = BundlePalabras.getKeySet();
    String[] tokens = StringUtils.split(StringUtils.trimToEmpty(label));
    for (int i = 0; i < tokens.length; i++) {
        // dictionary lookup is case-insensitive on the source word
        key = tokens[i].toLowerCase();
        if (keySet.contains(key)) {
            tokens[i] = BundlePalabras.getString(key);
        }
        tokens[i] = StringUtils.trimToEmpty(tokens[i]);
        // accent the last syllable (original Spanish comment, mojibake:
        // "acentuar la última sílaba")
        // NOTE(review): the literals below look encoding-damaged — the
        // replacement suffix was presumably "ón" (with accent) rather than
        // "n"; as written this turns e.g. "...on" into "...n". Verify
        // against the original source before relying on this transform.
        if (tokens[i].length() > 3 && tokens[i].endsWith("on")) {
            tokens[i] = tokens[i].substring(0, tokens[i].length() - 2) + "n";
        }
    }
    // re-join and re-split to collapse any multi-word substitutions into
    // single-space-separated form before the phrase lookup
    key = StringUtils.join(StringUtils.split(StringUtils.join(tokens, ' ')), ' ');
    String string = BundleEtiquetas.getString(key);
    return string.trim();
}
From source file:ch.entwine.weblounge.common.impl.language.LanguageUtils.java
/** * Returns the language out of <code>choices</code> that matches the client's * requirements as indicated through the <code>Accept-Language</code> header. * If no match is possible, <code>null</code> is returned. * /*from w w w . j a va 2 s . co m*/ * @param choices * the available locales * @param request * the http request */ public static Language getPreferredLanguage(Set<Language> choices, HttpServletRequest request) { if (request.getHeader("Accept-Language") != null) { Enumeration<?> locales = request.getLocales(); while (locales.hasMoreElements()) { try { Language l = getLanguage((Locale) locales.nextElement()); if (choices.contains(l)) return l; } catch (UnknownLanguageException e) { // never mind, some clients will send stuff like "*" as the locale } } } return null; }