Example usage for java.util Set addAll

List of usage examples for java.util Set addAll

Introduction

On this page you can find usage examples for java.util.Set#addAll.

Prototype

boolean addAll(Collection<? extends E> c);

Source Link

Document

Adds all of the elements in the specified collection to this set if they're not already present (optional operation).

Usage

From source file:org.web4thejob.util.L10nUtil.java

/**
 * Collects every localizable string resource that applies to the given class:
 * the {@code public static final L10nString} constants declared on the class
 * and its interfaces, strings contributed by localizable extension modules,
 * and — for desktop layout panels — command, setting and condition labels.
 *
 * @param localizable the class whose localizable resources are requested
 * @return the collected {@link L10nString} resources
 */
public static List<L10nString> getLocalizableResources(final Class<?> localizable) {
    final List<L10nString> resources = new ArrayList<L10nString>();

    // The class itself plus every interface it implements may declare
    // localizable string constants.
    final Set<Class> candidates = new HashSet<Class>();
    candidates.add(localizable);
    candidates.addAll(ClassUtils.getAllInterfacesForClassAsSet(localizable));

    // Harvest each public static final L10nString field on the candidate types.
    for (Class<?> type : candidates) {
        ReflectionUtils.doWithFields(type, new ReflectionUtils.FieldCallback() {
            @Override
            public void doWith(Field field) throws IllegalArgumentException, IllegalAccessException {
                // Static field, so the receiver is null.
                resources.add((L10nString) field.get(null));
            }
        }, new ReflectionUtils.FieldFilter() {
            @Override
            public boolean matches(Field field) {
                return ReflectionUtils.isPublicStaticFinal(field) && L10nString.class.equals(field.getType());
            }
        });
    }

    // Add localizable resources contributed by extension modules.
    for (Module module : ContextUtil.getModules()) {
        if (module instanceof LocalizableModule) {
            resources.addAll(((LocalizableModule) module).getLocalizableStrings(candidates));
        }
    }

    // Commands, settings and global strings only apply to desktop layout panels.
    if (DesktopLayoutPanel.class.isAssignableFrom(localizable)) {
        for (CommandEnum command : CommandEnum.values()) {
            resources.add(new L10nString(CommandEnum.class, command.name(),
                    L10nUtil.getMessage(command.getClass(), command.name(), command.name())));
        }

        for (SettingEnum setting : SettingEnum.values()) {
            resources.add(new L10nString(SettingEnum.class, setting.name(),
                    L10nUtil.getMessage(setting.getClass(), setting.name(), setting.name())));
        }

        for (Condition condition : Condition.getConditions()) {
            resources.add(new L10nString(Condition.class, condition.getKey(), condition.getKey()));
        }
    }

    return resources;
}

From source file:com.ctriposs.rest4j.common.testutils.DataAssert.java

/**
 * Asserts that two {@link DataMap}s are equal, subject to the {@code excludedProperties} and
 * {@code nullShouldEqualEmptyListOrMap} arguments.
 *
 * @param actualMap the {@link DataMap} we are checking
 * @param expectedMap the expected {@link DataMap}
 * @param excludedProperties the properties that will be ignored while checking the two DataMaps
 * @param nullShouldEqualEmptyListOrMap true if null should equal an empty {@link DataMap} or {@link DataList}
 */
public static void assertDataMapsEqual(DataMap actualMap, DataMap expectedMap, Set<String> excludedProperties,
        boolean nullShouldEqualEmptyListOrMap) {
    final Set<String> excluded = (excludedProperties == null) ? Collections.<String>emptySet()
            : excludedProperties;

    if (actualMap == null || expectedMap == null) {
        Assert.assertEquals(actualMap, expectedMap, "Only one of the data maps is null!");
        return;
    }

    // Compare key by key so a single failure message can list every mismatch.
    Set<String> mismatchedKeys = new HashSet<String>();
    Set<String> unionOfKeys = new HashSet<String>(actualMap.keySet());
    unionOfKeys.addAll(expectedMap.keySet());
    for (String key : unionOfKeys) {
        if (excluded.contains(key)) {
            continue;
        }

        Object actual = actualMap.get(key);
        Object expected = expectedMap.get(key);
        boolean matches;
        if (actual == null) {
            // null matches null, and optionally an empty list/map.
            matches = expected == null
                    || (nullShouldEqualEmptyListOrMap && isEmptyListOrMap(expected));
        } else if (actual.equals(expected)) {
            matches = true;
        } else {
            // Non-null, unequal values may still match when the expected side
            // is null and the actual side is an empty list/map.
            matches = nullShouldEqualEmptyListOrMap && expected == null && isEmptyListOrMap(actual);
        }
        if (!matches) {
            mismatchedKeys.add(key);
        }
    }

    if (!mismatchedKeys.isEmpty()) {
        List<String> errorMessages = new ArrayList<String>();
        errorMessages.add(mismatchedKeys.size() + " properties don't match:");
        for (String k : mismatchedKeys) {
            errorMessages.add("\tMismatch on property \"" + k + "\", expected:<" + expectedMap.get(k)
                    + "> but was:<" + actualMap.get(k) + ">");
        }
        Assert.fail(StringUtils.join(errorMessages, ERROR_MESSAGE_SEPARATOR));
    }
}

From source file:de.tudarmstadt.ukp.similarity.experiments.coling2012.util.WordIdfValuesGenerator.java

/**
 * Computes an idf (inverse document frequency) value for every lemma in the
 * given dataset and persists the result to UTILS_DIR/word-idf/&lt;dataset&gt;.txt
 * as one "lemma&lt;TAB&gt;idf" line per lemma. Skipped entirely if the output file
 * already exists.
 *
 * Pipeline: tokenize, POS-tag, and lemmatize both views of each document pair,
 * dump the lemmas per document, then compute idf = log10(N / df) where N is
 * the number of dumped lines and df the number of documents containing the lemma.
 *
 * @param dataset the dataset whose documents are analyzed
 * @throws Exception if reading the corpus, running the UIMA pipeline, or
 *         writing the result file fails
 */
@SuppressWarnings("unchecked")
public static void computeIdfScores(Dataset dataset) throws Exception {
    File outputFile = new File(UTILS_DIR + "/word-idf/" + dataset.toString() + ".txt");

    System.out.println("Computing word idf values");

    if (outputFile.exists()) {
        System.out.println(" - skipping, already exists");
    } else {
        System.out.println(" - this may take a while...");

        CollectionReader reader = ColingUtils.getCollectionReader(dataset);

        // Tokenization — the same segmenter is applied to both views of each
        // document pair.
        AnalysisEngineDescription seg = createPrimitiveDescription(BreakIteratorSegmenter.class);
        AggregateBuilder builder = new AggregateBuilder();
        builder.add(seg, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_1);
        builder.add(seg, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_2);
        AnalysisEngine aggr_seg = builder.createAggregate();

        // POS Tagging (English model), again on both views.
        AnalysisEngineDescription pos = createPrimitiveDescription(OpenNlpPosTagger.class,
                OpenNlpPosTagger.PARAM_LANGUAGE, "en");
        builder = new AggregateBuilder();
        builder.add(pos, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_1);
        builder.add(pos, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_2);
        AnalysisEngine aggr_pos = builder.createAggregate();

        // Lemmatization, again on both views.
        AnalysisEngineDescription lem = createPrimitiveDescription(StanfordLemmatizer.class);
        builder = new AggregateBuilder();
        builder.add(lem, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_1);
        builder.add(lem, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_2);
        AnalysisEngine aggr_lem = builder.createAggregate();

        // Output Writer — dumps the lemmas of each document to outputFile.
        AnalysisEngine writer = createPrimitive(WordIdfValuesGeneratorWriter.class,
                WordIdfValuesGeneratorWriter.PARAM_OUTPUT_FILE, outputFile.getAbsolutePath());

        SimplePipeline.runPipeline(reader, aggr_seg, aggr_pos, aggr_lem, writer);

        // Now we have the text format lemma1###lemma2###...###lemman,
        // one document per line.
        List<String> lines = FileUtils.readLines(outputFile);

        Map<String, Double> idfValues = new HashMap<String, Double>();

        // Build up token representations of texts.
        // NOTE(review): a Set collapses duplicate documents, while N below is
        // lines.size() (duplicates included) — confirm this mismatch is intended.
        Set<List<String>> docs = new HashSet<List<String>>();

        for (String line : lines) {
            List<String> doc = CollectionUtils.arrayToList(line.split("###"));

            docs.add(doc);
        }

        // Get the shared token list (vocabulary over all documents).
        Set<String> tokens = new HashSet<String>();
        for (List<String> doc : docs)
            tokens.addAll(doc);

        // Get the document-frequency numbers: for each token, in how many
        // (distinct) documents it occurs.
        for (String token : tokens) {
            double count = 0;
            for (List<String> doc : docs) {
                if (doc.contains(token))
                    count++;
            }
            idfValues.put(token, count);
        }

        // Compute the idf, replacing each df count in place.
        // (Replacing values while iterating keySet is safe — not a structural
        // modification.)
        for (String lemma : idfValues.keySet()) {
            double idf = Math.log10(lines.size() / idfValues.get(lemma));
            idfValues.put(lemma, idf);
        }

        // Store persistently.
        // NOTE: this overwrites the intermediate lemma dump in outputFile with
        // the final "lemma<TAB>idf" table.
        StringBuilder sb = new StringBuilder();
        for (String key : idfValues.keySet()) {
            sb.append(key + "\t" + idfValues.get(key) + LF);
        }
        FileUtils.writeStringToFile(outputFile, sb.toString());

        System.out.println(" - done");
    }
}

From source file:it.jackbergus.graphdatabase.matrices.MatrixOp.java

/**
 * Matrix sum/*from   w w w .  j a  v a  2  s  .  c  o m*/
 * @param left
 * @param right
 * @return
 */
public static IMatrix sum(IMatrix left, IMatrix right) {
    Set<Pair<Long, Long>> iter = left.getValueRange();
    iter.addAll(right.getValueRange());
    IMatrix g = new GuavaMatrix();
    iter.stream().forEach((x) -> {
        g.set(x, left.get(x) + right.get(x));
    });
    return g;
}

From source file:it.jackbergus.graphdatabase.matrices.MatrixOp.java

/**
 * MAtrix difference/*from   w  w  w  .j  ava2  s .c  om*/
 * @param left
 * @param right
 * @return
 */
public static IMatrix diff(IMatrix left, IMatrix right) {
    Set<Pair<Long, Long>> iter = left.getValueRange();
    iter.addAll(right.getValueRange());
    IMatrix g = new GuavaMatrix();
    iter.stream().forEach((x) -> {
        g.set(x, left.get(x) - right.get(x));
    });
    return g;
}

From source file:eu.annocultor.converters.europeana.RecordCompletenessRanking.java

/**
 * Utility function: counts how many words from {@code fields} were not yet in
 * {@code words}, adds them to {@code words} (mutating the caller's set), and
 * converts that count of new words into points.
 *
 * @param words the running set of already-seen words; updated in place
 * @param fields field values whose words are extracted and merged in
 * @param wordsPerPoint how many new words earn one point
 *                      (NOTE(review): 0 would throw ArithmeticException —
 *                      presumably always positive at call sites; verify)
 * @return the point total, capped by {@code capPoints}
 */
static int computePoints(Set<String> words, Collection<String> fields, int wordsPerPoint) {

    int wordsBefore = words.size();
    words.addAll(extractWordsFromFields(fields));
    return capPoints((words.size() - wordsBefore) / wordsPerPoint);
}

From source file:com.opengamma.financial.analytics.model.YieldCurveNodeSensitivitiesHelper.java

/**
 * @deprecated Use {@link #getInstrumentLabelledSensitivitiesForCurve(String, YieldCurveBundle, DoubleMatrix1D, InterpolatedYieldCurveSpecificationWithSecurities, ValueSpecification)}
 * instead
 * @param forwardCurveName The forward curve name
 * @param fundingCurveName The funding curve name
 * @param forwardResultSpecification The forward result specification
 * @param fundingResultSpecification The funding result specification
 * @param bundle The bundle containing the yield curves
 * @param sensitivitiesForCurves A matrix containing the sensitivities to each curve in the bundle
 * @param curveSpecs The specifications for the forward and funding curves
 * @return The computed value
 * @throws IllegalArgumentException if either named curve in the bundle is not a {@link YieldCurve}
 */
@Deprecated
public static Set<ComputedValue> getSensitivitiesForMultipleCurves(final String forwardCurveName,
        final String fundingCurveName, final ValueSpecification forwardResultSpecification,
        final ValueSpecification fundingResultSpecification, final YieldCurveBundle bundle,
        final DoubleMatrix1D sensitivitiesForCurves,
        final Map<String, InterpolatedYieldCurveSpecificationWithSecurities> curveSpecs) {
    if (!(bundle.getCurve(forwardCurveName) instanceof YieldCurve)) { //TODO: make it more generic
        throw new IllegalArgumentException("Can only handle YieldCurve");
    }
    // Bug fix: the funding curve is cast to YieldCurve below but was never
    // checked, so a non-YieldCurve funding curve failed with a raw
    // ClassCastException instead of this clearer error.
    if (!(bundle.getCurve(fundingCurveName) instanceof YieldCurve)) {
        throw new IllegalArgumentException("Can only handle YieldCurve");
    }
    final int nForward = ((YieldCurve) bundle.getCurve(forwardCurveName)).getCurve().size();
    final int nFunding = ((YieldCurve) bundle.getCurve(fundingCurveName)).getCurve().size();
    // The flat sensitivity vector is laid out [funding entries, forward entries].
    final Map<String, DoubleMatrix1D> sensitivities = new HashMap<>();
    sensitivities.put(fundingCurveName,
            new DoubleMatrix1D(Arrays.copyOfRange(sensitivitiesForCurves.toArray(), 0, nFunding)));
    sensitivities.put(forwardCurveName, new DoubleMatrix1D(
            Arrays.copyOfRange(sensitivitiesForCurves.toArray(), nFunding, nForward + nFunding)));
    final Set<ComputedValue> results = new HashSet<>();
    results.addAll(getSensitivitiesForCurve(bundle.getCurve(fundingCurveName),
            sensitivities.get(fundingCurveName), curveSpecs.get(fundingCurveName), fundingResultSpecification));
    results.addAll(getSensitivitiesForCurve(bundle.getCurve(forwardCurveName),
            sensitivities.get(forwardCurveName), curveSpecs.get(forwardCurveName), forwardResultSpecification));
    return results;
}

From source file:de.unisb.cs.st.javalanche.mutation.results.MutationCoverageFile.java

/**
 * Merges the given mutation ids with the already-persisted covered set and
 * writes the union back to {@code COVERED_FILE}, then resets cached state.
 *
 * @param add the newly covered mutation ids to record
 */
public static void addCoveredMutations(Set<Long> add) {
    Set<Long> merged = new HashSet<Long>(add);
    merged.addAll(getCoveredMutations());
    SerializeIo.serializeToFile(merged, COVERED_FILE);
    reset();
}

From source file:com.hurence.logisland.plugin.PluginManager.java

/**
 * Resolves and downloads the artifacts of the given module via Ivy.
 * Performs a transitive resolve first, then retries every failed artifact
 * individually (as a plain jar, non-transitively).
 *
 * @param ivy the configured Ivy instance
 * @param moduleRevisionId the module to resolve
 * @param confs the Ivy configurations to resolve
 * @return the download reports, successful and retried, in encounter order
 * @throws Exception if the Ivy resolve fails
 */
private static Set<ArtifactDownloadReport> downloadArtifacts(Ivy ivy, ModuleRevisionId moduleRevisionId,
        String[] confs) throws Exception {
    // First pass: transitive resolve of the module and all its dependencies.
    ResolveOptions resolveOptions = new ResolveOptions();
    resolveOptions.setDownload(true);
    resolveOptions.setTransitive(true);
    resolveOptions.setOutputReport(false);
    resolveOptions.setConfs(confs);
    resolveOptions.setLog(null);
    resolveOptions.setValidate(false);
    resolveOptions.setCheckIfChanged(true);

    ResolveReport resolveReport = ivy.resolve(moduleRevisionId, resolveOptions, true);
    Set<ArtifactDownloadReport> downloaded = new LinkedHashSet<>(
            Arrays.asList(resolveReport.getAllArtifactsReports()));

    // Second pass: retry each failed artifact one by one, without transitivity.
    resolveOptions.setTransitive(false);
    for (ArtifactDownloadReport failed : resolveReport.getFailedArtifactsReports()) {
        DefaultArtifact retry = new DefaultArtifact(failed.getArtifact().getModuleRevisionId(),
                failed.getArtifact().getPublicationDate(), failed.getArtifact().getName(), "jar", "jar");
        downloaded.add(ivy.getResolveEngine().download(retry, new DownloadOptions()));
    }
    return downloaded;
}

From source file:com.espertech.esper.metrics.jmx.CommonJMXUtil.java

/**
 * Builds JMX attribute metadata for an object by pairing its
 * {@code @JmxGetter} / {@code @JmxSetter} annotated methods by attribute name.
 * Attributes may be read-only, write-only, or read-write.
 */
private static ModelMBeanAttributeInfo[] extractAttributeInfo(Object object) {
    Map<String, Method> getterByName = new HashMap<String, Method>();
    Map<String, Method> setterByName = new HashMap<String, Method>();
    Map<String, String> descriptionByName = new HashMap<String, String>();

    // Index every annotated accessor by the attribute name it declares.
    for (Method method : object.getClass().getMethods()) {
        JmxGetter getterAnnotation = method.getAnnotation(JmxGetter.class);
        if (getterAnnotation != null) {
            getterByName.put(getterAnnotation.name(), method);
            descriptionByName.put(getterAnnotation.name(), getterAnnotation.description());
        }
        JmxSetter setterAnnotation = method.getAnnotation(JmxSetter.class);
        if (setterAnnotation != null) {
            setterByName.put(setterAnnotation.name(), method);
            descriptionByName.put(setterAnnotation.name(), setterAnnotation.description());
        }
    }

    // Union of names covers getter-only and setter-only attributes alike.
    Set<String> attributeNames = new HashSet<String>(getterByName.keySet());
    attributeNames.addAll(setterByName.keySet());

    List<ModelMBeanAttributeInfo> attributeInfos = new ArrayList<ModelMBeanAttributeInfo>();
    for (String name : attributeNames) {
        Method getter = getterByName.get(name);
        Method setter = setterByName.get(name);
        try {
            ModelMBeanAttributeInfo info = new ModelMBeanAttributeInfo(name, descriptionByName.get(name),
                    getter, setter);
            Descriptor descriptor = info.getDescriptor();
            if (getter != null) {
                descriptor.setField("getMethod", getter.getName());
            }
            if (setter != null) {
                descriptor.setField("setMethod", setter.getName());
            }
            info.setDescriptor(descriptor);
            attributeInfos.add(info);
        } catch (IntrospectionException e) {
            throw new RuntimeException(e);
        }
    }

    return attributeInfos.toArray(new ModelMBeanAttributeInfo[attributeInfos.size()]);
}