Example usage for java.util.Set.addAll

List of usage examples for java.util.Set.addAll

Introduction

On this page you can find example usages of java.util.Set.addAll.

Prototype

boolean addAll(Collection<? extends E> c);

Document

Adds all of the elements in the specified collection to this set if they're not already present (optional operation).
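
Before the project examples below, here is a minimal self-contained sketch of that contract (class and variable names are illustrative, not taken from any of the projects): addAll returns true only if the set actually changed, and elements already present are skipped.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SetAddAllDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green"));
        List<String> extras = Arrays.asList("green", "blue");

        // true: "blue" was not already present, so the set changed
        boolean changed = colors.addAll(extras);

        System.out.println(changed);       // true
        System.out.println(colors.size()); // 3 -- the duplicate "green" was ignored
    }
}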

Usage

From source file:com.baidu.rigel.biplatform.ma.report.utils.QueryUtils.java

/**
 * Converts the given data source definition, report design model and query action
 * into a {@link ConfigQuestionModel}.
 *
 * @param dsDefine the data source definition
 * @param reportModel the report design model
 * @param queryAction the query action describing the requested query
 * @param securityKey the security key used when building the data source info
 * @return the assembled question model
 * @throws QueryModelBuildException if the query action, extend area or cube cannot be resolved
 */
public static QuestionModel convert2QuestionModel(DataSourceDefine dsDefine, ReportDesignModel reportModel,
        QueryAction queryAction, String securityKey) throws QueryModelBuildException {
    if (queryAction == null) {
        throw new QueryModelBuildException("query action is null");
    }
    ConfigQuestionModel questionModel = new ConfigQuestionModel();
    String areaId = queryAction.getExtendAreaId();
    if (StringUtils.isEmpty(areaId)) {
        throw new QueryModelBuildException("area id is empty");
    }
    ExtendArea area = reportModel.getExtendById(areaId);
    if (area == null) {
        throw new QueryModelBuildException("can not get area with id : " + areaId);
    }
    Cube cube = getCubeWithExtendArea(reportModel, area);
    if (cube == null) {
        throw new QueryModelBuildException("can not get cube define in area : " + areaId);
    }
    // build the axis metadata for the query
    questionModel.setAxisMetas(buildAxisMeta(reportModel.getSchema(), area, queryAction));
    // build the query conditions
    questionModel.setQueryConditions(buildQueryConditions(reportModel, area, queryAction));
    questionModel.setCubeId(area.getCubeId());
    ((MiniCube) cube).setProductLine(dsDefine.getProductLine());
    // TODO update the logic cube with the slice/row items collected below
    Set<Item> tmp = Sets.newHashSet();
    tmp.addAll(queryAction.getSlices().keySet());
    tmp.addAll(queryAction.getRows().keySet());
    //        updateLogicCubeWithSlices(cube, tmp,
    //                reportModel.getSchema().getCubes().get(area.getCubeId()));
    questionModel.setCube(cube);
    questionModel.setDataSourceInfo(buidDataSourceInfo(dsDefine, securityKey));
    MeasureOrderDesc orderDesc = queryAction.getMeasureOrderDesc();
    if (orderDesc != null) {
        SortType sortType = SortType.valueOf(orderDesc.getOrderType());
        String uniqueName = "[Measure].[" + orderDesc.getName() + "]";
        SortRecord sortRecord = new SortRecord(sortType, uniqueName, orderDesc.getRecordSize());
        questionModel.setSortRecord(sortRecord);
    }
    // TODO
    questionModel.getQueryConditionLimit().setWarningAtOverFlow(false);
    if (queryAction.isNeedOthers()) {
        // TODO
        questionModel.getRequestParams().put("NEED_OTHERS", "1");
    }
    putSliceConditionIntoParams(queryAction, questionModel);
    questionModel.setFilterBlank(queryAction.isFilterBlank());
    return questionModel;
}

From source file:edu.umass.cs.utils.Util.java

/**
 * @param dir the directory to search
 * @param match one or more path prefixes to match against
 * @return Files (recursively) within {@code dir} matching any of the match
 *         patterns in {@code match}.
 */
public static File[] getMatchingFiles(String dir, String... match) {
    File dirFile = new File(dir);
    Set<File> matchFiles = new HashSet<File>();
    for (String m : match)
        if (dirFile.getPath().startsWith(m.replaceAll("/$", "")))
            matchFiles.add(dirFile);
    if (dirFile.isDirectory()) {
        // check constituent files in directory
        for (File f : dirFile.listFiles())
            matchFiles.addAll(Arrays.asList(getMatchingFiles(f.getPath(), match)));
    }
    return matchFiles.toArray(new File[0]);
}
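
A hypothetical call to the helper above might look as follows; the directory and prefix are assumptions made for illustration, not taken from the project.

// Illustrative usage -- the paths are placeholders, not part of edu.umass.cs.utils.Util.
File[] logs = Util.getMatchingFiles("/tmp/app-logs", "/tmp/app-logs/2024");
for (File f : logs)
    System.out.println(f.getPath());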

From source file:io.fabric8.maven.core.util.KubernetesResourceUtil.java

public static Set<HasMetadata> loadResources(File manifest) throws IOException {
    Object dto = KubernetesHelper.loadYaml(manifest, KubernetesResource.class);
    if (dto == null) {
        throw new IllegalStateException("Cannot load kubernetes YAML: " + manifest);
    }

    if (dto instanceof Template) {
        Template template = (Template) dto;
        boolean failOnMissingParameterValue = false;
        dto = Templates.processTemplatesLocally(template, failOnMissingParameterValue);
    }

    Set<KubernetesResource<?>> resources = new LinkedHashSet<>();

    Set<HasMetadata> entities = new TreeSet<>(new HasMetadataComparator());
    for (KubernetesResource<?> resource : resources) {
        entities.addAll(KubernetesHelper.toItemList(resource));
    }

    entities.addAll(KubernetesHelper.toItemList(dto));
    return entities;
}
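
A hedged usage sketch of the loader above; the manifest path is an assumption for illustration, not taken from the fabric8 sources.

// Illustrative only: the path is a placeholder.
File manifest = new File("target/classes/META-INF/fabric8/kubernetes.yml");
Set<HasMetadata> entities = KubernetesResourceUtil.loadResources(manifest);
for (HasMetadata entity : entities) {
    System.out.println(entity.getKind() + "/" + entity.getMetadata().getName());
}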

From source file:cascading.flow.hadoop.util.HadoopUtil.java

public static <C extends Configuration> C copyConfiguration(Map<Object, Object> srcProperties,
        C dstConfiguration) {
    Set<Object> keys = new HashSet<Object>(srcProperties.keySet());

    // keys will only be grabbed if both key/value are String, so keep orig keys
    if (srcProperties instanceof Properties)
        keys.addAll(((Properties) srcProperties).stringPropertyNames());

    for (Object key : keys) {
        Object value = srcProperties.get(key);

        if (value == null && srcProperties instanceof Properties && key instanceof String)
            value = ((Properties) srcProperties).getProperty((String) key);

        if (value == null) // don't stuff null values
            continue;

        // don't let these objects pass, even though toString is called below.
        if (value instanceof Class || value instanceof JobConf)
            continue;

        dstConfiguration.set(key.toString(), value.toString());
    }

    return dstConfiguration;
}
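
The key juggling above stems from how java.util.Properties treats its defaults table: keySet() and get() ignore it, while stringPropertyNames() and getProperty() consult it (stringPropertyNames() also skips any entry whose key or value is not a String). A minimal sketch of the defaults behavior, with invented property names:

import java.util.Properties;

public class PropertiesKeysDemo {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("job.reduces", "2");

        Properties props = new Properties(defaults);
        props.setProperty("job.name", "example");

        // keySet() only sees entries set directly on this instance
        System.out.println(props.keySet());              // [job.name]
        // stringPropertyNames() also includes String-valued defaults
        System.out.println(props.stringPropertyNames()); // e.g. [job.reduces, job.name]

        // get() bypasses the defaults table, getProperty() does not -- hence the fallback above
        System.out.println(props.get("job.reduces"));         // null
        System.out.println(props.getProperty("job.reduces")); // 2
    }
}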

From source file:models.NotificationEvent.java

private static Set<User> getDefaultReceivers(PullRequest pullRequest) {
    Set<User> watchers = pullRequest.getWatchers();
    watchers.addAll(getMentionedUsers(pullRequest.body));
    return watchers;
}

From source file:eu.project.ttc.models.index.JsonTermIndexIO.java

public static void save(Writer writer, TermIndex termIndex, JsonOptions options) throws IOException {
    JsonFactory jsonFactory = new JsonFactory(); // or, for data binding, org.codehaus.jackson.mapper.MappingJsonFactory 
    //      jsonFactory.configure(f, state)
    JsonGenerator jg = jsonFactory.createGenerator(writer); // or Stream, Reader
    jg.useDefaultPrettyPrinter();

    jg.writeStartObject();

    jg.writeFieldName(METADATA);
    jg.writeStartObject();
    jg.writeFieldName(NAME);
    jg.writeString(termIndex.getName());
    jg.writeFieldName(LANG);
    jg.writeString(termIndex.getLang().getCode());
    if (termIndex.getCorpusId() != null) {
        jg.writeFieldName(CORPUS_ID);
        jg.writeString(termIndex.getCorpusId());
    }

    jg.writeFieldName(OCCURRENCE_STORAGE);
    if (options.isMongoDBOccStore()) {
        jg.writeString(OCCURRENCE_STORAGE_MONGODB);
        jg.writeFieldName(OCCURRENCE_MONGODB_STORE_URI);
        jg.writeString(options.getMongoDBOccStore());
    } else if (options.isEmbeddedOccurrences())
        jg.writeString(OCCURRENCE_STORAGE_EMBEDDED);
    else
        throw new IllegalStateException("Unknown storage mode");

    jg.writeFieldName(NB_WORD_ANNOTATIONS);
    jg.writeNumber(termIndex.getWordAnnotationsNum());
    jg.writeFieldName(NB_SPOTTED_TERMS);
    jg.writeNumber(termIndex.getSpottedTermsNum());

    jg.writeEndObject();

    jg.writeFieldName(INPUT_SOURCES);
    int idCnt = 0;
    Map<String, Integer> inputSources = Maps.newTreeMap();
    for (Document d : termIndex.getDocuments())
        if (!inputSources.containsKey(d.getUrl()))
            inputSources.put(d.getUrl(), ++idCnt);
    jg.writeStartObject();
    for (String uri : inputSources.keySet()) {
        jg.writeFieldName(inputSources.get(uri).toString());
        jg.writeString(uri);
    }
    jg.writeEndObject();

    jg.writeFieldName(WORDS);
    jg.writeStartArray();
    for (Word w : termIndex.getWords()) {
        jg.writeStartObject();
        jg.writeFieldName(LEMMA);
        jg.writeString(w.getLemma());
        jg.writeFieldName(STEM);
        jg.writeString(w.getStem());
        if (w.isCompound()) {
            jg.writeFieldName(COMPOUND_TYPE);
            jg.writeString(w.getCompoundType().name());
            jg.writeFieldName(COMPONENTS);
            jg.writeStartArray();
            for (Component c : w.getComponents()) {
                jg.writeStartObject();
                jg.writeFieldName(LEMMA);
                jg.writeString(c.getLemma());
                jg.writeFieldName(BEGIN);
                jg.writeNumber(c.getBegin());
                jg.writeFieldName(END);
                jg.writeNumber(c.getEnd());
                jg.writeEndObject();
            }
            jg.writeEndArray();
        }

        jg.writeEndObject();
    }
    jg.writeEndArray();

    Set<TermVariation> termVariations = Sets.newHashSet();

    jg.writeFieldName(TERMS);
    jg.writeStartArray();
    for (Term t : termIndex.getTerms()) {
        termVariations.addAll(t.getVariations());

        jg.writeStartObject();
        jg.writeFieldName(ID);
        jg.writeNumber(t.getId());
        jg.writeFieldName(RANK);
        jg.writeNumber(t.getRank());
        jg.writeFieldName(GROUPING_KEY);
        jg.writeString(t.getGroupingKey());
        jg.writeFieldName(WORDS);
        jg.writeStartArray();
        for (TermWord tw : t.getWords()) {
            jg.writeStartObject();
            jg.writeFieldName(SYN);
            jg.writeString(tw.getSyntacticLabel());
            jg.writeFieldName(LEMMA);
            jg.writeString(tw.getWord().getLemma());
            jg.writeEndObject();
        }
        jg.writeEndArray();

        jg.writeFieldName(FREQUENCY);
        jg.writeNumber(t.getFrequency());
        jg.writeFieldName(FREQ_NORM);
        jg.writeNumber(t.getFrequencyNorm());
        jg.writeFieldName(GENERAL_FREQ_NORM);
        jg.writeNumber(t.getGeneralFrequencyNorm());
        jg.writeFieldName(SPECIFICITY);
        jg.writeNumber(t.getSpecificity());
        jg.writeFieldName(SPOTTING_RULE);
        jg.writeString(t.getSpottingRule());

        if (options.withOccurrences() && options.isEmbeddedOccurrences()) {
            jg.writeFieldName(OCCURRENCES);
            jg.writeStartArray();
            for (TermOccurrence termOcc : t.getOccurrences()) {
                jg.writeStartObject();
                jg.writeFieldName(BEGIN);
                jg.writeNumber(termOcc.getBegin());
                jg.writeFieldName(END);
                jg.writeNumber(termOcc.getEnd());
                jg.writeFieldName(TEXT);
                jg.writeString(termOcc.getCoveredText());
                jg.writeFieldName(FILE);
                jg.writeNumber(inputSources.get(termOcc.getSourceDocument().getUrl()));
                jg.writeEndObject();
            }
            jg.writeEndArray();
        }

        if (options.isWithContexts() && t.isContextVectorComputed()) {
            jg.writeFieldName(CONTEXT);
            jg.writeStartObject();

            jg.writeFieldName(TOTAL_COOCCURRENCES);
            jg.writeNumber(t.getContextVector().getTotalCoccurrences());
            jg.writeFieldName(CO_OCCURRENCES);
            jg.writeStartArray();
            if (t.isContextVectorComputed()) {
                for (ContextVector.Entry contextEntry : t.getContextVector().getEntries()) {
                    jg.writeStartObject();
                    jg.writeFieldName(CO_TERM);
                    jg.writeString(contextEntry.getCoTerm().getGroupingKey());
                    jg.writeFieldName(NB_COCCS);
                    jg.writeNumber(contextEntry.getNbCooccs());
                    jg.writeFieldName(ASSOC_RATE);
                    jg.writeNumber(contextEntry.getAssocRate());
                    jg.writeEndObject();
                }
            }
            jg.writeEndArray();
            jg.writeEndObject();
        }

        jg.writeEndObject();
    }
    jg.writeEndArray();

    /* Variants */
    jg.writeFieldName(TERM_VARIATIONS);
    jg.writeStartArray();
    for (TermVariation v : termVariations) {
        jg.writeStartObject();
        jg.writeFieldName(BASE);
        jg.writeString(v.getBase().getGroupingKey());
        jg.writeFieldName(VARIANT);
        jg.writeString(v.getVariant().getGroupingKey());
        jg.writeFieldName(VARIANT_TYPE);
        jg.writeString(v.getVariationType().getShortName());
        jg.writeFieldName(INFO);
        jg.writeString(v.getInfo().toString());
        jg.writeFieldName(VARIANT_SCORE);
        jg.writeNumber(v.getScore());
        jg.writeEndObject();
    }
    jg.writeEndArray();

    jg.writeEndObject();
    jg.close();
}

From source file:com.espertech.esper.epl.join.plan.NStreamOuterQueryPlanBuilder.java

/**
 * Recursively builds a substream-per-stream ordered tree graph using the
 * join information supplied for outer joins and from the query graph (where clause).
 * <p>
 * Required streams are considered first and their lookup is placed first in the list
 * to gain performance.
 * @param streamNum is the root stream number that supplies the incoming event to build the tree for
 * @param queryGraph contains where-clause stream relationship info
 * @param outerInnerGraph contains the outer join stream relationship info
 * @param completedStreams is a temporary holder for streams already considered
 * @param substreamsPerStream is the ordered, tree-like structure to be filled
 * @param requiredPerStream indicates which streams are required and which are optional
 * @param streamCallStack the query plan call stack of streams available via cursor
 * @param dependencyGraph - dependencies between historical streams
 * @throws ExprValidationException if the query planning failed
 */
protected static void recursiveBuild(int streamNum, Stack<Integer> streamCallStack, QueryGraph queryGraph,
        OuterInnerDirectionalGraph outerInnerGraph, InnerJoinGraph innerJoinGraph,
        Set<Integer> completedStreams, LinkedHashMap<Integer, int[]> substreamsPerStream,
        boolean[] requiredPerStream, DependencyGraph dependencyGraph) throws ExprValidationException {
    // add this stream to the set of completed streams
    completedStreams.add(streamNum);

    // check if the dependencies have been satisfied
    if (dependencyGraph.hasDependency(streamNum)) {
        Set<Integer> dependencies = dependencyGraph.getDependenciesForStream(streamNum);
        for (Integer dependentStream : dependencies) {
            if (!streamCallStack.contains(dependentStream)) {
                throw new ExprValidationException(
                        "Historical stream " + streamNum + " parameter dependency originating in stream "
                                + dependentStream + " cannot or may not be satisfied by the join");
            }
        }
    }

    // Determine the streams we can navigate to from this stream
    Set<Integer> navigableStreams = queryGraph.getNavigableStreams(streamNum);

    // add unqualified navigable streams (since on-expressions in outer joins are optional)
    Set<Integer> unqualifiedNavigable = outerInnerGraph.getUnqualifiedNavigableStreams().get(streamNum);
    if (unqualifiedNavigable != null) {
        navigableStreams.addAll(unqualifiedNavigable);
    }

    // remove those already done
    navigableStreams.removeAll(completedStreams);

    // Which streams are inner streams to this stream (optional), which ones are outer to the stream (required)
    Set<Integer> requiredStreams = getOuterStreams(streamNum, navigableStreams, outerInnerGraph);

    // Add inner joins, if any, unless already completed for this stream
    innerJoinGraph.addRequiredStreams(streamNum, requiredStreams, completedStreams);

    Set<Integer> optionalStreams = getInnerStreams(streamNum, navigableStreams, outerInnerGraph, innerJoinGraph,
            completedStreams);

    // Remove from the required streams the optional streams which places 'full' joined streams
    // into the optional stream category
    requiredStreams.removeAll(optionalStreams);

    // if we are a leaf node, we are done
    if (navigableStreams.isEmpty()) {
        substreamsPerStream.put(streamNum, new int[0]);
        return;
    }

    // First the outer (required) streams to this stream, then the inner (optional) streams
    int[] substreams = new int[requiredStreams.size() + optionalStreams.size()];
    substreamsPerStream.put(streamNum, substreams);
    int count = 0;
    for (int stream : requiredStreams) {
        substreams[count++] = stream;
        requiredPerStream[stream] = true;
    }
    for (int stream : optionalStreams) {
        substreams[count++] = stream;
    }

    // next we look at all the required streams and add their dependent streams
    for (int stream : requiredStreams) {
        completedStreams.add(stream);
    }

    for (int stream : requiredStreams) {
        streamCallStack.push(stream);
        recursiveBuild(stream, streamCallStack, queryGraph, outerInnerGraph, innerJoinGraph, completedStreams,
                substreamsPerStream, requiredPerStream, dependencyGraph);
        streamCallStack.pop();
    }
    // look at all the optional streams and add their dependent streams
    for (int stream : optionalStreams) {
        streamCallStack.push(stream);
        recursiveBuild(stream, streamCallStack, queryGraph, outerInnerGraph, innerJoinGraph, completedStreams,
                substreamsPerStream, requiredPerStream, dependencyGraph);
        streamCallStack.pop();
    }
}

From source file:models.NotificationEvent.java

public static void afterNewComment(Comment comment) {
    AbstractPosting post = comment.getParent();

    NotificationEvent notiEvent = createFromCurrentUser(comment);
    notiEvent.title = formatReplyTitle(post);
    Set<User> receivers = getReceivers(post);
    receivers.addAll(getMentionedUsers(comment.contents));
    receivers.remove(UserApp.currentUser());
    notiEvent.receivers = receivers;
    notiEvent.eventType = NEW_COMMENT;
    notiEvent.oldValue = null;
    notiEvent.newValue = comment.contents;
    notiEvent.resourceType = comment.asResource().getType();
    notiEvent.resourceId = comment.asResource().getId();

    NotificationEvent.add(notiEvent);
}

From source file:models.NotificationEvent.java

public static void afterNewCommentWithState(Comment comment, State state) {
    AbstractPosting post = comment.getParent();

    NotificationEvent notiEvent = createFromCurrentUser(comment);
    notiEvent.title = formatReplyTitle(post);
    Set<User> receivers = getReceivers(post);
    receivers.addAll(getMentionedUsers(comment.contents));
    receivers.remove(UserApp.currentUser());
    notiEvent.receivers = receivers;
    notiEvent.eventType = NEW_COMMENT;
    notiEvent.oldValue = null;
    notiEvent.newValue = comment.contents + "\n" + state.state();
    notiEvent.resourceType = comment.asResource().getType();
    notiEvent.resourceId = comment.asResource().getId();

    NotificationEvent.add(notiEvent);
}

From source file:models.NotificationEvent.java

private static Set<User> getReceivers(AbstractPosting abstractPosting) {
    Set<User> receivers = abstractPosting.getWatchers();
    receivers.addAll(getMentionedUsers(abstractPosting.body));
    receivers.remove(UserApp.currentUser());
    return receivers;
}