Example usage for java.util LinkedHashSet add

List of usage examples for java.util LinkedHashSet add

Introduction

On this page you can find example usage for java.util LinkedHashSet add.

Prototype

boolean add(E e);

Document

Adds the specified element to this set if it is not already present (optional operation).
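
Before the project examples below, here is a minimal self-contained sketch (the class name and values are illustrative, not taken from any of the projects) showing the two properties those examples rely on: add returns false for an element that is already present, and iteration follows insertion order.

import java.util.LinkedHashSet;

public class LinkedHashSetAddDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> set = new LinkedHashSet<>();

        // add returns true when the element was not yet present
        System.out.println(set.add("b")); // true
        System.out.println(set.add("a")); // true

        // adding a duplicate leaves the set unchanged and returns false
        System.out.println(set.add("b")); // false

        // iteration order is insertion order, not natural (sorted) order
        System.out.println(set); // [b, a]
    }
}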

Usage

From source file:ArrayUtils.java

/**
 * Merges all elements of a set of arrays into a single array with no
 * duplicates. For primitive types.
 * 
 * @param type
 *            The type of the result
 * @param arrays
 *            The arrays to merge
 * @return A new array containing all distinct elements of the given
 *         arrays, in the order they are first encountered
 * @throws NullPointerException
 *             If any of the arrays is null
 * @throws ArrayStoreException
 *             If elements in the arrays are incompatible with
 *             <code>type</code>
 */
public static Object mergeInclusiveP(Class<?> type, Object... arrays) {
    java.util.LinkedHashSet<Object> set = new java.util.LinkedHashSet<Object>();
    int i, j;
    // add() silently drops duplicates while preserving encounter order
    for (i = 0; i < arrays.length; i++) {
        int len = Array.getLength(arrays[i]);
        for (j = 0; j < len; j++)
            set.add(Array.get(arrays[i], j));
    }
    // copy the de-duplicated elements into a new array of the requested component type
    Object ret = Array.newInstance(type, set.size());
    i = 0;
    for (Object el : set) {
        put(ret, el, i);
        i++;
    }
    return ret;
}
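
A hypothetical call to the method above (the arrays here are illustrative, and it assumes the class's put helper unboxes values into primitive arrays) shows why a LinkedHashSet is used: duplicates across the input arrays are dropped by add while the first-seen order is kept.

int[] a = { 1, 2, 3 };
int[] b = { 3, 4, 2, 5 };
int[] merged = (int[]) ArrayUtils.mergeInclusiveP(int.class, a, b);
// expected contents: 1, 2, 3, 4, 5 (the repeated 3 and 2 were rejected by set.add)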

From source file:com.streamsets.pipeline.stage.processor.fieldfilter.FieldFilterProcessor.java

@Override
protected void process(Record record, SingleLaneBatchMaker batchMaker) throws StageException {
    // use List to preserve the order of list fieldPaths - need to watch out for duplicates though
    List<String> allFieldPaths = record.getEscapedFieldPathsOrdered();
    // use LinkedHashSet to preserve order and dedupe as we go
    LinkedHashSet<String> fieldsToRemove;
    switch (filterOperation) {
    case REMOVE:
        fieldsToRemove = new LinkedHashSet<>();
        for (String field : fields) {
            List<String> matchingFieldPaths = FieldPathExpressionUtil.evaluateMatchingFieldPaths(field,
                    fieldPathEval, fieldPathVars, record, allFieldPaths);
            fieldsToRemove.addAll(matchingFieldPaths);
        }
        break;
    case REMOVE_NULL:
        fieldsToRemove = new LinkedHashSet<>();
        for (String field : fields) {
            List<String> matchingFieldPaths = FieldPathExpressionUtil.evaluateMatchingFieldPaths(field,
                    fieldPathEval, fieldPathVars, record, allFieldPaths);
            for (String fieldPath : matchingFieldPaths) {
                if (record.has(fieldPath) && record.get(fieldPath).getValue() == null) {
                    fieldsToRemove.add(fieldPath);
                }
            }
        }
        break;
    case REMOVE_EMPTY:
        fieldsToRemove = new LinkedHashSet<>();
        for (String field : fields) {
            List<String> matchingFieldPaths = FieldPathExpressionUtil.evaluateMatchingFieldPaths(field,
                    fieldPathEval, fieldPathVars, record, allFieldPaths);
            for (String fieldPath : matchingFieldPaths) {
                if (record.has(fieldPath) && record.get(fieldPath).getValue() != null
                        && record.get(fieldPath).getValue().equals("")) {
                    fieldsToRemove.add(fieldPath);
                }
            }
        }
        break;
    case REMOVE_NULL_EMPTY:
        fieldsToRemove = new LinkedHashSet<>();
        for (String field : fields) {
            List<String> matchingFieldPaths = FieldPathExpressionUtil.evaluateMatchingFieldPaths(field,
                    fieldPathEval, fieldPathVars, record, allFieldPaths);
            for (String fieldPath : matchingFieldPaths) {
                if (record.has(fieldPath) && (record.get(fieldPath).getValue() == null
                        || record.get(fieldPath).getValue().equals(""))) {
                    fieldsToRemove.add(fieldPath);
                }
            }
        }
        break;
    case REMOVE_CONSTANT:
        fieldsToRemove = new LinkedHashSet<>();
        for (String field : fields) {
            List<String> matchingFieldPaths = FieldPathExpressionUtil.evaluateMatchingFieldPaths(field,
                    fieldPathEval, fieldPathVars, record, allFieldPaths);
            for (String fieldPath : matchingFieldPaths) {
                if (record.has(fieldPath) && record.get(fieldPath).getValue() != null
                        && record.get(fieldPath).getValue().equals(constant)) {
                    fieldsToRemove.add(fieldPath);
                }
            }
        }
        break;
    case KEEP:
        //Algorithm:
        // - Get all possible field paths in the record
        //
        // - From that set, remove the argument fields that must be retained, along with their parent and child
        //   fields (accounting for wild card characters while doing so). The remaining fields are what must be
        //   removed from the record.
        //
        // - Keep fieldsToRemove in order - sorting is too costly
        //List all the possible field paths in this record
        fieldsToRemove = new LinkedHashSet<>(allFieldPaths);
        for (String field : fields) {
            //Keep parent fields
            //get the parent fieldPaths for each of the fields to keep
            List<String> parentFieldPaths = getParentFields(field);
            //remove parent paths from the fieldsToRemove set
            //Note that parent names could contain wild card characters
            for (String parentField : parentFieldPaths) {
                List<String> matchingFieldPaths = FieldRegexUtil.getMatchingFieldPaths(parentField,
                        allFieldPaths);
                fieldsToRemove.removeAll(matchingFieldPaths);
            }

            //Keep the field itself
            //remove the field path itself from the fieldsToRemove set
            //Consider wild card characters
            List<String> matchingFieldPaths = FieldPathExpressionUtil.evaluateMatchingFieldPaths(field,
                    fieldPathEval, fieldPathVars, record, allFieldPaths);
            fieldsToRemove.removeAll(matchingFieldPaths);

            //Keep the children of the field
            //For each of the fieldPaths that match the argument field path, remove all the child paths
            // Remove children at the end to avoid ConcurrentModificationException
            Set<String> childrenToKeep = new HashSet<>();
            for (String matchingFieldPath : matchingFieldPaths) {
                for (String fieldToRemove : fieldsToRemove) {
                    // for the old way, startsWith is appropriate when we have
                    // different path structures, or "nested" (multiple dimensioned) index structures.
                    //  eg: /USA[0]/SanFrancisco/folsom/streets[0] must still match:
                    //      /USA[0]/SanFrancisco/folsom/streets[0][0]   hence: startsWith.
                    if (StringUtils.countMatches(fieldToRemove, "/") == StringUtils
                            .countMatches(matchingFieldPath, "/")
                            && StringUtils.countMatches(fieldToRemove, "[") == StringUtils
                                    .countMatches(matchingFieldPath, "[")) {
                        if (fieldToRemove.equals(matchingFieldPath)) {
                            childrenToKeep.add(fieldToRemove);
                        }
                    } else {
                        if (fieldToRemove.startsWith(matchingFieldPath)) {
                            childrenToKeep.add(fieldToRemove);
                        }
                    }
                }
            }
            fieldsToRemove.removeAll(childrenToKeep);
        }
        break;
    default:
        throw new IllegalStateException(
                Utils.format("Unexpected Filter Operation '{}'", filterOperation.name()));
    }
    // We don't sort because we maintained list fields in ascending order (but not a full ordering)
    // Instead we just iterate in reverse to delete
    Iterator<String> itr = (new LinkedList<>(fieldsToRemove)).descendingIterator();
    while (itr.hasNext()) {
        record.delete(itr.next());
    }
    batchMaker.addRecord(record);
}

From source file:org.opencb.opencga.storage.mongodb.variant.load.variants.MongoDBVariantMerger.java

protected LinkedHashSet<String> getSampleNamesInFile(Integer fileId) {
    LinkedHashSet<String> samples = new LinkedHashSet<>();
    getSamplesInFile(fileId).forEach(sampleId -> {
        samples.add(studyConfiguration.getSampleIds().inverse().get(sampleId));
    });
    return samples;
}

From source file:com.haulmont.cuba.web.toolkit.ui.CubaGroupTable.java

@Override
protected LinkedHashSet<Object> getItemIdsInRange(Object startItemId, final int length) {
    Set<Object> rootIds = super.getItemIdsInRange(startItemId, length);
    LinkedHashSet<Object> ids = new LinkedHashSet<>();
    for (Object itemId : rootIds) {
        if (itemId instanceof GroupInfo) {
            if (!isExpanded(itemId)) {
                Collection<?> itemIds = getGroupItemIds(itemId);
                ids.addAll(itemIds);
                expand(itemId, true);
            }

            List<GroupInfo> children = (List<GroupInfo>) getChildren(itemId);
            for (GroupInfo groupInfo : children) {
                if (!isExpanded(groupInfo)) {
                    expand(groupInfo, true);
                }
            }
        } else {
            ids.add(itemId);
        }
    }
    return ids;
}

From source file:org.apache.tajo.engine.planner.LogicalPlanner.java

private static LinkedHashSet<Target> createFieldTargetsFromRelation(QueryBlock block, RelationNode relationNode,
        Set<String> newlyEvaluatedRefNames) {
    LinkedHashSet<Target> targets = Sets.newLinkedHashSet();
    for (Column column : relationNode.getTableSchema().getColumns()) {
        String aliasName = block.namedExprsMgr.checkAndGetIfAliasedColumn(column.getQualifiedName());
        if (aliasName != null) {
            targets.add(new Target(new FieldEval(column), aliasName));
            newlyEvaluatedRefNames.remove(aliasName);
        } else {
            targets.add(new Target(new FieldEval(column)));
        }
    }
    return targets;
}

From source file:net.hillsdon.reviki.search.impl.LuceneSearcher.java

private LinkedHashSet<SearchMatch> doQuery(final IndexReader reader, final Analyzer analyzer,
        final Searcher searcher, final String field, final boolean provideExtracts, final Query query)
        throws IOException, CorruptIndexException {
    Highlighter highlighter = null;
    if (provideExtracts) {
        highlighter = new Highlighter(new SimpleHTMLFormatter("<strong>", "</strong>"), new SimpleHTMLEncoder(),
                new QueryScorer(query));
    }
    Hits hits = searcher.search(query);
    LinkedHashSet<SearchMatch> results = new LinkedHashSet<SearchMatch>();
    @SuppressWarnings("unchecked")
    Iterator<Hit> iter = hits.iterator();
    while (iter.hasNext()) {
        Hit hit = iter.next();
        String text = hit.get(field);
        String extract = null;
        // The text is not stored for all fields, just provide a null extract.
        if (highlighter != null && text != null) {
            TokenStream tokenStream = analyzer.tokenStream(field, new StringReader(text));
            // Get 3 best fragments and separate with a "..."
            extract = highlighter.getBestFragments(tokenStream, text, 3, "...");
        }
        results.add(new SearchMatch(_wikiName.equals(hit.get(FIELD_WIKI)), hit.get(FIELD_WIKI),
                hit.get(FIELD_PATH), extract));
    }
    return results;
}

From source file:eionet.cr.dao.virtuoso.VirtuosoStagingDatabaseDAO.java

@Override
public Set<String> prepareStatement(String sql, String dbName) throws DAOException {

    if (StringUtils.isBlank(sql)) {
        throw new IllegalArgumentException("The given SQL statement must not be blank!");
    }

    LinkedHashSet<String> result = new LinkedHashSet<String>();

    Connection conn = null;
    PreparedStatement pstmt = null;
    try {
        conn = getSQLConnection(dbName);
        pstmt = SQLUtil.prepareStatement(sql, null, conn);
        ResultSetMetaData metaData = pstmt.getMetaData();
        int colCount = metaData.getColumnCount();
        for (int i = 1; i <= colCount; i++) {
            String colName = metaData.getColumnName(i);
            result.add(colName);
        }
    } catch (SQLException e) {
        throw new DAOException(e.getMessage(), e);
    } finally {
        SQLUtil.close(pstmt);
        SQLUtil.close(conn);
    }

    return result;
}

From source file:probe.com.selectionmanager.StudiesFilterManager.java

/**
 * update the current filtered dataset indexes
 *
 * @param datasetIndexes
 */
private void updateFilteredDatasetList(int[] datasetIndexes) {

    if (datasetIndexes.length == 0) {
        filteredQuantDatasetArr = inUsefullQuantDatasetMap;
        return;
    }
    resetHeatmapRowsAndColumn();
    filteredQuantDatasetArr.clear();
    Set<String> tColLab = new HashSet<String>();
    Set<String> tRowLab = new HashSet<String>();
    selectedDiseaseGroupMap.clear();

    for (int i : datasetIndexes) {

        QuantDatasetObject quantDS = inUsefullQuantDatasetMap.get(i);
        filteredQuantDatasetArr.put(i, quantDS);

        if (fullDiseaseGroupMap.containsKey(i)) {
            DiseaseGroup dg = fullDiseaseGroupMap.get(i);
            tColLab.add(dg.getPatientsSubGroupI());
            tColLab.add(dg.getPatientsSubGroupII());
            tRowLab.add(dg.getPatientsSubGroupI());
            tRowLab.add(dg.getPatientsSubGroupII());
            selectedDiseaseGroupMap.put(i, dg);
        }

    }

    LinkedHashSet<String> tSelectedColLab = new LinkedHashSet<String>();
    LinkedHashSet<String> tSelectedRowLab = new LinkedHashSet<String>();
    for (String str : selectedHeatMapRows) {
        if (tRowLab.contains(str) && !str.contains(userDiseaseGroupB)) {
            tSelectedRowLab.add(str);

        }

    }

    for (String str : selectedHeatMapColumns) {
        if (tColLab.contains(str) && !str.contains(userDiseaseGroupA)) {
            tSelectedColLab.add(str);
        }

    }

    selectedHeatMapColumns.clear();
    selectedHeatMapColumns.addAll(tSelectedColLab);
    selectedHeatMapRows.clear();
    selectedHeatMapRows.addAll(tSelectedRowLab);

}

From source file:org.unitedinternet.cosmo.model.hibernate.EntityConverter.java

/**
 * Expands an event calendar and returns a set of notes representing the
 * master and exception items.
 * <p>
 * The provided note corresponds to the recurrence master or, for
 * non-recurring items, the single event in the calendar. The result set
 * includes both the master note and a modification note for each
 * exception event in the calendar.
 * </p>
 * <p>
 * If the master note does not already have a UUID or an event stamp, one
 * is assigned to it. A UUID is assigned because any modification items
 * that are created require the master's UUID in order to construct
 * their own.
 * </p>
 * <p>
 * If the given note is already persistent, and the calendar does not
 * contain an exception event matching an existing modification, that
 * modification is set inactive. It is still returned in the result set.
 * </p>
 * @param note The note item.
 * @param calendar The calendar.
 * @return The set of note items.
 */
public Set<NoteItem> convertEventCalendar(NoteItem note, Calendar calendar) {
    EventStamp eventStamp = (EventStamp) note.getStamp(EventStamp.class);

    if (eventStamp == null) {
        eventStamp = entityFactory.createEventStamp(note);
        note.addStamp(eventStamp);
    }

    if (note.getUid() == null) {
        note.setUid(entityFactory.generateUid());
    }

    updateEventInternal(note, calendar);

    LinkedHashSet<NoteItem> items = new LinkedHashSet<NoteItem>();
    items.add(note);

    // add modifications to set of items
    for (Iterator<NoteItem> it = note.getModifications().iterator(); it.hasNext();) {
        NoteItem mod = it.next();
        items.add(mod);
    }

    return items;
}

From source file:org.apache.ws.scout.registry.BusinessQueryManagerV3Impl.java

/**
 * Finds all Service objects that match all of the criteria specified by
 * the parameters of this call.  This is a logical AND operation between
 * all non-null parameters.
 *
 * TODO - support findQualifiers, classifications and specifications
 *
 * @param orgKey
 * @param findQualifiers
 * @param namePatterns
 * @param classifications
 * @param specificationa
 * @return BulkResponse
 * @throws JAXRException
 */
public BulkResponse findServices(Key orgKey, Collection findQualifiers, Collection namePatterns,
        Collection classifications, Collection specificationa) throws JAXRException {
    BulkResponseImpl blkRes = new BulkResponseImpl();

    IRegistryV3 iRegistry = (IRegistryV3) registryService.getRegistry();
    FindQualifiers juddiFindQualifiers = mapFindQualifiers(findQualifiers);
    Name[] juddiNames = mapNamePatterns(namePatterns);

    try {
        /*
         * hit the registry.  The key is not required for UDDI2
         */

        String id = null;

        if (orgKey != null) {
            id = orgKey.getId();
        }

        ServiceList serviceList = iRegistry.findService(id, juddiNames,
                ScoutJaxrUddiV3Helper.getCategoryBagFromClassifications(classifications), null,
                juddiFindQualifiers, registryService.getMaxRows());

        /*
         * now convert  from jUDDI ServiceInfo objects to JAXR Services
         */
        if (serviceList != null) {

            ServiceInfos serviceInfos = serviceList.getServiceInfos();
            LinkedHashSet<Service> col = new LinkedHashSet<Service>();

            if (serviceInfos != null && serviceInfos.getServiceInfo() != null) {
                for (ServiceInfo si : serviceInfos.getServiceInfo()) {
                    Service srv = (Service) getRegistryObject(si.getServiceKey(), LifeCycleManager.SERVICE);
                    col.add(srv);
                }

            }
            blkRes.setCollection(col);
        }
    } catch (RegistryV3Exception e) {
        throw new JAXRException(e.getLocalizedMessage());
    }

    return blkRes;
}