Example usage for java.util LinkedHashSet add

List of usage examples for java.util LinkedHashSet add

Introduction

On this page you can find example usages of java.util LinkedHashSet add.

Prototype

boolean add(E e);

Source Link

Document

Adds the specified element to this set if it is not already present (optional operation).

Usage

From source file:ubic.gemma.core.loader.expression.geo.model.GeoValues.java

/**
 * Only needs to be called 'externally' if you know there is no data for the sample.
 *
 * @param sample                sample/*from   w  w w .ja va2s .co  m*/
 * @param quantitationTypeIndex QT index
 * @return geo platform
 */
private GeoPlatform addSample(GeoSample sample, Integer quantitationTypeIndex) {
    if (sample.getPlatforms().size() > 1) {
        throw new IllegalArgumentException(sample + ": Can't handle samples that use multiple platforms");
    }

    GeoPlatform platform = sample.getPlatforms().iterator().next();
    if (!sampleDimensions.containsKey(platform)) {
        sampleDimensions.put(platform, new HashMap<Integer, LinkedHashSet<GeoSample>>());
    }

    Map<Integer, LinkedHashSet<GeoSample>> samplePlatformMap = sampleDimensions.get(platform);
    if (!samplePlatformMap.containsKey(quantitationTypeIndex)) {
        samplePlatformMap.put(quantitationTypeIndex, new LinkedHashSet<GeoSample>());
    }

    LinkedHashSet<GeoSample> sampleQtMap = samplePlatformMap.get(quantitationTypeIndex);
    sampleQtMap.add(sample);
    return platform;
}

From source file:org.objectweb.proactive.extensions.dataspaces.vfs.VFSSpacesMountManagerImpl.java

/**
 * Mounts the first available VFS file system on the given dataspace
 * @param spaceInfo space information/*from w ww .  ja v a  2 s  . c o m*/
 * @throws FileSystemException if no file system could be mounted
 */
private void mountFirstAvailableFileSystem(final SpaceInstanceInfo spaceInfo) throws FileSystemException {

    final DataSpacesURI mountingPoint = spaceInfo.getMountingPoint();

    try {
        writeLock.lock();
        if (!mountedSpaces.containsKey(mountingPoint)) {
            mountedSpaces.put(mountingPoint, new ConcurrentHashMap<String, FileObject>());
        }
        ConcurrentHashMap<String, FileObject> fileSystems = mountedSpaces.get(mountingPoint);

        if (spaceInfo.getUrls().size() == 0) {
            throw new IllegalStateException("Empty Space configuration");
        }

        DataSpacesURI spacePart = mountingPoint.getSpacePartOnly();
        ArrayList<String> urls = new ArrayList<String>(spaceInfo.getUrls());
        if (urls.size() == 1) {
            urls.add(0, Utils.getLocalAccessURL(urls.get(0), spaceInfo.getPath(), spaceInfo.getHostname()));
        }

        logger.debug("[VFSMountManager] Request mounting VFS root list : " + urls);

        try {
            VFSMountManagerHelper.mountAny(urls, fileSystems);

            if (!accessibleFileObjectUris.containsKey(mountingPoint)) {
                LinkedHashSet<String> srl = new LinkedHashSet<String>();
                accessibleFileObjectUris.put(mountingPoint, srl);
            }

            LinkedHashSet<String> srl = accessibleFileObjectUris.get(mountingPoint);

            for (String uri : urls) {
                if (fileSystems.containsKey(uri)) {
                    srl.add(uri);
                }
            }
            if (srl.isEmpty()) {
                throw new IllegalStateException("Invalid empty size list when trying to mount " + urls
                        + " mounted map content is " + fileSystems);
            }
            accessibleFileObjectUris.put(mountingPoint, srl);

            if (logger.isDebugEnabled())
                logger.debug(
                        String.format("[VFSMountManager] Mounted space: %s (access URL: %s)", spacePart, srl));

            mountedSpaces.put(mountingPoint, fileSystems);

        } catch (org.apache.commons.vfs.FileSystemException e) {
            mountedSpaces.remove(mountingPoint);
            throw new FileSystemException("An error occurred while trying to mount " + spaceInfo.getName(), e);
        }
    } finally {
        writeLock.unlock();
    }
}

From source file:org.occiware.clouddesigner.occi.linkeddata.connector.LdprojectConnector.java

/**
 * Pushes this connector's configuration onto the given DCResource: name, robustness,
 * the database URI derived from the first Lddatabaselink, and the set of locally
 * visible projects derived from Ldprojectlinks.
 *
 * @param resource must come from getExistingOrNew()
 * @throws RuntimeException if a linked database Compute has no hostname
 */
private void updateAttributes(DCResource resource) {
    resource.set("dcmpv:name", this.getName()); // (actually only if creation)
    ///resource.set("dcmp:frozenModelNames", ); // NO ONLY ACTIONS ELSE VOIDS VALUE
    resource.set("dcmpvdb:robust", this.getRobustness() == Robustness.CLUSTER);

    ///resource.set("dcmpvdb:uri", this.dburi); // rather using links :
    String ldDbUri = null;
    if (this.links != null) { // ex. in Mart
        List<Link> lddLinks = this.links.stream()
                .filter(l -> l instanceof Lddatabaselink && l.getTarget() instanceof Compute)
                .collect(Collectors.toList());
        if (!lddLinks.isEmpty()) {
            Lddatabaselink lddLink = (Lddatabaselink) lddLinks.get(0); // first one matters only
            Compute customSecondaryCompute = (Compute) lddLink.getTarget();
            if (customSecondaryCompute.getHostname() == null
                    || customSecondaryCompute.getHostname().trim().length() == 0) {
                throw new RuntimeException("Lddatabaselink's target Compute has no hostname");
            }
            ldDbUri = "mongodb://" + customSecondaryCompute.getHostname() + ":" + lddLink.getPort() + "/"
                    + lddLink.getDatabase();
        }
    }
    resource.set("dcmpvdb:uri", ldDbUri);

    // Local visible projects: start from the resource's existing value when present,
    // otherwise seed with the metamodel project (every project must see it).
    final LinkedHashSet<String> lvp;
    @SuppressWarnings("unchecked")
    Collection<String> lvpFound = (Collection<String>) resource.get("dcmp:localVisibleProjects"); // else java.lang.ClassCastException: java.util.LinkedHashSet cannot be cast to java.util.List !
    if (lvpFound == null) {
        lvp = new LinkedHashSet<String>();
        lvp.add(UriHelper.buildUri(ldContainerUrl, "dcmp:Project_0", "oasis.meta")); // all projects must see metamodel
    } else {
        lvp = new LinkedHashSet<String>(lvpFound);
    }
    if (this.links != null) { // ex. in Mart
        List<String> ldpLinkTargetProjectUris = this.links.stream()
                .filter(l -> l instanceof Ldprojectlink && l.getTarget() instanceof Ldproject)
                .map(ldpl -> UriHelper.buildUri(ldContainerUrl, "dcmp:Project_0",
                        ((Ldproject) ldpl.getTarget()).getName()))
                .collect(Collectors.toList());
        lvp.addAll(ldpLinkTargetProjectUris);
    }
    resource.set("dcmp:localVisibleProjects", lvp);
}

From source file:org.openhab.binding.network.internal.utils.NetworkUtils.java

/**
 * Takes the interfaceIPs and fetches every IP which can be assigned on their network.
 *
 * @param interfaceIPs The IPs which are assigned to the Network Interfaces
 *            (in the CIDR notation expected by {@code SubnetUtils})
 * @param maximumPerInterface The maximum of IP addresses per interface or 0 to get all.
 * @return Every single IP which can be assigned on the Networks the computer is connected to
 */
public Set<String> getNetworkIPs(Set<String> interfaceIPs, int maximumPerInterface) {
    LinkedHashSet<String> networkIPs = new LinkedHashSet<>();

    for (String interfaceIP : interfaceIPs) {
        try {
            // Enumerate every address assignable on this interface's subnet.
            SubnetUtils utils = new SubnetUtils(interfaceIP);
            String[] addresses = utils.getInfo().getAllAddresses();
            // 0 means "no cap"; otherwise take at most maximumPerInterface addresses.
            int len = maximumPerInterface == 0 ? addresses.length
                    : Math.min(maximumPerInterface, addresses.length);
            for (int i = 0; i < len; i++) {
                networkIPs.add(addresses[i]);
            }

        } catch (Exception ignored) {
            // Deliberate best-effort: entries that SubnetUtils rejects (e.g. malformed
            // CIDR strings) are skipped rather than aborting the whole scan.
        }
    }

    return networkIPs;
}

From source file:de.bund.bfr.knime.node.editableTable.JSONDataTable.java

/**
 * Creates a new data table which can be serialized into a JSON string from a given BufferedDataTable.
 * @param dTable the data table to read the rows from
 * @param firstRow the first row to store (must be greater than zero)
 * @param maxRows the number of rows to store (must be zero or more)
 * @param excludeColumns a list of columns to exclude
 * @param execMon the object listening to our progress and providing cancel functionality.
 * @throws CanceledExecutionException If the execution of the node has been cancelled.
 */
public JSONDataTable(final DataTable dTable, final int firstRow, final int maxRows,
        final String[] excludeColumns, final ExecutionMonitor execMon) throws CanceledExecutionException {

    // Validate arguments up front so failures carry a clear message.
    if (dTable == null) {
        throw new NullPointerException("Must provide non-null data table" + " for DataArray");
    }
    if (firstRow < 1) {
        throw new IllegalArgumentException("Starting row must be greater" + " than zero");
    }
    if (maxRows < 0) {
        throw new IllegalArgumentException("Number of rows to read must be" + " greater than or equal zero");
    }

    // Determine which column indices survive the exclusion list.
    int numOfColumns = 0;
    ArrayList<Integer> includeColIndices = new ArrayList<Integer>();
    DataTableSpec spec = dTable.getDataTableSpec();
    for (int i = 0; i < spec.getNumColumns(); i++) {
        String colName = spec.getColumnNames()[i];
        if (!Arrays.asList(excludeColumns).contains(colName)) {
            includeColIndices.add(i);
            numOfColumns++;
        }
    }
    // Used only for progress reporting; clamp to the actual table size when known.
    long numOfRows = maxRows;
    if (dTable instanceof BufferedDataTable) {
        numOfRows = Math.min(((BufferedDataTable) dTable).size(), maxRows);
    }

    //int numOfColumns = spec.getNumColumns();
    // Per-column running min/max, kept both as DataCells (for comparator use)
    // and as JSON values (for the output spec).
    DataCell[] maxValues = new DataCell[numOfColumns];
    DataCell[] minValues = new DataCell[numOfColumns];
    Object[] minJSONValues = new Object[numOfColumns];
    Object[] maxJSONValues = new Object[numOfColumns];

    // create a new list for the values - but only for native string columns
    Vector<LinkedHashSet<Object>> possValues = new Vector<LinkedHashSet<Object>>();
    possValues.setSize(numOfColumns);
    for (int c = 0; c < numOfColumns; c++) {
        if (spec.getColumnSpec(includeColIndices.get(c)).getType().isCompatible(NominalValue.class)) {
            possValues.set(c, new LinkedHashSet<Object>());
        }
    }

    RowIterator rIter = dTable.iterator();
    int currentRowNumber = 0;
    int numRows = 0;

    ArrayList<String> rowColorList = new ArrayList<String>();
    ArrayList<JSONDataTableRow> rowList = new ArrayList<JSONDataTableRow>();

    // NOTE(review): the bound compares against maxRows rather than firstRow + maxRows - 1,
    // so a firstRow > 1 shrinks the window of stored rows — confirm this is intended.
    while ((rIter.hasNext()) && (currentRowNumber + firstRow - 1 < maxRows)) {
        // get the next row
        DataRow row = rIter.next();
        currentRowNumber++;

        if (currentRowNumber < firstRow) {
            // skip all rows until we see the specified first row
            continue;
        }

        String rC = CSSUtils.cssHexStringFromColor(spec.getRowColor(row).getColor());
        rowColorList.add(rC);

        String rowKey = row.getKey().getString();
        rowList.add(new JSONDataTableRow(rowKey, numOfColumns));
        numRows++;

        // add cells, check min, max values and possible values for each column
        for (int c = 0; c < numOfColumns; c++) {
            int col = includeColIndices.get(c);
            DataCell cell = row.getCell(col);

            // Missing cells are represented as null in the JSON row data.
            Object cellValue;
            if (!cell.isMissing()) {
                cellValue = getJSONCellValue(cell);
            } else {
                cellValue = null;
            }

            rowList.get(currentRowNumber - firstRow).getData()[c] = cellValue;
            if (cellValue == null) {
                continue;
            }

            DataValueComparator comp = spec.getColumnSpec(col).getType().getComparator();

            // test the min value
            if (minValues[c] == null) {
                minValues[c] = cell;
                minJSONValues[c] = getJSONCellValue(cell);
            } else {
                if (comp.compare(minValues[c], cell) > 0) {
                    minValues[c] = cell;
                    minJSONValues[c] = getJSONCellValue(cell);
                }
            }
            // test the max value
            if (maxValues[c] == null) {
                maxValues[c] = cell;
                maxJSONValues[c] = getJSONCellValue(cell);
            } else {
                if (comp.compare(maxValues[c], cell) < 0) {
                    maxValues[c] = cell;
                    maxJSONValues[c] = getJSONCellValue(cell);
                }
            }
            // add it to the possible values if we record them for this col
            LinkedHashSet<Object> possVals = possValues.get(c);
            if (possVals != null) {
                // non-string cols have a null list and will be skipped here
                possVals.add(getJSONCellValue(cell));
            }
        }
        if (execMon != null) {
            execMon.setProgress(((double) currentRowNumber - firstRow) / numOfRows,
                    "Creating JSON table. Processing row " + (currentRowNumber - firstRow) + " of "
                            + numOfRows);
        }
    }

    // TODO: Add extensions (color, shape, size, inclusion, selection, hiliting, ...)
    Object[][] extensionArray = null;

    // Assemble the JSON spec (column metadata, min/max, possible values) and attach rows/colors.
    JSONDataTableSpec jsonTableSpec = new JSONDataTableSpec(spec, excludeColumns, numRows);
    jsonTableSpec.setMinValues(minJSONValues);
    jsonTableSpec.setMaxValues(maxJSONValues);
    jsonTableSpec.setPossibleValues(possValues);

    setSpec(jsonTableSpec);
    getSpec().setRowColorValues(rowColorList.toArray(new String[0]));
    setRows(rowList.toArray(new JSONDataTableRow[0]));
    setExtensions(extensionArray);
}

From source file:org.artifactory.storage.db.build.service.BuildStoreServiceImpl.java

@Override
public Set<BuildRun> getLatestBuildsPaging(String offset, String orderBy, String direction, String limit) {
    try {
        // Fetch one page of build entities and convert each to its BuildRun view,
        // preserving the DAO's ordering via a LinkedHashSet.
        List<BuildEntity> entities = buildsDao.getAllBuildNamePaging(offset, orderBy, direction, limit);
        LinkedHashSet<BuildRun> runs = new LinkedHashSet<>(entities.size());
        for (BuildEntity entity : entities) {
            if (entity == null) {
                continue; // skip gaps the DAO may return
            }
            runs.add(getBuildRun(entity));
        }
        return runs;
    } catch (SQLException e) {
        // Surface persistence failures as the storage-layer exception, cause preserved.
        throw new StorageException("Could not list all builds by name and latest build date", e);
    }
}

From source file:ubic.gemma.loader.expression.geo.model.GeoValues.java

/**
 * Only call this to add a sample for which there are no data.
 *
 * @param sample the sample to register; must have at least one platform assigned
 * @throws UnsupportedOperationException if the sample is empty and is the first one
 *         seen on its platform (the number of quantitation types is then unknown)
 */
public void addSample(GeoSample sample) {
    // NOTE(review): assumes at least one platform is assigned; an empty set would fail here — confirm upstream guarantee.
    GeoPlatform platform = sample.getPlatforms().iterator().next();

    if (platform.getTechnology().equals(PlatformType.MPSS)
            || platform.getTechnology().equals(PlatformType.SAGE)) {
        /*
         * We're not going to add data for this. Note
         */
        return;

    } else if (!sampleDimensions.containsKey(platform)) {
        /*
         * Problem: if this is the first sample, we don't know how many quantitation types to expect. However, for
         * some data sets, there is no data provided in the SOFT file (e.g., RNA-seq), so this would be okay.
         */
        if (sample.isMightNotHaveDataInFile()) {
            // Register the sample under a single dummy quantitation type (index 0).
            addSample(sample, 0);
            log.warn("Adding dummy quantitation type");
            return;
            // throw new IllegalStateException( "Samples must have a platform assigned." );
        } else {
            throw new UnsupportedOperationException(
                    "Can't deal with empty samples when that sample is the first one on its platform.");
        }
    } else {

        // Platform already known: add the sample under every existing quantitation type index.
        Map<Object, LinkedHashSet<GeoSample>> samplePlatformMap = sampleDimensions.get(platform);
        for (Object quantitationTypeIndex : samplePlatformMap.keySet()) {
            LinkedHashSet<GeoSample> sampleQtMap = samplePlatformMap.get(quantitationTypeIndex);
            sampleQtMap.add(sample);
        }
    }

}

From source file:gr.forth.ics.isl.webservice.XPathsWebservice.java

/**
 * Formats each path in the given set as a {@code {"id": path, "text": path}} map,
 * preserving the input's iteration order in the returned LinkedHashSet.
 *
 * @param hashSet the paths to format
 * @return a new LinkedHashSet of two-entry maps, one per input path
 */
private LinkedHashSet<Map<String, String>> FormatHashSet(LinkedHashSet<String> hashSet) {

    LinkedHashSet<Map<String, String>> formatted = new LinkedHashSet<Map<String, String>>();

    // Enhanced-for replaces the manual Iterator loop; iteration order is preserved.
    for (String path : hashSet) {
        // Build a map with 2 key-value pairs: {"id": path, "text": path}.
        Map<String, String> entry = new HashMap<String, String>(2);
        entry.put("id", path);
        entry.put("text", path);
        formatted.add(entry);
    }
    return formatted;
}

From source file:org.jahia.taglibs.template.include.ModuleTag.java

/**
 * Resolves the list of node types a user may contribute under the given node
 * when the render context is in contribute mode.
 *
 * @param renderContext current render context
 * @param node          the node whose contribute types are being resolved
 * @return the resolved type names, or {@code null} when not in contribute mode,
 *         when no contribute-types property applies, or on repository error
 */
protected List<String> contributeTypes(RenderContext renderContext, JCRNodeWrapper node) {

    // Contribute-type restrictions only apply in contribute mode.
    if (!"contributemode".equals(renderContext.getEditModeConfigName())) {
        return null;
    }
    // Default source of the restriction is the surrounding area-list resource, if set.
    JCRNodeWrapper contributeNode = null;
    if (renderContext.getRequest().getAttribute("areaListResource") != null) {
        contributeNode = (JCRNodeWrapper) renderContext.getRequest().getAttribute("areaListResource");
    }

    try {
        // The node's own property takes precedence over the area-list resource.
        if (node.hasProperty(Constants.JAHIA_CONTRIBUTE_TYPES)) {
            contributeNode = node;
        }
        if (contributeNode != null && contributeNode.hasProperty(Constants.JAHIA_CONTRIBUTE_TYPES)) {
            LinkedHashSet<String> l = new LinkedHashSet<String>();
            Value[] v = contributeNode.getProperty(Constants.JAHIA_CONTRIBUTE_TYPES).getValues();
            if (v.length == 0) {
                // No explicit types configured: fall back to generic editorial content.
                l.add("jmix:editorialContent");
            } else {
                for (Value value : v) {
                    l.add(value.getString());
                }
            }
            // Expand each configured type into its concrete (non-abstract, non-mixin) forms,
            // keeping only types whose defining module is installed on the site.
            LinkedHashSet<String> subtypes = new LinkedHashSet<String>();
            final Set<String> installedModulesWithAllDependencies = renderContext.getSite()
                    .getInstalledModulesWithAllDependencies();
            for (String s : l) {
                ExtendedNodeType nt = NodeTypeRegistry.getInstance().getNodeType(s);
                if (nt != null) {
                    if (!nt.isAbstract() && !nt.isMixin() && (nt.getTemplatePackage() == null
                            || installedModulesWithAllDependencies.contains(nt.getTemplatePackage().getId()))) {
                        subtypes.add(nt.getName());
                    }
                    for (ExtendedNodeType subtype : nt.getSubtypesAsList()) {
                        if (!subtype.isAbstract() && !subtype.isMixin()
                                && (subtype.getTemplatePackage() == null || installedModulesWithAllDependencies
                                        .contains(subtype.getTemplatePackage().getId()))) {
                            subtypes.add(subtype.getName());
                        }
                    }
                }
            }
            // NOTE(review): the 10-entry cap presumably keeps the contribute UI list manageable;
            // above it the broader configured types are returned instead — confirm intent.
            if (subtypes.size() < 10) {
                return new ArrayList<String>(subtypes);
            }
            return new ArrayList<String>(l);
        }
    } catch (RepositoryException e) {
        logger.error(e.getMessage(), e);
    }
    return null;
}