Example usage for java.util Set toArray

List of usage examples for java.util Set toArray

Introduction

On this page you can find example usages of java.util.Set.toArray.

Prototype

<T> T[] toArray(T[] a);

Source Link

Document

Returns an array containing all of the elements in this set; the runtime type of the returned array is that of the specified array.

Usage

From source file:org.eclipse.virgo.ide.facet.core.BundleFacetUninstallDelegate.java

/**
 * Removes the given entry from the project's raw classpath.
 *
 * @param javaProject the Java project whose classpath is rewritten
 * @param entry       the classpath entry to drop
 * @param monitor     progress monitor passed through to the classpath update
 * @throws CoreException if reading or writing the raw classpath fails
 */
protected void removeFromClasspath(IJavaProject javaProject, IClasspathEntry entry, IProgressMonitor monitor)
        throws CoreException {
    // LinkedHashSet keeps the original classpath order while de-duplicating.
    Set<IClasspathEntry> retained = new LinkedHashSet<IClasspathEntry>();
    for (IClasspathEntry candidate : javaProject.getRawClasspath()) {
        if (candidate.equals(entry)) {
            continue; // this is the entry being removed
        }
        retained.add(candidate);
    }
    javaProject.setRawClasspath(retained.toArray(new IClasspathEntry[retained.size()]), monitor);
}

From source file:jp.co.nemuzuka.dao.ProjectDao.java

/**
 * Queries the project list, optionally filtered by a project-name prefix.
 *
 * @param projectName prefix to match against the project name; no filter is
 *                    applied when empty or {@code null}
 * @return matching projects, sorted in memory by key ascending
 */
public List<ProjectModel> getList(String projectName) {
    ProjectModelMeta meta = (ProjectModelMeta) getModelMeta();
    Set<FilterCriterion> criteria = new HashSet<FilterCriterion>();
    if (StringUtils.isNotEmpty(projectName)) {
        criteria.add(meta.projectName.startsWith(projectName));
    }
    FilterCriterion[] filters = criteria.toArray(new FilterCriterion[0]);
    return Datastore.query(meta).filter(filters).sortInMemory(meta.key.asc).asList();
}

From source file:org.openhealthtools.openatna.audit.persistence.dao.hibernate.HibernateParticipantDao.java

/**
 * This checks for any codes that are NOT in the DB.
 * Codes that are considered to be in the DB should not be added again,
 * while those that are not, should not be in the participant.
 * <p/>/*  ww w.j  a  v a2 s.  com*/
 * For each code in the participant:
 * remove it.
 * find an existing code that maches it.
 * if one is found, add this to the list.
 * <p/>
 * This means codes that have been modified (e.g. display name was changed)
 * will not be persisted in this call. To modify, one would have to call
 * the save on the code itself.
 * <p/>
 * If the participant's version is null, then a matching participant based on the (alt)user id
 * is queried for. If one is found, this throws a DUPLICATE_PARTICIPANT
 * AtnaParticipantException. Otherwise, the save is allowed to proceed.
 *
 * @param pe
 */
public void save(ParticipantEntity pe, PersistencePolicies policies) throws AtnaPersistenceException {
    Set<ParticipantCodeEntity> codes = pe.getParticipantTypeCodes();
    if (codes.size() > 0) {
        ParticipantCodeEntity[] arr = codes.toArray(new ParticipantCodeEntity[codes.size()]);
        CodeDao dao = AtnaFactory.codeDao();
        for (int i = 0; i < arr.length; i++) {
            ParticipantCodeEntity code = arr[i];
            CodeEntity codeEnt = dao.get(code);
            if (codeEnt == null) {
                if (policies.isAllowNewCodes()) {
                    dao.save(code, policies);
                } else {
                    throw new AtnaPersistenceException(code.toString(),
                            AtnaPersistenceException.PersistenceError.NON_EXISTENT_CODE);
                }
            } else {
                if (codeEnt instanceof ParticipantCodeEntity) {
                    arr[i] = ((ParticipantCodeEntity) codeEnt);
                } else {
                    throw new AtnaPersistenceException("code is defined but is of a different type.",
                            AtnaPersistenceException.PersistenceError.WRONG_CODE_TYPE);
                }
            }
        }

        pe.setParticipantTypeCodes(new HashSet<ParticipantCodeEntity>(Arrays.asList(arr)));
    }

    if (pe.getVersion() == null) {
        // new one.
        ParticipantEntity existing = get(pe);
        if (existing != null) {
            if (policies.isErrorOnDuplicateInsert()) {
                throw new AtnaPersistenceException(pe.toString(),
                        AtnaPersistenceException.PersistenceError.DUPLICATE_PARTICIPANT);
            } else {
                return;
            }
        }
    }
    currentSession().saveOrUpdate(pe);
}

From source file:net.gtl.movieanalytics.data.InfoStore.java

/**
 * Returns the names of all features currently registered in the feature map.
 *
 * @return feature names, in the iteration order of the underlying key set
 */
public String[] getAllFeatureNames() {
    Set<String> featureNames = featureMap.keySet();
    // toArray(new String[0]) allocates an array of exactly the right size.
    return featureNames.toArray(new String[0]);
}

From source file:org.openhealthtools.openatna.audit.persistence.dao.hibernate.HibernateSourceDao.java

/**
 * Saves an audit source entity after resolving its type codes against the database.
 *
 * Each code on the source is looked up: a persisted match replaces the
 * source's copy, an unknown code is saved (policy permitting) or rejected,
 * and a code of the wrong entity type is an error. When the entity has no
 * version (i.e. is new), an existing matching source either raises
 * DUPLICATE_SOURCE or causes the save to be skipped, per policy.
 *
 * @param entity   the source to persist
 * @param policies controls handling of unknown codes and duplicate inserts
 * @throws AtnaPersistenceException on unknown codes, wrong code types, or duplicates
 */
public void save(SourceEntity entity, PersistencePolicies policies) throws AtnaPersistenceException {
    Set<SourceCodeEntity> codes = entity.getSourceTypeCodes();
    if (codes.size() > 0) {
        CodeDao dao = AtnaFactory.codeDao();
        Set<SourceCodeEntity> resolved = new HashSet<SourceCodeEntity>();
        for (SourceCodeEntity code : codes) {
            CodeEntity existing = dao.get(code);
            if (existing == null) {
                // Unknown code: persist it if allowed, otherwise fail the whole save.
                if (!policies.isAllowNewCodes()) {
                    throw new AtnaPersistenceException(code.toString(),
                            AtnaPersistenceException.PersistenceError.NON_EXISTENT_CODE);
                }
                dao.save(code, policies);
                resolved.add(code);
            } else if (existing instanceof SourceCodeEntity) {
                // Prefer the persisted code over the entity's (possibly edited) copy.
                resolved.add((SourceCodeEntity) existing);
            } else {
                throw new AtnaPersistenceException("code is defined but is of a different type.",
                        AtnaPersistenceException.PersistenceError.WRONG_CODE_TYPE);
            }
        }
        entity.setSourceTypeCodes(resolved);
    }

    if (entity.getVersion() == null) {
        // No version means this source has never been persisted.
        SourceEntity existing = get(entity);
        if (existing != null) {
            if (policies.isErrorOnDuplicateInsert()) {
                throw new AtnaPersistenceException(entity.toString(),
                        AtnaPersistenceException.PersistenceError.DUPLICATE_SOURCE);
            }
            return; // silently skip the duplicate
        }
    }

    currentSession().saveOrUpdate(entity);
}

From source file:pl.edu.icm.comac.vis.server.service.AtomicGraphServiceImpl.java

/**
 * Builds a graph around the given "favourite" node ids: fetches their cache
 * entries, derives a bidirectional link map from their relations, asks the
 * toolkit for additional ("unfavourite") nodes to include, and assembles the
 * final node/link lists. Overflow nodes (too many relations) are excluded
 * from link expansion.
 *
 * @param ids identifiers of the favourite nodes to build the graph from
 * @return the assembled graph of favourite and toolkit-added nodes
 * @throws OpenRDFException if fetching node data from the repository fails
 */
@Override
public Graph constructGraphs(String[] ids) throws OpenRDFException {
    List<NodeCacheEntry> favCacheNodes = fetchNodes(ids);
    // Build an undirected adjacency map: every relation contributes both
    // subject->object and object->subject, grouped into a Set per node id.
    // Overflow nodes are skipped so huge neighbourhoods don't explode the map.
    Map<String, Set<String>> links = favCacheNodes.parallelStream().filter(x -> !x.isOverflow())
            .map(x -> x.getRelations()).flatMap(x -> x.stream())
            .flatMap(x -> Arrays.stream(
                    new String[][] { { x.getSubject(), x.getObject() }, { x.getObject(), x.getSubject() } }))
            .collect(Collectors.groupingBy(x -> x[0], Collectors.mapping(x -> x[1], Collectors.toSet())));
    // Partition the favourites into overflow ("large") and regular ("normal") ids.
    Set<String> large = favCacheNodes.stream().filter(x -> x.isOverflow()).map(x -> x.getId())
            .collect(Collectors.toSet());
    Set<String> normal = favCacheNodes.stream().filter(x -> !x.isOverflow()).map(x -> x.getId())
            .collect(Collectors.toSet());
    // Let the toolkit pick extra node ids to add, capped by MAX_RETURNED_RELATIONS.
    Set<String> unfav = graphToolkit.calculateAdditions(normal, large, links, MAX_RETURNED_RELATIONS);
    //now fetch the unfavs:
    List<NodeCacheEntry> unfavCacheNodes = fetchNodes(unfav.toArray(new String[unfav.size()]));
    List<NodeCacheEntry> allNodes = new ArrayList<NodeCacheEntry>();
    allNodes.addAll(favCacheNodes);
    allNodes.addAll(unfavCacheNodes);
    List<NodeCacheEntry> largeNodes = allNodes.stream().filter(x -> x.isOverflow())
            .collect(Collectors.toList());
    // NOTE(review): largeRelations is computed but not used below — presumably
    // calculateRelations has a side effect (e.g. cache warm-up); confirm.
    List<RelationCacheEntry> largeRelations = calculateRelations(largeNodes);
    //now build the graph:

    List<Node> nodes = new ArrayList<>();

    // Favourite nodes are flagged as such; weight is fixed at 1.0 for all nodes.
    List<Node> fnodes = favCacheNodes.stream().map(cached -> {
        Node res = new Node(cached.getId(), cached.getType(), cached.getName(), 1.0);
        res.setFavourite(true);
        return res;
    }).collect(Collectors.toList());
    nodes.addAll(fnodes);
    List<Node> ufnodes = unfavCacheNodes.stream().map(cached -> {
        Node res = new Node(cached.getId(), cached.getType(), cached.getName(), 1.0);
        res.setFavourite(false);
        return res;
    }).collect(Collectors.toList());
    nodes.addAll(ufnodes);
    Set<String> nodeIdSet = nodes.stream().map(x -> x.getId()).collect(Collectors.toSet());

    // Keep only relations whose both endpoints are nodes in the graph;
    // collecting into a Set de-duplicates links contributed by both endpoints.
    Set<Link> graphRelations = allNodes.parallelStream().filter(x -> !x.isOverflow())
            .flatMap(x -> x.getRelations().stream())
            .filter(x -> nodeIdSet.contains(x.subject) && nodeIdSet.contains(x.object))
            .map(x -> new Link(x.getPredicate(), x.getSubject(), x.getObject())).collect(Collectors.toSet());
    Graph res = new Graph();

    res.setNodes(nodes);
    res.setLinks(new ArrayList<Link>(graphRelations));
    return res;
}

From source file:eu.dime.ps.semantic.query.impl.AbstractQuery.java

/**
 * Retrieves a resource from the knowledge base or model set provided.
 *
 * When both selected and discarded properties are configured, the discarded
 * ones are subtracted from the selected set before the lookup.
 *
 * @param resource the RDF resource to load
 * @return the resource mapped to this query's return type
 * @throws NotFoundException if the resource cannot be found
 */
protected T get(org.ontoware.rdf2go.model.node.Resource resource) throws NotFoundException {
    boolean hasSelected = selectedProperties != null && selectedProperties.length > 0;
    boolean hasDiscarded = discardedProperties != null && discardedProperties.length > 0;
    if (hasSelected && hasDiscarded) {
        Set<URI> effective = new HashSet<URI>(Arrays.asList(selectedProperties));
        effective.removeAll(Arrays.asList(discardedProperties));
        return rdfStore.get(resource, returnType, effective.toArray(new URI[0]));
    }
    if (hasSelected) {
        return rdfStore.get(resource, returnType, selectedProperties);
    }
    // NOTE: a discarded-only configuration intentionally falls through to the
    // unfiltered load (the filtered variant was disabled in the original code).
    return rdfStore.get(resource, returnType);
}

From source file:com.olacabs.fabric.compute.builder.impl.JarScanner.java

/**
 * Merges the given jar URLs with the URLs already on the application classpath.
 *
 * @param jarFileURLs extra jar locations to append to the classpath URLs
 * @return the de-duplicated union of the current classpath URLs and {@code jarFileURLs}
 */
private URL[] genUrls(URL[] jarFileURLs) {
    Set<URL> mergedJarURLs = new HashSet<URL>();
    ClassLoader systemLoader = ClassLoader.getSystemClassLoader();
    if (systemLoader instanceof URLClassLoader) {
        // Java 8 and earlier: the system class loader exposes its URLs directly.
        mergedJarURLs.addAll(Arrays.asList(((URLClassLoader) systemLoader).getURLs()));
    } else {
        // Java 9+: the system class loader is no longer a URLClassLoader, so the
        // previous unconditional cast threw ClassCastException. Rebuild the
        // classpath URLs from the java.class.path system property instead.
        String classPath = System.getProperty("java.class.path", "");
        for (String entry : classPath.split(java.io.File.pathSeparator)) {
            if (entry.isEmpty()) {
                continue;
            }
            try {
                mergedJarURLs.add(new java.io.File(entry).toURI().toURL());
            } catch (java.net.MalformedURLException e) {
                throw new IllegalStateException("Cannot convert classpath entry to URL: " + entry, e);
            }
        }
    }
    mergedJarURLs.addAll(Arrays.asList(jarFileURLs));
    return mergedJarURLs.toArray(new URL[mergedJarURLs.size()]);
}

From source file:com.tealcube.minecraft.bukkit.hilt.HiltItemStack.java

/**
 * Replaces this item's flags so it carries exactly the given set.
 *
 * @param flags the complete set of flags the item should have afterwards
 * @return this item stack, for call chaining
 */
public HiltItemStack setItemFlags(Set<ItemFlag> flags) {
    createItemMeta();
    ItemMeta meta = getItemMeta();
    // Wipe every existing flag first so the result is exactly `flags`.
    meta.removeItemFlags(ItemFlag.values());
    ItemFlag[] requested = flags.toArray(new ItemFlag[0]);
    meta.addItemFlags(requested);
    setItemMeta(meta);
    return this;
}

From source file:com.opengamma.financial.analytics.model.equity.indexoption.EquityIndexOptionVegaMatrixFunction.java

/**
 * Computes a labelled 2-D vega matrix (expiry x strike) for an equity index
 * option by bumping the entire volatility surface.
 *
 * When the surface is grid-backed, the axis labels come from the original
 * grid data (all expiries, union of strikes per expiry); otherwise they are
 * taken from the bumped vega surface itself. Points with no computable vega
 * are filled with zero.
 *
 * @param derivative the equity index option being priced
 * @param market     market data bundle holding the volatility surface
 * @return a DoubleLabelledMatrix2D of vega values keyed by expiry and strike
 */
@Override
protected Object computeValues(EquityIndexOption derivative, StaticReplicationDataBundle market) {
    final NodalDoublesSurface vegaSurface = CALCULATOR.calcBlackVegaForEntireSurface(derivative, market, SHIFT);
    final Double[] xValues;
    final Double[] yValues;
    if (market.getVolatilitySurface() instanceof BlackVolatilitySurfaceMoneynessFcnBackedByGrid) {
        // Grid-backed surface: label axes with the raw grid's expiries and strikes.
        BlackVolatilitySurfaceMoneynessFcnBackedByGrid volDataBundle = (BlackVolatilitySurfaceMoneynessFcnBackedByGrid) market
                .getVolatilitySurface();
        xValues = ArrayUtils.toObject(volDataBundle.getGridData().getExpiries());
        double[][] strikes2d = volDataBundle.getGridData().getStrikes();
        // Union of strikes across all expiry rows (the Set removes duplicates).
        Set<Double> strikeSet = new HashSet<Double>();
        for (int i = 0; i < strikes2d.length; i++) {
            strikeSet.addAll(Arrays.asList(ArrayUtils.toObject(strikes2d[i])));
        }
        yValues = strikeSet.toArray(new Double[] {});
    } else {
        // Otherwise use the axes of the bumped vega surface directly.
        xValues = vegaSurface.getXData();
        yValues = vegaSurface.getYData();
    }

    // De-duplicate the axis values; uniqueX/uniqueY and the loops below iterate
    // the same unmodified HashSets, so label order and cell order stay aligned.
    final Set<Double> xSet = new HashSet<Double>(Arrays.asList(xValues));
    final Set<Double> ySet = new HashSet<Double>(Arrays.asList(yValues));
    final Double[] uniqueX = xSet.toArray(new Double[0]);
    final Double[] uniqueY = ySet.toArray(new Double[0]);
    // Matrix is rows = strikes (y), columns = expiries (x).
    final double[][] values = new double[ySet.size()][xSet.size()];
    int i = 0;
    for (final Double x : xSet) {
        int j = 0;
        for (final Double y : ySet) {
            double vega;
            try {
                vega = vegaSurface.getZValue(x, y);
            } catch (final IllegalArgumentException e) {
                // No surface point at (x, y) — treat the vega as zero.
                vega = 0;
            }
            values[j++][i] = vega;
        }
        i++;
    }
    final DoubleLabelledMatrix2D matrix = new DoubleLabelledMatrix2D(uniqueX, uniqueY, values);
    return matrix;
}