Example usage for java.util HashSet toArray


Introduction

On this page you can find example usages of java.util.HashSet.toArray.

Prototype

<T> T[] toArray(T[] a);

Document

Returns an array containing all of the elements in this set; the runtime type of the returned array is that of the specified array.
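
For orientation, here is a minimal, self-contained sketch of the call (class and variable names here are illustrative, not from the examples below). Passing a zero-length array lets toArray allocate a new, correctly typed array of the exact size; passing a presized array fills and returns that array instead.

import java.util.Arrays;
import java.util.HashSet;

public class HashSetToArrayDemo {
    public static void main(String[] args) {
        HashSet<String> names = new HashSet<>(Arrays.asList("alpha", "beta"));

        // Zero-length array: toArray allocates a new String[] of the exact size.
        String[] fromEmpty = names.toArray(new String[0]);

        // Presized array: toArray fills the array we pass in and returns it.
        String[] fromSized = names.toArray(new String[names.size()]);

        System.out.println(Arrays.toString(fromEmpty)); // iteration order is unspecified
        System.out.println(Arrays.toString(fromSized));
    }
}

Both forms appear in the examples below; on modern JVMs the zero-length form is commonly recommended, but either is correct.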

Usage

From source file:org.eurekastreams.server.persistence.DomainGroupMapper.java

/**
 * Get the Person.ids of all coordinators and followers of the input
 * group.
 * 
 * @param domainGroup
 *            the DomainGroup to find coordinators and followers for
 * @return an array of all of the Person.ids for coordinators and followers of the input group
 */
@SuppressWarnings("unchecked")
public Long[] getFollowerAndCoordinatorPersonIds(final DomainGroup domainGroup) {
    // use a set to eliminate duplicates
    HashSet<Long> peopleIds = new HashSet<Long>();
    Query q = getEntityManager()
            .createQuery("SELECT pk.followerId FROM GroupFollower WHERE followingId=:groupId")
            .setParameter("groupId", domainGroup.getId());
    peopleIds.addAll(q.getResultList());

    q = getEntityManager().createQuery(
            "SELECT p.id FROM Person p, DomainGroup g WHERE p MEMBER OF g.coordinators AND g.id=:groupId")
            .setParameter("groupId", domainGroup.getId());
    peopleIds.addAll(q.getResultList());

    return peopleIds.toArray(new Long[peopleIds.size()]);
}

From source file:es.pode.gestorFlujo.presentacion.objetosPendientes.Publicar.PublicarControllerImpl.java

private String[] interseccionNodos(String nodos) throws Exception {
    SrvNodoService nodosPlataforma = this.getSrvNodoService();

    String[] nodosListados = obtenNodosLocalesIds(nodosPlataforma);
    HashSet<String> nodosTotal = new HashSet<String>(Arrays.asList(nodosListados));
    ArrayList<String> listaNodos = new ArrayList<String>(Arrays.asList(nodos.split(",")));
    HashSet<String> nodosLeidos = new HashSet<String>(listaNodos);
    nodosLeidos.retainAll(nodosTotal);
    if (!nodosLeidos.contains(nodosListados[0])) {
        nodosLeidos.add(nodosListados[0]);
    }
    String[] retorno = nodosLeidos.toArray(new String[0]);
    return retorno;
}

From source file:org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo.java

/**
 * Returns an array of storages where the replicas are stored
 */
protected DatanodeStorageInfo[] getStorages(DatanodeManager datanodeMgr, List<? extends ReplicaBase> replicas) {
    int numLocations = replicas.size();
    HashSet<DatanodeStorageInfo> set = new HashSet<>();
    for (int i = numLocations - 1; i >= 0; i--) {
        DatanodeStorageInfo desc = datanodeMgr.getStorage(replicas.get(i).getStorageId());
        if (desc != null) {
            set.add(desc);
        } else {
            replicas.remove(i);
        }
    }
    DatanodeStorageInfo[] storages = new DatanodeStorageInfo[set.size()];
    return set.toArray(storages);
}

From source file:org.owasp.jbrofuzz.core.Database.java

/**
 * <p>
 * Given a category, return all prototype names that belong to that
 * category.
 * </p>
 * 
 * @param category
 *            the category as a string to check
 * @return String[] array of prototype names
 * 
 * @author subere@uncon.org
 * @version 1.3
 * @since 1.2
 */
public String[] getPrototypeNamesInCategory(final String category) {

    final HashSet<String> o = new HashSet<String>();
    final String[] ids = getAllPrototypeIDs();

    for (final String id : ids) {

        final Prototype g = prototypes.get(id);
        if (g.isAMemberOfCategory(category)) {
            o.add(g.getName());
        }
    }

    final String[] uCategoriesArray = new String[o.size()];
    o.toArray(uCategoriesArray);

    return uCategoriesArray;
}

From source file:org.apache.nutch.crawl.CrawlDb.java

public int run(String[] args) throws Exception {
    if (args.length < 2) {
        System.err.println(
                "Usage: CrawlDb <crawldb> (-dir <segments> | <seg1> <seg2> ...) [-force] [-normalize] [-filter] [-noAdditions]");
        System.err.println("\tcrawldb\tCrawlDb to update");
        System.err.println("\t-dir segments\tparent directory containing all segments to update from");
        System.err.println("\tseg1 seg2 ...\tlist of segment names to update from");
        System.err.println("\t-force\tforce update even if CrawlDb appears to be locked (CAUTION advised)");
        System.err
                .println("\t-normalize\tuse URLNormalizer on urls in CrawlDb and segment (usually not needed)");
        System.err.println("\t-filter\tuse URLFilters on urls in CrawlDb and segment");
        System.err.println(
                "\t-noAdditions\tonly update already existing URLs, don't add any newly discovered URLs");
        return -1;
    }
    boolean normalize = false;
    boolean filter = false;
    boolean force = false;
    final FileSystem fs = FileSystem.get(getConf());
    boolean additionsAllowed = getConf().getBoolean(CRAWLDB_ADDITIONS_ALLOWED, true);
    HashSet<Path> dirs = new HashSet<Path>();
    for (int i = 1; i < args.length; i++) {
        if (args[i].equals("-normalize")) {
            normalize = true;
        } else if (args[i].equals("-filter")) {
            filter = true;
        } else if (args[i].equals("-force")) {
            force = true;
        } else if (args[i].equals("-noAdditions")) {
            additionsAllowed = false;
        } else if (args[i].equals("-dir")) {
            FileStatus[] paths = fs.listStatus(new Path(args[++i]), HadoopFSUtil.getPassDirectoriesFilter(fs));
            dirs.addAll(Arrays.asList(HadoopFSUtil.getPaths(paths)));
        } else {
            dirs.add(new Path(args[i]));
        }
    }
    try {
        update(new Path(args[0]), dirs.toArray(new Path[dirs.size()]), normalize, filter, additionsAllowed,
                force);
        return 0;
    } catch (Exception e) {
        LOG.fatal("CrawlDb update: " + StringUtils.stringifyException(e));
        return -1;
    }
}

From source file:org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo.java

/**
 * Returns an array of storages where the replicas are stored, filtered by the given storage state
 */
protected DatanodeStorageInfo[] getStorages(DatanodeManager datanodeMgr, List<? extends ReplicaBase> replicas,
        final DatanodeStorage.State state) {
    int numLocations = replicas.size();
    HashSet<DatanodeStorageInfo> set = new HashSet<>();
    for (int i = numLocations - 1; i >= 0; i--) {
        DatanodeStorageInfo desc = datanodeMgr.getStorage(replicas.get(i).getStorageId());
        if (desc != null && desc.getState().equals(state)) {
            set.add(desc);
        } else {
            replicas.remove(i);
        }
    }
    DatanodeStorageInfo[] storages = new DatanodeStorageInfo[set.size()];
    return set.toArray(storages);
}

From source file:org.ramadda.geodata.cdmdata.GridPointOutputHandler.java

/**
 * Get the grid dates
 *
 * @param dataset  the dataset
 *
 * @return  the dates or null
 */
public static List<CalendarDate> getGridDates(GridDataset dataset) {
    List<CalendarDate> gridDates = new ArrayList<CalendarDate>();
    if (dataset == null) {
        return gridDates;
    }
    List<GridDatatype> grids = dataset.getGrids();
    HashSet<CalendarDate> dateHash = new HashSet<CalendarDate>();
    List<CoordinateAxis1DTime> timeAxes = new ArrayList<CoordinateAxis1DTime>();

    for (GridDatatype grid : grids) {
        GridCoordSystem gcs = grid.getCoordinateSystem();
        CoordinateAxis1DTime timeAxis = gcs.getTimeAxis1D();
        if ((timeAxis != null) && !timeAxes.contains(timeAxis)) {
            timeAxes.add(timeAxis);

            List<CalendarDate> timeDates = timeAxis.getCalendarDates();
            for (CalendarDate timeDate : timeDates) {
                dateHash.add(timeDate);
            }
        }
    }
    if (!dateHash.isEmpty()) {
        gridDates = Arrays.asList(dateHash.toArray(new CalendarDate[dateHash.size()]));
        Collections.sort(gridDates);
    }

    return gridDates;
}
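
A side note on the example above (a generic sketch under the assumption that the element type is Comparable, using String in place of CalendarDate): the toArray + Arrays.asList round trip produces a fixed-size list that Collections.sort can still reorder in place, but the ArrayList copy constructor expresses the same set-to-sorted-list conversion more directly.

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;

public class SortedListFromSetDemo {
    public static void main(String[] args) {
        HashSet<String> hash = new HashSet<>();
        Collections.addAll(hash, "charlie", "alpha", "bravo");

        // The copy constructor replaces the toArray(...) + Arrays.asList(...) step.
        List<String> sorted = new ArrayList<>(hash);
        Collections.sort(sorted);
        System.out.println(sorted); // [alpha, bravo, charlie]
    }
}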

From source file:org.compass.core.lucene.engine.store.AbstractLuceneSearchEngineStore.java

public String[] calcSubIndexes(String[] subIndexes, String[] aliases) {
    if (aliases == null) {
        if (subIndexes == null) {
            return getSubIndexes();
        }
        return subIndexes;
    }
    HashSet<String> ret = new HashSet<String>();
    for (String alias : aliases) {
        List<String> subIndexesList = subIndexesByAlias.get(alias);
        if (subIndexesList == null) {
            throw new IllegalArgumentException("No sub-index is mapped to alias [" + alias + "]");
        }
        ret.addAll(subIndexesList);
    }
    if (subIndexes != null) {
        ret.addAll(Arrays.asList(subIndexes));
    }
    return ret.toArray(new String[ret.size()]);
}

From source file:com.qwazr.cluster.manager.ClusterManager.java

private ClusterManager(ExecutorService executor, String publicAddress, Set<String> myGroups)
        throws IOException, URISyntaxException {
    myAddress = ClusterNode.toAddress(publicAddress);
    if (logger.isInfoEnabled())
        logger.info("Server: " + myAddress + " Groups: " + ArrayUtils.prettyPrint(myGroups));
    this.myGroups = myGroups;

    this.executor = executor;

    // Load the configuration
    String masters_env = System.getenv("QWAZR_MASTERS");

    // No configuration? Then we are a simple node
    if (StringUtils.isEmpty(masters_env)) {
        clusterMasterArray = null;
        clusterNodeMap = null;
        clusterClient = null;
        checkTimeMap = null;
        lastTimeCheck = null;
        isMaster = false;
        isCluster = false;
        if (logger.isInfoEnabled())
            logger.info("No QWAZR_MASTERS environment variable. This node is not part of a cluster.");
        return;
    }

    // Store the last time a master checked the node
    checkTimeMap = new ConcurrentHashMap<>();
    lastTimeCheck = new AtomicLong();

    // Build the master list and check if I am a master
    boolean isMaster = false;
    HashSet<String> masterSet = new HashSet<>();
    String[] masters = StringUtils.split(masters_env, ',');
    for (String master : masters) {
        String masterAddress = ClusterNode.toAddress(master.trim());
        logger.info("Add a master: " + masterAddress);
        masterSet.add(masterAddress);
        if (masterAddress.equals(myAddress)) { // compare string values, not references
            isMaster = true;
            if (logger.isInfoEnabled())
                logger.info("I am a master!");
        }
    }
    isCluster = true;
    clusterMasterArray = masterSet.toArray(new String[masterSet.size()]);
    clusterClient = new ClusterMultiClient(executor, clusterMasterArray, 60000);
    this.isMaster = isMaster;
    if (!isMaster) {
        clusterNodeMap = null;
        return;
    }

    // We load the cluster node map
    clusterNodeMap = new ClusterNodeMap();
}