Example usage for java.util.HashSet.toArray

List of usage examples for java.util.HashSet.toArray

Introduction

On this page you can find example usage for java.util.HashSet.toArray.

Prototype

<T> T[] toArray(T[] a);

Document

Returns an array containing all of the elements in this set; the runtime type of the returned array is that of the specified array.
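
For orientation, here is a minimal, self-contained sketch of this idiom before the project excerpts below; the class and variable names are illustrative only and are not taken from any of the listed sources:

import java.util.Arrays;
import java.util.HashSet;

public class HashSetToArrayExample {
    public static void main(String[] args) {
        HashSet<String> names = new HashSet<>(Arrays.asList("carol", "alice", "bob"));
        // Passing a typed array tells toArray which runtime component type to return;
        // a zero-length array is enough, because the method allocates a new array
        // of the same type whenever the one supplied is too small.
        String[] asArray = names.toArray(new String[0]);
        System.out.println(Arrays.toString(asArray));
    }
}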

Usage

From source file:org.sakaiproject.component.impl.BasicConfigurationService.java

/**
 * Get the list of allowed locales as controlled by config params for {@value #SAKAI_LOCALES_KEY} and {@value #SAKAI_LOCALES_MORE}
 * @return an array of all allowed Locales for this installation
 */
public Locale[] getSakaiLocales() {
    String localesStr = getString(SAKAI_LOCALES_KEY, SakaiLocales.SAKAI_LOCALES_DEFAULT);
    if (localesStr == null) { // means locales= is set
        localesStr = ""; // empty to get default locale only
    } else if (StringUtils.isBlank(localesStr)) { // missing or not set
        localesStr = SakaiLocales.SAKAI_LOCALES_DEFAULT;
    }
    String[] locales = StringUtils.split(localesStr, ','); // NOTE: these need to be trimmed (which getLocaleFromString will do)
    String[] localesMore = getStrings(SAKAI_LOCALES_MORE);

    locales = (String[]) ArrayUtils.addAll(locales, localesMore);
    HashSet<Locale> localesSet = new HashSet<Locale>();
    // always include the default locale
    localesSet.add(Locale.getDefault());
    if (!ArrayUtils.isEmpty(locales)) {
        // convert from strings to Locales
        //noinspection ForLoopReplaceableByForEach
        for (int i = 0; i < locales.length; i++) {
            localesSet.add(getLocaleFromString(locales[i]));
        }
    }
    // Sort Locales and remove duplicates
    Locale[] localesArray = localesSet.toArray(new Locale[localesSet.size()]);
    Arrays.sort(localesArray, new LocaleComparator());
    return localesArray;
}

From source file:org.gcaldaemon.core.notifier.GmailNotifier.java

private static final String[] getActiveUsers() {
    HashSet users = new HashSet();
    try {
        String me = System.getProperty("user.name");
        if (me != null) {
            users.add(me);
        }
    } catch (Exception ignored) {
    }
    try {
        String os = System.getProperty("os.name", "unknown");
        if (commandExecutable && os.toLowerCase().indexOf("windows") != -1) {

            // Execute script
            ProcessBuilder builder = new ProcessBuilder(TASK_COMMAND);
            Process tasklist = builder.start();

            // Read command output
            InputStream in = tasklist.getInputStream();
            QuickWriter buffer = new QuickWriter();
            BufferedInputStream bis = new BufferedInputStream(in);
            InputStreamReader isr = new InputStreamReader(bis);
            char[] chars = new char[1024];
            int len;
            while ((len = isr.read(chars)) != -1) {
                buffer.write(chars, 0, len);
            }

            // Parse output
            String token, out = buffer.toString();
            StringTokenizer lines = new StringTokenizer(out, "\r\n");
            StringTokenizer tokens;
            int i;
            while (lines.hasMoreTokens()) {
                tokens = new StringTokenizer(lines.nextToken(), "\"", false);
                while (tokens.hasMoreTokens()) {
                    token = tokens.nextToken();
                    i = token.indexOf('\\');
                    if (i != -1) {
                        token = token.substring(i + 1);
                        if (token.length() != 0) {
                            users.add(token);
                            break;
                        }
                    }
                }
            }

        }
    } catch (Exception invalidSyntax) {
        commandExecutable = false;
        log.debug(invalidSyntax);
    }
    String[] array = new String[users.size()];
    if (array.length > 0) {
        users.toArray(array);
        if (log.isDebugEnabled()) {
            QuickWriter writer = new QuickWriter(100);
            for (int i = 0; i < array.length; i++) {
                writer.write(array[i]);
                if (i < array.length - 1) {
                    writer.write(", ");
                }
            }
            log.debug("Active users: " + writer.toString());
        }
    }
    return array;
}

From source file:com.linkedin.harisekhon.Utils.java

public static final String[] uniqArray(String[] list) {
    HashSet<String> set = new HashSet<String>();
    for (String item : list) {
        set.add(item);
    }
    String[] a = {};
    return set.toArray(a);
}
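
A side note on the call above (a general observation about toArray, not part of the original project code): because the zero-length array a cannot hold the elements, toArray allocates and returns a new String[] of the required size, so the method produces the same result as the pre-sized form used elsewhere on this page:

// equivalent pre-sized variant; set is the HashSet<String> built above
String[] result = set.toArray(new String[set.size()]);

Both forms are correct; the pre-sized form fills the array you pass in, while the zero-length form always returns a freshly allocated array.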

From source file:compare.handler.get.CompareGetHandler.java

/**
 * Format the requested URN version as HTML
 * @param request the original http request
 * @param response the response to write to
 * @param urn the original request urn
 * @throws CompareException if the response could not be written
 */
protected void handleGetVersion(HttpServletRequest request, HttpServletResponse response, String urn)
        throws CompareException, CalliopeException {
    String version1 = request.getParameter(Params.VERSION1);
    if (version1 == null) {
        try {
            response.getWriter().println("<p>version1 parameter required</p>");
        } catch (Exception e) {
            throw new CompareException(e);
        }
    } else {
        String selectedVersions = request.getParameter(Params.SELECTED_VERSIONS);
        //System.out.println("version1="+version1);
        EcdosisVersion corTex = doGetResourceVersion(Database.CORTEX, urn, version1);
        // 1. get corcodes and styles
        String[] corCodes = request.getParameterValues(Params.CORCODE);
        String[] styles = request.getParameterValues(Params.STYLE);
        HashSet<String> styleSet = new HashSet<String>();
        for (int i = 0; i < styles.length; i++)
            styleSet.add(styles[i]);
        try {
            for (int i = 0; i < corCodes.length; i++) {
                String ccResource = Utils.canonisePath(urn, corCodes[i]);
                EcdosisVersion ev = doGetResourceVersion(Database.CORCODE, ccResource, version1);
                Comment comment = new Comment();
                comment.addText("version-length: " + ev.getVersionLength());
                response.setCharacterEncoding("UTF-8");
                response.getWriter().println(comment.toString());
                styleSet.add(ev.getStyle());
                corCodes[i] = ev.getVersionString();
            }
        } catch (Exception e) {
            // this won't ever happen because UTF-8 is always supported
            throw new CompareException(e);
        }
        // 2. add mergeids if needed
        if (selectedVersions != null && selectedVersions.length() > 0) {
            corCodes = addMergeIds(corCodes, corTex.getMVD(), version1, selectedVersions);
            styleSet.add("diffs/default");
        }
        // 3. recompute styles array (docids)
        styles = new String[styleSet.size()];
        styleSet.toArray(styles);
        // 4. convert style names to actual corforms
        styles = fetchStyles(styles);
        // 5. call the native library to format it
        JSONResponse html = new JSONResponse(JSONResponse.HTML);
        String text = corTex.getVersionString();
        int res = new AeseFormatter().format(text, corCodes, styles, html);
        if (res == 0)
            throw new CompareException("formatting failed");
        else {
            response.setContentType("text/html;charset=UTF-8");
            try {
                Comment comment = new Comment();
                comment.addText("styles: ");
                for (int i = 0; i < styles.length; i++)
                    comment.addText(styles[i]);
                response.getWriter().println(comment.toString());
                response.getWriter().println(html.getBody());
            } catch (Exception e) {
                throw new CompareException(e);
            }
        }
    }
}

From source file:org.cytobank.acs.core.TableOfContents.java

/**
 * Returns an array of all unique <code>FileResourceIdentifier</code>s contained within this <code>TableOfContents</code> instance
 * that have been identified as project workspaces.
 *
 * @return an array of all the <code>FileResourceIdentifier</code>s that are project workspaces
 * @throws InvalidIndexException If there is a problem with the <code>TableOfContents</code>
 * @throws URISyntaxException If there is a problem with any of the URIs contained within the <code>TableOfContents</code> or if the URI is a duplicate
 * @throws InvalidAssociationException if there is an invalid association
 * @see RelationshipTypes#isProjectWorkspace
 */
public FileResourceIdentifier[] getProjectWorkspaces()
        throws InvalidAssociationException, InvalidIndexException, URISyntaxException {
    HashSet<FileResourceIdentifier> projectWorkspaces = new HashSet<FileResourceIdentifier>();

    // Find all fileResource associations and add the associated file to projectWorkspaces
    // if that association relationship is a project workspace.
    for (FileResourceIdentifier fileResource : fileResourceIdentifiers) {
        for (Association association : fileResource.associations) {
            if (RelationshipTypes.isProjectWorkspace(association.getRelationship())) {
                projectWorkspaces.add(association.getAssociatedTo());
            }
        }
    }

    FileResourceIdentifier[] results = new FileResourceIdentifier[projectWorkspaces.size()];
    projectWorkspaces.toArray(results);

    return results;
}

From source file:org.apache.sysml.hops.codegen.template.PlanSelectionFuseCostBased.java

private void createAndAddMultiAggPlans(CPlanMemoTable memo, ArrayList<Hop> roots) {
    //collect full aggregations as initial set of candidates
    HashSet<Long> fullAggs = new HashSet<Long>();
    Hop.resetVisitStatus(roots);
    for (Hop hop : roots)
        rCollectFullAggregates(hop, fullAggs);
    Hop.resetVisitStatus(roots);

    //remove operators with assigned multi-agg plans
    fullAggs.removeIf(p -> memo.contains(p, TemplateType.MultiAggTpl));

    //check applicability for further analysis
    if (fullAggs.size() <= 1)
        return;

    if (LOG.isTraceEnabled()) {
        LOG.trace("Found across-partition ua(RC) aggregations: "
                + Arrays.toString(fullAggs.toArray(new Long[0])));
    }

    //collect information for all candidates 
    //(subsumed aggregations, and inputs to fused operators) 
    List<AggregateInfo> aggInfos = new ArrayList<AggregateInfo>();
    for (Long hopID : fullAggs) {
        Hop aggHop = memo._hopRefs.get(hopID);
        AggregateInfo tmp = new AggregateInfo(aggHop);
        for (int i = 0; i < aggHop.getInput().size(); i++) {
            Hop c = HopRewriteUtils.isMatrixMultiply(aggHop) && i == 0
                    ? aggHop.getInput().get(0).getInput().get(0)
                    : aggHop.getInput().get(i);
            rExtractAggregateInfo(memo, c, tmp, TemplateType.CellTpl);
        }
        if (tmp._fusedInputs.isEmpty()) {
            if (HopRewriteUtils.isMatrixMultiply(aggHop)) {
                tmp.addFusedInput(aggHop.getInput().get(0).getInput().get(0).getHopID());
                tmp.addFusedInput(aggHop.getInput().get(1).getHopID());
            } else
                tmp.addFusedInput(aggHop.getInput().get(0).getHopID());
        }
        aggInfos.add(tmp);
    }

    if (LOG.isTraceEnabled()) {
        LOG.trace("Extracted across-partition ua(RC) aggregation info: ");
        for (AggregateInfo info : aggInfos)
            LOG.trace(info);
    }

    //sort aggregations by num dependencies to simplify merging
    //clusters of aggregations with parallel dependencies
    aggInfos = aggInfos.stream().sorted(Comparator.comparing(a -> a._inputAggs.size()))
            .collect(Collectors.toList());

    //greedy grouping of multi-agg candidates
    boolean converged = false;
    while (!converged) {
        AggregateInfo merged = null;
        for (int i = 0; i < aggInfos.size(); i++) {
            AggregateInfo current = aggInfos.get(i);
            for (int j = i + 1; j < aggInfos.size(); j++) {
                AggregateInfo that = aggInfos.get(j);
                if (current.isMergable(that)) {
                    merged = current.merge(that);
                    aggInfos.remove(j);
                    j--;
                }
            }
        }
        converged = (merged == null);
    }

    if (LOG.isTraceEnabled()) {
        LOG.trace("Merged across-partition ua(RC) aggregation info: ");
        for (AggregateInfo info : aggInfos)
            LOG.trace(info);
    }

    //construct and add multiagg template plans (w/ max 3 aggregations)
    for (AggregateInfo info : aggInfos) {
        if (info._aggregates.size() <= 1)
            continue;
        Long[] aggs = info._aggregates.keySet().toArray(new Long[0]);
        MemoTableEntry me = new MemoTableEntry(TemplateType.MultiAggTpl, aggs[0], aggs[1],
                (aggs.length > 2) ? aggs[2] : -1);
        for (int i = 0; i < aggs.length; i++) {
            memo.add(memo._hopRefs.get(aggs[i]), me);
            addBestPlan(aggs[i], me);
            if (LOG.isTraceEnabled())
                LOG.trace("Added multiagg* plan: " + aggs[i] + " " + me);

        }
    }
}

From source file:org.strasa.middleware.manager.CreateFieldBookManagerImpl.java

/**
 * Validate site.
 * 
 * @param shObservation
 *            the sh observation
 * @param shSiteInfo
 *            the sh site info
 * @throws Exception
 *             the exception
 */
public void validateSite(Sheet shObservation, Sheet shSiteInfo) throws Exception {

    Integer colSite = getHeaderColumnNumber("Site", shObservation);
    HashSet<String> uniqueSite = new HashSet<String>();
    uniqueSite.addAll(readRowsByColumn(shObservation, 1, colSite).get(0));
    System.out.println(readRowsByColumn(shObservation, 1, colSite).get(0).get(0));
    ArrayList<String> lstUnknownSite = readRowsByColumn(shSiteInfo, 1, 0).get(0);
    if (lstUnknownSite.size() > uniqueSite.size()) {

        lstUnknownSite.removeAll(uniqueSite);
        throw new CreateFieldBookException("Invalid list of sites detected. Could not find {"
                + StringUtils.join(lstUnknownSite.toArray(new String[lstUnknownSite.size()]), ",")
                + "} in Observation sheet.");
    }
    if (uniqueSite.size() > lstUnknownSite.size()) {
        uniqueSite.removeAll(lstUnknownSite);
        throw new CreateFieldBookException("Invalid list of sites detected. Could not find {"
                + StringUtils.join(uniqueSite.toArray(new String[uniqueSite.size()]), ",")
                + "} in Site Information sheet.");

    }
    lstUnknownSite.removeAll(uniqueSite);
    if (!lstUnknownSite.isEmpty()) {
        throw new CreateFieldBookException("Invalid list of sites detected. Could not find {"
                + StringUtils.join(lstUnknownSite.toArray(new String[lstUnknownSite.size()]), ",")
                + "} in Site Information sheet.");

    }

}

From source file:ml.shifu.shifu.core.dtrain.nn.NNMaster.java

private HashSet<Integer> dropoutNodes() {
    Random random = new Random(System.currentTimeMillis());

    HashSet<Integer> droppedNodeIndices = new HashSet<Integer>();

    // from input to last hidden layer. (exclude output layer)
    for (int i = this.flatNetwork.getLayerIndex().length - 1; i > 0; i--) {
        int beginNeuronIndex = this.flatNetwork.getLayerIndex()[i];
        // exclude constant neuron
        int neuronCount = this.flatNetwork.getLayerFeedCounts()[i];

        // from first neuron to last neuron in current layer
        for (int j = 0; j < neuronCount; j++) {
            if (random.nextDouble() < this.flatNetwork.getLayerDropoutRates()[i]) {
                // drop this node by adding it into list and will passing
                // this list to workers
                droppedNodeIndices.add(beginNeuronIndex + j);
            }
        }
    }

    LOG.info("layerIndex:{}; layerCounts:{}; dropoutNodes:{}",
            Arrays.toString(this.flatNetwork.getLayerIndex()),
            Arrays.toString(this.flatNetwork.getLayerCounts()),
            Arrays.toString(droppedNodeIndices.toArray(new Integer[droppedNodeIndices.size()])));
    return droppedNodeIndices;
}
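
A small aside on the logging call above (an observation, not part of the original project): toArray can only yield an array of the boxed Integer type. If a primitive int[] were wanted instead, the set would have to be unboxed explicitly, for example with a stream:

// hypothetical variant: unbox a Set<Integer> such as droppedNodeIndices into an int[]
int[] indices = droppedNodeIndices.stream().mapToInt(Integer::intValue).toArray();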

From source file:org.apache.solr.handler.component.FacetComponent.java

@Override
public void prepare(ResponseBuilder rb) throws IOException {
    if (rb.req.getParams().getBool(FacetParams.FACET, false)) {
        rb.setNeedDocSet(true);
        rb.doFacets = true;

        // Deduplicate facet params
        ModifiableSolrParams params = new ModifiableSolrParams();
        SolrParams origParams = rb.req.getParams();
        Iterator<String> iter = origParams.getParameterNamesIterator();
        while (iter.hasNext()) {
            String paramName = iter.next();
            // Deduplicate the list with LinkedHashSet, but _only_ for facet params.
            if (!paramName.startsWith(FacetParams.FACET)) {
                params.add(paramName, origParams.getParams(paramName));
                continue;
            }
            HashSet<String> deDupe = new LinkedHashSet<>(Arrays.asList(origParams.getParams(paramName)));
            params.add(paramName, deDupe.toArray(new String[deDupe.size()]));

        }
        rb.req.setParams(params);

        // Initialize context
        FacetContext.initContext(rb);
    }
}
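
Note on the excerpt above (an observation, not from the original comments): although deDupe is declared as HashSet<String>, the instance is a LinkedHashSet, so deDupe.toArray(new String[deDupe.size()]) returns the facet parameter values in their original order with duplicates removed.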

From source file:com.cloud.bridge.io.S3CAStorBucketAdapter.java

private synchronized ScspClient myClient(String mountedRoot) {
    if (_scspClient != null) {
        return _scspClient;
    }
    // The castor cluster is specified either by listing the ip addresses of some nodes, or
    // by specifying "zeroconf=" and the cluster's mdns name -- this is "cluster" in castor's node.cfg.
    // The "domain" to store streams can be specified. If not specified, streams will be written
    // without a "domain" query arg, so they will go into the castor default domain.
    // The port is optional and must be at the end of the config string, defaults to 80.
    // Examples: "castor 172.16.78.130 172.16.78.131 80", "castor 172.16.78.130 domain=mycluster.example.com", 
    // "castor zeroconf=mycluster.example.com domain=mycluster.example.com 80"
    String[] cfg = mountedRoot.split(" ");
    int numIPs = cfg.length - 1;
    String possiblePort = cfg[cfg.length - 1];
    int castorPort = DEFAULT_SCSP_PORT;
    try {
        castorPort = Integer.parseInt(possiblePort);
        --numIPs;
    } catch (NumberFormatException nfe) {
        // okay, it's an ip address, not a port number
    }
    if (numIPs <= 0) {
        throw new ConfigurationException("No CAStor nodes specified in '" + mountedRoot + "'");
    }
    HashSet<String> ips = new HashSet<String>();
    String clusterName = null;
    for (int i = 0; i < numIPs; ++i) {
        String option = cfg[i + 1]; // ip address or zeroconf=mycluster.example.com or domain=mydomain.example.com
        if (option.toLowerCase().startsWith("zeroconf=")) {
            String[] confStr = option.split("=");
            if (confStr.length != 2) {
                throw new ConfigurationException("Could not parse cluster name from '" + option + "'");
            }
            clusterName = confStr[1];
        } else if (option.toLowerCase().startsWith("domain=")) {
            String[] confStr = option.split("=");
            if (confStr.length != 2) {
                throw new ConfigurationException("Could not parse domain name from '" + option + "'");
            }
            _domain = confStr[1];
        } else {
            ips.add(option);
        }
    }
    if (clusterName == null && ips.isEmpty()) {
        throw new ConfigurationException("No CAStor nodes specified in '" + mountedRoot + "'");
    }
    String[] castorNodes = ips.toArray(new String[0]); // list of configured nodes
    if (clusterName == null) {
        try {
            _locator = new StaticLocator(castorNodes, castorPort, LOCATOR_RETRY_TIMEOUT);
            _locator.start();
        } catch (IOException e) {
            throw new ConfigurationException(
                    "Could not create CAStor static locator for '" + Arrays.toString(castorNodes) + "'");
        }
    } else {
        try {
            clusterName = clusterName.replace(".", "_"); // workaround needed for CAStorSDK 1.3.1
            _locator = new ZeroconfLocator(clusterName);
            _locator.start();
        } catch (IOException e) {
            throw new ConfigurationException(
                    "Could not create CAStor zeroconf locator for '" + clusterName + "'");
        }
    }
    try {
        s_logger.info("CAStor client starting: " + (_domain == null ? "default domain" : "domain " + _domain)
                + " " + (clusterName == null ? Arrays.toString(castorNodes) : clusterName) + " :" + castorPort);
        _scspClient = new ScspClient(_locator, castorPort, DEFAULT_MAX_POOL_SIZE, DEFAULT_MAX_RETRIES,
                CONNECTION_TIMEOUT, CM_IDLE_TIMEOUT);
        _scspClient.start();
    } catch (Exception e) {
        s_logger.error("Unable to create CAStor client for '" + mountedRoot + "': " + e.getMessage(), e);
        throw new ConfigurationException("Unable to create CAStor client for '" + mountedRoot + "': " + e);
    }
    return _scspClient;
}