Example usage for java.util.HashSet size()

List of usage examples for java.util.HashSet.size()

Introduction

This page shows example usage of java.util.HashSet.size().

Prototype

public int size() 

Document

Returns the number of elements in this set (its cardinality).
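
Before the real-world examples below, here is a minimal, self-contained sketch (written for this page, not taken from any of the source files) showing what size() reports after duplicate insertions:

import java.util.HashSet;

public class HashSetSizeDemo {
    public static void main(String[] args) {
        HashSet<String> names = new HashSet<String>();
        names.add("alice");
        names.add("bob");
        names.add("alice"); // duplicate: a set keeps only one copy

        // size() returns the number of distinct elements, i.e. the set's cardinality
        System.out.println(names.size()); // prints 2

        names.clear();
        System.out.println(names.isEmpty() + " / " + names.size()); // prints "true / 0"
    }
}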

Usage

From source file:loci.plugins.util.LibraryChecker.java

/**
 * Reports missing libraries in the given hash set to the user.
 * @return true iff no libraries are missing (the hash set is empty).
 */
public static boolean checkMissing(HashSet<String> missing) {
    int num = missing.size();
    if (num == 0)
        return true;
    StringBuffer sb = new StringBuffer();
    sb.append("The following librar");
    sb.append(num == 1 ? "y was" : "ies were");
    sb.append(" not found:");
    Iterator<String> iter = missing.iterator();
    for (int i = 0; i < num; i++)
        sb.append("\n    " + iter.next());
    String them = num == 1 ? "it" : "them";
    sb.append("\nPlease download ");
    sb.append(them);
    sb.append(" from the LOCI website at");
    sb.append("\n    " + URL_LOCI_SOFTWARE);
    sb.append("\nand place ");
    sb.append(them);
    sb.append(" in the ImageJ plugins folder.");
    IJ.error("LOCI Plugins", sb.toString());
    return false;
}

From source file:Main.java

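/**
 * Builds and returns every subset of {@code initialSet} that contains exactly
 * {@code subSetSize} elements (i.e. all combinations of that size).
 */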
public static <T> Set<Set<T>> extractSubSets(Set<T> initialSet, int subSetSize) {
    int nbSources = initialSet.size();
    int expectedNumberOfSets = expectedNumberOfSets(nbSources, subSetSize);
    Set<Set<T>> setOfSets = new HashSet<>(expectedNumberOfSets);
    if (nbSources == subSetSize) {
        // Already OK
        setOfSets.add(initialSet);
        return setOfSets;
    }
    List<T> setAsList = new ArrayList<>(initialSet);
    int[] iterators = new int[subSetSize];
    for (int i = 0; i < iterators.length; i++) {
        iterators[i] = i;
    }
    while (setOfSets.size() != expectedNumberOfSets) {
        HashSet<T> result = new HashSet<>(subSetSize);
        for (int pos : iterators) {
            result.add(setAsList.get(pos));
        }
        if (result.size() != subSetSize) {
            throw new IllegalStateException("Hard!");
        }
        setOfSets.add(result);
        int maxPos = -1;
        for (int i = 0; i < iterators.length; i++) {
            int pos = iterators[i];
            if (pos == (nbSources - iterators.length + i)) {
                maxPos = i;
                break;
            }
        }
        if (maxPos == -1) {
            // Up last iterator
            iterators[iterators.length - 1]++;
        } else if (maxPos == 0) {
            // Finished
            if (setOfSets.size() != expectedNumberOfSets) {
                System.err.println("Something wrong!");
            }
        } else {
            // Up the one before maxPos and reinit the others
            iterators[maxPos - 1]++;
            for (int i = maxPos; i < iterators.length; i++) {
                iterators[i] = iterators[i - 1] + 1;
            }
        }
    }
    return setOfSets;
}

From source file:Main.java

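/**
 * Drains the cursor into a de-duplicated String array, always closing the cursor
 * afterwards; returns null when the cursor has no rows.
 */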
private static String[] convertCursorAsStringArrayWithCloseCursor(Cursor cursor, int colIdx) {
    String[] result = null;
    try {
        int resultCount = cursor.getCount();
        if (resultCount > 0) {
            HashSet<String> phones = new HashSet<String>(resultCount);
            while (cursor.moveToNext()) {
                String phone = cursor.getString(colIdx);
                phones.add(phone);
            }
            result = phones.toArray(new String[phones.size()]);
        }
        Log.d(TAG,
                "ConvertCursor As StringArray : found " + resultCount + " String converted from idx " + colIdx);
    } finally {
        cursor.close();
    }
    return result;
}

From source file:edu.illinois.cs.cogcomp.question_typer.QuestionTyperFeatureExtractorsUtils.java

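/**
 * Returns the label of every word group whose lemma set overlaps the lemmas of the
 * given TextAnnotation. The {@code list} map (group label to lemma set) is assumed
 * to be a field of the enclosing class.
 */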
public static List<String> getWordGroupFeatures(TextAnnotation s) {
    List<Constituent> lemma = s.getView(ViewNames.LEMMA).getConstituents();
    Set<String> lemmaLabels = new HashSet<>();
    for (Constituent c : lemma)
        lemmaLabels.add(c.getLabel());

    List<String> overlapLabels = new ArrayList<>();
    for (Object label : list.keySet()) {
        HashSet set = (HashSet) list.get(label);
        HashSet lemmaLabelsClone = new HashSet(lemmaLabels);
        lemmaLabelsClone.retainAll(set);
        if (lemmaLabelsClone.size() > 0)
            overlapLabels.add((String) label);
    }
    return overlapLabels;
}

From source file:Transform.java

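/**
 * Transforms each section of manual.xml into its own index.php page using the
 * html-pages.xsl stylesheet, failing fast when duplicate section ids are found.
 */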
public static void multiplePages() throws Exception {
    InputStream isXML = Transform.class.getResourceAsStream(file("manual.xml"));
    InputStream isXSL = Transform.class.getResourceAsStream("html-pages.xsl");

    StreamSource xsl = new StreamSource(isXSL);
    TransformerFactory factory = TransformerFactory.newInstance();
    Transformer transformer = factory.newTransformer(xsl);

    Match manual = $(isXML);
    replaceVariables(manual);

    List<String> ids = manual.find("section").ids();
    HashSet<String> uniqueIds = new HashSet<String>(ids);

    if (ids.size() != uniqueIds.size()) {
        for (String id : uniqueIds) {
            ids.remove(id);
        }

        throw new Exception("Duplicate section ids found! " + ids);
    }

    int blanks = 0, completed = 0;
    for (Match section : manual.find("section").each()) {
        Match sections = section.add(section.parents("section")).reverse();

        String path = path(StringUtils.join(sections.ids(), "/"));
        String relativePath = relative(path);
        String root = root();
        File dir = new File(path);
        dir.mkdirs();

        PrintStream stream = System.out;
        boolean blank = StringUtils.isBlank(section.find("content").text());
        if (blank) {
            blanks++;
            stream = System.err;
        } else {
            completed++;
        }

        stream.print("[");
        stream.print(blank ? " " : "x");
        stream.println("] Transforming section " + path);

        File file = new File(dir, "index.php");
        file.delete();
        FileOutputStream out = new FileOutputStream(file);

        Source source = new DOMSource(manual.document());
        Result target = new StreamResult(out);

        transformer.setParameter("sectionID", section.id());
        transformer.setParameter("relativePath", relativePath);
        transformer.setParameter("root", root);
        transformer.transform(source, target);

        out.close();
    }
    System.out.println("    Completed sections : " + completed + " / " + (blanks + completed) + " ("
            + (100 * completed / (blanks + completed)) + "%)");
}

From source file:edu.cudenver.bios.matrix.OrthogonalPolynomials.java

/**
 * Computes orthogonal polynomial contrasts for the specified data values.  Currently only
 * supports fitting (not prediction contrasts).  
 *
 * @param x the points at which the polynomials will be evaluated
 * @param maxDegree contrasts will be computed for degrees 1 to maxDegree
 * @return matrix containing the 0th, 1st, 2nd, ..., maxDegree-th degree contrasts in each column
 * @throws IllegalArgumentException
 */
public static RealMatrix orthogonalPolynomialCoefficients(double[] x, int maxDegree)
        throws IllegalArgumentException {
    if (x == null)
        throw new IllegalArgumentException("no data specified");
    if (maxDegree < 1)
        throw new IllegalArgumentException("max polynomial degree must be at least 1");
    // count number of unique values
    HashSet<Double> s = new HashSet<Double>();
    for (double i : x)
        s.add(i);
    int uniqueCount = s.size();
    if (maxDegree >= uniqueCount)
        throw new IllegalArgumentException(
                "max polynomial degree must be less than the number of unique points");

    // center the data
    double xBar = StatUtils.mean(x);
    double[] xCentered = new double[x.length];
    for (int i = 0; i < x.length; i++)
        xCentered[i] = x[i] - xBar;
    // compute an "outer product" of the centered x vector and a vector 
    // containing the sequence 0 to maxDegree-1, but raise the x values
    // to the power in the sequence array
    double[][] xOuter = new double[x.length][maxDegree + 1];
    int row = 0;
    for (double xValue : xCentered) {
        for (int col = 0; col <= maxDegree; col++) {
            xOuter[row][col] = Math.pow(xValue, col);
        }
        row++;
    }
    // do some mysterious QR decomposition stuff.  See Emerson (1968)
    RealMatrix outerVector = new Array2DRowRealMatrix(xOuter);
    QRDecomposition qrDecomp = new QRDecomposition(outerVector);

    RealMatrix z = MatrixUtils.getDiagonalMatrix(qrDecomp.getR());
    RealMatrix raw = qrDecomp.getQ().multiply(z);

    // column sum of squared elements in raw
    double[] normalizingConstants = new double[raw.getColumnDimension()];
    for (int col = 0; col < raw.getColumnDimension(); col++) {
        normalizingConstants[col] = 0;
        for (row = 0; row < raw.getRowDimension(); row++) {
            double value = raw.getEntry(row, col);
            normalizingConstants[col] += value * value;
        }
    }

    // now normalize the raw values
    for (int col = 0; col < raw.getColumnDimension(); col++) {
        double normalConstantSqrt = Math.sqrt(normalizingConstants[col]);
        for (row = 0; row < raw.getRowDimension(); row++) {
            raw.setEntry(row, col, raw.getEntry(row, col) / normalConstantSqrt);
        }
    }

    return raw;
}

From source file:SerialVersionUID.java

/**
 * Create a Map<String, ClassVersionInfo> for the J2EE RI dist jars.
 *
 * @param j2eeHome -
 *          the j2ee ri dist root directory
 * @return Map<String, ClassVersionInfo>
 * @throws IOException
 */
public static Map generateRISerialVersionUIDReport(File j2eeHome) throws IOException {
    // Obtain the jars from the /lib
    HashSet jarFiles = new HashSet();
    File lib = new File(j2eeHome, "lib");
    buildJarSet(lib, jarFiles);
    URL[] cp = new URL[jarFiles.size()];
    jarFiles.toArray(cp);
    ClassLoader parent = Thread.currentThread().getContextClassLoader();
    URLClassLoader completeClasspath = new URLClassLoader(cp, parent);

    TreeMap classVersionMap = new TreeMap();
    Iterator jarIter = jarFiles.iterator();
    while (jarIter.hasNext()) {
        URL jar = (URL) jarIter.next();
        try {
            generateJarSerialVersionUIDs(jar, classVersionMap, completeClasspath, "javax");
        } catch (IOException e) {
            log.info("Failed to process jar: " + jar);
        }
    }

    return classVersionMap;
}

From source file:org.oscarehr.util.TrackingBasicDataSource.java

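/**
 * Closes every JDBC connection tracked for the current thread ({@code connections}
 * is presumably a ThreadLocal holding a HashSet of connections) and then clears
 * the thread's tracking entry.
 */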
public static void releaseThreadConnections() {
    HashSet<Connection> threadConnections = connections.get();
    if (threadConnections != null && threadConnections.size() > 0) {

        threadConnections = new HashSet<Connection>(threadConnections);
        for (Connection c : threadConnections) {
            try {
                if (!c.isClosed()) {
                    c.close();
                }
            } catch (SQLException e) {
                logger.error("Error closing jdbc connection.", e);
            }
        }
    }

    connections.remove();
}

From source file:Main.java

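/**
 * Serializes the given taint categories into a single String extra on the intent;
 * intents that carry no extras are left untouched.
 */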
public static void addTaintInformationToIntent(Intent i, HashSet<String> taintCategories) {
    boolean intentHasNoExtras = i.getExtras() == null;

    // A limitation here: we only care about the intent's extras
    if (!intentHasNoExtras) {
        Bundle extras = i.getExtras();

        String taintKeyName = generateKeyNameForTaintInfo(extras.keySet());

        String taintInformation = null;

        if (taintCategories.size() > 1)
            taintInformation = taintCategories.toString().substring(1, taintCategories.toString().length() - 1);
        else
            taintInformation = taintCategories.iterator().next();

        i.putExtra(taintKeyName, taintInformation);
    }
}

From source file:com.battlelancer.seriesguide.util.TraktTools.java

/**
 * Uploads all watched and collected episodes to trakt.
 *
 * @return Any of the {@link TraktTools} result codes.
 */
public static int uploadToTrakt(Context context, Trakt trakt, HashSet<Integer> localShows) {
    if (localShows.size() == 0) {
        return SUCCESS_NOWORK;
    }

    ShowService showService = trakt.showService();
    for (Integer showTvdbId : localShows) {
        // build a list of all watched episodes
        /**
         * We do not have to worry about uploading episodes that are already watched on
         * trakt, it will keep the original timestamp of the episodes being watched.
         */
        List<ShowService.Episodes.Episode> watchedEpisodesToUpload = new ArrayList<>();
        Cursor watchedEpisodes = context.getContentResolver().query(
                SeriesGuideContract.Episodes.buildEpisodesOfShowUri(showTvdbId), EpisodesQuery.PROJECTION,
                SeriesGuideContract.Episodes.SELECTION_WATCHED, null, null);
        if (watchedEpisodes == null) {
            return FAILED;
        }
        buildEpisodeList(watchedEpisodesToUpload, watchedEpisodes);
        watchedEpisodes.close();

        // build a list of collected episodes
        List<ShowService.Episodes.Episode> collectedEpisodesToUpload = new ArrayList<>();
        Cursor collectedEpisodes = context.getContentResolver().query(
                SeriesGuideContract.Episodes.buildEpisodesOfShowUri(showTvdbId), EpisodesQuery.PROJECTION,
                SeriesGuideContract.Episodes.SELECTION_COLLECTED, null, null);
        if (collectedEpisodes == null) {
            return FAILED;
        }
        buildEpisodeList(collectedEpisodesToUpload, collectedEpisodes);
        collectedEpisodes.close();

        try {
            // post to trakt
            // watched episodes
            if (watchedEpisodesToUpload.size() > 0) {
                showService.episodeSeen(new ShowService.Episodes(showTvdbId, watchedEpisodesToUpload));
            }
            // collected episodes
            if (collectedEpisodesToUpload.size() > 0) {
                showService.episodeLibrary(new ShowService.Episodes(showTvdbId, collectedEpisodesToUpload));
            }
        } catch (RetrofitError e) {
            Timber.e(e, "Uploading episodes to trakt failed");
            return FAILED_API;
        }
    }

    return SUCCESS;
}