Example usage for java.util HashSet add

List of usage examples for java.util HashSet add

Introduction

On this page you can find example usages of the java.util.HashSet add method.

Prototype

public boolean add(E e) 

Source Link

Document

Adds the specified element to this set if it is not already present.

Usage

From source file:com.expedia.seiso.domain.service.search.SpaceDelimitedDatabaseWildCardTokenizer.java

/**
 * Splits a raw search-terms string on TERM_DELIMITER and wraps each
 * non-empty, trimmed term in the database wildcard marker.
 *
 * @param termsString the raw delimiter-separated terms (null/empty tolerated)
 * @return the wildcard-wrapped tokens in first-seen order; empty set when
 *         the input is null or empty
 */
public Set<String> tokenize(String termsString) {
    // LinkedHashSet: dedupe while preserving the order terms first appear.
    Set<String> tokens = new LinkedHashSet<String>();

    if (!StringUtils.isEmpty(termsString)) {
        for (String term : termsString.split(TERM_DELIMITER)) {
            if (!StringUtils.isEmpty(term)) {
                // Plain concatenation instead of new StringBuilder(WILD_CARD):
                // if WILD_CARD were ever a char, that constructor would be the
                // int-capacity overload and silently drop the leading wildcard.
                tokens.add(WILD_CARD + term.trim() + WILD_CARD);
            }
        }
    }

    return tokens;
}

From source file:mobisocial.musubi.util.OGUtil.java

/**
 * Fetches a URL and builds an {@link OGData} describing it: mime type,
 * title, description, canonical url and a scaled PNG thumbnail.
 *
 * Behavior by content type:
 *  - image/*: decode and thumbnail the response body, return immediately
 *  - text/html or application/xhtml: scrape og: meta tags; if no og:image
 *    was usable, scan &lt;img&gt; tags and keep the largest decodable image
 *  - anything else: return with only the mime type filled in
 *
 * @param url the page (or image) to describe
 * @return the populated OGData, or null when the page could not be fetched,
 *         had no content type, or its primary image could not be decoded
 */
public static OGData getOrGuess(String url) {
    DefaultHttpClient hc = new DefaultHttpClient();
    HttpResponse res;
    try {
        HttpGet hg = new HttpGet(url);
        res = hc.execute(hg);
    } catch (Exception e) {
        Log.e(TAG, "unable to fetch page to get og tags", e);
        return null;
    }
    String location = url;
    //TODO: if some kind of redirect magic happened, then
    //make the location match that

    OGData og = new OGData();
    HttpEntity he = res.getEntity();
    Header content_type = he.getContentType();
    //TODO: check the content directly if they forget the type header
    if (content_type == null || content_type.getValue() == null) {
        Log.e(TAG, "page missing content type ..abandoning: " + url);
        return null;
    }
    og.mMimeType = content_type.getValue();
    //just make a thumbnail if the shared item is an image
    if (og.mMimeType.startsWith("image/")) {
        Bitmap b;
        try {
            b = BitmapFactory.decodeStream(he.getContent());
        } catch (Exception e) {
            return null;
        }
        // decodeStream returns null (without throwing) on undecodable data;
        // previously b.getWidth() below crashed with an uncaught NPE.
        if (b == null) {
            return null;
        }
        // Scale the longer edge to 200px, preserving aspect ratio.
        // NOTE(review): unlike the branches below this also UPscales images
        // smaller than 200px — presumably intentional; confirm.
        int w = b.getWidth();
        int h = b.getHeight();
        if (w > h) {
            h = h * 200 / w;
            w = 200;
        } else {
            w = w * 200 / h;
            h = 200;
        }

        Bitmap b2 = Bitmap.createScaledBitmap(b, w, h, true);
        b.recycle();
        b = b2;
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        b.compress(CompressFormat.PNG, 100, baos);
        og.mImage = baos.toByteArray();
        b.recycle();
        return og;
    }
    //if its not html, we can't extract more details, the caller
    //should rely on what they already know.
    if (!og.mMimeType.startsWith("text/html") && !og.mMimeType.startsWith("application/xhtml")) {
        Log.e(TAG, "shared content is not a known type for meta data processing " + og.mMimeType);
        return og;
    }

    String html;
    try {
        html = IOUtils.toString(he.getContent());
    } catch (Exception e) {
        Log.e(TAG, "failed to read html content", e);
        return og;
    }

    // Fall back to the <title> tag; an og:title meta tag (below) overrides it.
    Matcher m = sTitleRegex.matcher(html);
    if (m.find()) {
        og.mTitle = StringEscapeUtils.unescapeHtml4(m.group(1));
    }

    // Walk every <meta> tag, pulling out og: properties.
    m = sMetaRegex.matcher(html);
    int offset = 0;
    String raw_description = null;
    while (m.find(offset)) {
        try {
            String meta_tag = m.group();
            Matcher mp = sPropertyOfMeta.matcher(meta_tag);
            if (!mp.find())
                continue;
            String type = mp.group(1);
            type = type.substring(1, type.length() - 1); // strip surrounding quotes
            Matcher md = sContentOfMeta.matcher(meta_tag);
            if (!md.find())
                continue;
            String data = md.group(1);
            //remove quotes
            data = data.substring(1, data.length() - 1);
            data = StringEscapeUtils.unescapeHtml4(data);
            if (type.equalsIgnoreCase("og:title")) {
                og.mTitle = data;
            } else if (type.equalsIgnoreCase("og:image")) {
                HttpResponse resi;
                try {
                    HttpGet hgi = new HttpGet(data);
                    resi = hc.execute(hgi);
                } catch (Exception e) {
                    Log.e(TAG, "unable to fetch og image url", e);
                    continue;
                }
                HttpEntity hei = resi.getEntity();
                // Fixed: hei.getContentType() was dereferenced unchecked; a
                // missing entity or content-type header NPE'd uncaught (the
                // enclosing try has only a finally, no catch).
                if (hei == null) {
                    Log.e(TAG, "og image response has no entity");
                    continue;
                }
                Header image_type = hei.getContentType();
                if (image_type == null || image_type.getValue() == null) {
                    Log.e(TAG, "og image response missing content type, decoding anyway");
                } else if (!image_type.getValue().startsWith("image/")) {
                    Log.e(TAG, "image og tag points to non image data" + image_type.getValue());
                }
                try {
                    Bitmap b;
                    try {
                        b = BitmapFactory.decodeStream(hei.getContent());
                    } catch (Exception e) {
                        return null;
                    }
                    // null = undecodable; skip this candidate (previously this
                    // NPE'd into the catch below with the same net effect).
                    if (b == null) {
                        continue;
                    }
                    // Scale the longer edge down to at most 200px (no upscale).
                    int w = b.getWidth();
                    int h = b.getHeight();
                    if (w > h) {
                        h = h * Math.min(200, w) / w;
                        w = Math.min(200, w);
                    } else {
                        w = w * Math.min(200, h) / h;
                        h = Math.min(200, h);
                    }
                    Bitmap b2 = Bitmap.createScaledBitmap(b, w, h, true);
                    b.recycle();
                    b = b2;
                    ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    b.compress(CompressFormat.PNG, 100, baos);
                    b.recycle();
                    og.mImage = baos.toByteArray();
                } catch (Exception e) {
                    Log.e(TAG, "failed to fetch image for og", e);
                    continue;
                }
            } else if (type.equalsIgnoreCase("description")) {
                raw_description = data;
            } else if (type.equalsIgnoreCase("og:description")) {
                og.mDescription = data;
            } else if (type.equalsIgnoreCase("og:url")) {
                og.mUrl = data;
            }
        } finally {
            // Always advance past this match, even on continue.
            offset = m.end();
        }
    }
    // No usable og:image? Scan <img> tags and keep the biggest decodable one.
    HashSet<String> already_fetched = new HashSet<String>();
    if (og.mImage == null) {
        int max_area = 0;
        m = sImageRegex.matcher(html);
        int img_offset = 0;
        while (m.find(img_offset)) {
            try {
                String img_tag = m.group();
                Matcher ms = sSrcOfImage.matcher(img_tag);
                if (!ms.find())
                    continue;
                String img_src = ms.group(1);
                img_src = img_src.substring(1, img_src.length() - 1); // strip quotes
                img_src = StringEscapeUtils.unescapeHtml4(img_src);
                //don't fetch an image twice (like little 1x1 images)
                if (already_fetched.contains(img_src))
                    continue;
                already_fetched.add(img_src);
                HttpResponse resi;
                try {
                    // Resolve relative image urls against the page location.
                    HttpGet hgi = new HttpGet(new URL(new URL(location), img_src).toString());
                    resi = hc.execute(hgi);
                } catch (Exception e) {
                    Log.e(TAG, "unable to fetch image url for biggest image search" + img_src, e);
                    continue;
                }
                HttpEntity hei = resi.getEntity();
                if (hei == null) {
                    Log.w(TAG, "image missing en ..trying entity response: " + url);
                    continue;
                }
                Header content_type_image = hei.getContentType();
                // Fixed: the original logged "trying anyway" for a null
                // content type and then immediately dereferenced it on the
                // next line, throwing an uncaught NPE.
                if (content_type_image == null || content_type_image.getValue() == null) {
                    Log.w(TAG, "image missing content type ..trying anyway: " + url);
                } else if (!content_type_image.getValue().startsWith("image/")) {
                    Log.w(TAG, "image tag points to non image data " + hei.getContentType().getValue() + " "
                            + img_src);
                }
                try {
                    Bitmap b;
                    try {
                        b = BitmapFactory.decodeStream(hei.getContent());
                    } catch (Exception e) {
                        return null;
                    }
                    // Undecodable data: skip this image (previously an NPE
                    // into the catch below with the same net effect).
                    if (b == null) {
                        continue;
                    }
                    int w = b.getWidth();
                    int h = b.getHeight();
                    // Fixed: compare and remember the NATURAL (pre-scale)
                    // area. The original stored the post-scale area (capped
                    // at 200x200), so almost any later image "won" and the
                    // biggest-image search was effectively last-image-wins.
                    int area = w * h;
                    if (area <= max_area) {
                        continue;
                    }
                    if (w < 32 || h < 32) {
                        //skip dinky crap
                        continue;
                    }
                    if (w > h) {
                        h = h * Math.min(200, w) / w;
                        w = Math.min(200, w);
                    } else {
                        w = w * Math.min(200, h) / h;
                        h = Math.min(200, h);
                    }
                    Bitmap b2 = Bitmap.createScaledBitmap(b, w, h, true);
                    b.recycle();
                    b = b2;
                    ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    b.compress(CompressFormat.PNG, 100, baos);
                    og.mImage = baos.toByteArray();
                    b.recycle();
                    max_area = area;
                } catch (Exception e) {
                    Log.e(TAG, "failed to fetch image for og", e);
                    continue;
                }
            } finally {
                img_offset = m.end();
            }
        }

    }
    // Plain <meta name="description"> is only a fallback for og:description.
    if (og.mDescription == null)
        og.mDescription = raw_description;
    return og;
}

From source file:org.matsim.pt.analysis.RouteTimeDiagram.java

/**
 * Renders a route-time diagram for the given transit route and writes it
 * to the given file as a 1024x768 PNG.
 *
 * @param filename target PNG path
 * @param route    the transit route whose vehicle trajectories are plotted
 */
public void createGraph(final String filename, final TransitRoute route) {

    // Index each stop facility by its position along the route.
    Map<Id, Integer> stopPositions = new HashMap<Id, Integer>();
    int position = 0;
    for (TransitRouteStop stop : route.getStops()) {
        stopPositions.put(stop.getStopFacility().getId(), position++);
    }

    // Vehicles that serve this route.
    HashSet<Id> routeVehicles = new HashSet<Id>();
    for (Departure departure : route.getDepartures().values()) {
        routeVehicles.add(departure.getVehicleId());
    }

    XYSeriesCollection dataset = new XYSeriesCollection();
    int seriesCount = 0;
    double minTime = Double.POSITIVE_INFINITY;
    double maxTime = Double.NEGATIVE_INFINITY;

    // One series per vehicle: (stop position, time) samples in order.
    for (Map.Entry<Id, List<Tuple<Id, Double>>> entry : this.positions.entrySet()) {
        if (!routeVehicles.contains(entry.getKey())) {
            continue;
        }
        XYSeries series = new XYSeries("t", false, true);
        for (Tuple<Id, Double> sample : entry.getValue()) {
            Integer stopPosition = stopPositions.get(sample.getFirst());
            if (stopPosition == null) {
                continue;
            }
            double time = sample.getSecond().doubleValue();
            series.add(stopPosition.intValue(), time);
            minTime = Math.min(minTime, time);
            maxTime = Math.max(maxTime, time);
        }
        dataset.addSeries(series);
        seriesCount++;
    }

    JFreeChart chart = ChartFactory.createXYLineChart("Route-Time Diagram, Route = " + route.getId(), "stops",
            "time", dataset, PlotOrientation.VERTICAL, false, // legend?
            false, // tooltips?
            false // URLs?
    );
    chart.setBackgroundPaint(new Color(1.0f, 1.0f, 1.0f, 1.0f));

    XYPlot plot = (XYPlot) chart.getPlot();

    // Time runs downward, clipped to the observed range; all series black.
    plot.getRangeAxis().setInverted(true);
    plot.getRangeAxis().setRange(minTime, maxTime);
    XYItemRenderer renderer = plot.getRenderer();
    for (int i = 0; i < seriesCount; i++) {
        renderer.setSeriesPaint(i, Color.black);
    }

    try {
        ChartUtilities.saveChartAsPNG(new File(filename), chart, 1024, 768, null, true, 9);
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:RemoveDuplicateFiles.java

/**
 * Wires the main window's buttons to their handlers: navigation to the
 * SortFiles and BatchRename windows, and the duplicate-file removal action.
 *
 * @param primaryStage the stage the navigation handlers re-use
 */
public static void addActionHandlers(Stage primaryStage) {
    // Sort Files button: navigate to the SortFiles window.
    sortFilesButton.setOnAction((ActionEvent event) -> {
        new FileSort(primaryStage);
    });

    // Batch Rename button: navigate to the BatchRename window.
    batchRenameButton.setOnAction((ActionEvent event) -> {
        new BatchRename(primaryStage);
    });

    // Remove button: hash every matching file under the chosen directory,
    // then delete every file whose content hash was already seen.
    rmvButton.setOnAction((ActionEvent event) -> {
        // Reset the status line.
        actionTarget.setFill(Color.BLACK);
        actionTarget.setText("");

        // Re-read the path so edits typed into the address box are honored.
        // (new File(...) never returns null, so the old null check was dead.)
        selectedDirectory = new File(address.getText());

        if (selectedDirectory.isDirectory()) {
            // Extensions to scan for, parsed from the file-types textbox.
            String[] extensions = UtilFunctions.parseFileTypes(fileTypes.getText());

            // All matching files under selectedDirectory (recursive).
            List<File> files = (List<File>) FileUtils.listFiles(selectedDirectory, extensions, true);
            HashSet<String> hashCodes = new HashSet<>();
            ArrayList<File> duplicates = new ArrayList<>();

            // Progress reporting values.
            // NOTE(review): this runs on the JavaFX application thread, so
            // intermediate setText calls won't repaint until the handler
            // returns; consider a background Task for large directories.
            actionTarget.setFill(Color.BLACK);
            int totalFileCount = files.size();
            int filesProcessed = 0;

            // First pass: find duplicates by content hash.
            for (File f : files) {
                try {
                    filesProcessed++;
                    actionTarget.setText("Processing file " + filesProcessed + " of " + totalFileCount);

                    // Content hash of the file.
                    String hash = UtilFunctions.makeHash(f);

                    if (hashCodes.contains(hash)) {
                        // Same content already seen: schedule for deletion.
                        duplicates.add(f);
                    } else {
                        hashCodes.add(hash);
                    }

                } catch (Exception ignored) {
                    // Best-effort: a file we cannot read or hash is skipped.
                }
            } //End for

            // Second pass: delete the duplicates.
            filesProcessed = 0;
            totalFileCount = duplicates.size();
            Iterator<File> itr = duplicates.iterator();

            while (itr.hasNext()) {
                try {
                    filesProcessed++;
                    actionTarget.setText("Deleting file " + filesProcessed + " of " + totalFileCount);

                    File file = itr.next();

                    if (!file.delete()) {
                        JOptionPane.showMessageDialog(null, file.getPath() + " not deleted.");
                    }

                } catch (Exception ignored) {
                    // Best-effort: one failed deletion shouldn't abort the rest.
                }
            } //End while

            // NOTE(review): filesProcessed counts attempts, so files that
            // failed to delete are still included in this total.
            actionTarget.setText("Deleted: " + filesProcessed);

        } else {
            actionTarget.setFill(Color.FIREBRICK);
            actionTarget.setText("Invalid directory.");
        }
    });

}

From source file:edu.mayo.cts2.framework.plugin.service.bioportal.profile.resolvedvalueset.BioportalRestResolvedValueSetResolutionService.java

/**
 * Reports the match algorithms this resolution service supports:
 * the standard CONTAINS and EXACT_MATCH references.
 */
@Override
public Set<? extends MatchAlgorithmReference> getSupportedMatchAlgorithms() {
    Set<MatchAlgorithmReference> algorithms = new HashSet<MatchAlgorithmReference>();
    algorithms.add(StandardMatchAlgorithmReference.EXACT_MATCH.getMatchAlgorithmReference());
    algorithms.add(StandardMatchAlgorithmReference.CONTAINS.getMatchAlgorithmReference());
    return algorithms;
}

From source file:mobac.program.model.Atlas.java

/**
 * Validates the atlas data.
 *
 * @return true when the data is INVALID: no atlas name set, or at least
 *         two layers share the same name; false when everything checks out
 */
public boolean checkData() {
    // Missing atlas name -> invalid.
    if (name == null)
        return true;
    // A failed Set.add means we have seen this layer name before.
    HashSet<String> seenNames = new HashSet<String>(layers.size());
    for (LayerInterface layer : layers) {
        if (!seenNames.add(layer.getName()))
            return true; // duplicate layer name found
    }
    return false;
}

From source file:com.illustrationfinder.process.post.HtmlPostProcessor.java

/**
 * Derives keywords for the post behind {@code this.url}.
 *
 * Strategy: normalize the HTML title (lowercase, strip punctuation, drop
 * short words) and use its words; if that yields fewer than
 * MINIMUM_KEYWORDS_COUNT keywords, top up with the most frequent words of
 * the article body extracted by boilerpipe's ArticleExtractor.
 *
 * @return the keywords (possibly fewer than MINIMUM_KEYWORDS_COUNT if the
 *         page doesn't contain enough distinct words), or null when no url
 *         is set or fetching/parsing failed
 */
@Override
public List<String> generateKeywords() {
    // TODO If two words are always close to each other, they should be considered as an expression and managed like one word
    if (this.url == null)
        return null;

    try {
        // Retrieve the document and store it temporary
        try (final InputStream stream = this.url.openStream()) {
            final String rawText = IOUtils.toString(stream);

            // Parse the page; only the title is used below. (The previous
            // revision also read the keywords/description meta tags into
            // unused locals; those were dropped.)
            final Document document = Jsoup.parse(rawText);
            String htmlTitle = document.title();

            // Extract the main article text from the raw HTML
            String content = ArticleExtractor.getInstance().getText(rawText);

            // Now we apply a simple algorithm to get keywords
            //  1) We remove all punctuation marks from the title
            //  2) We remove all words with less than 4 characters
            //  3) We remove excessive spacing and tabulations
            htmlTitle = htmlTitle.toLowerCase();
            htmlTitle = htmlTitle.replaceAll(PUNCTUATION_REGEX, "");
            htmlTitle = htmlTitle.replaceAll(WORD_WITH_LESS_THAN_4_CHARACTERS_REGEX, "");
            htmlTitle = htmlTitle.replaceAll(EXCESSIVE_SPACING_REGEX, " ");

            final List<String> keywords = new ArrayList<>();
            for (final String word : htmlTitle.split(" ")) {
                if (word.length() >= MINIMUM_WORD_LENGTH) {
                    keywords.add(word);
                }
            }

            // If the title alone provided enough keywords, we are done.
            if (keywords.size() >= MINIMUM_KEYWORDS_COUNT) {
                return keywords;
            }

            // Otherwise, mine the article text for its most frequent words.
            content = content.toLowerCase();
            content = content.replaceAll(PUNCTUATION_REGEX, "");
            content = content.replaceAll(WORD_WITH_LESS_THAN_4_CHARACTERS_REGEX, "");
            content = content.replaceAll(EXCESSIVE_SPACING_REGEX, " ");

            // Count word frequencies
            final Map<String, Integer> frequencies = new HashMap<>();
            for (final String word : content.split(" ")) {
                if (frequencies.containsKey(word)) {
                    frequencies.put(word, frequencies.get(word) + 1);
                } else {
                    frequencies.put(word, 1);
                }
            }

            // Bucket words by frequency, ascending (TreeMap natural order).
            final SortedMap<Integer, HashSet<String>> sortedWords = new TreeMap<>();
            for (Map.Entry<String, Integer> entry : frequencies.entrySet()) {
                if (sortedWords.containsKey(entry.getValue())) {
                    sortedWords.get(entry.getValue()).add(entry.getKey());
                } else {
                    final HashSet<String> set = new HashSet<>();
                    set.add(entry.getKey());
                    sortedWords.put(entry.getValue(), set);
                }
            }

            // Add the most frequent words until we reach the minimum keywords count.
            // Fixed: guard on !sortedWords.isEmpty() — the previous revision
            // looped on the keyword count alone and threw
            // NoSuchElementException from lastKey() once every candidate
            // word had been consumed.
            while (keywords.size() < MINIMUM_KEYWORDS_COUNT && !sortedWords.isEmpty()) {
                final HashSet<String> set = sortedWords.get(sortedWords.lastKey());
                final String keyword = set.iterator().next();

                set.remove(keyword);
                if (set.isEmpty()) {
                    sortedWords.remove(sortedWords.lastKey());
                }

                // ">=" (was ">") for consistency with the title filter above.
                if (keyword.length() >= MINIMUM_WORD_LENGTH) {
                    keywords.add(keyword);
                }
            }

            return keywords;
        }
    } catch (BoilerpipeProcessingException e) {
        // TODO proper error reporting
        e.printStackTrace();
    } catch (IOException e) {
        // TODO proper error reporting
        e.printStackTrace();
    }

    return null;
}

From source file:cross.io.AFragmentCommandServiceLoader.java

/**
 * Returns the list of available user commands, given by class names in the <code>fragmentCommands</code> collection.
 *
 * @param of the object factory/*from  w  ww.j  a  v a  2s  .c  o m*/
 * @return the list of user commands
 */
public List<AFragmentCommand> getAvailableUserCommands(ObjectFactory of) {
    HashSet<AFragmentCommand> s = new HashSet<>();
    for (String uc : fragmentCommands) {
        try {
            AFragmentCommand af = of.instantiate(uc, AFragmentCommand.class);
            s.add(af);
        } catch (IllegalArgumentException iae) {
            log.warn(iae.getLocalizedMessage());
        }
    }
    return createSortedListFromSet(s, new ClassNameLexicalComparator());
}

From source file:com.juick.android.Utils.java

/**
 * Inverse of the set-to-string packing: entries are joined with '@', and
 * any literal '@' inside an entry was escaped as "[SOBAKA]".
 *
 * @param str the packed string
 * @return the unpacked set of entries, with the escaping undone
 */
public static Set<String> string2set(String str) {
    HashSet<String> result = new HashSet<String>();
    for (String piece : str.split("@")) {
        // Undo the escaping applied when the set was serialized.
        result.add(piece.replace("[SOBAKA]", "@"));
    }
    return result;
}

From source file:com.evermal.xtractor.MaintenanceClassifierWordMatcher.java

private HashSet<String> hackWords() {
    HashSet<String> words = new HashSet<String>();
    words.add("hack");
    words.add("retarded");
    words.add("at a loss");
    words.add("stupid");
    words.add("remove this code");
    words.add("ugly");
    words.add("take care");
    words.add("something's gone wrong");
    words.add("nuke");
    words.add("is problematic");
    words.add("may cause problem");
    words.add("hacky");
    words.add("unknown why we ever experience this");
    words.add("treat this as a soft error");
    words.add("silly");
    words.add("workaround for bug");
    words.add("kludge");
    words.add("fixme");
    words.add("this isn't quite right");
    words.add("trial and error");
    words.add("give up");
    words.add("this is wrong");
    words.add("hang our heads in shame");
    words.add("temporary solution");
    words.add("causes issue");
    words.add("something bad is going on");
    words.add("cause for issue");
    words.add("this doesn't look right");
    words.add("is this next line safe");
    words.add("this indicates a more fundamental problem");
    words.add("temporary crutch");
    words.add("this can be a mess");
    words.add("this isn't very solid");
    words.add("this is temporary and will go away");
    words.add("is this line really safe");
    //      words.add("there is a problem"); removed to analyze eclipse projects
    words.add("some fatal error");
    words.add("something serious is wrong");
    words.add("don't use this");
    words.add("get rid of this");
    words.add("doubt that this would work");
    words.add("this is bs");
    words.add("give up and go away");
    words.add("risk of this blowing up");
    words.add("just abandon it");
    words.add("prolly a bug");
    words.add("probably a bug");
    words.add("hope everything will work");
    words.add("toss it");
    //      words.add("barf"); removed to analyze eclipse projects
    words.add("something bad happened");
    words.add("fix this crap");
    words.add("yuck");
    words.add("certainly buggy");
    words.add("remove me before production");
    words.add("you can be unhappy now");
    words.add("this is uncool");
    words.add("bail out");
    words.add("it doesn't work yet");
    words.add(" crap ");
    words.add("inconsistency");
    words.add("abandon all hope");
    words.add("kaboom");
    return words;
}