Example usage for java.util HashSet contains

List of usage examples for java.util HashSet contains

Introduction

On this page you can find example usage for java.util.HashSet.contains().

Prototype

public boolean contains(Object o) 

Document

Returns true if this set contains the specified element.
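
As a quick, self-contained illustration (class and variable names are invented for this sketch), contains performs an expected constant-time membership test based on the element's hashCode and equals:

public class ContainsExample {
    public static void main(String[] args) {
        java.util.HashSet<String> colors = new java.util.HashSet<>();
        colors.add("red");
        colors.add("green");

        // contains returns true only if an equal element is in the set.
        System.out.println(colors.contains("red")); // true
        System.out.println(colors.contains("blue")); // false
    }
}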

Usage

From source file:carmen.LocationResolver.java

@SuppressWarnings("unchecked")
protected void loadLocationFile(String filename) throws JsonParseException, JsonMappingException, IOException {
    ObjectMapper mapper = new ObjectMapper();

    Scanner inputScanner = new Scanner(new FileInputStream(filename));
    while (inputScanner.hasNextLine()) {
        String line = inputScanner.nextLine();
        Map<String, Object> locationObj = mapper.readValue(line, Map.class);
        Location location = Location.parseLocationFromJsonObj(locationObj);

        List<String> aliases = (List<String>) locationObj.get("aliases");
        this.idToLocation.put(location.getId(), location);
        this.locationToId.put(location, location.getId());
        HashSet<String> justAddedAliases = new HashSet<String>();
        if (aliases != null) {
            for (String alias : aliases) {
                if (justAddedAliases.contains(alias))
                    continue;

                if (this.locationNameToLocation.containsKey(alias))
                    logger.warn("Duplicate location name: " + alias);
                else
                    this.locationNameToLocation.put(alias, location);
                justAddedAliases.add(alias);

                // Add entries without punctuation.
                String newEntry = alias.replaceAll("\\p{Punct}", " ").replaceAll("\\s+", " ");
                if (justAddedAliases.contains(newEntry))
                    continue;

                if (!newEntry.equals(alias)) {
                    if (this.locationNameToLocation.containsKey(newEntry))
                        logger.warn("Duplicate location name: " + newEntry);
                    else
                        this.locationNameToLocation.put(newEntry, location);
                }

                justAddedAliases.add(newEntry);

            }
        }
    }
    inputScanner.close();
}
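
A note on the dedup pattern above: since HashSet.add returns false when the element is already present, the contains check and the later add can be collapsed into one call. A minimal sketch of just that skeleton (not the full alias registration logic):

HashSet<String> justAddedAliases = new HashSet<String>();
for (String alias : aliases) {
    // add() returns false for an alias seen earlier in this loop,
    // replacing the separate contains() check.
    if (!justAddedAliases.add(alias))
        continue;
    // ... register the alias ...
}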

From source file:com.cloudera.recordservice.tests.MiniClusterController.java

/**
 * This method checks the current state of the MiniClusterController object
 * against the actual state of the system. Returns false if some running
 * cluster nodes are not tracked by this MiniClusterController, or if some
 * nodes tracked by this MiniClusterController are not running. Returns true
 * otherwise.
 */
public boolean isClusterStateCorrect() {
    HashSet<Integer> pidSet = getRunningMiniNodePids();
    // Check the cluster list
    if (pidSet.size() > 0 && (clusterList_ == null || clusterList_.size() <= 0)) {
        printPids(pidSet, "were found but are not being tracked by the MiniClusterController");
        return false;
    } else {
        for (MiniClusterNode node : clusterList_) {
            if (!pidSet.contains(node.pid_)) {
                System.err.println("Node with pid = " + node.pid_ + " was expected but not found");
                return false;
            }
            // Two nodes cannot share the same process ID
            pidSet.remove(node.pid_);
        }
        if (pidSet.size() > 0) {
            printPids(pidSet, "were found but are not being tracked by the MiniClusterController");
            return false;
        }
    }
    return true;
}
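
HashSet.remove likewise reports whether the element was present, so the contains test followed by remove in the loop above could be combined. A sketch using the same fields as the example:

for (MiniClusterNode node : clusterList_) {
    // remove() returns false if the pid was not tracked,
    // merging the membership test and the removal.
    if (!pidSet.remove(node.pid_)) {
        System.err.println("Node with pid = " + node.pid_ + " was expected but not found");
        return false;
    }
}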

From source file:net.shibboleth.idp.attribute.resolver.ad.impl.ScriptedAttributeTest.java

@Test
public void v2Context()
        throws IOException, ComponentInitializationException, ResolutionException, ScriptException {

    final ScriptedAttributeDefinition scripted = new ScriptedAttributeDefinition();
    scripted.setId("scripted");
    scripted.setScript(new EvaluableScript(SCRIPT_LANGUAGE, getScript("requestContext.script")));
    scripted.initialize();

    IdPAttribute result = scripted.resolve(generateContext());
    HashSet<IdPAttributeValue> set = new HashSet<>(result.getValues());
    Assert.assertEquals(set.size(), 3);
    Assert.assertTrue(set.contains(new StringAttributeValue(TestSources.PRINCIPAL_ID)));
    Assert.assertTrue(set.contains(new StringAttributeValue(TestSources.IDP_ENTITY_ID)));
    Assert.assertTrue(set.contains(new StringAttributeValue(TestSources.SP_ENTITY_ID)));

}
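
The assertions above work because contains compares elements with equals and hashCode rather than identity, so StringAttributeValue must implement both consistently. A minimal illustration with plain strings:

HashSet<String> values = new HashSet<>(java.util.Arrays.asList("a", "b"));
// contains matches any equal object, not just the same instance.
boolean found = values.contains(new String("a")); // true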

From source file:net.sourceforge.eclipsetrader.fix.core.BanzaiTradingProvider.java

private boolean alreadyProcessed(ExecID execID, SessionID sessionID) {
    HashSet set = (HashSet) execIDs.get(sessionID);
    if (set == null) {
        set = new HashSet();
        set.add(execID);
        execIDs.put(sessionID, set);
        return false;
    } else {
        if (set.contains(execID))
            return true;
        set.add(execID);
        return false;
    }
}
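
This is the classic "seen before?" idiom; add alone can answer it, and Map.computeIfAbsent (Java 8+) can handle the lazy per-session set creation. A sketch assuming execIDs is declared as Map<SessionID, HashSet<ExecID>> (the original uses a raw type):

private boolean alreadyProcessed(ExecID execID, SessionID sessionID) {
    // computeIfAbsent lazily creates the per-session set;
    // add() returns false exactly when the execID was already recorded.
    HashSet<ExecID> set = execIDs.computeIfAbsent(sessionID, k -> new HashSet<>());
    return !set.add(execID);
}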

From source file:gsn.http.GMLHandler.java

public String buildOutput(String reqName, String reqGroup, String reqUsername, String reqPassword) {

    boolean authenticateUserFromURL = false;
    User user = null;

    if (Main.getContainerConfig().isAcEnabled()) {

        if ((reqUsername != null) && (reqPassword != null)) {
            authenticateUserFromURL = true;
            user = UserUtils.allowUserToLogin(reqUsername, reqPassword);
        }
    }

    SimpleDateFormat sdf = new SimpleDateFormat(Main.getContainerConfig().getTimeFormat());
    StringBuilder outsb = new StringBuilder(
            "<gsn:FeatureCollection xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"./gsn.xsd\" xmlns:gsn=\"http://gsn.ch/\" xmlns:gml=\"http://www.opengis.net/gml\"> \n");
    Iterator<VSensorConfig> vsIterator = Mappings.getAllVSensorConfigs();
    HashSet<String> sensorsSet = new HashSet<String>();
    if (reqName != null && reqName.contains(","))
        sensorsSet = new HashSet<String>(Arrays.asList(reqName.split(",")));
    else
        sensorsSet.add(reqName);

    while (vsIterator.hasNext()) {
        StringBuilder sb = new StringBuilder();
        String lat = null;
        String lon = null;
        VSensorConfig sensorConfig = vsIterator.next();
        if (reqName != null && !sensorsSet.contains(sensorConfig.getName()))
            continue;
        if (reqGroup != null && !(sensorConfig.getName().startsWith(reqGroup + "_")))
            continue;

        if (Main.getContainerConfig().isAcEnabled()) {
            if (user == null) {
                if (authenticateUserFromURL)
                    continue; // the provided username and password were rejected since they don't map to a valid User object
                else // no username was provided, show only public sensors
                if (DataSource.isVSManaged(sensorConfig.getName()))
                    continue; //skip sensor if it is managed by access control
            } else // user authenticated, verify that it has the right credentials
            if (!user.hasReadAccessRight(sensorConfig.getName()) && !user.isAdmin()
                    && !DataSource.isVSManaged(sensorConfig.getName()))
                continue;
        }

        for (KeyValue df : sensorConfig.getAddressing()) {
            if (StringEscapeUtils.escapeXml(df.getKey().toString().toLowerCase()).contentEquals("latitude"))
                lat = new String(StringEscapeUtils.escapeXml(df.getValue().toString()));
            if (StringEscapeUtils.escapeXml(df.getKey().toString().toLowerCase()).contentEquals("longitude"))
                lon = new String(StringEscapeUtils.escapeXml(df.getValue().toString()));
        }
        if (lat != null && lon != null) {
            sb.append("<gml:featureMember>\n");
            sb.append("<gsn:sensors");
            sb.append(" fid=\"").append(sensorConfig.getName()).append("\"");
            sb.append(">\n");
            sb.append("\t<gsn:geometryProperty><gml:Point><gml:coordinates>").append(lon).append(",")
                    .append(lat).append("</gml:coordinates></gml:Point></gsn:geometryProperty>\n");
        } else
            continue;

        if (lat.isEmpty() || lon.isEmpty()) // skip sensors with empty coordinates
            continue;

        ArrayList<StreamElement> ses = getMostRecentValueFor(sensorConfig.getName());
        int counter = 1;
        if (ses != null) {
            for (StreamElement se : ses) {
                sb.append("\t<gsn:sensor>").append(sensorConfig.getName()).append("</gsn:sensor>\n");
                for (KeyValue df : sensorConfig.getAddressing()) {
                    sb.append("\t<gsn:")
                            .append(StringEscapeUtils.escapeXml(df.getKey().toString().toLowerCase()))
                            .append(">");
                    sb.append(StringEscapeUtils.escapeXml(df.getValue().toString()));
                    sb.append("</gsn:")
                            .append(StringEscapeUtils.escapeXml(df.getKey().toString().toLowerCase()))
                            .append(">\n");
                }
                for (DataField df : sensorConfig.getOutputStructure()) {
                    sb.append("\t<gsn:").append(df.getName().toLowerCase()).append(">");
                    if (se != null)
                        if (df.getType().toLowerCase().trim().indexOf("binary") > 0)
                            sb.append(se.getData(df.getName()));
                        else
                            sb.append(se.getData(StringEscapeUtils.escapeXml(df.getName())));
                    sb.append("</gsn:").append(df.getName().toLowerCase()).append(">\n");
                }
                counter++;
            }
        }
        sb.append("</gsn:sensors>\n");
        sb.append("</gml:featureMember>\n");
        outsb.append(sb);
    }
    outsb.append("</gsn:FeatureCollection>\n");
    return outsb.toString();
}

From source file:com.microsoft.tfs.core.clients.workitem.internal.query.QueryImpl.java

private void initialize(final WITContext witContext, final String wiql, final Map<String, Object> queryContext,
        final int[] ids, final int[] revs, final boolean dayPrecision) {
    if (log.isDebugEnabled()) {
        log.debug(MessageFormat.format("using WIQL: [{0}]", wiql)); //$NON-NLS-1$
    }

    Check.notNull(witContext, "witContext"); //$NON-NLS-1$
    Check.notNull(wiql, "wiql"); //$NON-NLS-1$

    if (ids != null) {
        if (revs != null && ids.length != revs.length) {
            throw new IllegalArgumentException(MessageFormat.format(
                    "Should be equal numbers of Ids and Revs passed. ids=int[{0}], revs=[{1}]", //$NON-NLS-1$
                    ids.length, revs.length));
        }

        // Check all ID's are unique
        final HashSet<Integer> idSet = new HashSet<Integer>(ids.length);
        for (int i = 0; i < ids.length; i++) {
            if (idSet.contains(new Integer(ids[i]))) {
                throw new DuplicateBatchReadParameterException();
            }

            idSet.add(new Integer(ids[i]));
        }

    }

    this.witContext = witContext;

    try {
        final WIQLAdapter wiqlAdapter = new WIQLAdapter(witContext);
        wiqlAdapter.setContext(queryContext);
        wiqlAdapter.setDayPrecision(dayPrecision);

        // Parse syntax
        wiqlNode = Parser.parseSyntax(wiql);
        wiqlNode.bind(wiqlAdapter, null, null);

        final boolean serverSupportsWIQLEvaluation = witContext.getServerInfo()
                .isSupported(SupportedFeatures.WIQL_EVALUATION_ON_SERVER);

        if (!serverSupportsWIQLEvaluation) {
            // Optimize on the client
            wiqlNode = (NodeSelect) wiqlNode.optimize(wiqlAdapter, null, null);
        }

        if (ids != null) {
            // Check for where clause in batch mode
            if (wiqlNode.getWhere() != null || wiqlNode.getOrderBy() != null) {
                throw new ValidationException(
                        Messages.getString("QueryImpl.WhereAndOrderByClausesNotSupportedOnParameterizedQuery")); //$NON-NLS-1$
            }

            if (isLinkQuery()) {
                throw new ValidationException(
                        Messages.getString("QueryImpl.FromClauseCannotSpecifyLinksOnParameterizedQuery")); //$NON-NLS-1$
            }

            batchIds = ids;
            batchRevs = revs;
        } else {
            if (serverSupportsWIQLEvaluation) {
                // Provide WIQL statement to the server as it is and
                // include context for macros resolution (e.g. current
                // project and team names)
                queryXML = wiqlAdapter.getQueryXML(wiql, queryContext, isLinkQuery(), dayPrecision);
            } else {
                // Use parser to build the query XML
                if (isLinkQuery()) {
                    final LinkQueryXMLResult result = wiqlAdapter.getLinkQueryXML(wiqlNode);
                    queryXML = result.getLinkXML();
                    linkGroup = result.getLinkGroup();
                } else {
                    queryXML = wiqlAdapter.getQueryXML(wiqlNode);
                }
            }
        }

        queryAsOf = wiqlAdapter.getAsOfUTC(wiqlNode);
        // resultAsOf = queryAsOf;

        // Create display field list
        displayFieldList = (DisplayFieldListImpl) wiqlAdapter.getDisplayFieldList(witContext, wiqlNode);

        // Create sort field list
        sortFieldList = (SortFieldListImpl) wiqlAdapter.getSortFieldList(witContext, wiqlNode);
    } catch (final SyntaxException ex) {
        throw new InvalidQueryTextException(ex.getDetails(), wiql, ex);
    }
}
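
In the uniqueness check on ids above, autoboxing makes the explicit new Integer(...) calls unnecessary, and add()'s return value can stand in for the contains test. A sketch of just that loop with the same variables:

final HashSet<Integer> idSet = new HashSet<Integer>(ids.length);
for (final int id : ids) {
    // add() returns false when the boxed id is already in the set.
    if (!idSet.add(id)) {
        throw new DuplicateBatchReadParameterException();
    }
}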

From source file:edu.tum.cs.conqat.quamoco.qiesl.QIESLEngine.java

/**
 * @param expression
 * @param mandatoryVariables
 * @param nameMapping
 * @param usedTechnicalNames
 * @throws QIESLException
 */
private void checkMandatoryVariables(String expression, Map<String, Object> mandatoryVariables,
        Map<String, String> nameMapping, HashSet<String> usedTechnicalNames) throws QIESLException {
    HashSet<String> unusedModelVariables = new HashSet<String>(mandatoryVariables.keySet());
    for (String technicalName : usedTechnicalNames) {
        unusedModelVariables.remove(nameMapping.get(technicalName));
    }

    if (!unusedModelVariables.isEmpty() && !usedTechnicalNames.contains(ALL_IMPACTS_AND_REFINEMENTS_LITERAL)) {
        throw new QIESLException("Expression " + expression + " does not use mandatory variables: "
                + StringUtils.concat(unusedModelVariables, ", "));
    }
}

From source file:main.java.repartition.SimpleTr.java

void populateMigrationList(Cluster cluster, WorkloadBatch wb) {
    this.migrationPlanList = new ArrayList<MigrationPlan>();

    // Based on: https://code.google.com/p/combinatoricslib/
    // Create the initial vector
    ICombinatoricsVector<Integer> initialVector = Factory.createVector(this.serverDataSet.keySet());

    // Create a simple permutation generator to generate N-permutations of the initial vector
    Generator<Integer> permutationGen = Factory.createPermutationWithRepetitionGenerator(initialVector,
            this.serverDataSet.size());
    HashMap<HashSet<Integer>, Integer> uniqueFromSet = new HashMap<HashSet<Integer>, Integer>();

    // Get all possible N-permutations      
    HashMap<Integer, HashSet<Integer>> dataMap;

    idtGainRank = new TreeSet<Double>();
    lbGainRank = new TreeSet<Double>();

    for (ICombinatoricsVector<Integer> permutations : permutationGen) {
        HashSet<Integer> fromSet = new HashSet<Integer>();

        for (int i = 0; i < permutations.getSize() - 1; i++)
            fromSet.add(permutations.getValue(i));

        int to = permutations.getValue(permutations.getSize() - 1);

        if (!fromSet.contains(to)) {
            if (!uniqueFromSet.containsKey(fromSet)
                    || (uniqueFromSet.containsKey(fromSet) && !uniqueFromSet.get(fromSet).equals(to))) {

                //System.out.println(">> fromSet = "+fromSet.size());
                if (fromSet.size() <= (this.serverDataSet.size() - 1)) {

                    dataMap = new HashMap<Integer, HashSet<Integer>>();
                    int req_data_mgr = 0;

                    for (int from : fromSet) {
                        req_data_mgr += this.serverDataSet.get(from).size();
                        dataMap.put(from, this.serverDataSet.get(from));
                    }

                    MigrationPlan m = new MigrationPlan(fromSet, to, dataMap, req_data_mgr);
                    this.migrationPlanList.add(m); // From Source Server

                    m.delta_idt = getDeltaIdt(wb, this, m);
                    m.delta_lb = getDeltaLb(cluster, this, m);

                    idtGainRank.add(m.delta_idt);
                    lbGainRank.add(m.delta_lb);

                    if (fromSet.size() > 1)
                        uniqueFromSet.put(fromSet, to);
                }
            } //end-if()
        } //end-if()   
    } // end-for(

    // Get the maximum Idt and Lb gains for this transaction for normalization purpose
    max_delta_idt = idtGainRank.last();
    max_delta_lb = lbGainRank.last();

    // Sorting migration list
    sortMigrationPlanList();

    this.max_idt_gain = this.migrationPlanList.get(0).delta_idt;
    this.max_lb_gain = this.migrationPlanList.get(0).delta_lb;
    this.min_data_mgr = this.migrationPlanList.get(0).req_data_mgr;
    this.max_combined_weight = this.migrationPlanList.get(0).combined_weight;

    // Testing
    /*System.out.println("-------------------------------------------------------------------------");
    //System.out.println("max_delta_id = "+max_delta_idt);
    //System.out.println("max_delta_lb = "+max_delta_lb);
    System.out.println("Sorting based on combined ranking ...");
    System.out.println("--> "+this.toString());
    for(MigrationPlan m : this.migrationPlanList) {
       System.out.println("\t"+m.toString());
    }*/
}

From source file:com.thinkbiganalytics.ingest.TableMergeSyncSupportTest.java

private void doTestMergePK(String targetSchema, String targetTable, PartitionSpec spec) {

    List<String> results = fetchEmployees(targetSchema, targetTable);
    assertEquals(1, results.size());

    ColumnSpec columnSpec1 = new ColumnSpec("id", "String", "", true, false, false);
    ColumnSpec columnSpec2 = new ColumnSpec("name", "String", "", false, false, false);
    ColumnSpec[] columnSpecs = Arrays.asList(columnSpec1, columnSpec2).toArray(new ColumnSpec[0]);
    // Call merge
    mergeSyncSupport.doPKMerge(sourceSchema, sourceTable, targetSchema, targetTable, spec, processingPartition,
            columnSpecs);

    // We should have 4 records
    results = fetchEmployees(targetSchema, targetTable);
    assertEquals(4, results.size());
    assertFalse("Should not have old valur", results.stream().anyMatch(s -> s.contains("OLD")));

    // Run merge with dedupe; we should get the following two additional records. The result should not include any duplicates in the target table.
    hiveShell.execute(
            "insert into emp_sr.employee_valid partition(processing_dttm='20160119074340') (  `id`,  `name`,`company`,`zip`,`phone`,`email`,  `hired`,`country`) values (100,'Bruce',"
                    + "'OLD'," + "'94550','555-1212','bruce@acme.org','2016-01-01','Canada');");
    hiveShell.execute(
            "insert into emp_sr.employee_valid partition(processing_dttm='20160119074340') (  `id`,  `name`,`company`,`zip`,`phone`,`email`,  `hired`,`country`) values (101,'Harry',"
                    + "'OLD'," + "'94550','555-1212','harry@acme.org','2016-01-01','Canada');");

    mergeSyncSupport.doPKMerge(sourceSchema, sourceTable, targetSchema, targetTable, spec, processingPartition,
            columnSpecs);

    results = fetchEmployees(targetSchema, targetTable);
    assertEquals(6, results.size());
    // Verify no duplicates exist in the table
    HashSet<String> existing = new HashSet<>();
    for (String r : results) {
        assertFalse(existing.contains(r));
        existing.add(r);
    }

    hiveShell.execute(
            "insert into emp_sr.employee_valid partition(processing_dttm='20160119074540') (  `id`,  `name`,`company`,`zip`,`phone`,`email`,  `hired`,`country`) values (100,'Bruce',"
                    + "'ABC'," + "'94550','555-1212','bruce@acme.org','2016-01-01','Canada');");
    hiveShell.execute(
            "insert into emp_sr.employee_valid partition(processing_dttm='20160119074540') (  `id`,  `name`,`company`,`zip`,`phone`,`email`,  `hired`,`country`) values (101,'Harry',"
                    + "'ABC'," + "'94550','555-1212','harry@acme.org','2016-01-01','Canada');");
    hiveShell.execute(
            "insert into emp_sr.employee_valid partition(processing_dttm='20160119074540') (  `id`,  `name`,`company`,`zip`,`phone`,`email`,  `hired`,`country`) values (102,'Buddy',"
                    + "'ABC'," + "'94550','555-1212','buddy@acme.org','2016-01-01','Canada');");

    mergeSyncSupport.doPKMerge(sourceSchema, sourceTable, targetSchema, targetTable, spec, "20160119074540",
            columnSpecs);
    results = fetchEmployees(targetSchema, targetTable);
    assertEquals(7, results.size());
    existing = new HashSet<>();
    for (String r : results) {
        assertFalse(existing.contains(r));
        existing.add(r);
    }

    assertFalse("Should not have old valur", results.stream().anyMatch(s -> s.contains("OLD")));

}
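
The duplicate check in this test can also be phrased through add(), which returns true only the first time an element is inserted. A sketch with the same variables as the test above (JUnit 4 message-first assertion):

HashSet<String> existing = new HashSet<>();
for (String r : results) {
    // add() returns false on a duplicate row, failing the assertion.
    assertTrue("Duplicate row: " + r, existing.add(r));
}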

From source file:com.gfan.sbbs.utils.images.ImageManager.java

public void cleanup(HashSet<String> keepers) {
    String[] files = mContext.fileList();
    HashSet<String> hashedUrls = new HashSet<String>();

    for (String imageUrl : keepers) {
        hashedUrls.add(getMd5(imageUrl));
    }

    for (String file : files) {
        if (!hashedUrls.contains(file)) {
            Log.d(TAG, "Deleting unused file: " + file);
            mContext.deleteFile(file);
        }
    }
}
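
Hashing the keeper URLs into a set up front keeps every contains lookup at expected constant time, regardless of how many cached files exist. The same set could be built with streams (Java 8+), assuming getMd5 is the instance method used above:

java.util.Set<String> hashedUrls = keepers.stream()
        .map(this::getMd5) // hash each kept URL once
        .collect(java.util.stream.Collectors.toSet());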