Example usage for java.util HashSet contains

List of usage examples for java.util HashSet contains

Introduction

On this page you can find example usage for java.util HashSet contains.

Prototype

public boolean contains(Object o) 

Document

Returns true if this set contains the specified element.
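
A minimal, self-contained sketch of the method in isolation (class name and element values are illustrative only):

import java.util.HashSet;

public class ContainsDemo {
    public static void main(String[] args) {
        HashSet<String> colors = new HashSet<>();
        colors.add("red");
        colors.add("green");

        // contains(Object) runs in expected constant time, delegating to hashCode() and equals()
        System.out.println(colors.contains("red"));  // true
        System.out.println(colors.contains("blue")); // false
    }
}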

Usage

From source file:de.iteratec.iteraplan.general.PropertiesTest.java

public Set<String> checkFileForDuplicateKeys(String path) {
    HashSet<String> errorMessages = new HashSet<String>();

    InputStream resourceAsStream = PropertiesTest.class.getResourceAsStream(path);
    if (resourceAsStream == null) {
        try {
            resourceAsStream = new FileInputStream(new File(path));
        } catch (FileNotFoundException fnfe) {
            errorMessages.add("File not found: " + path);
            // Return early: the stream is still null, so reading from it below would throw a NullPointerException.
            return errorMessages;
        }
    }

    BufferedReader br = new BufferedReader(new InputStreamReader(resourceAsStream));
    HashSet<String> keys = new HashSet<String>();
    try {
        String strLine = br.readLine();
        // Process every line; stopping at the first blank line would skip any keys after it.
        while (strLine != null) {
            if (strLine.contains(EQUALs)) {
                String[] keyValuePair = strLine.split(EQUALs);
                String key = keyValuePair[0];
                if (keys.contains(key)) {
                    errorMessages.add("Duplicate key " + key + " detected in " + path);
                } else {
                    keys.add(key);
                }
            } else if (!strLine.isEmpty() && strLine.charAt(0) != '#') {
                System.out.println("WARNING: found a probably malformatted line in " + path);
            }
            strLine = br.readLine();
        }
        br.close();
    } catch (IOException ex) {
        fail("IO Exception in file " + path);
    }
    return errorMessages;
}
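
A note on the snippet above: Set.add already returns false when the element is present, so the separate contains()/add() pair can be collapsed into a single call. A minimal, self-contained sketch (names and data are illustrative):

import java.util.HashSet;
import java.util.Set;

public class DuplicateKeyDemo {
    public static void main(String[] args) {
        Set<String> keys = new HashSet<>();
        for (String key : new String[] { "timeout", "retries", "timeout" }) {
            // add() returns false if the key was already in the set
            if (!keys.add(key)) {
                System.out.println("Duplicate key " + key + " detected");
            }
        }
    }
}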

From source file:com.amazonaws.service.apigateway.importer.impl.sdk.ApiGatewaySdkRamlApiImporter.java

private void cleanupMethods(RestApi api, Resource resource, Map<ActionType, Action> actions) {
    final HashSet<String> methods = new HashSet<>();

    for (ActionType action : actions.keySet()) {
        methods.add(action.toString());
    }

    for (Method m : resource.getResourceMethods().values()) {
        String httpMethod = m.getHttpMethod().toUpperCase();

        if (!methods.contains(httpMethod)) {
            LOG.info(format("Removing deleted method %s for resource %s", httpMethod, resource.getId()));

            m.deleteMethod();
        }
    }
}

From source file:edu.ku.brc.specify.config.FixDBAfterLogin.java

/**
 * Fixes the default date values defined in the form views; runs only once, guarded by the FIX_DEFDATES_PREF preference.
 */
public static void fixDefaultDates() {
    boolean doFix = !AppPreferences.getGlobalPrefs().getBoolean(FIX_DEFDATES_PREF, false);
    //log.debug("fixDefaultDates -  Going To Fix["+doFix+"]");
    if (doFix) {
        HashMap<DBTableInfo, List<FormCellFieldIFace>> tblToFldHash = new HashMap<DBTableInfo, List<FormCellFieldIFace>>();
        HashSet<String> nameHash = new HashSet<String>();

        for (ViewIFace view : ((SpecifyAppContextMgr) AppContextMgr.getInstance()).getEntirelyAllViews()) {
            String tableClassName = view.getClassName();
            DBTableInfo ti = DBTableIdMgr.getInstance().getByClassName(tableClassName);
            if (ti != null) {
                if (nameHash.contains(tableClassName)) {
                    continue;
                }
                nameHash.add(tableClassName);

                //log.debug(tableClassName);

                for (AltViewIFace avi : view.getAltViews()) {
                    if (avi.getMode() == AltViewIFace.CreationMode.EDIT) {
                        ViewDefIFace vd = (ViewDefIFace) avi.getViewDef();
                        if (vd instanceof FormViewDef) {
                            FormViewDefIFace fvd = (FormViewDefIFace) vd;
                            for (FormRowIFace fri : fvd.getRows()) {
                                for (FormCellIFace fci : fri.getCells()) {
                                    if (fci instanceof FormCellFieldIFace) {
                                        //log.debug(ti.getName()+" - "+fci.getIdent()+"  "+fci.getName());

                                        FormCellFieldIFace fcf = (FormCellFieldIFace) fci;
                                        String defValue = fcf.getDefaultValue();
                                        if (StringUtils.isNotEmpty(defValue) && !defValue.equals("Today")) {
                                            List<FormCellFieldIFace> fieldList = tblToFldHash.get(ti);
                                            if (fieldList == null) {
                                                fieldList = new ArrayList<FormCellFieldIFace>();
                                                tblToFldHash.put(ti, fieldList);
                                            }
                                            fieldList.add(fcf);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        log.debug("Number of Tables Found[" + tblToFldHash.size() + "]");
        processTableDefaultDates(tblToFldHash, false);

        AppPreferences.getGlobalPrefs().putBoolean(FIX_DEFDATES_PREF, true);
    }
}

From source file:com.pinterest.arcee.autoscaling.AwsAutoScaleGroupManager.java

@Override
public ASGStatus getAutoScalingGroupStatus(String groupName) throws Exception {
    DescribeAutoScalingGroupsRequest request = new DescribeAutoScalingGroupsRequest();
    List<String> groupNames = new ArrayList<>();
    groupNames.add(groupName);
    request.setAutoScalingGroupNames(groupNames);
    DescribeAutoScalingGroupsResult result = aasClient.describeAutoScalingGroups(request);
    List<AutoScalingGroup> groups = result.getAutoScalingGroups();
    if (groups.isEmpty()) {
        return ASGStatus.UNKNOWN;
    }

    AutoScalingGroup group = groups.get(0);
    List<SuspendedProcess> suspendedProcesses = group.getSuspendedProcesses();
    HashSet<String> processName = new HashSet<>();
    for (SuspendedProcess process : suspendedProcesses) {
        processName.add(process.getProcessName());
    }
    if (processName.contains(PROCESS_ALARMNOTIFICATION) && processName.contains(PROCESS_SCHEDULEDACTIONS)) {
        return ASGStatus.DISABLED;
    } else {
        return ASGStatus.ENABLED;
    }
}
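
When several elements must all be present, as with the two suspended processes above, Set.containsAll can replace chained contains() calls. A minimal sketch with illustrative process names (the real PROCESS_* constants are defined elsewhere in that class):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class ContainsAllDemo {
    public static void main(String[] args) {
        Set<String> processNames = new HashSet<>(Arrays.asList("AlarmNotification", "ScheduledActions"));

        // containsAll() is true only when every element of its argument is in the set
        boolean disabled = processNames.containsAll(Arrays.asList("AlarmNotification", "ScheduledActions"));
        System.out.println(disabled ? "DISABLED" : "ENABLED"); // DISABLED
    }
}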

From source file:eionet.cr.dao.virtuoso.VirtuosoFolderDAO.java

/**
 * @param sqlConn the SQL connection used to insert the sources
 * @param statements
 * @throws SQLException
 */
private void createNeverHarvestedSources(Connection sqlConn, List<Statement> statements) throws SQLException {

    // Create harvest sources for all distinct contexts found in the above statements.
    // Doing it one-by-one for convenience, because the number of distinct contexts in the
    // given statements should never be more than a couple or so.

    HashSet<String> sourcesDone = new HashSet<String>();
    for (Statement statement : statements) {

        String sourceUrl = statement.getContext().stringValue();
        if (!sourcesDone.contains(sourceUrl)) {
            List<Object> values = new ArrayList<Object>();
            values.add(sourceUrl);
            values.add(Hashes.spoHash(sourceUrl));
            SQLUtil.executeUpdate(INSERT_NEVER_HARVESTED_SOURCE_SQL, values, sqlConn);
            sourcesDone.add(sourceUrl);
        }
    }
}

From source file:net.shibboleth.idp.attribute.resolver.ad.impl.ScriptedAttributeTest.java

@Test
public void examples() throws ScriptException, IOException, ComponentInitializationException {

    IdPAttribute attribute = runExample("example1.script", "example1.attribute.xml", "swissEduPersonUniqueID");

    Assert.assertEquals(attribute.getValues().iterator().next().getValue(),
            DigestUtils.md5Hex("12345678some#salt#value#12345679") + "@switch.ch");

    attribute = runExample("example2.script", "example2.attribute.xml", "eduPersonAffiliation");
    HashSet<IdPAttributeValue> set = new HashSet(attribute.getValues());
    Assert.assertEquals(set.size(), 3);/*from  w ww . ja  v  a2s.c  o m*/
    Assert.assertTrue(set.contains(new StringAttributeValue("affiliate")));
    Assert.assertTrue(set.contains(new StringAttributeValue("student")));
    Assert.assertTrue(set.contains(new StringAttributeValue("staff")));

    attribute = runExample("example3.script", "example3.attribute.xml", "eduPersonAffiliation");
    set = new HashSet<>(attribute.getValues());
    Assert.assertEquals(set.size(), 2);
    Assert.assertTrue(set.contains(new StringAttributeValue("member")));
    Assert.assertTrue(set.contains(new StringAttributeValue("staff")));

    attribute = runExample("example3.script", "example3.attribute.2.xml", "eduPersonAffiliation");
    set = new HashSet<>(attribute.getValues());
    Assert.assertEquals(set.size(), 3);
    Assert.assertTrue(set.contains(new StringAttributeValue("member")));
    Assert.assertTrue(set.contains(new StringAttributeValue("staff")));
    Assert.assertTrue(set.contains(new StringAttributeValue("walkin")));

    attribute = runExample("example4.script", "example4.attribute.xml", "eduPersonEntitlement");
    set = new HashSet<>(attribute.getValues());
    Assert.assertEquals(set.size(), 1);
    Assert.assertTrue(set.contains(new StringAttributeValue("urn:mace:dir:entitlement:common-lib-terms")));

    attribute = runExample("example4.script", "example4.attribute.2.xml", "eduPersonEntitlement");
    set = new HashSet<>(attribute.getValues());
    Assert.assertEquals(set.size(), 2);
    Assert.assertTrue(set.contains(new StringAttributeValue("urn:mace:dir:entitlement:common-lib-terms")));
    Assert.assertTrue(set.contains(new StringAttributeValue("LittleGreenMen")));

    attribute = runExample("example4.script", "example4.attribute.3.xml", "eduPersonEntitlement");
    Assert.assertNull(attribute);

}
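
Since Set.equals compares elements irrespective of iteration order, the size-plus-contains assertion pattern used throughout this test can also be expressed as a single whole-set comparison. A minimal, self-contained sketch (values taken from the test above):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SetEqualsDemo {
    public static void main(String[] args) {
        Set<String> actual = new HashSet<>(Arrays.asList("member", "staff"));

        // Set.equals() is order-insensitive, so one comparison replaces
        // a size assertion plus several contains() assertions
        Set<String> expected = new HashSet<>(Arrays.asList("staff", "member"));
        System.out.println(actual.equals(expected)); // true
    }
}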

From source file:ca.uhn.fhir.jpa.dao.FhirSearchDao.java

private List<Long> doSearch(String theResourceName, SearchParameterMap theParams, Long theReferencingPid) {
    FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);

    List<Long> pids = null;

    /*
     * Handle textual params
     */
    /*
    for (String nextParamName : theParams.keySet()) {
        for (List<? extends IQueryParameterType> nextAndList : theParams.get(nextParamName)) {
            for (Iterator<? extends IQueryParameterType> orIterator = nextAndList.iterator(); orIterator.hasNext();) {
                IQueryParameterType nextParam = orIterator.next();
                if (nextParam instanceof TokenParam) {
                    TokenParam nextTokenParam = (TokenParam) nextParam;
                    if (nextTokenParam.isText()) {
                        orIterator.remove();
                        QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceIndexedSearchParamString.class).get();
                        BooleanJunction<?> bool = qb.bool();

                        bool.must(qb.keyword().onField("myParamName").matching(nextParamName).createQuery());
                        if (isNotBlank(theResourceName)) {
                            bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
                        }

                        //@formatter:off
                        String value = nextTokenParam.getValue().toLowerCase();
                        bool.must(qb.keyword().onField("myValueTextEdgeNGram").matching(value).createQuery());
                        //@formatter:on

                        FullTextQuery ftq = em.createFullTextQuery(bool.createQuery(), ResourceIndexedSearchParamString.class);

                        List<?> resultList = ftq.getResultList();
                        pids = new ArrayList<Long>();
                        for (Object next : resultList) {
                            ResourceIndexedSearchParamString nextAsArray = (ResourceIndexedSearchParamString) next;
                            pids.add(nextAsArray.getResourcePid());
                        }
                    }
                }
            }
        }
    }

    if (pids != null && pids.isEmpty()) {
        return pids;
    }
    */

    QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
    BooleanJunction<?> bool = qb.bool();

    /*
     * Handle _content parameter (resource body content)
     */
    List<List<? extends IQueryParameterType>> contentAndTerms = theParams.remove(Constants.PARAM_CONTENT);
    addTextSearch(qb, bool, contentAndTerms, "myContentText");

    /*
     * Handle _text parameter (resource narrative content)
     */
    List<List<? extends IQueryParameterType>> textAndTerms = theParams.remove(Constants.PARAM_TEXT);
    addTextSearch(qb, bool, textAndTerms, "myNarrativeText");

    if (theReferencingPid != null) {
        bool.must(qb.keyword().onField("myResourceLinks.myTargetResourcePid").matching(theReferencingPid)
                .createQuery());
    }

    if (bool.isEmpty()) {
        return pids;
    }

    if (isNotBlank(theResourceName)) {
        bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
    }

    Query luceneQuery = bool.createQuery();

    // wrap Lucene query in a javax.persistence.Query
    FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, ResourceTable.class);
    jpaQuery.setProjection("myId");

    // execute search
    List<?> result = jpaQuery.getResultList();

    HashSet<Long> pidsSet = pids != null ? new HashSet<Long>(pids) : null;

    ArrayList<Long> retVal = new ArrayList<Long>();
    for (Object object : result) {
        Object[] nextArray = (Object[]) object;
        Long next = (Long) nextArray[0];
        if (next != null && (pidsSet == null || pidsSet.contains(next))) {
            retVal.add(next);
        }
    }

    return retVal;
}

From source file:com.thinkbiganalytics.ingest.TableMergeSyncSupportTest.java

private void doTestMergePKWithDifferentPartitions(String targetSchema, String targetTable, PartitionSpec spec) {

    // Insert one record to start
    hiveShell.execute(
            "insert into emp_sr.employee partition(country='USA',year=2012) (  `id`,  `timestamp`,`name`,`company`,`zip`,`phone`,`email`,  `hired`)  values (1,'1','Sally','OLD VALUE','94550',"
                    + "'555-1212'," + "'sally@acme.org','2012-01-01');");
    hiveShell.execute(
            "insert into emp_sr.employee partition(country='USA',year=2012) (  `id`,  `timestamp`,`name`,`company`,`zip`,`phone`,`email`,  `hired`)  values (1002,'1','Jimbo','VALUE','94550',"
                    + "'555-1212'," + "'sally@acme.org','2012-01-01');");

    hiveShell.execute(
            "insert into emp_sr.employee partition(country='USA',year=2015) (  `id`,  `timestamp`,`name`,`company`,`zip`,`phone`,`email`,  `hired`)  values (1000,'1','Jill','ORIG','94550',"
                    + "'555-1212'," + "'sally@acme.org','2015-01-01');");
    hiveShell.execute(
            "insert into emp_sr.employee partition(country='USA',year=2013) (  `id`,  `timestamp`,`name`,`company`,`zip`,`phone`,`email`,  `hired`)  values (2,'1','Bill','OLD VALUE','94550',"
                    + "'555-1212'," + "'sally@acme.org','2013-01-01');");
    hiveShell.execute(
            "insert into emp_sr.employee partition(country='USA',year=2013) (  `id`,  `timestamp`,`name`,`company`,`zip`,`phone`,`email`,  `hired`)  values (3,'1','Ray','OLD VALUE','94550',"
                    + "'555-1212'," + "'sally@acme.org','2013-01-01');");
    hiveShell.execute(
            "insert into emp_sr.employee partition(country='USA',year=2013) (  `id`,  `timestamp`,`name`,`company`,`zip`,`phone`,`email`,  `hired`)  values (1001,'1','Fred','VALUE','94550',"
                    + "'555-1212'," + "'sally@acme.org','2013-01-01');");

    List<String> results = fetchEmployees(targetSchema, targetTable);
    assertEquals(6, results.size());

    ColumnSpec columnSpec1 = new ColumnSpec("id", "String", "", true, false, false);
    ColumnSpec columnSpec2 = new ColumnSpec("name", "String", "", false, false, false);
    ColumnSpec[] columnSpecs = Arrays.asList(columnSpec1, columnSpec2).toArray(new ColumnSpec[0]);
    // Call merge
    mergeSyncSupport.doPKMerge(sourceSchema, sourceTable, targetSchema, targetTable, spec, processingPartition,
            columnSpecs);

    // We should have 6 records
    results = fetchEmployees(targetSchema, targetTable);
    assertEquals(6, results.size());
    assertFalse("Should not have old value", results.stream().anyMatch(s -> s.contains("OLD")));

    // Run merge with dedupe and should get the following two additional results. The result should not include any duplicates in the target table.
    hiveShell.execute(
            "insert into emp_sr.employee_valid partition(processing_dttm='20160119074340') (  `id`,  `name`,`company`,`zip`,`phone`,`email`,  `hired`,`country`) values (100,'Bruce',"
                    + "'OLD'," + "'94550','555-1212','bruce@acme.org','2016-01-01','Canada');");
    hiveShell.execute(
            "insert into emp_sr.employee_valid partition(processing_dttm='20160119074340') (  `id`,  `name`,`company`,`zip`,`phone`,`email`,  `hired`,`country`) values (101,'Harry',"
                    + "'OLD'," + "'94550','555-1212','harry@acme.org','2016-01-01','Canada');");

    mergeSyncSupport.doPKMerge(sourceSchema, sourceTable, targetSchema, targetTable, spec, processingPartition,
            columnSpecs);

    results = fetchEmployees(targetSchema, targetTable);
    assertEquals(8, results.size());
    // Verify no duplicates exist in the table
    HashSet<String> existing = new HashSet<>();
    for (String r : results) {
        assertFalse(existing.contains(r));
        existing.add(r);
    }

    hiveShell.execute(
            "insert into emp_sr.employee_valid partition(processing_dttm='20160119074540') (  `id`,  `name`,`company`,`zip`,`phone`,`email`,  `hired`,`country`) values (100,'Bruce',"
                    + "'ABC'," + "'94550','555-1212','bruce@acme.org','2016-01-01','Canada');");
    hiveShell.execute(
            "insert into emp_sr.employee_valid partition(processing_dttm='20160119074540') (  `id`,  `name`,`company`,`zip`,`phone`,`email`,  `hired`,`country`) values (101,'Harry',"
                    + "'ABC'," + "'94550','555-1212','harry@acme.org','2016-01-01','Canada');");
    hiveShell.execute(
            "insert into emp_sr.employee_valid partition(processing_dttm='20160119074540') (  `id`,  `name`,`company`,`zip`,`phone`,`email`,  `hired`,`country`) values (102,'Buddy',"
                    + "'ABC'," + "'94550','555-1212','buddy@acme.org','2016-01-01','Canada');");

    mergeSyncSupport.doPKMerge(sourceSchema, sourceTable, targetSchema, targetTable, spec, "20160119074540",
            columnSpecs);
    results = fetchEmployees(targetSchema, targetTable);
    assertEquals(9, results.size());
    existing = new HashSet<>();
    for (String r : results) {
        assertFalse(existing.contains(r));
        existing.add(r);
    }

    assertFalse("Should not have old value", results.stream().anyMatch(s -> s.contains("OLD")));

}

From source file:importer.handler.post.ImporterPostHandler.java

/**
 * Add a batch of annotations to the database
 * @param notes an array of annotation objects
 * @param clean if true remove old annotations for this docid
 * @throws ImporterException if the database operation fails
 */
protected void addAnnotations(ArrayList notes, boolean clean) throws ImporterException {
    try {
        Connection conn = Connector.getConnection();
        if (clean) {
            // remove all existing annotations for this document
            String[] docids = conn.listDocuments("annotations", docid + "/.*", JSONKeys.DOCID);
            for (int i = 0; i < docids.length; i++)
                conn.removeFromDb("annotations", docids[i]);
        }
        // ensure that the annotations are all unique
        HashSet<String> unique = new HashSet<String>();
        String[] docids = conn.listDocuments("annotations", docid + "/.*", JSONKeys.DOCID);
        for (int i = 0; i < docids.length; i++)
            unique.add(docids[i]);
        for (int i = 0; i < notes.size(); i++) {
            String fullId = docid + "/" + UUID.randomUUID().toString();
            while (unique.contains(fullId))
                fullId = docid + "/" + UUID.randomUUID().toString();
            conn.putToDb("annotations", fullId, notes.get(i).toString());
            // Record the generated id so later iterations cannot reuse it
            unique.add(fullId);
        }
    } catch (DbException e) {
        throw new ImporterException(e);
    }
}

From source file:de.laeubisoft.tools.ant.validation.W3CMarkupValidationTask.java

@Override
public void execute() throws BuildException {
    validateParameter();
    List<URL> urlsToCheck = new ArrayList<URL>();
    urlsToCheck.add(uri);
    HashSet<String> checkedURIs = new HashSet<String>();
    while (!urlsToCheck.isEmpty()) {
        URL url = urlsToCheck.remove(0);
        if (url != null) {
            String uriString = url.toString();
            if (checkedURIs.contains(uriString)) {
                continue;
            }
            checkedURIs.add(uriString);
        }
        //Check the URI (might be null if fragment or file was given...)
        if (checkURI(url)) {
            //If we should recurse, parse the URL and determine all links
            if (recurse) {
                Set<URL> recurseInto = recurseInto(url);
                urlloop: for (URL newUrl : recurseInto) {
                    String string = newUrl.toString();
                    if (checkedURIs.contains(string)) {
                        continue;
                    }
                    for (Pattern pattern : ignorePatternList) {
                        if (pattern.matcher(string).matches()) {
                            log("pattern " + pattern + " matches " + string + ", URL will be ignored",
                                    Project.MSG_INFO);
                            checkedURIs.add(string);
                            continue urlloop;
                        }
                    }
                    urlsToCheck.add(newUrl);
                }
            }
        }
    }
}