List of usage examples for java.util.HashSet.contains
public boolean contains(Object o)
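Before the project examples, a minimal self-contained sketch of the method's behavior: membership is decided by the element's hashCode() and equals(), the lookup takes expected constant time, and because the parameter is typed Object, a query with an unrelated type simply returns false.

import java.util.HashSet;
import java.util.Set;

public class ContainsDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<String>();
        colors.add("red");
        colors.add("green");

        // contains() hashes the argument, then checks equals() within the bucket.
        System.out.println(colors.contains("red"));   // true
        System.out.println(colors.contains("blue"));  // false

        // The parameter is Object, so a lookup with the wrong type compiles
        // and simply returns false.
        System.out.println(colors.contains(42));      // false
    }
}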
From source file: com.ikanow.infinit.e.api.social.sharing.ShareHandler.java
/**
 * Return true if user is a (member of the community) or (admin is true and ignoreAdmin is false)
 *
 * @param communityId
 * @param ignoreAdmin
 * @param bAdmin
 * @param memberOf
 * @return
 */
private boolean isMemberOwnerAdminOfCommunity(ObjectId communityId, boolean ignoreAdmin, boolean bAdmin,
        HashSet<ObjectId> memberOf) {
    try {
        if (ignoreAdmin || !bAdmin) {
            if ((null != memberOf) && (memberOf.contains(communityId))) {
                return true;
            }
        } else {
            // admin, allow it
            return true;
        }
    } catch (Exception e) {
    }
    return false;
}
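A lookup like memberOf.contains(communityId) above is only reliable because the element type implements equals() and hashCode() consistently. As a cautionary sketch, using a hypothetical key class rather than anything from the project above:

import java.util.HashSet;

public class MissingHashCodeDemo {
    // Hypothetical key type: equals() is overridden but hashCode() is not,
    // which breaks the contract that equal objects share a hash code.
    static final class BadId {
        final int value;
        BadId(int value) { this.value = value; }
        @Override public boolean equals(Object o) {
            return o instanceof BadId && ((BadId) o).value == value;
        }
        // hashCode() deliberately not overridden.
    }

    public static void main(String[] args) {
        HashSet<BadId> ids = new HashSet<BadId>();
        ids.add(new BadId(7));
        // Almost certainly false: the probe object hashes to a different
        // bucket, so equals() is never even consulted.
        System.out.println(ids.contains(new BadId(7)));
    }
}

Overriding both methods together restores the expected behavior.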
From source file: com.vmware.identity.idm.server.LocalOsIdentityProviderTest.java
@Test
public void TestUsers() throws Exception {
    for (UserInfo userInfo : _users.values()) {
        // ---------------------
        // findUser
        // ---------------------
        PersonUser user = localOsProvider.findUser(new PrincipalId(userInfo.getName(), domainName));
        Assert.assertNotNull(String.format("User '%s' should exist.", userInfo.getName()), user);
        validateUser(user, userInfo);

        // ---------------------
        // IsActive
        // ---------------------
        boolean isActive = SystemUtils.IS_OS_LINUX ? true : !userInfo.isDisabled();
        Assert.assertEquals(isActive,
                localOsProvider.IsActive(new PrincipalId(userInfo.getName(), domainName)));

        // ---------------------
        // getAttributes
        // ---------------------
        Collection<AttributeValuePair> attributes = localOsProvider
                .getAttributes(new PrincipalId(userInfo.getName(), domainName), getAttributes());
        Assert.assertNotNull(
                String.format("Should be able to retrieve attributes for User '%s'.", userInfo.getName()),
                attributes);

        for (AttributeValuePair attr : attributes) {
            Assert.assertNotNull(attr);
            Assert.assertNotNull(attr.getAttrDefinition());
            // new sids attributes comes without the friendly name.
            // Assert.assertNotNull( attr.getAttrDefinition().getFriendlyName() );
            if (GROUPS_FRIENDLY_NAME.equalsIgnoreCase(attr.getAttrDefinition().getFriendlyName())) {
                Set<GroupInfo> groups = _usersToGroups.get(userInfo.getName());
                if (groups != null && groups.isEmpty() == false) {
                    Assert.assertNotNull(attr.getValues());
                    Assert.assertTrue(groups.size() <= attr.getValues().size());

                    HashSet<String> attrGroups = new HashSet<String>();
                    for (String attributeValue : attr.getValues()) {
                        Assert.assertNotNull(attributeValue);
                        Assert.assertTrue(attributeValue.startsWith(domainName)
                                || (providerHasAlias() && attributeValue.startsWith(domainAlias)));
                        Assert.assertTrue(attributeValue.contains("\\"));
                        String groupName = attributeValue;
                        Assert.assertFalse(groupName.isEmpty());
                        attrGroups.add(groupName);
                    }

                    for (GroupInfo info : groups) {
                        Assert.assertTrue(
                                String.format("group '%s' is expected to be present",
                                        domainName + "\\" + info.getName()),
                                attrGroups.contains(domainName + "\\" + info.getName()));
                        if (providerHasAlias()) {
                            Assert.assertTrue(
                                    String.format("group '%s' is expected to be present",
                                            domainAlias + "\\" + info.getName()),
                                    attrGroups.contains(domainAlias + "\\" + info.getName()));
                        }
                    }
                }
            } else if (LAST_NAME_FRIENDLY_NAME.equalsIgnoreCase(attr.getAttrDefinition().getFriendlyName())) {
                Assert.assertNotNull(attr.getValues());
                Assert.assertEquals(1, attr.getValues().size());
                assertEqualsString(userInfo.getLastName(), attr.getValues().get(0));
            } else if (FIRST_NAME_FRIENDLY_NAME.equalsIgnoreCase(attr.getAttrDefinition().getFriendlyName())) {
                Assert.assertNotNull(attr.getValues());
                Assert.assertEquals(1, attr.getValues().size());
                assertEqualsString(userInfo.getFirstName(), attr.getValues().get(0));
            } else if (SUBJECT_TYPE_FRIENDLY_NAME
                    .equalsIgnoreCase(attr.getAttrDefinition().getFriendlyName())) {
                Assert.assertNotNull(attr.getValues());
                Assert.assertEquals(1, attr.getValues().size());
                assertEqualsString("false", attr.getValues().get(0));
            } else if (USER_PRINCIPAL_NAME_FRIENDLY_NAME
                    .equalsIgnoreCase(attr.getAttrDefinition().getFriendlyName())) {
                Assert.assertNotNull(attr.getValues());
                Assert.assertEquals(1, attr.getValues().size());
                assertEqualsString(userInfo.getName() + "@" + domainName, attr.getValues().get(0));
            }
        }

        // ---------------------
        // findDirectParentGroups, findNestedParentGroups
        // ---------------------
        Set<GroupInfo> groups = _usersToGroups.get(userInfo.getName());

        PrincipalGroupLookupInfo directParentGroups = localOsProvider
                .findDirectParentGroups(new PrincipalId(userInfo.getName(), domainName));
        validateGroupsSubset(groups, ((directParentGroups == null) ? null : directParentGroups.getGroups()),
                domainName, domainAlias);

        PrincipalGroupLookupInfo userGroups = localOsProvider
                .findNestedParentGroups(new PrincipalId(userInfo.getName(), domainName));
        validateGroupsSubset(groups, ((userGroups == null) ? null : userGroups.getGroups()), domainName,
                domainAlias);
    }
}
From source file: edu.ku.brc.specify.conversion.ConvertTaxonHelper.java
/** =============================================================================
 *  Convert Taxon
 *  =============================================================================
 */
private void convertTaxonRecords() {
    txMapper = IdMapperMgr.getInstance().get("taxonname", "TaxonNameID");
    txTypMapper = IdMapperMgr.getInstance().get("TaxonomyType", "TaxonomyTypeID");
    txUnitTypMapper = IdMapperMgr.getInstance().get("TaxonomicUnitType", "TaxonomicUnitTypeID");
    mappers = new IdMapperIFace[] { txMapper, txMapper, txTypMapper, txMapper, txUnitTypMapper };

    IdHashMapper.setTblWriter(tblWriter);

    newToOldColMap.put("TaxonID", "TaxonNameID");
    newToOldColMap.put("ParentID", "ParentTaxonNameID");
    newToOldColMap.put("TaxonTreeDefID", "TaxonomyTypeID");
    newToOldColMap.put("TaxonTreeDefItemID", "TaxonomicUnitTypeID");
    newToOldColMap.put("Name", "TaxonName");
    newToOldColMap.put("FullName", "FullTaxonName");
    newToOldColMap.put("IsAccepted", "Accepted");

    oldToNewColMap.put("TaxonNameID", "TaxonID");
    oldToNewColMap.put("ParentTaxonNameID", "ParentID");
    oldToNewColMap.put("TaxonomyTypeID", "TaxonTreeDefID");
    oldToNewColMap.put("TaxonomicUnitTypeID", "TaxonTreeDefItemID");
    oldToNewColMap.put("TaxonName", "Name");
    oldToNewColMap.put("FullTaxonName", "FullName");
    oldToNewColMap.put("Accepted", "IsAccepted");

    // Ignore new fields
    // These were added for supporting the new security model and hybrids
    /*String[] ignoredFields = { "GUID", "Visibility", "VisibilitySetBy", "IsHybrid",
            "HybridParent1ID", "HybridParent2ID", "EsaStatus", "CitesStatus", "UsfwsCode",
            "IsisNumber", "Text1", "Text2", "NcbiTaxonNumber", "Number1", "Number2",
            "CreatedByAgentID", "ModifiedByAgentID", "Version", "CultivarName", "LabelFormat",
            "COLStatus", "VisibilitySetByID"};
    */
    StringBuilder newSB = new StringBuilder();
    StringBuilder vl = new StringBuilder();
    for (int i = 0; i < cols.length; i++) {
        fieldToColHash.put(cols[i], i + 1);
        colToFieldHash.put(i + 1, cols[i]);

        if (newSB.length() > 0) newSB.append(", ");
        newSB.append(cols[i]);

        if (vl.length() > 0) vl.append(',');
        vl.append('?');
    }

    StringBuilder oldSB = new StringBuilder();
    for (int i = 0; i < oldCols.length; i++) {
        oldFieldToColHash.put(oldCols[i], i + 1);
        if (oldSB.length() > 0) oldSB.append(", ");
        oldSB.append("tx.");
        oldSB.append(oldCols[i]);
    }

    rankIdOldDBInx = oldFieldToColHash.get("RankID");

    String sqlStr = String.format("SELECT %s FROM taxon", newSB.toString());
    log.debug(sqlStr);

    String sql = String.format("SELECT %s %s", oldSB.toString(), taxonFromClause);
    log.debug(sql);

    String cntSQL = String.format("SELECT COUNT(*) %s", taxonFromClause);
    log.debug(cntSQL);

    int txCnt = BasicSQLUtils.getCountAsInt(oldDBConn, cntSQL);
    if (frame != null) {
        frame.setProcess(0, txCnt);
    }

    String pStr = String.format("INSERT INTO taxon (%s) VALUES (%s)", newSB.toString(), vl.toString());
    log.debug(pStr);

    try {
        stmtTx = newDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        ResultSet rs1 = stmtTx.executeQuery(sqlStr);
        ResultSetMetaData rsmd1 = rs1.getMetaData();

        colTypes = new int[rsmd1.getColumnCount()];
        colSizes = new int[rsmd1.getColumnCount()];
        for (int i = 0; i < colTypes.length; i++) {
            colTypes[i] = rsmd1.getColumnType(i + 1);
            colSizes[i] = rsmd1.getPrecision(i + 1);
        }
        rs1.close();
        stmtTx.close();

        missingParentList.clear();
        strandedFixedHash.clear();

        lastEditedByInx = oldFieldToColHash.get("LastEditedBy");
        modifiedByAgentInx = fieldToColHash.get("ModifiedByAgentID");

        stmtTx = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        pStmtTx = newDBConn.prepareStatement(pStr);

        mappers[1].setShowLogErrors(false);

        int cnt = 0;
        ResultSet rs = stmtTx.executeQuery(sql);
        ResultSetMetaData rsmd = rs.getMetaData();
        while (rs.next()) {
            processRow(rs, rsmd, null);
            cnt++;
            if (cnt % 1000 == 0) {
                log.debug(cnt);
                if (frame != null) {
                    frame.setProcess(cnt);
                }
            }
        }
        rs.close();

        if (frame != null) {
            frame.setProcess(txCnt, txCnt);
        }

        if (missingParentList.size() > 0) {
            fixStrandedTaxon(oldSB);

            tblWriter.setHasLines();
            tblWriter.startTable("Stranded Taxon (no parent): " + missingParentList.size());
            tblWriter.logHdr("Full Name", "RankID", "Sp5 RecordID", "Was Re-parented", "Description");
            for (Pair<Integer, String> p : missingParentList) {
                tblWriter.append("<TR>");
                Object[] row = BasicSQLUtils.queryForRow(oldDBConn,
                        "SELECT FullTaxonName, RankID, TaxonNameID FROM taxonname WHERE TaxonNameID = "
                                + p.first);
                for (Object obj : row) {
                    tblWriter.append("<TD>");
                    tblWriter.append(obj != null ? obj.toString() : "null");
                    tblWriter.append("</TD>");
                }
                tblWriter.append("<TD>");
                tblWriter.append(strandedFixedHash.contains(p.first) ? "Yes" : "No");
                tblWriter.append("</TD><TD>");
                tblWriter.append(p.second);
                tblWriter.append("</TD></TR>");
            }
            tblWriter.endTable();
            tblWriter.append("<BR>");

            frame.setDesc("Renumbering the tree nodes, this may take a while...");

            HashSet<Integer> ttdHash = new HashSet<Integer>();
            for (CollectionInfo colInfo : CollectionInfo.getFilteredCollectionInfoList()) {
                if (!ttdHash.contains(colInfo.getTaxonTreeDef().getId())) {
                    DataProviderSessionIFace session = null;
                    try {
                        session = DataProviderFactory.getInstance().createSession();
                        TaxonTreeDef taxonTreeDef = colInfo.getTaxonTreeDef();
                        taxonTreeDef = (TaxonTreeDef) session
                                .getData("FROM TaxonTreeDef WHERE id = " + taxonTreeDef.getId());

                        sql = "SELECT TaxonID FROM taxon WHERE RankID = 0 AND TaxonTreeDefID = "
                                + taxonTreeDef.getId();
                        log.debug(sql);
                        Integer txRootId = BasicSQLUtils.getCount(sql);
                        Taxon txRoot = (Taxon) session.getData("FROM Taxon WHERE id = " + txRootId);

                        NodeNumberer<Taxon, TaxonTreeDef, TaxonTreeDefItem> nodeNumberer =
                                new NodeNumberer<Taxon, TaxonTreeDef, TaxonTreeDefItem>(txRoot.getDefinition());
                        nodeNumberer.doInBackground();
                    } catch (Exception ex) {
                        //session.rollback();
                        ex.printStackTrace();
                    } finally {
                        if (session != null) {
                            session.close();
                        }
                    }
                    ttdHash.add(colInfo.getTaxonTreeDef().getId());
                }
            }
            frame.setDesc("Renumbering done.");
        }
        missingParentList.clear();
        strandedFixedHash.clear();

    } catch (SQLException ex) {
        ex.printStackTrace();
    } finally {
        try {
            stmtTx.close();
            pStmtTx.close();
        } catch (Exception ex) {
        }
    }
    IdHashMapper.setTblWriter(null);
}
From source file: com.webcohesion.enunciate.modules.docs.DocsModule.java
private TreeSet<Artifact> findDocumentationArtifacts() {
    HashSet<String> explicitArtifacts = new HashSet<String>();
    TreeSet<Artifact> artifacts = new TreeSet<Artifact>();
    for (ExplicitDownloadConfig download : getExplicitDownloads()) {
        if (download.getArtifact() != null) {
            explicitArtifacts.add(download.getArtifact());
        } else if (download.getFile() != null) {
            File downloadFile = resolveFile(download.getFile());
            debug("File %s to be added as an extra download.", downloadFile.getAbsolutePath());
            SpecifiedArtifact artifact = new SpecifiedArtifact(getName(), downloadFile.getName(), downloadFile);

            if (download.getName() != null) {
                artifact.setName(download.getName());
            }

            if (download.getDescription() != null) {
                artifact.setDescription(download.getDescription());
            }

            artifact.setShowLink(!"false".equals(download.getShowLink()));
            artifacts.add(artifact);
        }
    }

    for (Artifact artifact : this.enunciate.getArtifacts()) {
        if (artifact.isPublic() || explicitArtifacts.contains(artifact.getId())) {
            artifacts.add(artifact);
            debug("Artifact %s to be added as an extra download.", artifact.getId());
            explicitArtifacts.remove(artifact.getId());
        }
    }

    if (explicitArtifacts.size() > 0) {
        for (String artifactId : explicitArtifacts) {
            warn("WARNING: Unknown artifact '%s'. Will not be available for download.", artifactId);
        }
    }

    return artifacts;
}
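One detail in the example above: matched ids are removed from explicitArtifacts as they are found, so the entries that survive the contains() loop are exactly the unresolved ones. A stripped-down sketch of that bookkeeping pattern (names are illustrative, not Enunciate API):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class UnresolvedIdsDemo {
    public static void main(String[] args) {
        Set<String> requested = new HashSet<String>(Arrays.asList("docs", "client-jar", "bogus"));
        List<String> available = Arrays.asList("docs", "client-jar", "server-war");

        for (String id : available) {
            if (requested.contains(id)) {
                // matched: consume the request so it cannot be reported as missing
                requested.remove(id);
            }
        }

        // Anything still present was requested but never matched.
        for (String missing : requested) {
            System.out.println("WARNING: Unknown artifact '" + missing + "'");
        }
    }
}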
From source file: edu.uci.ics.hyracks.algebricks.rewriter.rules.ComplexUnnestToProductRule.java
private boolean findPlanPartition(AbstractLogicalOperator op, HashSet<LogicalVariable> innerUsedVars,
        HashSet<LogicalVariable> outerUsedVars, List<ILogicalOperator> innerOps,
        List<ILogicalOperator> outerOps, List<ILogicalOperator> topSelects, boolean belowSecondUnnest)
        throws AlgebricksException {
    if (belowSecondUnnest && innerUsedVars.isEmpty()) {
        // Trivially joinable.
        return true;
    }
    if (!belowSecondUnnest) {
        // Bail on the following operators.
        switch (op.getOperatorTag()) {
        case AGGREGATE:
        case SUBPLAN:
        case GROUP:
        case UNNEST_MAP:
            return false;
        }
    }
    switch (op.getOperatorTag()) {
    case UNNEST:
    case DATASOURCESCAN: {
        // We may have reached this state by descending through a subplan.
        outerOps.add(op);
        return true;
    }
    case INNERJOIN:
    case LEFTOUTERJOIN: {
        // Make sure that no variables that are live under this join are needed by the inner.
        List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
        VariableUtilities.getLiveVariables(op, liveVars);
        for (LogicalVariable liveVar : liveVars) {
            if (innerUsedVars.contains(liveVar)) {
                return false;
            }
        }
        outerOps.add(op);
        return true;
    }
    case SELECT: {
        // Remember this select so we can pull it above the join.
        if (innerUsedVars.isEmpty()) {
            outerOps.add(op);
        } else {
            topSelects.add(op);
        }
        break;
    }
    case PROJECT: {
        // Throw away projects from the plan since we are pulling selects up.
        break;
    }
    case EMPTYTUPLESOURCE:
    case NESTEDTUPLESOURCE: {
        if (belowSecondUnnest) {
            // We have successfully partitioned the plan into independent parts to be plugged into the join.
            return true;
        } else {
            // We could not find a second unnest or a join.
            return false;
        }
    }
    default: {
        // The inner is trivially independent.
        if (!belowSecondUnnest && innerUsedVars.isEmpty()) {
            outerOps.add(op);
            break;
        }
        // Examine produced vars to determine which partition uses them.
        List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
        VariableUtilities.getProducedVariables(op, producedVars);
        int outerMatches = 0;
        int innerMatches = 0;
        for (LogicalVariable producedVar : producedVars) {
            if (outerUsedVars.contains(producedVar)) {
                outerMatches++;
            }
            if (innerUsedVars.contains(producedVar)) {
                innerMatches++;
            }
        }

        HashSet<LogicalVariable> targetUsedVars = null;
        if (outerMatches == producedVars.size() && !producedVars.isEmpty()) {
            // All produced vars used by outer partition.
            outerOps.add(op);
            targetUsedVars = outerUsedVars;
        }
        if (innerMatches == producedVars.size() && !producedVars.isEmpty()) {
            // All produced vars used by inner partition.
            innerOps.add(op);
            targetUsedVars = innerUsedVars;
        }
        if (innerMatches == 0 && outerMatches == 0) {
            // Op produces variables that are not used in the part of the plan we've seen
            // (or it doesn't produce any vars).
            // Try to figure out where it belongs by analyzing the used variables.
            List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
            VariableUtilities.getUsedVariables(op, usedVars);
            for (LogicalVariable usedVar : usedVars) {
                boolean canBreak = false;
                if (outerUsedVars.contains(usedVar)) {
                    outerOps.add(op);
                    targetUsedVars = outerUsedVars;
                    canBreak = true;
                }
                if (innerUsedVars.contains(usedVar)) {
                    innerOps.add(op);
                    targetUsedVars = innerUsedVars;
                    canBreak = true;
                }
                if (canBreak) {
                    break;
                }
            }
            // TODO: For now we bail here, but we could remember such ops and determine
            // their target partition at a later point.
            if (targetUsedVars == null) {
                return false;
            }
        } else if (innerMatches != 0 && outerMatches != 0) {
            // The current operator produces variables that are used by both partitions,
            // so the inner and outer are not independent and, therefore, we cannot create a join.
            // TODO: We may still be able to split the operator to create a viable partitioning.
            return false;
        }
        // Update used variables of the partition that op belongs to.
        if (op.hasNestedPlans() && op.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
            AbstractOperatorWithNestedPlans opWithNestedPlans = (AbstractOperatorWithNestedPlans) op;
            opWithNestedPlans.getUsedVariablesExceptNestedPlans(targetUsedVars);
        } else {
            VariableUtilities.getUsedVariables(op, targetUsedVars);
        }
        break;
    }
    }
    if (!op.hasInputs()) {
        if (!belowSecondUnnest) {
            // We could not find a second unnest or a join.
            return false;
        } else {
            // We have successfully partitioned the plan into independent parts to be plugged into the join.
            return true;
        }
    }
    return findPlanPartition((AbstractLogicalOperator) op.getInputs().get(0).getValue(), innerUsedVars,
            outerUsedVars, innerOps, outerOps, topSelects, belowSecondUnnest);
}
From source file: fr.openwide.talendalfresco.rest.client.importer.RestImportFileTest.java
public void login() {
    // create client and configure it
    HttpClient client = new HttpClient();
    client.getHttpConnectionManager().getParams().setConnectionTimeout(timeout);

    // instantiating a new method and configuring it
    GetMethod method = new GetMethod(restCommandUrlPrefix + "login");
    method.setFollowRedirects(true); // ?
    // Provide a custom retry handler if necessary (?)
    method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
            new DefaultHttpMethodRetryHandler(3, false));
    NameValuePair[] params = new NameValuePair[] { new NameValuePair("username", "admin"),
            new NameValuePair("password", "admin") };
    method.setQueryString(params);

    try {
        // Execute the method.
        int statusCode = client.executeMethod(method);

        if (statusCode != HttpStatus.SC_OK) {
            System.err.println("Method failed: " + method.getStatusLine());
        }

        // Read the response body.
        byte[] responseBody = method.getResponseBody();
        System.out.println(new String(responseBody)); // TODO rm

        HashSet<String> defaultElementSet = new HashSet<String>(
                Arrays.asList(new String[] { RestConstants.TAG_COMMAND, RestConstants.TAG_CODE,
                        RestConstants.TAG_CONTENT, RestConstants.TAG_ERROR, RestConstants.TAG_MESSAGE }));
        HashMap<String, String> elementValueMap = new HashMap<String, String>(6);
        try {
            XMLEventReader xmlReader = XmlHelper.getXMLInputFactory()
                    .createXMLEventReader(new ByteArrayInputStream(responseBody));
            StringBuffer singleLevelTextBuf = null;
            while (xmlReader.hasNext()) {
                XMLEvent event = xmlReader.nextEvent();
                switch (event.getEventType()) {
                case XMLEvent.CHARACTERS:
                case XMLEvent.CDATA:
                    if (singleLevelTextBuf != null) {
                        singleLevelTextBuf.append(event.asCharacters().getData());
                    } // else element not meaningful
                    break;
                case XMLEvent.START_ELEMENT:
                    StartElement startElement = event.asStartElement();
                    String elementName = startElement.getName().getLocalPart();
                    if (defaultElementSet.contains(elementName)
                            // TODO another command specific level
                            || "ticket".equals(elementName)) {
                        // reinit buffer at start of meaningful elements
                        singleLevelTextBuf = new StringBuffer();
                    } else {
                        singleLevelTextBuf = null; // not useful
                    }
                    break;
                case XMLEvent.END_ELEMENT:
                    if (singleLevelTextBuf == null) {
                        break; // element not meaningful
                    }
                    // TODO or merely put it in the map since the element has been tested at start
                    EndElement endElement = event.asEndElement();
                    elementName = endElement.getName().getLocalPart();
                    if (defaultElementSet.contains(elementName)) {
                        String value = singleLevelTextBuf.toString();
                        elementValueMap.put(elementName, value);
                        // TODO test if it is code and it is not OK, break to error handling
                    }
                    // TODO another command specific level
                    else if ("ticket".equals(elementName)) {
                        ticket = singleLevelTextBuf.toString();
                    }
                    // singleLevelTextBuf = new StringBuffer(); // no ! in start
                    break;
                }
            }
        } catch (XMLStreamException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (Throwable t) {
            // TODO Auto-generated catch block
            t.printStackTrace();
            //throw t;
        }
        String code = elementValueMap.get(RestConstants.TAG_CODE);
        assertTrue(RestConstants.CODE_OK.equals(code));
        System.out.println("got ticket " + ticket);
    } catch (HttpException e) {
        // TODO
        e.printStackTrace();
    } catch (IOException e) {
        // TODO
        e.printStackTrace();
    } finally {
        // Release the connection.
        method.releaseConnection();
    }
}
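The parser above uses HashSet.contains on defaultElementSet to decide, per XML event, whether an element's text is worth buffering. The same set-driven filtering idiom in isolation, with hypothetical tag names in place of the RestConstants values:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class TagFilter {
    // Tag names here are hypothetical placeholders, not the RestConstants values.
    private static final Set<String> MEANINGFUL_TAGS =
            new HashSet<String>(Arrays.asList("command", "code", "message"));

    public static boolean isMeaningful(String localName) {
        // HashSet.contains gives O(1) expected lookup, so this check stays
        // cheap even when called once per XML event.
        return MEANINGFUL_TAGS.contains(localName);
    }

    public static void main(String[] args) {
        System.out.println(isMeaningful("code"));    // true
        System.out.println(isMeaningful("ticket"));  // false
    }
}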
From source file: fr.openwide.talendalfresco.rest.client.importer.RestImportFileTest.java
public void testSingleFileImport() {
    // create client and configure it
    HttpClient client = new HttpClient();
    client.getHttpConnectionManager().getParams().setConnectionTimeout(timeout);

    // instantiating a new method and configuring it
    PostMethod method = new PostMethod(restCommandUrlPrefix + "import");
    // Provide a custom retry handler if necessary (?)
    method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
            new DefaultHttpMethodRetryHandler(3, false));
    NameValuePair[] params = new NameValuePair[] { new NameValuePair("path", "/app:company_home"),
            new NameValuePair("ticket", ticket) };
    method.setQueryString(params);

    try {
        //method.setRequestBody(new NameValuePair[] {
        //        new NameValuePair("path", "/app:company_home") });
        FileInputStream acpXmlIs = new FileInputStream(SAMPLE_SINGLE_FILE_PATH);
        InputStreamRequestEntity entity = new InputStreamRequestEntity(acpXmlIs);
        //InputStreamRequestEntity entity = new InputStreamRequestEntity(acpXmlIs, "text/xml; charset=ISO-8859-1");
        method.setRequestEntity(entity);
    } catch (IOException ioex) {
        fail("ACP XML file not found " + ioex.getMessage());
    }

    try {
        // Execute the method.
        int statusCode = client.executeMethod(method);

        if (statusCode != HttpStatus.SC_OK) {
            System.err.println("Method failed: " + method.getStatusLine());
        }

        // Read the response body.
        byte[] responseBody = method.getResponseBody();
        System.out.println(new String(responseBody)); // TODO rm

        HashSet<String> defaultElementSet = new HashSet<String>(
                Arrays.asList(new String[] { RestConstants.TAG_COMMAND, RestConstants.TAG_CODE,
                        RestConstants.TAG_CONTENT, RestConstants.TAG_ERROR, RestConstants.TAG_MESSAGE }));
        HashMap<String, String> elementValueMap = new HashMap<String, String>(6);
        try {
            XMLEventReader xmlReader = XmlHelper.getXMLInputFactory()
                    .createXMLEventReader(new ByteArrayInputStream(responseBody));
            StringBuffer singleLevelTextBuf = null;
            while (xmlReader.hasNext()) {
                XMLEvent event = xmlReader.nextEvent();
                switch (event.getEventType()) {
                case XMLEvent.CHARACTERS:
                case XMLEvent.CDATA:
                    if (singleLevelTextBuf != null) {
                        singleLevelTextBuf.append(event.asCharacters().getData());
                    } // else element not meaningful
                    break;
                case XMLEvent.START_ELEMENT:
                    StartElement startElement = event.asStartElement();
                    String elementName = startElement.getName().getLocalPart();
                    if (defaultElementSet.contains(elementName)
                            // TODO another command specific level
                            || "ticket".equals(elementName)) {
                        // reinit buffer at start of meaningful elements
                        singleLevelTextBuf = new StringBuffer();
                    } else {
                        singleLevelTextBuf = null; // not useful
                    }
                    break;
                case XMLEvent.END_ELEMENT:
                    if (singleLevelTextBuf == null) {
                        break; // element not meaningful
                    }
                    // TODO or merely put it in the map since the element has been tested at start
                    EndElement endElement = event.asEndElement();
                    elementName = endElement.getName().getLocalPart();
                    if (defaultElementSet.contains(elementName)) {
                        String value = singleLevelTextBuf.toString();
                        elementValueMap.put(elementName, value);
                        // TODO test if it is code and it is not OK, break to error handling
                    }
                    // TODO another command specific level
                    else if ("ticket".equals(elementName)) {
                        ticket = singleLevelTextBuf.toString();
                    }
                    // singleLevelTextBuf = new StringBuffer(); // no ! in start
                    break;
                }
            }
        } catch (XMLStreamException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (Throwable t) {
            // TODO Auto-generated catch block
            t.printStackTrace();
            //throw t;
        }
        String code = elementValueMap.get(RestConstants.TAG_CODE);
        assertTrue(RestConstants.CODE_OK.equals(code));
        System.out.println("got ticket " + ticket);
    } catch (HttpException e) {
        // TODO
        e.printStackTrace();
    } catch (IOException e) {
        // TODO
        e.printStackTrace();
    } finally {
        // Release the connection.
        method.releaseConnection();
    }
}
From source file: edu.ku.brc.specify.dbsupport.cleanuptools.GeoCleanupFuzzySearch.java
/**
 * @param geoDef
 * @param frame
 */
public GeoCleanupFuzzySearch(final GeographyTreeDef geoDef) {
    super();

    readConn = DBConnection.getInstance().createConnection();

    // String roundTrip;
    // try
    // {
    //     String altNames = BasicSQLUtils.querySingleObj("select alternatenames from geoname where geonameId = 921929");
    //     byte[] utf8Bytes = altNames.getBytes();
    //     roundTrip = new String(utf8Bytes, "UTF8");
    //     System.out.println("roundTrip = " + roundTrip);
    //     roundTrip = new String(utf8Bytes);
    //     System.out.println("roundTrip = " + roundTrip);
    //
    //     utf8Bytes = altNames.getBytes("UTF8");
    //     roundTrip = new String(utf8Bytes, "UTF8");
    //     System.out.println("roundTrip = " + roundTrip);
    //     roundTrip = new String(utf8Bytes);
    //     System.out.println("roundTrip = " + roundTrip);
    // } catch (UnsupportedEncodingException e)
    // {
    //     // TODO Auto-generated catch block
    //     e.printStackTrace();
    // }
    // //dbConn.close();

    if (false) // debug
    {
        HashSet<String> namesSet = new HashSet<String>();
        Vector<Object> nmRows = BasicSQLUtils
                .querySingleCol("select asciiname from geoname where fcode LIKE '%PCL%' ORDER BY asciiname");
        for (Object nm : nmRows) {
            String newName = stripExtrasFromName((String) nm);
            System.out.println("[" + newName + "]\t\t[" + nm + "]");
            namesSet.add(newName);
        }

        nmRows = BasicSQLUtils.querySingleCol("SELECT Name FROM geography WHERE RankID = 200 ORDER BY Name");
        for (Object nm : nmRows) {
            String newName = stripExtrasFromName((String) nm);
            if (!namesSet.contains(newName)) {
                System.out.println("Not Found: [" + newName + "]\t\t[" + nm + "]");
            }
        }
    }

    Vector<Object[]> rows = BasicSQLUtils.query("SELECT iso_alpha2, name FROM countryinfo");
    for (Object[] row : rows) {
        countryLookupMap.put(row[0].toString(), row[1].toString());
    }

    rows = BasicSQLUtils.query("SELECT asciiname, country, admin1 FROM geoname WHERE fcode = 'ADM1'");
    for (Object[] row : rows) {
        stateLookupMap.put(row[1].toString() + "_" + row[2].toString(), row[0].toString());
    }

    this.geoDef = geoDef;

    // ZZZ For Release
    if (isDoingTesting) {
        FILE_INDEX_DIR = new File("/Users/rods/Downloads/lucene/geonames-index"); // Debug Only
    } else {
        String dirPath = getAppDataDir() + File.separator + "geonames-index";
        FILE_INDEX_DIR = new File(dirPath);
    }
}
From source file: com.enonic.vertical.engine.handlers.PageTemplateHandler.java
public Document getPageTemplatesByMenu(int siteKey, int[] excludeTypeKeys) {
    List<PageTemplateEntity> list = pageTemplateDao.findBySiteKey(siteKey);
    ArrayList<PageTemplateEntity> filtered = new ArrayList<PageTemplateEntity>();

    HashSet<Integer> excludedTypeSet = null;
    if (excludeTypeKeys != null && excludeTypeKeys.length > 0) {
        excludedTypeSet = new HashSet<Integer>();
        for (int key : excludeTypeKeys) {
            excludedTypeSet.add(key);
        }
    }

    for (PageTemplateEntity entity : list) {
        if ((excludedTypeSet == null) || !excludedTypeSet.contains(entity.getType().getKey())) {
            filtered.add(entity);
        }
    }

    return createPageTemplatesDocument(filtered);
}
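The loop above boxes each int key by hand so that contains() can later match the Integer elements of the set. On Java 8+, the same exclusion set can be built in one expression; a sketch assuming only the standard stream API:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;

public class ExclusionSetDemo {
    public static void main(String[] args) {
        int[] excludeTypeKeys = { 3, 5, 8 };

        // boxed() converts the IntStream to a Stream<Integer>, so the
        // resulting set can back an Object-based contains() lookup.
        Set<Integer> excluded = Arrays.stream(excludeTypeKeys)
                .boxed()
                .collect(Collectors.toCollection(HashSet::new));

        System.out.println(excluded.contains(5));  // true
        System.out.println(excluded.contains(4));  // false
    }
}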
From source file: com.cloudera.impala.analysis.DescriptorTable.java
public TDescriptorTable toThrift() {
    TDescriptorTable result = new TDescriptorTable();
    HashSet<Table> referencedTbls = Sets.newHashSet();
    HashSet<Table> allPartitionsTbls = Sets.newHashSet();
    for (TupleDescriptor tupleDesc : tupleDescs_.values()) {
        // inline view of a non-constant select has a non-materialized tuple descriptor
        // in the descriptor table just for type checking, which we need to skip
        if (tupleDesc.isMaterialized()) {
            // TODO: Ideally, we should call tupleDesc.checkIsExecutable() here, but there
            // currently are several situations in which we send materialized tuples without
            // a mem layout to the BE, e.g., when unnesting unions or when replacing plan
            // trees with an EmptySetNode.
            result.addToTupleDescriptors(tupleDesc.toThrift());
            Table table = tupleDesc.getTable();
            if (table != null && !(table instanceof View)) referencedTbls.add(table);
            // Only serialize materialized slots
            for (SlotDescriptor slotD : tupleDesc.getMaterializedSlots()) {
                result.addToSlotDescriptors(slotD.toThrift());
            }
        }
    }
    for (Table table : referencedTables_) {
        referencedTbls.add(table);
        // We don't know which partitions are needed for INSERT, so include them all.
        allPartitionsTbls.add(table);
    }
    for (Table tbl : referencedTbls) {
        HashSet<Long> referencedPartitions = null; // null means include all partitions.
        if (!allPartitionsTbls.contains(tbl)) {
            referencedPartitions = getReferencedPartitions(tbl);
        }
        result.addToTableDescriptors(tbl.toThriftDescriptor(referencedPartitions));
    }
    return result;
}