List of usage examples for java.lang.String.CASE_INSENSITIVE_ORDER

public static final Comparator<String> CASE_INSENSITIVE_ORDER

A Comparator that orders String objects as by compareToIgnoreCase, i.e. ignoring case differences.
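Before the project examples below, here is a minimal self-contained sketch of the comparator in use (the class name and sample data are illustrative only):

import java.util.Arrays;
import java.util.List;

public class CaseInsensitiveSortDemo {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("apple", "Banana", "cherry");
        // A plain natural-order sort would yield [Banana, apple, cherry],
        // because uppercase 'B' precedes lowercase 'a' in String's natural order.
        names.sort(String.CASE_INSENSITIVE_ORDER);
        System.out.println(names); // prints [apple, Banana, cherry]
    }
}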
From source file: acmi.l2.clientmod.l2smr.Controller.java

private void initializeUnr() {
    mapsDirProperty().addListener((observable, oldValue, newValue) -> {
        unrChooser.getSelectionModel().clearSelection();
        unrChooser.getItems().clear();
        unrChooser.setDisable(true);

        if (newValue == null)
            return;

        unrChooser.getItems().addAll(Arrays.stream(newValue.listFiles(MAP_FILE_FILTER)).map(File::getName)
                .collect(Collectors.toList()));
        unrChooser.setDisable(false);

        AutoCompleteComboBox.autoCompleteComboBox(unrChooser, AutoCompleteComboBox.AutoCompleteMode.CONTAINING);
    });

    this.unrChooser.getSelectionModel().selectedItemProperty().addListener((observable, oldValue, newValue) -> {
        table.getSelectionModel().clearSelection();
        filterPane.setDisable(true);
        actors.set(null);
        actorStaticMeshChooser.getItems().clear();

        System.gc();

        if (newValue == null)
            return;

        try (UnrealPackage up = new UnrealPackage(new File(getMapsDir(), newValue), true)) {
            longTask(progress -> {
                List<UnrealPackage.ImportEntry> staticMeshes = up.getImportTable().parallelStream()
                        .filter(ie -> ie.getFullClassName().equalsIgnoreCase("Engine.StaticMesh"))
                        .sorted((ie1, ie2) -> String.CASE_INSENSITIVE_ORDER
                                .compare(ie1.getObjectInnerFullName(), ie2.getObjectInnerFullName()))
                        .collect(Collectors.toList());
                Platform.runLater(() -> {
                    actorStaticMeshChooser.getItems().setAll(staticMeshes);
                    AutoCompleteComboBox.autoCompleteComboBox(actorStaticMeshChooser,
                            AutoCompleteComboBox.AutoCompleteMode.CONTAINING);
                });

                List<Actor> actors = up.getExportTable().parallelStream()
                        .filter(e -> UnrealPackage.ObjectFlag
                                .getFlags(e.getObjectFlags()).contains(UnrealPackage.ObjectFlag.HasStack))
                        .map(entry -> {
                            try {
                                return new Actor(entry.getIndex(), entry.getObjectInnerFullName(),
                                        entry.getObjectRawDataExternally(), up);
                            } catch (Throwable e) {
                                return null;
                            }
                        })
                        .filter(Objects::nonNull)
                        .filter(actor -> actor.getStaticMeshRef() != 0 && actor.getOffsets().location != 0)
                        .collect(Collectors.toList());
                Platform.runLater(() -> Controller.this.actors.set(FXCollections.observableArrayList(actors)));
            }, e -> onException("Import failed", e));
        } catch (Exception e) {
            onException("Read failed", e);
        }

        resetFilter();
        filterPane.setDisable(false);
    });

    this.actorColumn.setCellValueFactory(actorStringCellDataFeatures -> new SimpleStringProperty(
            actorStringCellDataFeatures.getValue().getActorName()));
    this.staticMeshColumn.setCellValueFactory(actorStringCellDataFeatures -> new SimpleStringProperty(
            actorStringCellDataFeatures.getValue().getStaticMesh()));

    this.table.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
    this.table.getSelectionModel().selectedItemProperty().addListener((observable) -> updateSMAPane());

    this.table.setOnMouseClicked(event -> {
        if (event.getButton() == MouseButton.PRIMARY && event.getClickCount() == 2) {
            String obj = table.getSelectionModel().getSelectedItem().getStaticMesh();
            String file = obj.substring(0, obj.indexOf('.')) + ".usx";
            showUmodel(obj, file);
        }
    });
}
From source file: url.Path.java

public void pathFrequency(String inputFile, String outputFile, int threshold) {
    ArrayList<String> d3list = new ArrayList<String>();
    Map<String, Integer> wordMap = getWordCount(inputFile);
    List<Map.Entry<String, Integer>> list = sortByValue(wordMap);
    try {
        PrintWriter writer = new PrintWriter(outputFile, "UTF-8");
        for (Map.Entry<String, Integer> entry : list) {
            if (entry.getValue() > threshold) {
                d3list.add(entry.getKey() + ',' + entry.getValue());
                // d3list.add(entry.getKey().replace(".", "-"));
            }
        }
        Collections.sort(d3list, String.CASE_INSENSITIVE_ORDER);
        //Collections.reverse(d3list);
        for (int i = 0; i < d3list.size() - 1; i++) {
            writer.println(d3list.get(i));
        }
        writer.close();
        System.out.println("Write to file-->" + outputFile);
    } catch (FileNotFoundException | UnsupportedEncodingException ex) {
    }
}
From source file: org.apache.syncope.core.util.ContentExporter.java

public void export(final OutputStream os, final String wfTablePrefix)
        throws SAXException, TransformerConfigurationException {

    if (StringUtils.isNotBlank(wfTablePrefix)) {
        TABLE_PREFIXES_TO_BE_EXCLUDED.add(wfTablePrefix);
    }

    StreamResult streamResult = new StreamResult(os);
    final SAXTransformerFactory transformerFactory =
            (SAXTransformerFactory) SAXTransformerFactory.newInstance();

    TransformerHandler handler = transformerFactory.newTransformerHandler();
    Transformer serializer = handler.getTransformer();
    serializer.setOutputProperty(OutputKeys.ENCODING, SyncopeConstants.DEFAULT_ENCODING);
    serializer.setOutputProperty(OutputKeys.INDENT, "yes");
    handler.setResult(streamResult);
    handler.startDocument();
    handler.startElement("", "", ROOT_ELEMENT, new AttributesImpl());

    Connection conn = null;
    ResultSet rs = null;
    try {
        conn = DataSourceUtils.getConnection(dataSource);

        final DatabaseMetaData meta = conn.getMetaData();
        final String schema = dbSchema;

        rs = meta.getTables(null, schema, null, new String[] { "TABLE" });

        final Set<String> tableNames = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
        while (rs.next()) {
            String tableName = rs.getString("TABLE_NAME");
            LOG.debug("Found table {}", tableName);
            if (isTableAllowed(tableName)) {
                tableNames.add(tableName);
            }
        }

        LOG.debug("Tables to be exported {}", tableNames);

        // then sort tables based on foreign keys and dump
        for (String tableName : sortByForeignKeys(conn, tableNames)) {
            try {
                doExportTable(handler, conn, tableName, TABLES_TO_BE_FILTERED.get(tableName.toUpperCase()));
            } catch (Exception e) {
                LOG.error("Failure exporting table {}", tableName, e);
            }
        }
    } catch (SQLException e) {
        LOG.error("While exporting database content", e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                LOG.error("While closing tables result set", e);
            }
        }

        DataSourceUtils.releaseConnection(conn, dataSource);
        if (conn != null) {
            try {
                if (!conn.isClosed()) {
                    conn.close();
                }
            } catch (SQLException e) {
                LOG.error("While releasing connection", e);
            }
        }
    }

    handler.endElement("", "", ROOT_ELEMENT);
    handler.endDocument();
}
From source file: weave.servlets.AdminService.java

/**
 * Return a list of Client Config files from docroot
 *
 * @return A list of (xml) client config files existing in the docroot
 *         folder.
 */
public String[] getWeaveFileNames(String configConnectionName, String password, Boolean showAllFiles)
        throws RemoteException {
    ISQLConfig config = checkPasswordAndGetConfig(configConnectionName, password);
    ConnectionInfo info = config.getConnectionInfo(configConnectionName);
    File[] files = null;
    List<String> listOfFiles = new ArrayList<String>();
    FilenameFilter fileFilter = new FilenameFilter() {
        public boolean accept(File dir, String fileName) {
            return fileName.endsWith(".weave") || fileName.endsWith(".xml");
        }
    };

    if (showAllFiles == true) {
        try {
            String root = docrootPath;
            File rootFolder = new File(root);
            files = rootFolder.listFiles();

            for (File f : files) {
                if (!f.isDirectory())
                    continue;
                File[] configs = f.listFiles(fileFilter);
                for (File configfile : configs) {
                    listOfFiles.add(f.getName() + "/" + configfile.getName());
                }
            }
        } catch (SecurityException e) {
            throw new RemoteException("Permission error reading directory.", e);
        }
    }

    String path = docrootPath;
    if (!showAllFiles && info.folderName.length() > 0)
        path = path + info.folderName + "/";

    File docrootFolder = new File(path);

    try {
        docrootFolder.mkdirs();
        files = docrootFolder.listFiles(fileFilter);
        for (File file : files) {
            if (file.isFile()) {
                // System.out.println(file.getName());
                listOfFiles.add(((!showAllFiles && info.folderName.length() > 0) ? info.folderName + "/" : "")
                        + file.getName().toString());
            }
        }
    } catch (SecurityException e) {
        throw new RemoteException("Permission error reading directory.", e);
    }

    Collections.sort(listOfFiles, String.CASE_INSENSITIVE_ORDER);
    return ListUtils.toStringArray(listOfFiles);
}
From source file: org.apache.hadoop.security.KDiag.java

/**
 * Execute diagnostics.
 * <p>
 * Things it would be nice if UGI made accessible
 * <ol>
 *   <li>A way to enable JAAS debug programatically</li>
 *   <li>Access to the TGT</li>
 * </ol>
 * @return true if security was enabled and all probes were successful
 * @throws KerberosDiagsFailure explicitly raised failure
 * @throws Exception other security problems
 */
@SuppressWarnings("deprecation")
public boolean execute() throws Exception {

    title("Kerberos Diagnostics scan at %s", new Date(System.currentTimeMillis()));

    // check that the machine has a name
    println("Hostname = %s", InetAddress.getLocalHost().getCanonicalHostName());

    println("%s = %d", ARG_KEYLEN, minKeyLength);
    println("%s = %s", ARG_KEYTAB, keytab);
    println("%s = %s", ARG_PRINCIPAL, principal);
    println("%s = %s", ARG_VERIFYSHORTNAME, checkShortName);

    // Fail fast on a JVM without JCE installed.
    validateKeyLength();

    // look at realm
    println("JVM Kerberos Login Module = %s", getKrb5LoginModuleName());

    title("Core System Properties");
    for (String prop : new String[] { "user.name", "java.version", "java.vendor",
            JAVA_SECURITY_KRB5_CONF, JAVA_SECURITY_KRB5_REALM, JAVA_SECURITY_KRB5_KDC_ADDRESS,
            SUN_SECURITY_KRB5_DEBUG, SUN_SECURITY_SPNEGO_DEBUG, SUN_SECURITY_JAAS_FILE }) {
        printSysprop(prop);
    }
    endln();

    title("All System Properties");
    ArrayList<String> propList = new ArrayList<>(System.getProperties().stringPropertyNames());
    Collections.sort(propList, String.CASE_INSENSITIVE_ORDER);
    for (String s : propList) {
        printSysprop(s);
    }
    endln();

    title("Environment Variables");
    for (String env : new String[] { HADOOP_JAAS_DEBUG, KRB5_CCNAME, HADOOP_USER_NAME, HADOOP_PROXY_USER,
            HADOOP_TOKEN_FILE_LOCATION, "HADOOP_SECURE_LOG", "HADOOP_OPTS", "HADOOP_CLIENT_OPTS", }) {
        printEnv(env);
    }
    endln();

    title("Configuration Options");
    for (String prop : new String[] { KERBEROS_KINIT_COMMAND, HADOOP_SECURITY_AUTHENTICATION,
            HADOOP_SECURITY_AUTHORIZATION,
            "hadoop.kerberos.min.seconds.before.relogin", // not in 2.6
            "hadoop.security.dns.interface", // not in 2.6
            "hadoop.security.dns.nameserver", // not in 2.6
            HADOOP_RPC_PROTECTION, HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS,
            HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX, HADOOP_SECURITY_GROUP_MAPPING,
            "hadoop.security.impersonation.provider.class", // not in 2.6
            DFS_DATA_TRANSFER_PROTECTION, // HDFS
            DFS_DATA_TRANSFER_SASLPROPERTIES_RESOLVER_CLASS // HDFS
    }) {
        printConfOpt(prop);
    }

    // check that authentication is enabled
    Configuration conf = getConf();
    if (isSimpleAuthentication(conf)) {
        println(HADOOP_AUTHENTICATION_IS_DISABLED);
        failif(securityRequired, CAT_CONFIG, HADOOP_AUTHENTICATION_IS_DISABLED);
        // no security, warn
        LOG.warn("Security is not enabled for the Hadoop cluster");
    } else {
        if (isSimpleAuthentication(new Configuration())) {
            LOG.warn("The default cluster security is insecure");
            failif(securityRequired, CAT_CONFIG, HADOOP_AUTHENTICATION_IS_DISABLED);
        }
    }

    // now the big test: login, then try again
    boolean krb5Debug = getAndSet(SUN_SECURITY_KRB5_DEBUG);
    boolean spnegoDebug = getAndSet(SUN_SECURITY_SPNEGO_DEBUG);
    try {
        UserGroupInformation.setConfiguration(conf);
        validateKrb5File();
        printDefaultRealm();
        validateSasl(HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS);
        if (conf.get(DFS_DATA_TRANSFER_SASLPROPERTIES_RESOLVER_CLASS) != null) {
            validateSasl(DFS_DATA_TRANSFER_SASLPROPERTIES_RESOLVER_CLASS);
        }
        validateKinitExecutable();
        validateJAAS(jaas);
        validateNTPConf();

        if (checkShortName) {
            validateShortName();
        }

        if (!nologin) {
            title("Logging in");
            if (keytab != null) {
                dumpKeytab(keytab);
                loginFromKeytab();
            } else {
                UserGroupInformation loginUser = getLoginUser();
                dumpUGI("Log in user", loginUser);
                validateUGI("Login user", loginUser);
                println("Ticket based login: %b", isLoginTicketBased());
                println("Keytab based login: %b", isLoginKeytabBased());
            }
        }

        return true;
    } finally {
        // restore original system properties
        System.setProperty(SUN_SECURITY_KRB5_DEBUG, Boolean.toString(krb5Debug));
        System.setProperty(SUN_SECURITY_SPNEGO_DEBUG, Boolean.toString(spnegoDebug));
    }
}
From source file: org.forgerock.openicf.maven.ConnectorDocBuilder.java

/**
 * Execute the generation of the report.
 *
 * @throws org.apache.maven.reporting.MavenReportException
 *             if any
 */
protected void executeReport() throws MojoExecutionException {
    List<ConnectorInfo> infoList = null;
    try {
        infoList = listAllConnectorInfo();
    } catch (Exception e) {
        handler.getLog().error("Failed to get the ConnectorInfoManager", e);
        return;
    }

    for (ConnectorInfo info : infoList) {
        handler.getLog().debug("Processing ConnectorInfo: " + info.toString());
        try {
            Context context = new VelocityContext();
            context.put("connectorInfo", info);
            context.put("connectorDisplayName", info.getConnectorDisplayName());
            context.put("connectorCategory", info.getConnectorCategory());
            context.put("bookName",
                    handler.getMavenProject().getArtifactId() + "-" + handler.getMavenProject().getVersion());

            String connectorName = info.getConnectorKey().getConnectorName()
                    .substring(info.getConnectorKey().getConnectorName().lastIndexOf('.') + 1).toLowerCase();
            if (connectorName.endsWith("connector")) {
                connectorName = connectorName.substring(0, connectorName.length() - 9) + "-connector";
            }
            context.put("connectorName", connectorName);
            context.put("uniqueConnectorName",
                    info.getConnectorKey().getConnectorName().replaceAll("\\.", "-") + "-"
                            + info.getConnectorKey().getBundleVersion());

            APIConfiguration config = info.createDefaultAPIConfiguration();
            context.put("APIConfiguration", config);

            try {
                if (config.getSupportedOperations().contains(SchemaApiOp.class)) {
                    Schema schema = null;
                    try {
                        APIConfiguration facadeConfig = info.createDefaultAPIConfiguration();
                        if (null != handler.getConfigurationProperties()) {
                            handler.getConfigurationProperties()
                                    .mergeConfigurationProperties(facadeConfig.getConfigurationProperties());
                        }
                        schema = ConnectorFacadeFactory.getInstance().newInstance(facadeConfig).schema();
                    } catch (Throwable t) {
                        handler.getLog().debug("Getting Schema with ConnectorFacade", t);
                    }
                    if (null == schema && info instanceof LocalConnectorInfoImpl) {
                        Class<? extends Connector> connectorClass =
                                ((LocalConnectorInfoImpl) info).getConnectorClass();
                        try {
                            SchemaOp connector = (SchemaOp) connectorClass.newInstance();
                            schema = connector.schema();
                        } catch (Throwable t) {
                            handler.getLog().debug("Getting Schema with Connector Instance", t);
                        }
                    }
                    if (null != schema) {
                        SortedMap<Pair<String, String>, List<Map<String, Object>>> operationOptionsMap =
                                new TreeMap<Pair<String, String>, List<Map<String, Object>>>(PAIR_COMPARATOR);
                        for (Class<? extends APIOperation> op : OPERATIONS) {
                            if (SchemaApiOp.class.equals(op) || TestApiOp.class.equals(op)) {
                                continue;
                            }
                            List<Map<String, Object>> optionList = null;
                            for (OperationOptionInfo optionInfo : schema.getSupportedOptionsByOperation(op)) {
                                Map<String, Object> optionInfoMap = new HashMap<String, Object>();
                                optionInfoMap.put("name", optionInfo.getName());
                                optionInfoMap.put("type", optionInfo.getType().getSimpleName());
                                optionInfoMap.put("description", info.getMessages().format(
                                        optionInfo.getName() + ".help",
                                        "Additional description is not available"));
                                if (null == optionList) {
                                    optionList = new ArrayList<Map<String, Object>>();
                                }
                                optionList.add(optionInfoMap);
                            }
                            if (null != optionList) {
                                operationOptionsMap.put(OP_DICTIONARY.get(op), optionList);
                            }
                        }

                        List<Map<String, Object>> objectClasses = new ArrayList<Map<String, Object>>();
                        Set<Class<? extends APIOperation>> operationSet =
                                new TreeSet<Class<? extends APIOperation>>(
                                        new Comparator<Class<? extends APIOperation>>() {
                                            public int compare(Class<? extends APIOperation> o1,
                                                    Class<? extends APIOperation> o2) {
                                                return String.CASE_INSENSITIVE_ORDER
                                                        .compare(o1.getCanonicalName(), o2.getCanonicalName());
                                            }
                                        });
                        operationSet.addAll(config.getSupportedOperations());
                        operationSet.retainAll(OBJECTCLASS_OPERATIONS);

                        for (ObjectClassInfo objectClassInfo : schema.getObjectClassInfo()) {
                            Map<String, Object> objectClassInfoMap = new HashMap<String, Object>();
                            ObjectClass oc = new ObjectClass(objectClassInfo.getType());
                            objectClassInfoMap.put("name", objectClassInfo.getType());
                            objectClassInfoMap.put("displayName",
                                    info.getMessages().format(oc.getDisplayNameKey(), oc.getObjectClassValue()));
                            objectClassInfoMap.put("attributes", objectClassInfo.getAttributeInfo());

                            boolean limited = false;
                            List<Pair<String, String>> operations = new ArrayList<Pair<String, String>>();
                            for (Class<? extends APIOperation> op : operationSet) {
                                if (schema.getSupportedObjectClassesByOperation(op).contains(objectClassInfo)) {
                                    operations.add(OP_DICTIONARY.get(op));
                                } else {
                                    limited = true;
                                }
                            }
                            objectClassInfoMap.put("operations", limited ? operations : null);
                            objectClasses.add(objectClassInfoMap);
                        }
                        context.put("schema", schema);
                        context.put("operationOptions", operationOptionsMap);
                        context.put("objectClasses", objectClasses);
                    }
                }
            } catch (Throwable e) {
                if (handler.getLog().isDebugEnabled()) {
                    handler.getLog().debug("Getting the default Schema.", e);
                }
            }

            try {
                Set<Pair<String, String>> interfaces = new TreeSet<Pair<String, String>>(PAIR_COMPARATOR);
                for (Class<? extends APIOperation> clazz : config.getSupportedOperations()) {
                    if (OP_DICTIONARY.containsKey(clazz)) {
                        interfaces.add(OP_DICTIONARY.get(clazz));
                    }
                }
                context.put("connectorInterfaces", interfaces);
            } catch (Throwable e) {
                handler.getLog().error("Getting the connector interfaces.", e);
            }

            Map<String, List<Map<String, Object>>> configurationTable =
                    new LinkedHashMap<String, List<Map<String, Object>>>();
            for (String propertyName : config.getConfigurationProperties().getPropertyNames()) {
                ConfigurationProperty property = config.getConfigurationProperties().getProperty(propertyName);
                String groupKey = property.getGroup("Configuration");
                List<Map<String, Object>> configurationGroup = configurationTable.get(groupKey);
                if (configurationGroup == null) {
                    configurationGroup = new ArrayList<Map<String, Object>>();
                    configurationTable.put(groupKey, configurationGroup);
                }
                Map<String, Object> propertyMap = new HashMap<String, Object>();
                propertyMap.put("name", propertyName);
                propertyMap.put("type", property.getType().getSimpleName());
                propertyMap.put("property", property);
                propertyMap.put("required", property.isRequired());
                propertyMap.put("operations", convertOperations(property.getOperations()));
                propertyMap.put("confidential", property.isConfidential());
                propertyMap.put("description",
                        convertHTMLtoDocBook(property.getHelpMessage("Description is not available")));
                configurationGroup.add(propertyMap);
            }
            context.put("configurationProperties", configurationTable);
            context.put("connectorPoolingSupported", config.isConnectorPoolingSupported());
            context.put("PathTool", new PathTool());
            context.put("FileUtils", new FileUtils());
            context.put("StringUtils", new org.codehaus.plexus.util.StringUtils());
            context.put("ConnectorUtils", new ConnectorUtils());
            context.put("i18n", handler.getI18N());
            context.put("project", handler.getMavenProject());

            handler.generate(this, context, connectorName);
        } catch (ResourceNotFoundException e) {
            throw new MojoExecutionException("Resource not found.", e);
        } catch (VelocityException e) {
            throw new MojoExecutionException(e.toString(), e);
        }
    }
}
From source file: com.microsoft.windowsazure.mobileservices.sdk.testapp.test.SystemPropertiesTests.java

private void insertDoesNotRemovePropertyWhenIdIsNull(final String property) throws Throwable {
    final String tableName = "MyTableName";

    final String jsonTestSystemProperty = property.replace("\\", "\\\\").replace("\"", "\\\"");

    final String responseContent = "{\"id\":\"an id\",\"String\":\"Hey\"}";

    MobileServiceClient client = null;
    try {
        client = new MobileServiceClient(appUrl, appKey, getInstrumentation().getTargetContext());
    } catch (MalformedURLException e) {
        e.printStackTrace();
    }

    // Add a filter to handle the request and create a new json
    // object with an id defined
    client = client.withFilter(getTestFilter(responseContent));

    client = client.withFilter(new ServiceFilter() {
        @Override
        public ListenableFuture<ServiceFilterResponse> handleRequest(ServiceFilterRequest request,
                NextServiceFilterCallback nextServiceFilterCallback) {
            String content = request.getContent();
            JsonObject obj = new JsonParser().parse(content).getAsJsonObject();

            Map<String, JsonElement> properties =
                    new TreeMap<String, JsonElement>(String.CASE_INSENSITIVE_ORDER);

            for (Entry<String, JsonElement> entry : obj.entrySet()) {
                properties.put(entry.getKey(), entry.getValue());
            }

            assertFalse(properties.containsKey("id"));
            assertTrue(properties.containsKey("String"));
            assertTrue(properties.containsKey(property));

            return nextServiceFilterCallback.onNext(request);
        }
    });

    // Create get the MobileService table
    MobileServiceJsonTable msTable = client.getTable(tableName);

    JsonObject obj = new JsonParser()
            .parse("{\"id\":null,\"String\":\"what\",\"" + jsonTestSystemProperty + "\":\"a value\"}")
            .getAsJsonObject();

    try {
        // Call the insert method
        JsonObject jsonObject = msTable.insert(obj).get();

        // Asserts
        if (jsonObject == null) {
            fail("Expected result");
        }
    } catch (Exception exception) {
        fail(exception.getMessage());
    }
}
From source file: com.amazon.carbonado.repo.jdbc.JDBCStorableIntrospector.java

/**
 * Uses the given database connection to query database metadata. This is
 * used to bind storables to tables, and properties to columns. Other
 * checks are performed to ensure that storable type matches well with the
 * definition in the database.
 */
private static <S extends Storable> JDBCStorableInfo<S> examine(StorableInfo<S> mainInfo, Connection con,
        final String searchCatalog, final String searchSchema, SchemaResolver resolver,
        boolean primaryKeyCheckDisabled) throws SQLException, SupportException {
    final DatabaseMetaData meta = con.getMetaData();

    final String databaseProductName = meta.getDatabaseProductName();
    final String userName = meta.getUserName();

    String[] tableAliases;
    if (mainInfo.getAliasCount() > 0) {
        tableAliases = mainInfo.getAliases();
    } else {
        String name = mainInfo.getStorableType().getSimpleName();
        tableAliases = generateAliases(name);
    }

    // Try to find matching table from aliases.
    String catalog = null, schema = null, tableName = null, tableType = null;
    findName: {
        // The call to getTables may return several matching tables. This
        // map defines the "best" table type we'd like to use. The higher
        // the number the better.
        Map<String, Integer> fitnessMap = new HashMap<String, Integer>();
        fitnessMap.put("LOCAL TEMPORARY", 1);
        fitnessMap.put("GLOBAL TEMPORARY", 2);
        fitnessMap.put("VIEW", 3);
        fitnessMap.put("SYSTEM TABLE", 4);
        fitnessMap.put("TABLE", 5);
        fitnessMap.put("ALIAS", 6);
        fitnessMap.put("SYNONYM", 7);

        for (int i = 0; i < tableAliases.length; i++) {
            ResultSet rs = meta.getTables(searchCatalog, searchSchema, tableAliases[i], null);
            try {
                int bestFitness = 0;
                while (rs.next()) {
                    String type = rs.getString("TABLE_TYPE");
                    Integer fitness = fitnessMap.get(type);
                    if (fitness != null) {
                        String rsSchema = rs.getString("TABLE_SCHEM");
                        if (searchSchema == null) {
                            if (userName != null && userName.equalsIgnoreCase(rsSchema)) {
                                // Favor entities whose schema name matches
                                // the user name.
                                fitness += 7;
                            }
                        }
                        if (fitness > bestFitness) {
                            bestFitness = fitness;
                            catalog = rs.getString("TABLE_CAT");
                            schema = rsSchema;
                            tableName = rs.getString("TABLE_NAME");
                            tableType = type;
                        }
                    }
                }
            } finally {
                rs.close();
            }

            if (tableName != null) {
                // Found a match, so stop checking aliases.
                break;
            }
        }
    }

    if (tableName == null && !mainInfo.isIndependent()) {
        StringBuilder buf = new StringBuilder();
        buf.append("Unable to find matching table name for type \"");
        buf.append(mainInfo.getStorableType().getName());
        buf.append("\" by looking for ");
        appendToSentence(buf, tableAliases);
        buf.append(" with catalog " + searchCatalog + " and schema " + searchSchema);
        throw new MismatchException(buf.toString());
    }

    String qualifiedTableName = tableName;
    String resolvedTableName = tableName;

    // Oracle specific stuff...
    // TODO: Migrate this to OracleSupportStrategy.
    if (tableName != null && databaseProductName.toUpperCase().contains("ORACLE")) {
        if ("TABLE".equals(tableType) && searchSchema != null) {
            // Qualified table name references the schema. Used by SQL statements.
            qualifiedTableName = searchSchema + '.' + tableName;
        } else if ("SYNONYM".equals(tableType)) {
            // Try to get the real schema. This call is Oracle specific, however.
            String select = "SELECT TABLE_OWNER,TABLE_NAME " + "FROM ALL_SYNONYMS "
                    + "WHERE OWNER=? AND SYNONYM_NAME=?";

            PreparedStatement ps = con.prepareStatement(select);
            ps.setString(1, schema); // in Oracle, schema is the owner
            ps.setString(2, tableName);

            try {
                ResultSet rs = ps.executeQuery();
                try {
                    if (rs.next()) {
                        schema = rs.getString("TABLE_OWNER");
                        resolvedTableName = rs.getString("TABLE_NAME");
                    }
                } finally {
                    rs.close();
                }
            } finally {
                ps.close();
            }
        }
    }

    // Gather information on all columns such that metadata only needs to
    // be retrieved once.
    Map<String, ColumnInfo> columnMap = new TreeMap<String, ColumnInfo>(String.CASE_INSENSITIVE_ORDER);

    if (resolvedTableName != null) {
        ResultSet rs = meta.getColumns(catalog, schema, resolvedTableName, null);
        rs.setFetchSize(1000);
        try {
            while (rs.next()) {
                ColumnInfo info = new ColumnInfo(rs);
                columnMap.put(info.columnName, info);
            }
        } finally {
            rs.close();
        }
    }

    // Make sure that all properties have a corresponding column.
    Map<String, ? extends StorableProperty<S>> mainProperties = mainInfo.getAllProperties();
    Map<String, String> columnToProperty = new HashMap<String, String>();
    Map<String, JDBCStorableProperty<S>> jProperties =
            new LinkedHashMap<String, JDBCStorableProperty<S>>(mainProperties.size());

    ArrayList<String> errorMessages = new ArrayList<String>();

    for (StorableProperty<S> mainProperty : mainProperties.values()) {
        if (mainProperty.isDerived() || mainProperty.isJoin() || tableName == null) {
            jProperties.put(mainProperty.getName(), new JProperty<S>(mainProperty, primaryKeyCheckDisabled));
            continue;
        }

        String[] columnAliases;
        if (mainProperty.getAliasCount() > 0) {
            columnAliases = mainProperty.getAliases();
        } else {
            columnAliases = generateAliases(mainProperty.getName());
        }

        JDBCStorableProperty<S> jProperty = null;
        boolean addedError = false;

        findName: for (int i = 0; i < columnAliases.length; i++) {
            ColumnInfo columnInfo = columnMap.get(columnAliases[i]);
            if (columnInfo != null) {
                AccessInfo accessInfo = getAccessInfo(mainProperty, columnInfo.dataType,
                        columnInfo.dataTypeName, columnInfo.columnSize, columnInfo.decimalDigits);

                if (accessInfo == null) {
                    TypeDesc propertyType = TypeDesc.forClass(mainProperty.getType());
                    String message = "Property \"" + mainProperty.getName() + "\" has type \""
                            + propertyType.getFullName() + "\" which is incompatible with database type \""
                            + columnInfo.dataTypeName + '"';
                    if (columnInfo.decimalDigits > 0) {
                        message += " (decimal digits = " + columnInfo.decimalDigits + ')';
                    }
                    errorMessages.add(message);
                    addedError = true;
                    break findName;
                }

                if (columnInfo.nullable) {
                    if (!mainProperty.isNullable() && !mainProperty.isIndependent()) {
                        errorMessages.add(
                                "Property \"" + mainProperty.getName() + "\" must have a Nullable annotation");
                    }
                } else {
                    if (mainProperty.isNullable() && !mainProperty.isIndependent()) {
                        errorMessages.add("Property \"" + mainProperty.getName()
                                + "\" must not have a Nullable annotation");
                    }
                }

                boolean autoIncrement = mainProperty.isAutomatic();
                if (autoIncrement) {
                    // Need to execute a little query to check if column is
                    // auto-increment or not. This information is not available in
                    // the regular database metadata prior to jdk1.6.
                    PreparedStatement ps = con.prepareStatement(
                            "SELECT " + columnInfo.columnName + " FROM " + tableName + " WHERE 1=0");
                    try {
                        ResultSet rs = ps.executeQuery();
                        try {
                            autoIncrement = rs.getMetaData().isAutoIncrement(1);
                        } finally {
                            rs.close();
                        }
                    } finally {
                        ps.close();
                    }
                }

                jProperty = new JProperty<S>(mainProperty, columnInfo, autoIncrement, primaryKeyCheckDisabled,
                        accessInfo.mResultSetGet, accessInfo.mPreparedStatementSet, accessInfo.getAdapter());

                break findName;
            }
        }

        if (jProperty != null) {
            jProperties.put(mainProperty.getName(), jProperty);
            columnToProperty.put(jProperty.getColumnName(), jProperty.getName());
        } else {
            if (mainProperty.isIndependent()) {
                jProperties.put(mainProperty.getName(),
                        new JProperty<S>(mainProperty, primaryKeyCheckDisabled));
            } else if (!addedError) {
                StringBuilder buf = new StringBuilder();
                buf.append("Unable to find matching database column for property \"");
                buf.append(mainProperty.getName());
                buf.append("\" by looking for ");
                appendToSentence(buf, columnAliases);
                errorMessages.add(buf.toString());
            }
        }
    }

    if (errorMessages.size() > 0) {
        throw new MismatchException(mainInfo.getStorableType(), errorMessages);
    }

    // Now verify that primary or alternate keys match.
    if (resolvedTableName != null) checkPrimaryKey: {
        ResultSet rs;
        try {
            rs = meta.getPrimaryKeys(catalog, schema, resolvedTableName);
        } catch (SQLException e) {
            getLog().info("Unable to get primary keys for table \"" + resolvedTableName
                    + "\" with catalog " + catalog + " and schema " + schema + ": " + e);
            break checkPrimaryKey;
        }

        List<String> pkProps = new ArrayList<String>();
        try {
            while (rs.next()) {
                String columnName = rs.getString("COLUMN_NAME");
                String propertyName = columnToProperty.get(columnName);
                if (propertyName == null) {
                    errorMessages.add("Column \"" + columnName + "\" must be part of primary or alternate key");
                    continue;
                }
                pkProps.add(propertyName);
            }
        } finally {
            rs.close();
        }

        if (errorMessages.size() > 0) {
            // Skip any extra checks.
            break checkPrimaryKey;
        }

        if (pkProps.size() == 0) {
            // If no primary keys are reported, don't even bother checking.
            // There's no consistent way to get primary keys, and entities
            // like views and synonyms don't usually report primary keys.
            // A primary key might even be logically defined as a unique
            // constraint.
            break checkPrimaryKey;
        }

        if (matchesKey(pkProps, mainInfo.getPrimaryKey())) {
            // Good. Primary key in database is same as in Storable.
            break checkPrimaryKey;
        }

        // Check if Storable has an alternate key which matches the
        // database's primary key.
        boolean foundAnyAltKey = false;
        for (StorableKey<S> altKey : mainInfo.getAlternateKeys()) {
            if (matchesKey(pkProps, altKey)) {
                // Okay. Primary key in database matches a Storable
                // alternate key.
                foundAnyAltKey = true;
                // Also check that declared primary key is a strict subset
                // of the alternate key. If not, keep checking alt keys.
                if (matchesSubKey(pkProps, mainInfo.getPrimaryKey())) {
                    break checkPrimaryKey;
                }
            }
        }

        if (foundAnyAltKey) {
            errorMessages.add("Actual primary key matches a declared alternate key, "
                    + "but declared primary key must be a strict subset. "
                    + mainInfo.getPrimaryKey().getProperties() + " is not a subset of " + pkProps);
        } else {
            errorMessages.add("Actual primary key does not match any "
                    + "declared primary or alternate key: " + pkProps);
        }
    }

    if (errorMessages.size() > 0) {
        if (primaryKeyCheckDisabled) {
            for (String errorMessage : errorMessages) {
                getLog().warn("Suppressed error: " + errorMessage);
            }
            errorMessages.clear();
        } else {
            throw new MismatchException(mainInfo.getStorableType(), errorMessages);
        }
    }

    // IndexInfo is empty, as querying for it tends to cause a table analyze to run.
    IndexInfo[] indexInfo = new IndexInfo[0];

    if (needsQuotes(tableName)) {
        String quote = meta.getIdentifierQuoteString();
        if (quote != null && !quote.equals(" ")) {
            tableName = quote + tableName + quote;
            qualifiedTableName = quote + qualifiedTableName + quote;
        }
    }

    return new JInfo<S>(mainInfo, catalog, schema, tableName, qualifiedTableName, indexInfo, jProperties);
}
From source file: com.genentech.application.calcProps.SDFCalcProps.java

private static String calculate(String[] props, boolean predictTautomer, boolean dontFilter, boolean verbose,
        boolean debug, boolean printOnly, boolean addMolIndex, Set<Calculator> availCALCS, String inFile,
        String outFile) throws IOException, InterruptedException {
    String counterTag = "___sdfCalcProps_counter___";
    String savedTitleTag = "___sdfCalcProps_saved_title___";
    String tempFileRoot = "/tmp/sdfCalcProps.$$." + System.currentTimeMillis();
    String tempOrigFileName = tempFileRoot + ".orig.sdf";
    String filteredFileName = tempFileRoot + ".filtered.sdf";

    Set<Calculator> calculators = new LinkedHashSet<Calculator>();
    //Properties that depend on ionization state of the molecule ex. charge
    Set<Calculator> ionizedCalculators = new LinkedHashSet<Calculator>();
    //Properties that depend on the neutral molecule. ex. MW
    Set<Calculator> neutralCalculators = new LinkedHashSet<Calculator>();

    for (String prop : props) {
        //getCalculators takes care of getting all dependent calculators, recursively
        Set<Calculator> myCalcs = getCalculators(prop, availCALCS);
        //adding to a set, does not contain duplicates calculators
        calculators.addAll(myCalcs);
    }

    //divide calculators into those that require ionization and those that don't
    for (Calculator calc : calculators) {
        if (calc.requiresIonization()) {
            ionizedCalculators.add(calc);
        } else {
            neutralCalculators.add(calc);
        }
    }

    //get the set of SD tags that will be produced, each property produces a set of SD tags
    //using TreeSet to keep the SD tags in alphabetical order
    TreeSet<String> ionizedOutputTags = new TreeSet<String>();
    for (Calculator p : ionizedCalculators) {
        ionizedOutputTags.addAll(getOutputFields(p.getName(), ionizedCalculators, verbose));
    }

    TreeSet<String> neutralOutputTags = new TreeSet<String>();
    for (Calculator p : neutralCalculators) {
        neutralOutputTags.addAll(getOutputFields(p.getName(), neutralCalculators, verbose));
    }

    // The list of SD tags that will be produced
    // this set should be a union of tags from ionized and neutral tags
    // I guess I could just merge the treesets from ionized and neutral tags
    TreeSet<String> allOutputTags = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
    for (String prop : props) {
        allOutputTags.addAll(getOutputFields(prop, calculators, verbose));
    }

    if (debug) {
        System.err.println("=============================================");
        System.err.println("The following properties will be calculated.");
        printProperties(calculators, true);
        System.err.println("The following tags will be produced.");
        for (String t : allOutputTags) {
            System.err.print(t + " ");
        }
        System.err.println();
    }

    //The following tags will be produced, exit after printing
    if (printOnly) {
        StringBuilder outputTags = assembleTags(allOutputTags);
        System.out.println("echo '" + outputTags + "'");
        System.exit(0);
    }

    // special properties that dictate how molecules are preprocessed
    Calculator tautomerCalculator = null;
    Calculator filterCalculator = null;
    Calculator ionizeCalculator = null;
    for (Calculator calc : availCALCS) {
        if (calc.getName().equals("predictTautomer")) {
            tautomerCalculator = calc;
        }
        if (calc.getName().equals("filter")) {
            filterCalculator = calc;
        }
        if (calc.getName().equals("ionize")) {
            ionizeCalculator = calc;
        }
    }

    // assemble the command line base on the properties that were requested,
    // this is the most complicated part of this program "assembleCommands"

    //get a string of piped commands for calculating properties that depend on ionization state
    ionizedCalculators = consolidateByAggregationId(ionizedCalculators);
    String ionizedCommand = assembleCommands(ionizedCalculators, verbose, debug, counterTag, ionizedOutputTags);
    if (ionizedCommand != null) {
        //prepend command to generated ionized molecules
        ionizedCommand = ionizeCalculator.getProgName() + " " + ionizeCalculator.getProgOps() + " | "
                + ionizedCommand;
    }

    //get a string of piped commands for calculating properties on the neutral molecule
    neutralCalculators = consolidateByAggregationId(neutralCalculators);
    String neutralCommand = assembleCommands(neutralCalculators, verbose, debug, counterTag, allOutputTags);

    //save a temp file that contains a unique identifier
    //run filter to get rid of "bad" molecules
    String command = "sdfTagTool.csh -copy TITLE=" + savedTitleTag + " -addCounter -counterTag " + counterTag
            + " -title " + counterTag + " -in " + inFile + " -out .sdf | tee " + tempOrigFileName + " | "
            + filterCalculator.getProgName() + " " + filterCalculator.getProgOps()
            + " | sdfTagTool.csh -in .sdf -out .sdf -keep " + counterTag;

    //different command if not filtering
    if (dontFilter) {
        command = "sdfTagTool.csh -copy TITLE=" + savedTitleTag + " -addCounter -counterTag " + counterTag
                + " -title " + counterTag + " -in " + inFile + " -out .sdf | tee " + tempOrigFileName
                + " | sdfTagTool.csh -in .sdf -out .sdf -keep " + counterTag;
    }

    String cleanUpCommand = "sdfTagTool.csh -in .sdf -title " + savedTitleTag + " -out .sdf "
            + " | sdfTagTool.csh -in .sdf -remove \"" + counterTag + "|" + savedTitleTag + "\" -out " + outFile;

    //command to create a Mol_Index tag for each molecule
    if (addMolIndex) {
        cleanUpCommand = "sdfTagTool.csh -in .sdf -title " + savedTitleTag + " -out .sdf "
                + " -format 'Mol_Index=Mol_{" + counterTag + "}'"
                + " | sdfTagTool.csh -in .sdf -remove \"" + counterTag + "|" + savedTitleTag + "\" -out "
                + outFile;
    }

    //predict tautomer
    if (predictTautomer) {
        command = command + " | " + tautomerCalculator.getProgName() + " " + tautomerCalculator.getProgOps()
                + " > " + filteredFileName;
    } else {
        command = command + " > " + filteredFileName;
    }

    // command = command + "; cat " + filteredFileName;
    command = command + "; echo NCCO | babel -in .smi -out .sdf >> " + filteredFileName + "; cat "
            + filteredFileName;

    if (ionizedCommand != null && neutralCommand != null) {
        // merge temp file with the two tab files
        String mergeCommand = "sdfTabMerger.csh -sdf " + tempOrigFileName + " -tab - -mergeTag " + counterTag
                + " -mergeCol " + counterTag + " -out .sdf | " + cleanUpCommand;
        command = command + " | " + ionizedCommand + " | sdfTabMerger.csh -tab - -sdf " + filteredFileName
                + " -mergeTag " + counterTag + " -mergeCol " + counterTag + " -out .sdf | " + neutralCommand
                + " | " + mergeCommand;
    } else if (ionizedCommand == null && neutralCommand != null) {
        String mergeCommand = "sdfTabMerger.csh -sdf " + tempOrigFileName + " -tab - -mergeTag " + counterTag
                + " -mergeCol " + counterTag + " -out .sdf | " + cleanUpCommand;
        command = command + " | " + neutralCommand + " | " + mergeCommand;
    } else if (ionizedCommand != null && neutralCommand == null) {
        String mergeCommand = "sdfTabMerger.csh -sdf " + tempOrigFileName + " -tab - -mergeTag " + counterTag
                + " -mergeCol " + counterTag + " -out .sdf | " + cleanUpCommand;
        command = command + " | " + ionizedCommand + " | " + mergeCommand;
    }

    if (debug) {
        System.err.println("ionized command:\n" + ionizedCommand);
        System.err.println("neutral command:\n" + neutralCommand);
        System.err.println("Command to be executed:\n" + command);
    }

    return (command);
}
From source file: org.apache.directory.fortress.core.model.AdminRole.java

/**
 * Set a User OU attribute to be stored on the AdminRole entity.
 *
 * @param osU is a User OU that maps to 'ftOSU' attribute on 'ftPools' aux object class.
 */
@Override
public void setOsU(String osU) {
    if (this.osUs == null) {
        // create Set with case insensitive comparator:
        osUs = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
    }
    osUs.add(osU);
}
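A recurring pattern in the examples above (ContentExporter, SystemPropertiesTests, JDBCStorableIntrospector, AdminRole) is passing the comparator to a TreeSet or TreeMap constructor so that membership tests and lookups ignore case. A minimal sketch of that pattern, assuming nothing beyond the JDK (the class name and sample values are illustrative only):

import java.util.Set;
import java.util.TreeSet;

public class CaseInsensitiveSetDemo {
    public static void main(String[] args) {
        Set<String> tableNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        tableNames.add("SYNCOPE_USER");
        // The comparator treats these as equal, so the second add is rejected.
        System.out.println(tableNames.add("syncope_user")); // false
        System.out.println(tableNames.contains("Syncope_User")); // true
    }
}

Note that such a set is deliberately inconsistent with String.equals: strings differing only in case collapse to a single entry, which is exactly the behavior these examples rely on.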