List of usage examples for java.util.regex.Matcher.quoteReplacement
public static String quoteReplacement(String s)
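quoteReplacement returns a literal replacement string for the given input: it escapes backslashes (\) and dollar signs ($) so that Matcher.replaceAll, Matcher.appendReplacement and String.replaceAll insert the value verbatim instead of reading $1 as a group reference or a lone \ as an escape. A minimal, self-contained sketch (class name and values are illustrative only):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class QuoteReplacementDemo {
    public static void main(String[] args) {
        String template = "Total: {amount}";
        String value = "$5 (approx. C:\\tmp rate)"; // contains '$' and '\', both special in replacement strings

        // Without quoteReplacement, replaceAll would throw (group reference / dangling escape errors).
        String safe = template.replaceAll(Pattern.quote("{amount}"), Matcher.quoteReplacement(value));
        System.out.println(safe); // Total: $5 (approx. C:\tmp rate)
    }
}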
From source file: pt.webdetails.cdf.dd.AbstractDashboard.java

public String render(IParameterProvider params, String dashboardContext) {
    logger.debug("[Timing] Starting render proper: " + (new SimpleDateFormat("H:m:s.S")).format(new Date()));

    String quotedFooter = Matcher.quoteReplacement(this.footer),
            quotedHeader = Matcher.quoteReplacement(this.header + dashboardContext),
            quotedContent = Matcher.quoteReplacement(getContent());

    logger.debug("[Timing] Replacing tokens: " + (new SimpleDateFormat("H:m:s.S")).format(new Date()));

    String result = this.template.replaceAll(DASHBOARD_HEADER_TAG, quotedHeader) // Replace the Header
            .replaceAll(DASHBOARD_FOOTER_TAG, quotedFooter) // And the Footer
            .replaceAll(DASHBOARD_CONTENT_TAG, quotedContent); // And even the content!

    logger.debug("[Timing] Finished render proper: " + (new SimpleDateFormat("H:m:s.S")).format(new Date()));
    return result;
}
From source file: au.org.ala.biocache.dao.SearchDAOImpl.java

/**
 * Format the search input query for a full-text search.
 *
 * This includes constructing a user friendly version of the query to
 * be used for display purposes.
 *
 * TODO Fix this to use a state. REVISE!!
 *
 * @param searchParams
 */
protected void formatSearchQuery(SpatialSearchRequestParams searchParams, boolean forceQueryFormat) {
    //Only format the query if it doesn't already supply a formattedQuery.
    if (forceQueryFormat || StringUtils.isEmpty(searchParams.getFormattedQuery())) {
        // set the query
        String query = searchParams.getQ();

        //cached query parameters are already formatted
        if (query.contains("qid:")) {
            Matcher matcher = qidPattern.matcher(query);
            long qid = 0;
            while (matcher.find()) {
                String value = matcher.group();
                try {
                    String qidValue = SearchUtils.stripEscapedQuotes(value.substring(4));
                    qid = Long.parseLong(qidValue);
                    ParamsCacheObject pco = ParamsCache.get(qid);
                    if (pco != null) {
                        searchParams.setQId(qid);
                        searchParams.setQ(pco.getQ());
                        //add the fqs from the params cache
                        if (pco.getFqs() != null) {
                            String[] currentFqs = searchParams.getFq();
                            if (currentFqs == null || (currentFqs.length == 1 && currentFqs[0].length() == 0)) {
                                searchParams.setFq(pco.getFqs());
                            } else {
                                //we need to add the current Fqs together
                                searchParams.setFq((String[]) ArrayUtils.addAll(currentFqs, pco.getFqs()));
                            }
                        }
                        String displayString = pco.getDisplayString();
                        if (StringUtils.isNotEmpty(pco.getWkt())) {
                            displayString = displayString + " within user defined polygon";
                        }
                        searchParams.setDisplayString(displayString);

                        if (searchParams instanceof SpatialSearchRequestParams) {
                            ((SpatialSearchRequestParams) searchParams).setWkt(pco.getWkt());
                        } else if (StringUtils.isNotEmpty(pco.getWkt())) {
                            String originalQ = searchParams.getQ();
                            searchParams.setQ(spatialField + ":\"Intersects(" + pco.getWkt() + ")");
                            if (StringUtils.isNotEmpty(originalQ))
                                searchParams.setQ(searchParams.getQ() + " AND " + originalQ);
                        }
                        searchParams.setFormattedQuery(searchParams.getQ());
                        return;
                    }
                } catch (NumberFormatException e) {
                } catch (ParamsCacheMissingException e) {
                }
            }
        }

        StringBuffer queryString = new StringBuffer();
        StringBuffer displaySb = new StringBuffer();
        String displayString = query;

        // look for field:term sub queries and catch fields: matched_name & matched_name_children
        if (query.contains(":")) {
            // will match foo:bar, foo:"bar bash" & foo:bar\ bash
            Matcher matcher = termPattern.matcher(query);
            queryString.setLength(0);

            while (matcher.find()) {
                String value = matcher.group();
                logger.debug("term query: " + value);
                logger.debug("groups: " + matcher.group(1) + "|" + matcher.group(2));

                if ("matched_name".equals(matcher.group(1))) {
                    // name -> accepted taxon name (taxon_name:)
                    String field = matcher.group(1);
                    String queryText = matcher.group(2);

                    if (queryText != null && !queryText.isEmpty()) {
                        String guid = speciesLookupService.getGuidForName(queryText.replaceAll("\"", "")); // strip any quotes
                        logger.info("GUID for " + queryText + " = " + guid);

                        if (guid != null && !guid.isEmpty()) {
                            String acceptedName = speciesLookupService.getAcceptedNameForGuid(guid); // strip any quotes
                            logger.info("acceptedName for " + queryText + " = " + acceptedName);

                            if (acceptedName != null && !acceptedName.isEmpty()) {
                                field = "taxon_name";
                                queryText = acceptedName;
                            }
                        } else {
                            field = "taxon_name";
                        }

                        // also change the display query
                        displayString = displayString.replaceAll("matched_name", "taxon_name");
                    }

                    if (StringUtils.containsAny(queryText, CHARS) && !queryText.startsWith("[")) {
                        // quote any text that has spaces or colons but not range queries
                        queryText = QUOTE + queryText + QUOTE;
                    }

                    logger.debug("queryText: " + queryText);
                    matcher.appendReplacement(queryString, matcher.quoteReplacement(field + ":" + queryText));

                } else if ("matched_name_children".equals(matcher.group(1))) {
                    String field = matcher.group(1);
                    String queryText = matcher.group(2);

                    if (queryText != null && !queryText.isEmpty()) {
                        String guid = speciesLookupService.getGuidForName(queryText.replaceAll("\"", "")); // strip any quotes
                        logger.info("GUID for " + queryText + " = " + guid);

                        if (guid != null && !guid.isEmpty()) {
                            field = "lsid";
                            queryText = guid;
                        } else {
                            field = "taxon_name";
                        }
                    }

                    if (StringUtils.containsAny(queryText, CHARS) && !queryText.startsWith("[")) {
                        // quote any text that has spaces or colons but not range queries
                        queryText = QUOTE + queryText + QUOTE;
                    }

                    matcher.appendReplacement(queryString, matcher.quoteReplacement(field + ":" + queryText));
                } else {
                    matcher.appendReplacement(queryString, matcher.quoteReplacement(value));
                }
            }
            matcher.appendTail(queryString);
            query = queryString.toString();
        }

        //if the query string contains lsid: we will need to replace it with the corresponding lft range
        int last = 0;
        if (query.contains("lsid:")) {
            Matcher matcher = lsidPattern.matcher(query);
            queryString.setLength(0);
            while (matcher.find()) {
                //only want to process the "lsid" if it does not represent taxon_concept_lsid etc...
                if ((matcher.start() > 0 && query.charAt(matcher.start() - 1) != '_') || matcher.start() == 0) {
                    String value = matcher.group();
                    logger.debug("preprocessing " + value);
                    String lsid = matcher.group(2);
                    if (lsid.contains("\"")) {
                        //remove surrounding quotes, if present
                        lsid = lsid.replaceAll("\"", "");
                    }
                    if (lsid.contains("\\")) {
                        //remove internal \ chars, if present
                        //noinspection MalformedRegex
                        lsid = lsid.replaceAll("\\\\", "");
                    }
                    logger.debug("lsid = " + lsid);
                    String[] values = searchUtils.getTaxonSearch(lsid);
                    String lsidHeader = matcher.group(1).length() > 0 ? matcher.group(1) : "";
                    matcher.appendReplacement(queryString, lsidHeader + values[0]);
                    displaySb.append(query.substring(last, matcher.start()));
                    if (!values[1].startsWith("taxon_concept_lsid:"))
                        displaySb.append(lsidHeader).append("<span class='lsid' id='").append(lsid).append("'>")
                                .append(values[1]).append("</span>");
                    else
                        displaySb.append(lsidHeader).append(values[1]);
                    last = matcher.end();
                    //matcher.appendReplacement(displayString, values[1]);
                }
            }
            matcher.appendTail(queryString);
            displaySb.append(query.substring(last, query.length()));
            query = queryString.toString();
            displayString = displaySb.toString();
        }

        if (query.contains("urn")) {
            //escape the URN strings before escaping the rest; this avoids the issue with attempting to search on a urn field
            Matcher matcher = urnPattern.matcher(query);
            queryString.setLength(0);
            while (matcher.find()) {
                String value = matcher.group();
                logger.debug("escaping lsid urns " + value);
                matcher.appendReplacement(queryString, prepareSolrStringForReplacement(value));
            }
            matcher.appendTail(queryString);
            query = queryString.toString();
        }

        if (query.contains("Intersects")) {
            Matcher matcher = spatialPattern.matcher(query);
            if (matcher.find()) {
                String spatial = matcher.group();
                SpatialSearchRequestParams subQuery = new SpatialSearchRequestParams();
                logger.debug("region Start : " + matcher.regionStart() + " start : " + matcher.start()
                        + " spatial length " + spatial.length() + " query length " + query.length());
                //format the search query of the remaining text only
                subQuery.setQ(query.substring(matcher.start() + spatial.length(), query.length()));
                //format the remaining query
                formatSearchQuery(subQuery);
                //now append Q's together
                queryString.setLength(0);
                //need to include the prefix
                queryString.append(query.substring(0, matcher.start()));
                queryString.append(spatial);
                queryString.append(subQuery.getFormattedQuery());
                searchParams.setFormattedQuery(queryString.toString());
                //add the spatial information to the display string
                if (spatial.contains("circles")) {
                    String[] values = spatial.substring(spatial.indexOf("=") + 1, spatial.indexOf("}")).split(",");
                    if (values.length == 3) {
                        displaySb.setLength(0);
                        displaySb.append(subQuery.getDisplayString());
                        displaySb.append(" - within ").append(values[2]).append(" km of point(")
                                .append(values[0]).append(",").append(values[1]).append(")");
                        searchParams.setDisplayString(displaySb.toString());
                    }
                } else {
                    searchParams.setDisplayString(subQuery.getDisplayString() + " - within supplied region");
                }
            }
        } else {
            //escape reserved characters unless the colon represents a field name colon
            queryString.setLength(0);
            Matcher matcher = spacesPattern.matcher(query);
            while (matcher.find()) {
                String value = matcher.group();
                //special cases to ignore from character escaping
                //if the value is a single - or * it means that we don't want to escape it as it is likely
                //to have occurred in the following situation -(occurrence_date:[* TO *]) or *:*
                if (!value.equals("-") && /*!value.equals("*") && !value.equals("*:*") && */ !value.endsWith("*")) {
                    //split on the colon
                    String[] bits = StringUtils.split(value, ":", 2);
                    if (bits.length == 2) {
                        if (!bits[0].contains("urn") && !bits[1].contains("urn\\"))
                            matcher.appendReplacement(queryString,
                                    bits[0] + ":" + prepareSolrStringForReplacement(bits[1]));
                    } else if (!value.endsWith(":")) {
                        //need to ignore field names where the : is at the end because the pattern matching
                        //will return field_name: as a match when it has a double quoted value
                        //default behaviour is to escape all
                        matcher.appendReplacement(queryString, prepareSolrStringForReplacement(value));
                    }
                }
            }
            matcher.appendTail(queryString);

            //substitute better display strings for collection/inst etc searches
            if (displayString.contains("_uid")) {
                displaySb.setLength(0);
                String normalised = displayString.replaceAll("\"", "");
                matcher = uidPattern.matcher(normalised);
                while (matcher.find()) {
                    String newVal = "<span>" + searchUtils.getUidDisplayString(matcher.group(1), matcher.group(2))
                            + "</span>";
                    if (newVal != null)
                        matcher.appendReplacement(displaySb, newVal);
                }
                matcher.appendTail(displaySb);
                displayString = displaySb.toString();
            }

            if (searchParams.getQ().equals("*:*")) {
                displayString = "[all records]";
            }
            if (searchParams.getLat() != null && searchParams.getLon() != null && searchParams.getRadius() != null) {
                displaySb.setLength(0);
                displaySb.append(displayString);
                displaySb.append(" - within ").append(searchParams.getRadius()).append(" km of point(")
                        .append(searchParams.getLat()).append(",").append(searchParams.getLon()).append(")");
                displayString = displaySb.toString();
            }

            // substitute i18n version of field name, if found in messages.properties
            displayString = formatDisplayStringWithI18n(displayString);

            searchParams.setFormattedQuery(queryString.toString());
            logger.debug("formattedQuery = " + queryString);
            logger.debug("displayString = " + displayString);
            searchParams.setDisplayString(displayString);
        }

        //format the fq's for facets that need ranges substituted
        for (int i = 0; i < searchParams.getFq().length; i++) {
            String fq = searchParams.getFq()[i];
            String[] parts = fq.split(":", 2);
            //check to see if the first part is a range based query and update if necessary
            Map<String, String> titleMap = RangeBasedFacets.getTitleMap(parts[0]);
            if (titleMap != null) {
                searchParams.getFq()[i] = titleMap.get(parts[1]);
            }
        }
    }
    searchParams.setDisplayString(formatDisplayStringWithI18n(searchParams.getDisplayString()));
}
From source file: org.archive.wayback.replay.html.transformer.JSStringTransformer.java

public String transform(ReplayParseContext context, String input) {
    StringBuffer replaced = new StringBuffer(input.length());
    Matcher m = pattern.matcher(input);
    while (m.find()) {
        String rawUrl = m.group(1);
        String pre = input.substring(m.start(), m.start(1));
        String post = input.substring(m.end(1), m.end());
        String origUrl = sourceEscaping != null ? sourceEscaping.unescape(rawUrl) : rawUrl;
        String url = context.contextualizeUrl(origUrl);
        if (url != origUrl) {
            // reverse some changes made to url by contextualizeUrl method, that
            // may break assumptions in subsequent JavaScript processing.
            // eg. "http://example.org" -> "/20140101012345/http://example.org/"
            // eg. "https://domain" + ".example.org" -> "http://domain/" + ".example.org"
            // eg. "https://domain." + "example.org" -> "http://domain" + "example.org"

            // remove trailing "/" if origUrl doesn't have it. As Wayback does not need
            // trailing slash, it may make sense to do this everywhere. Just doing this fix
            // in JavaScript for now.
            if (url.endsWith("/") && !origUrl.endsWith("/")) {
                url = url.substring(0, url.length() - 1);
            }
            // add trailing "." (removed by canonicalizer) back, if origUrl has it.
            if (origUrl.endsWith(".") && !url.endsWith(".")) {
                url = url + ".";
            }
            if (sourceEscaping != null) {
                url = sourceEscaping.escape(url);
            }
        } else {
            // use the original rawUrl
            url = rawUrl;
        }
        m.appendReplacement(replaced, Matcher.quoteReplacement(pre + url + post));
    }
    m.appendTail(replaced);
    return replaced.toString();
}
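The example above shows the common find/appendReplacement/appendTail rewrite loop: because appendReplacement interprets $ and \ in its replacement argument, the dynamically built replacement (here, a rewritten URL) is passed through Matcher.quoteReplacement first. A stripped-down sketch of the same idiom, with a hypothetical pattern and rewrite rule:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class AppendReplacementIdiom {
    public static void main(String[] args) {
        Pattern urlPattern = Pattern.compile("\"(https?://[^\"]+)\""); // hypothetical: double-quoted URLs
        String input = "var a = \"http://example.org/page\";";

        StringBuffer out = new StringBuffer(input.length());
        Matcher m = urlPattern.matcher(input);
        while (m.find()) {
            String rewritten = "\"/web/20140101000000/" + m.group(1) + "\"";
            // quoteReplacement keeps any '$' or '\' in the rewritten text literal
            m.appendReplacement(out, Matcher.quoteReplacement(rewritten));
        }
        m.appendTail(out);
        System.out.println(out); // var a = "/web/20140101000000/http://example.org/page";
    }
}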
From source file: com.networknt.light.rule.form.AbstractFormRule.java

protected String enrichForm(String json, Map<String, Object> inputMap) throws Exception {
    Map<String, Object> data = (Map<String, Object>) inputMap.get("data");
    Pattern pattern = Pattern.compile("\\[\\{\"label\":\"dynamic\",([^]]+)\\}\\]");
    Matcher m = pattern.matcher(json);
    StringBuffer sb = new StringBuffer(json.length());
    while (m.find()) {
        String text = m.group(1);
        // get the values from rules.
        logger.debug("text = {}", text);
        text = text.substring(8);
        logger.debug("text = {}", text);
        Map<String, Object> jsonMap = mapper.readValue(text, new TypeReference<HashMap<String, Object>>() {
        });
        jsonMap.put("payload", inputMap.get("payload"));
        // inject host into data here.
        Map<String, Object> dataMap = new HashMap<String, Object>();
        dataMap.put("host", data.get("host"));
        jsonMap.put("data", dataMap);
        RuleEngine.getInstance().executeRule(Util.getCommandRuleId(jsonMap), jsonMap);
        String result = (String) jsonMap.get("result");
        logger.debug("result = {}", result);
        if (result != null && result.length() > 0) {
            m.appendReplacement(sb, Matcher.quoteReplacement(result));
        } else {
            m.appendReplacement(sb, Matcher.quoteReplacement("[ ]"));
        }
    }
    m.appendTail(sb);
    logger.debug("form = {}", sb.toString());
    return sb.toString();
}
From source file: com.salesforce.ide.apex.internal.core.CompilerService.java

public static String canonicalizeString(String inputString) {
    String text = inputString.replaceAll("(\\r\\n|\\r)", Matcher.quoteReplacement("\n"));
    return text;
}
From source file: com.mirth.connect.connectors.jdbc.DatabaseConnectorServlet.java

@Override
public SortedSet<Table> getTables(String channelId, String channelName, String driver, String url,
        String username, String password, Set<String> tableNamePatterns, String selectLimit,
        Set<String> resourceIds) {
    CustomDriver customDriver = null;
    Connection connection = null;

    try {
        url = replacer.replaceValues(url, channelId, channelName);
        username = replacer.replaceValues(username, channelId, channelName);
        password = replacer.replaceValues(password, channelId, channelName);

        String schema = null;

        try {
            MirthContextFactory contextFactory = contextFactoryController.getContextFactory(resourceIds);
            try {
                ClassLoader isolatedClassLoader = contextFactory.getIsolatedClassLoader();
                if (isolatedClassLoader != null) {
                    customDriver = new CustomDriver(isolatedClassLoader, driver);
                    logger.debug("Custom driver created: " + customDriver.toString() + ", Version "
                            + customDriver.getMajorVersion() + "." + customDriver.getMinorVersion());
                } else {
                    logger.debug("Custom classloader is not being used, defaulting to DriverManager.");
                }
            } catch (Exception e) {
                logger.debug("Error creating custom driver, defaulting to DriverManager.", e);
            }
        } catch (Exception e) {
            logger.debug("Error retrieving context factory, defaulting to DriverManager.", e);
        }

        if (customDriver == null) {
            Class.forName(driver);
        }

        int oldLoginTimeout = DriverManager.getLoginTimeout();
        DriverManager.setLoginTimeout(30);

        if (customDriver != null) {
            connection = customDriver.connect(url, username, password);
        } else {
            connection = DriverManager.getConnection(url, username, password);
        }

        DriverManager.setLoginTimeout(oldLoginTimeout);

        DatabaseMetaData dbMetaData = connection.getMetaData();

        // the sorted set to hold the table information
        SortedSet<Table> tableInfoList = new TreeSet<Table>();

        // Use a schema if the user name matches one of the schemas.
        // Fix for Oracle: MIRTH-1045
        ResultSet schemasResult = null;
        try {
            schemasResult = dbMetaData.getSchemas();
            while (schemasResult.next()) {
                String schemaResult = schemasResult.getString(1);
                if (username.equalsIgnoreCase(schemaResult)) {
                    schema = schemaResult;
                }
            }
        } finally {
            if (schemasResult != null) {
                schemasResult.close();
            }
        }

        // based on the table name pattern, attempt to retrieve the table information
        tableNamePatterns = translateTableNamePatterns(tableNamePatterns);
        List<String> tableNameList = new ArrayList<String>();

        // go through each possible table name pattern and query for the tables
        for (String tableNamePattern : tableNamePatterns) {
            ResultSet rs = null;
            try {
                rs = dbMetaData.getTables(null, schema, tableNamePattern, TABLE_TYPES);

                // based on the result set, loop through to store the table name so it can be used to
                // retrieve the table's column information
                while (rs.next()) {
                    tableNameList.add(rs.getString("TABLE_NAME"));
                }
            } finally {
                if (rs != null) {
                    rs.close();
                }
            }
        }

        // for each table, grab their column information
        for (String tableName : tableNameList) {
            ResultSet rs = null;
            ResultSet backupRs = null;
            boolean fallback = false;
            try {
                // apparently it's much more efficient to use ResultSetMetaData to retrieve
                // column information. So each driver is defined with their own unique SELECT
                // statement to query the table columns and use ResultSetMetaData to retrieve
                // the column information. If driver is not defined with the select statement
                // then we'll default to the generic method of getting column information, but
                // this could be extremely slow
                List<Column> columnList = new ArrayList<Column>();

                if (StringUtils.isEmpty(selectLimit)) {
                    logger.debug("No select limit is defined, using generic method");
                    rs = dbMetaData.getColumns(null, null, tableName, null);

                    // retrieve all relevant column information
                    for (int i = 0; rs.next(); i++) {
                        Column column = new Column(rs.getString("COLUMN_NAME"), rs.getString("TYPE_NAME"),
                                rs.getInt("COLUMN_SIZE"));
                        columnList.add(column);
                    }
                } else {
                    logger.debug("Select limit is defined, using specific select query : '" + selectLimit + "'");

                    // replace the '?' with the appropriate schema.table name, and use ResultSetMetaData to
                    // retrieve column information
                    final String schemaTableName = StringUtils.isNotEmpty(schema)
                            ? "\"" + schema + "\".\"" + tableName + "\""
                            : "\"" + tableName + "\"";
                    final String queryString = selectLimit.trim().replaceAll("\\?",
                            Matcher.quoteReplacement(schemaTableName));

                    Statement statement = connection.createStatement();
                    try {
                        rs = statement.executeQuery(queryString);
                        ResultSetMetaData rsmd = rs.getMetaData();

                        // retrieve all relevant column information
                        for (int i = 1; i < rsmd.getColumnCount() + 1; i++) {
                            Column column = new Column(rsmd.getColumnName(i), rsmd.getColumnTypeName(i),
                                    rsmd.getPrecision(i));
                            columnList.add(column);
                        }
                    } catch (SQLException sqle) {
                        logger.info("Failed to execute '" + queryString
                                + "', fall back to generic approach to retrieve column information");
                        fallback = true;
                    } finally {
                        if (statement != null) {
                            statement.close();
                        }
                    }

                    // failed to use selectLimit method, so we need to fall back to generic
                    // if this generic approach fails, then there's nothing we can do
                    if (fallback) {
                        // Re-initialize in case some columns were added before failing
                        columnList = new ArrayList<Column>();
                        logger.debug("Using fallback method for retrieving columns");
                        backupRs = dbMetaData.getColumns(null, null, tableName.replace("/", "//"), null);

                        // retrieve all relevant column information
                        while (backupRs.next()) {
                            Column column = new Column(backupRs.getString("COLUMN_NAME"),
                                    backupRs.getString("TYPE_NAME"), backupRs.getInt("COLUMN_SIZE"));
                            columnList.add(column);
                        }
                    }
                }

                // create table object and add to the list of table definitions
                Table table = new Table(tableName, columnList);
                tableInfoList.add(table);
            } finally {
                if (rs != null) {
                    rs.close();
                }
                if (backupRs != null) {
                    backupRs.close();
                }
            }
        }

        return tableInfoList;
    } catch (Exception e) {
        throw new MirthApiException(new Exception("Could not retrieve database tables and columns.", e));
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
            }
        }
    }
}
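In the example above, the driver-specific SELECT template has its '?' placeholder replaced by the quoted schema.table name; quoteReplacement matters because identifiers can legally contain '$', which replaceAll would otherwise read as a group reference. A reduced sketch of just that substitution (the template and table name below are made up for illustration):

import java.util.regex.Matcher;

public class SelectTemplateDemo {
    public static void main(String[] args) {
        String selectLimit = "SELECT * FROM ? WHERE 1 = 0";      // hypothetical driver template
        String schemaTableName = "\"MIRTH\".\"ORDER$ARCHIVE\"";  // '$' is legal in some identifiers

        String query = selectLimit.trim().replaceAll("\\?", Matcher.quoteReplacement(schemaTableName));
        System.out.println(query); // SELECT * FROM "MIRTH"."ORDER$ARCHIVE" WHERE 1 = 0
    }
}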
From source file: com.haulmont.yarg.formatters.impl.AbstractFormatter.java

protected String inlineParameterValue(String template, String parameterName, String value) {
    String parameterRegex = UNIVERSAL_ALIAS_REGEXP.replace(ALIAS_GROUP, parameterName);
    return template.replaceAll(parameterRegex, Matcher.quoteReplacement(value));
}
From source file: org.springframework.integration.ftp.dsl.FtpTests.java

@Test
@SuppressWarnings("unchecked")
public void testFtpMgetFlow() {
    QueueChannel out = new QueueChannel();
    IntegrationFlow flow = f -> f
            .handle(Ftp
                    .outboundGateway(sessionFactory(), AbstractRemoteFileOutboundGateway.Command.MGET, "payload")
                    .options(AbstractRemoteFileOutboundGateway.Option.RECURSIVE)
                    .filterExpression("name matches 'subFtpSource|.*1.txt'")
                    .localDirectoryExpression("'" + getTargetLocalDirectoryName() + "' + #remoteDirectory")
                    .localFilenameExpression("#remoteFileName.replaceFirst('ftpSource', 'localTarget')"))
            .channel(out);
    IntegrationFlowRegistration registration = this.flowContext.registration(flow).register();
    String dir = "ftpSource/";
    registration.getInputChannel().send(new GenericMessage<>(dir + "*"));
    Message<?> result = out.receive(10_000);
    assertNotNull(result);
    List<File> localFiles = (List<File>) result.getPayload();
    // should have filtered ftpSource2.txt
    assertEquals(2, localFiles.size());
    for (File file : localFiles) {
        assertThat(file.getPath().replaceAll(Matcher.quoteReplacement(File.separator), "/"),
                Matchers.containsString(dir));
    }
    assertThat(localFiles.get(1).getPath().replaceAll(Matcher.quoteReplacement(File.separator), "/"),
            Matchers.containsString(dir + "subFtpSource"));
    registration.destroy();
}
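The assertions above normalize platform-specific paths before comparing them. Note that in that test quoteReplacement is applied to the regex argument of replaceAll (the escaping it performs also happens to yield a valid pattern for File.separator); the usual pairing is Pattern.quote on the pattern side and Matcher.quoteReplacement on the replacement side. A small sketch of both directions (values are illustrative only):

import java.io.File;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SeparatorDemo {
    public static void main(String[] args) {
        String platformPath = "target" + File.separator + "ftpSource" + File.separator + "1.txt";

        // platform separator -> '/': the separator must be escaped on the *pattern* side
        String unixStyle = platformPath.replaceAll(Pattern.quote(File.separator), "/");
        System.out.println(unixStyle); // target/ftpSource/1.txt

        // '/' -> platform separator: the separator must be escaped on the *replacement* side,
        // because on Windows File.separator is "\", which replaceAll would treat as a dangling escape
        String backAgain = unixStyle.replaceAll("/", Matcher.quoteReplacement(File.separator));
        System.out.println(backAgain.equals(platformPath)); // true
    }
}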
From source file: net.sathis.export.sql.DocBuilder.java

public Map<String, Object> getFields(Map<String, Object> firstRow, ResultSet rs, Entity entity,
        Map<String, Object> entityMap, Map<String, Object> rootEntityMap) throws SQLException {
    entityMap = new HashMap<String, Object>();
    if (entity.allAttributes.get(MULTI_VALUED) != null
            && entity.allAttributes.get(MULTI_VALUED).equalsIgnoreCase("true")) {
        List<Object> fieldArray = new ArrayList<Object>();
        rs.beforeFirst();
        while (rs.next()) {
            if (entity.fields.size() > 1) {
                Map<String, Object> entityFieldsMap = new HashMap<String, Object>();
                for (Iterator<Field> iterator = entity.fields.iterator(); iterator.hasNext();) {
                    Field field = (Field) iterator.next();
                    FieldType fieldType = FieldType.valueOf(field.allAttributes.get("type").toUpperCase());
                    entityFieldsMap.put(field.name, convertFieldType(fieldType, rs.getObject(field.column)).get(0));
                }
                fieldArray.add(entityFieldsMap);
            } else if (entity.fields.size() == 1) {
                fieldArray.add(rs.getObject(entity.fields.get(0).column));
            }
        }
        rootEntityMap.put(entity.name, fieldArray);
    } else if (firstRow != null) {
        for (Iterator<Field> iterator = entity.fields.iterator(); iterator.hasNext();) {
            Field field = (Field) iterator.next();
            FieldType fieldType = FieldType.valueOf(field.allAttributes.get("type").toUpperCase());
            if (firstRow.get(field.column) != null) {
                if (entity.pk != null && entity.pk.equals(field.name)) {
                    if (importer.getDataStoreType().equals(DataStoreType.MONGO)) {
                        entityMap.put("_id", convertFieldType(fieldType, firstRow.get(field.column)).get(0));
                    } else if (importer.getDataStoreType().equals(DataStoreType.COUCH)) {
                        //couch db says document id must be string
                        entityMap.put("_id", convertFieldType(FieldType.STRING, firstRow.get(field.column)).get(0));
                    }
                } else {
                    entityMap.put(field.getName(), convertFieldType(fieldType, firstRow.get(field.column)).get(0));
                }
                params.put(entity.name + "." + field.name, firstRow.get(field.column).toString());
            }
        }
    }
    if (entity.entities != null) {
        Entity subEntity = null;
        String query = "", aparam = "";
        for (Iterator<Entity> iterator = entity.entities.iterator(); iterator.hasNext();) {
            subEntity = (Entity) iterator.next();
            subLevel = subConnection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
            query = subEntity.allAttributes.get("query");
            m = p.matcher(query);
            aparam = "";
            try {
                log.info("Parameter Map is: " + params);
                while (m.find()) {
                    aparam = query.substring(m.start() + 2, m.end() - 1);
                    query = query.replaceAll("(\\$\\{" + aparam + "\\})",
                            Matcher.quoteReplacement(StringEscapeUtils.escapeSql(params.get(aparam))));
                    m = p.matcher(query);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            resultSet = subLevel.executeQuery(query);
            if (resultSet.next()) {
                subEntityData = getFields(processor.toMap(resultSet), resultSet, subEntity, null, entityMap);
                if (subEntityData.size() > 0)
                    entityMap.put(subEntity.name, subEntityData);
            }
            resultSet.close();
            subLevel.close();
        }
    }
    return entityMap;
}
From source file: org.entando.edo.builder.TestBuilderNoPlugin.java

@Test
public void test_Controller_Spring_Xml() throws IOException {
    String commonPath = "src/main/resources/spring/sandbox/apsadmin".replaceAll("/",
            Matcher.quoteReplacement(File.separator));
    String actualPath = ACTUAL_BASE_FOLDER + commonPath;
    File actualDir = new File(actualPath);
    Assert.assertTrue(actualDir.exists());
    List<File> actualFiles = this.searchFiles(actualDir, null);
    Assert.assertEquals(1, actualFiles.size());
    this.compareFiles(actualFiles);
}