List of usage examples for java.util.regex Matcher appendReplacement
public Matcher appendReplacement(StringBuffer sb, String replacement)
public Matcher appendReplacement(StringBuilder sb, String replacement) (overload added in Java 9; the examples below use the StringBuffer form)
From source file:org.etudes.util.XrefHelper.java
/** * For email notifications, revise embedded media relative url to full url. * /*from www . ja v a 2 s. co m*/ * @param data * The message data with relative urls. * @return data with full urls. */ public static String fullUrls(String data) { if (data == null) return data; Pattern p = getPattern(); Matcher m = p.matcher(data); StringBuffer sb = new StringBuffer(); String serverUrl = ServerConfigurationService.getServerUrl(); // for the relative access check: matches /access/ Pattern relAccessPattern = Pattern.compile("/access/.*"); // process each "harvested" string (avoiding like strings that are not in src= or href= patterns) while (m.find()) { if (m.groupCount() == 3) { String ref = m.group(2); String terminator = m.group(3); // if this is an access to our own server, make it full URL (i.e. starting with "/access") Matcher relAccessMatcher = relAccessPattern.matcher(ref); if (relAccessMatcher.matches()) { m.appendReplacement(sb, Matcher.quoteReplacement(m.group(1) + "=\"" + serverUrl + ref + terminator)); } } } m.appendTail(sb); return sb.toString(); }
From source file:org.azyva.dragom.cliutil.CliUtil.java
/** * Initializes the Java Util Logging framework by implementing replaceable * properties in the configuration file. * * <p>If the java.util.logging.config.file system property is defined, this method * does nothing, leaving the default initialization process be used. * * <p>If the java.util.logging.config.file system property is not defined and the * org.azyva.dragom.JavaUtilLoggingConfigFile system property is defined, this * method calls LogManager.readConfiguration with an InputStream which represents * the file but with property references replaced by the corresponding system * property./*from ww w .j a va 2 s .com*/ * * <p>If none of these two system properties are defined, this method does * nothing. */ public static void initJavaUtilLogging() { String javaUtilLoggingConfigFile; String javaUtilLoggingConfig; Matcher matcher; StringBuffer stringBufferNewJavaUtilLoggingConfig; Util.applyDragomSystemProperties(); if ((System.getProperty("java.util.logging.config.file") == null) && ((javaUtilLoggingConfigFile = System .getProperty(CliUtil.SYS_PROPERTY_JAVA_UTIL_LOGGING_CONFIG_FILE)) != null)) { try { javaUtilLoggingConfig = new String(Files.readAllBytes(Paths.get(javaUtilLoggingConfigFile))); matcher = CliUtil.patternPropertyReference.matcher(javaUtilLoggingConfig); stringBufferNewJavaUtilLoggingConfig = new StringBuffer(); while (matcher.find()) { String property; String value; property = matcher.group(1); value = System.getProperty(property); if (value == null) { throw new RuntimeException("System property " + property + " referenced in " + javaUtilLoggingConfigFile + " is not defined."); } // In a Properties file, \ must be escaped. 
value = value.replace("\\", "\\\\"); matcher.appendReplacement(stringBufferNewJavaUtilLoggingConfig, Matcher.quoteReplacement(value)); } matcher.appendTail(stringBufferNewJavaUtilLoggingConfig); java.util.logging.LogManager.getLogManager().readConfiguration( new ByteArrayInputStream(stringBufferNewJavaUtilLoggingConfig.toString().getBytes())); } catch (IOException ioe) { throw new RuntimeException(ioe); } } }
From source file:io.klerch.alexa.tellask.model.wrapper.AlexaSpeechletResponse.java
private String resolveSlotsInUtterance(final String utterance) { final StringBuffer buffer = new StringBuffer(); // extract all the placeholders found in the utterance final Matcher slotsInUtterance = Pattern.compile("\\{(.*?)\\}").matcher(utterance); // for any of the placeholders ... while (slotsInUtterance.find()) { // ... placeholder-name is the slotName to look after in two places of the output final String slotName = slotsInUtterance.group(1); final AlexaOutputSlot outputSlot = output // prefer directly set output slots .getSlots().stream()//from w ww .j av a 2 s .c o m // which do have the same name as what is found in the utterance .filter(slot -> slot.getName().equals(slotName)).findFirst() // if not directly applied look in provided models for AlexaSlotSave fields .orElse(getSavedSlot(slotName)); Validate.notNull(outputSlot, "Could not replace placeholder with name {" + slotName + "} because no corresponding slot was set in the output."); // RJH - FEB 2017 - Matcher.quoteReplacement on slot input to fix bug // ~ https://github.com/KayLerch/alexa-skills-kit-tellask-java/issues/1 slotsInUtterance.appendReplacement(buffer, Matcher.quoteReplacement(outputSlot.getSsml())); } slotsInUtterance.appendTail(buffer); return "<speak>" + buffer.toString() + "</speak>"; }
From source file:org.opennms.ng.services.databaseschemaconfig.JdbcFilterDao.java
/**
 * Generic method to parse and translate a rule into SQL.
 *
 * Only columns listed in database-schema.xml may be used in a filter
 * (explicit "table.column" specification is not supported in filters)
 *
 * To differentiate column names from SQL key words (operators, functions, typecasts, etc)
 * SQL_KEYWORD_REGEX must match any SQL key words that may be used in filters,
 * and must not match any column names or prefixed values
 *
 * To make filter syntax more simple and intuitive than SQL
 * - Filters support some aliases for common SQL key words / operators
 *     "&amp;" or "&amp;&amp;" = "AND"
 *     "|" or "||" = "OR"
 *     "!" = "NOT"
 *     "==" = "="
 * - "IPLIKE" may be used as an operator instead of a function in filters ("ipAddr IPLIKE '*.*.*.*'")
 *   When using "IPLIKE" as an operator, the value does not have to be quoted ("ipAddr IPLIKE *.*.*.*" is ok)
 * - Some common SQL expressions may be generated by adding a (lower-case) prefix to an unquoted value in the filter
 *     "isVALUE" = "serviceName = VALUE"
 *     "notisVALUE" = interface does not support the specified service
 *     "catincVALUE" = node is in the specified category
 * - Double-quoted (") strings in filters are converted to single-quoted (') strings in SQL
 *   SQL treats single-quoted strings as constants (values) and double-quoted strings as identifiers (columns, tables, etc)
 *   So, all quoted strings in filters are treated as constants, and filters don't support quoted identifiers
 *
 * This function does not do complete syntax/grammar checking - that is left to the database itself - do not assume the output is valid SQL
 *
 * @param tables
 *            a list to be populated with any tables referenced by the returned SQL
 * @param rule
 *            the rule to parse
 *
 * @return an SQL WHERE clause, or "" when the rule is null or empty
 *
 * @throws org.opennms.ng.services.databaseschemaconfig.FilterParseException
 *             if any errors occur during parsing
 */
private String parseRule(final List<Table> tables, final String rule) throws FilterParseException {
    if (rule != null && rule.length() > 0) {
        final List<String> extractedStrings = new ArrayList<String>();

        String sqlRule = rule;

        // Pass 1: extract quoted strings from the rule and convert double-quoted
        // strings to single-quoted strings. Quoted strings need to be extracted
        // first to avoid accidentally matching/modifying anything within them.
        // As in SQL, pairs of quotes within a quoted string are treated as an
        // escaped quote character:
        // 'a''b' = a'b ; "a""b" = a"b ; 'a"b' = a"b ; "a'b" = a'b
        // Each extracted string is replaced by a "###@<index>@###" placeholder.
        Matcher regex = SQL_QUOTE_PATTERN.matcher(sqlRule);
        StringBuffer tempStringBuff = new StringBuffer();
        while (regex.find()) {
            final String tempString = regex.group();
            if (tempString.charAt(0) == '"') {
                extractedStrings.add("'" + tempString.substring(1, tempString.length() - 1)
                        .replaceAll("\"\"", "\"").replaceAll("'", "''") + "'");
            } else {
                extractedStrings.add(regex.group());
            }
            regex.appendReplacement(tempStringBuff, "###@" + (extractedStrings.size() - 1) + "@###");
        }
        // Anything after tempIndex is the unmatched tail; a stray quote there
        // means the rule contained an unterminated string literal.
        final int tempIndex = tempStringBuff.length();
        regex.appendTail(tempStringBuff);

        if (tempStringBuff.substring(tempIndex).indexOf('\'') > -1) {
            final String message = "Unmatched ' in filter rule '" + rule + "'";
            LOG.error(message);
            throw new FilterParseException(message);
        }
        if (tempStringBuff.substring(tempIndex).indexOf('"') > -1) {
            final String message = "Unmatched \" in filter rule '" + rule + "'";
            LOG.error(message);
            throw new FilterParseException(message);
        }
        sqlRule = tempStringBuff.toString();

        // Pass 2: translate filter-specific operator aliases to SQL operators.
        sqlRule = sqlRule.replaceAll("\\s*(?:&|&&)\\s*", " AND ");
        sqlRule = sqlRule.replaceAll("\\s*(?:\\||\\|\\|)\\s*", " OR ");
        sqlRule = sqlRule.replaceAll("\\s*!(?!=)\\s*", " NOT ");
        sqlRule = sqlRule.replaceAll("==", "=");

        // Pass 3: translate IPLIKE operators to IPLIKE() functions.
        // If IPLIKE is already used as a function in the filter, this regex should not match it.
        regex = SQL_IPLIKE_PATTERN.matcher(sqlRule);
        tempStringBuff = new StringBuffer();
        while (regex.find()) {
            // Is the second argument already a quoted string (i.e. an extracted
            // "###@n@###" placeholder)? If so, don't add quotes around it.
            if (regex.group().charAt(0) == '#') {
                regex.appendReplacement(tempStringBuff, "IPLIKE($1, $2)");
            } else {
                regex.appendReplacement(tempStringBuff, "IPLIKE($1, '$2')");
            }
        }
        regex.appendTail(tempStringBuff);
        sqlRule = tempStringBuff.toString();

        // Pass 4: extract SQL key words to avoid identifying them as columns or
        // prefixed values in the next pass (same placeholder scheme as pass 1).
        regex = SQL_KEYWORD_PATTERN.matcher(sqlRule);
        tempStringBuff = new StringBuffer();
        while (regex.find()) {
            extractedStrings.add(regex.group().toUpperCase());
            regex.appendReplacement(tempStringBuff, "###@" + (extractedStrings.size() - 1) + "@###");
        }
        regex.appendTail(tempStringBuff);
        sqlRule = tempStringBuff.toString();

        // Pass 5: identify prefixed values ("is", "notis", "catinc") and columns.
        regex = SQL_VALUE_COLUMN_PATTERN.matcher(sqlRule);
        tempStringBuff = new StringBuffer();
        while (regex.find()) {
            // Convert prefixed values to SQL expressions
            if (regex.group().startsWith("is")) {
                regex.appendReplacement(tempStringBuff, addColumn(tables, "serviceName") + " = '"
                        + regex.group().substring(2) + "'");
            } else if (regex.group().startsWith("notis")) {
                regex.appendReplacement(tempStringBuff, addColumn(tables, "ipAddr")
                        + " NOT IN (SELECT ifServices.ipAddr FROM ifServices, service WHERE service.serviceName ='"
                        + regex.group().substring(5) + "' AND service.serviceID = ifServices.serviceID)");
            } else if (regex.group().startsWith("catinc")) {
                regex.appendReplacement(tempStringBuff, addColumn(tables, "nodeID")
                        + " IN (SELECT category_node.nodeID FROM category_node, categories WHERE categories.categoryID = category_node.categoryID AND categories.categoryName = '"
                        + regex.group().substring(6) + "')");
            } else {
                // Call addColumn() on each column
                regex.appendReplacement(tempStringBuff, addColumn(tables, regex.group()));
            }
        }
        regex.appendTail(tempStringBuff);
        sqlRule = tempStringBuff.toString();

        // Pass 6: merge extracted strings (passes 1 and 4) back into the expression.
        // quoteReplacement is required here: extracted strings may contain $ or \.
        regex = SQL_ESCAPED_PATTERN.matcher(sqlRule);
        tempStringBuff = new StringBuffer();
        while (regex.find()) {
            regex.appendReplacement(tempStringBuff,
                    Matcher.quoteReplacement(extractedStrings.get(Integer.parseInt(regex.group(1)))));
        }
        regex.appendTail(tempStringBuff);
        sqlRule = tempStringBuff.toString();

        return "WHERE " + sqlRule;
    }
    return "";
}
From source file:mergedoc.core.Comment.java
/** * ???????/*from ww w. j av a2 s.c o m*/ * <p> * @param o */ private void expandComment(OutputComment o) { // HTML ?????????? if (!o.comment.contains("<")) { return; } // ? int height = o.resultHeight(); // <pre>?<blockquote>?<ol>?<ul> ?? StringBuffer sb = new StringBuffer(); Pattern pat = PatternCache.getPattern("([^\n])(\n(<blockquote>)?<pre>|\n<(blockquote|ol|ul)>)"); Matcher mat = pat.matcher(o.comment); while (height < o.originHeight && mat.find()) { mat.appendReplacement(sb, "$1\n$2"); height++; } mat.appendTail(sb); o.comment = sb.toString(); if (height == o.originHeight) { return; } // </pre>?</blockquote>?</ol>?</ul> ?? sb = new StringBuffer(); pat = PatternCache.getPattern("(</pre>(</blockquote>)?\n|</(blockquote|ol|ul)>\n)([^\n])"); mat = pat.matcher(o.comment); while (height < o.originHeight && mat.find()) { mat.appendReplacement(sb, "$1\n$4"); height++; } mat.appendTail(sb); o.comment = sb.toString(); if (height == o.originHeight) { return; } }
From source file:com.ibm.jaggr.core.impl.layer.LayerImpl.java
/**
 * Returns an input stream for this layer's built response, building the layer
 * and caching the result in {@code _layerBuilds} if necessary.
 * <p>
 * The cache key is produced by {@link #generateCacheKey}; gzipped and
 * non-gzipped variants are cached under distinct keys, and when only the
 * other variant is cached this method transcodes it rather than rebuilding.
 * When cache-info reporting is enabled, progress markers (e.g. "hit_1",
 * "added", "update_key") are accumulated in the LAYERCACHEINFO request
 * attribute.
 *
 * @param request the HTTP request
 * @param response the HTTP response (headers are set from the entry size)
 * @return the input stream for the built layer
 * @throws IOException on error building or reading the layer; the cache
 *         entry is removed before rethrowing
 */
@SuppressWarnings("unchecked")
@Override
public InputStream getInputStream(HttpServletRequest request, HttpServletResponse response) throws IOException {
    CacheEntry entry = null;
    String key = null;
    IAggregator aggr = (IAggregator) request.getAttribute(IAggregator.AGGREGATOR_REQATTRNAME);
    List<String> cacheInfoReport = null;
    if (_isReportCacheInfo) {
        cacheInfoReport = (List<String>) request.getAttribute(LAYERCACHEINFO_PROPNAME);
        if (cacheInfoReport != null) {
            cacheInfoReport.clear();
        }
    }
    // Collect cache info for FINEST logging even when reporting is off.
    if (log.isLoggable(Level.FINEST) && cacheInfoReport == null) {
        cacheInfoReport = new LinkedList<String>();
    }
    try {
        IOptions options = aggr.getOptions();
        ICacheManager mgr = aggr.getCacheManager();
        boolean ignoreCached = RequestUtil.isIgnoreCached(request);
        InputStream result;
        long lastModified = getLastModified(request);
        CacheEntry newEntry = new CacheEntry(_id, _cacheKey, lastModified);
        CacheEntry existingEntry = null;

        if (ignoreCached) {
            request.setAttribute(NOCACHE_RESPONSE_REQATTRNAME, Boolean.TRUE);
        }
        if (options.isDevelopmentMode()) {
            synchronized (this) {
                // See if we need to discard previously built LayerBuilds
                if (lastModified > _lastModified) {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_lastmod2"); //$NON-NLS-1$
                    }
                    if (lastModified != Long.MAX_VALUE) {
                        // max value means missing requested source
                        _lastModified = lastModified;
                    }
                    _cacheKeyGenerators = null;
                }
            }
        }
        Map<String, ICacheKeyGenerator> cacheKeyGenerators = _cacheKeyGenerators;

        // Create a cache key.
        key = generateCacheKey(request, cacheKeyGenerators);

        if (!ignoreCached && key != null) {
            int loopGuard = 5;
            do {
                // Try to retrieve an existing layer build using the blocking putIfAbsent. If the return
                // value is null, then the newEntry was successfully added to the map, otherwise the
                // existing entry is returned in the buildReader and newEntry was not added.
                existingEntry = _layerBuilds.putIfAbsent(key, newEntry, options.isDevelopmentMode());
                if (cacheInfoReport != null) {
                    cacheInfoReport.add(existingEntry != null ? "hit_1" : "added"); //$NON-NLS-1$ //$NON-NLS-2$
                }
                if (existingEntry != null) {
                    if ((result = existingEntry.tryGetInputStream(request)) != null) {
                        // Cache hit with data available: serve it directly.
                        setResponseHeaders(request, response, existingEntry.getSize());
                        if (log.isLoggable(Level.FINEST)) {
                            log.finest(cacheInfoReport.toString() + "\n" + //$NON-NLS-1$
                                    "key:" + key + //$NON-NLS-1$
                                    "\n" + existingEntry.toString()); //$NON-NLS-1$
                        }
                        if (_isReportCacheInfo) {
                            request.setAttribute(LAYERBUILDCACHEKEY_PROPNAME, key);
                        }
                        return result;
                    } else if (existingEntry.isDeleted()) {
                        if (_layerBuilds.replace(key, existingEntry, newEntry)) {
                            // entry was replaced, use newEntry
                            if (cacheInfoReport != null) {
                                cacheInfoReport.add("replace_1"); //$NON-NLS-1$
                            }
                            existingEntry = null;
                        } else {
                            // Existing entry was removed from the cache by another thread
                            // between the time we retrieved it and the time we tried to
                            // replace it. Try to add the new entry again.
                            if (cacheInfoReport != null) {
                                cacheInfoReport.add("retry_add"); //$NON-NLS-1$
                            }
                            if (--loopGuard == 0) {
                                // Should never happen, but just in case
                                throw new IllegalStateException();
                            }
                            continue;
                        }
                    }
                }
                break;
            } while (true);
        }

        // putIfAbsent() succeeded and the new entry was added to the cache,
        // or we are reusing the (not-yet-built) existing entry.
        entry = (existingEntry != null) ? existingEntry : newEntry;

        LayerBuilder layerBuilder = null;

        // List of Future<IModule.ModuleReader> objects that will be used to read the module
        // data from
        List<ICacheKeyGenerator> moduleKeyGens = null;

        // Synchronize on the LayerBuild object for the build. This will prevent multiple
        // threads from building the same output. If more than one thread requests the same
        // output (same cache key), then the first one to grab the sync object will win and
        // the rest will wait for the first thread to finish building and then just return
        // the output from the first thread when they wake.
        synchronized (entry) {
            // Check to see if data is available one more time in case a different thread finished
            // building the output while we were blocked on the sync object.
            if (!ignoreCached && key != null && (result = entry.tryGetInputStream(request)) != null) {
                if (cacheInfoReport != null) {
                    cacheInfoReport.add("hit_2"); //$NON-NLS-1$
                }
                setResponseHeaders(request, response, entry.getSize());
                if (log.isLoggable(Level.FINEST)) {
                    log.finest(cacheInfoReport.toString() + "\n" + //$NON-NLS-1$
                            "key:" + key + //$NON-NLS-1$
                            "\n" + entry.toString()); //$NON-NLS-1$
                }
                if (_isReportCacheInfo) {
                    request.setAttribute(LAYERBUILDCACHEKEY_PROPNAME, key);
                }
                return result;
            }

            boolean isGzip = RequestUtil.isGzipEncoding(request);
            ByteArrayOutputStream bos = new ByteArrayOutputStream();

            // See if we already have a cached response that uses a different gzip
            // encoding option. If we do, then just zip (or unzip) the cached
            // response. The key is derived by flipping the gzip flag ("0"/"1")
            // in this key's gzip segment.
            CacheEntry otherEntry = null;
            if (key != null) {
                StringBuffer sb = new StringBuffer();
                Matcher m = GZIPFLAG_KEY_PATTERN.matcher(key);
                m.find();
                m.appendReplacement(sb,
                        new StringBuffer(s_layerCacheKeyGenerators.get(0).toString()).append(":") //$NON-NLS-1$
                                .append("1".equals(m.group(1)) ? "0" : "1") //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                                .append(":").toString() //$NON-NLS-1$
                ).appendTail(sb);
                otherEntry = _layerBuilds.get(sb.toString());
            }
            if (otherEntry != null) {
                if (isGzip) {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("zip_unzipped"); //$NON-NLS-1$
                    }
                    // We need gzipped and the cached entry is unzipped
                    // Create the compression stream for the output
                    VariableGZIPOutputStream compress = new VariableGZIPOutputStream(bos, 10240); // is 10k too big?
                    compress.setLevel(Deflater.BEST_COMPRESSION);
                    Writer writer = new OutputStreamWriter(compress, "UTF-8"); //$NON-NLS-1$

                    // Copy the data from the input stream to the output, compressing as we go.
                    CopyUtil.copy(otherEntry.getInputStream(request), writer);
                } else {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("unzip_zipped"); //$NON-NLS-1$
                    }
                    // We need unzipped and the cached entry is zipped. Just unzip it
                    CopyUtil.copy(new GZIPInputStream(otherEntry.getInputStream(request)), bos);
                }
                // Set the buildReader to the LayerBuild and release the lock by exiting the sync block
                entry.setBytes(bos.toByteArray());
                if (!ignoreCached) {
                    _layerBuilds.replace(key, entry, entry); // updates entry weight in map
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_weights_1"); //$NON-NLS-1$
                    }
                    entry.persist(mgr);
                }
            } else {
                moduleKeyGens = new LinkedList<ICacheKeyGenerator>();
                ModuleList moduleList = getModules(request);

                // Remove the module list from the request to safe-guard it now that we don't
                // need it there anymore
                request.removeAttribute(MODULE_FILES_PROPNAME);

                // Create a BuildListReader from the list of Futures. This reader will obtain a
                // ModuleReader from each of the Futures in the list and read data from each one in
                // succession until all the data has been read, blocking on each Future until the
                // reader becomes available.
                layerBuilder = new LayerBuilder(request, moduleKeyGens, moduleList);
                String layer = layerBuilder.build();

                if (isGzip) {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("zip"); //$NON-NLS-1$
                    }
                    VariableGZIPOutputStream compress = new VariableGZIPOutputStream(bos, 10240); // is 10k too big?
                    compress.setLevel(Deflater.BEST_COMPRESSION);
                    Writer writer = new OutputStreamWriter(compress, "UTF-8"); //$NON-NLS-1$

                    // Copy the data from the input stream to the output, compressing as we go.
                    CopyUtil.copy(new StringReader(layer), writer);
                    // Set the buildReader to the LayerBuild and release the lock by exiting the sync block
                    entry.setBytes(bos.toByteArray());
                } else {
                    entry.setBytes(layer.getBytes());
                }
                // entry will be persisted below after we determine if cache key
                // generator needs to be updated
            }
        }

        // if any of the readers included an error response, then don't cache the layer.
        if (layerBuilder != null && layerBuilder.hasErrors()) {
            request.setAttribute(NOCACHE_RESPONSE_REQATTRNAME, Boolean.TRUE);
            if (cacheInfoReport != null) {
                cacheInfoReport.add(key == null ? "error_noaction" : "error_remove"); //$NON-NLS-1$ //$NON-NLS-2$
            }
            if (key != null) {
                _layerBuilds.remove(key, entry);
            }
        } else if (layerBuilder != null) {
            if (!ignoreCached) {
                // See if we need to create or update the cache key generators
                Map<String, ICacheKeyGenerator> newKeyGens = new HashMap<String, ICacheKeyGenerator>();
                Set<String> requiredModuleListDeps = getModules(request).getDependentFeatures();
                addCacheKeyGenerators(newKeyGens, s_layerCacheKeyGenerators);
                addCacheKeyGenerators(newKeyGens, aggr.getTransport().getCacheKeyGenerators());
                addCacheKeyGenerators(newKeyGens, Arrays.asList(new ICacheKeyGenerator[] {
                        new FeatureSetCacheKeyGenerator(requiredModuleListDeps, false) }));
                addCacheKeyGenerators(newKeyGens, moduleKeyGens);

                boolean cacheKeyGeneratorsUpdated = false;
                if (!newKeyGens.equals(cacheKeyGenerators)) {
                    // If we don't yet have a cache key for this layer, then get one
                    // from the cache key generators, and then update the cache key for this
                    // cache entry.
                    synchronized (this) {
                        if (_cacheKeyGenerators != null) {
                            addCacheKeyGenerators(newKeyGens, _cacheKeyGenerators.values());
                        }
                        _cacheKeyGenerators = Collections.unmodifiableMap(newKeyGens);
                    }
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_keygen"); //$NON-NLS-1$
                    }
                    cacheKeyGeneratorsUpdated = true;
                }
                final String originalKey = key;
                if (key == null || cacheKeyGeneratorsUpdated) {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_key"); //$NON-NLS-1$
                    }
                    key = generateCacheKey(request, newKeyGens);
                }
                if (originalKey == null || !originalKey.equals(key)) {
                    /*
                     * The cache key has changed from what was originally used to put the
                     * un-built entry into the cache. Add the LayerBuild to the cache
                     * using the new key.
                     */
                    if (log.isLoggable(Level.FINE)) {
                        log.fine("Key changed! Adding layer to cache with key: " + key); //$NON-NLS-1$
                    }
                    final CacheEntry originalEntry = entry;
                    CacheEntry updateEntry = (originalKey == null) ? entry : new CacheEntry(entry);
                    CacheEntry previousEntry = _layerBuilds.putIfAbsent(key, updateEntry,
                            options.isDevelopmentMode());
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add(previousEntry == null ? "update_add" : "update_hit"); //$NON-NLS-1$ //$NON-NLS-2$
                    }
                    // Write the file to disk only if the LayerBuild was successfully added to the cache
                    if (previousEntry == null) {
                        // Updated entry was added to the cache.
                        entry = updateEntry;
                        entry.persist(mgr);
                    }
                    // If the key changed, then remove the entry under the old key. Use a
                    // delay to give other threads a chance to start using the new cache
                    // key generator. No need to update entry weight in map
                    if (originalKey != null) {
                        aggr.getExecutors().getScheduledExecutor().schedule(new Runnable() {
                            public void run() {
                                _layerBuilds.remove(originalKey, originalEntry);
                            }
                        }, LAYERBUILD_REMOVE_DELAY_SECONDS, TimeUnit.SECONDS);
                    }
                } else {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_weights_2"); //$NON-NLS-1$
                    }
                    _layerBuilds.replace(key, entry, entry); // updates entry weight in map
                    entry.persist(mgr);
                }
            }
        }
        result = entry.getInputStream(request);
        setResponseHeaders(request, response, entry.getSize());

        // return the input stream to the LayerBuild
        if (log.isLoggable(Level.FINEST)) {
            log.finest(cacheInfoReport.toString() + "\n" + //$NON-NLS-1$
                    "key:" + key + //$NON-NLS-1$
                    "\n" + entry.toString()); //$NON-NLS-1$
        }
        if (_isReportCacheInfo) {
            request.setAttribute(LAYERBUILDCACHEKEY_PROPNAME, key);
        }
        return result;
    } catch (IOException e) {
        _layerBuilds.remove(key, entry);
        throw e;
    } catch (RuntimeException e) {
        _layerBuilds.remove(key, entry);
        throw e;
    } finally {
        if (_layerBuilds.isLayerEvicted()) {
            _layerBuilds.removeLayerFromCache(this);
        }
    }
}
From source file:com.glaf.core.util.DBUtils.java
public static String removeOrders(String sql) { Assert.hasText(sql);// w w w.ja v a2 s . c o m Pattern pattern = Pattern.compile("order\\s*by[\\w|\\W|\\s|\\S]*", Pattern.CASE_INSENSITIVE); Matcher matcher = pattern.matcher(sql); StringBuffer buf = new StringBuffer(); while (matcher.find()) { matcher.appendReplacement(buf, ""); } matcher.appendTail(buf); return buf.toString(); }
From source file:org.picketlink.social.standalone.login.ExternalAuthentication.java
/** * <p>//from w w w .j av a2s. c o m * Get the system property value if the string is of the format ${sysproperty} * </p> * <p> * You can insert default value when the system property is not set, by separating it at the beginning with :: * </p> * <p> * <b>Examples:</b> * </p> * * <p> * ${idp} should resolve to a value if the system property "idp" is set. * </p> * <p> * ${idp::http://localhost:8080} will resolve to http://localhost:8080 if the system property "idp" is not set. * </p> * * @param str * @return */ private String getSystemPropertyAsString(String str) { if (str.contains("${")) { Pattern pattern = Pattern.compile("\\$\\{([^}]+)}"); Matcher matcher = pattern.matcher(str); StringBuffer buffer = new StringBuffer(); String sysPropertyValue = null; while (matcher.find()) { String subString = matcher.group(1); String defaultValue = ""; // Look for default value if (subString.contains("::")) { int index = subString.indexOf("::"); defaultValue = subString.substring(index + 2); subString = subString.substring(0, index); } sysPropertyValue = SecurityActions.getSystemProperty(subString, defaultValue); matcher.appendReplacement(buffer, sysPropertyValue); } matcher.appendTail(buffer); str = buffer.toString(); } return str; }
From source file:com.ibm.jaggr.service.impl.modulebuilder.css.CSSModuleBuilder.java
/**
 * Replace <code>url(<<i>relative-path</i>>)</code> references in the
 * input CSS with
 * <code>url(data:<<i>mime-type</i>>;<<i>base64-encoded-data</i>></code>
 * ). The conversion is controlled by option settings as described in
 * {@link CSSModuleBuilder}.
 *
 * @param req
 *            The HTTP request (in-lining can be disabled per-request)
 * @param css
 *            The input CSS
 * @param res
 *            The resource for the input CSS; its URI is used to resolve
 *            relative image URLs
 * @return The transformed CSS with images in-lined as determined by option
 *         settings.
 */
protected String inlineImageUrls(HttpServletRequest req, String css, IResource res) {
    if (imageSizeThreshold == 0 && inlinedImageIncludeList.size() == 0) {
        // nothing to do
        return css;
    }

    // In-lining of imports can be disabled by request parameter for debugging
    if (!TypeUtil.asBoolean(req.getParameter(INLINEIMAGES_REQPARAM_NAME), true)) {
        return css;
    }

    StringBuffer buf = new StringBuffer();
    Matcher m = urlPattern.matcher(css);
    while (m.find()) {
        String fullMatch = m.group(0);
        String urlMatch = m.group(1);

        // remove quotes and normalize backslashes to forward slashes.
        urlMatch = quotedStringPattern.matcher(urlMatch).replaceAll(""); //$NON-NLS-1$
        urlMatch = forwardSlashPattern.matcher(urlMatch).replaceAll("/"); //$NON-NLS-1$

        // Don't do anything with non-relative URLs.
        // appendReplacement with "" followed by buf.append(fullMatch) copies the
        // match verbatim without appendReplacement interpreting $ or \ in it.
        if (urlMatch.startsWith("/") || urlMatch.startsWith("#") || protocolPattern.matcher(urlMatch).find()) { //$NON-NLS-1$ //$NON-NLS-2$
            m.appendReplacement(buf, ""); //$NON-NLS-1$
            buf.append(fullMatch);
            continue;
        }

        URI imageUri = res.getURI().resolve(urlMatch);
        boolean exclude = false, include = false;

        // Determine if this image is in the include list
        for (Pattern regex : inlinedImageIncludeList) {
            if (regex.matcher(imageUri.getPath()).find()) {
                include = true;
                break;
            }
        }

        // Determine if this image is in the exclude list
        for (Pattern regex : inlinedImageExcludeList) {
            if (regex.matcher(imageUri.getPath()).find()) {
                exclude = true;
                break;
            }
        }
        // If there's an include list, then only the files in the include list
        // will be inlined. Precedence: (non-empty include list AND not included)
        // OR excluded => skip (&& binds tighter than ||).
        if (inlinedImageIncludeList.size() > 0 && !include || exclude) {
            m.appendReplacement(buf, ""); //$NON-NLS-1$
            buf.append(fullMatch);
            continue;
        }

        boolean imageInlined = false;
        InputStream in = null;
        try {
            // In-line the image.
            URLConnection connection = imageUri.toURL().openConnection();
            in = connection.getInputStream();
            int size = connection.getContentLength();
            String type = connection.getContentType();
            if (type == null) {
                type = "content/unknown"; //$NON-NLS-1$
            }
            // Inline when explicitly included, or when (inlineable type AND under
            // the size threshold) — && binds tighter than ||.
            if (include || inlineableImageTypes.contains(type) && size <= imageSizeThreshold) {
                String base64 = getBase64(connection);
                m.appendReplacement(buf, ""); //$NON-NLS-1$
                buf.append("url('data:" + type + //$NON-NLS-1$
                        ";base64," + base64 + "')"); //$NON-NLS-1$ //$NON-NLS-2$
                imageInlined = true;
            }
        } catch (IOException ex) {
            // Unreadable image: log and fall through to keep the original URL.
            if (log.isLoggable(Level.WARNING)) {
                log.log(Level.WARNING,
                        MessageFormat.format(Messages.CSSModuleBuilder_0, new Object[] { imageUri }), ex);
            }
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignore) {
                }
            }
        }
        if (!imageInlined) {
            // Image not in-lined. Write the original URL
            m.appendReplacement(buf, ""); //$NON-NLS-1$
            buf.append(fullMatch);
        }
    }
    m.appendTail(buf);
    return buf.toString();
}
From source file:com.axelor.studio.service.data.importer.FormImporter.java
private String[] getDomainContext(String domain) { if (domain == null) { return new String[] { null }; }// ww w .ja v a 2s .c o m Matcher macher = DOMAIN_PATTERN.matcher(domain); StringBuffer sb = new StringBuffer(domain.length()); List<String> context = new ArrayList<String>(); int count = 0; while (macher.find()) { String replacement = ":_param" + count; context.add(replacement.substring(1) + ";eval" + macher.group().replace("= ", "")); macher.appendReplacement(sb, replacement); count++; } macher.appendTail(sb); if (context.isEmpty()) { return new String[] { domain }; } return new String[] { sb.toString(), Joiner.on(",").join(context) }; }