List of usage examples for java.util.regex.Pattern.quote
public static String quote(String s)
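Returns a literal pattern String for the specified String: the result is wrapped in \Q...\E, so any regex metacharacters in s are matched literally instead of being interpreted. Below is a minimal sketch of the use case that recurs in the examples on this page, splitting on a separator that is also regex syntax (the class name PatternQuoteExample and the sample path are illustrative):

import java.util.Arrays;
import java.util.regex.Pattern;

public class PatternQuoteExample {
    public static void main(String[] args) {
        String path = "usr.local.bin";
        // "." is a regex metacharacter, so splitting on the raw separator matches every character
        System.out.println(Arrays.toString(path.split(".")));                 // []
        // Pattern.quote(".") produces "\Q.\E", so the dot is matched literally
        System.out.println(Arrays.toString(path.split(Pattern.quote("."))));  // [usr, local, bin]
    }
}

The same quoting matters for File.separator: on Windows it is a single backslash, which is not even a valid regular expression on its own, so the FileManager examples below quote it before calling split.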
From source file:com.jaspersoft.studio.statistics.UsageManager.java
/**
 * Send the statistics to the defined server. They are read from the properties file and converted into a JSON
 * string. Then this string is sent to the server as a post parameter named data
 */
protected void sendStatistics() {
    BufferedReader responseReader = null;
    DataOutputStream postWriter = null;
    try {
        if (!STATISTICS_SERVER_URL.trim().isEmpty()) {
            URL obj = new URL(STATISTICS_SERVER_URL);
            HttpURLConnection con = (HttpURLConnection) obj.openConnection();

            // add request header
            con.setRequestMethod("POST"); //$NON-NLS-1$
            con.setRequestProperty("User-Agent", "Mozilla/5.0"); //$NON-NLS-1$ //$NON-NLS-2$
            con.setRequestProperty("Accept-Language", "en-US,en;q=0.5"); //$NON-NLS-1$ //$NON-NLS-2$

            // Read and convert the statistics into a JSON string
            UsagesContainer container = new UsagesContainer(getAppDataFolder().getName());
            boolean fileChanged = false;
            synchronized (UsageManager.this) {
                Properties prop = getStatisticsContainer();
                for (Object key : new ArrayList<Object>(prop.keySet())) {
                    try {
                        String[] id_category = key.toString().split(Pattern.quote(ID_CATEGORY_SEPARATOR));
                        String value = prop.getProperty(key.toString(), "0"); //$NON-NLS-1$
                        int usageNumber = Integer.parseInt(value);
                        String version = getVersion();
                        // Check if the id contains the version
                        if (id_category.length == 3) {
                            version = id_category[2];
                        } else {
                            // Old structure, remove the old entry and insert the new fixed one
                            // this is a really limit case and should almost never happen
                            prop.remove(key);
                            String fixed_key = id_category[0] + ID_CATEGORY_SEPARATOR + id_category[1]
                                    + ID_CATEGORY_SEPARATOR + version;
                            prop.setProperty(fixed_key, value);
                            fileChanged = true;
                        }
                        container.addStat(new UsageStatistic(id_category[0], id_category[1], version, usageNumber));
                    } catch (Exception ex) {
                        // if a key is invalid remove it
                        ex.printStackTrace();
                        prop.remove(key);
                        fileChanged = true;
                    }
                }
            }
            if (fileChanged) {
                // The statistics file was changed, maybe a fix or an invalid property removed
                // write it corrected on the disk
                writeStatsToDisk.cancel();
                writeStatsToDisk.setPriority(Job.SHORT);
                writeStatsToDisk.schedule(MINIMUM_WAIT_TIME);
            }
            ObjectMapper mapper = new ObjectMapper();
            String serializedData = mapper.writeValueAsString(container);

            // Send post request with the JSON string as the data parameter
            String urlParameters = "data=" + serializedData; //$NON-NLS-1$
            con.setDoOutput(true);
            postWriter = new DataOutputStream(con.getOutputStream());
            postWriter.writeBytes(urlParameters);
            postWriter.flush();
            int responseCode = con.getResponseCode();

            responseReader = new BufferedReader(new InputStreamReader(con.getInputStream()));
            String inputLine;
            StringBuffer response = new StringBuffer();
            while ((inputLine = responseReader.readLine()) != null) {
                response.append(inputLine);
            }

            // Update the upload time
            if (responseCode == 200 && ModelUtils.safeEquals(response.toString(), "ok")) {
                setInstallationInfo(TIMESTAMP_INFO, String.valueOf(getCurrentTime()));
            } else {
                // print result
                System.out.println("Response error: " + response.toString());
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        JaspersoftStudioPlugin.getInstance().logError(Messages.UsageManager_errorStatUpload, ex);
    } finally {
        FileUtils.closeStream(postWriter);
        FileUtils.closeStream(responseReader);
    }
}
From source file:display.containers.FileManager.java
public boolean isPatient(File fi) {
    String[] parts = fi.getAbsolutePath().split(Pattern.quote(File.separator));
    int serverdirlen = (SystemSettings.SERVER_INFO.getServerDir().toString()
            .split(Pattern.quote(File.separator))).length + 1; // +1 for NRI-ANALYSE and NRI-DICOM
    if (parts.length == (serverdirlen))
        return false;
    if (!fi.getName().contains("..")) {
        int count = 0;
        for (int i = serverdirlen; i < parts.length; i++) {
            if (!parts[i].isEmpty()) {
                count++;
            }
        }
        return count == 2;
    }
    return false;
}
From source file:adalid.commons.util.StrUtils.java
public static String getFileName(String string, String separator) {
    if (string == null) {
        return null;
    }
    final String defaultSeparator = "-";
    final String escapedSeparator = "\\_\\-\\.";
    final String invalidSeparator = "[^" + escapedSeparator + "]";
    final String prefixSeparators = "^[" + escapedSeparator + "]+";
    final String suffixSeparators = "[" + escapedSeparator + "]+$";
    String sep = separator == null ? defaultSeparator
            : separator.replaceAll(invalidSeparator, defaultSeparator);
    String invalidCharactersRegex = "[^\\w\\-\\.]";
    String severalSeparatorsRegex = Pattern.quote(sep) + "+";
    String dhxless = dhxless(string, sep, invalidCharactersRegex, severalSeparatorsRegex);
    String trimmed = dhxless.replaceAll(prefixSeparators, "").replaceAll(suffixSeparators, "");
    return trimmed;
}
From source file:com.cisco.dvbu.ps.deploytool.dao.jdbcapi.RegressionInputFileJdbcDAOImpl.java
/**
 * Generates pubtest input file for a given CIS instance and other input parameters, such as domain, user,
 * published datasource and others.
 *
 * @param cisServerConfig composite server object used for connections
 * @param regressionConfig regression config object
 * @param regressionQueries regression query object
 *
 * @return String representation of the input file
 *
 * @throws CompositeException
 */
public String generateInputFile(CompositeServer cisServerConfig, RegressionTestType regressionConfig,
        RegressionQueriesType regressionQueries)
        // String serverId, String dsList, String pathToRegressionXML, String pathToServersXML)
        throws CompositeException {
    // First check the input parameter values:
    if (cisServerConfig == null || regressionConfig == null) {
        throw new CompositeException("XML Configuration objects are not initialized "
                + "when trying to generate Regression input file.");
    }

    // Set the command and action name
    String command = "generateInputFile";
    String actionName = "CREATE_FILE";

    // Initialize start time and format
    Date startDate = new Date();

    // Initialize all variables
    String prefix = "generateInputFile";
    String outString = null; // Output String the above buffer is converted to.
    String queriesStr = "";
    String proceduresStr = "";
    String wsStr = "";
    boolean getActualLinkType = false;

    // Get the DEBUG3 value from the property file
    setGlobalProperties();

    populateConfigValues(regressionConfig, regressionQueries);

    totalQueriesGenerated = 0;
    totalProceduresGenerated = 0;
    totalWebServicesGenerated = 0;

    // Begin the input file generation
    if (this.needQueries) {
        /**
         * [QUERY]
         * database=MYTEST
         * SELECT count(1) cnt FROM CAT1.SCH1.customers
         */
        RegressionItem[] items = buildQueriesString(cisServerConfig, regressionConfig);
        // Output the query to the input file
        for (int i = 0; i < items.length; i++) {
            StringBuffer buf = new StringBuffer();
            RegressionItem item = new RegressionItem();
            item = items[i];
            buf.append("[QUERY]\n");
            buf.append("database=" + item.database + "\n"); // datasource
            if (item.outputFilename != null)
                buf.append("outputFilename=" + item.outputFilename + "\n"); // outputFilename
            buf.append(item.input + "\n\n"); // patterns: table | schema.table | cat.schema.table

            queriesStr = queriesStr + buf.toString();

            // Add debug statement to log output when debug3=true
            CommonUtils.writeOutput("Added query to query list: resource path=" + item.resourcePath + " type="
                    + item.resourceType + " query=" + item.input, prefix, "-debug3", logger, debug1, debug2, debug3);
        }
    }

    if (this.needProcs) {
        if (this.useSelectForProcs) {
            /**
             * [PROCEDURE]
             * database=MYTEST
             * SELECT * FROM CAT1.SCH1.LookupProduct(1)
             */
            RegressionItem[] items = buildProcsStringSelectSyntax(cisServerConfig, regressionConfig);
            // Output the query to the input file
            for (int i = 0; i < items.length; i++) {
                StringBuffer buf = new StringBuffer();
                RegressionItem item = new RegressionItem();
                item = items[i];
                buf.append("[PROCEDURE]\n");
                buf.append("database=" + item.database + "\n"); // datasource
                if (item.outTypes != null && item.outTypes.length > 0) {
                    String outTypes = null;
                    for (int j = 0; j < item.outTypes.length; j++) {
                        if (outTypes == null) {
                            outTypes = "";
                        } else {
                            outTypes = outTypes + ", ";
                        }
                        outTypes = outTypes + item.outTypes[j];
                    }
                    buf.append("outTypes=" + outTypes + "\n");
                }
                if (item.outputFilename != null)
                    buf.append("outputFilename=" + item.outputFilename + "\n"); // outputFilename
                buf.append(item.input + "\n\n"); // patterns: table | schema.table | cat.schema.table

                proceduresStr = proceduresStr + buf.toString();

                // Add debug statement to log output when debug3=true
                CommonUtils.writeOutput("Added procedure to query list: resource path=" + item.resourcePath
                        + " type=" + item.resourceType + " query=" + item.input, prefix, "-debug3", logger,
                        debug1, debug2, debug3);
            }
        } else {
            /**
             * [PROCEDURE]
             * database=MYTEST
             * CALL CAT1.SCH1.LookupProduct(1)
             */
            RegressionItem[] items = buildProcsStringCallSyntax(cisServerConfig, regressionConfig);
            // Output the query to the input file
            for (int i = 0; i < items.length; i++) {
                StringBuffer buf = new StringBuffer();
                RegressionItem item = new RegressionItem();
                item = items[i];
                buf.append("[PROCEDURE]\n");
                buf.append("database=" + item.database + "\n"); // datasource
                if (item.outTypes != null && item.outTypes.length > 0) {
                    String outTypes = null;
                    for (int j = 0; j < item.outTypes.length; j++) {
                        if (outTypes == null) {
                            outTypes = "";
                        } else {
                            outTypes = outTypes + ", ";
                        }
                        outTypes = outTypes + item.outTypes[j];
                    }
                    buf.append("outTypes=" + outTypes + "\n");
                }
                if (item.outputFilename != null)
                    buf.append("outputFilename=" + item.outputFilename + "\n"); // outputFilename
                buf.append(item.input + "\n\n"); // patterns: table | schema.table | cat.schema.table

                proceduresStr = proceduresStr + buf.toString();

                // Add debug statement to log output when debug3=true
                CommonUtils.writeOutput("Added call procedure to query list: resource path=" + item.resourcePath
                        + " type=" + item.resourceType + " query=" + item.input, prefix, "-debug3", logger,
                        debug1, debug2, debug3);
            }
        }
    }

    if (this.needWs) {
        /**
         * [WEB_SERVICE]
         * database=testWebService00_NoParams_wrapped
         * path=/soap11/testWebService00_NoParams_wrapped
         * action=ViewSales
         * encrypt=false
         * contentType=text/xml;charset=UTF-8
         * <soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:ns1="http://tempuri.org/">
         * <soapenv:Header/>
         * <soapenv:Body>
         * <ns1:ViewSales>
         * <ns1:ViewSalesInput></ns1:ViewSalesInput>
         * </ns1:ViewSales>
         * </soapenv:Body>
         * </soapenv:Envelope>
         */
        RegressionItem[] items = buildWsString(cisServerConfig, regressionConfig, getActualLinkType);
        // Output the query to the input file
        for (int i = 0; i < items.length; i++) {
            StringBuffer buf = new StringBuffer();
            RegressionItem item = new RegressionItem();
            item = items[i];
            buf.append("[WEB_SERVICE]\n");
            buf.append("database=" + item.database + "\n");
            buf.append("path=" + item.path + "\n"); // name of the web service port with path is the path in the input file
            buf.append("action=" + item.action + "\n");
            buf.append("encrypt=" + item.encrypt + "\n");
            buf.append("contentType=" + item.contentType + "\n");
            if (item.outputFilename != null)
                buf.append("outputFilename=" + item.outputFilename + "\n"); // outputFilename
            buf.append(item.input + "\n\n");

            wsStr = wsStr + buf.toString();

            // Add debug statement to log output when debug3=true
            String queryNoLines = item.input.replaceAll(Pattern.quote("\n"), Matcher.quoteReplacement(""));
            CommonUtils.writeOutput("Added web service to query list: resource path=" + item.resourcePath
                    + " type=" + item.resourceType + " query=" + queryNoLines, prefix, "-debug3", logger,
                    debug1, debug2, debug3);
        }
    }

    // Write the pubtest input file to the file system.
    outString = new String(fileDescription + queriesStr + proceduresStr + wsStr); // Built String
    String filePath = CommonUtils.extractVariable(prefix, regressionConfig.getInputFilePath(), propertyFile, true);

    // Don't execute if -noop (NO_OPERATION) has been set otherwise execute under normal operation.
    if (CommonUtils.isExecOperation()) {
        CommonUtils.createFileWithContent(filePath, outString);
    } else {
        logger.info("NO_OPERATION SET: COMMAND [" + command + "], ACTION [" + actionName + "] WAS NOT PERFORMED.");
    }

    // Print out timings
    String duration = CommonUtils.getElapsedTime(startDate);

    int len = 56;
    logger.info("--------------------------------------------------------");
    logger.info("------------ Regression Generation Summary -------------");
    logger.info("--------------------------------------------------------");
    logger.info(" ");
    logger.info(CommonUtils.rpad(" Total Queries Generated: " + totalQueriesGenerated, len, " "));
    logger.info(CommonUtils.rpad(" Total Procedures Generated: " + totalProceduresGenerated, len, " "));
    logger.info(CommonUtils.rpad(" Total Web Services Generated: " + totalWebServicesGenerated, len, " "));
    logger.info(" --------- ");
    logger.info(CommonUtils.rpad("Total Combined ---------> Generated: "
            + (totalQueriesGenerated + totalProceduresGenerated + totalWebServicesGenerated), len, " "));
    logger.info(" ");
    logger.info(CommonUtils.rpad(" Input file generation duration: " + duration, len, " "));
    logger.info(" ");
    logger.info("Review input file: " + filePath);
    logger.info("--------------------------------------------------------");

    String moduleActionMessage = "MODULE_INFO: Generate Summary: Queries=" + totalQueriesGenerated
            + " Procedures=" + totalProceduresGenerated + " WebServices=" + totalWebServicesGenerated;
    System.setProperty("MODULE_ACTION_MESSAGE", moduleActionMessage);

    return outString;
}
From source file:io.fabric8.tooling.archetype.builder.ArchetypeBuilder.java
/**
 * Copies a single file from <code>src</code> to <code>dest</code>.
 * If the file is a source file, variable references will be escaped, so they'll survive Velocity template merging.
 *
 * @param src
 * @param dest
 * @param replaceFn
 * @throws IOException
 */
private void copyFile(File src, File dest, Replacement replaceFn) throws IOException {
    if (replaceFn != null && isSourceFile(src)) {
        String original = FileUtils.readFileToString(src);
        String escapeDollarSquiggly = original;
        if (original.contains("${")) {
            String replaced = original.replaceAll(Pattern.quote("${"), "\\${D}{");
            // add Velocity expression at the beginning of the result file.
            // Velocity is used by mvn archetype:generate
            escapeDollarSquiggly = "#set( $D = '$' )\n" + replaced;
        }
        // do additional replacement
        String text = replaceFn.replace(escapeDollarSquiggly);
        FileUtils.write(dest, text);
    } else {
        if (LOG.isDebugEnabled()) {
            LOG.warn("Not a source dir as the extension is {}", FilenameUtils.getExtension(src.getName()));
        }
        FileUtils.copyFile(src, dest);
    }
}
From source file:display.containers.FileManager.java
public boolean isProject(File fi) {
    String[] parts = fi.getAbsolutePath().split(Pattern.quote(File.separator));
    int serverdirlen = (SystemSettings.SERVER_INFO.getServerDir().toString()
            .split(Pattern.quote(File.separator))).length + 1; // +1 for NRI-ANALYSE and NRI-DICOM
    if (parts.length == (serverdirlen))
        return false;
    if (!fi.getName().contains("..")) {
        int count = 0;
        for (int i = serverdirlen; i < parts.length; i++) {
            if (!parts[i].isEmpty()) {
                count++;
            }
        }
        return count == 1;
    }
    return false;
}
From source file:com.hichinaschool.flashcards.libanki.Finder.java
public static int findReplace(Collection col, List<Long> nids, String src, String dst, boolean isRegex,
        String field, boolean fold) {
    Map<Long, Integer> mmap = new HashMap<Long, Integer>();
    if (field != null) {
        try {
            for (JSONObject m : col.getModels().all()) {
                JSONArray flds = m.getJSONArray("flds");
                for (int fi = 0; fi < flds.length(); ++fi) {
                    JSONObject f = flds.getJSONObject(fi);
                    if (f.getString("name").equals(field)) {
                        mmap.put(m.getLong("id"), f.getInt("ord"));
                    }
                }
            }
        } catch (JSONException e) {
            throw new RuntimeException(e);
        }
        if (mmap.isEmpty()) {
            return 0;
        }
    }
    // find and gather replacements
    if (!isRegex) {
        src = Pattern.quote(src);
    }
    if (fold) {
        src = "(?i)" + src;
    }
    Pattern regex = Pattern.compile(src);

    ArrayList<Object[]> d = new ArrayList<Object[]>();

    String sql = "select id, mid, flds from notes where id in " + Utils.ids2str(nids.toArray(new Long[] {}));
    nids = new ArrayList<Long>();
    Cursor cur = null;
    try {
        cur = col.getDb().getDatabase().rawQuery(sql, null);
        while (cur.moveToNext()) {
            String flds = cur.getString(2);
            String origFlds = flds;
            // does it match?
            String[] sflds = Utils.splitFields(flds);
            if (field != null) {
                long mid = cur.getLong(1);
                if (!mmap.containsKey(mid)) {
                    continue;
                }
                int ord = mmap.get(mid);
                sflds[ord] = regex.matcher(sflds[ord]).replaceAll(dst);
            } else {
                for (int i = 0; i < sflds.length; ++i) {
                    sflds[i] = regex.matcher(sflds[i]).replaceAll(dst);
                }
            }
            flds = Utils.joinFields(sflds);
            if (!flds.equals(origFlds)) {
                long nid = cur.getLong(0);
                nids.add(nid);
                d.add(new Object[] { flds, Utils.intNow(), col.usn(), nid });
            }
        }
    } finally {
        if (cur != null) {
            cur.close();
        }
    }
    if (d.isEmpty()) {
        return 0;
    }
    // replace
    col.getDb().executeMany("update notes set flds=?,mod=?,usn=? where id=?", d);
    long[] pnids = Utils.toPrimitive(nids);
    col.updateFieldCache(pnids);
    col.genCards(pnids);
    return d.size();
}
From source file:de.dfki.km.leech.parser.wikipedia.WikipediaDumpParser.java
protected void parseInfoBox(String strText, Metadata metadata, ContentHandler handler) throws SAXException {
    // attribute-value pairs are separated by |. Values can also contain line breaks ('<br />'), which count as an enumeration, e.g.:
    // |Single1 |Datum1 , Besetzung1a Besetzung1b, Sonstiges1Titel |Sonstiges1Inhalt , Coverversion3 |Jahr3
    // | 1Option = 3
    // | 1Option Name = Demos
    // | 1Option Link = Demos
    // | 1Option Color =

    // first, cut out the infobox. (?m) is multiline and (?s) is dotall ('.' also matches line breaks)
    int iStartInfoBox = -1;
    int iEndInfoBox = -1;
    MatchResult infoMatch = StringUtils.findFirst("\\{\\{\\s*Infobox", strText);
    if (infoMatch != null) {
        iStartInfoBox = infoMatch.start();
        iEndInfoBox = StringUtils.findMatchingBracket(iStartInfoBox, strText) + 1;
    } else
        return;

    if (strText.length() < 3 || strText.length() < iEndInfoBox || iEndInfoBox <= 0
            || (iStartInfoBox + 2) > iEndInfoBox)
        return;

    String strInfoBox = "";
    strInfoBox = strText.substring(iStartInfoBox + 2, iEndInfoBox);

    if (strInfoBox.length() < 5)
        return;

    String strCleanedInfoBox = m_wikiModel.render(new PlainTextConverter(),
            strInfoBox.replaceAll("<br />", "<br />"));

    // since these are effectively relational records, we turn them into separate documents

    // System.out.println(strCleanedInfoBox);
    // System.out.println(strCleanedInfoBox.substring(0, strCleanedInfoBox.indexOf("\n")).trim());

    // the first line names the infobox
    int iIndex = strCleanedInfoBox.indexOf("|");
    if (iIndex == -1)
        iIndex = strCleanedInfoBox.indexOf("\n");
    if (iIndex == -1)
        return;

    String strInfoBoxName = strCleanedInfoBox.substring(7, iIndex).trim();
    metadata.add(infobox, strInfoBoxName);

    String[] straCleanedInfoBoxSplit = strCleanedInfoBox.split("\\s*\\|\\s*");

    HashMap<String, MultiValueHashMap<String, String>> hsSubDocId2AttValuePairsOfSubDoc =
            new HashMap<String, MultiValueHashMap<String, String>>();

    for (String strAttValuePair : straCleanedInfoBoxSplit) {
        // System.out.println("\nattValPair unsplittet " + strAttValuePair);

        // the pairs are separated by '='
        String[] straAtt2Value = strAttValuePair.split("=");

        if (straAtt2Value.length == 0 || straAtt2Value[0] == null)
            continue;
        if (straAtt2Value.length < 2 || straAtt2Value[1] == null)
            continue;

        String strAttName = straAtt2Value[0].trim();
        String strAttValues = straAtt2Value[1];
        if (StringUtils.nullOrWhitespace(strAttValues))
            continue;

        // values can also contain line breaks ('<br />'), which count as an enumeration
        String[] straAttValues = strAttValues.split(Pattern.quote("<br />"));

        // XXX additional information in parentheses is discarded for now - it could also be stored as attnameAddInfo
        // in an extra attribute - but then one would again have to check whether a sub-document has to be created
        // (e.g. several genre entries, each with its own year)

        // the attribute name now decides whether a separate document should be created or not. If it contains a
        // number, the number is removed and all att-value pairs with that number are grouped into an extra
        // record (MultiValueHashMap)
        Matcher numberMatcher = Pattern.compile("([\\D]*)(\\d+)([\\D]*)").matcher(strAttName);

        if (!numberMatcher.find()) {
            // no number in the attribute name - simply add the value to the metadata
            for (String strAttValue : straAttValues) {
                String strCleanedAttValue = cleanAttValue(strAttName, strAttValue);
                if (strCleanedAttValue != null)
                    metadata.add(strAttName, strCleanedAttValue);
            }
        } else {
            // the name contains a number - store the value in a sub-document under the id <number>
            String strPrefix = numberMatcher.group(1);
            String strNumber = numberMatcher.group(2);
            String strSuffix = numberMatcher.group(3);

            String strDataSetId = strPrefix + strNumber;
            String strFinalAttName = strPrefix + strSuffix;

            // if there are even more numbers we give up - and simply add the value
            if (numberMatcher.find()) {
                for (String strAttValue : straAttValues) {
                    String strCleanedAttValue = cleanAttValue(strFinalAttName, strAttValue);
                    if (strCleanedAttValue != null)
                        metadata.add(strFinalAttName, strCleanedAttValue);
                }
            }

            // System.out.println("prefix " + strPrefix);
            // System.out.println("num " + strDataSetId);
            // System.out.println("suffix " + strSuffix);

            MultiValueHashMap<String, String> hsAttname2ValueOfSubDoc =
                    hsSubDocId2AttValuePairsOfSubDoc.get(strDataSetId);
            if (hsAttname2ValueOfSubDoc == null) {
                hsAttname2ValueOfSubDoc = new MultiValueHashMap<String, String>();
                hsSubDocId2AttValuePairsOfSubDoc.put(strDataSetId, hsAttname2ValueOfSubDoc);
            }

            for (String strAttValue : straAttValues)
                hsAttname2ValueOfSubDoc.add(strFinalAttName, strAttValue.replaceAll("\\(.*?\\)", "").trim());
        }
    }

    String strPageId = new UID().toString();
    metadata.add(LeechMetadata.id, strPageId);

    // we have to use the same metadata Object
    Metadata metadataBackup4ParentPage = TikaUtils.copyMetadata(metadata);

    for (MultiValueHashMap<String, String> hsAttValuePairsOfSubDoc : hsSubDocId2AttValuePairsOfSubDoc.values()) {
        TikaUtils.clearMetadata(metadata);

        // the reference to the parent
        metadata.add(LeechMetadata.parentId, strPageId);
        metadata.add(infobox, strInfoBoxName);
        String strChildId = new UID().toString();
        metadata.add(LeechMetadata.id, strChildId);
        // for back-referencing, also give the parent our id
        metadataBackup4ParentPage.add(LeechMetadata.childId, strChildId);

        for (Entry<String, String> attName2Value4SubDoc : hsAttValuePairsOfSubDoc.entryList()) {
            String strAttName = attName2Value4SubDoc.getKey();
            String strAttValue = attName2Value4SubDoc.getValue();

            String strCleanedAttValue = cleanAttValue(strAttName, strAttValue);
            if (strCleanedAttValue != null)
                metadata.add(strAttName, strCleanedAttValue);
        }

        metadata.add(Metadata.CONTENT_TYPE, "application/wikipedia-meta+xml");

        // this way the enclosing ContentHandler receives several documents :)
        XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
        xhtml.startDocument();
        xhtml.endDocument();
    }

    TikaUtils.clearMetadata(metadata);
    TikaUtils.copyMetadataFromTo(metadataBackup4ParentPage, metadata);
}
From source file:com.microfocus.application.automation.tools.octane.configuration.JobConfigurationProxy.java
@JavaScriptMethod
public JSONObject searchReleases(String term, String instanceId, long workspaceId) {
    int defaultSize = 5;
    JSONObject ret = new JSONObject();
    OctaneClient octaneClient = OctaneSDK.getClientByInstanceId(instanceId);

    try {
        ResponseEntityList releasePagedList = queryReleasesByName(octaneClient, term, workspaceId, defaultSize);
        List<Entity> releases = releasePagedList.getData();
        boolean moreResults = releasePagedList.getTotalCount() > releases.size();

        JSONArray retArray = new JSONArray();
        if (moreResults) {
            retArray.add(createMoreResultsJson());
        }

        String quotedTerm = Pattern.quote(term.toLowerCase());
        if (Pattern.matches(".*" + quotedTerm + ".*", NOT_SPECIFIED.toLowerCase())) {
            JSONObject notSpecifiedItemJson = new JSONObject();
            notSpecifiedItemJson.put("id", -1);
            notSpecifiedItemJson.put("text", NOT_SPECIFIED);
            retArray.add(notSpecifiedItemJson);
        }

        for (Entity release : releases) {
            JSONObject relJson = new JSONObject();
            relJson.put("id", release.getId());
            relJson.put("text", release.getName());
            retArray.add(relJson);
        }
        ret.put("results", retArray);
    } catch (Exception e) {
        logger.warn("Failed to retrieve releases", e);
        return error("Unable to retrieve releases");
    }
    return ret;
}
From source file:com.dtolabs.rundeck.core.authorization.RuleEvaluator.java
private Pattern patternForRegex(final String regex) {
    if (!patternCache.containsKey(regex)) {
        Pattern compile = null;
        try {
            compile = Pattern.compile(regex);
        } catch (Exception e) {
            // invalid regex
        }
        if (null == compile) {
            // create equality match regex
            compile = Pattern.compile("^" + Pattern.quote(regex) + "$");
        }
        patternCache.putIfAbsent(regex, compile);
    }
    return patternCache.get(regex);
}