List of usage examples for java.util.regex Matcher replaceAll
public String replaceAll(String replacement)
public String replaceAll(Function<MatchResult, String> replacer)
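Before the real-world usages below, here is a minimal, self-contained sketch of both common ways to call replaceAll: the String-replacement form used by every example on this page, and the Function<MatchResult, String> form available since Java 9 for computed replacements. The class name and input strings are illustrative only.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ReplaceAllDemo {
    public static void main(String[] args) {
        // String-replacement overload: "$1" and "$2" refer to capturing groups.
        Matcher ranges = Pattern.compile("(\\d+)-(\\d+)").matcher("10-20 and 30-40");
        System.out.println(ranges.replaceAll("$1..$2"));   // prints: 10..20 and 30..40

        // Function-based overload (Java 9+): compute each replacement from the MatchResult.
        Matcher numbers = Pattern.compile("\\d+").matcher("price 7, qty 12");
        System.out.println(numbers.replaceAll(r -> String.valueOf(Integer.parseInt(r.group()) * 2)));
        // prints: price 14, qty 24
    }
}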
From source file:com.googlecode.fascinator.portal.pages.Dispatch.java
private void fileProcessing() {
    // What we are looking for
    UploadedFile uploadedFile = null;
    String workflowId = null;

    // Roles of current user
    String username = (String) sessionState.get("username");
    String userSource = (String) sessionState.get("source");
    List<String> roles = null;
    try {
        User user = security.getUser(sessionState, username, userSource);
        String[] roleArray = security.getRolesList(sessionState, user);
        roles = Arrays.asList(roleArray);
    } catch (AuthenticationException ex) {
        log.error("Error retrieving user data.");
        return;
    }

    // The workflow we're using
    List<String> reqParams = request.getParameterNames();
    log.info("REQUEST {}", request);
    log.info("reqParams {}", reqParams);
    if (reqParams.contains("upload-file-workflow")) {
        workflowId = request.getParameter("upload-file-workflow");
    }
    if (workflowId == null) {
        log.error("No workflow provided with form data.");
        return;
    }

    // Upload context
    String uploadContext = "";
    if (reqParams.contains("upload-file-context")) {
        uploadContext = request.getParameter("upload-file-context");
    }

    JsonSimple workflowConfig = sysConfig.getJsonSimpleMap("uploader").get(workflowId);

    // Roles allowed to upload into this workflow
    boolean security_check = false;
    for (String role : workflowConfig.getStringList("security")) {
        if (roles.contains(role)) {
            security_check = true;
        }
    }
    if (!security_check) {
        log.error("Security error, current user not allowed to upload.");
        return;
    }

    // Get the workflow's file directory
    String file_path = workflowConfig.getString(null, "upload-path");

    // Get the uploaded file
    for (String param : reqParams) {
        UploadedFile tmpFile = decoder.getFileUpload(param);
        if (tmpFile != null) {
            // Our file
            uploadedFile = tmpFile;
        }
    }
    if (uploadedFile == null) {
        log.error("No uploaded file found!");
        return;
    }

    // Write the file to that directory
    if (uploadContext == null || "".equals(uploadContext)) {
        file_path = file_path + "/" + uploadedFile.getFileName();
    } else {
        file_path = file_path + "/" + uploadContext + "/" + uploadedFile.getFileName();
    }
    File file = new File(file_path);
    if (!file.exists()) {
        file.getParentFile().mkdirs();
        try {
            file.createNewFile();
        } catch (IOException ex) {
            log.error("Failed writing file", ex);
            return;
        }
    }
    uploadedFile.write(file);

    // Make sure the new file gets harvested
    String configPath = workflowConfig.getString(null, "json-config");
    if (configPath == null) {
        log.error("No harvest configuration file provided!");
        return;
    }
    File harvestFile = new File(configPath);

    // Get the workflow template needed for stage 1
    String template = "";
    try {
        JsonSimple harvestConfig = new JsonSimple(harvestFile);
        List<JsonSimple> stages = harvestConfig.getJsonSimpleList("stages");
        if (stages.size() > 0) {
            template = stages.get(0).getString(null, "template");
        }
    } catch (IOException ex) {
        log.error("Unable to access workflow config : ", ex);
    }

    HarvestClient harvester = null;
    String oid = null;
    String error = null;
    try {
        harvester = new HarvestClient(harvestFile, file, username);
        harvester.start();
        oid = harvester.getUploadOid();
        harvester.shutdown();
    } catch (PluginException ex) {
        error = "File upload failed : " + ex.getMessage();
        log.error(error);
        harvester.shutdown();
    } catch (Exception ex) {
        log.error("Failed harvest", ex);
        return;
    }

    boolean success = file.delete();
    if (!success) {
        log.error("Error deleting uploaded file from cache: " + file.getAbsolutePath());
    }

    // Now create some session data for use later
    Map<String, String> file_details = new LinkedHashMap<String, String>();
    file_details.put("name", uploadedFile.getFileName());
    if (error != null) {
        // Strip our package/class details from error string
        Pattern pattern = Pattern.compile("au\\..+Exception:");
        Matcher matcher = pattern.matcher(error);
        file_details.put("error", matcher.replaceAll(""));
    }
    file_details.put("workflow", workflowId);
    file_details.put("template", template);
    file_details.put("location", file_path);
    file_details.put("size", String.valueOf(uploadedFile.getSize()));
    file_details.put("type", uploadedFile.getContentType());
    if (oid != null) {
        file_details.put("oid", oid);
    }

    // Helps some browsers (like IE7) resolve the path from the form
    sessionState.set("fileName", uploadedFile.getFileName());
    sessionState.set(uploadedFile.getFileName(), file_details);
    formData.set("fileProcessing", "true");
    formData.set("oid", oid);
    sessionState.set("uploadFormData", formData);
}
From source file:org.lanes.text.mining.EntityRecogniser.java
public String determineEntityClassOld(String entity, double acceptancethreshold) {
    long timestart = System.currentTimeMillis();
    entity = entity.trim();
    String isa = "";
    String confidence = "0";
    double totalfreq = 0;
    double personfreq = 0;
    double locationfreq = 0;
    double organisationfreq = 0;
    double medicalcondfreq = 0;
    double consumablefreq = 0;
    double personsimstrfactor = 0;
    double locationsimstrfactor = 0;
    double organisationsimstrfactor = 0;
    double medicalcondsimstrfactor = 0;
    double consumesimstrfactor = 0;
    try {
        String intextquery = "";
        String intitlequery = "";
        String[] toks = entity.split(" ");
        for (String tok : toks) {
            intextquery = intextquery + tok + " AND ";
            intitlequery = tok;
        }
        Matcher replace1 = Pattern.compile(" AND $").matcher(intextquery);
        intextquery = replace1.replaceAll("");
        ModifiableSolrParams param = simobj.formulateQuery("titleText:" + intitlequery + " text:" + intextquery, 15);
        QueryResponse response = solrserver.query(param);
        for (SolrDocument doc : response.getResults()) {
            boolean isPerson = false;
            boolean isLocation = false;
            boolean isOrganisation = false;
            boolean isMedicalCond = false;
            boolean isConsumable = false;
            Collection<String> fnames = doc.getFieldNames();
            for (String fname : fnames) {
                if (fname.equals("titleText")) {
                    String title = (String) doc.getFieldValue(fname);
                    double strsim = FuzzyMatcher.stringSim(title, entity);
                    //System.err.println("title:" + title);
                    List<String> parents = (new Conceptualiser()).findNeighbours(title, "HYPERNYMY");
                    if (parents.size() > 0) {
                        for (String parent : parents) {
                            //System.err.println("\t" + parent);
                            Matcher matcherper = Pattern.compile(
                                    "(?:people|occupations|actors|scientists|politicians|academics|artists|chefs|surnames|rulers|producers)",
                                    Pattern.CASE_INSENSITIVE).matcher(parent);
                            if (matcherper.find()) {
                                isPerson = true;
                            }
                            Matcher matcherloc = Pattern.compile(
                                    "(?:capitals|cities|countries|continents|places|towns|districts|electoral_divisions|roads|district|housing_estates|communes|prefectures|states|nations|regions_of|territories)",
                                    Pattern.CASE_INSENSITIVE).matcher(parent);
                            if (matcherloc.find()) {
                                isLocation = true;
                            }
                            Matcher matcherorg = Pattern.compile(
                                    "(?:hospitals|manufacturers|consortium|schools|companies|organisations|universities|colleges)",
                                    Pattern.CASE_INSENSITIVE).matcher(parent);
                            if (matcherorg.find()) {
                                isOrganisation = true;
                            }
                            Matcher matcherddi = Pattern.compile(
                                    "(?:vascular-related_cutaneous_conditions|syndromes|diseases|disorders|substance_abuse|medical_emergencies|poisoning|symptoms_and_signs)",
                                    Pattern.CASE_INSENSITIVE).matcher(parent);
                            if (matcherddi.find()) {
                                isMedicalCond = true;
                            }
                            Matcher matcherconsume = Pattern.compile(
                                    "(?:meats|breakfasts|desserts|foods|food|appetizers|cuisine|dishes|dairy_products|supplements|drugs|analgesics|euphoriants|stimulants|neurotransmitters|opioids|opiates)",
                                    Pattern.CASE_INSENSITIVE).matcher(parent);
                            if (matcherconsume.find()) {
                                isConsumable = true;
                            }
                        }
                        if (strsim > personsimstrfactor && isPerson) {
                            personsimstrfactor = strsim;
                        }
                        if (strsim > locationsimstrfactor && isLocation) {
                            locationsimstrfactor = strsim;
                        }
                        if (strsim > organisationsimstrfactor && isOrganisation) {
                            organisationsimstrfactor = strsim;
                        }
                        if (strsim > medicalcondsimstrfactor && isMedicalCond) {
                            medicalcondsimstrfactor = strsim;
                        }
                        if (strsim > consumesimstrfactor && isConsumable) {
                            consumesimstrfactor = strsim;
                        }
                        totalfreq++;
                    }
                }
            }
            if (isPerson) {
                personfreq++;
            }
            if (isLocation) {
                locationfreq++;
            }
            if (isOrganisation) {
                organisationfreq++;
            }
            if (isMedicalCond) {
                medicalcondfreq++;
            }
            if (isConsumable) {
                consumablefreq++;
            }
        }
        double personscore = CommonData.roundDecimal((personfreq / totalfreq) * personsimstrfactor * 100, "##.####");
        double locationscore = CommonData.roundDecimal((locationfreq / totalfreq) * locationsimstrfactor * 100, "##.####");
        double organisationscore = CommonData
                .roundDecimal((organisationfreq / totalfreq) * organisationsimstrfactor * 100, "##.####");
        double medicalcondscore = CommonData
                .roundDecimal((medicalcondfreq / totalfreq) * medicalcondsimstrfactor * 100, "##.####");
        double consumablescore = CommonData
                .roundDecimal((consumablefreq / totalfreq) * consumesimstrfactor * 100, "##.####");
        if (personscore > acceptancethreshold && personscore >= locationscore && personscore >= organisationscore
                && personscore >= medicalcondscore && personscore >= consumablescore) {
            isa = isa + "[PERSON]";
            confidence = String.valueOf(personscore);
        }
        if (locationscore > acceptancethreshold && locationscore >= personscore && locationscore >= organisationscore
                && locationscore >= medicalcondscore && locationscore >= consumablescore) {
            isa = isa + "[LOCATION]";
            confidence = String.valueOf(locationscore);
        }
        if (organisationscore > acceptancethreshold && organisationscore >= personscore
                && organisationscore >= locationscore && organisationscore >= medicalcondscore
                && organisationscore >= consumablescore) {
            isa = isa + "[ORGANISATION]";
            confidence = String.valueOf(organisationscore);
        }
        if (medicalcondscore > acceptancethreshold && medicalcondscore >= personscore
                && medicalcondscore >= locationscore && medicalcondscore >= organisationscore
                && medicalcondscore >= consumablescore) {
            isa = isa + "[MEDICALCOND]";
            confidence = String.valueOf(medicalcondscore);
        }
        if (consumablescore > acceptancethreshold && consumablescore >= personscore
                && consumablescore >= locationscore && consumablescore >= organisationscore
                && consumablescore >= medicalcondscore) {
            isa = isa + "[FOODDRUG]";
            confidence = String.valueOf(consumablescore);
        }
        //System.err.println("isPerson: (" + personscore + ") = (" + personfreq + "/" + totalfreq + ") x (" + personsimstrfactor + ")");
        //System.err.println("isLocation: (" + locationscore + ") = (" + locationfreq + "/" + totalfreq + ") x (" + locationsimstrfactor + ")");
        //System.err.println("isOrganisation: (" + organisationscore + ") = (" + organisationfreq + "/" + totalfreq + ") x (" + organisationsimstrfactor + ")");
        //System.err.println("isMedicalCond: (" + medicalcondscore + ") = (" + medicalcondfreq + "/" + totalfreq + ") x (" + medicalcondsimstrfactor + ")");
        //System.err.println("isFoodDrug: (" + consumablescore + ") = (" + consumablefreq + "/" + totalfreq + ") x (" + consumesimstrfactor + ")");
    } catch (Exception e) {
        e.printStackTrace();
    }
    return isa + ":" + confidence;
}
From source file:ee.ioc.cs.vsle.synthesize.SpecParser.java
/**
 * Extracts the specification from the java file, also removing unnecessary
 * whitespace.
 *
 * @param fileString a (Java) file containing the specification
 * @return specification text
 * @throws SpecParseException
 */
private static String refineSpec(String fileString) {
    Matcher matcher;
    // remove comments before removing the line break \n
    String[] s = fileString.split("\n");
    StringBuilder tmpBuf = new StringBuilder(fileString.length() / 2);
    for (int i = 0; i < s.length; i++) {
        if (!s[i].trim().startsWith("//")) {
            tmpBuf.append(s[i]);
        }
    }
    // remove unneeded whitespace
    matcher = PATTERN_WHITESPACE.matcher(tmpBuf);
    // This is broken as spaces should not be replaced in,
    // e.g. string literals. Keeping it now for compatibility.
    tmpBuf.replace(0, tmpBuf.length(), matcher.replaceAll(" "));
    // find spec
    matcher = PATTERN_SPEC.matcher(tmpBuf);
    if (matcher.find()) {
        StringBuilder sc = new StringBuilder();
        if (matcher.group(2) != null) {
            sc.append("super");
            String[] superclasses = matcher.group(2).split(",");
            for (int i = 0; i < superclasses.length; i++) {
                String t = superclasses[i].trim();
                if (t.length() > 0) {
                    sc.append("#");
                    sc.append(t);
                }
            }
            sc.append(";\n");
        }
        return sc.append(matcher.group(3)).toString();
    }
    throw new SpecParseException("Specification parsing error");
}
From source file:com.adobe.cq.dialogconversion.impl.rules.NodeBasedRewriteRule.java
/**
 * Applies a string rewrite to a property.
 *
 * @param property the property to rewrite
 * @param rewriteProperty the property that defines the string rewrite
 */
private void rewriteProperty(Property property, Property rewriteProperty) throws RepositoryException {
    if (property.getType() == PropertyType.STRING) {
        if (rewriteProperty.isMultiple() && rewriteProperty.getValues().length == 2) {
            Value[] rewrite = rewriteProperty.getValues();
            if (rewrite[0].getType() == PropertyType.STRING && rewrite[1].getType() == PropertyType.STRING) {
                String pattern = rewrite[0].toString();
                String replacement = rewrite[1].toString();
                Pattern compiledPattern = Pattern.compile(pattern);
                Matcher matcher = compiledPattern.matcher(property.getValue().toString());
                property.setValue(matcher.replaceAll(replacement));
            }
        }
    }
}
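A small caveat for examples like the one above, where the replacement string comes from data rather than from a literal: Matcher.replaceAll treats backslashes and dollar signs in the replacement specially (escapes and group references), so data-driven replacements are usually wrapped in Matcher.quoteReplacement first. The sketch below is a standalone illustration with made-up values, not part of the AEM code above.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class QuoteReplacementDemo {
    public static void main(String[] args) {
        // Hypothetical stand-ins for a pattern and replacement read from content.
        String pattern = "price";
        String replacement = "US$ 5";   // contains a literal '$'

        Matcher matcher = Pattern.compile(pattern).matcher("price list");
        // matcher.replaceAll(replacement) would throw IllegalArgumentException here,
        // because '$' in the replacement is parsed as a group reference.
        String safe = matcher.replaceAll(Matcher.quoteReplacement(replacement));
        System.out.println(safe);   // prints: US$ 5 list
    }
}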
From source file:com.quattroresearch.helm.HlConfigEntry.java
public static String regexEngine(String sectionString, HlConfigEntry configEntry) {
    String outString = "";
    String hlTarget;
    Pattern pattern;
    Matcher matcher;
    pattern = Pattern.compile("^.*SECTION_(.*)");
    matcher = pattern.matcher(configEntry.category);
    matcher.find();
    hlTarget = matcher.group(1);
    // log("HLTARGET=" + hlTarget + ", SectionString: '" + sectionString + "'" + "configEntry.category=" + configEntry.category);
    switch (hlTarget) {
    case "PEPTIDE_ID":
        pattern = Pattern.compile("(PEPTIDE[0-9]+)");
        break;
    case "RNA_ID":
        pattern = Pattern.compile("(RNA[0-9]+)");
        break;
    case "CHEM_ID":
        pattern = Pattern.compile("(CHEM[0-9]+)");
        break;
    case "BLOB_ID":
        pattern = Pattern.compile("(BLOB[0-9]+)");
        break;
    case "GROUP_ID":
        pattern = Pattern.compile("(G[0-9]+)");
        break;
    case "BRANCH_MONOMER_ID":
        pattern = Pattern.compile("(\\([A-Z]\\))");
        break;
    case "MULTICHAR_MONOMER_ID":
        pattern = Pattern.compile("(\\[[A-Za-z]+\\])");
        break;
    case "MONOMER_SEPARATOR_DOT":
        pattern = Pattern.compile("(\\.)");
        break;
    case "SEPARATOR_PIPE":
        pattern = Pattern.compile("(\\|)");
        break;
    }
    matcher = pattern.matcher(sectionString);
    outString = matcher.replaceAll(configEntry.preHtmlSeq + "$1" + configEntry.postHtmlSeq);
    // log("OUTSTRING=" + outString);
    return (outString);
}
From source file:me.doshou.admin.maintain.staticresource.web.controller.StaticResourceVersionController.java
private String versionedStaticResourceContent(String fileRealPath, String content, String newVersion)
        throws IOException {
    content = StringEscapeUtils.unescapeXml(content);
    if (newVersion != null && newVersion.equals("1")) {
        newVersion = "?" + newVersion;
    }
    File file = new File(fileRealPath);
    List<String> contents = FileUtils.readLines(file);
    for (int i = 0, l = contents.size(); i < l; i++) {
        String fileContent = contents.get(i);
        if (content.equals(fileContent)) {
            Matcher matcher = scriptPattern.matcher(content);
            if (!matcher.matches()) {
                matcher = linkPattern.matcher(content);
            }
            if (newVersion == null) {
                // content = matcher.replaceAll("$1$2$5");
            } else {
                content = matcher.replaceAll("$1$2$3" + newVersion + "$5");
            }
            contents.set(i, content);
            break;
        }
    }
    FileUtils.writeLines(file, contents);
    return content;
}
From source file:gov.va.vinci.v3nlp.negex.GenNegEx.java
public String negCheck(String sentenceString, String phraseString, List<String> ruleStrings,
        boolean negatePossible) throws Exception {
    Sorter s = new Sorter();
    String sToReturn = "";
    String sScope = "";
    List<String> sortedRules = new ArrayList<String>();
    String filler = "_";
    boolean negPoss = negatePossible;

    // Sort the rules by length in descending order.
    // Rules need to be sorted so the longest rule is always tried to match first.
    // Some of the rules overlap, so without sorting, shorter rules (some of them POSSIBLE or PSEUDO)
    // would match before longer legitimate negation rules.
    //
    // There is an efficiency issue here. It is better if rules are sorted by the
    // calling program once and used without sorting in GenNegEx.
    sortedRules = s.sortRules(ruleStrings);

    // Process the sentence and tag each matched negation
    // rule with the correct negation rule tag.
    //
    // At the same time check for the phrase that we want to decide
    // the negation status for and tag the phrase with [PHRASE] ... [PHRASE].
    // In both the negation rules and in the phrase, replace white space
    // with the "filler" string. (This could cause problems if the sentences
    // we study have "filler" on their own.)

    // Sentence needs one character at the beginning and end to match.
    // We remove the extra characters after processing.
    String sentence = "." + sentenceString + ".";

    // Tag the phrases we want to detect for negation.
    // Should happen before rule detection.
    String phrase = phraseString;
    Pattern pph = null;
    try {
        pph = Pattern.compile(phrase.trim(), Pattern.CASE_INSENSITIVE);
    } catch (Exception e) {
        // If there was an exception, escape the phrase for special regex characters. It is more
        // efficient to only escape on error, as most phrases will work fine.
        logger.info("In Special processing... (" + phrase.trim() + ")");
        pph = Pattern.compile(escapeRegexCharacters(phrase.trim()), Pattern.CASE_INSENSITIVE);
    }
    Matcher mph = pph.matcher(sentence);

    while (mph.find() == true) {
        sentence = mph.replaceAll(" [PHRASE]" + mph.group().trim().replaceAll(" ", filler) + "[PHRASE]");
    }

    Iterator<String> iRule = sortedRules.iterator();
    while (iRule.hasNext()) {
        String rule = iRule.next();
        Pattern p = Pattern.compile("[\\t]+"); // Working.
        String[] ruleTokens = p.split(rule.trim());
        // Add the regular expression characters to tokens and assemble the rule again.
        String[] ruleMembers = ruleTokens[0].trim().split(" ");
        String rule2 = "";
        for (int i = 0; i <= ruleMembers.length - 1; i++) {
            if (!ruleMembers[i].equals("")) {
                if (ruleMembers.length == 1) {
                    rule2 = ruleMembers[i];
                } else {
                    rule2 = rule2 + ruleMembers[i].trim() + "\\s+";
                }
            }
        }

        // Remove the last \s+
        if (rule2.endsWith("\\s+")) {
            rule2 = rule2.substring(0, rule2.lastIndexOf("\\s+"));
        }

        rule2 = "(?m)(?i)[[\\p{Punct}&&[^\\]\\[]]|\\s+](" + rule2 + ")[[\\p{Punct}&&[^_]]|\\s+]";

        Pattern p2 = Pattern.compile(ruleTokens[0].trim());
        Matcher m = p2.matcher(sentence);

        while (m.find()) {
            String rpWith = ruleTokens[2].substring(2).trim();
            sentence = m.replaceAll(" " + rpWith + m.group().trim().replaceAll(" ", filler) + rpWith + " ");
        }
    }

    // Exchange the [PHRASE] ... [PHRASE] tags for [NEGATED] ... [NEGATED]
    // based on PREN, POST rules and, if the flag is set to true,
    // then based on PREP and POSP as well.
    // Because PRENEGATION [PREN] is checked first it takes precedence over
    // POSTNEGATION [POST].
    // Similarly POSTNEGATION [POST] takes precedence over POSSIBLE PRENEGATION [PREP]
    // and [PREP] takes precedence over POSSIBLE POSTNEGATION [POSP].

    Pattern pSpace = Pattern.compile("[\\s+]");
    String[] sentenceTokens = pSpace.split(sentence);
    StringBuilder sb = new StringBuilder();

    // Check for [PREN]
    for (int i = 0; i < sentenceTokens.length; i++) {
        sb.append(" " + sentenceTokens[i].trim());
        if (sentenceTokens[i].trim().startsWith("[PREN]")
                || sentenceTokens[i].trim().startsWith("[PRE_NEG]")) {
            for (int j = i + 1; j < sentenceTokens.length; j++) {
                if (sentenceTokens[j].trim().startsWith("[CONJ]")
                        || sentenceTokens[j].trim().startsWith("[PSEU]")
                        || sentenceTokens[j].trim().startsWith("[POST]")
                        || sentenceTokens[j].trim().startsWith("[PREP]")
                        || sentenceTokens[j].trim().startsWith("[POSP]")) {
                    break;
                }
                if (sentenceTokens[j].trim().startsWith("[PHRASE]")) {
                    sentenceTokens[j] = sentenceTokens[j].trim().replaceAll("\\[PHRASE\\]", "[NEGATED]");
                }
            }
        }
    }
    sentence = sb.toString();

    pSpace = Pattern.compile("[\\s+]");
    sentenceTokens = pSpace.split(sentence);
    StringBuilder sb2 = new StringBuilder();

    // Check for [POST]
    for (int i = sentenceTokens.length - 1; i > 0; i--) {
        sb2.insert(0, sentenceTokens[i] + " ");
        if (sentenceTokens[i].trim().startsWith("[POST]")) {
            for (int j = i - 1; j > 0; j--) {
                if (sentenceTokens[j].trim().startsWith("[CONJ]")
                        || sentenceTokens[j].trim().startsWith("[PSEU]")
                        || sentenceTokens[j].trim().startsWith("[PRE_NEG]")
                        || sentenceTokens[j].trim().startsWith("[PREN]")
                        || sentenceTokens[j].trim().startsWith("[PREP]")
                        || sentenceTokens[j].trim().startsWith("[POSP]")) {
                    break;
                }
                if (sentenceTokens[j].trim().startsWith("[PHRASE]")) {
                    sentenceTokens[j] = sentenceTokens[j].trim().replaceAll("\\[PHRASE\\]", "[NEGATED]");
                }
            }
        }
    }
    sentence = sb2.toString();

    // If POSSIBLE negation is to be treated as negation
    // (negatePossible set to "true"), then check for [PREP] and [POSP] as well.
    if (negPoss == true) {
        pSpace = Pattern.compile("[\\s+]");
        sentenceTokens = pSpace.split(sentence);
        StringBuilder sb3 = new StringBuilder();

        // Check for [PREP]
        for (int i = 0; i < sentenceTokens.length; i++) {
            sb3.append(" " + sentenceTokens[i].trim());
            if (sentenceTokens[i].trim().startsWith("[PREP]")) {
                for (int j = i + 1; j < sentenceTokens.length; j++) {
                    if (sentenceTokens[j].trim().startsWith("[CONJ]")
                            || sentenceTokens[j].trim().startsWith("[PSEU]")
                            || sentenceTokens[j].trim().startsWith("[POST]")
                            || sentenceTokens[j].trim().startsWith("[PRE_NEG]")
                            || sentenceTokens[j].trim().startsWith("[PREN]")
                            || sentenceTokens[j].trim().startsWith("[POSP]")) {
                        break;
                    }
                    if (sentenceTokens[j].trim().startsWith("[PHRASE]")) {
                        sentenceTokens[j] = sentenceTokens[j].trim().replaceAll("\\[PHRASE\\]", "[POSSIBLE]");
                    }
                }
            }
        }
        sentence = sb3.toString();

        pSpace = Pattern.compile("[\\s+]");
        sentenceTokens = pSpace.split(sentence);
        StringBuilder sb4 = new StringBuilder();

        // Check for [POSP]
        for (int i = sentenceTokens.length - 1; i > 0; i--) {
            sb4.insert(0, sentenceTokens[i] + " ");
            if (sentenceTokens[i].trim().startsWith("[POSP]")) {
                for (int j = i - 1; j > 0; j--) {
                    if (sentenceTokens[j].trim().startsWith("[CONJ]")
                            || sentenceTokens[j].trim().startsWith("[PSEU]")
                            || sentenceTokens[j].trim().startsWith("[PREN]")
                            || sentenceTokens[j].trim().startsWith("[PRE_NEG]")
                            || sentenceTokens[j].trim().startsWith("[PREP]")
                            || sentenceTokens[j].trim().startsWith("[POST]")) {
                        break;
                    }
                    if (sentenceTokens[j].trim().startsWith("[PHRASE]")) {
                        sentenceTokens[j] = sentenceTokens[j].trim().replaceAll("\\[PHRASE\\]", "[POSSIBLE]");
                    }
                }
            }
        }
        sentence = sb4.toString();
    }

    // Remove the filler character we used.
    sentence = sentence.replaceAll(filler, " ");

    // Remove the extra periods at the beginning
    // and end of the sentence.
    sentence = sentence.substring(0, sentence.trim().lastIndexOf('.'));
    sentence = sentence.replaceFirst(".", "");

    // Get the scope of the negation for PREN and PREP
    if (sentence.contains("[PRE_NEG]") || sentence.contains("[PREN]") || sentence.contains("[PREP]")) {
        int startOffset = sentence.indexOf("[PREN]");
        if (startOffset == -1) {
            startOffset = sentence.indexOf("[PRE_NEG]");
        }
        if (startOffset == -1) {
            startOffset = sentence.indexOf("[PREP]");
        }
        int endOffset = sentence.indexOf("[CONJ]");
        if (endOffset == -1) {
            endOffset = sentence.indexOf("[PSEU]");
        }
        if (endOffset == -1) {
            endOffset = sentence.indexOf("[POST]");
        }
        if (endOffset == -1) {
            endOffset = sentence.indexOf("[POSP]");
        }
        if (endOffset == -1 || endOffset < startOffset) {
            endOffset = sentence.length() - 1;
        }
        sScope = sentence.substring(startOffset, endOffset + 1);
    }

    // Get the scope of the negation for POST and POSP
    if (sentence.contains("[POST]") || sentence.contains("[POSP]")) {
        int endOffset = sentence.lastIndexOf("[POST]");
        if (endOffset == -1) {
            endOffset = sentence.lastIndexOf("[POSP]");
        }
        int startOffset = sentence.lastIndexOf("[CONJ]");
        if (startOffset == -1) {
            startOffset = sentence.lastIndexOf("[PSEU]");
        }
        if (startOffset == -1) {
            startOffset = sentence.lastIndexOf("[PREN]");
        }
        if (startOffset == -1) {
            startOffset = sentence.lastIndexOf("[PRE_NEG]");
        }
        if (startOffset == -1) {
            startOffset = sentence.lastIndexOf("[PREP]");
        }
        if (startOffset == -1) {
            startOffset = 0;
        }
        sScope = sentence.substring(startOffset, endOffset);
    }

    // Classify to: negated/possible/affirmed
    if (sentence.contains("[NEGATED]")) {
        sentence = sentence + "\t" + "negated" + "\t" + sScope;
    } else if (sentence.contains("[POSSIBLE]")) {
        sentence = sentence + "\t" + "possible" + "\t" + sScope;
    } else {
        sentence = sentence + "\t" + "affirmed" + "\t" + sScope;
    }

    sToReturn = sentence;

    return sToReturn;
}
From source file:com.haulmont.cuba.gui.components.filter.condition.DynamicAttributesCondition.java
@Override
protected void updateText() {
    if (operator == Op.NOT_EMPTY) {
        if (BooleanUtils.isTrue((Boolean) param.getValue())) {
            text = text.replace("not exists", "exists");
        } else if (BooleanUtils.isFalse((Boolean) param.getValue()) && !text.contains("not exists")) {
            text = text.replace("exists ", "not exists ");
        }
    }
    if (!isCollection) {
        if (operator == Op.ENDS_WITH || operator == Op.STARTS_WITH || operator == Op.CONTAINS
                || operator == Op.DOES_NOT_CONTAIN) {
            Matcher matcher = LIKE_PATTERN.matcher(text);
            if (matcher.find()) {
                String escapeCharacter = ("\\".equals(QueryUtils.ESCAPE_CHARACTER)
                        || "$".equals(QueryUtils.ESCAPE_CHARACTER))
                                ? QueryUtils.ESCAPE_CHARACTER + QueryUtils.ESCAPE_CHARACTER
                                : QueryUtils.ESCAPE_CHARACTER;
                text = matcher.replaceAll("$1 ESCAPE '" + escapeCharacter + "' ");
            }
        }
    } else {
        if (operator == Op.CONTAINS) {
            text = text.replace("not exists", "exists");
        } else if (operator == Op.DOES_NOT_CONTAIN && !text.contains("not exists")) {
            text = text.replace("exists ", "not exists ");
        }
    }
}
From source file:org.kuali.coeus.s2sgen.impl.generate.S2SBaseFormGenerator.java
protected String cleanFileName(String fileName) {
    Pattern pattern = Pattern.compile(REGEX_TITLE_FILENAME_PATTERN);
    Matcher matcher = pattern.matcher(fileName);
    return matcher.replaceAll(REPLACEMENT_CHARACTER);
}