List of usage examples for java.util.regex.Pattern.CASE_INSENSITIVE

Field: public static final int CASE_INSENSITIVE

A match flag that enables case-insensitive matching. By default it assumes only US-ASCII characters are being matched; Unicode-aware case folding requires combining it with Pattern.UNICODE_CASE. The same behavior can also be requested with the embedded flag expression (?i).
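Before the project examples below, here is a minimal self-contained sketch (not taken from any of the listed projects) showing the flag on its own, combined with UNICODE_CASE, and as the embedded (?i) expression:

import java.util.regex.Pattern;

public class CaseInsensitiveDemo {
    public static void main(String[] args) {
        // ASCII case folding only
        Pattern ascii = Pattern.compile("hello", Pattern.CASE_INSENSITIVE);
        System.out.println(ascii.matcher("HELLO world").find()); // true

        // Non-ASCII characters additionally need UNICODE_CASE
        Pattern unicode = Pattern.compile("über", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);
        System.out.println(unicode.matcher("ÜBER").find()); // true

        // The embedded flag expression (?i) is equivalent to the constant
        System.out.println(Pattern.compile("(?i)hello").matcher("Hello").find()); // true
    }
}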
From source file:edu.toronto.cs.phenotips.solr.AbstractSolrScriptService.java
/**
 * Perform a search, falling back on the suggested spellchecked query if the original query fails to return any
 * results.
 *
 * @param params the Solr parameters to use, should contain at least a value for the "q" parameter; use
 *            {@link #getSolrQuery(String, int, int)} to get the proper parameter expected by this method
 * @return the list of matching documents, empty if there are no matching terms
 */
private SolrDocumentList search(MapSolrParams params) {
    try {
        NamedList<Object> newParams = params.toNamedList();
        if (newParams.get(CommonParams.FL) == null) {
            newParams.add(CommonParams.FL, "* score");
        }
        QueryResponse response = this.server.query(MapSolrParams.toSolrParams(newParams));
        SolrDocumentList results = response.getResults();
        if (response.getSpellCheckResponse() != null
                && !response.getSpellCheckResponse().isCorrectlySpelled()) {
            String suggestedQuery = response.getSpellCheckResponse().getCollatedResult();
            if (StringUtils.isEmpty(suggestedQuery)) {
                return results;
            }
            Pattern p = Pattern.compile("(\\w++):(\\w++)\\*$", Pattern.CASE_INSENSITIVE);
            Matcher originalStub = p.matcher((String) newParams.get(CommonParams.Q));
            newParams.remove(CommonParams.Q);
            Matcher newStub = p.matcher(suggestedQuery);
            if (originalStub.find() && newStub.find()) {
                suggestedQuery += ' ' + originalStub.group() + "^1.5 " + originalStub.group(2) + "^1.5";
                String boostQuery = (String) newParams.get(DisMaxParams.BQ);
                if (boostQuery != null) {
                    boostQuery += ' ' + boostQuery.replace(originalStub.group(2), newStub.group(2));
                    newParams.remove(DisMaxParams.BQ);
                    newParams.add(DisMaxParams.BQ, boostQuery);
                }
            }
            newParams.add(CommonParams.Q, suggestedQuery);
            SolrDocumentList spellcheckResults = this.server.query(MapSolrParams.toSolrParams(newParams))
                    .getResults();
            if (results.getMaxScore() < spellcheckResults.getMaxScore()) {
                results = spellcheckResults;
            }
        }
        return results;
    } catch (SolrServerException ex) {
        this.logger.error("Failed to search: {}", ex.getMessage(), ex);
    }
    return null;
}
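The pattern above extracts a trailing "field:prefix*" stub from a Solr query so its boost can be carried over to the spellchecked query. A small illustration against a hypothetical query string:

Pattern p = Pattern.compile("(\\w++):(\\w++)\\*$", Pattern.CASE_INSENSITIVE);
Matcher m = p.matcher("name:card*"); // hypothetical query stub
if (m.find()) {
    System.out.println(m.group(1)); // name (the field)
    System.out.println(m.group(2)); // card (the prefix)
}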
From source file:net.longfalcon.newsj.Releases.java
public void processReleases() {
    String startDateString = DateUtil.displayDateFormatter.print(System.currentTimeMillis());
    _log.info(String.format("Starting release update process (%s)", startDateString));

    // get site config TODO: use config service
    Site site = siteDAO.getDefaultSite();

    int retcount = 0;
    Directory nzbBaseDir = fileSystemService.getDirectory("/nzbs");

    checkRegexesUptoDate(site.getLatestRegexUrl(), site.getLatestRegexRevision());

    // Stage 0
    // this is a hack - tx is not working ATM
    TransactionStatus transaction = transactionManager
            .getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRED));

    //
    // Get all regexes for all groups which are to be applied to new binaries
    // in order of how they should be applied
    //
    List<ReleaseRegex> releaseRegexList = releaseRegexDAO.getRegexes(true, "-1", false);
    for (ReleaseRegex releaseRegex : releaseRegexList) {
        String releaseRegexGroupName = releaseRegex.getGroupName();
        _log.info(String.format("Applying regex %d for group %s", releaseRegex.getId(),
                ValidatorUtil.isNull(releaseRegexGroupName) ? "all" : releaseRegexGroupName));

        // compile the regex early, to test them
        String regex = releaseRegex.getRegex();
        Pattern pattern = Pattern.compile(fixRegex(regex), Pattern.CASE_INSENSITIVE); // remove '/' and '/i'

        HashSet<Long> groupMatch = new LinkedHashSet<>();

        //
        // Groups ending in * need to be like-matched when getting out binaries for groups and children
        //
        Matcher matcher = _wildcardPattern.matcher(releaseRegexGroupName);
        if (matcher.matches()) {
            releaseRegexGroupName = releaseRegexGroupName.substring(0, releaseRegexGroupName.length() - 1);
            List<Group> groups = groupDAO.findGroupsByName(releaseRegexGroupName);
            for (Group group : groups) {
                groupMatch.add(group.getId());
            }
        } else if (!ValidatorUtil.isNull(releaseRegexGroupName)) {
            Group group = groupDAO.getGroupByName(releaseRegexGroupName);
            if (group != null) {
                groupMatch.add(group.getId());
            }
        }

        List<Binary> binaries = new ArrayList<>();
        if (groupMatch.size() > 0) {
            // Get out all binaries of STAGE0 for current group
            binaries = binaryDAO.findByGroupIdsAndProcStat(groupMatch, Defaults.PROCSTAT_NEW);
        }

        Map<String, String> arrNoPartBinaries = new LinkedHashMap<>();
        DateTime fiveHoursAgo = DateTime.now().minusHours(5);

        // this for loop should probably be a single transaction
        for (Binary binary : binaries) {
            String testMessage = "Test run - Binary Name " + binary.getName();

            Matcher groupRegexMatcher = pattern.matcher(binary.getName());
            if (groupRegexMatcher.find()) {
                String reqIdGroup = null;
                try {
                    reqIdGroup = groupRegexMatcher.group("reqid");
                } catch (IllegalArgumentException e) {
                    _log.debug(e.toString());
                }
                String partsGroup = null;
                try {
                    partsGroup = groupRegexMatcher.group("parts");
                } catch (IllegalArgumentException e) {
                    _log.debug(e.toString());
                }
                String nameGroup = null;
                try {
                    nameGroup = groupRegexMatcher.group("name");
                } catch (Exception e) {
                    _log.debug(e.toString());
                }
                _log.debug(testMessage + " matches with: \n reqId = " + reqIdGroup + " parts = " + partsGroup
                        + " and name = " + nameGroup);

                if ((ValidatorUtil.isNotNull(reqIdGroup) && ValidatorUtil.isNumeric(reqIdGroup))
                        && ValidatorUtil.isNull(nameGroup)) {
                    nameGroup = reqIdGroup;
                }

                if (ValidatorUtil.isNull(nameGroup)) {
                    _log.warn(String.format(
                            "regex applied which didnt return right number of capture groups - %s", regex));
                    _log.warn(String.format("regex matched: reqId = %s parts = %s and name = %s", reqIdGroup,
                            partsGroup, nameGroup));
                    continue;
                }

                // If there's no number-of-files data in the subject, put it into a release if it was posted
                // to usenet longer than five hours ago.
                if ((ValidatorUtil.isNull(partsGroup) && fiveHoursAgo.isAfter(binary.getDate().getTime()))) {
                    //
                    // Take a copy of the name of this no-part release found. This can be used
                    // next time round the loop to find parts of this set, but which have not yet reached 3 hours.
                    //
                    arrNoPartBinaries.put(nameGroup, "1");
                    partsGroup = "01/01";
                }

                if (ValidatorUtil.isNotNull(nameGroup) && ValidatorUtil.isNotNull(partsGroup)) {
                    if (partsGroup.indexOf('/') == -1) {
                        // replace weird parts delimiters
                        partsGroup = partsGroup.replaceFirst("(-)|(~)|(\\sof\\s)", "/");
                    }

                    Integer regexCategoryId = releaseRegex.getCategoryId();
                    Integer reqId = null;
                    if (ValidatorUtil.isNotNull(reqIdGroup) && ValidatorUtil.isNumeric(reqIdGroup)) {
                        reqId = Integer.parseInt(reqIdGroup);
                    }

                    // check if post is a repost
                    Pattern repostPattern = Pattern.compile("(repost\\d?|re\\-?up)", Pattern.CASE_INSENSITIVE);
                    Matcher binaryNameRepostMatcher = repostPattern.matcher(binary.getName());

                    if (binaryNameRepostMatcher.find()
                            && !nameGroup.toLowerCase().matches("^[\\s\\S]+(repost\\d?|re\\-?up)")) {
                        nameGroup = nameGroup + (" " + binaryNameRepostMatcher.group(1));
                    }

                    String partsStrings[] = partsGroup.split("/");
                    int relpart = Integer.parseInt(partsStrings[0]);
                    int relTotalPart = Integer.parseInt(partsStrings[1]);

                    binary.setRelName(nameGroup.replace("_", " "));
                    binary.setRelPart(relpart);
                    binary.setRelTotalPart(relTotalPart);
                    binary.setProcStat(Defaults.PROCSTAT_TITLEMATCHED);
                    binary.setCategoryId(regexCategoryId);
                    binary.setRegexId(releaseRegex.getId());
                    binary.setReqId(reqId);
                    binaryDAO.updateBinary(binary);
                }
            }
        }
    }

    transactionManager.commit(transaction);
    // this is a hack - tx is not working ATM
    transaction = transactionManager
            .getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRED));

    //
    // Move all binaries from releases which have the correct number of files on to the next stage.
    //
    _log.info("Stage 2");
    List<MatchedReleaseQuery> matchedReleaseQueries = binaryDAO
            .findBinariesByProcStatAndTotalParts(Defaults.PROCSTAT_TITLEMATCHED);
    matchedReleaseQueries = combineMatchedQueries(matchedReleaseQueries);
    int siteMinFilestoFormRelease = site.getMinFilesToFormRelease();
    for (MatchedReleaseQuery matchedReleaseQuery : matchedReleaseQueries) {
        retcount++;

        //
        // Less than the site-permitted number of files in a release. Don't discard it, as it may
        // be part of a set being uploaded.
        //
        int minFiles = siteMinFilestoFormRelease;
        String releaseName = matchedReleaseQuery.getReleaseName();
        long matchedReleaseQueryGroup = matchedReleaseQuery.getGroup();
        Long matchedReleaseQueryNumberOfBinaries = matchedReleaseQuery.getNumberOfBinaries();
        int matchecReleaseTotalParts = matchedReleaseQuery.getReleaseTotalParts();
        String fromName = matchedReleaseQuery.getFromName();
        Integer reqId = matchedReleaseQuery.getReqId();

        Group group = groupDAO.findGroupByGroupId(matchedReleaseQueryGroup);
        if (group != null && group.getMinFilesToFormRelease() != null) {
            minFiles = group.getMinFilesToFormRelease();
        }

        if (matchedReleaseQueryNumberOfBinaries < minFiles) {
            _log.warn(String.format("Number of files in release %s less than site/group setting (%s/%s)",
                    releaseName, matchedReleaseQueryNumberOfBinaries, minFiles));
            binaryDAO.updateBinaryIncrementProcAttempts(releaseName, Defaults.PROCSTAT_TITLEMATCHED,
                    matchedReleaseQueryGroup, fromName);
        } else if (matchedReleaseQueryNumberOfBinaries >= matchecReleaseTotalParts) {
            // Check that the binary is complete
            List<Binary> releaseBinaryList = binaryDAO.findBinariesByReleaseNameProcStatGroupIdFromName(
                    releaseName, Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);

            boolean incomplete = false;
            for (Binary binary : releaseBinaryList) {
                long partsCount = partDAO.countPartsByBinaryId(binary.getId());
                if (partsCount < binary.getTotalParts()) {
                    float percentComplete = ((float) partsCount / (float) binary.getTotalParts()) * 100;
                    _log.warn(String.format("binary %s from %s has missing parts = %s/%s (%s%% complete)",
                            binary.getId(), releaseName, partsCount, binary.getTotalParts(),
                            percentComplete));

                    // Allow the binary to release if it was posted to usenet longer than four hours ago
                    // and we still don't have all the parts
                    DateTime fourHoursAgo = DateTime.now().minusHours(4);
                    if (fourHoursAgo.isAfter(new DateTime(binary.getDate()))) {
                        _log.info("allowing incomplete binary " + binary.getId());
                    } else {
                        incomplete = true;
                    }
                }
            }

            if (incomplete) {
                _log.warn(String.format("Incorrect number of parts %s-%s-%s", releaseName,
                        matchedReleaseQueryNumberOfBinaries, matchecReleaseTotalParts));
                binaryDAO.updateBinaryIncrementProcAttempts(releaseName, Defaults.PROCSTAT_TITLEMATCHED,
                        matchedReleaseQueryGroup, fromName);
            }
            //
            // Right number of files, but see if the binary is an allfilled/reqid post, in which case
            // it needs its name looked up
            // TODO: Does this even work anymore?
            //
            else if (ValidatorUtil.isNotNull(site.getReqIdUrl()) && ValidatorUtil.isNotNull(reqId)) {
                //
                // Try and get the name using the group
                //
                _log.info("Looking up " + reqId + " in " + group.getName() + "...");
                String newTitle = getReleaseNameForReqId(site.getReqIdUrl(), group, reqId, true);

                //
                // if the feed/group wasn't supported by the scraper, then just use the release name as the title.
                //
                if (ValidatorUtil.isNull(newTitle) || newTitle.equals("no feed")) {
                    newTitle = releaseName;
                    _log.warn("Group not supported");
                }

                //
                // Valid release with right number of files and title now, so move it on
                //
                if (ValidatorUtil.isNotNull(newTitle)) {
                    binaryDAO.updateBinaryNameAndStatus(newTitle, Defaults.PROCSTAT_READYTORELEASE,
                            releaseName, Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);
                } else {
                    //
                    // Item not found; if the binary was added to the index ages ago, then give up.
                    //
                    Timestamp timestamp = binaryDAO.findMaxDateAddedBinaryByReleaseNameProcStatGroupIdFromName(
                            releaseName, Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);
                    DateTime maxAddedDate = new DateTime(timestamp);
                    DateTime twoDaysAgo = DateTime.now().minusDays(2);
                    if (maxAddedDate.isBefore(twoDaysAgo)) {
                        binaryDAO.updateBinaryNameAndStatus(releaseName,
                                Defaults.PROCSTAT_NOREQIDNAMELOOKUPFOUND, releaseName,
                                Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);
                        _log.warn("Not found in 48 hours");
                    }
                }
            } else {
                binaryDAO.updateBinaryNameAndStatus(releaseName, Defaults.PROCSTAT_READYTORELEASE,
                        releaseName, Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);
            }
        } else {
            //
            // There's less than the expected number of files, so update the attempts and move on.
            //
            _log.info(String.format("Incorrect number of files for %s (%d/%d)", releaseName,
                    matchedReleaseQueryNumberOfBinaries, matchecReleaseTotalParts));
            binaryDAO.updateBinaryIncrementProcAttempts(releaseName, Defaults.PROCSTAT_TITLEMATCHED,
                    matchedReleaseQueryGroup, fromName);
        }

        if (retcount % 10 == 0) {
            _log.info(String.format("-processed %d binaries stage two", retcount));
        }
    }
    transactionManager.commit(transaction);

    retcount = 0;
    int nfoCount = 0;

    // this is a hack - tx is not working ATM
    transaction = transactionManager
            .getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRED));

    //
    // Get out all distinct relname, group from binaries of STAGE2
    //
    _log.info("Stage 3");
    List<MatchedReleaseQuery> readyReleaseQueries = binaryDAO
            .findBinariesByProcStatAndTotalParts(Defaults.PROCSTAT_READYTORELEASE);
    readyReleaseQueries = combineMatchedQueries(readyReleaseQueries);
    for (MatchedReleaseQuery readyReleaseQuery : readyReleaseQueries) {
        retcount++;
        String releaseName = readyReleaseQuery.getReleaseName();
        int numParts = readyReleaseQuery.getReleaseTotalParts();
        long binaryCount = readyReleaseQuery.getNumberOfBinaries();
        long groupId = readyReleaseQuery.getGroup();

        //
        // Get the last post date and the poster name from the binary
        //
        String fromName = readyReleaseQuery.getFromName();
        Timestamp timestamp = binaryDAO.findMaxDateAddedBinaryByReleaseNameProcStatGroupIdFromName(
                releaseName, Defaults.PROCSTAT_READYTORELEASE, groupId, fromName);
        DateTime addedDate = new DateTime(timestamp);

        //
        // Get all releases with the same name with a usenet posted date in a +1/-1 day range.
        //
        Date oneDayBefore = addedDate.minusDays(1).toDate();
        Date oneDayAfter = addedDate.plusDays(1).toDate();
        List<Release> relDupes = releaseDAO.findReleasesByNameAndDateRange(releaseName, oneDayBefore,
                oneDayAfter);
        if (!relDupes.isEmpty()) {
            binaryDAO.updateBinaryNameAndStatus(releaseName, Defaults.PROCSTAT_DUPLICATE, releaseName,
                    Defaults.PROCSTAT_READYTORELEASE, groupId, fromName);
            continue;
        }

        //
        // Get total size of this release
        // Done in a big OR statement, not an IN, as the mysql binaryID index on the parts table
        // was not being used.
        //
        // SM: TODO this should be revisited, using hb mappings
        long totalSize = 0;
        int regexAppliedCategoryId = 0;
        long regexIdUsed = 0;
        int reqIdUsed = 0;
        int relTotalParts = 0;
        float relCompletion;
        List<Binary> binariesForSize = binaryDAO.findBinariesByReleaseNameProcStatGroupIdFromName(releaseName,
                Defaults.PROCSTAT_READYTORELEASE, groupId, fromName);
        long relParts = 0;
        for (Binary binary : binariesForSize) {
            if (ValidatorUtil.isNotNull(binary.getCategoryId()) && regexAppliedCategoryId == 0) {
                regexAppliedCategoryId = binary.getCategoryId();
            }
            if (ValidatorUtil.isNotNull(binary.getRegexId()) && regexIdUsed == 0) {
                regexIdUsed = binary.getRegexId();
            }
            if (ValidatorUtil.isNotNull(binary.getReqId()) && reqIdUsed == 0) {
                reqIdUsed = binary.getReqId();
            }
            relTotalParts += binary.getTotalParts();
            relParts += partDAO.countPartsByBinaryId(binary.getId());
            totalSize += partDAO.sumPartsSizeByBinaryId(binary.getId());
        }
        relCompletion = ((float) relParts / (float) relTotalParts) * 100f;

        //
        // Insert the release
        //
        String releaseGuid = UUID.randomUUID().toString();
        int categoryId;
        Category category = null;
        Long regexId;
        Integer reqId;
        if (regexAppliedCategoryId == 0) {
            categoryId = categoryService.determineCategory(groupId, releaseName);
        } else {
            categoryId = regexAppliedCategoryId;
        }
        if (categoryId > 0) {
            category = categoryService.getCategory(categoryId);
        }
        if (regexIdUsed == 0) {
            regexId = null;
        } else {
            regexId = regexIdUsed;
        }
        if (reqIdUsed == 0) {
            reqId = null;
        } else {
            reqId = reqIdUsed;
        }

        // Clean the release name of characters such as '#', '@', '$', '%', '^'
        // (everything outside [A-Za-z0-9-_ .])
        String cleanReleaseName = releaseName.replaceAll("[^A-Za-z0-9-_\\ \\.]+", "");
        Release release = new Release();
        release.setName(cleanReleaseName);
        release.setSearchName(cleanReleaseName);
        release.setTotalpart(numParts);
        release.setGroupId(groupId);
        release.setAddDate(new Date());
        release.setGuid(releaseGuid);
        release.setCategory(category);
        release.setRegexId(regexId);
        release.setRageId((long) -1);
        release.setPostDate(addedDate.toDate());
        release.setFromName(fromName);
        release.setSize(totalSize);
        release.setReqId(reqId);
        release.setPasswordStatus(site.getCheckPasswordedRar() == 1 ? -1 : 0); // magic constants
        release.setCompletion(relCompletion);

        releaseDAO.updateRelease(release);
        long releaseId = release.getId();
        _log.info("Added release " + cleanReleaseName);

        //
        // Tag every binary for this release with its parent release id;
        // remove the release name from the binary as it's no longer required
        //
        binaryDAO.updateBinaryNameStatusReleaseID("", Defaults.PROCSTAT_RELEASED, releaseId, releaseName,
                Defaults.PROCSTAT_READYTORELEASE, groupId, fromName);

        //
        // Find an .nfo in the release
        //
        ReleaseNfo releaseNfo = nfo.determineReleaseNfo(release);
        if (releaseNfo != null) {
            nfo.addReleaseNfo(releaseNfo);
            nfoCount++;
        }

        //
        // Write the nzb to disk
        //
        nzb.writeNZBforReleaseId(release, nzbBaseDir, true);

        if (retcount % 5 == 0) {
            _log.info("-processed " + retcount + " releases stage three");
        }
    }

    _log.info("Found " + nfoCount + " nfos in " + retcount + " releases");

    //
    // Process nfo files
    //
    if (site.getLookupNfo() != 1) {
        _log.info("Site config (site.lookupnfo) prevented retrieving nfos");
    } else {
        nfo.processNfoFiles(site.getLookupImdb(), site.getLookupTvRage());
    }

    //
    // Lookup imdb if enabled
    //
    if (site.getLookupImdb() == 1) {
        movieService.processMovieReleases();
    }

    //
    // Lookup music if enabled
    //
    if (site.getLookupMusic() == 1) {
        musicService.processMusicReleases();
    }

    //
    // Lookup games if enabled
    //
    if (site.getLookupGames() == 1) {
        gameService.processConsoleReleases();
    }

    //
    // Check for passworded releases
    //
    if (site.getCheckPasswordedRar() != 1) {
        _log.info("Site config (site.checkpasswordedrar) prevented checking releases are passworded");
    } else {
        processPasswordedReleases(true);
    }

    //
    // Process all TV related releases, which will assign their series/episode/rage data
    //
    tvRageService.processTvReleases(site.getLookupTvRage() == 1);

    //
    // Get the current datetime again, as using now() in the housekeeping queries prevents the index being used.
    //
    DateTime now = new DateTime();

    //
    // Tidy away any binaries which have been attempted to be grouped into
    // a release more than x times (SM: or is it days?)
    //
    int attemtpGroupBinDays = site.getAttemtpGroupBinDays();
    _log.info(String.format("Tidying away binaries which cant be grouped after %s days", attemtpGroupBinDays));
    DateTime maxGroupBinDays = now.minusDays(attemtpGroupBinDays);
    binaryDAO.updateProcStatByProcStatAndDate(Defaults.PROCSTAT_WRONGPARTS, Defaults.PROCSTAT_NEW,
            maxGroupBinDays.toDate());

    //
    // Delete any parts and binaries which are older than the site's retention days
    //
    int maxRetentionDays = site.getRawRetentionDays();
    DateTime maxRetentionDate = now.minusDays(maxRetentionDays);
    _log.info(String.format("Deleting parts which are older than %d days", maxRetentionDays));
    partDAO.deletePartByDate(maxRetentionDate.toDate());
    _log.info(String.format("Deleting binaries which are older than %d days", maxRetentionDays));
    binaryDAO.deleteBinaryByDate(maxRetentionDate.toDate());

    //
    // Delete any releases which are older than the site's release retention days
    //
    int releaseretentiondays = site.getReleaseRetentionDays();
    if (releaseretentiondays != 0) {
        _log.info("Determining any releases past retention to be deleted.");

        DateTime maxReleaseRetentionDate = DateTime.now().minusDays(releaseretentiondays);
        List<Release> releasesToDelete = releaseDAO.findReleasesBeforeDate(maxReleaseRetentionDate.toDate());
        for (Iterator<Release> iterator = releasesToDelete.iterator(); iterator.hasNext();) {
            Release release = iterator.next();
            releaseDAO.deleteRelease(release);
        }
    }
    transaction.flush(); // may be unneeded
    transactionManager.commit(transaction);
    _log.info(String.format("Processed %d releases", retcount));
    if (!transaction.isCompleted()) {
        throw new IllegalStateException("Transaction is not completed or rolled back.");
    }
    //return retcount;
}
From source file:com.swordlord.jalapeno.datacontainer.DataContainer.java
/**
 * Filters the given rows using a complex filter.
 *
 * @param <T> The row type
 * @param rows The rows to process
 * @param filter The complex filter
 * @param context The binding context or null
 * @param dt The table to which the rows belong
 * @return The filtered rows
 */
private static <T> List<T> complexFilter(List<T> rows, DataBindingContext context, DataTableBase dt,
        String filter) {
    // Example of a filter: ?NotIn(threatId,@ActiveThreat[0]:threatId)
    final Pattern pattern = Pattern.compile(
            "^(\\?)?([a-z]*)(\\(){1,1}([a-z]*)?(,){1,1}([(\\?)?a-z_0-9,:='%\\s@\\[\\]\\(\\)]*)?(\\)){1,1}$",
            Pattern.CASE_INSENSITIVE | Pattern.MULTILINE);
    final Matcher matcher = pattern.matcher(filter);

    /*
     * The groups are as follows:
     * 1 - ?
     * 2 - NotIn
     * 3 - (
     * 4 - threatId
     * 5 - ,
     * 6 - @ActiveThreat[0]:threatId
     * 7 - )
     */
    if (!matcher.find()) {
        LOG.info("Parser can't parse filter: " + filter);
        return rows;
    }

    if (matcher.groupCount() != 7) {
        LOG.error("Wrong group count during parsing of filter: " + filter);
        return rows;
    }

    final String strCommand = matcher.group(2);
    final String strBoundField = matcher.group(4).replace(":", ".");
    final String strBindingMember = matcher.group(6).replace(":", ".");

    final DataBindingMember bm = new DataBindingMember(strBindingMember);

    // re-use the DataBindingContext when there is one;
    // this is needed so that all currentRow information is correct within a filter
    final DataBindingManager dbm;
    if (context != null) {
        dbm = context.getDataBindingManager(bm);
    } else {
        dbm = new DataBindingManager(dt.getDataContainer(), bm);
    }

    // get all to-be-filtered records as a field, because the expression
    // filters on one single field and does no lookup
    final List<String> fieldsFilter = new ArrayList<String>();
    for (DataRowBase row : dbm.getRows(bm)) {
        if (row.getPersistenceState() != PersistenceState.DELETED) {
            final String strFieldName = bm.getDataBindingFieldName();
            if (strFieldName == null || strFieldName.length() == 0) {
                LOG.error("There must be something wrong with your filter. Field is empty: " + filter);
            } else {
                fieldsFilter.add(row.getPropertyAsStringForce(strFieldName));
            }
        }
    }

    // Create the expression according to the binding information
    if (strCommand.equalsIgnoreCase("in")) {
        final Expression exp = ExpressionFactory.inExp(strBoundField, fieldsFilter);
        return new ArrayList<T>(exp.filterObjects(rows));
    } else if (strCommand.equalsIgnoreCase("notin")) {
        final Expression exp = ExpressionFactory.notInExp(strBoundField, fieldsFilter);
        return new ArrayList<T>(exp.filterObjects(rows));
    } else {
        LOG.warn("Unknown filter command: " + strCommand);
        return rows;
    }
}
From source file:fr.gael.dhus.sync.impl.ODataProductSynchronizer.java
/**
 * Uses the given `http_client` to download `url` into `out_tmp`.
 * Renames `out_tmp` to the value of the filename param of the Content-Disposition header field.
 * Returns a path to the renamed file.
 *
 * @param http_client synchronous interruptible HTTP client.
 * @param out_tmp download destination file on disk (will be created if it does not exist).
 * @param url what to download.
 * @return Path to file with its actual name.
 * @throws IOException Anything went wrong (with IO or network, or if the HTTP header field
 *         Content-Disposition is missing).
 * @throws InterruptedException Thread has been interrupted.
 */
private DownloadResult downloadValidateRename(InterruptibleHttpClient http_client, Path out_tmp, String url)
        throws IOException, InterruptedException {
    try (FileChannel output = FileChannel.open(out_tmp, StandardOpenOption.CREATE_NEW,
            StandardOpenOption.WRITE)) {
        HttpResponse response = http_client.interruptibleGet(url, output);

        // If the response's status code is not 200, something wrong happened
        if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
            Formatter ff = new Formatter();
            ff.format("Synchronizer#%d cannot download product at %s,"
                    + " remote dhus returned message '%s' (HTTP%d)", getId(), url,
                    response.getStatusLine().getReasonPhrase(), response.getStatusLine().getStatusCode());
            throw new IOException(ff.out().toString());
        }

        // Gets the filename from the HTTP header field `Content-Disposition'
        Pattern pat = Pattern.compile("filename=\"(.+?)\"", Pattern.CASE_INSENSITIVE);
        String contdis = response.getFirstHeader("Content-Disposition").getValue();
        Matcher m = pat.matcher(contdis);
        if (!m.find()) {
            throw new IOException("Synchronizer#" + getId()
                    + " Missing HTTP header field `Content-Disposition` that determines the filename");
        }
        String filename = m.group(1);
        if (filename == null || filename.isEmpty()) {
            throw new IOException("Synchronizer#" + getId()
                    + " Invalid filename in HTTP header field `Content-Disposition`");
        }

        // Renames the downloaded file
        output.close();
        Path dest = out_tmp.getParent().resolve(filename);
        Files.move(out_tmp, dest, StandardCopyOption.ATOMIC_MOVE);

        DownloadResult res = new DownloadResult(dest, response.getEntity().getContentType().getValue(),
                response.getEntity().getContentLength());
        return res;
    } finally {
        if (Files.exists(out_tmp)) {
            Files.delete(out_tmp);
        }
    }
}
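Header names and parameter names in HTTP are not consistently capitalized by servers, which is why the filename lookup uses CASE_INSENSITIVE. A quick check against a made-up header value:

Pattern pat = Pattern.compile("filename=\"(.+?)\"", Pattern.CASE_INSENSITIVE);
Matcher m = pat.matcher("attachment; FILENAME=\"product.zip\""); // hypothetical header value
if (m.find()) {
    System.out.println(m.group(1)); // product.zip
}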
From source file:com.oltpbenchmark.catalog.Catalog.java
protected Map<String, String> getOriginalTableNames() {
    Map<String, String> origTableNames = new HashMap<String, String>();
    Pattern p = Pattern.compile("CREATE[\\s]+TABLE[\\s]+(.*?)[\\s]+", Pattern.CASE_INSENSITIVE);
    URL ddl = this.benchmark.getDatabaseDDL(DatabaseType.HSQLDB);
    String ddlContents;
    try {
        ddlContents = IOUtils.toString(ddl);
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
    assert (ddlContents.isEmpty() == false);

    Matcher m = p.matcher(ddlContents);
    while (m.find()) {
        String tableName = m.group(1).trim();
        origTableNames.put(tableName.toUpperCase(), tableName);
        // origTableNames.put(tableName, tableName);
    } // WHILE
    assert (origTableNames.isEmpty() == false) : "Failed to extract original table names for "
            + this.benchmark.getBenchmarkName();

    if (LOG.isDebugEnabled())
        LOG.debug("Original Table Names:\n" + StringUtil.formatMaps(origTableNames));
    return (origTableNames);
}
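SQL keywords are case-insensitive, so the flag lets the same pattern match "CREATE TABLE", "create table", and mixed-case DDL. A small illustration with a hypothetical DDL line:

Pattern p = Pattern.compile("CREATE[\\s]+TABLE[\\s]+(.*?)[\\s]+", Pattern.CASE_INSENSITIVE);
Matcher m = p.matcher("create table Customer (id int);"); // hypothetical DDL
if (m.find()) {
    System.out.println(m.group(1)); // Customer
}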
From source file:common.ckplugins.handlers.command.FileUploadCommand.java
/**
 * If the file exists, this method adds (number) to the file name.
 *
 * @param path folder
 * @param name file name
 * @return new file name.
 */
private String getFinalFileName(final String path, final String name) {
    File file = new File(path, name);
    int number = 0;

    String nameWithoutExtension = FileUtils.getFileNameWithoutExtension(name, false);

    Pattern p = Pattern.compile("^(AUX|COM\\d|CLOCK\\$|CON|NUL|PRN|LPT\\d)$", Pattern.CASE_INSENSITIVE);
    Matcher m = p.matcher(nameWithoutExtension);
    boolean protectedName = m.find() ? true : false;

    while (true) {
        if (file.exists() || protectedName) {
            number++;
            StringBuilder sb = new StringBuilder();
            sb.append(FileUtils.getFileNameWithoutExtension(name, false));
            sb.append("(").append(number).append(").");
            sb.append(FileUtils.getFileExtension(name, false));
            this.newFileName = sb.toString();
            file = new File(path, this.newFileName);
            this.errorCode = Constants.Errors.CKFINDER_CONNECTOR_ERROR_UPLOADED_FILE_RENAMED;
            protectedName = false;
        } else {
            return this.newFileName;
        }
    }
}
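The pattern guards against reserved Windows device names, which are reserved in any capitalization; that is what CASE_INSENSITIVE buys here. A quick sketch of the check on its own:

Pattern p = Pattern.compile("^(AUX|COM\\d|CLOCK\\$|CON|NUL|PRN|LPT\\d)$", Pattern.CASE_INSENSITIVE);
System.out.println(p.matcher("com1").find());   // true  -> name gets a "(1)" suffix
System.out.println(p.matcher("report").find()); // false -> original name kept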
From source file:com.parse.OfflineQueryLogic.java
/**
 * Matches $regex constraints.
 */
private static boolean matchesRegexConstraint(Object constraint, Object value, String options)
        throws ParseException {
    if (value == null || value == JSONObject.NULL) {
        return false;
    }

    if (options == null) {
        options = "";
    }

    if (!options.matches("^[imxs]*$")) {
        throw new ParseException(ParseException.INVALID_QUERY,
                String.format("Invalid regex options: %s", options));
    }

    int flags = 0;
    if (options.contains("i")) {
        flags = flags | Pattern.CASE_INSENSITIVE;
    }
    if (options.contains("m")) {
        flags = flags | Pattern.MULTILINE;
    }
    if (options.contains("x")) {
        flags = flags | Pattern.COMMENTS;
    }
    if (options.contains("s")) {
        flags = flags | Pattern.DOTALL;
    }

    String regex = (String) constraint;
    Pattern pattern = Pattern.compile(regex, flags);
    Matcher matcher = pattern.matcher((String) value);
    return matcher.find();
}
From source file:com.amalto.core.server.routing.DefaultRoutingEngine.java
@Override
public RoutingRulePOJOPK[] route(final ItemPOJOPK itemPOJOPK) throws XtentisException {
    if (isStopped) {
        LOGGER.error("Not publishing event for '" + itemPOJOPK + "' (event manager is stopped).");
        return new RoutingRulePOJOPK[0];
    }
    // The cached ItemPOJO - will only be retrieved if needed: we have expressions on the routing rules
    String type = itemPOJOPK.getConceptName();
    ItemPOJO itemPOJO = null;
    // Rules that matched
    ArrayList<RoutingRulePOJO> routingRulesThatSyncMatched = new ArrayList<>();
    ArrayList<RoutingRulePOJO> routingRulesThatAsyncMatched = new ArrayList<>();
    // loop over the known rules
    Collection<RoutingRulePOJOPK> routingRulePOJOPKs = routingRules.getRoutingRulePKs(".*");
    for (RoutingRulePOJOPK routingRulePOJOPK : routingRulePOJOPKs) {
        RoutingRulePOJO routingRule = routingRules.getRoutingRule(routingRulePOJOPK);
        if (routingRule.isDeActive()) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug(routingRule.getName() + " disabled, skip it!");
            }
            continue;
        }
        // check if type matches the routing rule
        if (!"*".equals(routingRule.getConcept())) {
            if (!type.equals(routingRule.getConcept())) {
                continue;
            }
        }
        // check if all routing rule expressions match - null: always matches
        // (see 4572: added condition check)
        if (routingRule.getCondition() == null || routingRule.getCondition().trim().length() == 0) {
            boolean matches = true;
            Collection<RoutingRuleExpressionPOJO> routingExpressions = routingRule.getRoutingExpressions();
            if (routingExpressions != null) {
                for (RoutingRuleExpressionPOJO routingExpression : routingExpressions) {
                    if (itemPOJO == null) {
                        // Get the item
                        itemPOJO = item.getItem(itemPOJOPK);
                        if (itemPOJO == null) {
                            // Item does not exist, no rule can apply.
                            return new RoutingRulePOJOPK[0];
                        }
                    }
                    if (!ruleExpressionMatches(itemPOJO, routingExpression)) {
                        // Rule doesn't match: expect a full match to consider routing rule.
                        matches = false;
                        break;
                    }
                }
            }
            if (!matches) {
                continue;
            }
        } else {
            String condition = routingRule.getCondition();
            String compileCondition = strip(condition);
            Collection<RoutingRuleExpressionPOJO> routingExpressions = routingRule.getRoutingExpressions();
            try {
                for (RoutingRuleExpressionPOJO pojo : routingExpressions) {
                    if (pojo.getName() != null && pojo.getName().trim().length() > 0) {
                        Pattern p1 = Pattern.compile(pojo.getName(), Pattern.CASE_INSENSITIVE);
                        Matcher m1 = p1.matcher(condition);
                        while (m1.find()) {
                            if (itemPOJO == null) {
                                // Get the item
                                itemPOJO = item.getItem(itemPOJOPK);
                                if (itemPOJO == null) {
                                    // Item does not exist, no rule can apply.
                                    return new RoutingRulePOJOPK[0];
                                }
                            }
                            ntp.set(m1.group(), ruleExpressionMatches(itemPOJO, pojo));
                        }
                    }
                }
                // compile
                ntp.eval("routingRuleResult = " + compileCondition + ";");
                boolean result = (Boolean) ntp.get("routingRuleResult");
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug(condition + " : " + result);
                    if (result) {
                        LOGGER.debug("Trigger \""
                                + (routingRule.getName() == null ? "" : routingRule.getName())
                                + "\" matched!");
                    }
                }
                if (!result) {
                    continue;
                }
            } catch (EvalError e) {
                String err = "Condition compile error :" + e.getMessage();
                LOGGER.error(err, e);
                throw new XtentisException(err, e);
            }
        }
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("route() Routing Rule MATCH '" + routingRulePOJOPK.getUniqueId() + "' for item '"
                    + itemPOJOPK.getUniqueID() + "'");
        }
        // increment matching routing rules counter
        if (routingRule.isSynchronous()) {
            routingRulesThatSyncMatched.add(routingRule);
        } else {
            routingRulesThatAsyncMatched.add(routingRule);
        }
    }
    // Contract imposes to send matching rule names
    List<RoutingRulePOJOPK> pks = new ArrayList<RoutingRulePOJOPK>(
            routingRulesThatSyncMatched.size() + routingRulesThatAsyncMatched.size());
    // Log debug information if no rule found for document
    if (routingRulesThatSyncMatched.size() == 0 && routingRulesThatAsyncMatched.size() == 0) {
        if (LOGGER.isDebugEnabled()) {
            String err = "Unable to find a routing rule for document " + itemPOJOPK.getUniqueID();
            LOGGER.debug(err);
        }
        return new RoutingRulePOJOPK[0];
    }
    // execute asynchronous triggers (send JMS message)
    if (routingRulesThatAsyncMatched.size() > 0) {
        this.sendMessage(itemPOJOPK, routingRulesThatAsyncMatched);
        pks.addAll(buildListOfRulePK(routingRulesThatAsyncMatched));
    }
    // execute synchronous triggers directly
    if (routingRulesThatSyncMatched.size() > 0) {
        Collections.sort(routingRulesThatSyncMatched);
        String routingOrder = UUID.randomUUID().toString();
        for (RoutingRulePOJO rule : routingRulesThatSyncMatched) {
            applyRule(itemPOJOPK, rule, routingOrder, System.currentTimeMillis());
        }
        pks.addAll(buildListOfRulePK(routingRulesThatSyncMatched));
    }
    return pks.toArray(new RoutingRulePOJOPK[pks.size()]);
}
From source file:org.ocsinventoryng.android.actions.OCSProtocol.java
private String extractResponse(String message) {
    String resp = "";
    Pattern p = Pattern.compile(".*<RESPONSE>(.*)</RESPONSE>.*", Pattern.CASE_INSENSITIVE);
    Matcher m = p.matcher(message);
    if (m.find()) {
        return m.group(1);
    }
    return resp;
}
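The same behavior can be written with the embedded flag expression instead of the constant; a minimal sketch against a hypothetical server reply:

// (?i) at the start of the pattern is equivalent to Pattern.CASE_INSENSITIVE
Pattern p = Pattern.compile("(?i).*<RESPONSE>(.*)</RESPONSE>.*");
Matcher m = p.matcher("<response>ok</response>"); // hypothetical reply
System.out.println(m.find() ? m.group(1) : "");  // ok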
From source file:com.adguard.filter.rules.UrlFilterRule.java
/**
 * Url regular expression
 *
 * @return Regexp
 */
public synchronized Pattern getUrlRegexp() {
    if (invalidRule) {
        return null;
    }
    loadRuleProperties();
    if (urlRegexp == null) {
        int regexOptions = Pattern.DOTALL;
        if (!isOptionEnabled(UrlFilterRuleOption.MATCH_CASE)) {
            regexOptions = regexOptions | Pattern.CASE_INSENSITIVE;
        }
        urlRegexp = Pattern.compile(regex, regexOptions);
        regex = null;
    }
    return urlRegexp;
}