List of usage examples for java.util.regex.Pattern.quote(String)
public static String quote(String s)
From source file:com.moesol.geoserver.sync.client.AbstractClientSynchronizer.java
private Response post(Sha1SyncJson outputJson) throws IOException { m_lastOutputFormat = isReadyForGML(outputJson) ? GML3_OUTPUT_FORMAT : SHA1_SYNC_OUTPUT_FORMAT; String json = new Gson().toJson(outputJson); String xmlRequest = m_postTemplate.replaceAll(Pattern.quote("${outputFormat}"), m_lastOutputFormat); xmlRequest = xmlRequest.replaceAll(Pattern.quote("${attributes}"), m_attributesToInclude); xmlRequest = xmlRequest.replaceAll(Pattern.quote("${sha1Sync}"), json); // Unit test support to pass along outputJson if (m_builder instanceof RequestBuilderJUnit) { RequestBuilderJUnit reqJUnit = (RequestBuilderJUnit) m_builder; reqJUnit.prePost(m_lastOutputFormat, m_attributesToInclude, json); }//from www . ja va 2 s . c o m LOGGER.log(Level.FINE, "outputFormat({0}), attributes({1}), json({2})", new Object[] { m_lastOutputFormat, m_attributesToInclude, json }); if (TRACE_POST != null) { outputJson.dumpSha1SyncJson("POST", TRACE_POST); } m_txBytes += xmlRequest.length(); return m_builder.post(m_url, xmlRequest); }
From source file:com.github.wuic.nut.ftp.FtpNutDao.java
/** * {@inheritDoc}/*from w ww . j a va 2s . co m*/ */ @Override protected List<String> listNutsPaths(final String pattern) throws StreamException { try { connect(); return recursiveSearch(getBasePath(), Pattern.compile(regularExpression ? pattern : Pattern.quote(pattern))); } catch (IOException ioe) { throw new StreamException(ioe); } }
From source file:hydrograph.ui.engine.converter.impl.JoinConverter.java
/**
 * Converts the UI lookup/join mapping grid into the engine's output-mapping
 * objects: pass-through input fields where source and output names match,
 * map fields where they differ, and parameter tags for parameterized rows.
 * Returns null when no grid is supplied.
 *
 * @param joinPropertyGrid mapping grid from the UI, may be null
 * @return list of TypeInputField/TypeMapField entries, or null if the grid is null
 */
public List<Object> getLookupOrJoinOutputMapping(JoinMappingGrid joinPropertyGrid) {
    if (joinPropertyGrid == null) {
        return null;
    }
    List<Object> outputMapping = new ArrayList<>();
    List<LookupMapProperty> lookupMapProperties = joinPropertyGrid.getLookupMapProperties();
    if (converterHelper.hasAllLookupMapPropertiesAsParams(lookupMapProperties)) {
        // Every row is a parameter: emit one empty input field plus a single
        // parameter tag carrying all output-field names.
        TypeInputField emptyField = new TypeInputField();
        emptyField.setName("");
        emptyField.setInSocketId("");
        outputMapping.add(emptyField);
        StringBuffer parameterFieldNames = new StringBuffer();
        for (LookupMapProperty property : lookupMapProperties) {
            parameterFieldNames.append(property.getOutput_Field() + " ");
        }
        converterHelper.addParamTag(ID, parameterFieldNames.toString(),
                ComponentXpathConstants.OPERATIONS_OUTSOCKET.value(), true);
        return outputMapping;
    }
    for (LookupMapProperty property : lookupMapProperties) {
        if (ParameterUtil.isParameter(property.getSource_Field())) {
            converterHelper.addParamTag(ID, property.getSource_Field(),
                    ComponentXpathConstants.OPERATIONS_OUTSOCKET.value(), false);
            continue;
        }
        if (StringUtils.isBlank(property.getSource_Field())) {
            continue;
        }
        // Source fields have the shape "<inSocketId>.<fieldName>".
        String[] sourceNameValue = property.getSource_Field().split(Pattern.quote("."));
        if (sourceNameValue.length != 2) {
            // Malformed entries are silently skipped, matching prior behavior.
            continue;
        }
        if (sourceNameValue[1].equalsIgnoreCase(property.getOutput_Field())) {
            // Same name on both sides: plain pass-through field.
            TypeInputField passThrough = new TypeInputField();
            passThrough.setName(sourceNameValue[1]);
            passThrough.setInSocketId(sourceNameValue[0]);
            outputMapping.add(passThrough);
        } else {
            // Renamed field: record the source -> output mapping.
            TypeMapField mapField = new TypeMapField();
            mapField.setSourceName(sourceNameValue[1]);
            mapField.setName(property.getOutput_Field());
            mapField.setInSocketId(sourceNameValue[0]);
            outputMapping.add(mapField);
        }
    }
    return outputMapping;
}
From source file:com.blackberry.logdriver.admin.LogMaintenance.java
/**
 * Runs one maintenance pass over the HDFS log directory tree for a single
 * data centre / service: merges incoming files, filters and archives data
 * directories past the archive cutoff, deletes directories past the delete
 * cutoff, resets orphaned working directories, and removes stale ZooKeeper
 * locks whose HDFS paths no longer exist.
 *
 * <p>Expected positional args: userName dcNumber service [date [hour]].
 * Behaviour is further controlled by configuration/environment values
 * (MERGEJOB_CONF, FILTERJOB_CONF, DAYS_BEFORE_ARCHIVE, DAYS_BEFORE_DELETE,
 * MAX_CONCURRENT_MR, MAX_TOTAL_MR, ZK_CONNECT_STRING, logdriver.logdir.name,
 * reset.orphaned.jobs, service.root.dir); a missing optional setting disables
 * the corresponding stage rather than failing the run.
 *
 * @param args positional arguments as described above
 * @return 0 on success; 1 on usage, configuration, or filesystem errors, or
 *         if an unexpected exception aborts the pass
 * @throws Exception declared by the Tool contract; the main body catches,
 *         logs and converts exceptions to a non-zero return code
 */
@Override public int run(String[] args) throws Exception { Configuration conf = getConf(); // If run by Oozie, then load the Oozie conf too if (System.getProperty("oozie.action.conf.xml") != null) { conf.addResource(new URL("file://" + System.getProperty("oozie.action.conf.xml"))); } // For some reason, Oozie needs some options to be set in system instead of // in the confiuration. So copy the configs over. { Iterator<Entry<String, String>> i = conf.iterator(); while (i.hasNext()) { Entry<String, String> next = i.next(); System.setProperty(next.getKey(), next.getValue()); } } if (args.length < 3) { printUsage(); return 1; } String userName = args[0]; String dcNumber = args[1]; String service = args[2]; String date = null; String hour = null; if (args.length >= 4) { date = args[3]; } if (args.length >= 5) { hour = args[4]; } // Set from environment variables String mergeJobPropertiesFile = getConfOrEnv(conf, "MERGEJOB_CONF"); String filterJobPropertiesFile = getConfOrEnv(conf, "FILTERJOB_CONF"); String daysBeforeArchive = getConfOrEnv(conf, "DAYS_BEFORE_ARCHIVE"); String daysBeforeDelete = getConfOrEnv(conf, "DAYS_BEFORE_DELETE"); String maxConcurrentMR = getConfOrEnv(conf, "MAX_CONCURRENT_MR", "-1"); String zkConnectString = getConfOrEnv(conf, "ZK_CONNECT_STRING"); String logdir = getConfOrEnv(conf, "logdriver.logdir.name"); boolean resetOrphanedJobs = Boolean.parseBoolean(getConfOrEnv(conf, "reset.orphaned.jobs", "true")); String rootDir = getConfOrEnv(conf, "service.root.dir"); String maxTotalMR = getConfOrEnv(conf, "MAX_TOTAL_MR", "-1"); boolean doMerge = true; boolean doArchive = true; boolean doDelete = true; if (zkConnectString == null) { LOG.error("ZK_CONNECT_STRING is not set. Exiting."); return 1; } if (mergeJobPropertiesFile == null) { LOG.info("MERGEJOB_CONF is not set. Not merging."); doMerge = false; } if (filterJobPropertiesFile == null) { LOG.info("FILTERJOB_CONF is not set. 
Not archiving."); doArchive = false; } if (daysBeforeArchive == null) { LOG.info("DAYS_BEFORE_ARCHIVE is not set. Not archiving."); doArchive = false; } if (doArchive && Integer.parseInt(daysBeforeArchive) < 0) { LOG.info("DAYS_BEFORE_ARCHIVE is negative. Not archiving."); doArchive = false; } if (daysBeforeDelete == null) { LOG.info("DAYS_BEFORE_DELETE is not set. Not deleting."); doDelete = false; } if (doDelete && Integer.parseInt(daysBeforeDelete) < 0) { LOG.info("DAYS_BEFORE_DELETE is negative. Not deleting."); doDelete = false; } if (logdir == null) { LOG.info("LOGDRIVER_LOGDIR_NAME is not set. Using default value of 'logs'."); logdir = "logs"; } if (rootDir == null) { LOG.info("SERVICE_ROOT_DIR is not set. Using default value of 'service'."); rootDir = "/service"; } // We can hang if this fails. So make sure we abort if it fails. fs = null; try { fs = FileSystem.get(conf); fs.exists(new Path("/")); // Test if it works. } catch (IOException e) { LOG.error("Error getting filesystem.", e); return 1; } // Create the LockUtil instance lockUtil = new LockUtil(zkConnectString); // Now it's safe to create our Job Runner JobRunner jobRunner = new JobRunner(Integer.parseInt(maxConcurrentMR), Integer.parseInt(maxTotalMR)); Thread jobRunnerThread = new Thread(jobRunner); jobRunnerThread.setName("JobRunner"); jobRunnerThread.setDaemon(false); jobRunnerThread.start(); // Figure out what date we start filters on. 
String filterCutoffDate = ""; if (doArchive) { Calendar cal = Calendar.getInstance(); cal.add(Calendar.DAY_OF_MONTH, Integer.parseInt("-" + daysBeforeArchive)); filterCutoffDate = String.format("%04d%02d%02d%02d", cal.get(Calendar.YEAR), (cal.get(Calendar.MONTH) + 1), cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.HOUR_OF_DAY)); LOG.info("Archiving logs from before {}", filterCutoffDate); } String deleteCutoffDate = ""; if (doDelete) { Calendar cal = Calendar.getInstance(); cal.add(Calendar.DAY_OF_MONTH, Integer.parseInt("-" + daysBeforeDelete)); deleteCutoffDate = String.format("%04d%02d%02d%02d", cal.get(Calendar.YEAR), (cal.get(Calendar.MONTH) + 1), cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.HOUR_OF_DAY)); LOG.info("Deleting logs from before {}", deleteCutoffDate); } long now = System.currentTimeMillis(); // Various exceptions have been popping up here. So make sure I catch them // all. try { // Patterns to recognize hour, day and incoming directories, so that they // can be processed. 
Pattern datePathPattern; Pattern hourPathPattern; Pattern incomingPathPattern; Pattern dataPathPattern; Pattern archivePathPattern; Pattern workingPathPattern; if (hour != null) { datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")"); hourPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(" + Pattern.quote(hour) + ")"); incomingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(" + Pattern.quote(hour) + ")/([^/]+)/incoming"); dataPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(" + Pattern.quote(hour) + ")/([^/]+)/data"); archivePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(" + Pattern.quote(hour) + ")/([^/]+)/archive"); workingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(" + Pattern.quote(hour) + ")/([^/]+)/working/([^/]+)_(\\d+)"); } else if (date != null) { datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")"); hourPathPattern = Pattern .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(\\d{2})"); incomingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + 
")/(\\d{2})/([^/]+)/incoming"); dataPathPattern = Pattern .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(\\d{2})/([^/]+)/data"); archivePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(\\d{2})/([^/]+)/archive"); workingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(\\d{2})/([^/]+)/working/([^/]+)_(\\d+)"); } else { datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})"); hourPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})"); incomingPathPattern = Pattern .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/incoming"); dataPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/data"); archivePathPattern = Pattern .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/archive"); workingPathPattern = Pattern .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/working/([^/]+)_(\\d+)"); } // Do a depth first search of the directory, processing anything that // looks // interesting along the way Deque<Path> paths = new ArrayDeque<Path>(); Path rootPath = new Path(rootDir + "/" + dcNumber + "/" + service + "/" + logdir + "/"); paths.push(rootPath); while (paths.size() > 0) { Path p = 
paths.pop(); LOG.debug("{}", p.toString()); if (!fs.exists(p)) { continue; } FileStatus dirStatus = fs.getFileStatus(p); FileStatus[] children = fs.listStatus(p); boolean addChildren = true; boolean old = dirStatus.getModificationTime() < now - WAIT_TIME; LOG.debug(" Was last modified {}ms ago", now - dirStatus.getModificationTime()); if (!old) { LOG.debug(" Skipping, since it's not old enough."); } else if ((!rootPath.equals(p)) && (children.length == 0 || (children.length == 1 && children[0].getPath().getName().equals(READY_MARKER)))) { // old and no children? Delete! LOG.info(" Deleting empty directory {}", p.toString()); fs.delete(p, true); } else { Matcher matcher = datePathPattern.matcher(p.toUri().getPath()); if (matcher.matches()) { LOG.debug("Checking date directory"); // If this is already done, then skip it. So only process if it // doesn't exist. if (fs.exists(new Path(p, READY_MARKER)) == false) { // Check each subdirectory. If they all have ready markers, then I // guess we're ready. boolean ready = true; for (FileStatus c : children) { if (c.isDirectory() && fs.exists(new Path(c.getPath(), READY_MARKER)) == false) { ready = false; break; } } if (ready) { fs.createNewFile(new Path(p, READY_MARKER)); } } } matcher = hourPathPattern.matcher(p.toUri().getPath()); if (matcher.matches()) { LOG.debug("Checking hour directory"); // If this is already done, then skip it. So only process if it // doesn't exist. if (fs.exists(new Path(p, READY_MARKER)) == false) { // Check each subdirectory. If they all have ready markers, then I // guess we're ready. 
boolean ready = true; for (FileStatus c : children) { if (c.isDirectory() && fs.exists(new Path(c.getPath(), READY_MARKER)) == false) { ready = false; break; } } if (ready) { fs.createNewFile(new Path(p, READY_MARKER)); } } } // Check to see if we have to run a merge matcher = incomingPathPattern.matcher(p.toUri().getPath()); if (matcher.matches()) { LOG.debug("Checking incoming directory"); String matchDate = matcher.group(1); String matchHour = matcher.group(2); String matchComponent = matcher.group(3); String timestamp = matchDate + matchHour; if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) { LOG.info("Deleting old directory: {}", p); fs.delete(p, true); addChildren = false; } else if (doMerge) { // old, looks right, and has children? Run it! boolean hasMatchingChildren = false; boolean subdirTooYoung = false; for (FileStatus child : children) { if (!hasMatchingChildren) { FileStatus[] grandchildren = fs.listStatus(child.getPath()); for (FileStatus gc : grandchildren) { if (VALID_FILE.matcher(gc.getPath().getName()).matches()) { hasMatchingChildren = true; break; } } } if (!subdirTooYoung) { if (child.getModificationTime() >= now - WAIT_TIME) { subdirTooYoung = true; LOG.debug(" Subdir {} is too young.", child.getPath()); } } } if (!hasMatchingChildren) { LOG.debug(" No files match the expected pattern ({})", VALID_FILE.pattern()); } if (hasMatchingChildren && !subdirTooYoung) { LOG.info(" Run Merge job {} :: {} {} {} {} {}", new Object[] { p.toString(), dcNumber, service, matchDate, matchHour, matchComponent }); Properties jobProps = new Properties(); jobProps.load(new FileInputStream(mergeJobPropertiesFile)); jobProps.setProperty("jobType", "merge"); jobProps.setProperty("rootDir", rootDir); jobProps.setProperty("dcNumber", dcNumber); jobProps.setProperty("service", service); jobProps.setProperty("date", matchDate); jobProps.setProperty("hour", matchHour); jobProps.setProperty("component", matchComponent); jobProps.setProperty("user.name", 
userName); jobProps.setProperty("logdir", logdir); jobRunner.submit(jobProps); addChildren = false; } } } // Check to see if we need to run a filter and archive matcher = dataPathPattern.matcher(p.toUri().getPath()); if (matcher.matches()) { String matchDate = matcher.group(1); String matchHour = matcher.group(2); String matchComponent = matcher.group(3); String timestamp = matchDate + matchHour; if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) { LOG.info("Deleting old directory: {}", p); fs.delete(p, true); addChildren = false; } else if (doArchive && timestamp.compareTo(filterCutoffDate) < 0) { Properties jobProps = new Properties(); jobProps.load(new FileInputStream(filterJobPropertiesFile)); jobProps.setProperty("jobType", "filter"); jobProps.setProperty("rootDir", rootDir); jobProps.setProperty("dcNumber", dcNumber); jobProps.setProperty("service", service); jobProps.setProperty("date", matchDate); jobProps.setProperty("hour", matchHour); jobProps.setProperty("component", matchComponent); jobProps.setProperty("user.name", userName); jobProps.setProperty("logdir", logdir); // Check to see if we should just keep all or delete all here. // The filter file should be here String appPath = jobProps.getProperty("oozie.wf.application.path"); appPath = appPath.replaceFirst("\\$\\{.*?\\}", ""); Path filterFile = new Path( appPath + "/" + conf.get("filter.definition.file", service + ".yaml")); LOG.info("Filter file is {}", filterFile); if (fs.exists(filterFile)) { List<BoomFilterMapper.Filter> filters = BoomFilterMapper.loadFilters(matchComponent, fs.open(filterFile)); if (filters == null) { LOG.warn( " Got null when getting filters. Not processing. {} :: {} {} {} {} {}", new Object[] { p.toString(), dcNumber, service, matchDate, matchHour, matchComponent }); } else if (filters.size() == 0) { LOG.warn(" Got no filters. Not processing. 
{} :: {} {} {} {} {}", new Object[] { p.toString(), dcNumber, service, matchDate, matchHour, matchComponent }); } else if (filters.size() == 1 && filters.get(0) instanceof BoomFilterMapper.KeepAllFilter) { LOG.info(" Keeping everything. {} :: {} {} {} {} {}", new Object[] { p.toString(), dcNumber, service, matchDate, matchHour, matchComponent }); // Move files from data to archive // delete it all! String destination = rootDir + "/" + dcNumber + "/" + service + "/" + logdir + "/" + matchDate + "/" + matchHour + "/" + matchComponent + "/archive/"; PathInfo pathInfo = new PathInfo(); pathInfo.setDcNumber(dcNumber); pathInfo.setService(service); pathInfo.setLogdir(logdir); pathInfo.setDate(matchDate); pathInfo.setHour(matchHour); pathInfo.setComponent(matchComponent); try { lockUtil.acquireWriteLock(lockUtil.getLockPath(pathInfo)); fs.mkdirs(new Path(destination)); for (FileStatus f : fs.listStatus(p)) { fs.rename(f.getPath(), new Path(destination)); } } finally { lockUtil.releaseWriteLock(lockUtil.getLockPath(pathInfo)); } } else if (filters.size() == 1 && filters.get(0) instanceof BoomFilterMapper.DropAllFilter) { LOG.info(" Dropping everything. 
{} :: {} {} {} {} {}", new Object[] { p.toString(), dcNumber, service, matchDate, matchHour, matchComponent }); PathInfo pathInfo = new PathInfo(); pathInfo.setDcNumber(dcNumber); pathInfo.setService(service); pathInfo.setLogdir(logdir); pathInfo.setDate(matchDate); pathInfo.setHour(matchHour); pathInfo.setComponent(matchComponent); try { lockUtil.acquireWriteLock(lockUtil.getLockPath(pathInfo)); fs.delete(p, true); } finally { lockUtil.releaseWriteLock(lockUtil.getLockPath(pathInfo)); } } else { LOG.info(" Run Filter/Archive job {} :: {} {} {} {} {}", new Object[] { p.toString(), dcNumber, service, matchDate, matchHour, matchComponent }); jobRunner.submit(jobProps); } } else { LOG.warn("Skipping filter job, since no filter file exists"); } addChildren = false; } } matcher = archivePathPattern.matcher(p.toUri().getPath()); if (matcher.matches()) { String matchDate = matcher.group(1); String matchHour = matcher.group(2); String timestamp = matchDate + matchHour; if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) { LOG.info("Deleting old directory: {}", p); fs.delete(p, true); addChildren = false; } } matcher = workingPathPattern.matcher(p.toUri().getPath()); if (matcher.matches()) { LOG.info(" Matches working pattern ({})", p); if (resetOrphanedJobs) { String matchDate = matcher.group(1); String matchHour = matcher.group(2); String matchComponent = matcher.group(3); // Move everything from working/xxx/incoming/ to incoming/ PathInfo lockPathInfo = new PathInfo(logdir, rootDir + "/" + dcNumber + "/" + service + "/" + logdir + "/" + matchDate + "/" + matchHour + "/" + matchComponent); lockUtil.acquireWriteLock(lockUtil.getLockPath(lockPathInfo)); FileStatus[] fileStatuses = fs.listStatus(new Path(p.toUri().getPath() + "/incoming/")); if (fileStatuses != null) { for (FileStatus fileStatus : fileStatuses) { Path toPath = new Path( fileStatus.getPath().getParent().getParent().getParent().getParent(), "incoming/" + fileStatus.getPath().getName()); LOG.info(" 
Moving data from {} to {}", fileStatus.getPath(), toPath); LOG.info(" mkdir {}", toPath); fs.mkdirs(toPath); Path fromDir = new Path(p.toUri().getPath(), "incoming/" + fileStatus.getPath().getName()); LOG.info(" moving from {}", fromDir); FileStatus[] files = fs.listStatus(fromDir); if (files == null || files.length == 0) { LOG.info(" Nothing to move from {}", fromDir); } else { for (FileStatus f : files) { LOG.info(" rename {} {}", f.getPath(), new Path(toPath, f.getPath().getName())); fs.rename(f.getPath(), new Path(toPath, f.getPath().getName())); } } LOG.info(" rm {}", fileStatus.getPath()); fs.delete(fileStatus.getPath(), true); } lockUtil.releaseWriteLock(lockUtil.getLockPath(lockPathInfo)); fs.delete(new Path(p.toUri().getPath()), true); } } addChildren = false; } } // Add any children which are directories to the stack. if (addChildren) { for (int i = children.length - 1; i >= 0; i--) { FileStatus child = children[i]; if (child.isDirectory()) { paths.push(child.getPath()); } } } } // Since we may have deleted a bunch of directories, delete any unused // locks // from ZooKeeper. { LOG.info("Checking for unused locks in ZooKeeper"); String scanPath = rootDir + "/" + dcNumber + "/" + service + "/" + logdir; if (date != null) { scanPath += "/" + date; if (hour != null) { scanPath += "/" + hour; } } List<LockInfo> lockInfo = lockUtil.scan(scanPath); for (LockInfo li : lockInfo) { // Check if the lock path still exists in HDFS. If it doesn't, then // delete it from ZooKeeper. String path = li.getPath(); String hdfsPath = path.substring(LockUtil.ROOT.length()); if (!fs.exists(new Path(hdfsPath))) { ZooKeeper zk = lockUtil.getZkClient(); while (!path.equals(LockUtil.ROOT)) { try { zk.delete(path, -1); } catch (KeeperException.NotEmptyException e) { // That's fine. just stop trying then. 
break; } catch (Exception e) { LOG.error("Caught exception trying to delete from ZooKeeper.", e); break; } LOG.info("Deleted from ZooKeeper: {}", path); path = path.substring(0, path.lastIndexOf('/')); } } } } // Now that we're done, wait for the Oozie Runner to stop, and print the // results. LOG.info("Waiting for Oozie jobs to complete."); jobRunner.shutdown(); jobRunnerThread.join(); LOG.info("Job Stats : Started={} Succeeded={} failed={} errors={}", new Object[] { jobRunner.getStarted(), jobRunner.getSucceeded(), jobRunner.getFailed(), jobRunner.getErrors() }); lockUtil.close(); } catch (Exception e) { LOG.error("Unexpected exception caught.", e); return 1; } return 0; }
From source file:ddf.ldap.ldaplogin.SslLdapLoginModule.java
/**
 * Performs the LDAP login sequence: obtains username/password via the
 * callback handler, binds as the configured admin connection account,
 * searches for the user's distinguished name, re-binds as that user to
 * validate the supplied password, then re-binds as admin and searches for
 * the user's roles, adding a UserPrincipal and one RolePrincipal per role.
 *
 * NOTE(review): the user-supplied name is substituted into the LDAP search
 * filters with only regex replacement quoting plus a subsequent doubling of
 * backslashes — it is not LDAP-filter escaped (RFC 4515), so special filter
 * characters in the username could alter the query; confirm that
 * validateUsername() upstream restricts the character set.
 *
 * @return true if the user bind and role lookup both succeed; false on any
 *         recoverable failure (missing user, failed binds, pool errors)
 * @throws LoginException on callback handler failures or role-retrieval errors
 */
protected boolean doLogin() throws LoginException { // --------- EXTRACT USERNAME AND PASSWORD FOR LDAP LOOKUP ------------- Callback[] callbacks = new Callback[2]; callbacks[0] = new NameCallback("Username: "); callbacks[1] = new PasswordCallback("Password: ", false); try { callbackHandler.handle(callbacks); } catch (IOException ioException) { LOGGER.debug("Exception while handling login.", ioException); throw new LoginException(ioException.getMessage()); } catch (UnsupportedCallbackException unsupportedCallbackException) { LOGGER.debug("Exception while handling login.", unsupportedCallbackException); throw new LoginException( unsupportedCallbackException.getMessage() + " not available to obtain information from user."); } user = ((NameCallback) callbacks[0]).getName(); if (user == null) { return false; } user = user.trim(); validateUsername(user); char[] tmpPassword = ((PasswordCallback) callbacks[1]).getPassword(); // If either a username or password is specified don't allow authentication = "none". // This is to prevent someone from logging into Karaf as any user without providing a // valid password (because if authentication = none, the password could be any // value - it is ignored). // Username is not checked in this conditional because a null username immediately exits // this method. 
if ("none".equalsIgnoreCase(getBindMethod()) && (tmpPassword != null)) { LOGGER.debug("Changing from authentication = none to simple since user or password was specified."); // default to simple so that the provided user/password will get checked setBindMethod(DEFAULT_AUTHENTICATION); } if (tmpPassword == null) { tmpPassword = new char[0]; } // --------------------------------------------------------------------- // RESET OBJECT STATE AND DECLARE LOCAL VARS principals = new HashSet<>(); Connection connection; String userDn; // --------------------------------------------------------------------- // ------------- CREATE CONNECTION #1 ---------------------------------- try { connection = ldapConnectionPool.borrowObject(); } catch (Exception e) { LOGGER.info("Unable to obtain ldap connection from pool", e); return false; } try { if (connection != null) { // ------------- BIND #1 (CONNECTION USERNAME & PASSWORD) -------------- try { BindRequest request; switch (bindMethod) { case "Simple": request = Requests.newSimpleBindRequest(connectionUsername, connectionPassword); break; case "SASL": request = Requests.newPlainSASLBindRequest(connectionUsername, connectionPassword); break; case "GSSAPI SASL": request = Requests.newGSSAPISASLBindRequest(connectionUsername, connectionPassword); ((GSSAPISASLBindRequest) request).setRealm(realm); ((GSSAPISASLBindRequest) request).setKDCAddress(kdcAddress); break; case "Digest MD5 SASL": request = Requests.newDigestMD5SASLBindRequest(connectionUsername, connectionPassword); ((DigestMD5SASLBindRequest) request).setCipher(DigestMD5SASLBindRequest.CIPHER_HIGH); ((DigestMD5SASLBindRequest) request).getQOPs().clear(); ((DigestMD5SASLBindRequest) request).getQOPs().add(DigestMD5SASLBindRequest.QOP_AUTH_CONF); ((DigestMD5SASLBindRequest) request).getQOPs().add(DigestMD5SASLBindRequest.QOP_AUTH_INT); ((DigestMD5SASLBindRequest) request).getQOPs().add(DigestMD5SASLBindRequest.QOP_AUTH); if (StringUtils.isNotEmpty(realm)) { 
((DigestMD5SASLBindRequest) request).setRealm(realm); } break; default: request = Requests.newSimpleBindRequest(connectionUsername, connectionPassword); break; } LOGGER.trace("Attempting LDAP bind for administrator: {}", connectionUsername); BindResult bindResult = connection.bind(request); if (!bindResult.isSuccess()) { LOGGER.debug("Bind failed"); return false; } } catch (LdapException e) { LOGGER.debug("Unable to bind to LDAP server.", e); return false; } LOGGER.trace("LDAP bind successful for administrator: {}", connectionUsername); // --------- SEARCH #1, FIND USER DISTINGUISHED NAME ----------- SearchScope scope; scope = userSearchSubtree ? SearchScope.WHOLE_SUBTREE : SearchScope.SINGLE_LEVEL; userFilter = userFilter.replaceAll(Pattern.quote("%u"), Matcher.quoteReplacement(user)); userFilter = userFilter.replace("\\", "\\\\"); LOGGER.trace("Performing LDAP query for user: {} at {} with filter {}", user, userBaseDN, userFilter); try (ConnectionEntryReader entryReader = connection.search(userBaseDN, scope, userFilter)) { while (entryReader.hasNext() && entryReader.isReference()) { LOGGER.debug("Referral ignored while searching for user {}", user); entryReader.readReference(); } if (!entryReader.hasNext()) { LOGGER.info("User {} not found in LDAP.", user); return false; } SearchResultEntry searchResultEntry = entryReader.readEntry(); userDn = searchResultEntry.getName().toString(); } catch (LdapException | SearchResultReferenceIOException e) { LOGGER.info("Unable to read contents of LDAP user search.", e); return false; } // ----- BIND #2 (USER DISTINGUISHED NAME AND PASSWORD) ------------ // Validate user's credentials. 
try { LOGGER.trace("Attempting LDAP bind for user: {}", userDn); BindResult bindResult = connection.bind(userDn, tmpPassword); if (!bindResult.isSuccess()) { LOGGER.info("Bind failed"); return false; } } catch (Exception e) { LOGGER.info("Unable to bind user: {} to LDAP server.", userDn, e); return false; } LOGGER.trace("LDAP bind successful for user: {}", userDn); // ---------- ADD USER AS PRINCIPAL -------------------------------- principals.add(new UserPrincipal(user)); // ----- BIND #3 (CONNECTION USERNAME & PASSWORD) -------------- try { LOGGER.trace("Attempting LDAP bind for administrator: {}", connectionUsername); BindResult bindResult = connection.bind(connectionUsername, connectionPassword); if (!bindResult.isSuccess()) { LOGGER.info("Bind failed"); return false; } } catch (LdapException e) { LOGGER.info("Unable to bind to LDAP server.", e); return false; } LOGGER.trace("LDAP bind successful for administrator: {}", connectionUsername); // --------- SEARCH #3, GET ROLES ------------------------------ scope = roleSearchSubtree ? 
SearchScope.WHOLE_SUBTREE : SearchScope.SINGLE_LEVEL; roleFilter = roleFilter.replaceAll(Pattern.quote("%u"), Matcher.quoteReplacement(user)); roleFilter = roleFilter.replaceAll(Pattern.quote("%dn"), Matcher.quoteReplacement(userBaseDN)); roleFilter = roleFilter.replaceAll(Pattern.quote("%fqdn"), Matcher.quoteReplacement(userDn)); roleFilter = roleFilter.replace("\\", "\\\\"); LOGGER.trace( "Performing LDAP query for roles for user: {} at {} with filter {} for role attribute {}", user, roleBaseDN, roleFilter, roleNameAttribute); // ------------- ADD ROLES AS NEW PRINCIPALS ------------------- try (ConnectionEntryReader entryReader = connection.search(roleBaseDN, scope, roleFilter, roleNameAttribute)) { SearchResultEntry entry; while (entryReader.hasNext()) { if (entryReader.isEntry()) { entry = entryReader.readEntry(); Attribute attr = entry.getAttribute(roleNameAttribute); if (attr == null) { throw new LoginException("No attributes returned for [" + roleNameAttribute + " : " + roleBaseDN + "]"); } for (ByteString role : attr) { principals.add(new RolePrincipal(role.toString())); } } else { // Got a continuation reference. final SearchResultReference ref = entryReader.readReference(); LOGGER.debug("Skipping result reference: {}", ref.getURIs()); } } } catch (Exception e) { LOGGER.debug("Exception while getting roles for [" + user + "].", e); throw new LoginException("Can't get roles for [" + user + "]: " + e.getMessage()); } } else { LOGGER.trace("LDAP Connection was null could not authenticate user."); return false; } return true; } finally { ldapConnectionPool.returnObject(connection); } }
From source file:de.tor.tribes.util.parser.TroopsParser70.java
private TroopAmountFixed parseUnits(String pLine) throws RuntimeException { String line = pLine.replaceAll(getVariable("troops.own"), "").replaceAll(getVariable("troops.commands"), "") .replaceAll(getVariable("troops"), "").replaceAll(Pattern.quote("+"), "").trim(); debug("Getting units from line '" + line + "'"); StringTokenizer t = new StringTokenizer(line, " \t"); int uCount = DataHolder.getSingleton().getUnits().size(); List<UnitHolder> allUnits = DataHolder.getSingleton().getUnits(); TroopAmountFixed units = new TroopAmountFixed(-1); int cnt = 0;//from ww w .j a va2s .co m while (t.hasMoreTokens()) { try { String toParse = t.nextToken(); units.setAmountForUnit(allUnits.get(cnt), Integer.parseInt(toParse)); cnt++; } catch (Exception e) { //token with no troops } } if (cnt < uCount) { throw new RuntimeException("Unit count does not match"); } if (cnt > uCount) { debug("Too many units found " + cnt + "/" + uCount); } debug("Units: "); for (UnitHolder unit : allUnits) { debug(units.getAmountForUnit(unit)); } return units; }
From source file:com.adobe.ags.curly.controller.ActionRunner.java
/**
 * Replaces positional tokens ${0}, ${1}, ... with the action's named
 * variable tokens ${name}, in the iteration order of the variable set.
 *
 * @param str text containing positional tokens
 * @return text with positional tokens swapped for named ones
 */
private String detokenizeParameters(String str) {
    Set<String> variableTokens = ActionUtils.getVariableNames(action);
    int index = 0;
    for (String variableName : variableTokens) {
        // Both needle and replacement are fixed text, so a literal
        // String.replace is equivalent to the quoted-regex form.
        str = str.replace("${" + index + "}", "${" + variableName + "}");
        index++;
    }
    return str;
}
From source file:eu.gsmservice.android.websms.connector.gsmservice.ConnectorGsmService.java
private String formatNumber(final Context context, String receiver) { if (receiver != null) { if (receiver.contains("<") && receiver.contains(">")) { int start = receiver.indexOf("<"); int end = receiver.indexOf(">"); receiver = receiver.substring(start + 1, end); }//from w ww . ja v a 2 s . c o m this.log("replace +"); receiver = receiver.replaceAll(Pattern.quote("+"), ""); this.log("replace ("); receiver = receiver.replaceAll(Pattern.quote("("), ""); this.log("replace )"); receiver = receiver.replaceAll(Pattern.quote(")"), ""); if (receiver.startsWith("00")) { this.log("replace 00"); receiver = receiver.replaceFirst(Pattern.quote("00"), ""); } this.log("receiver temp=" + receiver); if (receiver.startsWith("0")) { this.log("error_format_wrong -> starts with 0=" + receiver); throw new WebSMSException(context, R.string.error_format_wrong); } } return receiver; }
From source file:com.wxxr.mobile.core.tools.VelocityTemplateRenderer.java
/**
 * Replaces every occurrence of the literal marker
 * "(!PLACEHOLDER:&lt;placeholder&gt;!)" in {@code text} with
 * {@code replacement}.
 *
 * <p>Bug fix: the previous version quoted only the search pattern and then
 * called {@code replaceAll}, which still interprets {@code '\'} and
 * {@code '$'} in the <em>replacement</em> as group references — a
 * replacement containing those characters would throw
 * IllegalArgumentException or produce corrupted output.
 * {@link String#replace(CharSequence, CharSequence)} substitutes both sides
 * literally.
 *
 * @param text        the text to process
 * @param placeholder the placeholder name between the markers
 * @param replacement literal replacement text (may contain '$' or '\')
 * @return text with all markers replaced
 */
private String replacePlaceholder(String text, String placeholder, String replacement) {
    return text.replace("(!PLACEHOLDER:" + placeholder + "!)", replacement);
}
From source file:com.predic8.membrane.core.rules.SOAPProxy.java
/**
 * Parses the WSDL and (re)installs the automatically managed interceptors
 * at position 0 (added in reverse order): a WebServiceExplorerInterceptor,
 * a WSDLPublisherInterceptor if none is already configured, a
 * WSDLInterceptor if none is already configured (given a path rewriter
 * derived from the proxy's key path), and a RewriteInterceptor when a
 * targetPath is set. Previously auto-added interceptors are removed first,
 * so the method is safe to call repeatedly.
 *
 * NOTE(review): the MalformedURLException inside the path rewriter is
 * silently swallowed, leaving the path unrewritten — presumably intentional
 * best-effort behaviour; confirm.
 *
 * @throws Exception if WSDL parsing fails
 */
public void configure() throws Exception { parseWSDL(); // remove previously added interceptors for (; automaticallyAddedInterceptorCount > 0; automaticallyAddedInterceptorCount--) interceptors.remove(0); // add interceptors (in reverse order) to position 0. WebServiceExplorerInterceptor sui = new WebServiceExplorerInterceptor(); sui.setWsdl(wsdl); sui.setPortName(portName); interceptors.add(0, sui); automaticallyAddedInterceptorCount++; boolean hasPublisher = getInterceptorOfType(WSDLPublisherInterceptor.class) != null; if (!hasPublisher) { WSDLPublisherInterceptor wp = new WSDLPublisherInterceptor(); wp.setWsdl(wsdl); interceptors.add(0, wp); automaticallyAddedInterceptorCount++; } WSDLInterceptor wsdlInterceptor = getInterceptorOfType(WSDLInterceptor.class); boolean hasRewriter = wsdlInterceptor != null; if (!hasRewriter) { wsdlInterceptor = new WSDLInterceptor(); interceptors.add(0, wsdlInterceptor); automaticallyAddedInterceptorCount++; } if (key.getPath() != null) { final String keyPath = key.getPath(); final String name = URLUtil.getName(router.getUriFactory(), keyPath); wsdlInterceptor.setPathRewriter(new PathRewriter() { @Override public String rewrite(String path2) { try { if (path2.contains("://")) { path2 = new URL(new URL(path2), keyPath).toString(); } else { Matcher m = relativePathPattern.matcher(path2); path2 = m.replaceAll("./" + name + "?"); } } catch (MalformedURLException e) { } return path2; } }); } if (hasRewriter && !hasPublisher) log.warn( "A <soapProxy> contains a <wsdlRewriter>, but no <wsdlPublisher>. Probably you want to insert a <wsdlPublisher> just after the <wsdlRewriter>. 
(Or, if this is a valid use case, please notify us at " + Constants.PRODUCT_CONTACT_EMAIL + ".)"); if (targetPath != null) { RewriteInterceptor ri = new RewriteInterceptor(); ri.setMappings(Lists.newArrayList(new RewriteInterceptor.Mapping("^" + Pattern.quote(key.getPath()), Matcher.quoteReplacement(targetPath), "rewrite"))); interceptors.add(0, ri); automaticallyAddedInterceptorCount++; } }