Usage examples for org.apache.commons.lang.StringUtils#stripEnd
public static String stripEnd(String str, String stripChars)
Strips any of a set of characters from the end of a String.
From source file:org.apache.archiva.admin.model.beans.RemoteRepository.java
/**
 * Sets the remote repository URL.
 *
 * <p>Trailing '/' characters are removed before storing so later URL/link
 * concatenation does not produce double slashes.
 *
 * @param url the repository URL; trailing slashes are stripped
 */
public void setUrl(String url) {
    String normalized = StringUtils.stripEnd(url, "/");
    this.url = normalized;
}
From source file:org.apache.archiva.rest.services.DefaultArchivaAdministrationService.java
@Override public void setUiConfiguration(UiConfiguration uiConfiguration) throws ArchivaRestServiceException { try {/*from w w w. jav a 2s .c o m*/ // fix for MRM-1757 // strip any trailing '/' at the end of the url so it won't affect url/link calculations in UI uiConfiguration.setApplicationUrl(StringUtils.stripEnd(uiConfiguration.getApplicationUrl(), "/")); archivaAdministration.updateUiConfiguration(uiConfiguration); } catch (RepositoryAdminException e) { throw new ArchivaRestServiceException(e.getMessage(), e); } }
From source file:org.apache.falcon.converter.OozieFeedMapperTest.java
private String getPathsWithPartitions(Cluster sourceCluster, Cluster targetCluster, Feed aFeed) throws FalconException { String srcPart = FeedHelper// ww w . j av a 2s .c o m .normalizePartitionExpression(FeedHelper.getCluster(aFeed, sourceCluster.getName()).getPartition()); srcPart = FeedHelper.evaluateClusterExp(sourceCluster, srcPart); String targetPart = FeedHelper .normalizePartitionExpression(FeedHelper.getCluster(aFeed, targetCluster.getName()).getPartition()); targetPart = FeedHelper.evaluateClusterExp(targetCluster, targetPart); StringBuilder pathsWithPartitions = new StringBuilder(); pathsWithPartitions.append("${coord:dataIn('input')}/") .append(FeedHelper.normalizePartitionExpression(srcPart, targetPart)); String parts = pathsWithPartitions.toString().replaceAll("//+", "/"); parts = StringUtils.stripEnd(parts, "/"); return parts; }
From source file:org.apache.falcon.converter.OozieFeedWorkflowBuilderTest.java
private String getPathsWithPartitions(Cluster sourceCluster, Cluster targetCluster, Feed aFeed) throws FalconException { String srcPart = FeedHelper//from w ww . j a v a 2 s. co m .normalizePartitionExpression(FeedHelper.getCluster(aFeed, sourceCluster.getName()).getPartition()); srcPart = FeedHelper.evaluateClusterExp(sourceCluster, srcPart); String targetPart = FeedHelper .normalizePartitionExpression(FeedHelper.getCluster(aFeed, targetCluster.getName()).getPartition()); targetPart = FeedHelper.evaluateClusterExp(targetCluster, targetPart); String pathsWithPartitions = "${coord:dataIn('input')}/" + FeedHelper.normalizePartitionExpression(srcPart, targetPart); String parts = pathsWithPartitions.replaceAll("//+", "/"); parts = StringUtils.stripEnd(parts, "/"); return parts; }
From source file:org.apache.geode.management.internal.cli.GfshParser.java
/**
 * Parses a raw gfsh input line into a {@link GfshParseResult}.
 *
 * <p>Flow: trim trailing whitespace and an optional command delimiter,
 * locate matching command targets, then — for a unique match — parse
 * options/arguments and build the method-invocation result. Ambiguous,
 * invalid, or unavailable commands are routed through handleCondition.
 * Returns null when parsing fails (errors are logged/handled internally).
 */
public ParseResult parse(String userInput) {
    GfshParseResult parseResult = null;
    // First remove the trailing white spaces
    userInput = StringUtils.stripEnd(userInput, null);
    // Strip a trailing command delimiter (e.g. ';') if the input ends with one.
    if ((ParserUtils.contains(userInput, SyntaxConstants.COMMAND_DELIMITER)
            && StringUtils.endsWithIgnoreCase(userInput, SyntaxConstants.COMMAND_DELIMITER))) {
        userInput = StringUtils.removeEnd(userInput, SyntaxConstants.COMMAND_DELIMITER);
    }
    try {
        boolean error = false;
        CliCommandOptionException coe = null;
        List<CommandTarget> targets = locateTargets(ParserUtils.trimBeginning(userInput), false);
        if (targets.size() > 1) {
            // More than one command matches: ambiguous (only reported for non-empty input).
            if (userInput.length() > 0) {
                handleCondition(CliStrings.format(
                        CliStrings.GFSHPARSER__MSG__AMBIGIOUS_COMMAND_0_FOR_ASSISTANCE_USE_1_OR_HINT_HELP,
                        new Object[] { userInput, AbstractShell.completionKeys }),
                        CommandProcessingException.COMMAND_NAME_AMBIGUOUS, userInput);
            }
        } else {
            if (targets.size() == 1) {
                // Exactly one command target: parse its options and arguments.
                OptionSet parse = null;
                List<MethodParameter> parameters = new ArrayList<MethodParameter>();
                Map<String, String> paramValMap = new HashMap<String, String>();
                CommandTarget commandTarget = targets.get(0);
                GfshMethodTarget gfshMethodTarget = commandTarget.getGfshMethodTarget();
                preConfigureConverters(commandTarget);
                try {
                    parse = commandTarget.getOptionParser().parse(gfshMethodTarget.getRemainingBuffer());
                } catch (CliException ce) {
                    // Option-parse failures are captured (not rethrown) so the
                    // partial OptionSet can still be inspected below.
                    if (ce instanceof CliCommandOptionException) {
                        coe = (CliCommandOptionException) ce;
                        coe.setCommandTarget(commandTarget);
                        parse = coe.getOptionSet();
                        error = true;
                    }
                }
                try {
                    checkOptionSetForValidCommandModes(parse, commandTarget);
                } catch (CliCommandMultiModeOptionException ce) {
                    error = true;
                    coe = ce;
                }
                error = processArguments(parse, commandTarget, paramValMap, parameters, error);
                // TODO: next call throws when space before closing "
                error = processOptions(parse, commandTarget, paramValMap, parameters, error);
                if (!error) {
                    // Reorder parsed parameters into positional slots for reflection.
                    Object[] methodParameters = new Object[parameters.size()];
                    for (MethodParameter parameter : parameters) {
                        methodParameters[parameter.getParameterNo()] = parameter.getParameter();
                    }
                    parseResult = new GfshParseResult(gfshMethodTarget.getMethod(), gfshMethodTarget.getTarget(),
                            methodParameters, userInput, commandTarget.getCommandName(), paramValMap);
                } else {
                    if (coe != null) {
                        logWrapper.fine("Handling exception: " + coe.getMessage());
                        ExceptionHandler.handleException(coe); // TODO: this eats exception that would make it
                        // easier to debug GemfireDataCommandsDUnitTest
                        // ExceptionHandler.handleException() only logs it on console.
                        // When on member, we need to handle this.
                        if (!CliUtil.isGfshVM()) {
                            handleCondition(
                                    CliStrings.format(CliStrings.GFSHPARSER__MSG__INVALID_COMMAND_STRING_0,
                                            userInput),
                                    coe, CommandProcessingException.COMMAND_INVALID, userInput);
                        }
                    }
                }
            } else {
                // No match: report invalid, or unavailable with a reason when an
                // exact-name target exists but its availability indicator fails.
                String message = CliStrings.format(CliStrings.GFSHPARSER__MSG__COMMAND_0_IS_NOT_VALID,
                        userInput);
                CommandTarget commandTarget = locateExactMatchingTarget(userInput);
                if (commandTarget != null) {
                    String commandName = commandTarget.getCommandName();
                    AvailabilityTarget availabilityIndicator = commandTarget.getAvailabilityIndicator();
                    message = CliStrings.format(CliStrings.GFSHPARSER__MSG__0_IS_NOT_AVAILABLE_REASON_1,
                            new Object[] { commandName, availabilityIndicator.getAvailabilityDescription() });
                }
                handleCondition(message, CommandProcessingException.COMMAND_INVALID_OR_UNAVAILABLE, userInput);
            }
        }
    } catch (IllegalArgumentException e1) {
        logWrapper.warning(CliUtil.stackTraceAsString(e1));
    } catch (IllegalAccessException e1) {
        logWrapper.warning(CliUtil.stackTraceAsString(e1));
    } catch (InvocationTargetException e1) {
        logWrapper.warning(CliUtil.stackTraceAsString(e1));
    }
    return parseResult;
}
From source file:org.apache.hadoop.hive.common.type.HiveChar.java
/**
 * Returns the char value with trailing pad spaces removed;
 * leading spaces are preserved.
 */
public String getStrippedValue() {
    final String padded = value;
    return StringUtils.stripEnd(padded, " ");
}
From source file:org.apache.hadoop.hive.ql.udf.generic.GenericUDFRTrim.java
/**
 * RTRIM operation: removes trailing space characters from {@code val}.
 */
@Override
protected String performOp(String val) {
    final String trailing = " ";
    return StringUtils.stripEnd(val, trailing);
}
From source file:org.apache.hadoop.hive.ql.udf.UDFRTrim.java
public Text evaluate(Text s) { if (s == null) { return null; }//ww w. j av a 2 s . c o m result.set(StringUtils.stripEnd(s.toString(), " ")); return result; }
From source file:org.apache.hive.ptest.execution.conf.UnitTestPropertiesParser.java
/**
 * Removes any of {@code stripChars} from both the start and the end of
 * {@code srcString}.
 *
 * <p>Delegates to {@link StringUtils#strip(String, String)}, which performs
 * exactly the stripStart + stripEnd composition the original code spelled
 * out in two calls.
 *
 * @param srcString the string to strip; null-safe (null returns null)
 * @param stripChars the set of characters to remove; null means whitespace
 * @return the stripped string
 */
private String stripEndAndStart(String srcString, String stripChars) {
    return StringUtils.strip(srcString, stripChars);
}
From source file:org.apache.ivory.converter.OozieFeedMapper.java
/**
 * Builds the Oozie replication workflow ACTION for copying a feed from
 * {@code srcCluster} to {@code trgCluster}.
 *
 * <p>Evaluates the source/target partition expressions, composes the
 * coord input path, and populates the workflow configuration properties
 * (cluster names, feed names, distcp source/target paths, relative paths).
 *
 * @throws IvoryException wrapping any failure while assembling the workflow
 */
private ACTION getReplicationWorkflowAction(Cluster srcCluster, Cluster trgCluster, Path wfPath, String wfName)
        throws IvoryException {
    ACTION replicationAction = new ACTION();
    WORKFLOW replicationWF = new WORKFLOW();
    try {
        replicationWF.setAppPath(getStoragePath(wfPath.toString()));
        Feed feed = getEntity();
        // Evaluate the source cluster's partition expression.
        String srcPart = FeedHelper
                .normalizePartitionExpression(FeedHelper.getCluster(feed, srcCluster.getName()).getPartition());
        srcPart = FeedHelper.evaluateClusterExp(srcCluster, srcPart);
        // Evaluate the target cluster's partition expression.
        String targetPart = FeedHelper
                .normalizePartitionExpression(FeedHelper.getCluster(feed, trgCluster.getName()).getPartition());
        targetPart = FeedHelper.evaluateClusterExp(trgCluster, targetPart);
        // Compose the coord input path with the combined partition expression.
        StringBuilder pathsWithPartitions = new StringBuilder();
        pathsWithPartitions.append("${coord:dataIn('input')}/")
                .append(FeedHelper.normalizePartitionExpression(srcPart, targetPart));
        Map<String, String> props = createCoordDefaultConfiguration(trgCluster, wfPath, wfName);
        props.put("srcClusterName", srcCluster.getName());
        props.put("srcClusterColo", srcCluster.getColo());
        props.put(ARG.feedNames.getPropName(), feed.getName());
        props.put(ARG.feedInstancePaths.getPropName(), pathsWithPartitions.toString());
        // Collapse duplicate slashes and drop trailing ones for the relative paths.
        String parts = pathsWithPartitions.toString().replaceAll("//+", "/");
        parts = StringUtils.stripEnd(parts, "/");
        props.put("sourceRelativePaths", parts);
        props.put("distcpSourcePaths", "${coord:dataIn('input')}");
        props.put("distcpTargetPaths", "${coord:dataOut('output')}");
        props.put("ivoryInPaths", pathsWithPartitions.toString());
        props.put("ivoryInputFeeds", feed.getName());
        replicationWF.setConfiguration(getCoordConfig(props));
        replicationAction.setWorkflow(replicationWF);
    } catch (Exception e) {
        // Broad catch is deliberate: any assembly failure is wrapped with context.
        throw new IvoryException("Unable to create replication workflow", e);
    }
    return replicationAction;
}