List of usage examples for org.apache.commons.lang StringUtils.replaceChars

public static String replaceChars(String str, char searchChar, char replaceChar)
public static String replaceChars(String str, String searchChars, String replaceChars)

Replaces multiple characters in a String in one go. The char overload replaces every occurrence of searchChar with replaceChar. The String overload replaces each character found in searchChars with the character at the same position in replaceChars; if replaceChars is shorter, the leftover search characters are simply deleted. Both overloads are null-safe and return null for a null input String. The examples below use both overloads.
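Before the project examples, a minimal self-contained sketch of both overloads (all values are illustrative and match the behavior documented in the commons-lang javadoc):

import org.apache.commons.lang.StringUtils;

public class ReplaceCharsDemo {
    public static void main(String[] args) {
        // char overload: replace every occurrence of one character
        System.out.println(StringUtils.replaceChars("org.apache.commons", '.', '/')); // org/apache/commons

        // String overload: 'b' -> 'y', 'c' -> 'z' (position-wise mapping)
        System.out.println(StringUtils.replaceChars("abcba", "bc", "yz"));            // ayzya

        // a shorter replacement String deletes the leftover search characters
        System.out.println(StringUtils.replaceChars("abcba", "bc", "y"));             // ayya

        // null-safe: a null input returns null
        System.out.println(StringUtils.replaceChars(null, "bc", "yz"));               // null
    }
}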
From source file:org.apache.archiva.metadata.repository.storage.maven2.RepositoryModelResolver.java
protected File findTimeStampedSnapshotPom(String groupId, String artifactId, String version,
                                          String parentDirectory) {
    // reading metadata if there
    File mavenMetadata = new File(parentDirectory, METADATA_FILENAME);
    if (mavenMetadata.exists()) {
        try {
            ArchivaRepositoryMetadata archivaRepositoryMetadata = MavenMetadataReader.read(mavenMetadata);
            SnapshotVersion snapshotVersion = archivaRepositoryMetadata.getSnapshotVersion();
            if (snapshotVersion != null) {
                String lastVersion = snapshotVersion.getTimestamp();
                int buildNumber = snapshotVersion.getBuildNumber();
                String snapshotPath = StringUtils.replaceChars(groupId, '.', '/') + '/' + artifactId + '/'
                        + version + '/' + artifactId + '-'
                        + StringUtils.remove(version, "-" + VersionUtil.SNAPSHOT)
                        + '-' + lastVersion + '-' + buildNumber + ".pom";
                log.debug("use snapshot path {} for maven coordinate {}:{}:{}", snapshotPath, groupId,
                        artifactId, version);
                File model = new File(basedir, snapshotPath);
                //model = pathTranslator.toFile( basedir, groupId, artifactId, lastVersion, filename );
                if (model.exists()) {
                    return model;
                }
            }
        } catch (XMLException e) {
            log.warn("fail to read {}, {}", mavenMetadata.getAbsolutePath(), e.getCause());
        }
    }
    return null;
}
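To make the path construction concrete, here is what snapshotPath evaluates to for made-up coordinates (none of these values come from the Archiva source; assumes org.apache.commons.lang.StringUtils is imported):

String groupId = "org.example";          // hypothetical
String artifactId = "demo";              // hypothetical
String version = "1.0-SNAPSHOT";         // hypothetical
String lastVersion = "20200101.120000";  // hypothetical snapshot timestamp from maven-metadata.xml
int buildNumber = 1;                     // hypothetical build number

String snapshotPath = StringUtils.replaceChars(groupId, '.', '/') + '/' + artifactId + '/' + version + '/'
        + artifactId + '-' + StringUtils.remove(version, "-SNAPSHOT")
        + '-' + lastVersion + '-' + buildNumber + ".pom";
// -> "org/example/demo/1.0-SNAPSHOT/demo-1.0-20200101.120000-1.pom"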
From source file:org.apache.archiva.repository.content.maven2.ManagedDefaultRepositoryContent.java
@Override
public void deleteGroupId(String groupId) throws ContentNotFoundException {
    String path = StringUtils.replaceChars(groupId, '.', '/');
    File directory = new File(getRepoRoot(), path);
    if (directory.exists()) {
        try {
            FileUtils.deleteDirectory(directory);
        } catch (IOException e) {
            log.warn("skip error deleting directory {}:", directory.getPath(), e);
        }
    }
}
From source file:org.apache.archiva.repository.metadata.MetadataToolsTest.java
private void prepTestRepo(ManagedRepositoryContent repo, ProjectReference reference) throws IOException {
    String groupDir = StringUtils.replaceChars(reference.getGroupId(), '.', '/');
    String path = groupDir + "/" + reference.getArtifactId();

    File srcRepoDir = new File("src/test/repositories/metadata-repository");
    File srcDir = new File(srcRepoDir, path);
    File destDir = new File(repo.getRepoRoot(), path);

    assertTrue("Source Dir exists: " + srcDir, srcDir.exists());
    destDir.mkdirs();

    FileUtils.copyDirectory(srcDir, destDir);
}
From source file:org.apache.archiva.rest.services.AbstractRestService.java
protected String getArtifactUrl(Artifact artifact, String repositoryId) throws ArchivaRestServiceException {
    try {
        if (httpServletRequest == null) {
            return null;
        }
        StringBuilder sb = new StringBuilder(getBaseUrl());
        sb.append("/repository");
        // when the artifact comes from a remote repository we have here the remote repo id;
        // we must replace it with a valid managed one available for the user.
        if (StringUtils.isEmpty(repositoryId)) {
            List<String> userRepos = userRepositories.getObservableRepositoryIds(getPrincipal());
            // is it a good one? if yes nothing to do
            // if not, search the repo which is the proxy for this remote
            if (!userRepos.contains(artifact.getContext())) {
                for (Map.Entry<String, List<ProxyConnector>> entry : proxyConnectorAdmin
                        .getProxyConnectorAsMap().entrySet()) {
                    for (ProxyConnector proxyConnector : entry.getValue()) {
                        if (StringUtils.equals("remote-" + proxyConnector.getTargetRepoId(), artifact.getContext())
                                && userRepos.contains(entry.getKey())) {
                            sb.append('/').append(entry.getKey());
                        }
                    }
                }
            } else {
                sb.append('/').append(artifact.getContext());
            }
        } else {
            sb.append('/').append(repositoryId);
        }
        sb.append('/').append(StringUtils.replaceChars(artifact.getGroupId(), '.', '/'));
        sb.append('/').append(artifact.getArtifactId());
        if (VersionUtil.isSnapshot(artifact.getVersion())) {
            sb.append('/').append(VersionUtil.getBaseVersion(artifact.getVersion()));
        } else {
            sb.append('/').append(artifact.getVersion());
        }
        sb.append('/').append(artifact.getArtifactId());
        sb.append('-').append(artifact.getVersion());
        if (StringUtils.isNotBlank(artifact.getClassifier())) {
            sb.append('-').append(artifact.getClassifier());
        }
        // maven-plugin packaging is a jar
        if (StringUtils.equals("maven-plugin", artifact.getPackaging())) {
            sb.append("jar");
        } else {
            sb.append('.').append(artifact.getFileExtension());
        }
        return sb.toString();
    } catch (Exception e) {
        throw new ArchivaRestServiceException(e.getMessage(),
                Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
    }
}
From source file:org.apache.archiva.web.rss.RssFeedServlet.java
@Override
public void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
    String repoId = null;
    String groupId = null;
    String artifactId = null;

    String url = StringUtils.removeEnd(req.getRequestURL().toString(), "/");

    if (StringUtils.countMatches(StringUtils.substringAfter(url, "feeds/"), "/") > 0) {
        artifactId = StringUtils.substringAfterLast(url, "/");
        groupId = StringUtils.substringBeforeLast(StringUtils.substringAfter(url, "feeds/"), "/");
        groupId = StringUtils.replaceChars(groupId, '/', '.');
    } else if (StringUtils.countMatches(StringUtils.substringAfter(url, "feeds/"), "/") == 0) {
        // we receive feeds?babla=ded which is not correct
        if (StringUtils.countMatches(url, "feeds?") > 0) {
            res.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid request url.");
            return;
        }
        repoId = StringUtils.substringAfterLast(url, "/");
    } else {
        res.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid request url.");
        return;
    }

    RssFeedProcessor processor = null;

    try {
        Map<String, String> map = new HashMap<>();
        SyndFeed feed = null;

        if (isAllowed(req, repoId, groupId, artifactId)) {
            if (repoId != null) {
                // new artifacts in repo feed request
                processor = newArtifactsprocessor;
                map.put(RssFeedProcessor.KEY_REPO_ID, repoId);
            } else if ((groupId != null) && (artifactId != null)) {
                // TODO: this only works for guest - we could pass in the list of repos
                // new versions of artifact feed request
                processor = newVersionsprocessor;
                map.put(RssFeedProcessor.KEY_GROUP_ID, groupId);
                map.put(RssFeedProcessor.KEY_ARTIFACT_ID, artifactId);
            }
        } else {
            res.sendError(HttpServletResponse.SC_UNAUTHORIZED, USER_NOT_AUTHORIZED);
            return;
        }

        RepositorySession repositorySession = repositorySessionFactory.createSession();
        try {
            feed = processor.process(map, repositorySession.getRepository());
        } finally {
            repositorySession.close();
        }
        if (feed == null) {
            res.sendError(HttpServletResponse.SC_NO_CONTENT, "No information available.");
            return;
        }

        res.setContentType(MIME_TYPE);

        if (repoId != null) {
            feed.setLink(req.getRequestURL().toString());
        } else if ((groupId != null) && (artifactId != null)) {
            feed.setLink(req.getRequestURL().toString());
        }

        SyndFeedOutput output = new SyndFeedOutput();
        output.output(feed, res.getWriter());
    } catch (UserNotFoundException unfe) {
        log.debug(COULD_NOT_AUTHENTICATE_USER, unfe);
        res.sendError(HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER);
    } catch (AccountLockedException acce) {
        res.sendError(HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER);
    } catch (AuthenticationException authe) {
        log.debug(COULD_NOT_AUTHENTICATE_USER, authe);
        res.sendError(HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER);
    } catch (FeedException ex) {
        log.debug(COULD_NOT_GENERATE_FEED_ERROR, ex);
        res.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, COULD_NOT_GENERATE_FEED_ERROR);
    } catch (MustChangePasswordException e) {
        res.sendError(HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER);
    } catch (UnauthorizedException e) {
        log.debug(e.getMessage());
        if (repoId != null) {
            res.setHeader("WWW-Authenticate",
                    "Basic realm=\"Repository Archiva Managed " + repoId + " Repository");
        } else {
            res.setHeader("WWW-Authenticate", "Basic realm=\"Artifact " + groupId + ":" + artifactId);
        }
        res.sendError(HttpServletResponse.SC_UNAUTHORIZED, USER_NOT_AUTHORIZED);
    }
}
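Here replaceChars runs in the opposite direction: the groupId segment of the feed URL is converted from path form back to its dotted form. A small fragment with a hypothetical feed URL (not taken from the servlet; assumes the StringUtils import):

String url = "http://localhost:8080/archiva/feeds/org/apache/archiva/archiva-model";        // hypothetical
String artifactId = StringUtils.substringAfterLast(url, "/");                               // "archiva-model"
String groupId = StringUtils.substringBeforeLast(StringUtils.substringAfter(url, "feeds/"), "/"); // "org/apache/archiva"
groupId = StringUtils.replaceChars(groupId, '/', '.');                                      // "org.apache.archiva"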
From source file:org.apache.click.util.ClickUtils.java
/**
 * Deploys required files (from a file list) for a control that respects a specific convention.
 * <p/>
 * <b>Convention:</b>
 * <p/>
 * There's a descriptor file generated by the <code>tools/standalone/dev-tasks/ListFilesTask</code>.
 * The files to deploy are all in a subdirectory placed in the same directory with the control.
 * See documentation for more details.
 * <p/>
 *
 * <b>Usage:</b>
 * <p/>
 * In your Control simply use the code below, and everything should work automatically.
 * <pre class="prettyprint">
 * public void onDeploy(ServletContext servletContext) {
 *     ClickUtils.deployFileList(servletContext, HeavyControl.class, "click");
 * } </pre>
 *
 * @param servletContext the web application's servlet context
 * @param controlClass the class of the Control that has files for deployment
 * @param targetDir target directory where to deploy the files to. In most cases this
 * is only the reserved directory <code>click</code>
 */
public static void deployFileList(ServletContext servletContext, Class<? extends Control> controlClass,
        String targetDir) {
    String packageName = ClassUtils.getPackageName(controlClass);
    packageName = StringUtils.replaceChars(packageName, '.', '/');
    packageName = "/" + packageName;
    String controlName = ClassUtils.getShortClassName(controlClass);

    ConfigService configService = getConfigService(servletContext);
    LogService logService = configService.getLogService();

    String descriptorFile = packageName + "/" + controlName + ".files";
    logService.debug("Use deployment descriptor file:" + descriptorFile);

    try {
        InputStream is = getResourceAsStream(descriptorFile, ClickUtils.class);
        List fileList = IOUtils.readLines(is);
        if (fileList == null || fileList.isEmpty()) {
            logService.info("there are no files to deploy for control " + controlClass.getName());
            return;
        }

        // a target dir list is required because ClickUtils.deployFile() is too inflexible to autodetect
        // required subdirectories.
        List<String> targetDirList = new ArrayList<String>(fileList.size());
        for (int i = 0; i < fileList.size(); i++) {
            String filePath = (String) fileList.get(i);
            String destination = "";
            int index = filePath.lastIndexOf('/');
            if (index != -1) {
                destination = filePath.substring(0, index + 1);
            }
            targetDirList.add(i, targetDir + "/" + destination);
            fileList.set(i, packageName + "/" + filePath);
        }

        for (int i = 0; i < fileList.size(); i++) {
            String source = (String) fileList.get(i);
            String targetDirName = targetDirList.get(i);
            ClickUtils.deployFile(servletContext, source, targetDirName);
        }

    } catch (IOException e) {
        String msg = "error occurred getting resource " + descriptorFile + ", error " + e;
        logService.warn(msg);
    }
}
From source file:org.apache.cocoon.components.language.programming.java.EclipseJavaCompiler.java
private String makeClassName(String fileName) throws IOException {
    File origFile = new File(fileName);
    String canonical = null;
    if (origFile.exists()) {
        canonical = origFile.getCanonicalPath().replace('\\', '/');
    }

    String str = fileName;
    str = str.replace('\\', '/');

    if (sourceDir != null) {
        String prefix = new File(sourceDir).getCanonicalPath().replace('\\', '/');
        if (canonical != null) {
            if (canonical.startsWith(prefix)) {
                String result = canonical.substring(prefix.length() + 1, canonical.length() - 5);
                result = result.replace('/', '.');
                return result;
            }
        } else {
            File t = new File(sourceDir, fileName);
            if (t.exists()) {
                str = t.getCanonicalPath().replace('\\', '/');
                String result = str.substring(prefix.length() + 1, str.length() - 5).replace('/', '.');
                return result;
            }
        }
    }

    if (fileName.endsWith(".java")) {
        fileName = fileName.substring(0, fileName.length() - 5);
    }

    return StringUtils.replaceChars(fileName, "\\/", "..");
}
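The final line uses the three-String overload: each character of searchChars is replaced by the character at the same index in replaceChars, so both '\' and '/' map to '.'. A small fragment with a made-up path (assumes the StringUtils import):

// '\' -> '.' and '/' -> '.' (position-wise mapping of "\\/" onto "..")
String className = StringUtils.replaceChars("org/apache/cocoon\\Sample", "\\/", "..");
// -> "org.apache.cocoon.Sample"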
From source file:org.apache.jena.fuseki.FusekiLib.java
public static String safeParameter(HttpServletRequest request, String pName) {
    String value = request.getParameter(pName);
    value = StringUtils.replaceChars(value, "\r", "");
    value = StringUtils.replaceChars(value, "\n", "");
    return value;
}
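Passing an empty replacement String deletes the matched characters, which is what strips the CR and LF here. Because unmatched search characters are dropped whenever the replacement is shorter, the two calls could also be collapsed into one; and since replaceChars is null-safe, a missing parameter simply passes null through. A sketch with a hypothetical value (assumes the StringUtils import):

String raw = "value\r\nwith-newlines";                   // hypothetical parameter value
String safe = StringUtils.replaceChars(raw, "\r\n", ""); // both '\r' and '\n' are deleted
// -> "valuewith-newlines"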
From source file:org.apache.jena.fuseki.servlets.ResponseResultSet.java
private static void jsonOutput(HttpAction action, String contentType, final ResultSet resultSet,
        final Boolean booleanResult) {
    OutputContent proc = new OutputContent() {
        @Override
        public void output(ServletOutputStream out) {
            if (resultSet != null)
                ResultSetFormatter.outputAsJSON(out, resultSet);
            if (booleanResult != null)
                ResultSetFormatter.outputAsJSON(out, booleanResult);
        }
    };

    try {
        String callback = ResponseOps.paramCallback(action.request);
        ServletOutputStream out = action.response.getOutputStream();

        if (callback != null) {
            callback = StringUtils.replaceChars(callback, "\r", "");
            callback = StringUtils.replaceChars(callback, "\n", "");
            out.print(callback);
            out.println("(");
        }

        output(action, contentType, WebContent.charsetUTF8, proc);

        if (callback != null)
            out.println(")");
    } catch (IOException ex) {
        errorOccurred(ex);
    }
}
From source file:org.apache.kylin.common.KylinConfigBase.java
public String getHdfsWorkingDirectory() {
    if (cachedHdfsWorkingDirectory != null)
        return cachedHdfsWorkingDirectory;

    String root = getOptional("kylin.env.hdfs-working-dir", "/kylin");

    Path path = new Path(root);
    if (!path.isAbsolute())
        throw new IllegalArgumentException("kylin.env.hdfs-working-dir must be absolute, but got " + root);

    // make sure path is qualified
    try {
        FileSystem fs = path.getFileSystem(new Configuration());
        path = fs.makeQualified(path);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }

    // append metadata-url prefix
    root = new Path(path, StringUtils.replaceChars(getMetadataUrlPrefix(), ':', '-')).toString();

    if (!root.endsWith("/"))
        root += "/";

    cachedHdfsWorkingDirectory = root;
    if (cachedHdfsWorkingDirectory.startsWith("file:")) {
        cachedHdfsWorkingDirectory = cachedHdfsWorkingDirectory.replace("file:", "file://");
    }
    return cachedHdfsWorkingDirectory;
}
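Here the char overload sanitizes the metadata URL prefix before it is appended as a Hadoop Path component, presumably because ':' would otherwise be read as a URI scheme or port separator. A fragment with a made-up prefix (the value is not from the Kylin source; assumes the StringUtils import):

String prefix = "kylin_metadata:v1";                             // hypothetical metadata-url prefix
String pathSegment = StringUtils.replaceChars(prefix, ':', '-'); // -> "kylin_metadata-v1"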