Usage examples for `java.io.File#isAbsolute()`, collected from open-source projects
public boolean isAbsolute()
From source file:org.apache.archiva.admin.repository.managed.DefaultManagedRepositoryAdmin.java
protected void addRepository(ManagedRepositoryConfiguration repository, Configuration configuration) throws RepositoryAdminException, IOException { // Normalize the path File file = new File(repository.getLocation()); if (!file.isAbsolute()) { // add appserver.base/repositories file = new File(getRegistry().getString("appserver.base") + File.separatorChar + "repositories", repository.getLocation()); }//from w w w . j a v a2 s . c o m repository.setLocation(file.getCanonicalPath()); if (!file.exists()) { file.mkdirs(); } if (!file.exists() || !file.isDirectory()) { throw new RepositoryAdminException( "Unable to add repository - no write access, can not create the root directory: " + file); } configuration.addManagedRepository(repository); }
From source file:org.eclipse.jubula.client.ui.rcp.widgets.autconfig.JavaAutConfigComponent.java
/**
 * @param filename the executable file path to check
 * @return true if the path of the given executable file is absolute
 */
private static boolean isFilePathAbsolute(String filename) {
    return new File(filename).isAbsolute();
}
From source file:languageTools.analyzer.mas.MASValidator.java
/**
 * Validates one agent-file entry of a MAS file: resolves the file path
 * (relative paths are resolved against the MAS file's directory), checks the
 * GOAL extension and existence, collects the optional key/value parameters,
 * registers the agent symbol, resolves the KR interface, and finally adds the
 * file to the MAS program when no problem was detected.
 */
@Override
public Void visitAgentFile(AgentFileContext ctx) {
    // True once any error was reported; the file is only added when false.
    boolean problem = false;
    // Get agent file name
    String path = visitString(ctx.string());
    File file = new File(path);
    if (!file.isAbsolute()) {
        // relative to path specified for MAS file
        file = new File(getPathRelativeToSourceFile(path));
    }
    // Check file extension: agent files must use the GOAL extension and exist on disk.
    String ext = FilenameUtils.getExtension(path);
    if (Extension.getFileExtension(file) != Extension.GOAL) {
        problem = reportError(MASError.AGENTFILE_OTHER_EXTENSION, ctx.string(), ext);
    } else if (!file.isFile()) {
        problem = reportError(MASError.AGENTFILE_COULDNOT_FIND, ctx.string(), file.getPath());
    }
    // Get (optional) parameters; duplicate keys keep the first value and only warn.
    Map<String, String> parameters = new HashMap<String, String>();
    for (AgentFileParContext parameter : ctx.agentFilePar()) {
        Map.Entry<String, String> keyValuePair = visitAgentFilePar(parameter);
        String key = keyValuePair.getKey();
        if (parameters.containsKey(key)) {
            reportWarning(MASWarning.AGENTFILE_DUPLICATE_KEY, parameter, key);
        } else {
            parameters.put(key, keyValuePair.getValue());
        }
    }
    // Construct agent symbol: an explicit "name" parameter wins over the file base name.
    String agentName;
    if (parameters.containsKey("name")) {
        agentName = parameters.get("name");
    } else {
        agentName = FilenameUtils.getBaseName(FilenameUtils.getName(path));
    }
    // Add agent symbol to symbol table for later reference (if key does not
    // yet exist).
    if (!this.agentFiles.define(new MASSymbol(agentName, file, getSourceInfo(ctx)))) {
        problem = reportWarning(MASWarning.AGENTFILES_DUPLICATE_NAME, ctx.string(), agentName);
    }
    // Get KR language
    String interfaceName = parameters.get("language");
    KRInterface krInterface = null;
    try {
        if (interfaceName == null) {
            // no parameter set, use default
            krInterface = KRFactory.getDefaultInterface();
        } else {
            krInterface = KRFactory.getInterface(interfaceName);
        }
    } catch (KRInterfaceNotSupportedException | KRInitFailedException e) {
        // NOTE(review): the result of reportError is NOT folded into 'problem' here,
        // so a KR-interface failure still lets the file be added below with a null
        // interface — confirm whether that is intended.
        reportError(MASError.KRINTERFACE_NOT_SUPPORTED, ctx.agentFilePar().get(0), interfaceName);
    }
    // Add agent file to MAS program (only if no problems were detected and
    // file does not yet exist).
    if (!problem && !getProgram().getAgentFiles().contains(file)) {
        getProgram().addAgentFile(file);
        getProgram().setKRInterface(file, krInterface);
    }
    return null; // Java says must return something even when Void
}
From source file:org.apache.sling.maven.jspc.JspcMojo.java
/** * Executes the compilation.//www . j a va2 s . com * * @throws JasperException If an error occurs */ private void executeInternal() throws JasperException { if (getLog().isDebugEnabled()) { getLog().debug("execute() starting for " + pages.size() + " pages."); } try { if (context == null) { initServletContext(); } if (includes == null) { includes = new String[] { "**/*.jsp" }; } // No explicit pages, we'll process all .jsp in the webapp if (pages.size() == 0) { scanFiles(sourceDirectory); } File uriRootF = new File(uriSourceRoot); if (!uriRootF.exists() || !uriRootF.isDirectory()) { throw new JasperException( "The source location '" + uriSourceRoot + "' must be an existing directory"); } for (String nextjsp : pages) { File fjsp = new File(nextjsp); if (!fjsp.isAbsolute()) { fjsp = new File(uriRootF, nextjsp); } if (!fjsp.exists()) { if (getLog().isWarnEnabled()) { getLog().warn("JSP file " + fjsp + " does not exist"); } continue; } String s = fjsp.getAbsolutePath(); if (s.startsWith(uriSourceRoot)) { nextjsp = s.substring(uriSourceRoot.length()); } if (nextjsp.startsWith("." + File.separatorChar)) { nextjsp = nextjsp.substring(2); } processFile(nextjsp); } } catch (JasperException je) { Throwable rootCause = je; while (rootCause instanceof JasperException && ((JasperException) rootCause).getRootCause() != null) { rootCause = ((JasperException) rootCause).getRootCause(); } if (rootCause != je) { rootCause.printStackTrace(); } throw je; } catch (/* IO */Exception ioe) { throw new JasperException(ioe); } }
From source file:org.apache.maven.plugin.cxx.CMakeMojo.java
/**
 * Creates or updates one of the generated CMake dependency script files,
 * injecting the current dependency list (and, for the maven variant, any
 * additional include roots) between well-known begin/end marker comments.
 *
 * @param aiDependenciesLib list of dependency library names (String elements)
 * @param bMavenDependencies true to update the maven-dependencies script,
 *        false to update the cmake-dependencies script
 */
protected void updateOrCreateCMakeDependenciesFile(List aiDependenciesLib, boolean bMavenDependencies) {
    String dependencieFile = (bMavenDependencies ? cmakeMavenDependenciesFile : cmakeDependenciesFile);
    String fullDependenciesFile = dependencieFile;
    File file = new File(dependencieFile);
    if (!file.isAbsolute()) {
        // $FB always use unix path separator with cmake even under windows !
        fullDependenciesFile = getProjectDir() + "/" + dependencieFile;
    }
    file = new File(fullDependenciesFile);
    // Ensure the script file exists before reading it.
    if (!file.exists()) {
        try {
            file.createNewFile();
        } catch (IOException e) {
            getLog().error(dependencieFile + " script can't be created at " + file.getAbsolutePath());
            return;
        }
    }
    // check file content
    InputStream dependenciesStream = null;
    String content = new String();
    try {
        dependenciesStream = new FileInputStream(file);
        content = IOUtils.toString(dependenciesStream, "UTF8");
    } catch (IOException e) {
        // shall not happen since file has been created
        getLog().error(dependencieFile + " script can't be opened at " + file.getAbsolutePath());
    } finally {
        getLog().debug("close input stream at reading");
        IOUtils.closeQuietly(dependenciesStream);
    }
    // Marker comments delimiting the sections this method rewrites in place.
    String beginDepsPattern = (bMavenDependencies
            ? (isDebugBuild() ? "# BEGIN MAVEN_DEBUG_DEPENDENCIES" : "# BEGIN MAVEN_OPTIMIZED_DEPENDENCIES")
            : "# BEGIN CMAKE_DEPENDENCIES");
    String endDepsPattern = (bMavenDependencies
            ? (isDebugBuild() ? "# END MAVEN_DEBUG_DEPENDENCIES" : "# END MAVEN_OPTIMIZED_DEPENDENCIES")
            : "# END CMAKE_DEPENDENCIES");
    String beginIncPattern = "# BEGIN MAVEN_INCLUDE_ROOTS";
    String endIncPattern = "# END MAVEN_INCLUDE_ROOTS";
    // reset file content if needed: when the file is empty or the markers are
    // missing, seed it from the bundled template resource.
    if (StringUtils.isEmpty(content) || content.indexOf(beginDepsPattern) == -1) {
        getLog().info(file.getAbsolutePath() + " content full update");
        try {
            dependenciesStream = getClass()
                    .getResourceAsStream((bMavenDependencies ? "/cmake-cpp-project/CMakeMavenDependencies.txt"
                            : "/cmake-cpp-project/CMakeDependencies.txt"));
            content = IOUtils.toString(dependenciesStream, "UTF8");
        } catch (IOException e) {
            getLog().error(dependencieFile + " default content not found ");
        } finally {
            getLog().debug("close input stream at full update");
            IOUtils.closeQuietly(dependenciesStream);
        }
    }
    // update file content
    // NOTE(review): these two literals are identical here — the differing
    // indentation widths were probably lost in transcription; confirm against
    // the upstream source.
    String simpleIndentation = "\n ";
    String doubleIndentation = "\n ";
    Iterator itDeps = aiDependenciesLib.iterator();
    StringBuilder allDepsBuilder = new StringBuilder(
            (bMavenDependencies ? doubleIndentation : simpleIndentation));
    while (itDeps.hasNext()) {
        String dep = (String) itDeps.next();
        if (bMavenDependencies) {
            // Maven dependencies link the generalized external artifact name,
            // tagged debug/optimized according to the current build type.
            String externalDep = generalizeDependencyFileName(dep, true);
            allDepsBuilder.append("target_link_libraries(${target} " + (isDebugBuild() ? "debug " : "optimized ")
                    + externalDep + ")" + doubleIndentation);
        } else {
            // CMake dependencies are linked only when the target exists, i.e.
            // when building an amalgamated cmake project.
            String cmakeDep = generalizeDependencyFileName(dep, false);
            allDepsBuilder.append("# If a \"" + cmakeDep
                    + "\" target has been define, this means we are building " + "an amalgamed cmake project"
                    + simpleIndentation + "# but maven dependencies can be used too" + simpleIndentation
                    + "if(TARGET " + cmakeDep + ")" + doubleIndentation + "message(\"Adding direct " + cmakeDep
                    + " cmake dependencies to target '${target}'\")" + doubleIndentation
                    + "target_link_libraries(${target} " + cmakeDep + ")" + simpleIndentation + "endif()"
                    + simpleIndentation);
        }
    }
    // adding additionalIncludeRoots in cmake maven dependencies file
    StringBuilder addIncsBuilder = new StringBuilder(doubleIndentation);
    if (bMavenDependencies && null != additionalIncludeRoots) {
        addIncsBuilder.append("include_directories( " + doubleIndentation);
        for (String includeRoot : additionalIncludeRoots) {
            addIncsBuilder.append("\"" + includeRoot + "\"" + doubleIndentation);
        }
        addIncsBuilder.append(")" + doubleIndentation);
        for (String includeRoot : additionalIncludeRoots) {
            addIncsBuilder
                    .append("message(\"Adding '" + includeRoot + "' additional include root.\")" + doubleIndentation);
        }
    }
    getLog().debug(dependencieFile + " depfile was : " + content);
    // quoteReplacement protects '$' and '\' in paths from Matcher.replaceAll().
    String allDeps = Matcher.quoteReplacement(allDepsBuilder.toString());
    //.replace( "$", "\\$" ); // Matcher replaceAll() is a bit rigid !
    getLog().debug(dependencieFile + " injected dependency will be : " + allDeps);
    // regexp multi-line replace, see http://stackoverflow.com/questions/4154239/java-regex-replaceall-multiline
    Pattern p1 = Pattern.compile(beginDepsPattern + ".*" + endDepsPattern, Pattern.DOTALL);
    Matcher m1 = p1.matcher(content);
    content = m1.replaceAll(beginDepsPattern + allDeps + endDepsPattern);
    if (bMavenDependencies && null != additionalIncludeRoots) {
        String addIncs = Matcher.quoteReplacement(addIncsBuilder.toString());
        //.replace( "$", "\\$" ); // Matcher replaceAll() is a bit rigid !
        getLog().debug(dependencieFile + " injected includes Roots will be : " + addIncs);
        Pattern p2 = Pattern.compile(beginIncPattern + ".*" + endIncPattern, Pattern.DOTALL);
        Matcher m2 = p2.matcher(content);
        content = m2.replaceAll(beginIncPattern + addIncs + endIncPattern);
    }
    getLog().debug(dependencieFile + " depfile now is : " + content);
    // Write the updated content back over the script file.
    OutputStream outStream = null;
    try {
        outStream = new FileOutputStream(file);
        IOUtils.write(content, outStream, "UTF8");
    } catch (IOException e) {
        getLog().error(dependencieFile + " script can't be written at " + file.getAbsolutePath() + e.toString());
    } finally {
        getLog().debug("close output stream at update");
        IOUtils.closeQuietly(outStream);
    }
}
From source file:org.codehaus.mojo.osxappbundle.CreateApplicationBundleMojo.java
/** * Copies given resources to the build directory. * /*from ww w. j a v a 2s.co m*/ * @param fileSets * A list of FileSet objects that represent additional resources to copy. * @throws MojoExecutionException * In case af a resource copying error. */ private void copyResources(List fileSets) throws MojoExecutionException { final String[] emptyStrArray = {}; for (Iterator it = fileSets.iterator(); it.hasNext();) { FileSet fileSet = (FileSet) it.next(); File resourceDirectory = new File(fileSet.getDirectory()); if (!resourceDirectory.isAbsolute()) { resourceDirectory = new File(project.getBasedir(), resourceDirectory.getPath()); } if (!resourceDirectory.exists()) { getLog().info("Additional resource directory does not exist: " + resourceDirectory); continue; } DirectoryScanner scanner = new DirectoryScanner(); scanner.setBasedir(resourceDirectory); if (fileSet.getIncludes() != null && !fileSet.getIncludes().isEmpty()) { scanner.setIncludes((String[]) fileSet.getIncludes().toArray(emptyStrArray)); } else { scanner.setIncludes(DEFAULT_INCLUDES); } if (fileSet.getExcludes() != null && !fileSet.getExcludes().isEmpty()) { scanner.setExcludes((String[]) fileSet.getExcludes().toArray(emptyStrArray)); } if (fileSet.isUseDefaultExcludes()) { scanner.addDefaultExcludes(); } scanner.scan(); List includedFiles = Arrays.asList(scanner.getIncludedFiles()); getLog().info("Copying " + includedFiles.size() + " additional resource" + (includedFiles.size() > 1 ? "s" : "")); for (Iterator j = includedFiles.iterator(); j.hasNext();) { String destination = (String) j.next(); File source = new File(resourceDirectory, destination); File destinationFile = new File(buildDirectory, destination); if (!destinationFile.getParentFile().exists()) { destinationFile.getParentFile().mkdirs(); } try { FileUtils.copyFile(source, destinationFile); } catch (IOException e) { throw new MojoExecutionException("Error copying additional resource " + source, e); } } } }
From source file:org.fuin.utils4j.Utils4J.java
/** * Unzips a file into a given directory. WARNING: Only relative path entries * are allowed inside the archive!/*from ww w .j a v a2s.c o m*/ * * @param zipFile * Source ZIP file - Cannot be <code>null</code> and must be a * valid ZIP file. * @param destDir * Destination directory - Cannot be <code>null</code> and must * exist. * @param wrapper * Callback interface to give the caller the chance to wrap the * ZIP input stream into another one. This is useful for example * to display a progress bar - Can be <code>null</code> if no * wrapping is required. * @param cancelable * Signals if the unzip should be canceled - Can be * <code>null</code> if no cancel option is required. * * @throws IOException * Error unzipping the file. */ public static void unzip(final File zipFile, final File destDir, final UnzipInputStreamWrapper wrapper, final Cancelable cancelable) throws IOException { checkNotNull("zipFile", zipFile); checkValidFile(zipFile); checkNotNull("destDir", destDir); checkValidDir(destDir); final ZipFile zip = new ZipFile(zipFile); try { final Enumeration enu = zip.entries(); while (enu.hasMoreElements() && ((cancelable == null) || !cancelable.isCanceled())) { final ZipEntry entry = (ZipEntry) enu.nextElement(); final File file = new File(entry.getName()); if (file.isAbsolute()) { throw new IllegalArgumentException( "Only relative path entries are allowed! 
[" + entry.getName() + "]"); } if (entry.isDirectory()) { final File dir = new File(destDir, entry.getName()); createIfNecessary(dir); } else { final File outFile = new File(destDir, entry.getName()); createIfNecessary(outFile.getParentFile()); final InputStream in; if (wrapper == null) { in = new BufferedInputStream(zip.getInputStream(entry)); } else { in = new BufferedInputStream( wrapper.wrapInputStream(zip.getInputStream(entry), entry, outFile)); } try { final OutputStream out = new BufferedOutputStream(new FileOutputStream(outFile)); try { final byte[] buf = new byte[4096]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } } finally { out.close(); } } finally { in.close(); } } } } finally { zip.close(); } }
From source file:catalina.users.MemoryUserDatabase.java
/** * Initialize access to this user database. * * @exception Exception if any exception is thrown during opening *///from w w w . ja v a 2 s . c om public void open() throws Exception { synchronized (groups) { synchronized (users) { // Erase any previous groups and users users.clear(); groups.clear(); roles.clear(); // Construct a reader for the XML input file (if it exists) File file = new File(pathname); if (!file.isAbsolute()) { file = new File(System.getProperty("catalina.base"), pathname); } if (!file.exists()) { return; } FileInputStream fis = new FileInputStream(file); // Construct a digester to read the XML input file Digester digester = new Digester(); digester.addFactoryCreate("tomcat-users/group", new MemoryGroupCreationFactory(this)); digester.addFactoryCreate("tomcat-users/role", new MemoryRoleCreationFactory(this)); digester.addFactoryCreate("tomcat-users/user", new MemoryUserCreationFactory(this)); // Parse the XML input file to load this database try { digester.parse(fis); fis.close(); } catch (Exception e) { try { fis.close(); } catch (Throwable t) { ; } throw e; } } } }
From source file:helma.main.Server.java
/** * Get a logger to use for output in this server. *///from ww w .j ava 2 s .co m public Log getLogger() { if (logger == null) { if (helmaLogging) { // set up system properties for helma.util.Logging String logDir = sysProps.getProperty("logdir", "log"); if (!"console".equals(logDir)) { // try to get the absolute logdir path // set up helma.logdir system property File dir = new File(logDir); if (!dir.isAbsolute()) { dir = new File(hopHome, logDir); } logDir = dir.getAbsolutePath(); } System.setProperty("helma.logdir", logDir); } logger = LogFactory.getLog("helma.server"); } return logger; }
From source file:com.streamsets.pipeline.stage.origin.hdfs.cluster.ClusterHdfsSource.java
Configuration getHadoopConfiguration(List<ConfigIssue> issues) { Configuration hadoopConf = new Configuration(); if (conf.hdfsKerberos) { hadoopConf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, UserGroupInformation.AuthenticationMethod.KERBEROS.name()); try {/*from w w w .j av a 2 s.c o m*/ hadoopConf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "hdfs/_HOST@" + HadoopSecurityUtil.getDefaultRealm()); } catch (Exception ex) { if (!conf.hdfsConfigs.containsKey(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY)) { issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), null, Errors.HADOOPFS_28, ex.getMessage())); } } } if (conf.hdfsConfDir != null && !conf.hdfsConfDir.isEmpty()) { File hadoopConfigDir = new File(conf.hdfsConfDir); if (hadoopConfigDir.isAbsolute()) { // Do not allow absolute hadoop config directory in cluster mode issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), ClusterHdfsConfigBean.CLUSTER_HDFS_CONFIG_BEAN_PREFIX + "hadoopConfDir", Errors.HADOOPFS_29, conf.hdfsConfDir)); } else { hadoopConfigDir = new File(getContext().getResourcesDirectory(), conf.hdfsConfDir) .getAbsoluteFile(); } if (!hadoopConfigDir.exists()) { issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), ClusterHdfsConfigBean.CLUSTER_HDFS_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_25, hadoopConfigDir.getPath())); } else if (!hadoopConfigDir.isDirectory()) { issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), ClusterHdfsConfigBean.CLUSTER_HDFS_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_26, hadoopConfigDir.getPath())); } else { File coreSite = new File(hadoopConfigDir, CORE_SITE_XML); if (coreSite.exists()) { if (!coreSite.isFile()) { issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), ClusterHdfsConfigBean.CLUSTER_HDFS_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_27, coreSite.getPath())); } hadoopConf.addResource(new Path(coreSite.getAbsolutePath())); } else { 
issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), ClusterHdfsConfigBean.CLUSTER_HDFS_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_30, CORE_SITE_XML)); } File hdfsSite = new File(hadoopConfigDir, HDFS_SITE_XML); if (hdfsSite.exists()) { if (!hdfsSite.isFile()) { issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), ClusterHdfsConfigBean.CLUSTER_HDFS_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_27, hdfsSite.getPath())); } hadoopConf.addResource(new Path(hdfsSite.getAbsolutePath())); } else { issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), ClusterHdfsConfigBean.CLUSTER_HDFS_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_30, HDFS_SITE_XML)); } File yarnSite = new File(hadoopConfigDir, YARN_SITE_XML); if (yarnSite.exists()) { if (!yarnSite.isFile()) { issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), ClusterHdfsConfigBean.CLUSTER_HDFS_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_27, yarnSite.getPath())); } hadoopConf.addResource(new Path(yarnSite.getAbsolutePath())); } else { issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), ClusterHdfsConfigBean.CLUSTER_HDFS_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_30, YARN_SITE_XML)); } File mapredSite = new File(hadoopConfigDir, MAPRED_SITE_XML); if (mapredSite.exists()) { if (!mapredSite.isFile()) { issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), ClusterHdfsConfigBean.CLUSTER_HDFS_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_27, mapredSite.getPath())); } hadoopConf.addResource(new Path(mapredSite.getAbsolutePath())); } else { issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), ClusterHdfsConfigBean.CLUSTER_HDFS_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_30, MAPRED_SITE_XML)); } } } for (Map.Entry<String, String> config : conf.hdfsConfigs.entrySet()) { hadoopConf.set(config.getKey(), config.getValue()); } return hadoopConf; }