List of usage examples for java.io.File#isAbsolute()
public boolean isAbsolute()
From source file:org.gradle.api.internal.project.AbstractProject.java
/**
 * Resolves the given path to a path relative to the project directory.
 *
 * @param path the path to resolve; converted via {@code toString()}
 * @return the input as-is when already relative; the path relative to the
 *         project directory when the project directory is an ancestor;
 *         {@code null} when it is not
 */
public File findRelativePath(Object path) {
    File candidate = new File(path.toString());
    // A path that is already relative is returned untouched.
    if (!candidate.isAbsolute()) {
        return candidate;
    }
    String relative = "";
    File ancestor = candidate;
    // Walk up the parent chain, prepending each name segment, until we hit
    // the project directory (success) or run off the filesystem root (failure).
    while (ancestor != null && !ancestor.equals(getProjectDir())) {
        relative = ancestor.getName() + "/" + relative;
        ancestor = ancestor.getParentFile();
    }
    return ancestor == null ? null : new File(relative);
}
From source file:org.broad.igv.feature.genome.GenomeManager.java
/** * Creates a genome descriptor.//from w w w . ja v a 2 s .com */ public static GenomeDescriptor parseGenomeArchiveFile(File f) throws IOException { if (!f.exists()) { throw new FileNotFoundException("Genome file: " + f.getAbsolutePath() + " does not exist."); } GenomeDescriptor genomeDescriptor = null; Map<String, ZipEntry> zipEntries = new HashMap(); ZipFile zipFile = new ZipFile(f); FileInputStream fileInputStream = null; try { fileInputStream = new FileInputStream(f); ZipInputStream zipInputStream = new ZipInputStream(fileInputStream); ZipEntry zipEntry = zipInputStream.getNextEntry(); while (zipEntry != null) { String zipEntryName = zipEntry.getName(); zipEntries.put(zipEntryName, zipEntry); if (zipEntryName.equalsIgnoreCase(Globals.GENOME_ARCHIVE_PROPERTY_FILE_NAME)) { InputStream inputStream = zipFile.getInputStream(zipEntry); Properties properties = new Properties(); properties.load(inputStream); String cytobandZipEntryName = properties.getProperty(Globals.GENOME_ARCHIVE_CYTOBAND_FILE_KEY); String geneFileName = properties.getProperty(Globals.GENOME_ARCHIVE_GENE_FILE_KEY); String chrAliasFileName = properties.getProperty(Globals.GENOME_CHR_ALIAS_FILE_KEY); String sequenceLocation = properties .getProperty(Globals.GENOME_ARCHIVE_SEQUENCE_FILE_LOCATION_KEY); if ((sequenceLocation != null) && !HttpUtils.isRemoteURL(sequenceLocation)) { File sequenceFolder = null; // Relative or absolute location? 
We use a few redundant methods to check, //since we don't know what platform the file was created on or is running on sequenceFolder = new File(sequenceLocation); boolean isAbsolutePath = sequenceFolder.isAbsolute() || sequenceLocation.startsWith("/") || sequenceLocation.startsWith("\\"); if (!isAbsolutePath) { sequenceFolder = new File(f.getParent(), sequenceLocation); } sequenceLocation = sequenceFolder.getCanonicalPath(); sequenceLocation.replace('\\', '/'); } boolean chrNamesAltered = parseBooleanPropertySafe(properties, "filenamesAltered"); boolean fasta = parseBooleanPropertySafe(properties, "fasta"); boolean fastaDirectory = parseBooleanPropertySafe(properties, "fastaDirectory"); boolean chromosomesAreOrdered = parseBooleanPropertySafe(properties, Globals.GENOME_ORDERED_KEY); boolean hasCustomSequenceLocation = parseBooleanPropertySafe(properties, Globals.GENOME_ARCHIVE_CUSTOM_SEQUENCE_LOCATION_KEY); String fastaFileNameString = properties.getProperty("fastaFiles"); String url = properties.getProperty(Globals.GENOME_URL_KEY); // The new descriptor genomeDescriptor = new GenomeZipDescriptor( properties.getProperty(Globals.GENOME_ARCHIVE_NAME_KEY), chrNamesAltered, properties.getProperty(Globals.GENOME_ARCHIVE_ID_KEY), cytobandZipEntryName, geneFileName, chrAliasFileName, properties.getProperty(Globals.GENOME_GENETRACK_NAME, "Gene"), sequenceLocation, hasCustomSequenceLocation, zipFile, zipEntries, chromosomesAreOrdered, fasta, fastaDirectory, fastaFileNameString); if (url != null) { genomeDescriptor.setUrl(url); } } zipEntry = zipInputStream.getNextEntry(); } } finally { try { if (fileInputStream != null) { fileInputStream.close(); } } catch (IOException ex) { log.warn("Error closing imported genome zip stream!", ex); } } return genomeDescriptor; }
From source file:com.intel.mtwilson.MyConfiguration.java
/**
 * Resolves a configuration file path: an absolute path is used as-is, a
 * relative path is resolved against the Mt Wilson configuration directory.
 *
 * @param path absolute or relative file path
 * @return the resolved file (existence is not checked)
 */
private File findConfigurationFile(String path) {
    File f = new File(path);
    if (f.isAbsolute()) {
        return f;
    }
    // Use the two-argument File constructor instead of manual
    // File.separator concatenation; it inserts the separator correctly.
    return new File(getMtWilsonConf(), path);
}
From source file:com.snowplowanalytics.snowplow.collectors.clojure.SnowplowAccessLogValve.java
/** * Create a File object based on the current log file name. * Directories are created as needed but the underlying file * is not created or opened.// ww w . j av a2 s . c om * * @param useDateStamp include the timestamp in the file name. * @return the log file object */ private File getLogFile(boolean useDateStamp) { // Create the directory if necessary File dir = new File(directory); if (!dir.isAbsolute()) { dir = new File(getContainer().getCatalinaBase(), directory); } if (!dir.mkdirs() && !dir.isDirectory()) { log.error(sm.getString("accessLogValve.openDirFail", dir)); } // Calculate the current log file name File pathname; if (useDateStamp) { pathname = new File(dir.getAbsoluteFile(), prefix + dateStamp + suffix); } else { pathname = new File(dir.getAbsoluteFile(), prefix + suffix); } File parent = pathname.getParentFile(); if (!parent.mkdirs() && !parent.isDirectory()) { log.error(sm.getString("accessLogValve.openDirFail", parent)); } return pathname; }
From source file:it.geosolutions.geobatch.unredd.script.ingestion.IngestionAction.java
protected File executeUnzipped(File unzipDir) throws ActionException, IOException { /*************/*from w w w .ja v a 2s. c o m*/ * * read the content of the XML file * ***********/ this.listenerForwarder.progressing(10, "Parsing " + INFO_XML); File infoXmlFile = new File(unzipDir, INFO_XML); if (LOGGER.isInfoEnabled()) { LOGGER.info("Reading XML parameters from " + infoXmlFile); } Request request = null; try { request = RequestJDOMReader.parseFile(infoXmlFile); } catch (Exception e) { throw new ActionException(this, "Error reading info.xml file, Are you sure to have built the input zip pkg in the right way? Note that all the content must be placed in the zip root folder, no any other subfolder are allowed...", e); } if (request.getFormat() == null) { throw new ActionException(this, "the format cannot be null."); } final String layername = request.getLayername(); if (layername == null) throw new ActionException(this, "the layername cannot be null."); final String year = request.getYear(); if (year == null) throw new ActionException(this, "the year cannot be null."); if (!year.matches("\\d{4}")) { throw new ActionException(this, "Bad format for year parameter (" + year + ")"); } final String month = request.getMonth(); if (month != null && !month.matches("\\d\\d?")) throw new ActionException(this, "Bad format for month parameter (" + month + ")"); final String day = request.getDay(); if (month != null && !month.matches("\\d\\d?")) throw new ActionException(this, "Bad format for month parameter (" + day + ")"); final String srcFilename = request.buildFileName(); // build the name of the snapshot final String layerUpdateName = NameUtils.buildLayerUpdateName(layername, year, month, day); if (LOGGER.isInfoEnabled()) { LOGGER.info("Info: layername:" + layername + " year:" + year + " month:" + month + " day:" + day); } this.listenerForwarder.progressing(12, "Info from xml file: layername:" + layername + " year:" + year + " month:" + month + " day:" + day); if 
(LOGGER.isDebugEnabled()) { LOGGER.debug("XML parameter settings : [layer name = " + layername + "], [year = " + year + "], [month = " + month + "], [day = " + day + "], [ file name = " + srcFilename + "]"); LOGGER.debug("XML parameter settings : [layer update name = " + layerUpdateName + "]"); } File unzippedDataDir = new File(unzipDir, DATA_DIR_NAME); File dataFile = new File(unzippedDataDir, srcFilename); if (!dataFile.isFile()) { throw new ActionException(this, "Could not read main data file " + dataFile); } /*****************/ GeoStoreUtil geostore = new GeoStoreUtil(cfg.getGeoStoreConfig(), this.getTempDir()); /****************** * Load Layer data ******************/ this.listenerForwarder.progressing(15, "Searching layer in GeoStore"); final Resource layerRes; try { layerRes = geostore.searchLayer(layername); } catch (Exception e) { throw new ActionException(this, "Error loading Layer " + layername, e); } if (layerRes == null) throw new ActionException(this, "Layer not found: " + layername); UNREDDLayer layer = new UNREDDLayer(layerRes); LOGGER.info("Layer resource found "); if (!layer.getAttribute(Attributes.LAYERTYPE).equalsIgnoreCase(request.getFormat().getName())) throw new ActionException(this, "Bad Layer format " + "(declared:" + request.getFormat().getName() + ", expected:" + layer.getAttribute(Attributes.LAYERTYPE)); // this attribute is read for moving the raster file to the destination directory, not for rasterization // Going to get the staging mosaic dir path and create the mosaic dir if it still doesn't exists. String mosaicDirPath = layer.getAttribute(UNREDDLayer.Attributes.MOSAICPATH); if (mosaicDirPath == null) { throw new ActionException(this, "Null mosaic directory for layer: '" + layername + "'... 
check the layer configuration on geostore"); } File mosaicDir = new File(mosaicDirPath); MosaicDirBuilder.buildMosaicDir(mosaicDir, cfg.getIndexerPath(), NameUtils.TIME_REGEX); if (!mosaicDir.isDirectory() && !mosaicDir.isAbsolute()) { throw new ActionException(this, "Bad mosaic directory for layer '" + layername + "': '" + mosaicDir + "'... create it or check the layer configuration on geostore"); } // ****************** // Check for LayerUpdate // ****************** this.listenerForwarder.progressing(20, "Check for existing LayerUpdate in GeoStore"); Resource existingLayerUpdate = null; try { existingLayerUpdate = geostore.searchLayerUpdate(layername, year, month, day); } catch (Exception e) { LOGGER.debug("Parameter : [layerSnapshot=" + layerUpdateName + "]"); throw new ActionException(this, "Error searching for a LayerUpdate (layer:" + layername + " year:" + year + " month:" + month + ")", e); } if (existingLayerUpdate != null) { throw new ActionException(this, "LayerUpdate already exists (layer:" + layername + " year:" + year + " month:" + month + ")"); } /******************************** * * Image processing * *******************************/ final File rasterFile; if (request.getFormat() == UNREDDFormat.VECTOR) { rasterFile = processVector(dataFile, layername, year, month, day, layer, mosaicDir); } else { rasterFile = processRaster(dataFile, layer, mosaicDir, layername); } // *** Image processing has finished // ******************** // Create LayerUpdate // ******************** if (LOGGER.isInfoEnabled()) { LOGGER.info("Adding LayerUpdate into GeoStore"); } this.listenerForwarder.progressing(70, "Adding LayerUpdate into GeoStore"); try { geostore.insertLayerUpdate(layername, year, month, day); } catch (Exception e) { LOGGER.debug("Parameter : [layername=" + layername + ", year=" + year + ", month=" + month + "]"); throw new ActionException(this, "Error while inserting a LayerUpdate", e); } // finish action if (LOGGER.isInfoEnabled()) { LOGGER.info("Ingestion 
action succesfully completed"); } this.listenerForwarder.completed(); this.listenerForwarder.progressing(100, "Action successfully completed"); // add the event to the return queue return rasterFile; }
From source file:org.apache.hadoop.security.KDiag.java
/**
 * A cursory look at the {@code kinit} executable.
 *
 * If it is an absolute path: it must exist with a size > 0. If it is just a
 * command, it has to be on the PATH. There's no check for that — but the
 * PATH is printed out.
 */
private void validateKinitExecutable() {
    String kinit = getConf().getTrimmed(KERBEROS_KINIT_COMMAND, "");
    if (kinit.isEmpty()) {
        // no kinit configured -- nothing to examine
        return;
    }
    File kinitPath = new File(kinit);
    println("%s = %s", KERBEROS_KINIT_COMMAND, kinitPath);
    if (kinitPath.isAbsolute()) {
        // Absolute path: verify the binary actually exists and is non-empty.
        verifyFileIsValid(kinitPath, CAT_KERBEROS, KERBEROS_KINIT_COMMAND);
    } else {
        // Bare command: cannot be verified here, so surface the PATH instead.
        println("Executable %s is relative -must be on the PATH", kinit);
        printEnv("PATH");
    }
}
From source file:org.dita.dost.module.reader.AbstractReaderModule.java
/**
 * Reads pipeline input attributes into this module's fields and into the
 * job configuration. Validates that the DITA-OT install dir and the output
 * dir are absolute, then resolves the input directory and input map to
 * absolute URIs before storing them on the job.
 *
 * @param input the pipeline input whose attributes are read
 * @throws IllegalArgumentException if the DITA-OT dir or output dir is not absolute
 */
void parseInputParameters(final AbstractPipelineInput input) {
    ditaDir = toFile(input.getAttribute(ANT_INVOKER_EXT_PARAM_DITADIR));
    if (!ditaDir.isAbsolute()) {
        throw new IllegalArgumentException("DITA-OT installation directory " + ditaDir + " must be absolute");
    }
    validate = Boolean.valueOf(input.getAttribute(ANT_INVOKER_EXT_PARAM_VALIDATE));
    transtype = input.getAttribute(ANT_INVOKER_EXT_PARAM_TRANSTYPE);
    gramcache = "yes".equalsIgnoreCase(input.getAttribute(ANT_INVOKER_EXT_PARAM_GRAMCACHE));
    // Processing mode defaults to LAX when the attribute is absent.
    processingMode = Optional.ofNullable(input.getAttribute(ANT_INVOKER_EXT_PARAM_PROCESSING_MODE))
            .map(String::toUpperCase).map(Mode::valueOf).orElse(Mode.LAX);
    genDebugInfo = Boolean.valueOf(input.getAttribute(ANT_INVOKER_EXT_PARAM_GENERATE_DEBUG_ATTR));
    // For the output control
    job.setGeneratecopyouter(input.getAttribute(ANT_INVOKER_EXT_PARAM_GENERATECOPYOUTTER));
    job.setOutterControl(input.getAttribute(ANT_INVOKER_EXT_PARAM_OUTTERCONTROL));
    job.setOnlyTopicInMap(Boolean.valueOf(input.getAttribute(ANT_INVOKER_EXT_PARAM_ONLYTOPICINMAP)));
    job.setCrawl(Optional.ofNullable(input.getAttribute(ANT_INVOKER_EXT_PARAM_CRAWL))
            .orElse(ANT_INVOKER_EXT_PARAM_CRAWL_VALUE_TOPIC));
    // Set the OutputDir; a relative output dir is rejected outright.
    final File path = toFile(input.getAttribute(ANT_INVOKER_EXT_PARAM_OUTPUTDIR));
    if (path.isAbsolute()) {
        job.setOutputDir(path);
    } else {
        throw new IllegalArgumentException("Output directory " + path + " must be absolute");
    }
    final File basedir = toFile(input.getAttribute(ANT_INVOKER_PARAM_BASEDIR));
    // Resolve the input directory to an absolute URI: already-absolute URIs
    // pass through; rooted paths get a "file" scheme; anything else is
    // resolved against the Ant base directory.
    final URI ditaInputDir = toURI(input.getAttribute(ANT_INVOKER_EXT_PARAM_INPUTDIR));
    if (ditaInputDir != null) {
        if (ditaInputDir.isAbsolute()) {
            baseInputDir = ditaInputDir;
        } else if (ditaInputDir.getPath() != null && ditaInputDir.getPath().startsWith(URI_SEPARATOR)) {
            baseInputDir = setScheme(ditaInputDir, "file");
        } else {
            // XXX Shouldn't this be resolved to current directory, not Ant script base directory?
            baseInputDir = basedir.toURI().resolve(ditaInputDir);
        }
        assert baseInputDir.isAbsolute();
    }
    // Resolve the input map with the same cascade, falling back to the job's
    // stored input file when the attribute is absent.
    URI ditaInput = toURI(input.getAttribute(ANT_INVOKER_PARAM_INPUTMAP));
    ditaInput = ditaInput != null ? ditaInput : job.getInputFile();
    if (ditaInput.isAbsolute()) {
        rootFile = ditaInput;
    } else if (ditaInput.getPath() != null && ditaInput.getPath().startsWith(URI_SEPARATOR)) {
        rootFile = setScheme(ditaInput, "file");
    } else if (baseInputDir != null) {
        rootFile = baseInputDir.resolve(ditaInput);
    } else {
        rootFile = basedir.toURI().resolve(ditaInput);
    }
    job.setInputFile(rootFile);
    // When no input dir was given, derive it from the input map's directory.
    if (baseInputDir == null) {
        baseInputDir = rootFile.resolve(".");
    }
    job.setInputDir(baseInputDir);
    // Profiling is on by default; when enabled, pick up the merged DITAVAL
    // file from the temp dir if it exists.
    profilingEnabled = Optional.ofNullable(input.getAttribute(ANT_INVOKER_PARAM_PROFILING_ENABLED))
            .map(Boolean::parseBoolean).orElse(true);
    if (profilingEnabled) {
        ditavalFile = Optional.of(new File(job.tempDir, FILE_NAME_MERGED_DITAVAL)).filter(File::exists)
                .orElse(null);
    }
}
From source file:org.sikuli.script.App.java
/**
 * Creates an instance for an app with this name (nothing done yet).
 *
 * A leading "+" marks an "immediate" app spec: the remainder is parsed as an
 * executable (optionally double-quoted) followed by options. Without the "+"
 * the name is handed to init() unchanged.
 *
 * @param name name
 */
public App(String name) {
    appNameGiven = name;
    appName = name;
    appPID = -1;
    appWindow = "";
    appOptions = "";
    String execName = "";
    if (appNameGiven.startsWith("+")) {
        // immediate mode: strip the "+" marker and parse the remainder
        isImmediate = true;
        appNameGiven = appNameGiven.substring(1);
        Debug.log(3, "App.immediate: %s", appNameGiven);
        appName = appNameGiven;
        String[] parts;
        if (appName.startsWith("\"")) {
            // quoted executable: the text after the closing quote is the
            // options; +3 skips the opening quote, closing quote, and the
            // separating space
            parts = appName.substring(1).split("\"");
            if (parts.length > 1) {
                appOptions = appName.substring(parts[0].length() + 3);
                appName = "\"" + parts[0] + "\"";
            }
        } else {
            // unquoted: first space-separated token is the executable,
            // everything after it is the options
            parts = appName.split(" ");
            if (parts.length > 1) {
                appOptions = appName.substring(parts[0].length() + 1);
                appName = parts[0];
            }
        }
        // strip surrounding quotes, if any, to get the bare executable path
        if (appName.startsWith("\"")) {
            execName = appName.substring(1, appName.length() - 1);
        } else {
            execName = appName;
        }
        appName = new File(execName).getName();
        File checkName = new File(execName);
        // an absolute path that does not exist marks the app as not found
        if (checkName.isAbsolute()) {
            if (!checkName.exists()) {
                appName = "";
                appOptions = "";
                appWindow = "!";
                notFound = true;
            }
        }
    } else {
        // normal mode: delegate all parsing to init()
        init(appNameGiven);
    }
    Debug.log(3, "App.create: %s", toStringShort());
}
From source file:com.streamsets.pipeline.stage.destination.hdfs.HdfsTargetConfigBean.java
/**
 * Builds the Hadoop Configuration for the HDFS target: applies Kerberos
 * settings when enabled, loads core-site.xml/hdfs-site.xml from the
 * configured config directory (validating it first), and finally overlays
 * any explicit per-key overrides.
 *
 * Validation problems are reported by appending to {@code issues} rather
 * than by throwing.
 *
 * @param context the stage context (execution mode, resources dir, issue factory)
 * @param issues collector for configuration problems found along the way
 * @return the assembled Configuration
 */
private Configuration getHadoopConfiguration(Stage.Context context, List<Stage.ConfigIssue> issues) {
    Configuration conf = new Configuration();
    conf.setClass("fs.file.impl", RawLocalFileSystem.class, FileSystem.class);
    if (hdfsKerberos) {
        // Enable Kerberos auth and point the namenode principal at the
        // default realm; if that lookup fails and the user supplied no
        // explicit principal, report a config issue.
        conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
                UserGroupInformation.AuthenticationMethod.KERBEROS.name());
        try {
            conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "hdfs/_HOST@" + KerberosUtil.getDefaultRealm());
        } catch (Exception ex) {
            if (!hdfsConfigs.containsKey(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY)) {
                issues.add(context.createConfigIssue(Groups.HADOOP_FS.name(), null, Errors.HADOOPFS_28,
                        ex.toString()));
            }
        }
    }
    if (hdfsConfDir != null && !hdfsConfDir.isEmpty()) {
        File hadoopConfigDir = new File(hdfsConfDir);
        if ((context.getExecutionMode() == ExecutionMode.CLUSTER_BATCH
                || context.getExecutionMode() == ExecutionMode.CLUSTER_YARN_STREAMING
                || context.getExecutionMode() == ExecutionMode.CLUSTER_MESOS_STREAMING)
                && hadoopConfigDir.isAbsolute()) {
            //Do not allow absolute hadoop config directory in cluster mode
            issues.add(context.createConfigIssue(Groups.HADOOP_FS.name(),
                    HDFS_TARGET_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_45, hdfsConfDir));
        } else {
            // Relative config dirs are resolved against the resources dir.
            if (!hadoopConfigDir.isAbsolute()) {
                hadoopConfigDir = new File(context.getResourcesDirectory(), hdfsConfDir).getAbsoluteFile();
            }
            if (!hadoopConfigDir.exists()) {
                issues.add(context.createConfigIssue(Groups.HADOOP_FS.name(),
                        HDFS_TARGET_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_25,
                        hadoopConfigDir.getPath()));
            } else if (!hadoopConfigDir.isDirectory()) {
                issues.add(context.createConfigIssue(Groups.HADOOP_FS.name(),
                        HDFS_TARGET_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_26,
                        hadoopConfigDir.getPath()));
            } else {
                // Load core-site.xml and hdfs-site.xml when present; a path
                // that exists but is not a regular file is reported, though
                // it is still passed to addResource afterwards.
                File coreSite = new File(hadoopConfigDir, "core-site.xml");
                if (coreSite.exists()) {
                    if (!coreSite.isFile()) {
                        issues.add(context.createConfigIssue(Groups.HADOOP_FS.name(),
                                HDFS_TARGET_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_27,
                                coreSite.getPath()));
                    }
                    conf.addResource(new Path(coreSite.getAbsolutePath()));
                }
                File hdfsSite = new File(hadoopConfigDir, "hdfs-site.xml");
                if (hdfsSite.exists()) {
                    if (!hdfsSite.isFile()) {
                        issues.add(context.createConfigIssue(Groups.HADOOP_FS.name(),
                                HDFS_TARGET_CONFIG_BEAN_PREFIX + "hdfsConfDir", Errors.HADOOPFS_27,
                                hdfsSite.getPath()));
                    }
                    conf.addResource(new Path(hdfsSite.getAbsolutePath()));
                }
            }
        }
    }
    // Explicit key/value overrides always win over anything loaded above.
    for (Map.Entry<String, String> config : hdfsConfigs.entrySet()) {
        conf.set(config.getKey(), config.getValue());
    }
    return conf;
}