Example usage for org.apache.hadoop.fs FileSystem getFileStatus

Introduction

On this page you can find example usage for org.apache.hadoop.fs.FileSystem.getFileStatus.

Prototype

public abstract FileStatus getFileStatus(Path f) throws IOException;

Document

Return a file status object that represents the path.
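
Before the project examples, here is a minimal, self-contained sketch of the call (the path below is a placeholder, not taken from any project). Note that getFileStatus never returns null: a missing path surfaces as FileNotFoundException, a subclass of IOException.

import java.io.FileNotFoundException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GetFileStatusExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path path = new Path("/tmp/example.txt"); // placeholder path
        FileSystem fs = path.getFileSystem(conf);
        try {
            FileStatus status = fs.getFileStatus(path);
            System.out.println("path:      " + status.getPath());
            System.out.println("length:    " + status.getLen());
            System.out.println("directory: " + status.isDirectory());
            System.out.println("modified:  " + status.getModificationTime());
        } catch (FileNotFoundException e) {
            // getFileStatus throws rather than returning null for a missing path
            System.err.println("No such path: " + path);
        }
    }
}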

Usage

From source file:azkaban.viewer.hdfs.HdfsImageFileViewer.java

License:Apache License

public boolean canReadFile(FileSystem fs, Path path) {
    String fileName = path.getName();
    int pos = fileName.lastIndexOf('.');
    if (pos >= 0) {
        String suffix = fileName.substring(pos).toLowerCase();
        if (acceptedSuffix.contains(suffix)) {
            long len = 0;
            try {
                len = fs.getFileStatus(path).getLen();
            } catch (IOException e) {
                e.printStackTrace();
                return false;
            }
            if (len <= MAX_IMAGE_FILE_SIZE) {
                return true;
            }
        }
    }
    return false;
}
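
This viewer gates readability on both the file suffix and the length reported by getFileStatus, declining any image larger than MAX_IMAGE_FILE_SIZE.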

From source file:azkaban.viewer.hdfs.ImageFileViewer.java

License:Apache License

@Override
public Set<Capability> getCapabilities(FileSystem fs, Path path) throws AccessControlException {
    String fileName = path.getName();
    int pos = fileName.lastIndexOf('.');
    if (pos < 0) {
        return EnumSet.noneOf(Capability.class);
    }

    String suffix = fileName.substring(pos).toLowerCase();
    if (acceptedSuffix.contains(suffix)) {
        long len = 0;
        try {
            len = fs.getFileStatus(path).getLen();
        } catch (AccessControlException e) {
            throw e;
        } catch (IOException e) {
            e.printStackTrace();
            return EnumSet.noneOf(Capability.class);
        }

        if (len <= MAX_IMAGE_FILE_SIZE) {
            return EnumSet.of(Capability.READ);
        }
    }

    return EnumSet.noneOf(Capability.class);
}
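
Unlike the previous viewer, this version rethrows AccessControlException so callers can distinguish a permission failure from other I/O errors, which are reported as an empty capability set.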

From source file:azkaban.webapp.servlet.hdfsviewer.ImageFileViewer.java

License:Apache License

public Set<Capability> getCapabilities(FileSystem fs, Path path) {
    String fileName = path.getName();
    int pos = fileName.lastIndexOf('.');
    if (pos < 0) {
        return EnumSet.noneOf(Capability.class);
    }

    String suffix = fileName.substring(pos).toLowerCase();
    if (acceptedSuffix.contains(suffix)) {
        long len = 0;
        try {
            len = fs.getFileStatus(path).getLen();
        } catch (IOException e) {
            e.printStackTrace();
            return EnumSet.noneOf(Capability.class);
        }

        if (len <= MAX_IMAGE_FILE_SIZE) {
            return EnumSet.of(Capability.READ);
        }
    }

    return EnumSet.noneOf(Capability.class);
}
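
Apart from the package name and the missing AccessControlException handling, this is the same suffix-and-size capability check as in the previous example.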

From source file:be.uantwerpen.adrem.bigfim.AprioriPhaseReducer.java

License:Apache License

private void getBaseDirs(Context context) {
    try {
        String dir = getJobAbsoluteOutputDir(context);
        baseDir = dir.isEmpty() ? "tmp" : dir;

        Path path = new Path(context.getConfiguration().get("mapred.output.dir"));
        FileSystem fs = path.getFileSystem(context.getConfiguration());

        // getFileStatus never returns null; a missing path throws
        // FileNotFoundException, caught by the IOException handler below.
        FileStatus status = fs.getFileStatus(path);
        aprioriPhase = status.getPath().getName().split("-")[0].substring(2);
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:be.uantwerpen.adrem.hadoop.util.Tools.java

License:Apache License

public static String getJobAbsoluteOutputDir(@SuppressWarnings("rawtypes") Context context) {
    try {
        Path path = new Path(context.getConfiguration().get(OUTPUT_DIR_KEY));
        FileSystem fs = path.getFileSystem(context.getConfiguration());
        return fs.getFileStatus(path).getPath().toString();
    } catch (IOException e) {
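        // Deliberately swallowed: the method falls through to return an
        // empty string when the output path cannot be resolved.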
    }
    return "";
}

From source file:be.ugent.intec.halvade.hadoop.mapreduce.VCFCombineReducer.java

License:Open Source License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    try {
        // read header from input
        outpFormat = new KeyIgnoringVCFOutputFormat(VCFFormat.VCF);
        String input = HalvadeConf.getInputDir(context.getConfiguration());
        String output = HalvadeConf.getOutDir(context.getConfiguration());
        reportBest = HalvadeConf.getReportAllVariant(context.getConfiguration());
        FileSystem fs = FileSystem.get(new URI(input), context.getConfiguration());
        Path firstVcfFile = null;
        if (fs.getFileStatus(new Path(input)).isDirectory()) {
            // get first file
            FileStatus[] files = fs.listStatus(new Path(input));
            int i = 0, l = files.length;
            while (i < l && !files[i].getPath().getName().endsWith(".vcf")) {
                i++;
            }
            if (i < l) {
                firstVcfFile = files[i].getPath();
            } else {
                throw new InterruptedException("VCFCombineReducer: No files in input folder.");
            }
        } else {
            throw new InterruptedException("VCFCombineReducer: Input directory is not a directory.");
        }
        Logger.DEBUG("first file: " + firstVcfFile);
        outpFormat.readHeaderFrom(firstVcfFile, fs);
        recordWriter = outpFormat.getRecordWriter(context, new Path(output + "HalvadeCombined.vcf"));
    } catch (URISyntaxException ex) {
        Logger.EXCEPTION(ex);
        throw new InterruptedException("URI for input directory is invalid.");
    }
}

From source file:be.ugent.intec.halvade.HalvadeOptions.java

License:Open Source License

protected double getInputSize(String input, Configuration conf) throws URISyntaxException, IOException {
    double size = 0;
    FileSystem fs = FileSystem.get(new URI(input), conf);
    if (fs.getFileStatus(new Path(input)).isDirectory()) {
        // add every file in directory
        FileStatus[] files = fs.listStatus(new Path(input));
        for (FileStatus file : files) {
            if (!file.isDirectory()) {
                size += file.getLen();
            }
        }
    } else {
        size += fs.getFileStatus(new Path(input)).getLen();
    }
    return (size / (1024 * 1024 * 1024));
}

From source file:be.ugent.intec.halvade.MapReduceRunner.java

License:Open Source License

protected int runPass1RNAJob(Configuration pass1Conf, String tmpOutDir)
        throws IOException, InterruptedException, ClassNotFoundException, URISyntaxException {
    HalvadeConf.setIsPass2(pass1Conf, false);
    HalvadeResourceManager.setJobResources(halvadeOpts, pass1Conf, HalvadeResourceManager.RNA_SHMEM_PASS1, true,
            halvadeOpts.useBamInput);
    Job pass1Job = Job.getInstance(pass1Conf, "Halvade pass 1 RNA pipeline");
    pass1Job.addCacheArchive(new URI(halvadeOpts.halvadeBinaries));
    pass1Job.setJarByClass(be.ugent.intec.halvade.hadoop.mapreduce.HalvadeMapper.class);
    FileSystem fs = FileSystem.get(new URI(halvadeOpts.in), pass1Conf);
    try {
        if (fs.getFileStatus(new Path(halvadeOpts.in)).isDirectory()) {
            // add every file in directory
            FileStatus[] files = fs.listStatus(new Path(halvadeOpts.in));
            for (FileStatus file : files) {
                if (!file.isDirectory()) {
                    FileInputFormat.addInputPath(pass1Job, file.getPath());
                }
            }
        } else {
            FileInputFormat.addInputPath(pass1Job, new Path(halvadeOpts.in));
        }
    } catch (IOException | IllegalArgumentException e) {
        Logger.EXCEPTION(e);
    }

    FileSystem outFs = FileSystem.get(new URI(tmpOutDir), pass1Conf);
    boolean skipPass1 = false;
    if (outFs.exists(new Path(tmpOutDir))) {
        // check if genome already exists
        skipPass1 = outFs.exists(new Path(tmpOutDir + "/_SUCCESS"));
        if (skipPass1)
            Logger.DEBUG("pass1 genome already created, skipping pass 1");
        else {
            Logger.INFO("The output directory \'" + tmpOutDir + "\' already exists.");
            Logger.INFO("ERROR: Please remove this directory before trying again.");
            System.exit(-2);
        }
    }
    if (!skipPass1) {
        FileOutputFormat.setOutputPath(pass1Job, new Path(tmpOutDir));
        pass1Job.setMapperClass(be.ugent.intec.halvade.hadoop.mapreduce.StarAlignPassXMapper.class);

        pass1Job.setInputFormatClass(HalvadeTextInputFormat.class);
        pass1Job.setMapOutputKeyClass(GenomeSJ.class);
        pass1Job.setMapOutputValueClass(Text.class);

        pass1Job.setSortComparatorClass(GenomeSJSortComparator.class);
        pass1Job.setGroupingComparatorClass(GenomeSJGroupingComparator.class);
        pass1Job.setNumReduceTasks(1);
        pass1Job.setReducerClass(be.ugent.intec.halvade.hadoop.mapreduce.RebuildStarGenomeReducer.class);
        pass1Job.setOutputKeyClass(LongWritable.class);
        pass1Job.setOutputValueClass(Text.class);

        return runTimedJob(pass1Job, "Halvade pass 1 Job");
    } else
        return 0;
}
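
The _SUCCESS marker checked here is the flag file that FileOutputFormat's committer writes to a job's output directory on successful completion, so its presence signals that pass 1 already finished.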

From source file:be.ugent.intec.halvade.MapReduceRunner.java

License:Open Source License

protected void setHeaderFile(String input, Configuration conf) throws IOException, URISyntaxException {
    FileSystem fs = FileSystem.get(new URI(input), conf);
    String headerFile = null;
    if (fs.getFileStatus(new Path(input)).isDirectory()) {
        FileStatus[] files = fs.listStatus(new Path(input));
        if (files.length > 0)
            headerFile = files[0].getPath().toString();
    } else
        headerFile = input;
    if (headerFile != null)
        HalvadeConf.setHeaderFile(conf, headerFile);
}

From source file:be.ugent.intec.halvade.MapReduceRunner.java

License:Open Source License

protected void addInputFiles(String input, Configuration conf, Job job) throws URISyntaxException, IOException {
    FileSystem fs = FileSystem.get(new URI(input), conf);
    Logger.DEBUG("adding input files from " + input);
    if (fs.getFileStatus(new Path(input)).isDirectory()) {
        // add every file in directory
        FileStatus[] files = fs.listStatus(new Path(input));
        for (FileStatus file : files) {
            if (!file.isDirectory()) {
                FileInputFormat.addInputPath(job, file.getPath());
            }
        }
    } else
        FileInputFormat.addInputPath(job, new Path(input));
}