List of usage examples for org.apache.hadoop.fs.FileSystem.listStatus
public FileStatus[] listStatus(Path[] files) throws FileNotFoundException, IOException
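The examples below all use the single-Path overload; for completeness, here is a minimal sketch of the Path[] variant named in the signature above. The directory names are placeholders, not taken from any example. The result is the concatenation of the individual listings.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ListStatusArrayExample {
    public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        // List two directories in one call; paths are placeholders.
        FileStatus[] statuses = fs.listStatus(new Path[] { new Path("/logs/in"), new Path("/logs/out") });
        for (FileStatus status : statuses) {
            System.out.println(status.getPath() + (status.isDirectory() ? " (dir)" : ""));
        }
    }
}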
From source file:com.panguso.lc.analysis.format.Logcenter.java
License:Open Source License
@Override
public int run(String[] args) throws Exception {
    context = new ClassPathXmlApplicationContext("applicationContext.xml");
    Properties prop = context.getBean("configProperties", Properties.class);
    // String time = new DateTime().toString("yyyyMMddHH");
    // Expected property keys, e.g.:
    // hadoop.lib=/application/format/lib/
    // hadoop.conf=/application/format/conf/
    // hadoop.src=/log/src/
    // hadoop.dest=/log/dest/
    // hadoop.archive=/log/archive/
    libPath = prop.getProperty("hadoop.lib");
    confPath = prop.getProperty("hadoop.conf");
    srcPath = prop.getProperty("hadoop.src");
    destPath = prop.getProperty("hadoop.dest");
    archivePath = prop.getProperty("hadoop.archive");
    Configuration conf = getConf();
    logger.info("libPath=" + libPath);
    logger.info("confPath=" + confPath);
    logger.info("srcPath=" + srcPath);
    logger.info("destPath=" + destPath);
    logger.info("archivePath=" + archivePath);

    FileSystem fs = FileSystem.get(conf);
    // Add every jar under libPath to the job classpath.
    FileStatus[] fJars = fs.listStatus(new Path(libPath));
    for (FileStatus fileStatus : fJars) {
        String jar = libPath + fileStatus.getPath().getName();
        DistributedCache.addFileToClassPath(new Path(jar), conf, FileSystem.get(conf));
    }
    // Add every configuration file under confPath to the job classpath.
    FileStatus[] fProp = fs.listStatus(new Path(confPath));
    for (FileStatus fileStatus : fProp) {
        DistributedCache.addArchiveToClassPath(new Path(confPath + fileStatus.getPath().getName()), conf,
                FileSystem.get(conf));
    }

    // Each child directory of srcPath covers one time window.
    FileStatus[] fDirs = fs.listStatus(new Path(srcPath));
    if (fDirs != null && fDirs.length > 0) {
        for (FileStatus file : fDirs) {
            String currentTime = file.getPath().getName();
            String srcPathWithTime = srcPath + currentTime + "/";
            String destPathWithTime = destPath + currentTime + "/";
            String archPathWithTime = archivePath + currentTime + "/";
            // Skip directories that were already processed successfully.
            if (analysisService.isSuccessful(currentTime)) {
                continue;
            }
            // Optionally clean up output left by a previous failed run:
            // fs.delete(new Path(destPathWithTime), true);
            // if (!fs.exists(new Path(srcPathWithTime))) {
            //     logger.warn("outPath does not exist,inputPath=" + srcPathWithTime);
            //     analysisService.saveFailureJob(job.getJobName(), currentTime);
            //     return -1;
            // }
            Job job = new Job(conf);
            String jars = job.getConfiguration().get("mapred.job.classpath.files");
            if (jars != null) {
                // Normalize the classpath separator from ";" to ":".
                job.getConfiguration().set("mapred.job.classpath.files", jars.replace(";", ":"));
            }
            logger.info("current dir=" + currentTime);

            job.setJobName("format_" + currentTime);
            job.setJarByClass(Logcenter.class);
            job.setMapperClass(FormatAnalysisMapper.class);
            job.setReducerClass(FormatAnalysisReducer.class);
            job.setCombinerClass(FormatAnalysisReducer.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);
            job.setOutputFormatClass(TextOutputFormat.class);
            // job.setNumReduceTasks(0);
            FileInputFormat.addInputPath(job, new Path(srcPathWithTime));
            FileOutputFormat.setOutputPath(job, new Path(destPathWithTime));

            boolean result = false;
            try {
                result = job.waitForCompletion(true);
            } catch (FileAlreadyExistsException e) {
                logger.warn(e.getMessage(), e);
            }
            if (!result) {
                logger.warn("job execute failure!");
                analysisService.saveFailureJob(job.getJobName(), currentTime);
                continue;
            }
            // Move the processed input into the archive directory.
            fs.delete(new Path(archPathWithTime), true);
            fs.rename(new Path(srcPathWithTime), new Path(archPathWithTime));
            analysisService.saveSuccessJob(job.getJobName(), currentTime);
        }
    }
    FileSystem.closeAll();
    return 0;
}
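The DistributedCache helpers used above are deprecated in Hadoop 2. A sketch of the same jar distribution with the MRv2 Job API, assuming conf, fs, and libPath as in the example (the class and method names here are ours):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;

public class ClasspathSetup {
    // Build a job whose task classpath includes every jar under libPath.
    static Job newJobWithLibJars(Configuration conf, FileSystem fs, String libPath) throws IOException {
        Job job = Job.getInstance(conf, "format");
        for (FileStatus jarStatus : fs.listStatus(new Path(libPath))) {
            job.addFileToClassPath(jarStatus.getPath());
        }
        return job;
    }
}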
From source file:com.pegasus.ResultInfo.java
License:Apache License
public int run(final String[] args) throws Exception {
    Configuration conf = getConf();
    final FileSystem fs = FileSystem.get(conf);
    edge_path = new Path(conf.get("edge_path"));
    all_vertices = new Path(conf.get("all_vertices"));
    curbm_path = new Path(conf.get("iteration_state"));
    tempbm_path = new Path(conf.get("stage1out"));
    nextbm_path = new Path(conf.get("stage2out"));
    output_path = new Path(conf.get("stage3out"));
    grapherOut_path = new Path(conf.get("grapherout"));
    nreducers = Integer.parseInt(conf.get("num_reducers"));
    local_output_path = conf.get("local_output");

    // initial cleanup
    fs.delete(tempbm_path, true);
    fs.delete(nextbm_path, true);
    fs.delete(output_path, true);
    fs.delete(curbm_path, true);
    fs.delete(grapherOut_path, true);
    FileUtil.fullyDelete(new File(local_output_path));
    fs.mkdirs(curbm_path);
    //fs.mkdirs(grapherOut_path);

    // Seed the iteration state with a copy of every vertex file.
    FileStatus[] statusArray = fs.listStatus(all_vertices);
    for (int index = 0; index < statusArray.length; index++) {
        Path temp = statusArray[index].getPath();
        FileUtil.copy(fs, temp, fs, curbm_path, false, conf);
    }
    make_symmetric = 1;

    System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");

    // Iteratively calculate the neighborhood function, rotating directories
    // between iterations.
    for (int i = cur_iter; i < MAX_ITERATIONS; i++) {
        cur_iter++;

        System.out.println("configStage1");
        JobClient.runJob(configStage1());
        System.out.println("configStage2");
        JobClient.runJob(configStage2());
        System.out.println("configStage3");
        JobClient.runJob(configStage3());

        FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));

        // Copy neighborhood information from HDFS to local disk, and read it.
        String new_path = local_output_path + "/" + i;
        fs.copyToLocalFile(output_path, new Path(new_path));
        ResultInfo ri = readIterationOutput(new_path);

        changed_nodes[iter_counter] = ri.changed;
        unchanged_nodes[iter_counter] = ri.unchanged; // was changed_nodes, overwriting the line above
        iter_counter++;

        System.out.println("Hop " + i + " : changed = " + ri.changed + ", unchanged = " + ri.unchanged);

        fs.delete(curbm_path, true);
        fs.delete(tempbm_path, true);
        fs.delete(output_path, true);
        fs.rename(nextbm_path, curbm_path);

        // Stop when the minimum neighborhood doesn't change.
        if (ri.changed == 0) {
            System.out.println("All the component ids converged. Finishing...");
            fs.rename(curbm_path, grapherOut_path);
            break;
        }
    }
    FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));

    // finishing.
    System.out.println("\n[PEGASUS] Connected component computed.");
    System.out.println("[PEGASUS] Total Iteration = " + iter_counter);
    return 0;
}
From source file:com.phantom.hadoop.examples.pi.Util.java
License:Apache License
/** Read job outputs */
static List<TaskResult> readJobOutputs(FileSystem fs, Path outdir) throws IOException {
    final List<TaskResult> results = new ArrayList<TaskResult>();
    for (FileStatus status : fs.listStatus(outdir)) {
        if (status.getPath().getName().startsWith("part-")) {
            final BufferedReader in = new BufferedReader(
                    new InputStreamReader(fs.open(status.getPath()), Charsets.UTF_8));
            try {
                for (String line; (line = in.readLine()) != null;)
                    results.add(TaskResult.valueOf(line));
            } finally {
                in.close();
            }
        }
    }
    if (results.isEmpty())
        throw new IOException("Output not found");
    return results;
}
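The manual "part-" prefix test above can also be pushed into listStatus itself via the listStatus(Path, PathFilter) overload; a minimal sketch (the class and method names here are ours):

import java.io.IOException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;

public class PartFiles {
    // Same selection as above, expressed with the PathFilter overload.
    static FileStatus[] partFiles(FileSystem fs, Path outdir) throws IOException {
        return fs.listStatus(outdir, new PathFilter() {
            @Override
            public boolean accept(Path p) {
                return p.getName().startsWith("part-");
            }
        });
    }
}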
From source file:com.philiphubbard.digraph.MRBuildVerticesTest.java
License:Open Source License
private static void cleanupTest(Configuration conf) throws IOException {
    FileSystem fileSystem = FileSystem.get(conf);

    ArrayList<MRVertex> branch = new ArrayList<MRVertex>();
    FileStatus[] branchFiles = fileSystem.listStatus(new Path(testOutput + "/branch"));
    for (FileStatus status : branchFiles)
        readVertices(status, branch, conf);
    for (MRVertex vertex : branch)
        System.out.println(vertex.toDisplayString());

    ArrayList<MRVertex> chain = new ArrayList<MRVertex>();
    FileStatus[] chainFiles = fileSystem.listStatus(new Path(testOutput + "/chain"));
    for (FileStatus status : chainFiles)
        readVertices(status, chain, conf);
    for (MRVertex vertex : chain)
        System.out.println(vertex.toDisplayString());

    fileSystem.delete(new Path(testInput), true);
    fileSystem.delete(new Path(testOutput), true);
    fileSystem.close();
}
From source file:com.philiphubbard.digraph.MRCompressChainsTest.java
License:Open Source License
private static void cleanupTest(Configuration conf) throws IOException {
    FileSystem fileSystem = FileSystem.get(conf);

    ArrayList<MRVertex> vertices = new ArrayList<MRVertex>();
    FileStatus[] files = fileSystem.listStatus(new Path(testOutput));
    for (FileStatus status : files) {
        Path path = status.getPath();
        if (path.getName().startsWith("part")) {
            System.out.println(path);
            SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(path));
            IntWritable key = new IntWritable();
            BytesWritable value = new BytesWritable();
            while (reader.next(key, value))
                vertices.add(new MRVertex(value, conf));
            reader.close();
        }
    }
    for (MRVertex vertex : vertices)
        System.out.println(vertex.toDisplayString());

    fileSystem.delete(new Path(testInput), true);
    fileSystem.delete(new Path(testOutput), true);
    fileSystem.close();
}
From source file:com.philiphubbard.sabe.MRAssembler.java
License:Open Source License
protected Graph buildCompressedGraph(Configuration conf, FileSystem fileSystem, Path branchPath, Path chainPath)
        throws IOException, InterruptedException {
    System.out.println("sabe.MRAssembler starting graph construction");

    ArrayList<MRMerVertex> vertices = new ArrayList<MRMerVertex>();
    FileStatus[] branchFiles = fileSystem.listStatus(branchPath);
    for (FileStatus status : branchFiles)
        readVertices(status, vertices, conf);
    FileStatus[] chainFiles = fileSystem.listStatus(chainPath);
    for (FileStatus status : chainFiles)
        readVertices(status, vertices, conf);

    // Check for a malformed graph that does not have exactly one source and
    // one sink; return null in that case.
    int numSources = 0;
    int numSinks = 0;
    for (MRMerVertex vertex : vertices) {
        if (vertex.getIsSource())
            numSources++;
        if (vertex.getIsSink())
            numSinks++;
    }
    if ((numSources != 1) || (numSinks != 1)) {
        System.out.println(
                "Malformed graph: number of sources = " + numSources + ", number of sinks = " + numSinks);
        return null;
    }

    return new Graph(vertices);
}
From source file:com.pinterest.hdfsbackup.distcp.DistCp.java
License:Apache License
/**
 * Initialize DFSCopyFileMapper specific job-configuration.
 * @param conf : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param args Arguments
 */
private static void setup(Configuration conf, JobConf jobConf, final Arguments args) throws IOException {
    jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());

    //set boolean values
    final boolean update = args.flags.contains(Options.UPDATE);
    final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE);
    jobConf.setBoolean(Options.UPDATE.propertyname, update);
    jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite);
    jobConf.setBoolean(Options.IGNORE_READ_FAILURES.propertyname,
            args.flags.contains(Options.IGNORE_READ_FAILURES));
    jobConf.setBoolean(Options.PRESERVE_STATUS.propertyname, args.flags.contains(Options.PRESERVE_STATUS));

    final String randomId = getRandomId();
    JobClient jClient = new JobClient(jobConf);
    Path jobDirectory = new Path(jClient.getSystemDir(), NAME + "_" + randomId);
    jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());

    FileSystem dstfs = args.dst.getFileSystem(conf);
    boolean dstExists = dstfs.exists(args.dst);
    boolean dstIsDir = false;
    if (dstExists) {
        dstIsDir = dstfs.getFileStatus(args.dst).isDir();
    }

    // default logPath
    Path logPath = args.log;
    if (logPath == null) {
        String filename = "_distcp_logs_" + randomId;
        if (!dstExists || !dstIsDir) {
            Path parent = args.dst.getParent();
            if (!dstfs.exists(parent)) {
                dstfs.mkdirs(parent);
            }
            logPath = new Path(parent, filename);
        } else {
            logPath = new Path(args.dst, filename);
        }
    }
    FileOutputFormat.setOutputPath(jobConf, logPath);

    // create src list, dst list
    FileSystem jobfs = jobDirectory.getFileSystem(jobConf);

    Path srcfilelist = new Path(jobDirectory, "_distcp_src_files");
    jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
    SequenceFile.Writer src_writer = SequenceFile.createWriter(jobfs, jobConf, srcfilelist, LongWritable.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    Path dstfilelist = new Path(jobDirectory, "_distcp_dst_files");
    SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobfs, jobConf, dstfilelist, Text.class,
            Text.class, SequenceFile.CompressionType.NONE);

    Path dstdirlist = new Path(jobDirectory, "_distcp_dst_dirs");
    jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
    SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobfs, jobConf, dstdirlist, Text.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    // handle the case where the destination directory doesn't exist
    // and we've only a single src directory OR we're updating/overwriting
    // the contents of the destination directory.
    final boolean special = (args.srcs.size() == 1 && !dstExists) || update || overwrite;
    int srcCount = 0, cnsyncf = 0, dirsyn = 0;
    long fileCount = 0L, byteCount = 0L, cbsyncs = 0L;
    try {
        for (Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext();) {
            final Path src = srcItr.next();
            FileSystem srcfs = src.getFileSystem(conf);
            FileStatus srcfilestat = srcfs.getFileStatus(src);
            Path root = special && srcfilestat.isDir() ? src : src.getParent();
            if (srcfilestat.isDir()) {
                ++srcCount;
            }

            // Depth-first walk of the source tree via listStatus.
            Stack<FileStatus> pathstack = new Stack<FileStatus>();
            for (pathstack.push(srcfilestat); !pathstack.empty();) {
                FileStatus cur = pathstack.pop();
                FileStatus[] children = srcfs.listStatus(cur.getPath());
                for (int i = 0; i < children.length; i++) {
                    boolean skipfile = false;
                    final FileStatus child = children[i];
                    final String dst = makeRelative(root, child.getPath());
                    ++srcCount;

                    if (child.isDir()) {
                        pathstack.push(child);
                    } else {
                        //skip file if the src and the dst files are the same.
                        skipfile = update && sameFile(srcfs, child, dstfs, new Path(args.dst, dst));
                        //skip file if it exceeds the file limit or size limit
                        skipfile |= fileCount == args.filelimit || byteCount + child.getLen() > args.sizelimit;

                        if (!skipfile) {
                            ++fileCount;
                            byteCount += child.getLen();

                            if (LOG.isTraceEnabled()) {
                                LOG.trace("adding file " + child.getPath());
                            }

                            ++cnsyncf;
                            cbsyncs += child.getLen();
                            if (cnsyncf > SYNC_FILE_MAX || cbsyncs > BYTES_PER_MAP) {
                                src_writer.sync();
                                dst_writer.sync();
                                cnsyncf = 0;
                                cbsyncs = 0L;
                            }
                        }
                    }

                    if (!skipfile) {
                        src_writer.append(new LongWritable(child.isDir() ? 0 : child.getLen()),
                                new FilePair(child, dst));
                    }

                    dst_writer.append(new Text(dst), new Text(child.getPath().toString()));
                }

                if (cur.isDir()) {
                    String dst = makeRelative(root, cur.getPath());
                    dir_writer.append(new Text(dst), new FilePair(cur, dst));
                    if (++dirsyn > SYNC_FILE_MAX) {
                        dirsyn = 0;
                        dir_writer.sync();
                    }
                }
            }
        }
    } finally {
        checkAndClose(src_writer);
        checkAndClose(dst_writer);
        checkAndClose(dir_writer);
    }

    FileStatus dststatus = null;
    try {
        dststatus = dstfs.getFileStatus(args.dst);
    } catch (FileNotFoundException fnfe) {
        LOG.info(args.dst + " does not exist.");
    }

    // create dest path dir if copying > 1 file
    if (dststatus == null) {
        if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
            throw new IOException("Failed to create " + args.dst);
        }
    }

    final Path sorted = new Path(jobDirectory, "_distcp_sorted");
    checkDuplication(jobfs, dstfilelist, sorted, conf);

    if (dststatus != null && args.flags.contains(Options.DELETE)) {
        deleteNonexisting(dstfs, dststatus, sorted, jobfs, jobDirectory, jobConf, conf);
    }

    Path tmpDir = new Path(
            (dstExists && !dstIsDir) || (!dstExists && srcCount == 1) ? args.dst.getParent() : args.dst,
            "_distcp_tmp_" + randomId);
    jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());
    LOG.info("srcCount=" + srcCount);
    jobConf.setInt(SRC_COUNT_LABEL, srcCount);
    jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
    setMapCount(byteCount, jobConf);
}
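Because listStatus is not recursive, DistCp drives the traversal with an explicit stack, as above. The same pattern, distilled to a minimal self-contained sketch (the walk method and the printing are ours, not DistCp's):

import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Deque;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ListStatusWalk {
    // Depth-first enumeration of every file under root, using only listStatus.
    static void walk(FileSystem fs, Path root) throws IOException {
        Deque<FileStatus> stack = new ArrayDeque<FileStatus>();
        stack.push(fs.getFileStatus(root));
        while (!stack.isEmpty()) {
            FileStatus cur = stack.pop();
            if (cur.isDirectory()) {
                for (FileStatus child : fs.listStatus(cur.getPath())) {
                    stack.push(child);
                }
            } else {
                System.out.println(cur.getPath() + " (" + cur.getLen() + " bytes)");
            }
        }
    }
}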
From source file:com.pinterest.hdfsbackup.distcp.DistCp.java
License:Apache License
/** Delete the dst files/dirs which do not exist in src */
static private void deleteNonexisting(FileSystem dstfs, FileStatus dstroot, Path dstsorted, FileSystem jobfs,
        Path jobdir, JobConf jobconf, Configuration conf) throws IOException {
    if (!dstroot.isDir()) {
        throw new IOException("dst must be a directory when option " + Options.DELETE.cmd
                + " is set, but dst (= " + dstroot.getPath() + ") is not a directory.");
    }

    //write dst lsr results
    final Path dstlsr = new Path(jobdir, "_distcp_dst_lsr");
    final SequenceFile.Writer writer = SequenceFile.createWriter(jobfs, jobconf, dstlsr, Text.class,
            FileStatus.class, SequenceFile.CompressionType.NONE);
    try {
        //do lsr to get all file statuses in dstroot
        final Stack<FileStatus> lsrstack = new Stack<FileStatus>();
        for (lsrstack.push(dstroot); !lsrstack.isEmpty();) {
            final FileStatus status = lsrstack.pop();
            if (status.isDir()) {
                for (FileStatus child : dstfs.listStatus(status.getPath())) {
                    String relative = makeRelative(dstroot.getPath(), child.getPath());
                    writer.append(new Text(relative), child);
                    lsrstack.push(child);
                }
            }
        }
    } finally {
        checkAndClose(writer);
    }

    //sort lsr results
    final Path sortedlsr = new Path(jobdir, "_distcp_dst_lsr_sorted");
    SequenceFile.Sorter sorter = new SequenceFile.Sorter(jobfs, new Text.Comparator(), Text.class,
            FileStatus.class, jobconf);
    sorter.sort(dstlsr, sortedlsr);

    //compare lsr list and dst list
    SequenceFile.Reader lsrin = null;
    SequenceFile.Reader dstin = null;
    try {
        lsrin = new SequenceFile.Reader(jobfs, sortedlsr, jobconf);
        dstin = new SequenceFile.Reader(jobfs, dstsorted, jobconf);

        //compare sorted lsr list and sorted dst list
        final Text lsrpath = new Text();
        final FileStatus lsrstatus = new FileStatus();
        final Text dstpath = new Text();
        final Text dstfrom = new Text();
        final FsShell shell = new FsShell(conf);
        final String[] shellargs = { "-rmr", null };

        boolean hasnext = dstin.next(dstpath, dstfrom);
        for (; lsrin.next(lsrpath, lsrstatus);) {
            int dst_cmp_lsr = dstpath.compareTo(lsrpath);
            for (; hasnext && dst_cmp_lsr < 0;) {
                hasnext = dstin.next(dstpath, dstfrom);
                dst_cmp_lsr = dstpath.compareTo(lsrpath);
            }

            if (dst_cmp_lsr == 0) {
                //lsrpath exists in dst, skip it
                hasnext = dstin.next(dstpath, dstfrom);
            } else {
                //lsrpath does not exist, delete it
                String s = new Path(dstroot.getPath(), lsrpath.toString()).toString();
                if (shellargs[1] == null || !isAncestorPath(shellargs[1], s)) {
                    shellargs[1] = s;
                    int r = 0;
                    try {
                        r = shell.run(shellargs);
                    } catch (Exception e) {
                        throw new IOException("Exception from shell.", e);
                    }
                    if (r != 0) {
                        throw new IOException("\"" + shellargs[0] + " " + shellargs[1]
                                + "\" returns non-zero value " + r);
                    }
                }
            }
        }
    } finally {
        checkAndClose(lsrin);
        checkAndClose(dstin);
    }
}
From source file:com.pinterest.secor.util.FileUtil.java
License:Apache License
public static String[] list(String path) throws IOException {
    FileSystem fs = getFileSystem(path);
    Path fsPath = new Path(path);
    ArrayList<String> paths = new ArrayList<String>();
    FileStatus[] statuses = fs.listStatus(fsPath);
    if (statuses != null) {
        for (FileStatus status : statuses) {
            Path statusPath = status.getPath();
            if (path.startsWith("s3://") || path.startsWith("s3n://")) {
                paths.add(statusPath.toUri().toString());
            } else {
                paths.add(statusPath.toUri().getPath());
            }
        }
    }
    return paths.toArray(new String[] {});
}
From source file:com.pinterest.secor.util.FileUtil.java
License:Apache License
public static long getModificationTimeMsRecursive(String path) throws IOException {
    FileSystem fs = getFileSystem(path);
    Path fsPath = new Path(path);
    FileStatus status = fs.getFileStatus(fsPath);
    long modificationTime = status.getModificationTime();
    FileStatus[] statuses = fs.listStatus(fsPath);
    if (statuses != null) {
        for (FileStatus fileStatus : statuses) {
            Path statusPath = fileStatus.getPath();
            String stringPath;
            if (path.startsWith("s3://") || path.startsWith("s3n://")) {
                stringPath = statusPath.toUri().toString();
            } else {
                stringPath = statusPath.toUri().getPath();
            }
            if (!stringPath.equals(path)) {
                modificationTime = Math.max(modificationTime, getModificationTimeMsRecursive(stringPath));
            }
        }
    }
    return modificationTime;
}
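Where per-directory control is not needed, FileSystem also offers listFiles(path, true), which returns a RemoteIterator over every file in the tree and avoids the manual recursion. A minimal sketch (the class and method names are ours); note it visits files only, so unlike the recursive helper above it does not consider the modification times of intermediate directories:

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class ModificationTimes {
    // Newest modification time among all files under root, via the
    // recursive listFiles iterator instead of repeated listStatus calls.
    static long newestModificationTime(FileSystem fs, Path root) throws IOException {
        long newest = fs.getFileStatus(root).getModificationTime();
        RemoteIterator<LocatedFileStatus> it = fs.listFiles(root, true);
        while (it.hasNext()) {
            newest = Math.max(newest, it.next().getModificationTime());
        }
        return newest;
    }
}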