Usage examples for org.apache.hadoop.fs.FileSystem#delete(Path, boolean)
public abstract boolean delete(Path f, boolean recursive) throws IOException;
From source file:com.antbrains.crf.hadoop.CalcFeatureWeights.java
License:Apache License
public static void main(String[] args) throws Exception { Configuration conf = new Configuration(); String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs(); if (otherArgs.length != 3 && otherArgs.length != 4) { System.err.println("CalcFeatureWeights <inDir> <tmpDir> <outDir> [startStep]"); System.exit(-1);// w w w .j a v a 2s . co m } int startStep = 1; if (otherArgs.length == 4) { startStep = Integer.valueOf(otherArgs[otherArgs.length - 1]); } FileSystem fs = FileSystem.get(conf); if (startStep <= 1) { System.out.println("calc"); fs.delete(new Path(otherArgs[1]), true); Job job = new Job(conf, CalcFeatureWeights.class.getSimpleName()); job.setNumReduceTasks(1); job.setJarByClass(CalcFeatureWeights.class); job.setMapperClass(CalcFeatureMapper.class); job.setReducerClass(CalcFeatureReducer.class); job.setOutputFormatClass(SequenceFileOutputFormat.class); job.setInputFormatClass(SequenceFileInputFormat.class); job.setMapOutputKeyClass(IntWritable.class); job.setMapOutputValueClass(MyKey.class); job.setOutputKeyClass(MyKey.class); job.setOutputValueClass(MyValue.class); FileInputFormat.setInputPaths(job, new Path(otherArgs[0])); FileOutputFormat.setOutputPath(job, new Path(otherArgs[1])); boolean res = job.waitForCompletion(true); if (!res) { System.err.println("step1 failed"); return; } } if (startStep <= 2) // sort { fs.delete(new Path(otherArgs[2]), true); System.out.println("sort"); Job job = new Job(conf, CalcFeatureWeights.class.getSimpleName()); job.setNumReduceTasks(1); job.setJarByClass(CalcFeatureWeights.class); job.setMapperClass(IdentityMapper.class); job.setReducerClass(IdentityReducer.class); job.setOutputFormatClass(SequenceFileOutputFormat.class); job.setInputFormatClass(SequenceFileInputFormat.class); job.setMapOutputKeyClass(MyKey.class); job.setMapOutputValueClass(MyValue.class); job.setOutputKeyClass(MyKey.class); job.setOutputValueClass(MyValue.class); FileInputFormat.setInputPaths(job, new Path(otherArgs[1])); 
FileOutputFormat.setOutputPath(job, new Path(otherArgs[2])); boolean res = job.waitForCompletion(true); if (!res) { System.err.println("step2 failed"); return; } } }
From source file:com.antbrains.crf.hadoop.ParallelTraining2.java
License:Apache License
public static void main(String[] args) throws Exception { Configuration conf = new Configuration(); String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs(); FileSystem fs = FileSystem.get(conf); TrainingParams params = SgdCrf.loadParams(otherArgs[3]); System.out.println(new Gson().toJson(params)); if (otherArgs.length != 5) { System.err.println(// ww w . j av a2s .co m "ParallelTraining2 <instanceDir> <outDir> <featurecount> <training-params> <out-iter>"); System.exit(-1); } int featureCount = Integer.valueOf(otherArgs[2]); // conf.set("tc", object2String(tc)); int outIter = Integer.valueOf(otherArgs[4]); String prevOutDir = ""; for (int i = 1; i <= outIter; i++) { System.out.println("iterator: " + i); conf.set("pt.iterate", i + ""); conf.set("pt.featureCount", featureCount + ""); conf.set("pt.params", object2String(params)); String outDir = otherArgs[1] + "/result" + i; if (i > 1) { conf.set("paramDir", prevOutDir); } prevOutDir = outDir; fs.delete(new Path(outDir), true); Job job = new Job(conf, ParallelTraining2.class.getSimpleName()); job.setJarByClass(ParallelTraining2.class); job.setMapperClass(TrainingMapper.class); job.setReducerClass(TrainingReducer.class); job.setOutputFormatClass(SequenceFileOutputFormat.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(DoubleWritable.class); FileInputFormat.addInputPath(job, new Path(otherArgs[0])); System.out.println("outDir: " + outDir); FileOutputFormat.setOutputPath(job, new Path(outDir)); boolean res = job.waitForCompletion(true); if (!res) { System.err.println("iter " + i + " failed"); break; } } }
From source file:com.architecting.ch07.MapReduceIndexerTool.java
License:Apache License
private boolean delete(Path path, boolean recursive, FileSystem fs) throws IOException { boolean success = fs.delete(path, recursive); if (!success) { LOG.error("Cannot delete " + path); }/*from w ww .j a v a 2 s. co m*/ return success; }
From source file:com.asakusafw.bulkloader.cache.CacheBuildTest.java
License:Apache License
/** * Initializes the test./* ww w . j a va 2 s . c o m*/ * @throws Exception if some errors were occurred */ @Before public void setUp() throws Exception { URI uri = getTargetUri(); FileSystem fs = FileSystem.get(uri, getConfiguration()); fs.delete(new Path(uri), true); }
From source file:com.asakusafw.cleaner.main.HDFSCleaner.java
License:Apache License
/** * ?/* w ww.ja va 2 s . co m*/ * @param fs HDFS? * @param cleanPath HDFS?? * @param isSetExecutionId ID???????? * @param pattern * @param keepDate ?? * @param now ? * @param recursive ???? * @return ? * @throws CleanerSystemException */ private boolean cleanDir(FileSystem fs, Path cleanPath, boolean isSetExecutionId, String pattern, int keepDate, Date now, boolean recursive) throws CleanerSystemException { try { if (!fs.exists(cleanPath)) { // ?????? Log.log(CLASS, MessageIdConst.HCLN_CLEN_DIR_ERROR, "??????", cleanPath.toString()); return false; } if (!fs.getFileStatus(cleanPath).isDir()) { // ?????? Log.log(CLASS, MessageIdConst.HCLN_CLEN_DIR_ERROR, "??????", cleanPath.toString()); return false; } // ? Log.log(CLASS, MessageIdConst.HCLN_FILE_DELETE, cleanPath.toString()); int cleanFileCount = 0; int cleanDirCount = 0; boolean result = true; FileStatus[] dirStatus = getListStatus(fs, cleanPath); Path[] listedPaths = FileUtil.stat2Paths(dirStatus); for (Path path : listedPaths) { FileStatus status = fs.getFileStatus(path); long lastModifiedTime = status.getModificationTime(); if (status.isDir() && recursive) { // ???????? if (isSetExecutionId) { // ID??????MM??????? String executionId = path.getName(); if (isRunningJobFlow(executionId)) { // ??????? Log.log(CLASS, MessageIdConst.HCLN_CLEN_DIR_EXEC, path.toString()); continue; } } FileStatus[] childdirStatus = getListStatus(fs, path); if (childdirStatus.length == 0) { // ??????? if (isExpired(lastModifiedTime, keepDate, now)) { if (!fs.delete(path, false)) { Log.log(CLASS, MessageIdConst.HCLN_CLEN_FAIL, "", path.toString()); result = false; } else { cleanDirCount++; Log.log(CLASS, MessageIdConst.HCLN_DIR_DELETE, path.toString()); } } } else { // ????????? if (cleanDir(fs, path, false, pattern, keepDate, now, recursive)) { // ???????? 
childdirStatus = getListStatus(fs, path); if (childdirStatus.length == 0) { if (isExpired(lastModifiedTime, keepDate, now)) { if (!fs.delete(path, false)) { Log.log(CLASS, MessageIdConst.HCLN_CLEN_FAIL, "", path.toString()); result = false; } else { cleanDirCount++; Log.log(CLASS, MessageIdConst.HCLN_DIR_DELETE, path.toString()); } } } } else { Log.log(CLASS, MessageIdConst.HCLN_CLEN_FAIL, "", path.toString()); result = false; } } } else if (!status.isDir()) { // ??????????? if (isExpired(lastModifiedTime, keepDate, now) && isMatchPattern(path, pattern)) { if (!fs.delete(path, false)) { Log.log(CLASS, MessageIdConst.HCLN_CLEN_FAIL, "", path.toString()); result = false; } else { Log.log(CLASS, MessageIdConst.HCLN_DELETE_FILE, path.toString()); cleanFileCount++; } } } } Log.log(CLASS, MessageIdConst.HCLN_FILE_DELETE_SUCCESS, cleanPath.toString(), cleanDirCount, cleanFileCount); return result; } catch (IOException e) { Log.log(e, CLASS, MessageIdConst.HCLN_CLEN_DIR_EXCEPTION, cleanPath.getName()); return false; } }
From source file:com.asakusafw.compiler.util.tester.HadoopDriver.java
License:Apache License
/** * Cleans up the temporary working area. * @throws IOException if failed to clean up *//* w w w. jav a2s. c om*/ public void clean() throws IOException { logger.info("clean user directory"); Path path = new Path(toPath().toPath('/')); FileSystem fs = path.getFileSystem(configuration); try { if (fs.exists(path)) { fs.delete(path, true); } } catch (IOException e) { logger.info(MessageFormat.format("Failed to fs -rmr {0}", toPath()), e); } }
From source file:com.asakusafw.dag.runtime.directio.TransactionManager.java
License:Apache License
private void setTransactionInfo(boolean value) throws IOException { Path transactionInfo = getTransactionInfoPath(); FileSystem fs = transactionInfo.getFileSystem(configuration); if (value) {/*from w w w .j av a 2s . c o m*/ try (OutputStream output = new SafeOutputStream(fs.create(transactionInfo, false)); PrintWriter writer = new PrintWriter( new OutputStreamWriter(output, HadoopDataSourceUtil.COMMENT_CHARSET))) { for (Map.Entry<String, String> entry : transactionProperties.entrySet()) { if (entry.getValue() != null) { writer.printf("%s: %s%n", //$NON-NLS-1$ entry.getKey(), entry.getValue()); } } } } else { fs.delete(transactionInfo, false); } }
From source file:com.asakusafw.dag.runtime.directio.TransactionManager.java
License:Apache License
private void setCommitted(boolean value) throws IOException { Path commitMark = getCommitMarkPath(); FileSystem fs = commitMark.getFileSystem(configuration); if (value) {// w w w.j a va 2 s . com fs.create(commitMark, false).close(); } else { fs.delete(commitMark, false); } }
From source file:com.asakusafw.lang.compiler.extension.testdriver.InternalExporterRetriever.java
License:Apache License
@Override public void truncate(InternalExporterDescription description, TestContext context) throws IOException { LOG.debug("deleting output directory: {}", description); //$NON-NLS-1$ VariableTable variables = createVariables(context); Configuration config = configurations.newInstance(); FileSystem fs = FileSystem.get(config); String resolved = variables.parse(description.getPathPrefix(), false); Path path = new Path(resolved); Path output = path.getParent(); Path target;/*from ww w.j a va 2s.c o m*/ if (output == null) { LOG.warn(MessageFormat.format("skipped deleting output directory because it is a base directory: {0}", path)); target = fs.makeQualified(path); } else { LOG.debug("output directory will be deleted: {}", output); //$NON-NLS-1$ target = fs.makeQualified(output); } LOG.debug("deleting output target: {}", target); //$NON-NLS-1$ try { FileStatus[] stats = fs.globStatus(path); for (FileStatus s : stats) { Path f = s.getPath(); boolean deleted = fs.delete(f, true); LOG.debug("deleted output target (succeed={}): {}", deleted, f); //$NON-NLS-1$ } } catch (IOException e) { LOG.debug("exception in truncate", e); } }
From source file:com.asakusafw.lang.compiler.extension.testdriver.InternalImporterPreparator.java
License:Apache License
@Override public void truncate(InternalImporterDescription description, TestContext context) throws IOException { LOG.debug("deleting input: {}", description); //$NON-NLS-1$ VariableTable variables = createVariables(context); Configuration config = configurations.newInstance(); FileSystem fs = FileSystem.get(config); String resolved = variables.parse(description.getPathPrefix(), false); Path target = fs.makeQualified(new Path(resolved)); FileStatus[] stats = fs.globStatus(target); if (stats == null || stats.length == 0) { return;//from ww w. ja v a 2 s .c om } for (FileStatus s : stats) { Path path = s.getPath(); LOG.debug("deleting file: {}", path); //$NON-NLS-1$ boolean succeed = fs.delete(path, true); LOG.debug("deleted file (succeed={}): {}", succeed, path); //$NON-NLS-1$ } return; }