List of usage examples for org.apache.hadoop.fs FileSystem delete
public abstract boolean delete(Path f, boolean recursive) throws IOException;
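Before the project excerpts below, a minimal, self-contained sketch of the call itself. The output path and the default Configuration here are placeholders, not taken from any of the examples:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path target = new Path("/tmp/example-output"); // placeholder path

    // recursive = true: delete the path and, if it is a directory,
    // everything underneath it. With recursive = false, deleting a
    // non-empty directory fails with an IOException.
    boolean deleted = fs.delete(target, true);
    System.out.println("deleted: " + deleted); // false if the path did not exist
  }
}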
From source file:arpserver.HadoopTool.java
@Override
public int run(String[] strings) throws Exception {
  Configuration conf = new Configuration();
  String in = strings[0];
  String out = strings[1];
  FileSystem fs = FileSystem.get(conf);

  // Recursively remove the output directory of every job so the runs below start clean
  if (fs.exists(new Path(out))) {
    fs.delete(new Path(out), true);
    fs.delete(new Path(out + "Src"), true);
    fs.delete(new Path(out + "Mitm"), true);
    fs.delete(new Path(out + "ArpScn"), true);
    fs.delete(new Path(out + "s"), true);
    fs.delete(new Path(out + "d"), true);
    fs.delete(new Path(out + "t"), true);
  }

  Job job = new Job();
  Job job2 = new Job();
  Job job3 = new Job();
  Job job4 = new Job();
  Job job5 = new Job();
  Job job6 = new Job();
  Job job7 = new Job();

  job.setJobName("Q");
  job2.setJobName("Src");
  job3.setJobName("Mitm");
  job4.setJobName("ArpScn");
  job5.setJobName("s");
  job6.setJobName("d");
  job7.setJobName("time");

  job.setJarByClass(QuickDetect.class);

  job.setMapperClass(Qmapper.class);
  job.setReducerClass(Qreducer.class);
  job2.setMapperClass(Srcmapper.class);
  job2.setReducerClass(Srcreducer.class);
  job3.setMapperClass(ArpScanmapper.class);
  job3.setReducerClass(ArpScanreducer.class);
  job4.setMapperClass(Mitmmapper.class);
  job4.setReducerClass(Mitmreducer.class);
  job5.setMapperClass(Smapper.class);
  job5.setReducerClass(Sreducer.class);
  job6.setMapperClass(Dmapper.class);
  job6.setReducerClass(Dreducer.class);
  job7.setMapperClass(timemapper.class);
  job7.setReducerClass(timereducer.class);
  //testFinal168.txt

  job.setOutputKeyClass(NullWritable.class);
  job.setOutputValueClass(Text.class);
  job2.setOutputKeyClass(NullWritable.class);
  job2.setOutputValueClass(Text.class);
  job3.setOutputKeyClass(NullWritable.class);
  job3.setOutputValueClass(IntWritable.class);
  job4.setOutputKeyClass(NullWritable.class);
  job4.setOutputValueClass(Text.class);
  job5.setOutputKeyClass(NullWritable.class);
  job5.setOutputValueClass(Text.class);
  job6.setOutputKeyClass(NullWritable.class);
  job6.setOutputValueClass(Text.class);
  job7.setOutputKeyClass(NullWritable.class);
  job7.setOutputValueClass(Text.class);

  job.setMapOutputKeyClass(QuickDetect.class);
  job.setMapOutputValueClass(IntWritable.class);
  //job.setOutputFormatClass(YearMultipleTextOutputFormat.class);
  job2.setMapOutputKeyClass(DetectSrc.class);
  job2.setMapOutputValueClass(IntWritable.class);
  job3.setMapOutputKeyClass(DetectArpScan.class);
  job3.setMapOutputValueClass(IntWritable.class);
  job4.setMapOutputKeyClass(DetectMitm.class);
  job4.setMapOutputValueClass(IntWritable.class);
  job5.setMapOutputKeyClass(SMac.class);
  job5.setMapOutputValueClass(IntWritable.class);
  job6.setMapOutputKeyClass(DMac.class);
  job6.setMapOutputValueClass(IntWritable.class);
  job7.setMapOutputKeyClass(timeMac.class);
  job7.setMapOutputValueClass(IntWritable.class);

  // Run the seven jobs in sequence, each reading the same input and writing
  // to its own output directory; abort with a non-zero exit code on failure.
  FileInputFormat.addInputPath(job, new Path(in));
  FileOutputFormat.setOutputPath(job, new Path(out));
  if (!job.waitForCompletion(true)) {
    return 1;
  }
  FileInputFormat.addInputPath(job2, new Path(in));
  FileOutputFormat.setOutputPath(job2, new Path(out + "Src"));
  if (!job2.waitForCompletion(true)) {
    return 1;
  }
  FileInputFormat.addInputPath(job3, new Path(in));
  FileOutputFormat.setOutputPath(job3, new Path(out + "ArpScn"));
  if (!job3.waitForCompletion(true)) {
    return 1;
  }
  FileInputFormat.addInputPath(job4, new Path(in));
  FileOutputFormat.setOutputPath(job4, new Path(out + "Mitm"));
  if (!job4.waitForCompletion(true)) {
    return 1;
  }
  FileInputFormat.addInputPath(job5, new Path(in));
  FileOutputFormat.setOutputPath(job5, new Path(out + "s"));
  if (!job5.waitForCompletion(true)) {
    return 1;
  }
  FileInputFormat.addInputPath(job6, new Path(in));
  FileOutputFormat.setOutputPath(job6, new Path(out + "d"));
  if (!job6.waitForCompletion(true)) {
    return 1;
  }
  FileInputFormat.addInputPath(job7, new Path(in));
  FileOutputFormat.setOutputPath(job7, new Path(out + "t"));
  job7.waitForCompletion(true); // the last job's result is not checked
  return 0;
}
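The seven deletes at the top of run() differ only in a path suffix, and for the standard FileSystem implementations delete() simply returns false for a path that does not exist rather than throwing, so both the exists() guard and the repetition could be folded into a small helper. A hypothetical sketch; deleteOutputDirs and the suffix list mirror the code above but are not part of the original source:

// Hypothetical helper: recursively remove the base output directory and
// each derived sibling. delete() returns false for absent paths, so no
// exists() check is needed here.
private static void deleteOutputDirs(FileSystem fs, String out) throws IOException {
  for (String suffix : new String[] { "", "Src", "Mitm", "ArpScn", "s", "d", "t" }) {
    fs.delete(new Path(out + suffix), true);
  }
}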
From source file:at.illecker.hadoop.rootbeer.examples.matrixmultiplication.DistributedRowMatrix.java
License:Apache License
/**
 * Returns the column-wise mean of a DistributedRowMatrix
 *
 * @param vectorClass desired class for the column-wise mean vector, e.g.
 *          RandomAccessSparseVector, DenseVector
 * @return Vector containing the column-wise mean of this
 */
public Vector columnMeans(String vectorClass) throws IOException {
  Path outputVectorTmpPath = new Path(outputTmpBasePath, new Path(Long.toString(System.nanoTime())));
  Configuration initialConf = getConf() == null ? new Configuration() : getConf();
  String vectorClassFull = "org.apache.mahout.math." + vectorClass;
  Vector mean = MatrixColumnMeansJob.run(initialConf, rowPath, outputVectorTmpPath, vectorClassFull);
  if (!keepTempFiles) {
    FileSystem fs = outputVectorTmpPath.getFileSystem(conf);
    fs.delete(outputVectorTmpPath, true);
  }
  return mean;
}
From source file:at.illecker.hadoop.rootbeer.examples.matrixmultiplication.DistributedRowMatrix.java
License:Apache License
@Override
public Vector times(Vector v) {
  try {
    Configuration initialConf = getConf() == null ? new Configuration() : getConf();
    Path outputVectorTmpPath = new Path(outputTmpBasePath, new Path(Long.toString(System.nanoTime())));
    Configuration conf = TimesSquaredJob.createTimesJobConf(initialConf, v, numRows, rowPath,
        outputVectorTmpPath);
    JobClient.runJob(new JobConf(conf));

    Vector result = TimesSquaredJob.retrieveTimesSquaredOutputVector(conf);
    if (!keepTempFiles) {
      FileSystem fs = outputVectorTmpPath.getFileSystem(conf);
      fs.delete(outputVectorTmpPath, true);
    }
    return result;
  } catch (IOException ioe) {
    throw new IllegalStateException(ioe);
  }
}
From source file:at.illecker.hadoop.rootbeer.examples.matrixmultiplication.DistributedRowMatrix.java
License:Apache License
@Override
public Vector timesSquared(Vector v) {
  try {
    Configuration initialConf = getConf() == null ? new Configuration() : getConf();
    Path outputVectorTmpPath = new Path(outputTmpBasePath, new Path(Long.toString(System.nanoTime())));
    Configuration conf = TimesSquaredJob.createTimesSquaredJobConf(initialConf, v, rowPath,
        outputVectorTmpPath);
    JobClient.runJob(new JobConf(conf));

    Vector result = TimesSquaredJob.retrieveTimesSquaredOutputVector(conf);
    if (!keepTempFiles) {
      FileSystem fs = outputVectorTmpPath.getFileSystem(conf);
      fs.delete(outputVectorTmpPath, true);
    }
    return result;
  } catch (IOException ioe) {
    throw new IllegalStateException(ioe);
  }
}
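columnMeans(), times() and timesSquared() above share one cleanup pattern: run a job against a unique temporary path, read the result, then recursively delete the path unless keepTempFiles is set. A sketch (not from the original source) of the same pattern with a try/finally, so the temporary directory is removed even when the job or the read fails; computeResult is a hypothetical stand-in for the job-specific work:

// Sketch of the shared cleanup pattern. computeResult is a placeholder for
// running the job and reading its output vector.
private Vector runWithTmpCleanup(Configuration conf, Path outputTmpBasePath,
    boolean keepTempFiles) throws IOException {
  Path tmpPath = new Path(outputTmpBasePath, Long.toString(System.nanoTime()));
  try {
    return computeResult(conf, tmpPath); // hypothetical job + read
  } finally {
    if (!keepTempFiles) {
      FileSystem fs = tmpPath.getFileSystem(conf);
      fs.delete(tmpPath, true); // recursive: the job writes a directory
    }
  }
}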
From source file:at.illecker.hadoop.rootbeer.examples.matrixmultiplication.MatrixMultiplicationBenchmark.java
License:Apache License
@Override
protected void tearDown() throws Exception {
  // verify();

  // Cleanup
  FileSystem fs = FileSystem.get(m_conf);
  fs.delete(CONF_TMP_DIR, true);

  // printOutput(m_conf);
}
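Note the contrast with the DistributedRowMatrix methods above: this teardown takes the file system from FileSystem.get(m_conf), i.e. the configured default, while the matrix code asks the Path itself via outputVectorTmpPath.getFileSystem(conf). The latter also resolves correctly when the path lives on a non-default file system. A minimal sketch, with the URI below as a placeholder:

// Resolving the FileSystem from the Path keeps the delete correct even for
// paths with an explicit scheme (e.g. hdfs:// or file://) that differs
// from the default file system in conf.
Path tmp = new Path("hdfs://namenode:8020/tmp/bench"); // placeholder URI
FileSystem fs = tmp.getFileSystem(conf);
fs.delete(tmp, true);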
From source file:at.illecker.hama.hybrid.examples.kmeans.KMeansHybridBenchmark.java
License:Apache License
@Override
protected void tearDown() throws Exception {
  FileSystem fs = FileSystem.get(m_conf);
  fs.delete(CONF_TMP_DIR, true);
}
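For teardown cleanup like this, FileSystem.deleteOnExit(Path) is an alternative worth knowing: it registers the path for recursive deletion when the FileSystem instance is closed. The explicit delete in tearDown(), as above, remains the more deterministic choice for tests. A minimal sketch, assuming the same m_conf and CONF_TMP_DIR fields:

// Alternative sketch: mark the temp dir for removal when the FileSystem
// is closed, instead of deleting it explicitly in tearDown().
FileSystem fs = FileSystem.get(m_conf);
fs.deleteOnExit(CONF_TMP_DIR);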
From source file:at.illecker.hama.hybrid.examples.kmeans.KMeansHybridBSP.java
License:Apache License
/**
 * prepareInputData
 */
public static void prepareInputData(Configuration conf, FileSystem fs, Path in, Path centerIn,
    int numBspTask, int numGPUBspTask, long n, int k, int vectorDimension, Random rand,
    int GPUPercentage) throws IOException {

  // Delete input files if they already exist
  if (fs.exists(in)) {
    fs.delete(in, true);
  }
  if (fs.exists(centerIn)) {
    fs.delete(centerIn, true);
  }

  final NullWritable nullValue = NullWritable.get();
  final SequenceFile.Writer centerWriter = SequenceFile.createWriter(fs, conf, centerIn,
      PipesVectorWritable.class, NullWritable.class, CompressionType.NONE);

  // Compute work distributions between CPU and GPU tasks
  int cpuTaskNum = numBspTask - numGPUBspTask;
  long inputVectorsPerGPUTask = 0;
  long inputVectorsPerCPU = 0;
  long inputVectorsPerCPUTask = 0;
  if ((numGPUBspTask > 0) && (GPUPercentage > 0) && (GPUPercentage <= 100)) {
    inputVectorsPerGPUTask = (n * GPUPercentage) / 100;
    inputVectorsPerCPU = n - inputVectorsPerGPUTask;
  } else {
    inputVectorsPerCPU = n;
  }
  if (cpuTaskNum > 0) {
    inputVectorsPerCPUTask = inputVectorsPerCPU / cpuTaskNum;
  }

  // long interval = totalNumberOfPoints / numBspTask;
  long centers = 0;
  for (int part = 0; part < numBspTask; part++) {
    Path partIn = new Path(in, "part" + part + ".seq");
    final SequenceFile.Writer dataWriter = SequenceFile.createWriter(fs, conf, partIn,
        PipesVectorWritable.class, NullWritable.class, CompressionType.NONE);

    long interval = 0;
    if (part > cpuTaskNum) {
      interval = inputVectorsPerGPUTask;
    } else {
      interval = inputVectorsPerCPUTask;
    }
    long start = interval * part;
    long end = start + interval - 1;
    if ((numBspTask - 1) == part) {
      end = n; // set to totalNumberOfPoints
    }
    LOG.info("Partition " + part + ": from " + start + " to " + end);

    for (long i = start; i <= end; i++) {
      double[] arr = new double[vectorDimension];
      for (int j = 0; j < vectorDimension; j++) {
        if (rand != null) {
          arr[j] = rand.nextInt((int) n);
        } else {
          arr[j] = i;
        }
      }
      PipesVectorWritable vector = new PipesVectorWritable(new DenseDoubleVector(arr));
      // LOG.info("input[" + i + "]: " + Arrays.toString(arr));
      dataWriter.append(vector, nullValue);

      // The first k input vectors double as the initial centers
      if (k > centers) {
        // LOG.info("center[" + i + "]: " + Arrays.toString(arr));
        centerWriter.append(vector, nullValue);
        centers++;
      }
    }
    dataWriter.close();
  }
  // Close the center writer once, after all partitions have been written
  centerWriter.close();
}
From source file:at.illecker.hama.hybrid.examples.kmeans.KMeansHybridBSP.java
License:Apache License
/**
 * Create testExample vectors and centers as input from
 * http://www.maplesoft.com/support/help/Maple/view.aspx?path=NAG/g03efc
 *
 * n := 20: vectorDimension := 5: k := 3: maxIterations := 10:
 *
 * x := Matrix([
 *   [77.3, 13, 9.699999999999999, 1.5, 6.4], [82.5, 10, 7.5, 1.5, 6.5],
 *   [66.90000000000001, 20.6, 12.5, 2.3, 7], [47.2, 33.8, 19, 2.8, 5.8],
 *   [65.3, 20.5, 14.2, 1.9, 6.9], [83.3, 10, 6.7, 2.2, 7],
 *   [81.59999999999999, 12.7, 5.7, 2.9, 6.7], [47.8, 36.5, 15.7, 2.3, 7.2],
 *   [48.6, 37.1, 14.3, 2.1, 7.2], [61.6, 25.5, 12.9, 1.9, 7.3],
 *   [58.6, 26.5, 14.9, 2.4, 6.7], [69.3, 22.3, 8.4, 4, 7],
 *   [61.8, 30.8, 7.4, 2.7, 6.4], [67.7, 25.3, 7, 4.8, 7.3],
 *   [57.2, 31.2, 11.6, 2.4, 6.5], [67.2, 22.7, 10.1, 3.3, 6.2],
 *   [59.2, 31.2, 9.6, 2.4, 6], [80.2, 13.2, 6.6, 2, 5.8],
 *   [82.2, 11.1, 6.7, 2.2, 7.2], [69.7, 20.7, 9.6, 3.1, 5.9]],
 *   datatype=float[8], order='C_order'):
 *
 * cmeans := Matrix(
 *   [[82.5, 10, 7.5, 1.5, 6.5], [47.8, 36.5, 15.7, 2.3, 7.2],
 *   [67.2, 22.7, 10.1, 3.3, 6.2]], datatype=float[8], order='C_order'):
 *
 * Results
 *
 * cmeans := Matrix([
 *   [81.1833333333333371, 11.6666666666666661, 7.1499999999999947,
 *   2.0500000000000027, 6.6000000000000052],
 *   [47.8666666666666671, 35.8000000000000043, 16.3333333333333321,
 *   2.3999999999999992, 6.7333333333333340],
 *   [64.0454545454545610, 25.2090909090909037, 10.7454545454545425,
 *   2.83636363636363642, 6.65454545454545521]]):
 *
 * inc := Vector([0, 0, 2, 1, 2, 0, 0, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 2]):
 *
 * nic := Vector([6, 3, 11]):
 *
 * css := Vector([46.5716666666666583, 20.3800000000000097, 468.896363636363503]):
 */
public static void prepareTestInput(Configuration conf, FileSystem fs, Path in, Path centerIn)
    throws IOException {

  // Delete input files if they already exist
  if (fs.exists(in)) {
    fs.delete(in, true);
  }
  if (fs.exists(centerIn)) {
    fs.delete(centerIn, true);
  }

  double[][] input = {
      { 77.3, 13, 9.699999999999999, 1.5, 6.4 }, { 82.5, 10, 7.5, 1.5, 6.5 },
      { 66.90000000000001, 20.6, 12.5, 2.3, 7 }, { 47.2, 33.8, 19, 2.8, 5.8 },
      { 65.3, 20.5, 14.2, 1.9, 6.9 }, { 83.3, 10, 6.7, 2.2, 7 },
      { 81.59999999999999, 12.7, 5.7, 2.9, 6.7 }, { 47.8, 36.5, 15.7, 2.3, 7.2 },
      { 48.6, 37.1, 14.3, 2.1, 7.2 }, { 61.6, 25.5, 12.9, 1.9, 7.3 },
      { 58.6, 26.5, 14.9, 2.4, 6.7 }, { 69.3, 22.3, 8.4, 4, 7 },
      { 61.8, 30.8, 7.4, 2.7, 6.4 }, { 67.7, 25.3, 7, 4.8, 7.3 },
      { 57.2, 31.2, 11.6, 2.4, 6.5 }, { 67.2, 22.7, 10.1, 3.3, 6.2 },
      { 59.2, 31.2, 9.6, 2.4, 6 }, { 80.2, 13.2, 6.6, 2, 5.8 },
      { 82.2, 11.1, 6.7, 2.2, 7.2 }, { 69.7, 20.7, 9.6, 3.1, 5.9 } };

  double[][] centers = { { 82.5, 10, 7.5, 1.5, 6.5 }, { 47.8, 36.5, 15.7, 2.3, 7.2 },
      { 67.2, 22.7, 10.1, 3.3, 6.2 } };

  final NullWritable nullValue = NullWritable.get();

  // Write inputs
  LOG.info("inputs: ");
  final SequenceFile.Writer dataWriter = SequenceFile.createWriter(fs, conf, in,
      PipesVectorWritable.class, NullWritable.class, CompressionType.NONE);
  for (int i = 0; i < input.length; i++) {
    dataWriter.append(new PipesVectorWritable(new DenseDoubleVector(input[i])), nullValue);
    LOG.info("input[" + i + "]: " + Arrays.toString(input[i]));
  }
  dataWriter.close();

  // Write centers
  LOG.info("centers: ");
  final SequenceFile.Writer centerWriter = SequenceFile.createWriter(fs, conf, centerIn,
      PipesVectorWritable.class, NullWritable.class, CompressionType.NONE);
  for (int i = 0; i < centers.length; i++) {
    centerWriter.append(new PipesVectorWritable(new DenseDoubleVector(centers[i])), nullValue);
    LOG.info("center[" + i + "]: " + Arrays.toString(centers[i]));
  }
  centerWriter.close();
}
From source file:at.illecker.hama.hybrid.examples.matrixmultiplication.MatrixMultiplicationHybridBenchmark.java
License:Apache License
@Override
protected void tearDown() throws Exception {
  verify();

  // Cleanup
  FileSystem fs = FileSystem.get(m_conf);
  fs.delete(m_MATRIX_A_PATH, true);
  fs.delete(m_MATRIX_B_PATH, true);
  fs.delete(m_MATRIX_C_PATH, true);
  fs.delete(m_MATRIX_D_PATH, true);

  printOutput(m_conf);
}
From source file:at.illecker.hama.hybrid.examples.matrixmultiplication2.MatrixMultiplicationHybridBenchmark.java
License:Apache License
@Override
protected void tearDown() throws Exception {
  // skip verification
  // verify();

  // Cleanup
  FileSystem fs = FileSystem.get(m_conf);
  fs.delete(CONF_TMP_DIR, true);

  // printOutput(m_conf);
}