List of usage examples for org.apache.hadoop.fs.FileSystem#create(Path)
public FSDataOutputStream create(Path f) throws IOException
From source file: FormatStorageBasicTest.java
License: Open Source License
// Round-trips a Segment's unit index through HDFS: builds 100 units, persists the
// segment, then re-reads the unit index and checks every recorded offset/line range.
public void testPersistentSegment() {
    try {
        IndexInfo info = new IndexInfo();
        info.offset = 0;
        Head head = new Head();
        head.setVar((byte) 1); // variable-length record head
        Configuration conf = new Configuration();
        FormatDataFile fd = new FormatDataFile(conf);
        // NOTE(review): creates a "_tmp" sibling file first — presumably to initialize
        // fd's head/config state; verify against FormatDataFile.create.
        fd.create(prefix + "testPersistentSegment_tmp", head);
        String fileName = prefix + "testPersistentSegment";
        Path path = new Path(fileName);
        FileSystem fs = FileSystem.get(new Configuration());
        FSDataOutputStream out = fs.create(path);
        fd.setOut(out);
        Segment segment = new Segment(info, fd);
        int unitSize = 100; // number of units added to the segment
        for (int i = 0; i < unitSize; i++) {
            IndexInfo indexInfo = new IndexInfo();
            indexInfo.offset = i * 100;
            indexInfo.len = 77;
            indexInfo.beginLine = (i + 1) * 100;
            indexInfo.endLine = (i + 2) * 100;
            indexInfo.idx = i;
            Unit unit = new Unit(indexInfo, segment);
            addRecord2Unit(unit, 100); // fill the unit with 100 fixed records
            unit.beginLine = (i + 1) * 100;
            unit.endLine = (i + 2) * 100;
            segment.addUnit(unit);
            // Expected unit length: 100 fixed-size chunks + 100 8-byte entries + metadata.
            if (unit.len() != 100 * full7chunkLen + 100 * 8 + ConstVar.DataChunkMetaOffset) {
                fail("error unit.len:" + unit.len());
            }
        }
        segment.recordNum = 234;
        segment.setBeginLine(1);
        segment.setEndLine(235);
        segment.persistent(out);
        // After persisting, the stream position must land exactly on the configured
        // segment size (persistent() pads the segment to that size — TODO confirm).
        if (out.getPos() != fd.confSegmentSize()) {
            System.out.println("seg.len:" + segment.len() + "seg.remain:" + segment.remain()
                    + "index.len" + segment.unitIndex().len());
            fail("error pos:" + out.getPos());
        }
        out.close();
        int unitlen = full7chunkLen * 100 + 8 * 100 + ConstVar.DataChunkMetaOffset;
        FSDataInputStream in = fs.open(path);
        in.seek(segment.lineIndexOffset()); // jump straight to the line-index section
        info.offset = 0;
        info.len = segment.len();
        fd.setWorkStatus(ConstVar.WS_Read); // switch fd into read mode for unpersist
        Segment segment2 = new Segment(info, fd);
        segment2.unpersistentUnitIndex(in);
        // Verify segment-level metadata survived the round trip.
        if (segment2.recordNum() != 234) {
            fail("error recordnum:" + segment2.recordNum());
        }
        if (segment2.unitNum() != unitSize) {
            fail("error unitNum:" + segment2.unitNum());
        }
        // No key index was written, so its offset must be the -1 sentinel.
        if (segment2.keyIndexOffset() != -1) {
            fail("error key index offset:" + segment2.keyIndexOffset());
        }
        // Line index starts right after all unit payloads.
        if (segment2.lineIndexOffset() != unitlen * unitSize) {
            fail("error line index offset:" + segment2.lineIndexOffset());
        }
        if (segment2.units().size() != unitSize) {
            fail("error units.size:" + segment2.units().size());
        }
        UnitIndex index = segment2.unitIndex();
        if (index.lineIndexInfos().size() != unitSize) {
            fail("error line unit index size:" + index.lineIndexInfos().size());
        }
        if (index.keyIndexInfos().size() != 0) {
            fail("error key unit index size:" + index.keyIndexInfos().size());
        }
        // Per-unit index entries must match exactly what was written above.
        for (int i = 0; i < unitSize; i++) {
            IndexInfo ii = index.lineIndexInfos().get(i);
            if (ii.beginLine() != (1 + i) * 100) {
                fail("error beginline:" + ii.beginLine() + "i:" + i);
            }
            if (ii.endLine() != (2 + i) * 100) {
                fail("error end line:" + ii.endLine() + "i:" + i);
            }
            if (ii.offset() != i * 100) {
                fail("error offset:" + ii.offset() + "i:" + i);
            }
            if (ii.len != unitlen) {
                fail("error len:" + ii.len() + "i:" + i);
            }
            if (ii.idx() != i) {
                fail("error idx:" + ii.idx() + "i:" + i);
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
        fail("get IOException:" + e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        fail("get Exception:" + e.getMessage());
    }
}
From source file: FormatStorageBasicTest.java
License: Open Source License
// Writes 150000 identical 7-field records into a segment, re-opens it for reading,
// and checks getRecordByLine: out-of-range lines yield null, in-range lines yield
// a record matching the fixed fixture (judgeFixedRecord).
public void testGetRecordByLineSegment() {
    Segment segment = null;
    try {
        // Schema: byte, short, int, long, float, double, string (indices 0..6).
        FieldMap fieldMap = new FieldMap();
        fieldMap.addField(new Field(ConstVar.FieldType_Byte, ConstVar.Sizeof_Byte, (short) 0));
        fieldMap.addField(new Field(ConstVar.FieldType_Short, ConstVar.Sizeof_Short, (short) 1));
        fieldMap.addField(new Field(ConstVar.FieldType_Int, ConstVar.Sizeof_Int, (short) 2));
        fieldMap.addField(new Field(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, (short) 3));
        fieldMap.addField(new Field(ConstVar.FieldType_Float, ConstVar.Sizeof_Float, (short) 4));
        fieldMap.addField(new Field(ConstVar.FieldType_Double, ConstVar.Sizeof_Double, (short) 5));
        fieldMap.addField(new Field(ConstVar.FieldType_String, 0, (short) 6)); // var-length: size 0
        Head head = new Head();
        head.setFieldMap(fieldMap);
        String fileName = prefix + "testGetRecordByLineSegment";
        Path path = new Path(fileName);
        FileSystem fs = FileSystem.get(new Configuration());
        FSDataOutputStream out = fs.create(path);
        Configuration conf = new Configuration();
        FormatDataFile fd = new FormatDataFile(conf);
        fd.setWorkStatus(ConstVar.WS_Write);
        fd.head = head;
        fd.setOut(out);
        IndexInfo info = new IndexInfo();
        info.offset = 0;
        segment = new Segment(info, fd);
        int recordNum = 150000;
        for (int i = 0; i < recordNum; i++) {
            Record record = new Record(7);
            record.addValue(new FieldValue((byte) 1, (short) 0));
            record.addValue(new FieldValue((short) 2, (short) 1));
            record.addValue(new FieldValue((int) 3, (short) 2));
            record.addValue(new FieldValue((long) 4, (short) 3));
            record.addValue(new FieldValue((float) 5.5, (short) 4));
            record.addValue(new FieldValue((double) 6.6, (short) 5));
            record.addValue(new FieldValue("hello konten", (short) 6));
            segment.addRecord(record);
            record = null; // release reference inside the large loop
        }
        segment.persistent(out);
        out.close();
        // Re-open the same file for reading through a second Segment view.
        FSDataInputStream in = fs.open(path);
        fd.setIn(in);
        fd.setWorkStatus(ConstVar.WS_Read);
        info.offset = 0;
        info.len = segment.len();
        info.beginLine = 0;
        // NOTE(review): 1500000 has an extra zero vs recordNum (150000) — possibly a
        // typo in the fixture; the assertions below still expect only 150000 lines.
        info.endLine = 1500000;
        Segment segment2 = new Segment(info, fd);
        // Negative and past-the-end line numbers must return null.
        Record record = segment2.getRecordByLine(-1);
        if (record != null) {
            fail("should get null");
        }
        record = segment2.getRecordByLine(150000);
        if (record != null) {
            fail("should get null");
        }
        record = segment2.getRecordByLine(150001);
        if (record != null) {
            fail("should get null");
        }
        // First and last valid lines must resolve to the fixed fixture record.
        record = segment2.getRecordByLine(0);
        if (record == null) {
            fail("should not get null");
        }
        judgeFixedRecord(record);
        int line = 150000 - 1;
        record = segment2.getRecordByLine(line);
        if (record == null) {
            fail("should not get null");
        }
        judgeFixedRecord(record);
    } catch (IOException e) {
        e.printStackTrace();
        fail("get IOException:" + e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        fail("get exception:" + e.getMessage());
    }
}
From source file: FormatStorageBasicTest.java
License: Open Source License
// Writes 150000 identical 7-field records, then exercises getRecordByOrder:
// wrong field indices and wrong values yield null; matching values (and a null
// filter) yield all 150000 records, each validated by judgeFixedRecord.
public void testGetRecordByOrderSegment() {
    Segment segment = null;
    try {
        // Schema: byte, short, int, long, float, double, string (indices 0..6).
        FieldMap fieldMap = new FieldMap();
        fieldMap.addField(new Field(ConstVar.FieldType_Byte, ConstVar.Sizeof_Byte, (short) 0));
        fieldMap.addField(new Field(ConstVar.FieldType_Short, ConstVar.Sizeof_Short, (short) 1));
        fieldMap.addField(new Field(ConstVar.FieldType_Int, ConstVar.Sizeof_Int, (short) 2));
        fieldMap.addField(new Field(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, (short) 3));
        fieldMap.addField(new Field(ConstVar.FieldType_Float, ConstVar.Sizeof_Float, (short) 4));
        fieldMap.addField(new Field(ConstVar.FieldType_Double, ConstVar.Sizeof_Double, (short) 5));
        fieldMap.addField(new Field(ConstVar.FieldType_String, 0, (short) 6)); // var-length: size 0
        Head head = new Head();
        head.setFieldMap(fieldMap);
        // NOTE(review): file name says "ByValue" while the method tests getRecordByOrder —
        // looks like a copy-paste leftover; harmless but worth confirming.
        String fileName = prefix + "testGetRecordByValueSegment";
        Path path = new Path(fileName);
        FileSystem fs = FileSystem.get(new Configuration());
        FSDataOutputStream out = fs.create(path);
        Configuration conf = new Configuration();
        FormatDataFile fd = new FormatDataFile(conf);
        fd.setWorkStatus(ConstVar.WS_Write);
        fd.head = head;
        fd.setOut(out);
        IndexInfo info = new IndexInfo();
        info.offset = 0;
        segment = new Segment(info, fd);
        int recordNum = 150000;
        for (int i = 0; i < recordNum; i++) {
            Record record = new Record(7);
            record.addValue(new FieldValue((byte) 1, (short) 0));
            record.addValue(new FieldValue((short) 2, (short) 1));
            record.addValue(new FieldValue((int) 3, (short) 2));
            record.addValue(new FieldValue((long) 4, (short) 3));
            record.addValue(new FieldValue((float) 5.5, (short) 4));
            record.addValue(new FieldValue((double) 6.6, (short) 5));
            record.addValue(new FieldValue("hello konten", (short) 6));
            segment.addRecord(record);
            record = null; // release reference inside the large loop
        }
        segment.persistent(out);
        out.close();
        // Re-open the same file for reading through a second Segment view.
        FSDataInputStream in = fs.open(path);
        fd.setIn(in);
        fd.setWorkStatus(ConstVar.WS_Read);
        info.offset = 0;
        info.len = segment.len();
        Segment segment2 = new Segment(info, fd);
        // Filter with wrong field indices (2,3 instead of 0,1): expect no match.
        FieldValue[] values = new FieldValue[2];
        values[0] = new FieldValue((byte) 1, (short) 2);
        values[1] = new FieldValue((short) 2, (short) 3);
        Record[] records = segment2.getRecordByOrder(values, values.length);
        if (records != null) {
            fail("should get null, index error, records.len:" + records.length);
        }
        // Right indices but a wrong value (3 instead of 2): expect no match.
        values[0] = new FieldValue((byte) 1, (short) 0);
        values[1] = new FieldValue((short) 3, (short) 1);
        records = segment2.getRecordByOrder(values, values.length);
        if (records != null) {
            fail("should get null, value error");
        }
        // Correct indices and values: every record matches.
        values[0] = new FieldValue((byte) 1, (short) 0);
        values[1] = new FieldValue((short) 2, (short) 1);
        records = segment2.getRecordByOrder(values, values.length);
        if (records == null) {
            fail("should not get null");
        }
        if (records.length != 150000) {
            fail("error result size:" + records.length);
        }
        for (int i = 0; i < 150000; i++) {
            judgeFixedRecord(records[i]);
        }
        // Null filter: also returns the full record set.
        records = segment2.getRecordByOrder(null, 0);
        if (records == null) {
            fail("should not get null");
        }
        if (records.length != 150000) {
            fail("error result size:" + records.length);
        }
        for (int i = 0; i < 150000; i++) {
            judgeFixedRecord(records[i]);
        }
    } catch (IOException e) {
        e.printStackTrace();
        fail("get IOException:" + e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        fail("get exception:" + e.getMessage());
    }
}
From source file: Script.java
License: Open Source License
/** Serialize the Javascript object into a file on HDFS and then add * the file to the distributed cache.//www .ja v a 2 s .c om * @param conf The Hadoop configuration object * @param o The Javascript object to serialize * @param name The name of file to save the serialized object to */ public void serialize(Configuration conf, Object o, String name) throws IOException { FileSystem hdfs = FileSystem.get(conf); Path path = new Path(Eggshell.SCRIPT_DIR + "/" + name); FSDataOutputStream out = hdfs.create(path); // create the file String buf; if (!(o instanceof NativeObject)) { buf = cx.toString(o); // serialize if (o instanceof NativeArray) buf = "[" + buf + "]"; // if array } else { buf = "{"; NativeObject obj = (NativeObject) o; Object[] propIds = obj.getPropertyIds(obj); for (Object propId : propIds) { String key = propId.toString(); Object value = obj.getProperty(obj, key); buf += key + ":" + cx.toString(value) + ","; } buf += "}"; } buf = "(" + buf + ")"; // force evaluation out.writeUTF(buf); out.close(); DistributedCache.addCacheFile(path.toUri(), conf); }
From source file: FormatStoragePerformanceTest.java
License: Open Source License
static void doInitFile(int count, boolean var) { try {//from ww w. j a v a 2s . c om String textFile = "MR_input_text/testPerformanceReadText"; if (var) { textFile += "_var"; } Path path = new Path(textFile); FileSystem fs = FileSystem.get(new Configuration()); FSDataOutputStream out = fs.create(path); OutputStream stream = new BufferedOutputStream(out); BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(stream)); String value = null; if (var) { value = "111,22222,33333333,444444444444,5555555.5555,6666666666.666666,hello konten\n"; } else { value = "111,22222,33333333,444444444444,5555555.5555,6666666666.666666\n"; } long begin = System.currentTimeMillis(); for (int i = 0; i < count; i++) { writer.write(value); if (i % 10000000 == 0) { String string = "write " + i + " record, delay: " + ((System.currentTimeMillis() - begin) / 1000) + " s \n"; output.write(string.getBytes()); } } writer.close(); long end = System.currentTimeMillis(); String string = "write " + count + " record over, delay: " + ((end - begin) / 1000) + " s \n"; output.write(string.getBytes()); } catch (Exception e) { e.printStackTrace(); System.out.println(e.getMessage()); } }
From source file: ColumnStoragePerformanceTest.java
License: Open Source License
static void doInitTextFile() { try {/* ww w.j a v a 2 s . c o m*/ Path path = new Path(textFilename); FileSystem fs = FileSystem.get(conf); FSDataOutputStream out = fs.create(path); OutputStream stream = new BufferedOutputStream(out); BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(stream)); String value = "111,22222,33333333,444444444444,5555555.5555,6666666666.666666,hello konten,111,22222,33333333,444444444444,5555555.5555,6666666666.666666,hello konten,111,22222,33333333,444444444444,5555555.5555,6666666666.666666,hello konten,111,22222,33333333,444444444444,5555555.5555,6666666666.666666,hello konten,111,22222,33333333,444444444444,5555555.5555,6666666666.666666,hello konten\n"; long begin = System.currentTimeMillis(); for (int i = 0; i < count; i++) { writer.write(value); if (i % 1000000 == 0) { String string = "write " + i + " record, delay: " + ((System.currentTimeMillis() - begin) / 1000) + " s \n"; output.write(string.getBytes()); } } writer.close(); out.close(); long end = System.currentTimeMillis(); String string = "write " + count + " record over(text), delay: " + ((end - begin) / 1000) + " s \n"; output.write(string.getBytes()); System.out.println(string); } catch (Exception e) { e.printStackTrace(); System.out.println(e.getMessage()); } }
From source file: CouchbaseExportStressTest.java
License: Apache License
public void createFile(int fileId) throws IOException { Configuration conf = getConf(); FileSystem fs = FileSystem.get(conf); Path dirPath = new Path("CouchbaseExportStressTest"); fs.mkdirs(dirPath);/*from w w w . j a v a 2 s.c o m*/ Path filePath = new Path(dirPath, "input-" + fileId); OutputStream os = fs.create(filePath); Writer w = new BufferedWriter(new OutputStreamWriter(os)); for (int i = 0; i < RECORDS_PER_FILE; i++) { long v = (long) i + ((long) RECORDS_PER_FILE * (long) fileId); w.write("" + v + "," + ALPHABET + ALPHABET + ALPHABET + ALPHABET + "\n"); } w.close(); os.close(); }
From source file: PerformanceEvaluation.java
License: Apache License
private Path writeInputFile(final Configuration c) throws IOException { FileSystem fs = FileSystem.get(c); if (!fs.exists(PERF_EVAL_DIR)) { fs.mkdirs(PERF_EVAL_DIR);//from w w w . j av a 2s. c o m } SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMddHHmmss"); Path subdir = new Path(PERF_EVAL_DIR, formatter.format(new Date())); fs.mkdirs(subdir); Path inputFile = new Path(subdir, "input.txt"); PrintStream out = new PrintStream(fs.create(inputFile)); // Make input random. Map<Integer, String> m = new TreeMap<Integer, String>(); Hash h = MurmurHash.getInstance(); int perClientRows = (this.R / this.N); try { for (int i = 0; i < 10; i++) { for (int j = 0; j < N; j++) { String s = "startRow=" + ((j * perClientRows) + (i * (perClientRows / 10))) + ", perClientRunRows=" + (perClientRows / 10) + ", totalRows=" + this.R + ", clients=" + this.N + ", flushCommits=" + this.flushCommits + ", writeToWAL=" + this.writeToWAL + ", scanCache=" + this.S; int hash = h.hash(Bytes.toBytes(s)); m.put(hash, s); } } for (Map.Entry<Integer, String> e : m.entrySet()) { out.println(e.getValue()); } } finally { out.close(); } return subdir; }
From source file: BytesBloomFilter.java
License: Apache License
public void writeToFileSystem(FileSystem fs, Path p) throws IOException { FSDataOutputStream os = fs.create(p); write(os);// ww w. j a v a 2 s .c o m os.close(); }
From source file: TestDistinct.java
License: Apache License
public void testDistinct() throws IOException { FileSystem fs = FileSystem.get(new Configuration()); fs.delete(new Path("/tmp/test_distinct_file"), true); fs.delete(new Path("/tmp/test_distinct_file_results"), true); FSDataOutputStream out = fs.create(new Path("/tmp/test_distinct_file")); PrintWriter pw = new PrintWriter(out); pw.println("distinct1"); pw.println("distinct2"); pw.println("distinct2"); pw.println("distinct3"); pw.println("distinct2"); pw.flush();// w w w.j a v a 2 s. c om out.close(); Map<String, Tap> sources = new HashMap<String, Tap>(); Map<String, Tap> sinks = new HashMap<String, Tap>(); Tap inTap = new Hfs(new TextLine(new Fields("line")), "/tmp/test_distinct_file"); Pipe inPipe = new Pipe("inPipe"); sources.put("inPipe", inTap); Distinct distinct = new Distinct(inPipe); Tap outTap = new Hfs(new TextLine(new Fields("line")), "/tmp/test_distinct_file_results"); Pipe outPipe = new Pipe("outPipe", distinct); sinks.put("outPipe", outTap); Flow flow = new FlowConnector().connect(sources, sinks, inPipe, outPipe); flow.complete(); FSDataInputStream in = fs.open(new Path("/tmp/test_distinct_file_results/part-00000")); BufferedReader reader = new BufferedReader(new InputStreamReader(in)); ArrayList<String> results = new ArrayList<String>(); results.add("distinct1"); results.add("distinct2"); results.add("distinct3"); try { while (true) { String s = reader.readLine(); if (s == null) { break; } assertEquals(results.remove(0), s); } } catch (Exception e) { fail("Got an exception while trying to verify the results: " + e.toString()); } assertEquals("All results must be consumed!", 0, results.size()); }