List of usage examples for org.apache.hadoop.fs.FileSystem#close()
@Override public void close() throws IOException
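All of the examples on this page end by releasing the underlying handle with FileSystem.close(). As a point of reference before the examples, here is a minimal sketch of the usual pattern (the path /tmp/example and the default configuration are assumptions for illustration, not taken from any example below). FileSystem implements java.io.Closeable, so try-with-resources guarantees the close even when an operation throws:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FileSystemCloseSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // FileSystem implements Closeable, so try-with-resources calls
        // close() automatically, even if mkdirs() throws.
        try (FileSystem fs = FileSystem.get(conf)) {
            fs.mkdirs(new Path("/tmp/example"));
        }
        // Caveat: FileSystem.get() returns a cached instance shared per
        // (scheme, authority, user); closing it closes it for all holders.
    }
}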
From source file:boa.datagen.SeqProjectCombiner.java
License:Apache License
public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    conf.set("fs.default.name", "hdfs://boa-njt/");
    FileSystem fileSystem = FileSystem.get(conf);
    String base = conf.get("fs.default.name", "");
    HashMap<String, String> sources = new HashMap<String, String>();
    HashSet<String> marks = new HashSet<String>();
    FileStatus[] files = fileSystem.listStatus(new Path(base + "tmprepcache/2015-07"));
    for (int i = 0; i < files.length; i++) {
        FileStatus file = files[i];
        String name = file.getPath().getName();
        if (name.startsWith("projects-") && name.endsWith(".seq")) {
            System.out.println("Reading file " + i + " in " + files.length + ": " + name);
            SequenceFile.Reader r = new SequenceFile.Reader(fileSystem, file.getPath(), conf);
            final Text key = new Text();
            final BytesWritable value = new BytesWritable();
            try {
                while (r.next(key, value)) {
                    String s = key.toString();
                    if (marks.contains(s))
                        continue;
                    Project p = Project
                            .parseFrom(CodedInputStream.newInstance(value.getBytes(), 0, value.getLength()));
                    if (p.getCodeRepositoriesCount() > 0 && p.getCodeRepositories(0).getRevisionsCount() > 0)
                        marks.add(s);
                    sources.put(s, name);
                }
            } catch (Exception e) {
                System.err.println(name);
                e.printStackTrace();
            }
            r.close();
        }
    }
    SequenceFile.Writer w = SequenceFile.createWriter(fileSystem, conf,
            new Path(base + "repcache/2015-07/projects.seq"), Text.class, BytesWritable.class);
    for (int i = 0; i < files.length; i++) {
        FileStatus file = files[i];
        String name = file.getPath().getName();
        if (name.startsWith("projects-") && name.endsWith(".seq")) {
            System.out.println("Reading file " + i + " in " + files.length + ": " + name);
            SequenceFile.Reader r = new SequenceFile.Reader(fileSystem, file.getPath(), conf);
            final Text key = new Text();
            final BytesWritable value = new BytesWritable();
            try {
                while (r.next(key, value)) {
                    String s = key.toString();
                    if (sources.get(s).equals(name))
                        w.append(key, value);
                }
            } catch (Exception e) {
                System.err.println(name);
                e.printStackTrace();
            }
            r.close();
        }
    }
    w.close();
    fileSystem.close();
}
From source file:boa.io.BoaOutputCommitter.java
License:Apache License
private void storeOutput(final JobContext context, final int jobId) {
    if (jobId == 0)
        return;

    Connection con = null;
    FileSystem fileSystem = null;
    FSDataInputStream in = null;
    FSDataOutputStream out = null;
    try {
        fileSystem = outputPath.getFileSystem(context.getConfiguration());
        con = DriverManager.getConnection(url, user, password);

        PreparedStatement ps = null;
        try {
            ps = con.prepareStatement("INSERT INTO boa_output (id, length) VALUES (" + jobId + ", 0)");
            ps.executeUpdate();
        } catch (final Exception e) {
        } finally {
            try {
                if (ps != null)
                    ps.close();
            } catch (final Exception e) {
                e.printStackTrace();
            }
        }

        fileSystem.mkdirs(new Path("/boa", new Path("" + jobId)));
        out = fileSystem.create(new Path("/boa", new Path("" + jobId, new Path("output.txt"))));

        int partNum = 0;
        final byte[] b = new byte[64 * 1024 * 1024];
        long length = 0;
        boolean hasWebResult = false;
        while (true) {
            final Path path = new Path(outputPath, "part-r-" + String.format("%05d", partNum++));
            if (!fileSystem.exists(path))
                break;
            if (in != null)
                try {
                    in.close();
                } catch (final Exception e) {
                    e.printStackTrace();
                }
            in = fileSystem.open(path);
            int numBytes = 0;
            while ((numBytes = in.read(b)) > 0) {
                if (!hasWebResult) {
                    hasWebResult = true;
                    try {
                        ps = con.prepareStatement("UPDATE boa_output SET web_result=? WHERE id=" + jobId);
                        int webSize = 64 * 1024 - 1;
                        ps.setString(1, new String(b, 0, numBytes < webSize ? numBytes : webSize));
                        ps.executeUpdate();
                    } finally {
                        try {
                            if (ps != null)
                                ps.close();
                        } catch (final Exception e) {
                            e.printStackTrace();
                        }
                    }
                }
                out.write(b, 0, numBytes);
                length += numBytes;
                this.context.progress();
            }
        }

        try {
            ps = con.prepareStatement("UPDATE boa_output SET length=? WHERE id=" + jobId);
            ps.setLong(1, length);
            ps.executeUpdate();
        } finally {
            try {
                if (ps != null)
                    ps.close();
            } catch (final Exception e) {
                e.printStackTrace();
            }
        }
    } catch (final Exception e) {
        e.printStackTrace();
    } finally {
        try {
            if (con != null)
                con.close();
        } catch (final Exception e) {
            e.printStackTrace();
        }
        try {
            if (in != null)
                in.close();
        } catch (final Exception e) {
            e.printStackTrace();
        }
        try {
            if (out != null)
                out.close();
        } catch (final Exception e) {
            e.printStackTrace();
        }
        try {
            if (fileSystem != null)
                fileSystem.close();
        } catch (final Exception e) {
            e.printStackTrace();
        }
    }
}
From source file:cc.solr.lucene.store.hdfs.HdfsDirectory.java
License:Apache License
protected void reopenFileSystem() throws IOException {
    FileSystem fileSystem = FileSystem.get(_hdfsDirPath.toUri(), _configuration);
    FileSystem oldFs = _fileSystemRef.get();
    _fileSystemRef.set(fileSystem);
    if (oldFs != null) {
        oldFs.close();
    }
}
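One caveat worth keeping in mind with this pattern: FileSystem.get() returns a cached instance keyed by scheme, authority, and user, so oldFs and the freshly obtained fileSystem can be the very same object, in which case closing the old handle also closes the new one. A minimal sketch of one way around that, using the non-cached factory method FileSystem.newInstance() (a variation for illustration, not the original source):

protected void reopenFileSystem() throws IOException {
    // newInstance() bypasses the FileSystem cache, so the returned handle
    // is private to this directory and safe to close independently.
    FileSystem fileSystem = FileSystem.newInstance(_hdfsDirPath.toUri(), _configuration);
    FileSystem oldFs = _fileSystemRef.get();
    _fileSystemRef.set(fileSystem);
    if (oldFs != null) {
        oldFs.close(); // cannot invalidate the new handle
    }
}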
From source file:com.asakusafw.bulkloader.collector.ExportFileSend.java
License:Apache License
/**
 * Reads the export data as TSV and sends it through the given
 * {@link com.asakusafw.bulkloader.transfer.FileList.Writer}.
 * @param <T> the data model type
 * @param targetTableModel the data model class of the export target table
 * @param filePath the path of the files to export
 * @param writer the writer to send the files through
 * @param tableName the name of the export target table
 * @return the number of records sent, or -1 if there was nothing to send
 * @throws BulkLoaderSystemException if the send operation fails
 */
protected <T extends Writable> long send(Class<T> targetTableModel, String filePath, FileList.Writer writer,
        String tableName) throws BulkLoaderSystemException {
    FileSystem fs = null;
    String fileName = null;
    // maximum size of a single file to send
    long maxSize = Long.parseLong(ConfigurationLoader.getProperty(Constants.PROP_KEY_EXP_LOAD_MAX_SIZE));
    try {
        TsvIoFactory<T> factory = new TsvIoFactory<>(targetTableModel);
        Configuration conf = new Configuration();
        fs = FileSystem.get(new URI(filePath), conf);

        // resolve the export path (it may contain glob patterns)
        FileStatus[] status = fs.globStatus(new Path(filePath));
        Path[] listedPaths = FileUtil.stat2Paths(status);
        if (listedPaths == null) {
            LOG.info("TG-COLLECTOR-02006", tableName, filePath);
            return -1;
        } else {
            LOG.info("TG-COLLECTOR-02007", listedPaths.length, tableName, filePath);
        }
        long count = 0;
        boolean addEntry = false;
        for (Path path : listedPaths) {
            // skip system files
            if (isSystemFile(path)) {
                continue;
            }

            // TODO

            // open the source file
            ModelInput<T> input = TemporaryStorage.openInput(conf, targetTableModel, path);
            try {
                while (true) {
                    addEntry = true;
                    fileName = FileNameUtil.createSendExportFileName(tableName, fileNameMap);
                    OutputStream output = writer.openNext(FileList.content(fileName));
                    try {
                        CountingOutputStream counter = new CountingOutputStream(output);
                        ModelOutput<T> modelOut = factory.createModelOutput(counter);
                        T model = factory.createModelObject();
                        LOG.info("TG-COLLECTOR-02004", tableName, path.toString(), fileName);

                        // copy each model record to the TSV output
                        boolean nextFile = false;
                        while (input.readTo(model)) {
                            modelOut.write(model);
                            count++;
                            // if the current file exceeds the maximum size,
                            // continue with the next file (the byte count is
                            // approximate, since char-to-byte conversion is
                            // not exact at this point)
                            if (counter.getByteCount() > maxSize) {
                                nextFile = true;
                                break;
                            }
                        }
                        modelOut.close();
                        LOG.info("TG-COLLECTOR-02005", tableName, path.toString(), fileName);
                        if (nextFile) {
                            // the input has more records; open the next file
                            continue;
                        } else {
                            // the input is exhausted
                            break;
                        }
                    } finally {
                        output.close();
                    }
                }
            } finally {
                input.close();
            }
        }
        if (addEntry) {
            return count;
        } else {
            assert count == 0;
            return -1;
        }
    } catch (IOException e) {
        throw new BulkLoaderSystemException(e, getClass(), "TG-COLLECTOR-02001", MessageFormat
                .format("failed to read the export file on HDFS: path={0}, file name={1}", filePath, fileName));
    } catch (URISyntaxException e) {
        throw new BulkLoaderSystemException(e, getClass(), "TG-COLLECTOR-02001",
                MessageFormat.format("invalid HDFS URI: {0}", filePath));
    } finally {
        if (fs != null) {
            try {
                fs.close();
            } catch (IOException e) {
                throw new BulkLoaderSystemException(e, this.getClass(), "TG-COLLECTOR-02001",
                        MessageFormat.format("failed to close the HDFS file system: URI={0}", filePath));
            }
        }
    }
}
From source file:com.asakusafw.cleaner.main.HDFSCleaner.java
License:Apache License
/**
 * Executes the HDFSCleaner.
 * @param args the program arguments: clean mode, execution user, and configuration file
 * @return the exit code
 */
protected int execute(String[] args) {
    String[] prop = new String[1];
    String mode = null;
    String user = null;
    FileSystem fs = null;

    if (args.length > 0) {
        mode = args[0];
    }
    if (args.length > 1) {
        user = args[1];
    }
    if (args.length > 2) {
        prop[0] = args[2];
    }

    // argument check
    if (args.length != 3) {
        System.err.println(
                "ERROR: invalid arguments: count=" + args.length + " mode=" + mode + " user=" + user + " " + prop[0]);
        Log.log(CLASS, MessageIdConst.HCLN_PARAMCHECK_ERROR, "arguments", args.length, new Date(), mode, prop[0]);
        return Constants.EXIT_CODE_ERROR;
    }
    try {
        // initialization
        if (!CleanerInitializer.initDFSCleaner(prop)) {
            Log.log(CLASS, MessageIdConst.HCLN_INIT_ERROR, new Date(), mode, prop[0]);
            return Constants.EXIT_CODE_ERROR;
        }

        // start
        Log.log(CLASS, MessageIdConst.HCLN_START, new Date(), mode, prop[0]);

        // determine the clean mode
        boolean recursive = false;
        if (Constants.CLEAN_MODE_NOMAL.equals(mode)) {
            recursive = false;
        } else if (Constants.CLEAN_MODE_RECURSIVE.equals(mode)) {
            recursive = true;
        } else {
            Log.log(CLASS, MessageIdConst.HCLN_PARAMCHECK_ERROR, "mode", mode, new Date(), mode, prop[0]);
            return Constants.EXIT_CODE_ERROR;
        }

        // get the clean target directories on HDFS
        DFSCleanerBean[] bean = null;
        try {
            bean = getCleanLocalPath(user);
        } catch (CleanerSystemException e) {
            Log.log(e.getCause(), e.getClazz(), e.getMessageId(), e.getMessageArgs());
            return Constants.EXIT_CODE_ERROR;
        }

        // number of days to keep files
        int keepDate = getHDFSFileKeepDate();
        boolean cleanResult = true;
        Date now = new Date();
        for (int i = 0; i < bean.length; i++) {
            try {
                // directory to clean
                Path cleanDir = bean[i].getCleanDir();
                // get the file system for the directory
                try {
                    Configuration conf = getConf();
                    fs = cleanDir.getFileSystem(conf);
                    if (fs == null) {
                        Log.log(CLASS, MessageIdConst.HCLN_CLEN_DIR_ERROR,
                                "Path.getFileSystem returned null", cleanDir.toString());
                        cleanResult = false;
                        continue;
                    }
                } catch (IOException e) {
                    Log.log(e, CLASS, MessageIdConst.HCLN_CLEN_DIR_ERROR,
                            "failed to get the HDFS file system", cleanDir.toString());
                    cleanResult = false;
                    continue;
                }
                boolean target = bean[i].hasExecutionId();
                String pattern = bean[i].getPattern();
                Log.log(CLASS, MessageIdConst.HCLN_CLEN_FILE, cleanDir.toString(), pattern, keepDate, mode,
                        target, now);
                if (cleanDir(fs, cleanDir, target, pattern, keepDate, now, recursive)) {
                    Log.log(CLASS, MessageIdConst.HCLN_CLEN_DIR_SUCCESS, cleanDir.toString(), keepDate, mode);
                } else {
                    Log.log(CLASS, MessageIdConst.HCLN_CLEN_DIR_FAIL, cleanDir.toString(), keepDate, mode);
                    cleanResult = false;
                }
            } catch (CleanerSystemException e) {
                Log.log(e.getCause(), e.getClazz(), e.getMessageId(), e.getMessageArgs());
                cleanResult = false;
            } finally {
                if (fs != null) {
                    // CHECKSTYLE:OFF EmptyBlockCheck
                    try {
                        fs.close();
                    } catch (IOException ignored) {
                        // ignored
                    }
                    // CHECKSTYLE:ON EmptyBlockCheck
                }
            }
        }

        // report the overall result
        if (cleanResult) {
            Log.log(CLASS, MessageIdConst.HCLN_EXIT_SUCCESS, new Date(), mode, prop[0]);
            return Constants.EXIT_CODE_SUCCESS;
        } else {
            Log.log(CLASS, MessageIdConst.HCLN_EXIT_WARNING, new Date(), mode, prop[0]);
            return Constants.EXIT_CODE_WARNING;
        }
    } catch (RuntimeException e) {
        try {
            Log.log(e, CLASS, MessageIdConst.HCLN_EXCEPRION, new Date(), mode, prop[0]);
            return Constants.EXIT_CODE_ERROR;
        } catch (Exception e1) {
            System.err.print("HDFSCleaner failed with an unexpected error.");
            e1.printStackTrace();
            return Constants.EXIT_CODE_ERROR;
        }
    }
}
From source file:com.asiainfo.srd.HioBench.java
License:Apache License
public static void main(String[] args) throws Exception {
    options = new Options();
    final Configuration conf = new Configuration();
    if (options.dumpConf) {
        Configuration.dumpConfiguration(conf, new PrintWriter(System.out));
    }
    final FileSystem fs = FileSystem.get(new URI(options.hdfsUri), conf);
    if (!fs.exists(options.filePath)) {
        System.out.println("no file at " + options.filePath + "; writing new file now with length "
                + options.nGigsInFile + " gigs...");
        writeFile(fs);
        System.out.println("done.");
    } else if (fs.getLength(options.filePath) != options.nBytesInFile) {
        System.out.println("existing file " + options.filename + " has length "
                + fs.getLength(options.filePath) + ", but we wanted length " + options.nBytesInFile
                + ". Re-creating.");
        writeFile(fs);
        System.out.println("done.");
    } else {
        System.out.println("using existing file at " + options.filePath + " of length "
                + options.nGigsInFile + " gigs.");
    }

    long nanoStart = System.nanoTime();
    WorkerThread[] threads = new WorkerThread[options.nThreads];
    for (int i = 0; i < options.nThreads; i++) {
        threads[i] = new WorkerThread(i == 0, fs, WorkerThread.createBenchReader(options, i));
    }
    for (int i = 0; i < options.nThreads; i++) {
        threads[i].start();
    }
    for (int i = 0; i < options.nThreads; i++) {
        threads[i].join();
    }
    for (int i = 0; i < options.nThreads; i++) {
        Throwable t = threads[i].getException();
        if (t != null) {
            System.err.println("there were exceptions. Aborting.");
            System.exit(1);
        }
    }
    long nanoEnd = System.nanoTime();
    fs.close();

    long totalIo = options.nThreads;
    totalIo *= options.nBytesToRead;
    float nanoDiff = nanoEnd - nanoStart;
    float seconds = nanoDiff / 1000000000;
    System.out.println(String.format("Using %d threads, read %s in %f seconds", options.nThreads,
            prettyPrintByteSize(totalIo), seconds));
    float rate = totalIo / seconds;
    System.out.println("Average rate was " + prettyPrintByteSize(rate) + "/s");
}
From source file:com.bigdog.hadoop.hdfs.HDFS_Test.java
public void createNewHDFSFile(String toCreateFilePath, String content) throws IOException {
    Configuration config = new Configuration();
    FileSystem hdfs = FileSystem.get(config);
    FSDataOutputStream os = hdfs.create(new Path(toCreateFilePath));
    os.write(content.getBytes("UTF-8"));
    os.close();
    hdfs.close();
}
From source file:com.bigdog.hadoop.hdfs.HDFS_Test.java
public boolean deleteHDFSFile(String dst) throws IOException {
    Configuration config = new Configuration();
    FileSystem hdfs = FileSystem.get(config);
    Path path = new Path(dst);
    boolean isDeleted = hdfs.delete(path);
    hdfs.close();
    return isDeleted;
}
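The single-argument delete(Path) used above is deprecated in the FileSystem API in favor of the two-argument overload, which makes the recursion behaviour explicit. A minimal sketch of the same method against the current API (the try-with-resources close is an addition for illustration, not part of the original example):

public boolean deleteHDFSFile(String dst) throws IOException {
    Configuration config = new Configuration();
    // try-with-resources closes the FileSystem even if delete() throws
    try (FileSystem hdfs = FileSystem.get(config)) {
        // false: fail on a non-empty directory; pass true to delete recursively
        return hdfs.delete(new Path(dst), false);
    }
}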
From source file:com.bigdog.hadoop.hdfs.HDFS_Test.java
public byte[] readHDFSFile(String dst) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    // check if the file exists
    Path path = new Path(dst);
    if (fs.exists(path)) {
        FSDataInputStream is = fs.open(path);
        // get the file info to create the buffer
        FileStatus stat = fs.getFileStatus(path);
        // create the buffer
        byte[] buffer = new byte[Integer.parseInt(String.valueOf(stat.getLen()))];
        is.readFully(0, buffer);
        is.close();
        fs.close();
        return buffer;
    } else {
        throw new Exception("the file is not found .");
    }
}
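A note on the buffer sizing above: stat.getLen() returns a long, so the Integer.parseInt(String.valueOf(...)) round-trip is just a lossy cast in disguise. A hedged sketch of the same read with an explicit bounds check and guaranteed cleanup (a variation for illustration, not the original author's code):

public byte[] readHDFSFile(String dst) throws IOException {
    Configuration conf = new Configuration();
    try (FileSystem fs = FileSystem.get(conf)) {
        Path path = new Path(dst);
        // getFileStatus() throws FileNotFoundException if the path is absent,
        // so no separate exists() check is needed
        FileStatus stat = fs.getFileStatus(path);
        if (stat.getLen() > Integer.MAX_VALUE) {
            throw new IOException("file too large to buffer in memory: " + dst);
        }
        byte[] buffer = new byte[(int) stat.getLen()];
        try (FSDataInputStream is = fs.open(path)) {
            is.readFully(0, buffer); // positional read of the whole file
        }
        return buffer;
    }
}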
From source file:com.bigdog.hadoop.hdfs.HDFS_Test.java
public void mkdir(String dir) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    fs.mkdirs(new Path(dir));
    fs.close();
}