// Java tutorial
/** * Flamingo HDFS File Uploader - a tool to upload from datasource to datasource and schedule jobs * * Copyright (C) 2011-2012 Cloudine. * * This file is part of Flamingo HDFS File Uploader. * * Flamingo HDFS File Uploader is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Flamingo HDFS File Uploader is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.openflamingo.uploader.util; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.permission.FsPermission; import org.openflamingo.uploader.exception.FileSystemException; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; /** * Hadoop HDFS Utility. * * @author Edward KIM * @since 0.1 */ public class HdfsUtils { /** * HDFS URL? Prefix */ public static final String HDFS_URL_PREFIX = "hdfs://"; /** * ?? ? ?? ? */ public static final long MAX_SIZE = 500 * FileUtils.KB; /** * ? ? ??. * * @param source ?? * @param target ?? 
* @param fs Hadoop FileSystem */ public static void move(String source, String target, FileSystem fs) throws Exception { Path srcPath = new Path(source); Path[] srcs = FileUtil.stat2Paths(fs.globStatus(srcPath), srcPath); Path dst = new Path(target); if (srcs.length > 1 && !fs.getFileStatus(dst).isDir()) { throw new FileSystemException("When moving multiple files, destination should be a directory."); } for (int i = 0; i < srcs.length; i++) { if (!fs.rename(srcs[i], dst)) { FileStatus srcFstatus = null; FileStatus dstFstatus = null; try { srcFstatus = fs.getFileStatus(srcs[i]); } catch (FileNotFoundException e) { throw new FileNotFoundException(srcs[i] + ": No such file or directory"); } try { dstFstatus = fs.getFileStatus(dst); } catch (IOException e) { // Nothing } if ((srcFstatus != null) && (dstFstatus != null)) { if (srcFstatus.isDir() && !dstFstatus.isDir()) { throw new FileSystemException( "cannot overwrite non directory " + dst + " with directory " + srcs[i]); } } throw new FileSystemException("Failed to rename " + srcs[i] + " to " + dst); } } } /** * HDFS? ?? InputStream? . * * @param fs FileSystem * @param filename fully qualified path * @return * @throws java.io.IOException ?? */ public static InputStream getInputStream(FileSystem fs, String filename) throws IOException { return fs.open(new Path(filename)); } /** * "<tt>fs.default.name</tt>"? HDFS URL Hadoop FileSystem? . * * @param fsDefaultName "<tt>core-site.xml</tt>"? ?? "<tt>fs.default.name</tt>" * @return FileSystem */ public static FileSystem getFileSystem(String fsDefaultName) { Configuration conf = new Configuration(); conf.set("fs.default.name", fsDefaultName); try { return FileSystem.get(conf); } catch (IOException e) { throw new FileSystemException("Cannot get FileSystem.", e); } } /** * ? ?? ? . * * @param fs FileSystem * @param path ? Path * @return ?? ? */ public static String load(FileSystem fs, String path) { return load(fs, path, "UTF-8"); } /** * ? ?. 
* * @param fs FileSystem * @param path ? Path * @return ? <tt>true</tt> */ public static boolean isDir(FileSystem fs, String path) { try { return !fs.isFile(new Path(path)); } catch (Exception ex) { throw new FileSystemException( ExceptionUtils.getMessage(" '{}'? .", path), ex); } } /** * ?? ?. * * @param fs FileSystem * @param path ? Path * @return ?? <tt>true</tt> */ public static boolean isFile(FileSystem fs, String path) { try { return fs.isFile(new Path(path)); } catch (Exception ex) { throw new FileSystemException( ExceptionUtils.getMessage(" '{}'? .", path), ex); } } /** * . * * @param fs FileSystem * @param path * @return ?? <tt>true</tt> */ public static boolean delete(FileSystem fs, String path) { try { return fs.delete(new Path(path), true); } catch (Exception ex) { throw new FileSystemException( ExceptionUtils.getMessage(" '{}'? .", path), ex); } } /** * ?. * * @param fs FileSystem * @param path ? * @return ?? ? <tt>true</tt> */ public static boolean mkdir(FileSystem fs, String path) { try { return FileSystem.mkdirs(fs, new Path(path), FsPermission.getDefault()); } catch (Exception ex) { throw new FileSystemException( ExceptionUtils.getMessage(" '{}'? ? .", path), ex); } } /** * ? ?? ? . ?? ? . * <ul> * <li>?? </li> * <li>?? ? </li> * </ul> * * @param fs Hadoop? {@link org.apache.hadoop.fs.FileSystem} * @param path Path * @param encoding ? * @return ? */ public static String load(FileSystem fs, String path, String encoding) { try { FileStatus fileStatus = fs.getFileStatus(new Path(path)); long length = fileStatus.getLen(); if (length > MAX_SIZE) { throw new IllegalArgumentException(" ?? " + MAX_SIZE + " bytes ."); } } catch (Exception ex) { throw new FileSystemException( ExceptionUtils.getMessage(" '{}'? .", path), ex); } FSDataInputStream is = null; try { is = fs.open(new Path(path)); return IOUtils.toString(is, encoding); } catch (IOException e) { throw new FileSystemException( ExceptionUtils.getMessage(" '{}' ?? 
.", path), e); } finally { IOUtils.closeQuietly(is); } } /** * "<tt>hdfs://</tt>" HDFS? Hadoop? FileSystem? . * * @param path HDFS Path * @return FileSystem */ public static FileSystem getFileSystemFromPath(String path) { if (!path.startsWith(HDFS_URL_PREFIX) || path.startsWith("file:///")) { try { Configuration conf = new Configuration(); return FileSystem.getLocal(conf); } catch (IOException e) { throw new FileSystemException("Apache Hadoop? Local FileSystem? ? .", e); } } StringBuilder builder = new StringBuilder(); builder.append(HDFS_URL_PREFIX); builder.append(getIpAddressFromPath(path)); builder.append(getPortFromPath(path)); return getFileSystem(builder.toString()); } /** * HDFS Path? IP . * * @param path HDFS Path * @return IP Address */ public static String getIpAddressFromPath(String path) { if (!path.startsWith(HDFS_URL_PREFIX)) { throw new FileSystemException( ExceptionUtils.getMessage(" '{}'? .", path)); } String[] split = org.springframework.util.StringUtils.delete(path, HDFS_URL_PREFIX).split(":"); return split[0]; } /** * HDFS Path? Port . * * @param path HDFS Path * @return Port */ public static String getPortFromPath(String path) { if (!path.startsWith(HDFS_URL_PREFIX)) { throw new FileSystemException( ExceptionUtils.getMessage(" '{}'? .", path)); } String[] split = org.springframework.util.StringUtils.delete(path, HDFS_URL_PREFIX).split(":"); if (split.length != 2) { throw new FileSystemException( " ? ?? . ? \"hdfs://IP:PORT\" ? ."); } return split[1]; } /** * ? ? ?? ?. * * @param fs Hadoop {@link org.apache.hadoop.fs.FileSystem} * @param path * @return <tt>true</tt> */ public static boolean isExist(FileSystem fs, String path) { try { return fs.exists(new Path(path)); } catch (IOException e) { return false; } } /** * Hadoop {@link org.apache.hadoop.conf.Configuration} FileSystem? ?. 
* * @param conf {@link org.apache.hadoop.conf.Configuration} * @return FileSystem */ public static FileSystem getFileSystem(Configuration conf) { try { return FileSystem.get(conf); } catch (Exception e) { throw new FileSystemException("Apache Hadoop? FileSystem? .", e); } } }