Java tutorial: HdfsUtils, a Hadoop HDFS utility class (package org.openflamingo.util).
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.openflamingo.util;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
import org.openflamingo.core.exception.FileSystemException;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;

/**
 * Hadoop HDFS Utility.
 *
 * @author Edward KIM
 * @since 0.1
 */
public class HdfsUtils {

    /**
     * Prefix of an HDFS URL.
     */
    public static final String HDFS_URL_PREFIX = "hdfs://";

    /**
     * Maximum size of a file that can be loaded into memory.
     */
    public static final long MAX_SIZE = 500 * FileUtils.KB;

    /**
     * Moves files to the given destination.
     *
     * @param source source path (glob patterns are supported)
     * @param target target path
     * @param fs     Hadoop FileSystem
     */
    public static void move(String source, String target, FileSystem fs) throws Exception {
        Path srcPath = new Path(source);
        Path[] srcs = FileUtil.stat2Paths(fs.globStatus(srcPath), srcPath);
        Path dst = new Path(target);
        if (srcs.length > 1 && !fs.getFileStatus(dst).isDir()) {
            throw new FileSystemException("When moving multiple files, destination should be a directory.");
        }
        for (int i = 0; i < srcs.length; i++) {
            if (!fs.rename(srcs[i], dst)) {
                FileStatus srcFstatus = null;
                FileStatus dstFstatus = null;
                try {
                    srcFstatus = fs.getFileStatus(srcs[i]);
                } catch (FileNotFoundException e) {
                    throw new FileNotFoundException(srcs[i] + ": No such file or directory");
                }
                try {
                    dstFstatus = fs.getFileStatus(dst);
                } catch (IOException e) {
                    // Nothing
                }
                if ((srcFstatus != null) && (dstFstatus != null)) {
                    if (srcFstatus.isDir() && !dstFstatus.isDir()) {
                        throw new FileSystemException(
                                "cannot overwrite non directory " + dst + " with directory " + srcs[i]);
                    }
                }
                throw new FileSystemException("Failed to rename " + srcs[i] + " to " + dst);
            }
        }
    }

    /**
     * Opens an InputStream for a file in HDFS.
     *
     * @param fs       FileSystem
     * @param filename fully qualified path
     * @return InputStream of the file
     * @throws java.io.IOException if the file cannot be opened
     */
    public static InputStream getInputStream(FileSystem fs, String filename) throws IOException {
        return fs.open(new Path(filename));
    }

    /**
     * Returns the Hadoop FileSystem for the given "<tt>fs.default.name</tt>" HDFS URL.
     *
     * @param fsDefaultName the "<tt>fs.default.name</tt>" value of "<tt>core-site.xml</tt>"
     * @return FileSystem
     */
    public static FileSystem getFileSystem(String fsDefaultName) {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", fsDefaultName);
        try {
            return FileSystem.get(conf);
        } catch (IOException e) {
            throw new FileSystemException("Cannot get FileSystem.", e);
        }
    }

    /**
     * Loads the contents of the given file as a UTF-8 string.
     *
     * @param fs   FileSystem
     * @param path file Path
     * @return contents of the file
     */
    public static String load(FileSystem fs, String path) {
        return load(fs, path, "UTF-8");
    }

    /**
     * Checks whether the given path is a directory.
     *
     * @param fs   FileSystem
     * @param path Path to check
     * @return <tt>true</tt> if the path is a directory
     */
    public static boolean isDir(FileSystem fs, String path) {
        try {
            return !fs.isFile(new Path(path));
        } catch (Exception ex) {
            throw new FileSystemException(ExceptionUtils.getMessage("Cannot access '{}'", path), ex);
        }
    }

    /**
     * Checks whether the given path is a file.
     *
     * @param fs   FileSystem
     * @param path Path to check
     * @return <tt>true</tt> if the path is a file
     */
    public static boolean isFile(FileSystem fs, String path) {
        try {
            return fs.isFile(new Path(path));
        } catch (Exception ex) {
            throw new FileSystemException(ExceptionUtils.getMessage("Cannot access '{}'", path), ex);
        }
    }

    /**
     * Deletes the given path recursively.
     *
     * @param fs   FileSystem
     * @param path Path to delete
     * @return <tt>true</tt> if the path was deleted
     */
    public static boolean delete(FileSystem fs, String path) {
        try {
            return fs.delete(new Path(path), true);
        } catch (Exception ex) {
            throw new FileSystemException(ExceptionUtils.getMessage("Cannot delete '{}'", path), ex);
        }
    }

    /**
     * Creates a directory.
     *
     * @param fs   FileSystem
     * @param path directory Path to create
     * @return <tt>true</tt> if the directory was created
     */
    public static boolean mkdir(FileSystem fs, String path) {
        try {
            return FileSystem.mkdirs(fs, new Path(path), FsPermission.getDefault());
        } catch (Exception ex) {
            throw new FileSystemException(ExceptionUtils.getMessage("Cannot create '{}'", path), ex);
        }
    }

    /**
     * Loads the contents of the given file as a string.
     * The file must satisfy the following conditions:
     * <ul>
     * <li>the file must exist</li>
     * <li>the file size must not exceed {@link #MAX_SIZE}</li>
     * </ul>
     *
     * @param fs       Hadoop {@link org.apache.hadoop.fs.FileSystem}
     * @param path     file Path
     * @param encoding character encoding
     * @return contents of the file
     */
    public static String load(FileSystem fs, String path, String encoding) {
        try {
            FileStatus fileStatus = fs.getFileStatus(new Path(path));
            long length = fileStatus.getLen();
            if (length > MAX_SIZE) {
                throw new IllegalArgumentException("Exceeded " + MAX_SIZE + " bytes : '" + path + "'");
            }
        } catch (Exception ex) {
            throw new FileSystemException(ExceptionUtils.getMessage("Cannot access '{}'", path), ex);
        }

        FSDataInputStream is = null;
        try {
            is = fs.open(new Path(path));
            return IOUtils.toString(is, encoding);
        } catch (IOException e) {
            throw new FileSystemException(ExceptionUtils.getMessage("Cannot load '{}'", path), e);
        } finally {
            IOUtils.closeQuietly(is);
        }
    }

    /**
     * Returns the Hadoop FileSystem for a path that starts with "<tt>hdfs://</tt>";
     * otherwise returns the local FileSystem.
     *
     * @param path HDFS Path
     * @return FileSystem
     */
    public static FileSystem getFileSystemFromPath(String path) {
        if (!path.startsWith(HDFS_URL_PREFIX) || path.startsWith("file:///")) {
            try {
                Configuration conf = new Configuration();
                return FileSystem.getLocal(conf);
            } catch (IOException e) {
                throw new FileSystemException("Cannot create local file system of Apache Hadoop.", e);
            }
        }
        StringBuilder builder = new StringBuilder();
        builder.append(HDFS_URL_PREFIX);
        builder.append(getIpAddressFromPath(path));
        builder.append(":"); // separator between host and port, required for a valid "hdfs://IP:PORT" URL
        builder.append(getPortFromPath(path));
        return getFileSystem(builder.toString());
    }

    /**
     * Extracts the IP address from an HDFS path.
     *
     * @param path HDFS Path
     * @return IP Address
     */
    public static String getIpAddressFromPath(String path) {
        if (!path.startsWith(HDFS_URL_PREFIX)) {
            throw new FileSystemException(ExceptionUtils.getMessage("Invalid path '{}'", path));
        }
        String[] split = org.springframework.util.StringUtils.delete(path, HDFS_URL_PREFIX).split(":");
        return split[0];
    }

    /**
     * Extracts the port from an HDFS path.
     *
     * @param path HDFS Path
     * @return Port
     */
    public static String getPortFromPath(String path) {
        if (!path.startsWith(HDFS_URL_PREFIX)) {
            throw new FileSystemException(ExceptionUtils.getMessage("Invalid path '{}'", path));
        }
        String[] split = org.springframework.util.StringUtils.delete(path, HDFS_URL_PREFIX).split(":");
        if (split.length != 2) {
            throw new FileSystemException("Invalid path pattern. Path pattern must be \"hdfs://IP:PORT\".");
        }
        return split[1];
    }

    /**
     * Checks whether the given path exists.
     *
     * @param fs   Hadoop {@link org.apache.hadoop.fs.FileSystem}
     * @param path Path to check
     * @return <tt>true</tt> if the path exists
     */
    public static boolean isExist(FileSystem fs, String path) {
        try {
            return fs.exists(new Path(path));
        } catch (IOException e) {
            return false;
        }
    }

    /**
     * Returns the FileSystem for the given Hadoop {@link org.apache.hadoop.conf.Configuration}.
     *
     * @param conf {@link org.apache.hadoop.conf.Configuration}
     * @return FileSystem
     */
    public static FileSystem getFileSystem(Configuration conf) {
        try {
            return FileSystem.get(conf);
        } catch (Exception e) {
            throw new FileSystemException("Cannot access file system of Apache Hadoop", e);
        }
    }
}
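
To show how this utility class might be used, here is a minimal usage sketch. It is not part of the original source: the class name HdfsUtilsExample, the NameNode address hdfs://127.0.0.1:9000, and the /tmp paths are hypothetical placeholders, and it assumes HdfsUtils and its FileSystemException dependency are on the classpath together with the Hadoop client libraries.

// Hypothetical usage sketch (not from the original source); adjust the
// NameNode URL and paths for your own cluster.
package org.openflamingo.util.example;

import org.apache.hadoop.fs.FileSystem;
import org.openflamingo.util.HdfsUtils;

public class HdfsUtilsExample {

    public static void main(String[] args) throws Exception {
        // Resolve a FileSystem from an explicit fs.default.name URL.
        FileSystem fs = HdfsUtils.getFileSystem("hdfs://127.0.0.1:9000");

        // Create a working directory if it does not exist yet.
        if (!HdfsUtils.isExist(fs, "/tmp/flamingo")) {
            HdfsUtils.mkdir(fs, "/tmp/flamingo");
        }

        // Read a small text file (must be under HdfsUtils.MAX_SIZE) as UTF-8.
        if (HdfsUtils.isFile(fs, "/tmp/flamingo/input.txt")) {
            String contents = HdfsUtils.load(fs, "/tmp/flamingo/input.txt");
            System.out.println(contents);
        }

        // Move everything matching a glob into the working directory; the
        // destination must be a directory when the glob matches multiple files.
        HdfsUtils.move("/tmp/staging/*.txt", "/tmp/flamingo", fs);
    }
}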