Example usage for org.apache.hadoop.conf.Configuration#Configuration()

Introduction

On this page you can find example usage for the no-argument constructor org.apache.hadoop.conf.Configuration#Configuration().

Prototype

public Configuration() 

Document

A new configuration. The no-argument constructor loads the default resources core-default.xml and core-site.xml from the classpath.
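
A minimal, self-contained sketch of the no-argument constructor on its own; the values printed depend on whatever core-site.xml is on the classpath:

import org.apache.hadoop.conf.Configuration;

public class ConfigurationExample {
    public static void main(String[] args) {
        // The no-arg constructor loads the default resources
        // core-default.xml and core-site.xml from the classpath.
        Configuration conf = new Configuration();

        // Read a key with a fallback default.
        System.out.println("fs.default.name = " + conf.get("fs.default.name", "file:///"));

        // Programmatic settings override values from the XML resources.
        conf.setBoolean("fs.hdfs.impl.disable.cache", true);
        System.out.println(conf.getBoolean("fs.hdfs.impl.disable.cache", false));
    }
}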

Usage

From source file: azkaban.security.HadoopSecurityManager_H_1_0.java

License: Apache License

private HadoopSecurityManager_H_1_0(Props props) throws HadoopSecurityManagerException, IOException {

    // for now, assume the same/compatible native library, the same/compatible
    // hadoop-core jar
    String hadoopHome = props.getString("hadoop.home", null);
    String hadoopConfDir = props.getString("hadoop.conf.dir", null);

    if (hadoopHome == null) {
        hadoopHome = System.getenv("HADOOP_HOME");
    }
    if (hadoopConfDir == null) {
        hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
    }

    List<URL> resources = new ArrayList<URL>();
    if (hadoopConfDir != null) {
        logger.info("Using hadoop config found in " + new File(hadoopConfDir).toURI().toURL());
        resources.add(new File(hadoopConfDir).toURI().toURL());
    } else if (hadoopHome != null) {
        logger.info("Using hadoop config found in " + new File(hadoopHome, "conf").toURI().toURL());
        resources.add(new File(hadoopHome, "conf").toURI().toURL());
    } else {
        logger.info("HADOOP_HOME not set, using default hadoop config.");
    }

    ucl = new URLClassLoader(resources.toArray(new URL[resources.size()]));

    conf = new Configuration();
    conf.setClassLoader(ucl);

    if (props.containsKey("fs.hdfs.impl.disable.cache")) {
        logger.info("Setting fs.hdfs.impl.disable.cache to " + props.get("fs.hdfs.impl.disable.cache"));
        conf.setBoolean("fs.hdfs.impl.disable.cache", Boolean.valueOf(props.get("fs.hdfs.impl.disable.cache")));
    }

    logger.info("hadoop.security.authentication set to " + conf.get("hadoop.security.authentication"));
    logger.info("hadoop.security.authorization set to " + conf.get("hadoop.security.authorization"));
    logger.info("DFS name " + conf.get("fs.default.name"));

    UserGroupInformation.setConfiguration(conf);

    securityEnabled = UserGroupInformation.isSecurityEnabled();
    if (securityEnabled) {
        logger.info("The Hadoop cluster has enabled security");
        shouldProxy = true;
        try {
            keytabLocation = props.getString(PROXY_KEYTAB_LOCATION);
            keytabPrincipal = props.getString(PROXY_USER);
        } catch (UndefinedPropertyException e) {
            throw new HadoopSecurityManagerException(e.getMessage());
        }

        // try login
        try {
            if (loginUser == null) {
                logger.info("No login user. Creating login user");
                logger.info("Logging with " + keytabPrincipal + " and " + keytabLocation);
                UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytabLocation);
                loginUser = UserGroupInformation.getLoginUser();
                logger.info("Logged in with user " + loginUser);
            } else {
                logger.info("loginUser (" + loginUser + ") already created, refreshing tgt.");
                loginUser.checkTGTAndReloginFromKeytab();
            }
        } catch (IOException e) {
            throw new HadoopSecurityManagerException("Failed to login with kerberos ", e);
        }

    }

    userUgiMap = new ConcurrentHashMap<String, UserGroupInformation>();

    logger.info("Hadoop Security Manager Initiated");
}
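
The constructor above routes site configuration through a custom class loader. Where the configuration directory is known, an alternative sketch loads the site files explicitly with addResource; the /etc/hadoop/conf path is an assumed example location, not Azkaban's actual layout:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class ExplicitResourceExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Add site files directly instead of relying on the class loader;
        // the directory below is a placeholder.
        conf.addResource(new Path("/etc/hadoop/conf/core-site.xml"));
        conf.addResource(new Path("/etc/hadoop/conf/hdfs-site.xml"));
        System.out.println(conf.get("fs.default.name"));
    }
}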

From source file: azkaban.security.HadoopSecurityManager_H_1_0.java

License: Apache License

@Override
public void cancelTokens(File tokenFile, String userToProxy, Logger logger)
        throws HadoopSecurityManagerException {
    // Read the token storage file and cancel each token by kind.
    Credentials cred = null;
    try {
        cred = Credentials.readTokenStorageFile(new Path(tokenFile.toURI()), new Configuration());
        for (Token<? extends TokenIdentifier> t : cred.getAllTokens()) {
            logger.info("Got token: " + t.toString());
            logger.info("Token kind: " + t.getKind());
            logger.info("Token id: " + new String(t.getIdentifier()));
            logger.info("Token service: " + t.getService());
            if (t.getKind().equals(new Text("HIVE_DELEGATION_TOKEN"))) {
                logger.info("Cancelling hive token " + new String(t.getIdentifier()));
                cancelHiveToken(t, userToProxy);
            } else if (t.getKind().equals(new Text("MAPREDUCE_DELEGATION_TOKEN"))) {
                logger.info("Cancelling mr job tracker token " + new String(t.getIdentifier()));
                cancelMRJobTrackerToken(t, userToProxy);
            } else if (t.getKind().equals(new Text("HDFS_DELEGATION_TOKEN"))) {
                logger.info("Cancelling namenode token " + new String(t.getIdentifier()));
                cancelNameNodeToken(t, userToProxy);
            } else {
                logger.info("unknown token type " + t.getKind());
            }
        }
    } catch (Exception e) {
        // Surface failures instead of swallowing them, as the H_2_0 version does.
        throw new HadoopSecurityManagerException("Failed to cancel tokens: " + e.getMessage(), e);
    }

}
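
The write-side counterpart to Credentials.readTokenStorageFile() above is writeTokenStorageFile(); a hedged sketch, assuming a Hadoop version where UserGroupInformation#getCredentials() is available (the output path is illustrative):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

public class TokenFileWriter {
    public static void main(String[] args) throws IOException {
        // Collect the current user's tokens and persist them to a file
        // that readTokenStorageFile() can load back.
        Credentials cred = UserGroupInformation.getCurrentUser().getCredentials();
        cred.writeTokenStorageFile(new Path("/tmp/azkaban-tokens"), new Configuration());
    }
}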

From source file: azkaban.security.HadoopSecurityManager_H_2_0.java

License: Apache License

private HadoopSecurityManager_H_2_0(Props props) throws HadoopSecurityManagerException, IOException {

    // for now, assume the same/compatible native library, the same/compatible
    // hadoop-core jar
    String hadoopHome = props.getString("hadoop.home", null);
    String hadoopConfDir = props.getString("hadoop.conf.dir", null);

    if (hadoopHome == null) {
        hadoopHome = System.getenv("HADOOP_HOME");
    }
    if (hadoopConfDir == null) {
        hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
    }

    List<URL> resources = new ArrayList<URL>();
    URL urlToHadoop = null;
    if (hadoopConfDir != null) {
        urlToHadoop = new File(hadoopConfDir).toURI().toURL();
        logger.info("Using hadoop config found in " + urlToHadoop);
        resources.add(urlToHadoop);
    } else if (hadoopHome != null) {
        urlToHadoop = new File(hadoopHome, "conf").toURI().toURL();
        logger.info("Using hadoop config found in " + urlToHadoop);
        resources.add(urlToHadoop);
    } else {
        logger.info("HADOOP_HOME not set, using default hadoop config.");
    }

    ucl = new URLClassLoader(resources.toArray(new URL[resources.size()]));

    conf = new Configuration();
    conf.setClassLoader(ucl);

    if (props.containsKey(FS_HDFS_IMPL_DISABLE_CACHE)) {
        logger.info("Setting " + FS_HDFS_IMPL_DISABLE_CACHE + " to " + props.get(FS_HDFS_IMPL_DISABLE_CACHE));
        conf.setBoolean(FS_HDFS_IMPL_DISABLE_CACHE, Boolean.valueOf(props.get(FS_HDFS_IMPL_DISABLE_CACHE)));
    }

    logger.info(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION + ": "
            + conf.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION));
    logger.info(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION + ":  "
            + conf.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION));
    logger.info(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY + ": "
            + conf.get(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY));

    UserGroupInformation.setConfiguration(conf);

    securityEnabled = UserGroupInformation.isSecurityEnabled();
    if (securityEnabled) {
        logger.info("The Hadoop cluster has enabled security");
        shouldProxy = true;
        try {
            keytabLocation = props.getString(AZKABAN_KEYTAB_LOCATION);
            keytabPrincipal = props.getString(AZKABAN_PRINCIPAL);
        } catch (UndefinedPropertyException e) {
            throw new HadoopSecurityManagerException(e.getMessage());
        }

        // try login
        try {
            if (loginUser == null) {
                logger.info("No login user. Creating login user");
                logger.info("Using principal from " + keytabPrincipal + " and " + keytabLocation);
                UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytabLocation);
                loginUser = UserGroupInformation.getLoginUser();
                logger.info("Logged in with user " + loginUser);
            } else {
                logger.info("loginUser (" + loginUser + ") already created, refreshing tgt.");
                loginUser.checkTGTAndReloginFromKeytab();
            }
        } catch (IOException e) {
            throw new HadoopSecurityManagerException("Failed to login with kerberos ", e);
        }

    }

    userUgiMap = new ConcurrentHashMap<String, UserGroupInformation>();

    logger.info("Hadoop Security Manager initialized");
}

From source file: azkaban.security.HadoopSecurityManager_H_2_0.java

License: Apache License

@Override
public void cancelTokens(File tokenFile, String userToProxy, Logger logger)
        throws HadoopSecurityManagerException {
    // Read the token storage file; all cancellations except Hive's are disabled in this version.
    Credentials cred = null;
    try {
        cred = Credentials.readTokenStorageFile(new Path(tokenFile.toURI()), new Configuration());
        for (Token<? extends TokenIdentifier> t : cred.getAllTokens()) {

            logger.info("Got token: " + t.toString());
            logger.info("Token kind: " + t.getKind());
            logger.info("Token id: " + new String(t.getIdentifier()));
            logger.info("Token service: " + t.getService());

            if (t.getKind().equals(new Text("HIVE_DELEGATION_TOKEN"))) {
                logger.info("Cancelling hive token " + new String(t.getIdentifier()));
                cancelHiveToken(t, userToProxy);
            } else if (t.getKind().equals(new Text("RM_DELEGATION_TOKEN"))) {
                logger.info("Cancelling mr job tracker token " + new String(t.getIdentifier()));
                // cancelMRJobTrackerToken(t, userToProxy);
            } else if (t.getKind().equals(new Text("HDFS_DELEGATION_TOKEN"))) {
                logger.info("Cancelling namenode token " + new String(t.getIdentifier()));
                // cancelNameNodeToken(t, userToProxy);
            } else if (t.getKind().equals(new Text("MR_DELEGATION_TOKEN"))) {
                logger.info("Cancelling jobhistoryserver mr token " + new String(t.getIdentifier()));
                // cancelJhsToken(t, userToProxy);
            } else {
                logger.info("unknown token type " + t.getKind());
            }
        }
    } catch (Exception e) {
        throw new HadoopSecurityManagerException("Failed to cancel tokens " + e.getMessage() + e.getCause(), e);
    }

}
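
In Hadoop 2.x the commented-out, kind-specific cancellations can also be done generically through Token#cancel(Configuration), which resolves the right service from the token kind; a sketch under that assumption:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class GenericTokenCanceller {
    static void cancel(Token<? extends TokenIdentifier> t)
            throws IOException, InterruptedException {
        // The token's kind selects the TokenRenewer that performs the cancel.
        t.cancel(new Configuration());
    }
}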

From source file: azkaban.viewer.hdfs.HdfsSequenceFileViewer.java

License: Apache License

public boolean canReadFile(FileSystem fs, Path file) {
    boolean result = false;
    AzkabanSequenceFileReader.Reader reader = null;
    try {
        reader = new AzkabanSequenceFileReader.Reader(fs, file, new Configuration());
        result = canReadFile(reader);
    } catch (Exception e) {
        // Any failure means the file cannot be read as a sequence file.
        return false;
    } finally {
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
                // Ignore errors while closing the reader.
            }
        }
    }

    return result;
}

From source file: azkaban.viewer.hdfs.HdfsSequenceFileViewer.java

License: Apache License

public void displayFile(FileSystem fs, Path file, OutputStream outputStream, int startLine, int endLine)
        throws IOException {
    AzkabanSequenceFileReader.Reader reader = null;
    PrintWriter writer = new PrintWriter(outputStream);
    try {
        reader = new AzkabanSequenceFileReader.Reader(fs, file, new Configuration());
        displaySequenceFile(reader, writer, startLine, endLine);
    } catch (IOException e) {
        writer.write("Error opening sequence file " + e);
        throw e;
    } finally {
        if (reader != null) {
            reader.close();
        }
    }
}
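
The close-in-finally pattern above compresses to try-with-resources when the reader implements Closeable, as Hadoop's own SequenceFile.Reader does; a sketch using the older fs/path/conf constructor form:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;

public class SequenceFilePeek {
    static void printKeyClass(FileSystem fs, Path file) throws IOException {
        // try-with-resources closes the reader even if reading fails.
        try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, new Configuration())) {
            System.out.println("Key class: " + reader.getKeyClassName());
        }
    }
}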

From source file: azkaban.viewer.hdfs.SequenceFileViewer.java

License: Apache License

@Override
public Set<Capability> getCapabilities(FileSystem fs, Path path) throws AccessControlException {
    Set<Capability> result = EnumSet.noneOf(Capability.class);
    AzkabanSequenceFileReader.Reader reader = null;
    try {
        reader = new AzkabanSequenceFileReader.Reader(fs, path, new Configuration());
        result = getCapabilities(reader);
    } catch (AccessControlException e) {
        // Let permission errors propagate to the caller.
        throw e;
    } catch (IOException e) {
        // Unreadable files expose no capabilities.
        return EnumSet.noneOf(Capability.class);
    } finally {
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    return result;
}

From source file: azkaban.viewer.hdfs.SequenceFileViewer.java

License: Apache License

@Override
public void displayFile(FileSystem fs, Path file, OutputStream outputStream, int startLine, int endLine)
        throws IOException {

    AzkabanSequenceFileReader.Reader reader = null;
    PrintWriter writer = new PrintWriter(outputStream);
    try {
        reader = new AzkabanSequenceFileReader.Reader(fs, file, new Configuration());
        displaySequenceFile(reader, writer, startLine, endLine);
    } catch (IOException e) {
        writer.write("Error opening sequence file " + e);
        throw e;
    } finally {
        if (reader != null) {
            reader.close();
        }
    }
}

From source file: azkaban.web.pages.HdfsBrowserServlet.java

License: Apache License

@Override
public void init(ServletConfig config) throws ServletException {
    super.init(config);
    try {
        Configuration conf = new Configuration();
        conf.setClassLoader(this.getApplication().getClassLoader());
        _fs = FileSystem.get(conf);
    } catch (IOException e) {
        throw new ServletException(e);
    }
}
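
FileSystem.get(conf) above resolves the target filesystem from fs.default.name in the Configuration; an equivalent sketch that names the cluster URI explicitly (host and port are placeholders):

import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class ExplicitFsExample {
    static FileSystem open() throws IOException {
        Configuration conf = new Configuration();
        // Address the cluster directly instead of reading fs.default.name;
        // "namenode:8020" is an assumed example address.
        return FileSystem.get(URI.create("hdfs://namenode:8020"), conf);
    }
}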

From source file: azkaban.webapp.servlet.hdfsviewer.SequenceFileViewer.java

License: Apache License

@Override
public Set<Capability> getCapabilities(FileSystem fs, Path path) {
    Set<Capability> result = EnumSet.noneOf(Capability.class);
    AzkabanSequenceFileReader.Reader reader = null;

    try {
        reader = new AzkabanSequenceFileReader.Reader(fs, path, new Configuration());
        result = getCapabilities(reader);
    } catch (Exception e) {
        // Any failure means no capabilities can be determined.
        return EnumSet.noneOf(Capability.class);
    } finally {
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    return result;
}