Example usage for org.apache.hadoop.conf.Configuration Configuration()

Introduction

On this page you can find example usage for the org.apache.hadoop.conf.Configuration no-argument constructor, Configuration().

Prototype

public Configuration() 

Document

A new configuration.
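
Before the usage list, here is a minimal, self-contained sketch of the constructor in action. A new Configuration() is initialized from the default resources (core-default.xml and, when present on the classpath, core-site.xml); the class name ConfigurationExample and the key my.custom.key below are illustrative, not taken from the examples that follow.

import org.apache.hadoop.conf.Configuration;

public class ConfigurationExample {
    public static void main(String[] args) {
        // A new configuration, initialized from the default resources.
        Configuration conf = new Configuration();

        // Read a standard Hadoop property, falling back to a default value.
        String fsUri = conf.get("fs.defaultFS", "file:///");
        System.out.println("fs.defaultFS = " + fsUri);

        // Set and read back an application-specific property
        // ("my.custom.key" is an illustrative name, not a Hadoop key).
        conf.set("my.custom.key", "some-value");
        System.out.println("my.custom.key = " + conf.get("my.custom.key"));
    }
}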

Usage

From source file: azkaban.jobtype.JavaJobRunnerMain.java

License: Apache License

private void runMethodAsProxyUser(Properties props, final Object obj, final String runMethod)
        throws IOException, InterruptedException {
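    // Look up the user to proxy from the job properties; the fresh Configuration supplies cluster defaults.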
    UserGroupInformation ugi = SecurityUtils.getProxiedUser(props, _logger, new Configuration());
    _logger.info("user " + ugi + " authenticationMethod " + ugi.getAuthenticationMethod());
    _logger.info("user " + ugi + " hasKerberosCredentials " + ugi.hasKerberosCredentials());
    SecurityUtils.getProxiedUser(props, _logger, new Configuration())
            .doAs(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    runMethod(obj, runMethod);
                    return null;
                }
            });
}

From source file: azkaban.jobtype.JavaJobRunnerMain.java

License: Apache License

private static Object getObjectAsProxyUser(final Properties props, final Logger logger, final String jobName,
        final String className) throws Exception {
    return SecurityUtils.getProxiedUser(props, logger, new Configuration())
            .doAs(new PrivilegedExceptionAction<Object>() {
                @Override
                public Object run() throws Exception {
                    return getObject(jobName, className, props, logger);
                }
            });
}

From source file: azkaban.jobtype.javautils.AbstractHadoopJob.java

License: Apache License

public AbstractHadoopJob(String name, Props props) {
    this.props = props;
    this.jobName = name;
    conf = new Configuration();
    jobconf = new JobConf(conf);
    jobconf.setJobName(name);

    visualizer = props.getBoolean("mr.listener.visualizer", false);
    if (visualizer) {
        jobStatsFileName = props.getString("azkaban.job.attachment.file");
    }
}

From source file: azkaban.jobtype.javautils.HadoopUtils.java

License: Apache License

public static void saveProps(Props props, String file) throws IOException {
    Path path = new Path(file);

    FileSystem fs = path.getFileSystem(new Configuration());

    saveProps(fs, props, file);
}

From source file: azkaban.jobtype.ReportalAbstractRunner.java

License: Apache License

public void run() throws Exception {
    System.out.println("Reportal: Setting up environment");

    // Check the properties file
    if (props == null) {
        throw new ReportalRunnerException("Properties file not loaded correctly.");
    }

    // Get the hadoop token
    Configuration conf = new Configuration();
    if (System.getenv(HADOOP_TOKEN_FILE_LOCATION) != null) {
        conf.set(MAPREDUCE_JOB_CREDENTIALS_BINARY, System.getenv(HADOOP_TOKEN_FILE_LOCATION));
    }

    // Get properties
    String execId = props.getString(CommonJobProperties.EXEC_ID);
    outputCapacity = props.getInt("reportal.output.capacity", 10 * 1024 * 1024);
    proxyUser = props.getString("reportal.proxy.user");
    jobQuery = props.getString("reportal.job.query");
    jobTitle = props.getString("reportal.job.title");
    reportalTitle = props.getString("reportal.title");
    reportalStorageUser = props.getString("reportal.storage.user", "reportal");
    Map<String, String> reportalVariables = props.getMapByPrefix(REPORTAL_VARIABLE_PREFIX);

    // Parse variables
    for (Entry<String, String> entry : reportalVariables.entrySet()) {
        if (entry.getKey().endsWith("from")) {
            String fromValue = entry.getValue();
            String toKey = entry.getKey().substring(0, entry.getKey().length() - 4) + "to";
            String toValue = reportalVariables.get(toKey);
            if (toValue != null) {
                variables.put(fromValue, toValue);
            }
        }
    }

    // Built-in variables
    variables.put("run_id", execId);
    variables.put("sys_date", Long.toString(System.currentTimeMillis() / 1000));

    Calendar cal = Calendar.getInstance();

    String timeZone = props.getString("reportal.default.timezone", "UTC");
    TimeZone.setDefault(TimeZone.getTimeZone(timeZone));

    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
    SimpleDateFormat hourFormat = new SimpleDateFormat("yyyy-MM-dd-HH");

    variables.put("hive_current_hour", hourFormat.format(cal.getTime()));
    variables.put("hive_current_day", dateFormat.format(cal.getTime()));
    cal.add(Calendar.HOUR, -1);
    variables.put("hive_last_hour", hourFormat.format(cal.getTime()));
    cal.add(Calendar.HOUR, 1);
    cal.add(Calendar.DATE, -1);
    variables.put("hive_yesterday", dateFormat.format(cal.getTime()));
    cal.add(Calendar.DATE, -6);
    variables.put("hive_last_seven_days", dateFormat.format(cal.getTime()));
    cal.add(Calendar.DATE, -1);
    variables.put("hive_last_eight_days", dateFormat.format(cal.getTime()));
    variables.put("owner", proxyUser);
    variables.put("title", reportalTitle);

    // Props debug
    System.out.println("Reportal Variables:");
    for (Entry<String, String> data : variables.entrySet()) {
        System.out.println(data.getKey() + " -> " + data.getValue());
    }

    if (requiresOutput()) {
        // Get output stream to data
        String locationTemp = ("./reportal/" + jobTitle + ".csv").replace("//", "/");
        File tempOutput = new File(locationTemp);
        tempOutput.getParentFile().mkdirs();
        tempOutput.createNewFile();
        outputStream = new BoundedOutputStream(new BufferedOutputStream(new FileOutputStream(tempOutput)),
                outputCapacity);

        // Run the reportal
        runReportal();

        // Cleanup the reportal
        try {
            outputStream.close();
        } catch (IOException e) {
            // We can safely ignore this exception since we're just making sure the
            // stream is closed.
        }
    } else {
        runReportal();
    }
}

From source file: azkaban.jobtype.ReportalPrestoRunner.java

License: Apache License

private String decrypt(final String encrypted, final String keyPath) throws IOException {
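    // The key file is read through the local file system (file:///), not HDFS.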
    final FileSystem fs = FileSystem.get(URI.create("file:///"), new Configuration());
    return new Decryptions().decrypt(encrypted, keyPath, fs);
}

From source file: azkaban.jobtype.SecurePigWrapper.java

License: Apache License

public static void main(final String[] args) throws IOException, InterruptedException {
    final Logger logger = Logger.getRootLogger();
    final Properties p = System.getProperties();
    final Configuration conf = new Configuration();

    SecurityUtils.getProxiedUser(p, logger, conf).doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            prefetchToken();
            org.apache.pig.Main.main(args);
            return null;
        }

        // For Pig jobs that need to do extra communication with the
        // JobTracker, it's necessary to pre-fetch a token and include it in
        // the credentials cache
        private void prefetchToken() throws InterruptedException, IOException {
            String shouldPrefetch = p.getProperty(OBTAIN_BINARY_TOKEN);
            if (shouldPrefetch != null && shouldPrefetch.equals("true")) {
                logger.info("Pre-fetching token");
                Job job = new Job(conf, "totally phony, extremely fake, not real job");

                JobConf jc = new JobConf(conf);
                JobClient jobClient = new JobClient(jc);
                logger.info("Pre-fetching: Got new JobClient: " + jc);
                Token<DelegationTokenIdentifier> mrdt = jobClient.getDelegationToken(new Text("hi"));
                job.getCredentials().addToken(new Text("howdy"), mrdt);

                File temp = File.createTempFile("mr-azkaban", ".token");
                temp.deleteOnExit();

                FileOutputStream fos = null;
                DataOutputStream dos = null;
                try {
                    fos = new FileOutputStream(temp);
                    dos = new DataOutputStream(fos);
                    job.getCredentials().writeTokenStorageToStream(dos);
                } finally {
                    if (dos != null) {
                        dos.close();
                    }
                    if (fos != null) {
                        fos.close();
                    }
                }
                logger.info("Setting " + MAPREDUCE_JOB_CREDENTIALS_BINARY + " to " + temp.getAbsolutePath());
                System.setProperty(MAPREDUCE_JOB_CREDENTIALS_BINARY, temp.getAbsolutePath());
            } else {
                logger.info("Not pre-fetching token");
            }
        }
    });
}

From source file: azkaban.jobtype.TestWhitelist.java

License: Open Source License

@Before
@SuppressWarnings("DefaultCharset")
public void setup() throws IOException, URISyntaxException {
    temp = File.createTempFile(TestWhitelist.class.getSimpleName(), null);
    temp.deleteOnExit();

    try (BufferedWriter bw = new BufferedWriter(new FileWriter(temp))) {
        for (String s : whitelisted) {
            bw.write(s);
            bw.newLine();
        }
    }

    FileSystem fs = FileSystem.get(new URI("file:///"), new Configuration());
    whitelist = new Whitelist(temp.getAbsolutePath(), fs);
}

From source file: azkaban.reportal.util.StreamProviderHDFS.java

License: Apache License

private void ensureHdfs() throws HadoopSecurityManagerException, IOException {
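    // Lazily initialize the FileSystem handle, going through the security manager when one is configured.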
    if (hdfs == null) {
        if (securityManager == null) {
            hdfs = FileSystem.get(new Configuration());
        } else {
            hdfs = securityManager.getFSAsUser(username);
        }
    }
}

From source file: azkaban.security.commons.SecurityUtils.java

License: Apache License

public static synchronized void prefetchToken(final File tokenFile, final Props p, final Logger logger)
        throws InterruptedException, IOException {

    final Configuration conf = new Configuration();
    logger.info("Getting proxy user for " + p.getString(TO_PROXY));
    logger.info("Getting proxy user for " + p.toString());

    getProxiedUser(p.toProperties(), logger, conf).doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            getToken(p);
            return null;
        }

        private void getToken(Props p) throws InterruptedException, IOException {
            String shouldPrefetch = p.getString(OBTAIN_BINARY_TOKEN);
            if (shouldPrefetch != null && shouldPrefetch.equals("true")) {
                logger.info("Pre-fetching token");

                logger.info("Pre-fetching fs token");
                FileSystem fs = FileSystem.get(conf);
                Token<?> fsToken = fs.getDelegationToken(p.getString("user.to.proxy"));
                logger.info("Created token: " + fsToken.toString());

                Job job = new Job(conf, "totally phony, extremely fake, not real job");
                JobConf jc = new JobConf(conf);
                JobClient jobClient = new JobClient(jc);
                logger.info("Pre-fetching job token: Got new JobClient: " + jc);
                Token<DelegationTokenIdentifier> mrdt = jobClient.getDelegationToken(new Text("hi"));
                logger.info("Created token: " + mrdt.toString());

                job.getCredentials().addToken(new Text("howdy"), mrdt);
                job.getCredentials().addToken(fsToken.getService(), fsToken);

                FileOutputStream fos = null;
                DataOutputStream dos = null;
                try {
                    fos = new FileOutputStream(tokenFile);
                    dos = new DataOutputStream(fos);
                    job.getCredentials().writeTokenStorageToStream(dos);
                } finally {
                    if (dos != null) {
                        dos.close();
                    }
                    if (fos != null) {
                        fos.close();
                    }
                }
                logger.info("Loading hadoop tokens into " + tokenFile.getAbsolutePath());
                p.put("HadoopTokenFileLoc", tokenFile.getAbsolutePath());
            } else {
                logger.info("Not pre-fetching token");
            }
        }
    });
}