Example usage for java.security PrivilegedExceptionAction PrivilegedExceptionAction

List of usage examples for java.security PrivilegedExceptionAction PrivilegedExceptionAction

Introduction

On this page you can find an example usage of the java.security PrivilegedExceptionAction constructor.

Prototype

PrivilegedExceptionAction

Source Link

Usage

From source file:org.apache.druid.security.kerberos.DruidKerberosAuthenticationHandler.java

/**
 * Initializes the Kerberos/SPNEGO authentication handler from servlet
 * configuration: validates the principal and keytab settings, resolves the
 * set of SPNEGO principals to log in as, applies optional Kerberos name
 * rules, performs a JAAS login for each principal, and creates the
 * {@link GSSManager} while running as the logged-in server subject.
 *
 * @param config servlet configuration properties; must define PRINCIPAL and
 *               KEYTAB, and may define NAME_RULES
 * @throws ServletException if configuration is missing/invalid or any login
 *                          step fails (all failures are wrapped)
 */
@Override
public void init(Properties config) throws ServletException {
    try {
        String principal = config.getProperty(PRINCIPAL);
        if (principal == null || principal.trim().length() == 0) {
            throw new ServletException("Principal not defined in configuration");
        }
        keytab = config.getProperty(KEYTAB, keytab);
        if (keytab == null || keytab.trim().length() == 0) {
            throw new ServletException("Keytab not defined in configuration");
        }
        if (!new File(keytab).exists()) {
            throw new ServletException("Keytab does not exist: " + keytab);
        }

        // use all SPNEGO principals in the keytab if a principal isn't
        // specifically configured
        final String[] spnegoPrincipals;
        if ("*".equals(principal)) {
            spnegoPrincipals = KerberosUtil.getPrincipalNames(keytab, Pattern.compile("HTTP/.*"));
            if (spnegoPrincipals.length == 0) {
                throw new ServletException("Principals do not exist in the keytab");
            }
        } else {
            spnegoPrincipals = new String[] { principal };
        }

        // NAME_RULES (auth-to-local mapping) is optional; only override the
        // KerberosName defaults when explicitly configured.
        String nameRules = config.getProperty(NAME_RULES, null);
        if (nameRules != null) {
            KerberosName.setRules(nameRules);
        }

        // Log in once per SPNEGO principal, accumulating credentials into
        // serverSubject; each LoginContext is retained so it can be managed
        // (e.g. logged out) later via loginContexts.
        for (String spnegoPrincipal : spnegoPrincipals) {
            log.info("Login using keytab %s, for principal %s", keytab, spnegoPrincipal);
            final KerberosAuthenticator.DruidKerberosConfiguration kerberosConfiguration = new KerberosAuthenticator.DruidKerberosConfiguration(
                    keytab, spnegoPrincipal);
            final LoginContext loginContext = new LoginContext("", serverSubject, null, kerberosConfiguration);
            try {
                loginContext.login();
            } catch (LoginException le) {
                log.warn(le, "Failed to login as [%s]", spnegoPrincipal);
                throw new AuthenticationException(le);
            }
            loginContexts.add(loginContext);
        }
        // Obtain the GSSManager while running as the logged-in server subject
        // so GSS-API operations pick up the subject's Kerberos credentials.
        try {
            gssManager = Subject.doAs(serverSubject, new PrivilegedExceptionAction<GSSManager>() {

                @Override
                public GSSManager run() {
                    return GSSManager.getInstance();
                }
            });
        } catch (PrivilegedActionException ex) {
            // Unwrap to surface the underlying checked exception; the outer
            // catch below converts it to a ServletException.
            throw ex.getException();
        }
    } catch (Exception ex) {
        throw new ServletException(ex);
    }
}

From source file:azkaban.jobtype.connectors.teradata.TeradataToHdfsJobRunnerMain.java

/**
 * Entry point for the Teradata-to-HDFS copy job. When Hadoop security
 * requires proxying, the copy is executed inside a {@code doAs} block for
 * the configured proxy user; otherwise it runs directly in the caller's
 * security context.
 *
 * @throws IOException          if the copy fails
 * @throws InterruptedException if the privileged action is interrupted
 */
public void run() throws IOException, InterruptedException {
    final String name = System.getenv(AbstractProcessJob.JOB_NAME_ENV);
    _logger.info("Running job " + name);

    if (!HadoopSecureWrapperUtils.shouldProxy(_jobProps)) {
        // No proxying required: execute in the current security context.
        runCopyTdToHdfs();
        return;
    }

    final String delegationTokenFile = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
    final UserGroupInformation proxy =
            HadoopSecureWrapperUtils.setupProxyUser(_jobProps, delegationTokenFile, _logger);

    proxy.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            runCopyTdToHdfs();
            return null;
        }
    });
}

From source file:org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.java

/**
 * Handles an incoming WebHDFS HTTP request: validates the URI prefix,
 * parses the request parameters, resolves the caller's UGI, and dispatches
 * the request to {@code handle} while running as that user.
 *
 * @param ctx the Netty channel context for writing the response
 * @param req the HTTP request; its URI must start with WEBHDFS_PREFIX
 * @throws Exception if parameter parsing, token injection, or handling fails
 */
@Override
public void channelRead0(final ChannelHandlerContext ctx, final HttpRequest req) throws Exception {
    Preconditions.checkArgument(req.getUri().startsWith(WEBHDFS_PREFIX));
    QueryStringDecoder queryString = new QueryStringDecoder(req.getUri());
    // NOTE: params, ugi and path are instance fields mutated per request;
    // later steps (injectToken, handle) read them, so this order matters.
    params = new ParameterParser(queryString, conf);
    DataNodeUGIProvider ugiProvider = new DataNodeUGIProvider(params);
    ugi = ugiProvider.ugi();
    path = params.path();

    injectToken();
    // Execute the actual request handling as the resolved user so file-system
    // access happens with the caller's permissions.
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            handle(ctx, req);
            return null;
        }
    });
}

From source file:com.thinkbiganalytics.datalake.authorization.SentryAuthorizationService.java

/**
 * Creates or updates a read-only Hive policy for the given category/feed.
 * When Kerberos is enabled the Sentry calls run inside a {@code doAs} block
 * for the authenticated policy-creator user; otherwise they run directly.
 *
 * <p>Fix over previous revision: exceptions are now passed to the logger as
 * throwables (previously only {@code e.getMessage()} was concatenated, which
 * dropped the stack trace and produced a message with no separator), and the
 * duplicated create-or-update logic is factored into a single helper.</p>
 *
 * @param categoryName             category of the feed
 * @param feedName                 feed the policy applies to
 * @param hadoopAuthorizationGroups groups granted read-only access
 * @param datebaseName             Hive database name (spelling kept for API compatibility)
 * @param tableNames               Hive tables covered by the policy
 */
@Override
public void createOrUpdateReadOnlyHivePolicy(String categoryName, String feedName,
        List<String> hadoopAuthorizationGroups, String datebaseName, List<String> tableNames) {

    if (this.sentryConnection.getKerberosTicketConfiguration().isKerberosEnabled()) {
        try {
            UserGroupInformation ugi = authenticatePolicyCreatorWithKerberos();
            if (ugi == null) {
                log.error(UserGroupObjectError);
            } else {
                ugi.doAs(new PrivilegedExceptionAction<Void>() {
                    @Override
                    public Void run() throws Exception {
                        applyReadOnlyHivePolicy(categoryName, feedName, hadoopAuthorizationGroups,
                                datebaseName, tableNames);
                        return null;
                    }
                });
            }
        } catch (Exception e) {
            // Include the throwable so the full stack trace is preserved.
            log.error("Error Creating Sentry Hive Policy using Kerberos Authentication: " + e.getMessage(), e);
            throw new RuntimeException(e);
        }
    } else {
        applyReadOnlyHivePolicy(categoryName, feedName, hadoopAuthorizationGroups, datebaseName, tableNames);
    }
}

/**
 * Creates the policy when the Sentry role does not yet exist, otherwise
 * updates it. Shared by both the Kerberos and non-Kerberos paths.
 */
private void applyReadOnlyHivePolicy(String categoryName, String feedName,
        List<String> hadoopAuthorizationGroups, String datebaseName, List<String> tableNames) {
    String sentryPolicyName = getHivePolicyName(categoryName, feedName);
    if (!(sentryClientObject.checkIfRoleExists(sentryPolicyName))) {
        createReadOnlyHivePolicy(categoryName, feedName, hadoopAuthorizationGroups, datebaseName,
                tableNames);
    } else {
        try {
            updateReadOnlyHivePolicy(categoryName, feedName, hadoopAuthorizationGroups, datebaseName,
                    tableNames);
        } catch (Exception e) {
            // Include the throwable so the full stack trace is preserved.
            log.error("Failed to update Hive Policy: " + e.getMessage(), e);
            throw new RuntimeException(e);
        }
    }
}

From source file:org.apache.axiom.attachments.lifecycle.impl.LifecycleManagerImpl.java

/**
 * Registers (at most once) the VM shutdown hook used to clean up attachment
 * lifecycle resources, performing the registration as a privileged action.
 *
 * <p>Fix over previous revision: the {@link PrivilegedExceptionAction} is
 * now parameterized with {@code VMShutdownHook}, eliminating the raw type
 * and the unchecked cast of the returned value.</p>
 *
 * @return the registered singleton hook
 * @throws RuntimeException if the privileged registration is denied or fails
 */
private VMShutdownHook RegisterVMShutdownHook() throws RuntimeException {
    if (log.isDebugEnabled()) {
        log.debug("Start RegisterVMShutdownHook()");
    }
    try {
        hook = AccessController.doPrivileged(new PrivilegedExceptionAction<VMShutdownHook>() {
            public VMShutdownHook run() throws SecurityException, IllegalStateException, IllegalArgumentException {
                VMShutdownHook hook = VMShutdownHook.hook();
                // Register only once; the hook tracks its own registration state.
                if (!hook.isRegistered()) {
                    Runtime.getRuntime().addShutdownHook(hook);
                    hook.setRegistered(true);
                }
                return hook;
            }
        });
    } catch (PrivilegedActionException e) {
        if (log.isDebugEnabled()) {
            log.debug("Exception thrown from AccessController: " + e);
            log.debug("VM Shutdown Hook not registered.");
        }
        throw new RuntimeException(e);
    }
    if (log.isDebugEnabled()) {
        log.debug("Exit RegisterVMShutdownHook()");
    }
    return hook;
}

From source file:org.apache.hadoop.mapred.gridmix.GridmixJob.java

/**
 * Constructs a GridmixJob with no backing job description (used for
 * synthetic/generated jobs), creating the underlying Hadoop {@link Job} as
 * the current user.
 *
 * <p>Fix over previous revision: the interrupt status is restored before
 * converting {@link InterruptedException} to {@link IOException}, so callers
 * up the stack can still observe the interruption.</p>
 *
 * @param conf             job configuration
 * @param submissionMillis submission time in milliseconds (stored as nanos)
 * @param name             job name
 * @throws IOException if job creation fails or is interrupted
 */
protected GridmixJob(final Configuration conf, long submissionMillis, final String name) throws IOException {
    submissionTimeNanos = TimeUnit.NANOSECONDS.convert(submissionMillis, TimeUnit.MILLISECONDS);
    jobdesc = null;
    outdir = null;
    seq = -1;
    ugi = UserGroupInformation.getCurrentUser();

    try {
        job = this.ugi.doAs(new PrivilegedExceptionAction<Job>() {
            public Job run() throws IOException {
                Job ret = new Job(conf, name);
                ret.getConfiguration().setInt("gridmix.job.seq", seq);
                setJobQueue(ret, conf.get(GRIDMIX_DEFAULT_QUEUE));

                return ret;
            }
        });
    } catch (InterruptedException e) {
        // Re-assert the interrupt before translating to IOException.
        Thread.currentThread().interrupt();
        throw new IOException(e);
    }
}

From source file:com.stratuscom.harvester.classloading.VirtualFileSystemClassLoader.java

/**
 * Find a resource by searching through all the classpath entries that have
 * been set up./*  w w w.ja va  2s .c  o m*/
 *
 * @param name
 * @return
 */
@Override
public URL findResource(final String name) {
    try {
        return (URL) Security.doPrivileged(new PrivilegedExceptionAction<URL>() {

            @Override
            public URL run() throws Exception {
                FileObject fo = findResourceFileObject(name);
                return fo == null ? null : fo.getURL();
            }
        });

    } catch (Exception ex) {
        Logger.getLogger(VirtualFileSystemClassLoader.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}

From source file:com.netspective.commons.io.UriAddressableUniqueFileLocator.java

/**
 * Resolves the named resource to a file under this locator's base directory,
 * consulting (and populating) the location cache when enabled. The file
 * lookup runs as a privileged action and includes a canonical-path check to
 * prevent escaping the resource directory.
 *
 * <p>Fix over previous revision: the {@link PrivilegedExceptionAction} is
 * parameterized with {@code UriAddressableFile}, removing the raw type and
 * the cast of the privileged action's result.</p>
 *
 * @param name resource name, using '/' or the platform separator
 * @return the located resource, or {@code null} if no such file exists
 * @throws IOException       if canonicalization or file access fails
 * @throws SecurityException if the resolved path escapes the base directory
 */
public UriAddressableFile findUriAddressableFile(final String name) throws IOException {
    final boolean logging = log.isDebugEnabled();
    if (logging)
        log.debug("SingleUriAddressableFileLocator searching for " + name);

    if (cacheLocations) {
        UriAddressableFile resource = (UriAddressableFile) cache.get(name);
        if (resource != null) {
            if (logging)
                log.debug("SingleUriAddressableFileLocator cache hit for " + resource);
            return resource;
        }
    }

    try {
        return AccessController.doPrivileged(new PrivilegedExceptionAction<UriAddressableFile>() {
            public UriAddressableFile run() throws IOException {
                File source = new File(baseDir, SEP_IS_SLASH ? name : name.replace(File.separatorChar, '/'));
                // Security check for inadvertently returning something outside the
                // resource directory.
                String normalized = source.getCanonicalPath();
                if (!normalized.startsWith(canonicalPath)) {
                    throw new SecurityException();
                }

                if (logging)
                    log.debug("SingleUriAddressableFileLocator looking for '" + name + "' as " + source);
                UriAddressableFile result = source.exists() ? new UriAddressableFile(rootUrl, name, source)
                        : null;
                if (result != null) {
                    if (logging)
                        log.debug("SingleUriAddressableFileLocator found " + result);
                    if (cacheLocations)
                        cache.put(name, result);
                }
                return result;
            }
        });
    } catch (PrivilegedActionException e) {
        // run() only declares IOException as its checked exception, so the
        // wrapped exception is always an IOException here.
        throw (IOException) e.getException();
    }
}

From source file:org.apache.hadoop.hive.llap.security.LlapSecurityHelper.java

/**
 * Fetches an LLAP delegation token as the LLAP user.
 *
 * @param appId optional application id to scope the token; omitted from the
 *              request when blank
 * @return the serialized delegation token bytes
 * @throws InterruptedException if the doAs action is interrupted
 * @throws IOException          if the management RPC fails
 */
private ByteString getTokenBytes(final String appId) throws InterruptedException, IOException {
    return llapUgi.doAs(new PrivilegedExceptionAction<ByteString>() {
        @Override
        public ByteString run() throws Exception {
            assert clientInstance != null;
            // Lazily create the management client on first use.
            // NOTE(review): this lazy init is not synchronized — presumably
            // getTokenBytes is only called from one thread; confirm callers.
            if (client == null) {
                client = new LlapManagementProtocolClientImpl(conf, clientInstance.getHost(),
                        clientInstance.getManagementPort(), retryPolicy, socketFactory);
            }
            // Client only connects on the first call, so this has to be done in doAs.
            GetTokenRequestProto.Builder req = GetTokenRequestProto.newBuilder();
            if (!StringUtils.isBlank(appId)) {
                req.setAppId(appId);
            }
            return client.getDelegationToken(null, req.build()).getToken();
        }
    });
}

From source file:org.apache.axis2.jaxws.message.databinding.impl.ClassFinderImpl.java

/**
 * Return the class for this name//from  ww w.j a v  a2s  .  co m
 *
 * @return Class
 */
private static Class forName(final String className, final boolean initialize, final ClassLoader classloader) {
    // NOTE: This method must remain private because it uses AccessController
    Class cl = null;
    try {
        cl = (Class) AccessController.doPrivileged(new PrivilegedExceptionAction() {
            public Object run() throws ClassNotFoundException {
                return Class.forName(className, initialize, classloader);
            }
        });
    } catch (PrivilegedActionException e) {
        if (log.isDebugEnabled()) {
            log.debug("Exception thrown from AccessController: " + e);
        }
        throw ExceptionFactory.makeWebServiceException(e.getException());
    }

    return cl;
}