Example usage for javax.security.auth Subject doAs

List of usage examples for javax.security.auth Subject doAs

Introduction

On this page you can find example usage for javax.security.auth Subject.doAs.

Prototype

public static <T> T doAs(final Subject subject, final java.security.PrivilegedExceptionAction<T> action)
        throws java.security.PrivilegedActionException 

Document

Perform work as a particular Subject.
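
A minimal, self-contained sketch of the pattern used throughout the examples below (the empty Subject here is a placeholder; in practice the Subject usually comes from a LoginContext after a successful login):

import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;

import javax.security.auth.Subject;

public class SubjectDoAsExample {

    public static void main(String[] args) {
        // Placeholder Subject for illustration only; a real Subject would normally
        // be obtained from a LoginContext after login().
        final Subject subject = new Subject();

        try {
            // The action runs with the given Subject bound to the current
            // access control context.
            String result = Subject.doAs(subject, new PrivilegedExceptionAction<String>() {
                @Override
                public String run() throws Exception {
                    return "work performed as the given Subject";
                }
            });
            System.out.println(result);
        } catch (PrivilegedActionException ex) {
            // Checked exceptions thrown by run() are wrapped in PrivilegedActionException,
            // which is why the examples below unwrap them with getException().
            ex.getException().printStackTrace();
        }
    }
}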

Usage

From source file:com.cloudera.alfredo.server.KerberosAuthenticationHandler.java

/**
 * It enforces the Kerberos SPNEGO authentication sequence, returning an {@link AuthenticationToken} only
 * after the Kerberos SPNEGO sequence has completed successfully.
 * <p/>
 *
 * @param request the HTTP client request.
 * @param response the HTTP client response.
 * @return an authentication token if the Kerberos SPNEGO sequence is complete and valid,
 * <code>null</code> if it is in progress (in this case the handler handles the response to the client).
 * @throws IOException thrown if an IO error occurred.
 * @throws AuthenticationException thrown if Kerberos SPNEGO sequence failed.
 */
@Override
public AuthenticationToken authenticate(HttpServletRequest request, final HttpServletResponse response)
        throws IOException, AuthenticationException {
    AuthenticationToken token = null;
    String authorization = request.getHeader(KerberosAuthenticator.AUTHORIZATION);

    if (authorization == null) {
        response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
        response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
        LOG.trace("SPNEGO starts");
    } else if (!authorization.startsWith(KerberosAuthenticator.NEGOTIATE)) {
        response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
        response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
        LOG.warn("'" + KerberosAuthenticator.AUTHORIZATION + "' does not start with '"
                + KerberosAuthenticator.NEGOTIATE + "' :  {}", authorization);
    } else {
        authorization = authorization.substring(KerberosAuthenticator.NEGOTIATE.length()).trim();
        final Base64 base64 = new Base64(0);
        final byte[] clientToken = base64.decode(authorization);
        Subject serverSubject = loginContext.getSubject();
        try {
            token = Subject.doAs(serverSubject, new PrivilegedExceptionAction<AuthenticationToken>() {

                @Override
                public AuthenticationToken run() throws Exception {
                    AuthenticationToken token = null;
                    GSSContext gssContext = null;
                    try {
                        gssContext = gssManager.createContext((GSSCredential) null);
                        byte[] serverToken = gssContext.acceptSecContext(clientToken, 0, clientToken.length);
                        if (serverToken != null && serverToken.length > 0) {
                            String authenticate = base64.encodeToString(serverToken);
                            response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE,
                                    KerberosAuthenticator.NEGOTIATE + " " + authenticate);
                        }
                        if (!gssContext.isEstablished()) {
                            response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
                            LOG.trace("SPNEGO in progress");
                        } else {
                            String clientPrincipal = gssContext.getSrcName().toString();
                            int index = clientPrincipal.indexOf("/");
                            if (index == -1) {
                                index = clientPrincipal.indexOf("@");
                            }
                            String userName = (index == -1) ? clientPrincipal
                                    : clientPrincipal.substring(0, index);
                            token = new AuthenticationToken(userName, clientPrincipal, TYPE);
                            response.setStatus(HttpServletResponse.SC_OK);
                            LOG.trace("SPNEGO completed for principal [{}]", clientPrincipal);
                        }
                    } finally {
                        if (gssContext != null) {
                            gssContext.dispose();
                        }
                    }
                    return token;
                }
            });
        } catch (PrivilegedActionException ex) {
            if (ex.getException() instanceof IOException) {
                throw (IOException) ex.getException();
            } else {
                throw new AuthenticationException(ex.getException());
            }
        }
    }
    return token;
}

From source file:org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler.java

/**
 * It enforces the Kerberos SPNEGO authentication sequence, returning an {@link AuthenticationToken} only
 * after the Kerberos SPNEGO sequence has completed successfully.
 * <p/>
 *
 * @param request the HTTP client request.
 * @param response the HTTP client response.
 *
 * @return an authentication token if the Kerberos SPNEGO sequence is complete and valid,
 *         <code>null</code> if it is in progress (in this case the handler handles the response to the client).
 *
 * @throws IOException thrown if an IO error occurred.
 * @throws AuthenticationException thrown if Kerberos SPNEGO sequence failed.
 */
@Override
public AuthenticationToken authenticate(HttpServletRequest request, final HttpServletResponse response)
        throws IOException, AuthenticationException {
    AuthenticationToken token = null;
    String authorization = request.getHeader(KerberosAuthenticator.AUTHORIZATION);

    if (authorization == null || !authorization.startsWith(KerberosAuthenticator.NEGOTIATE)) {
        response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
        response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
        if (authorization == null) {
            LOG.trace("SPNEGO starting");
        } else {
            LOG.warn("'" + KerberosAuthenticator.AUTHORIZATION + "' does not start with '"
                    + KerberosAuthenticator.NEGOTIATE + "' :  {}", authorization);
        }
    } else {
        authorization = authorization.substring(KerberosAuthenticator.NEGOTIATE.length()).trim();
        final Base64 base64 = new Base64(0);
        final byte[] clientToken = base64.decode(authorization);
        Subject serverSubject = loginContext.getSubject();
        try {
            token = Subject.doAs(serverSubject, new PrivilegedExceptionAction<AuthenticationToken>() {

                @Override
                public AuthenticationToken run() throws Exception {
                    AuthenticationToken token = null;
                    GSSContext gssContext = null;
                    try {
                        gssContext = gssManager.createContext((GSSCredential) null);
                        byte[] serverToken = gssContext.acceptSecContext(clientToken, 0, clientToken.length);
                        if (serverToken != null && serverToken.length > 0) {
                            String authenticate = base64.encodeToString(serverToken);
                            response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE,
                                    KerberosAuthenticator.NEGOTIATE + " " + authenticate);
                        }
                        if (!gssContext.isEstablished()) {
                            response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
                            LOG.trace("SPNEGO in progress");
                        } else {
                            String clientPrincipal = gssContext.getSrcName().toString();
                            KerberosName kerberosName = new KerberosName(clientPrincipal);
                            String userName = kerberosName.getShortName();
                            token = new AuthenticationToken(userName, clientPrincipal, TYPE);
                            response.setStatus(HttpServletResponse.SC_OK);
                            LOG.trace("SPNEGO completed for principal [{}]", clientPrincipal);
                        }
                    } finally {
                        if (gssContext != null) {
                            gssContext.dispose();
                        }
                    }
                    return token;
                }
            });
        } catch (PrivilegedActionException ex) {
            if (ex.getException() instanceof IOException) {
                throw (IOException) ex.getException();
            } else {
                throw new AuthenticationException(ex.getException());
            }
        }
    }
    return token;
}

From source file:org.apache.sentry.service.thrift.SentryServiceIntegrationBase.java

public void connectToSentryService() throws Exception {
    if (kerberos) {
        client = Subject.doAs(clientSubject, new PrivilegedExceptionAction<SentryPolicyServiceClient>() {
            @Override
            public SentryPolicyServiceClient run() throws Exception {
                return SentryServiceClientFactory.create(conf);
            }
        });
    } else {
        client = SentryServiceClientFactory.create(conf);
    }
}

From source file:org.apache.ranger.tagsync.source.atlasrest.AtlasRESTUtil.java

private Map<String, Object> atlasAPI(final String endpoint) {

    if (LOG.isDebugEnabled()) {
        LOG.debug("==> atlasAPI(" + endpoint + ")");
    }
    Map<String, Object> ret = new HashMap<String, Object>();

    try {
        if (kerberized) {
            LOG.debug("Using kerberos authentication");
            Subject sub = SecureClientLogin.loginUserFromKeytab(principal, keytab, nameRules);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Using Principal = " + principal + ", keytab = " + keytab);
            }
            ret = Subject.doAs(sub, new PrivilegedAction<Map<String, Object>>() {
                @Override
                public Map<String, Object> run() {
                    try {
                        return executeAtlasAPI(endpoint);
                    } catch (Exception e) {
                        LOG.error("Atlas API failed with message : ", e);
                    }
                    return null;
                }
            });
        } else {
            LOG.debug("Using basic authentication");
            ret = executeAtlasAPI(endpoint);
        }
    } catch (Exception exception) {
        LOG.error("Exception when fetching Atlas objects.", exception);
        ret = null;
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("<== atlasAPI(" + endpoint + ")");
    }
    return ret;
}

From source file:com.lucidworks.security.authentication.client.KerberosAuthenticator.java

/**
 * Implements the SPNEGO authentication sequence interaction using the current default principal
 * in the Kerberos cache (normally set via kinit).
 *
 * @param token the authentication token being used for the user.
 *
 * @throws IOException if an IO error occurred.
 * @throws AuthenticationException if an authentication error occurred.
 */
private void doSpnegoSequence(AuthenticatedURL.Token token) throws IOException, AuthenticationException {
    try {
        AccessControlContext context = AccessController.getContext();
        Subject subject = Subject.getSubject(context);
        if (subject == null) {
            LOG.debug("No subject in context, logging in");
            subject = new Subject();
            LoginContext login = new LoginContext("", subject, null, new KerberosConfiguration());
            login.login();
        }

        if (LOG.isDebugEnabled()) {
            LOG.debug("Using subject: " + subject);
        }
        Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                GSSContext gssContext = null;
                try {
                    GSSManager gssManager = GSSManager.getInstance();
                    String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP",
                            KerberosAuthenticator.this.url.getHost());
                    Oid oid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
                    GSSName serviceName = gssManager.createName(servicePrincipal, oid);
                    oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
                    gssContext = gssManager.createContext(serviceName, oid, null, GSSContext.DEFAULT_LIFETIME);
                    gssContext.requestCredDeleg(true);
                    gssContext.requestMutualAuth(true);

                    byte[] inToken = new byte[0];
                    byte[] outToken;
                    boolean established = false;

                    // Loop while the context is still not established
                    while (!established) {
                        outToken = gssContext.initSecContext(inToken, 0, inToken.length);
                        if (outToken != null) {
                            sendToken(outToken);
                        }

                        if (!gssContext.isEstablished()) {
                            inToken = readToken();
                        } else {
                            established = true;
                        }
                    }
                } finally {
                    if (gssContext != null) {
                        gssContext.dispose();
                        gssContext = null;
                    }
                }
                return null;
            }
        });
    } catch (PrivilegedActionException ex) {
        throw new AuthenticationException(ex.getException());
    } catch (LoginException ex) {
        throw new AuthenticationException(ex);
    }
    AuthenticatedURL.extractToken(conn, token);
}

From source file:org.apache.ranger.hive.client.HiveClient.java

public void close() {
    Subject.doAs(getLoginSubject(), new PrivilegedAction<Void>() {
        public Void run() {
            close(con);
            return null;
        }
    });
}

From source file:org.apache.ranger.storm.client.StormClient.java

public static <T> T executeUnderKerberos(String userName, String password, PrivilegedAction<T> action)
        throws IOException {

    final String errMsg = " You can still save the repository and start creating "
            + "policies, but you would not be able to use autocomplete for "
            + "resource names. Check xa_portal.log for more info.";
    class MySecureClientLoginConfiguration extends javax.security.auth.login.Configuration {

        private String userName;
        private String password;

        MySecureClientLoginConfiguration(String aUserName, String password) {
            this.userName = aUserName;
            this.password = password;
        }

        @Override
        public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {

            Map<String, String> kerberosOptions = new HashMap<String, String>();
            kerberosOptions.put("principal", this.userName);
            kerberosOptions.put("debug", "true");
            kerberosOptions.put("useKeyTab", "false");
            kerberosOptions.put(KrbPasswordSaverLoginModule.USERNAME_PARAM, this.userName);
            kerberosOptions.put(KrbPasswordSaverLoginModule.PASSWORD_PARAM, this.password);
            kerberosOptions.put("doNotPrompt", "false");
            kerberosOptions.put("useFirstPass", "true");
            kerberosOptions.put("tryFirstPass", "false");
            kerberosOptions.put("storeKey", "true");
            kerberosOptions.put("refreshKrb5Config", "true");

            AppConfigurationEntry KEYTAB_KERBEROS_LOGIN = null;
            AppConfigurationEntry KERBEROS_PWD_SAVER = null;
            try {
                KEYTAB_KERBEROS_LOGIN = new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
                        AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, kerberosOptions);
                KERBEROS_PWD_SAVER = new AppConfigurationEntry(KrbPasswordSaverLoginModule.class.getName(),
                        LoginModuleControlFlag.REQUIRED, kerberosOptions);

            } catch (IllegalArgumentException e) {
                String msgDesc = "executeUnderKerberos: Exception while getting Storm TopologyList.";
                HadoopException hdpException = new HadoopException(msgDesc, e);
                LOG.error(msgDesc, e);

                hdpException.generateResponseDataMap(false, BaseClient.getMessage(e), msgDesc + errMsg, null,
                        null);
                throw hdpException;
            }

            LOG.debug("getAppConfigurationEntry():" + kerberosOptions.get("principal"));

            return new AppConfigurationEntry[] { KERBEROS_PWD_SAVER, KEYTAB_KERBEROS_LOGIN };
        }

    }
    ;

    T ret = null;

    Subject subject = null;
    LoginContext loginContext = null;

    try {
        subject = new Subject();
        LOG.debug("executeUnderKerberos():user=" + userName + ",pass=");
        LOG.debug("executeUnderKerberos():Creating config..");
        MySecureClientLoginConfiguration loginConf = new MySecureClientLoginConfiguration(userName, password);
        LOG.debug("executeUnderKerberos():Creating Context..");
        loginContext = new LoginContext("hadoop-keytab-kerberos", subject, null, loginConf);

        LOG.debug("executeUnderKerberos():Logging in..");
        loginContext.login();

        Subject loginSubj = loginContext.getSubject();

        if (loginSubj != null) {
            ret = Subject.doAs(loginSubj, action);
        }
    } catch (LoginException le) {
        String msgDesc = "executeUnderKerberos: Login failure using given"
                + " configuration parameters, username : `" + userName + "`.";
        HadoopException hdpException = new HadoopException(msgDesc, le);
        LOG.error(msgDesc, le);

        hdpException.generateResponseDataMap(false, BaseClient.getMessage(le), msgDesc + errMsg, null, null);
        throw hdpException;
    } catch (SecurityException se) {
        String msgDesc = "executeUnderKerberos: Exception while getting Storm TopologyList.";
        HadoopException hdpException = new HadoopException(msgDesc, se);
        LOG.error(msgDesc, se);

        hdpException.generateResponseDataMap(false, BaseClient.getMessage(se), msgDesc + errMsg, null, null);
        throw hdpException;

    } finally {
        if (loginContext != null) {
            if (subject != null) {
                try {
                    loginContext.logout();
                } catch (LoginException e) {
                    throw new IOException("logout failure", e);
                }
            }
        }
    }

    return ret;
}

From source file:org.apache.ranger.hbase.client.HBaseClient.java

public List<String> getTableList(final String tableNameMatching) {
    List<String> ret = null;
    final String errMsg = " You can still save the repository and start creating "
            + "policies, but you would not be able to use autocomplete for "
            + "resource names. Check xa_portal.log for more info.";

    subj = getLoginSubject();

    if (subj != null) {
        ClassLoader prevCl = Thread.currentThread().getContextClassLoader();
        try {
            Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());

            ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {

                @Override
                public List<String> run() {

                    List<String> tableList = new ArrayList<String>();
                    HBaseAdmin admin = null;
                    try {

                        Configuration conf = HBaseConfiguration.create();
                        admin = new HBaseAdmin(conf);
                        for (HTableDescriptor htd : admin.listTables(tableNameMatching)) {
                            tableList.add(htd.getNameAsString());
                        }
                    } catch (ZooKeeperConnectionException zce) {
                        String msgDesc = "getTableList: Unable to connect to `ZooKeeper` "
                                + "using given config parameters.";
                        HadoopException hdpException = new HadoopException(msgDesc, zce);
                        hdpException.generateResponseDataMap(false, getMessage(zce), msgDesc + errMsg, null,
                                null);
                        throw hdpException;

                    } catch (MasterNotRunningException mnre) {
                        String msgDesc = "getTableList: Looks like `Master` is not running, "
                                + "so couldn't check that running HBase is available or not, "
                                + "Please try again later.";
                        HadoopException hdpException = new HadoopException(msgDesc, mnre);
                        hdpException.generateResponseDataMap(false, getMessage(mnre), msgDesc + errMsg, null,
                                null);
                        throw hdpException;

                    } catch (IOException io) {
                        String msgDesc = "Unable to get HBase table List for [repository:"
                                + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching
                                + "].";
                        HadoopException hdpException = new HadoopException(msgDesc, io);
                        hdpException.generateResponseDataMap(false, getMessage(io), msgDesc + errMsg, null,
                                null);
                        throw hdpException;
                    } catch (Throwable e) {
                        String msgDesc = "Unable to get HBase table List for [repository:"
                                + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching
                                + "].";
                        LOG.error(msgDesc);
                        HadoopException hdpException = new HadoopException(msgDesc, e);
                        hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + errMsg, null,
                                null);
                        throw hdpException;
                    } finally {
                        if (admin != null) {
                            try {
                                admin.close();
                            } catch (IOException e) {
                                LOG.error("Unable to close HBase connection ["
                                        + getConfigHolder().getDatasourceName() + "]", e);
                            }
                        }
                    }
                    return tableList;
                }

            });
        } finally {
            Thread.currentThread().setContextClassLoader(prevCl);
        }
    }
    return ret;
}

From source file:org.jolokia.http.AgentServlet.java

private JSONAware handleSecurely(final ServletRequestHandler pReqHandler, final HttpServletRequest pReq,
        final HttpServletResponse pResp) throws IOException, PrivilegedActionException {
    Subject subject = (Subject) pReq.getAttribute(ConfigKey.JAAS_SUBJECT_REQUEST_ATTRIBUTE);
    if (subject != null) {
        return Subject.doAs(subject, new PrivilegedExceptionAction<JSONAware>() {
            public JSONAware run() throws IOException {
                return pReqHandler.handleRequest(pReq, pResp);
            }
        });
    } else {
        return pReqHandler.handleRequest(pReq, pResp);
    }
}

From source file:org.apache.ranger.services.hbase.client.HBaseClient.java

public List<String> getTableList(final String tableNameMatching, final List<String> existingTableList)
        throws HadoopException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HbaseClient.getTableList()  tableNameMatching " + tableNameMatching
                + " ExisitingTableList " + existingTableList);
    }

    List<String> ret = null;
    final String errMsg = " You can still save the repository and start creating "
            + "policies, but you would not be able to use autocomplete for "
            + "resource names. Check ranger_admin.log for more info.";

    subj = getLoginSubject();

    if (subj != null) {
        ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {

            @Override
            public List<String> run() {

                List<String> tableList = new ArrayList<String>();
                HBaseAdmin admin = null;
                try {
                    LOG.info("getTableList: setting config values from client");
                    setClientConfigValues(conf);
                    LOG.info("getTableList: checking HbaseAvailability with the new config");
                    HBaseAdmin.checkHBaseAvailable(conf);
                    LOG.info("getTableList: no exception: HbaseAvailability true");
                    admin = new HBaseAdmin(conf);
                    HTableDescriptor[] htds = admin.listTables(tableNameMatching);
                    if (htds != null) {
                        for (HTableDescriptor htd : htds) {
                            String tableName = htd.getNameAsString();
                            if (existingTableList != null && existingTableList.contains(tableName)) {
                                continue;
                            } else {
                                tableList.add(htd.getNameAsString());
                            }
                        }
                    } else {
                        LOG.error("getTableList: null HTableDescription received from HBaseAdmin.listTables");
                    }
                } catch (ZooKeeperConnectionException zce) {
                    String msgDesc = "getTableList: Unable to connect to `ZooKeeper` "
                            + "using given config parameters.";
                    HadoopException hdpException = new HadoopException(msgDesc, zce);
                    hdpException.generateResponseDataMap(false, getMessage(zce), msgDesc + errMsg, null, null);
                    LOG.error(msgDesc + zce);
                    throw hdpException;

                } catch (MasterNotRunningException mnre) {
                    String msgDesc = "getTableList: Looks like `Master` is not running, "
                            + "so couldn't check that running HBase is available or not, "
                            + "Please try again later.";
                    HadoopException hdpException = new HadoopException(msgDesc, mnre);
                    hdpException.generateResponseDataMap(false, getMessage(mnre), msgDesc + errMsg, null, null);
                    LOG.error(msgDesc + mnre);
                    throw hdpException;

                } catch (IOException io) {
                    String msgDesc = "getTableList: Unable to get HBase table List for [repository:"
                            + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching
                            + "].";
                    HadoopException hdpException = new HadoopException(msgDesc, io);
                    hdpException.generateResponseDataMap(false, getMessage(io), msgDesc + errMsg, null, null);
                    LOG.error(msgDesc + io);
                    throw hdpException;
                } catch (Throwable e) {
                    String msgDesc = "getTableList : Unable to get HBase table List for [repository:"
                            + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching
                            + "].";
                    LOG.error(msgDesc + e);
                    HadoopException hdpException = new HadoopException(msgDesc, e);
                    hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + errMsg, null, null);
                    throw hdpException;
                } finally {
                    if (admin != null) {
                        try {
                            admin.close();
                        } catch (IOException e) {
                            LOG.error("Unable to close HBase connection ["
                                    + getConfigHolder().getDatasourceName() + "]", e);
                        }
                    }
                }
                return tableList;
            }

        });
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HbaseClient.getTableList() " + ret);
    }
    return ret;
}