Example usage for java.security.PrivilegedExceptionAction

Introduction

On this page you can find example usages of java.security.PrivilegedExceptionAction. The interface's run() method is the unit of work handed to dispatchers such as AccessController.doPrivileged, Subject.doAs, and Hadoop's UserGroupInformation.doAs, which run it with specific privileges or credentials.

Prototype

public interface PrivilegedExceptionAction<T> {
    T run() throws Exception;
}
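
Every example below follows the same shape: the work that may throw a checked exception goes into run(), and the action is handed to a dispatcher such as AccessController.doPrivileged, Subject.doAs, or Hadoop's UserGroupInformation.doAs. Here is a minimal, self-contained sketch using AccessController.doPrivileged; the class name and file path are purely illustrative.

import java.io.FileInputStream;
import java.io.IOException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;

public class PrivilegedReadSketch {

    public static FileInputStream openPrivileged(final String path) throws IOException {
        try {
            // run() may throw a checked exception; doPrivileged wraps it in a
            // PrivilegedActionException.
            return AccessController.doPrivileged(new PrivilegedExceptionAction<FileInputStream>() {
                @Override
                public FileInputStream run() throws IOException {
                    return new FileInputStream(path); // hypothetical path, for illustration only
                }
            });
        } catch (PrivilegedActionException e) {
            // getException() returns the checked exception thrown by run().
            throw (IOException) e.getException();
        }
    }
}

Subject.doAs reports failures the same way (PrivilegedActionException), while Hadoop's UserGroupInformation.doAs declares IOException and InterruptedException directly, which is why the Hadoop examples below catch those instead.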

Usage

From source file:org.apache.hadoop.hive.metastore.utils.HdfsUtils.java

public static boolean runDistCpAs(List<Path> srcPaths, Path dst, Configuration conf, String doAsUser)
        throws IOException {
    UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(doAsUser,
            UserGroupInformation.getLoginUser());
    try {
        return proxyUser.doAs(new PrivilegedExceptionAction<Boolean>() {
            @Override
            public Boolean run() throws Exception {
                return runDistCp(srcPaths, dst, conf);
            }
        });
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}

From source file:org.apache.atlas.web.security.SSLAndKerberosTest.java

@BeforeClass
public void setUp() throws Exception {
    jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
    providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file/" + jksPath.toUri();

    String persistDir = TestUtils.getTempDirectory();

    setupKDCAndPrincipals();
    setupCredentials();

    // the client will actually only leverage a subset of these properties
    final PropertiesConfiguration configuration = getSSLConfiguration(providerUrl);

    persistSSLClientConfiguration(configuration);

    TestUtils.writeConfiguration(configuration,
            persistDir + File.separator + ApplicationProperties.APPLICATION_PROPERTIES);

    String confLocation = System.getProperty("atlas.conf");
    URL url;
    if (confLocation == null) {
        url = SSLAndKerberosTest.class.getResource("/" + ApplicationProperties.APPLICATION_PROPERTIES);
    } else {
        url = new File(confLocation, ApplicationProperties.APPLICATION_PROPERTIES).toURI().toURL();
    }
    configuration.load(url);
    configuration.setProperty(TLS_ENABLED, true);
    configuration.setProperty("atlas.authentication.method.kerberos", "true");
    configuration.setProperty("atlas.authentication.keytab", userKeytabFile.getAbsolutePath());
    configuration.setProperty("atlas.authentication.principal", "dgi/localhost@" + kdc.getRealm());

    configuration.setProperty("atlas.authentication.method.file", "false");
    configuration.setProperty("atlas.authentication.method.kerberos", "true");
    configuration.setProperty("atlas.authentication.method.kerberos.principal",
            "HTTP/localhost@" + kdc.getRealm());
    configuration.setProperty("atlas.authentication.method.kerberos.keytab", httpKeytabFile.getAbsolutePath());
    configuration.setProperty("atlas.authentication.method.kerberos.name.rules",
            "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");

    configuration.setProperty("atlas.authentication.method.file", "true");
    configuration.setProperty("atlas.authentication.method.file.filename", persistDir + "/users-credentials");
    configuration.setProperty("atlas.auth.policy.file", persistDir + "/policy-store.txt");

    TestUtils.writeConfiguration(configuration, persistDir + File.separator + "atlas-application.properties");

    setupUserCredential(persistDir);
    setUpPolicyStore(persistDir);

    subject = loginTestUser();
    UserGroupInformation.loginUserFromSubject(subject);
    UserGroupInformation proxyUser = UserGroupInformation.createProxyUser("testUser",
            UserGroupInformation.getLoginUser());

    // save original setting
    originalConf = System.getProperty("atlas.conf");
    System.setProperty("atlas.conf", persistDir);

    originalHomeDir = System.getProperty("atlas.home");
    System.setProperty("atlas.home", TestUtils.getTargetDirectory());

    dgiCLient = proxyUser.doAs(new PrivilegedExceptionAction<AtlasClient>() {
        @Override
        public AtlasClient run() throws Exception {
            return new AtlasClient(configuration, DGI_URL);
        }
    });

    secureEmbeddedServer = new TestSecureEmbeddedServer(21443, getWarPath()) {
        @Override
        public PropertiesConfiguration getConfiguration() {
            return configuration;
        }
    };
    secureEmbeddedServer.getServer().start();
}

From source file:org.apache.hadoop.hive.thrift.HiveDelegationTokenManager.java

public String getDelegationToken(final String owner, final String renewer, String remoteAddr)
        throws IOException, InterruptedException {
    /**
     * If the user asking for the token is the same as the 'owner', then don't do
     * any proxy authorization checks. For cases like oozie, where it gets
     * a delegation token for another user, we need to make sure oozie is
     * authorized to get a delegation token.
     */
    // Do all checks on short names
    UserGroupInformation currUser = UserGroupInformation.getCurrentUser();
    UserGroupInformation ownerUgi = UserGroupInformation.createRemoteUser(owner);
    if (!ownerUgi.getShortUserName().equals(currUser.getShortUserName())) {
        // in the case of proxy users, the getCurrentUser will return the
        // real user (e.g. oozie) due to the doAs that happened just before the
        // server started executing the method getDelegationToken in the MetaStore
        ownerUgi = UserGroupInformation.createProxyUser(owner, UserGroupInformation.getCurrentUser());
        ProxyUsers.authorize(ownerUgi, remoteAddr, null);
    }
    return ownerUgi.doAs(new PrivilegedExceptionAction<String>() {

        @Override
        public String run() throws IOException {
            return secretManager.getDelegationToken(renewer);
        }
    });
}

From source file:com.buaa.cfs.nfs3.DFSClientCache.java

private CacheLoader<String, DFSClient> clientLoader() {
    return new CacheLoader<String, DFSClient>() {
        @Override
        public DFSClient load(String userName) throws Exception {
            UserGroupInformation ugi = getUserGroupInformation(userName, UserGroupInformation.getCurrentUser());

            // Guava requires that a CacheLoader never return null.
            return ugi.doAs(new PrivilegedExceptionAction<DFSClient>() {
                @Override
                public DFSClient run() throws IOException {
                    //            return new DFSClient(NameNode.getAddress(config), config);
                    return null;
                }
            });
        }
    };
}

From source file:gobblin.util.ProxiedFileSystemWrapper.java

/**
 * Getter for proxiedFs, using the passed parameters to create an instance of a proxiedFs.
 * @param properties
 * @param authType is either TOKEN or KEYTAB.
 * @param authPath is the KEYTAB location if the authType is KEYTAB; otherwise, it is the token file.
 * @param uri File system URI.
 * @throws IOException
 * @throws InterruptedException
 * @throws URISyntaxException
 * @return proxiedFs
 */
public FileSystem getProxiedFileSystem(State properties, AuthType authType, String authPath, String uri,
        final Configuration conf) throws IOException, InterruptedException, URISyntaxException {
    Preconditions.checkArgument(
            StringUtils.isNotBlank(properties.getProp(ConfigurationKeys.FS_PROXY_AS_USER_NAME)),
            "State does not contain a proper proxy user name");
    String proxyUserName = properties.getProp(ConfigurationKeys.FS_PROXY_AS_USER_NAME);
    UserGroupInformation proxyUser;
    switch (authType) {
    case KEYTAB: // If the authentication type is KEYTAB, log in a super user first before creating a proxy user.
        Preconditions.checkArgument(
                StringUtils
                        .isNotBlank(properties.getProp(ConfigurationKeys.SUPER_USER_NAME_TO_PROXY_AS_OTHERS)),
                "State does not contain a proper proxy token file name");
        String superUser = properties.getProp(ConfigurationKeys.SUPER_USER_NAME_TO_PROXY_AS_OTHERS);
        UserGroupInformation.loginUserFromKeytab(superUser, authPath);
        proxyUser = UserGroupInformation.createProxyUser(proxyUserName, UserGroupInformation.getLoginUser());
        break;
    case TOKEN: // If the authentication type is TOKEN, create a proxy user and then add the token to the user.
        proxyUser = UserGroupInformation.createProxyUser(proxyUserName, UserGroupInformation.getLoginUser());
        Optional<Token<?>> proxyToken = getTokenFromSeqFile(authPath, proxyUserName);
        if (proxyToken.isPresent()) {
            proxyUser.addToken(proxyToken.get());
        } else {
            LOG.warn("No delegation token found for the current proxy user.");
        }
        break;
    default:
        LOG.warn(
                "Creating a proxy user without authentication, which could not perform File system operations.");
        proxyUser = UserGroupInformation.createProxyUser(proxyUserName, UserGroupInformation.getLoginUser());
        break;
    }

    final URI fsURI = URI.create(uri);
    proxyUser.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws IOException {
            LOG.debug("Now performing file system operations as :" + UserGroupInformation.getCurrentUser());
            proxiedFs = FileSystem.get(fsURI, conf);
            return null;
        }
    });
    return this.proxiedFs;
}

From source file:org.apache.axis2.deployment.DescriptionBuilder.java

/**
 * Processes default message receivers specified either in axis2.xml or
 * services.xml.
 *
 */
protected HashMap<String, MessageReceiver> processMessageReceivers(OMElement messageReceivers)
        throws DeploymentException {
    HashMap<String, MessageReceiver> mr_mep = new HashMap<String, MessageReceiver>();
    Iterator msgReceivers = messageReceivers.getChildrenWithName(new QName(TAG_MESSAGE_RECEIVER));
    while (msgReceivers.hasNext()) {
        OMElement msgReceiver = (OMElement) msgReceivers.next();
        final OMElement tempMsgReceiver = msgReceiver;
        MessageReceiver receiver = null;
        try {
            receiver = org.apache.axis2.java.security.AccessController
                    .doPrivileged(new PrivilegedExceptionAction<MessageReceiver>() {
                        public MessageReceiver run() throws org.apache.axis2.deployment.DeploymentException {
                            return loadMessageReceiver(Thread.currentThread().getContextClassLoader(),
                                    tempMsgReceiver);
                        }
                    });
        } catch (PrivilegedActionException e) {
            throw (DeploymentException) e.getException();
        }
        OMAttribute mepAtt = msgReceiver.getAttribute(new QName(TAG_MEP));
        mr_mep.put(mepAtt.getAttributeValue(), receiver);
    }
    return mr_mep;
}

From source file:org.apache.drill.exec.store.hive.DrillHiveMetaStoreClient.java

/**
 * Create a DrillHiveMetaStoreClient for cases where:
 *   1. Drill impersonation is enabled and
 *   2. either storage (in remote HiveMetaStore server) or SQL standard based authorization (in Hive storage plugin)
 *      is enabled
 * @param processUserMetaStoreClient MetaStoreClient of process user. Useful for generating the delegation tokens when
 *                                   SASL (KERBEROS or custom SASL implementations) is enabled.
 * @param hiveConf Conf including authorization configuration
 * @param userName User who is trying to access the Hive metadata
 * @return
 * @throws MetaException
 */
public static DrillHiveMetaStoreClient createClientWithAuthz(
        final DrillHiveMetaStoreClient processUserMetaStoreClient, final HiveConf hiveConf,
        final String userName) throws MetaException {
    try {
        boolean delegationTokenGenerated = false;

        final UserGroupInformation ugiForRpc; // UGI credentials to use for RPC communication with Hive MetaStore server
        if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
            // If the user impersonation is disabled in Hive storage plugin (not Drill impersonation), use the process
            // user UGI credentials.
            ugiForRpc = ImpersonationUtil.getProcessUserUGI();
        } else {
            ugiForRpc = ImpersonationUtil.createProxyUgi(userName);
            if (hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL)) {
                // When SASL is enabled for proxy user create a delegation token. Currently HiveMetaStoreClient can create
                // client transport for proxy users only when the authentication mechanism is DIGEST (through use of
                // delegation tokens).
                String delegationToken = processUserMetaStoreClient.getDelegationToken(userName, userName);
                try {
                    Utils.setTokenStr(ugiForRpc, delegationToken,
                            HiveClientWithAuthzWithCaching.DRILL2HMS_TOKEN);
                } catch (IOException e) {
                    throw new DrillRuntimeException(
                            "Couldn't setup delegation token in the UGI for Hive MetaStoreClient", e);
                }
                delegationTokenGenerated = true;
            }
        }

        final HiveConf hiveConfForClient;
        if (delegationTokenGenerated) {
            hiveConfForClient = new HiveConf(hiveConf);
            hiveConfForClient.set("hive.metastore.token.signature",
                    HiveClientWithAuthzWithCaching.DRILL2HMS_TOKEN);
        } else {
            hiveConfForClient = hiveConf;
        }

        return ugiForRpc.doAs(new PrivilegedExceptionAction<DrillHiveMetaStoreClient>() {
            @Override
            public DrillHiveMetaStoreClient run() throws Exception {
                return new HiveClientWithAuthzWithCaching(hiveConfForClient, ugiForRpc, userName);
            }
        });
    } catch (final Exception e) {
        throw new DrillRuntimeException("Failure setting up HiveMetaStore client.", e);
    }
}

From source file:com.zimbra.cs.security.sasl.GssAuthenticator.java

@Override
public boolean initialize() throws IOException {
    Krb5Keytab keytab = getKeytab(LC.krb5_keytab.value());
    if (keytab == null) {
        sendFailed("mechanism not supported");
        return false;
    }
    debug("keytab file = %s", keytab.getFile());

    final String host;
    if (LC.krb5_service_principal_from_interface_address.booleanValue()) {
        String localSocketHostname = localAddress.getCanonicalHostName().toLowerCase();
        if (localSocketHostname.length() == 0 || Character.isDigit(localSocketHostname.charAt(0)))
            localSocketHostname = LC.zimbra_server_hostname.value();
        host = localSocketHostname;
    } else {
        host = LC.zimbra_server_hostname.value();
    }

    KerberosPrincipal kp = new KerberosPrincipal(getProtocol() + '/' + host);
    debug("kerberos principal = %s", kp);
    Subject subject = getSubject(keytab, kp);
    if (subject == null) {
        sendFailed();
        return false;
    }
    debug("subject = %s", subject);

    final Map<String, String> props = getSaslProperties();
    if (DEBUG && props != null) {
        String qop = props.get(Sasl.QOP);
        debug("Sent QOP = " + (qop != null ? qop : "auth"));
    }

    try {
        mSaslServer = (SaslServer) Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
            @Override
            public Object run() throws SaslException {
                return Sasl.createSaslServer(getMechanism(), getProtocol(), host, props,
                        new GssCallbackHandler());
            }
        });
    } catch (PrivilegedActionException e) {
        sendFailed();
        getLog().warn("Could not create SaslServer", e.getCause());
        return false;
    }
    return true;
}

From source file:org.apache.hadoop.hdfs.security.TestDelegationTokenForProxyUser.java

@Test
public void testDelegationTokenWithRealUser() throws IOException {
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser(REAL_USER);
    final UserGroupInformation proxyUgi = UserGroupInformation.createProxyUserForTesting(PROXY_USER, ugi,
            GROUP_NAMES);
    try {
        Token<DelegationTokenIdentifier> token = proxyUgi
                .doAs(new PrivilegedExceptionAction<Token<DelegationTokenIdentifier>>() {
                    public Token<DelegationTokenIdentifier> run() throws IOException {
                        DistributedFileSystem dfs = (DistributedFileSystem) cluster.getFileSystem();
                        return dfs.getDelegationToken("RenewerUser");
                    }
                });
        DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
        byte[] tokenId = token.getIdentifier();
        identifier.readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
        Assert.assertEquals(identifier.getUser().getUserName(), PROXY_USER);
        Assert.assertEquals(identifier.getUser().getRealUser().getUserName(), REAL_USER);
    } catch (InterruptedException e) {
        //Do Nothing
    }
}

From source file:org.apache.falcon.catalog.HiveCatalogService.java

/**
 * This is used from within the falcon namespace.
 *
 * @param conf                      conf
 * @param catalogUrl                metastore uri
 * @return hive metastore client handle
 * @throws FalconException
 */
private static HiveMetaStoreClient createProxiedClient(Configuration conf, String catalogUrl)
        throws FalconException {

    try {
        final HiveConf hcatConf = createHiveConf(conf, catalogUrl);
        UserGroupInformation proxyUGI = CurrentUser.getProxyUGI();
        addSecureCredentialsAndToken(conf, hcatConf, proxyUGI);

        LOG.info("Creating HCatalog client object for {}", catalogUrl);
        return proxyUGI.doAs(new PrivilegedExceptionAction<HiveMetaStoreClient>() {
            public HiveMetaStoreClient run() throws Exception {
                return new HiveMetaStoreClient(hcatConf);
            }
        });
    } catch (Exception e) {
        throw new FalconException("Exception creating Proxied HiveMetaStoreClient: " + e.getMessage(), e);
    }
}