List of usage examples for java.security.PrivilegedExceptionAction
From source file:org.apache.hadoop.hbase.security.visibility.TestVisibilityLabelsWithDefaultVisLabelService.java
/**
 * Verifies that visibility labels can still be added after every region server is
 * aborted and a fresh one is started: retries addLabels until the
 * VisibilityController reports it is initialized, then counts rows in the labels
 * table.
 * NOTE(review): assumes TEST_UTIL, SUPERUSER, conf, LOG and the label constants
 * (SECRET, CONFIDENTIAL, PRIVATE) are fields of the enclosing test class — confirm.
 */
@Test(timeout = 60 * 1000) public void testAddVisibilityLabelsOnRSRestart() throws Exception {
    // Abort every currently-running region server.
    List<RegionServerThread> regionServerThreads = TEST_UTIL.getHBaseCluster().getRegionServerThreads();
    for (RegionServerThread rsThread : regionServerThreads) {
        rsThread.getRegionServer().abort("Aborting ");
    }
    // Start one new RS and wait for the labels region to come online on it.
    RegionServerThread rs = TEST_UTIL.getHBaseCluster().startRegionServer();
    waitForLabelsRegionAvailability(rs.getRegionServer());
    // Flag shared with the nested action below: set to false whenever the
    // controller answers "not ready", which drives the retry loop.
    final AtomicBoolean vcInitialized = new AtomicBoolean(true);
    do {
        PrivilegedExceptionAction<VisibilityLabelsResponse> action = new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
            public VisibilityLabelsResponse run() throws Exception {
                String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, "ABC", "XYZ" };
                try (Connection conn = ConnectionFactory.createConnection(conf)) {
                    VisibilityLabelsResponse resp = VisibilityClient.addLabels(conn, labels);
                    List<RegionActionResult> results = resp.getResultList();
                    if (results.get(0).hasException()) {
                        NameBytesPair pair = results.get(0).getException();
                        Throwable t = ProtobufUtil.toException(pair);
                        LOG.debug("Got exception writing labels", t);
                        if (t instanceof VisibilityControllerNotReadyException) {
                            // Controller not initialized yet: flag a retry and back off briefly.
                            vcInitialized.set(false);
                            LOG.warn("VisibilityController was not yet initialized");
                            Threads.sleep(10);
                        } else {
                            // Any other per-region exception still counts as "initialized";
                            // the loop will not retry for it.
                            vcInitialized.set(true);
                        }
                    } else LOG.debug("new labels added: " + resp);
                } catch (Throwable t) {
                    throw new IOException(t);
                }
                return null;
            }
        };
        SUPERUSER.runAs(action);
    } while (!vcInitialized.get());
    // Scan the labels table with the system authorization and count the rows.
    Scan s = new Scan();
    s.setAuthorizations(new Authorizations(VisibilityUtils.SYSTEM_LABEL));
    int i = 0;
    try (Table ht = TEST_UTIL.getConnection().getTable(LABELS_TABLE_NAME);
            ResultScanner scanner = ht.getScanner(s)) {
        while (true) {
            Result next = scanner.next();
            if (next == null) {
                break;
            }
            i++;
        }
    }
    // One label is the "system" label.
    Assert.assertEquals("The count should be 13", 13, i);
}
From source file:com.thinkbiganalytics.nifi.security.ApplySecurityPolicy.java
/**
 * Obtains a {@link FileSystem} handle while impersonating the given user.
 *
 * @param config Hadoop configuration used to resolve the file system
 * @param ugi    the user to impersonate via {@code doAs}
 * @return the file system as seen by {@code ugi}
 * @throws IOException if the file system cannot be created, or if the
 *                     impersonated call is interrupted (interrupt flag is restored)
 */
protected FileSystem getFileSystemAsUser(final Configuration config, UserGroupInformation ugi) throws IOException {
    try {
        return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
            @Override
            public FileSystem run() throws Exception {
                return FileSystem.get(config);
            }
        });
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers up the stack can observe it,
        // and keep the original exception as the cause instead of flattening it
        // to getMessage() (which loses the stack trace).
        Thread.currentThread().interrupt();
        throw new IOException("Unable to create file system: " + e.getMessage(), e);
    }
}
From source file:backtype.storm.security.auth.kerberos.KerberosSaslTransportPlugin.java
@Override public TTransport connect(TTransport transport, String serverHost, String asUser) throws TTransportException, IOException { // create an authentication callback handler ClientCallbackHandler client_callback_handler = new ClientCallbackHandler(login_conf); // login our user Login login = null;/*from www.ja va 2 s . c om*/ try { // specify a configuration object to be used Configuration.setConfiguration(login_conf); // now login login = new Login(AuthUtils.LOGIN_CONTEXT_CLIENT, client_callback_handler); } catch (LoginException ex) { LOG.error("Server failed to login in principal:" + ex, ex); throw new RuntimeException(ex); } final Subject subject = login.getSubject(); if (subject.getPrivateCredentials(KerberosTicket.class).isEmpty()) { // error throw new RuntimeException("Fail to verify user principal with section \"" + AuthUtils.LOGIN_CONTEXT_CLIENT + "\" in login configuration file " + login_conf); } final String principal = StringUtils.isBlank(asUser) ? getPrincipal(subject) : asUser; String serviceName = AuthUtils.get(login_conf, AuthUtils.LOGIN_CONTEXT_CLIENT, "serviceName"); if (serviceName == null) { serviceName = AuthUtils.SERVICE; } Map<String, String> props = new TreeMap<String, String>(); props.put(Sasl.QOP, "auth"); props.put(Sasl.SERVER_AUTH, "false"); LOG.debug("SASL GSSAPI client transport is being established"); final TTransport sasalTransport = new TSaslClientTransport(KERBEROS, principal, serviceName, serverHost, props, null, transport); // open Sasl transport with the login credential try { Subject.doAs(subject, new PrivilegedExceptionAction<Void>() { public Void run() { try { LOG.debug("do as:" + principal); sasalTransport.open(); } catch (Exception e) { LOG.error( "Client failed to open SaslClientTransport to interact with a server during session initiation: " + e, e); } return null; } }); } catch (PrivilegedActionException e) { throw new RuntimeException(e); } return sasalTransport; }
From source file:org.apache.axis2.deployment.util.Utils.java
public static boolean loadHandler(ClassLoader loader1, HandlerDescription desc) throws DeploymentException { String handlername = desc.getClassName(); Handler handler;// w ww . j av a 2s. co m try { final Class handlerClass = Loader.loadClass(loader1, handlername); Package aPackage = (Package) org.apache.axis2.java.security.AccessController .doPrivileged(new PrivilegedAction() { public Object run() { return handlerClass.getPackage(); } }); if (aPackage != null && aPackage.getName().equals("org.apache.axis2.engine")) { String name = handlerClass.getName(); log.warn("Dispatcher " + name + " is now deprecated."); if (name.indexOf("InstanceDispatcher") != -1) { log.warn("Please remove the entry for " + handlerClass.getName() + "from axis2.xml"); } else { log.warn( "Please edit axis2.xml and replace with the same class in org.apache.axis2.dispatchers package"); } } handler = (Handler) org.apache.axis2.java.security.AccessController .doPrivileged(new PrivilegedExceptionAction() { public Object run() throws InstantiationException, IllegalAccessException { return handlerClass.newInstance(); } }); handler.init(desc); desc.setHandler(handler); } catch (ClassNotFoundException e) { if (handlername.indexOf("jaxws") > 0) { log.warn("[JAXWS] - unable to load " + handlername); return false; } throw new DeploymentException(e); } catch (Exception e) { throw new DeploymentException(e); } return true; }
From source file:org.keycloak.adapters.cloned.SniSSLSocketFactory.java
private Socket applySNI(final Socket socket, String hostname) { if (socket instanceof SSLSocket) { try {//from w w w. j a va 2 s. c om Method setHostMethod = AccessController.doPrivileged(new PrivilegedExceptionAction<Method>() { @Override public Method run() throws NoSuchMethodException { return socket.getClass().getMethod("setHost", String.class); } }); setHostMethod.invoke(socket, hostname); LOG.log(Level.FINEST, "Applied SNI to socket for host {0}", hostname); } catch (PrivilegedActionException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { LOG.log(Level.WARNING, "Failed to apply SNI to SSLSocket", e); } } return socket; }
From source file:azkaban.jobtype.connectors.teradata.HdfsToTeradataJobRunnerMain.java
/**
 * Entry point for the HDFS-to-Teradata copy job: preprocesses, then runs the copy
 * either directly or under a Hadoop proxy user when proxying is configured.
 *
 * @throws IOException          on copy failure
 * @throws InterruptedException if the proxied execution is interrupted
 */
public void run() throws IOException, InterruptedException {
    final String jobName = System.getenv(AbstractProcessJob.JOB_NAME_ENV);
    _logger.info("Running job " + jobName);
    preprocess();
    if (!HadoopSecureWrapperUtils.shouldProxy(_jobProps)) {
        // No proxying configured: copy as the current user.
        copyHdfsToTd();
        return;
    }
    // Proxying: impersonate the configured user via the delegation-token file.
    final String tokenFilePath = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
    final UserGroupInformation ugi =
            HadoopSecureWrapperUtils.setupProxyUser(_jobProps, tokenFilePath, _logger);
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            copyHdfsToTd();
            return null;
        }
    });
}
From source file:org.apache.hadoop.hbase.security.token.TokenUtil.java
/**
 * Obtain an authentication token on behalf of the given user and add it to
 * the credentials for the given map reduce job.
 *
 * @param job  The job configuration in which the token should be stored
 * @param user The user for whom to obtain the token
 * @throws IOException If making a remote call to the {@link TokenProvider} fails,
 *                     or no token is returned for the user
 * @throws InterruptedException If executing as the given user is interrupted
 */
public static void obtainTokenForJob(final JobConf job, UserGroupInformation user)
        throws IOException, InterruptedException {
    try {
        // Fetch the token while impersonating the target user.
        PrivilegedExceptionAction<Token<AuthenticationTokenIdentifier>> fetchAction =
                new PrivilegedExceptionAction<Token<AuthenticationTokenIdentifier>>() {
                    public Token<AuthenticationTokenIdentifier> run() throws Exception {
                        return obtainToken(job);
                    }
                };
        Token<AuthenticationTokenIdentifier> authToken = user.doAs(fetchAction);
        if (authToken == null) {
            throw new IOException("No token returned for user " + user.getUserName());
        }
        Text clusterId = getClusterId(authToken);
        LOG.info("Obtained token " + authToken.getKind().toString() + " for user "
                + user.getUserName() + " on cluster " + clusterId.toString());
        // Store the token under the cluster id so the job can find it.
        job.getCredentials().addToken(clusterId, authToken);
    } catch (IOException ioe) {
        throw ioe;
    } catch (InterruptedException ie) {
        throw ie;
    } catch (RuntimeException re) {
        throw re;
    } catch (Exception e) {
        // doAs can surface arbitrary checked exceptions; wrap anything unexpected.
        throw new UndeclaredThrowableException(e,
                "Unexpected exception obtaining token for user " + user.getUserName());
    }
}
From source file:com.trendmicro.hdfs.webdav.HDFSResource.java
@Override public void addMember(final DavResource resource, final InputContext context) throws DavException { // A PUT performed on an existing resource replaces the GET response entity // of the resource. Properties defined on the resource may be recomputed // during PUT processing but are not otherwise affected. final HDFSResource dfsResource = (HDFSResource) resource; final Path destPath = dfsResource.getPath(); try {/*from ww w. j a v a 2 s. c om*/ if (dfsResource.isCollectionRequest) { if (LOG.isDebugEnabled()) { LOG.debug("Creating new directory '" + destPath.toUri().getPath() + "'"); } boolean success = user.doAs(new PrivilegedExceptionAction<Boolean>() { public Boolean run() throws Exception { return FileSystem.get(conf).mkdirs(destPath); } }); if (!success) { throw new DavException(DavServletResponse.SC_CONFLICT); } } else { if (LOG.isDebugEnabled()) { LOG.debug("Creating new file '" + destPath.toUri().getPath() + "'"); } if (!context.hasStream() || context.getContentLength() < 0) { boolean success = user.doAs(new PrivilegedExceptionAction<Boolean>() { public Boolean run() throws Exception { return FileSystem.get(conf).createNewFile(destPath); } }); if (!success) { throw new DavException(DavServletResponse.SC_CONFLICT); } } else { user.doAs(new PrivilegedExceptionAction<Void>() { public Void run() throws Exception { OutputStream out = FileSystem.get(conf).create(destPath); InputStream in = context.getInputStream(); IOUtils.copyBytes(in, out, conf, true); return null; } }); } } } catch (IOException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } }
From source file:com.alibaba.wasp.LocalWaspCluster.java
/**
 * Creates an FServer thread for the given index, registers it with the local
 * cluster, and returns it.
 *
 * Note: the original wrapped this in a PrivilegedExceptionAction whose run()
 * was invoked directly (no doAs), which added nothing; the body is now inlined
 * with identical behavior.
 *
 * @param config configuration for the new FServer
 * @param index  index of the server within the cluster
 * @return the registered FServer thread
 * @throws IOException wrapping any failure during thread creation
 */
public JVMClusterUtil.FServerThread addFServer(final Configuration config, final int index)
        throws IOException {
    try {
        JVMClusterUtil.FServerThread serverThread =
                JVMClusterUtil.createFServerThread(config, fserverClass, index);
        fserverThreads.add(serverThread);
        return serverThread;
    } catch (Exception e) {
        throw new IOException(e);
    }
}
From source file:io.druid.security.kerberos.DruidKerberosAuthenticationHandler.java
/**
 * Performs one round of SPNEGO (Kerberos) negotiation for the request.
 * Returns null when no Negotiate Authorization header is present; returns a
 * token and sets SC_OK when the GSS context is established; otherwise replies
 * SC_UNAUTHORIZED with a continuation token and returns null (handshake still
 * in progress).
 * NOTE(review): assumes serverSubject and gssManager are fields of the
 * enclosing handler initialized during its setup — confirm.
 */
@Override
public AuthenticationToken authenticate(HttpServletRequest request, final HttpServletResponse response)
        throws IOException, AuthenticationException {
    AuthenticationToken token = null;
    String authorization = request
            .getHeader(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.AUTHORIZATION);
    if (authorization == null || !authorization
            .startsWith(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE)) {
        // No Negotiate header: nothing to do for this handler.
        return null;
    } else {
        // Strip the "Negotiate " prefix and decode the client's GSS token.
        authorization = authorization.substring(
                org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE.length())
                .trim();
        final Base64 base64 = new Base64(0);
        final byte[] clientToken = base64.decode(authorization);
        final String serverName = request.getServerName();
        try {
            // Run the GSS acceptance under the server's Kerberos subject.
            token = Subject.doAs(serverSubject, new PrivilegedExceptionAction<AuthenticationToken>() {
                @Override
                public AuthenticationToken run() throws Exception {
                    AuthenticationToken token = null;
                    GSSContext gssContext = null;
                    GSSCredential gssCreds = null;
                    try {
                        // Acceptor credential for HTTP/<serverName> with SPNEGO and KRB5 mechs.
                        gssCreds = gssManager.createCredential(
                                gssManager.createName(KerberosUtil.getServicePrincipal("HTTP", serverName),
                                        KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL")),
                                GSSCredential.INDEFINITE_LIFETIME,
                                new Oid[] { KerberosUtil.getOidInstance("GSS_SPNEGO_MECH_OID"),
                                        KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID") },
                                GSSCredential.ACCEPT_ONLY);
                        gssContext = gssManager.createContext(gssCreds);
                        byte[] serverToken = gssContext.acceptSecContext(clientToken, 0, clientToken.length);
                        if (serverToken != null && serverToken.length > 0) {
                            // Echo the continuation/mutual-auth token back to the client.
                            String authenticate = base64.encodeToString(serverToken);
                            response.setHeader(
                                    org.apache.hadoop.security.authentication.client.KerberosAuthenticator.WWW_AUTHENTICATE,
                                    org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE
                                            + " " + authenticate);
                        }
                        if (!gssContext.isEstablished()) {
                            // Handshake not finished: ask the client for another round.
                            response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
                            log.trace("SPNEGO in progress");
                        } else {
                            // Established: derive the short user name from the client principal.
                            String clientPrincipal = gssContext.getSrcName().toString();
                            KerberosName kerberosName = new KerberosName(clientPrincipal);
                            String userName = kerberosName.getShortName();
                            token = new AuthenticationToken(userName, clientPrincipal, getType());
                            response.setStatus(HttpServletResponse.SC_OK);
                            log.trace("SPNEGO completed for principal [%s]", clientPrincipal);
                        }
                    } finally {
                        // Always release GSS resources, established or not.
                        if (gssContext != null) {
                            gssContext.dispose();
                        }
                        if (gssCreds != null) {
                            gssCreds.dispose();
                        }
                    }
                    return token;
                }
            });
        } catch (PrivilegedActionException ex) {
            // Unwrap: propagate IOExceptions as-is, everything else as an auth failure.
            if (ex.getException() instanceof IOException) {
                throw (IOException) ex.getException();
            } else {
                throw new AuthenticationException(ex.getException());
            }
        }
    }
    return token;
}