Example usage for java.security.PrivilegedExceptionAction

Introduction

This page collects example usages of java.security.PrivilegedExceptionAction. In each example the action is instantiated as an anonymous class whose run() method performs the sensitive work, and the action is then passed to AccessController.doPrivileged or, in the Hadoop examples, to UserGroupInformation.doAs.

Prototype

public interface PrivilegedExceptionAction<T> {
    T run() throws Exception;
}
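
All of the usages below follow the same shape: the action is created as an anonymous class, it is handed to AccessController.doPrivileged (or, in the Hadoop code, UserGroupInformation.doAs), and any checked exception thrown by run() comes back wrapped in a PrivilegedActionException. A minimal, self-contained sketch of that pattern is shown here; the class name and the file-opening scenario are purely illustrative and are not taken from any of the source files below.

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;

public class PrivilegedOpenSketch {

    // Opens a file inside a privileged block. The action's type parameter is the
    // return type of run(); the checked exception run() declares is delivered to
    // the caller wrapped in a PrivilegedActionException.
    public static FileInputStream openPrivileged(final String name) throws FileNotFoundException {
        try {
            return AccessController.doPrivileged(new PrivilegedExceptionAction<FileInputStream>() {
                public FileInputStream run() throws FileNotFoundException {
                    return new FileInputStream(name);
                }
            });
        } catch (PrivilegedActionException e) {
            // doPrivileged wraps the checked exception thrown by run(); unwrap it here.
            throw (FileNotFoundException) e.getException();
        }
    }
}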

Usage

From source file:org.apache.hadoop.mapred.TestHAStateTransitionFailure.java

/**
 * Ensure that a failure to fully transition to the active state causes a
 * shutdown of the jobtracker.
 */
@Test(timeout = 60000)
public void testFailureToTransitionCausesShutdown() throws Exception {
    MiniDFSCluster dfs = null;
    MiniMRHACluster cluster = null;
    try {
        Configuration conf = new Configuration();
        dfs = new MiniDFSCluster.Builder(conf).numDataNodes(1).format(true).checkExitOnShutdown(false).build();

        // Set the owner of the system directory to a different user to the one
        // that starts the JT. This will cause the JT to fail to transition to
        // the active state.
        FileSystem fs = dfs.getFileSystem();
        Path mapredSysDir = new Path(conf.get("mapred.system.dir"), "seq-000000000000");
        fs.mkdirs(mapredSysDir, new FsPermission((short) 700));
        fs.setOwner(mapredSysDir, "mr", "mrgroup");

        cluster = new MiniMRHACluster(fs.getConf());
        final MiniMRHACluster finalCluster = cluster;
        UserGroupInformation ugi = UserGroupInformation.createUserForTesting("notmr",
                new String[] { "notmrgroup" });
        ugi.doAs(new PrivilegedExceptionAction<Object>() {
            @Override
            public Object run() throws Exception {
                finalCluster.getJobTrackerHaDaemon(0).makeActive();
                return null;
            }
        });
        cluster.getJobTrackerHaDaemon(0).makeActive();
        Thread.sleep(1000);
        assertTrue("Should have called terminate", ExitUtil.terminateCalled());
        assertExceptionContains("is not owned by", ExitUtil.getFirstExitException());
    } finally {
        if (cluster != null) {
            cluster.shutdown();
        }
        if (dfs != null) {
            dfs.shutdown();
        }
    }
}

From source file:org.apache.hadoop.hdfs.server.namenode.CancelDelegationTokenServlet.java

@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
        throws ServletException, IOException {
    final UserGroupInformation ugi;
    final ServletContext context = getServletContext();
    final Configuration conf = (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
    try {
        ugi = getUGI(req, conf);
    } catch (IOException ioe) {
        LOG.info("Request for token received with no authentication from " + req.getRemoteAddr(), ioe);
        resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unable to identify or authenticate user");
        return;
    }
    final NameNode nn = (NameNode) context.getAttribute("name.node");
    String tokenString = req.getParameter(TOKEN);
    if (tokenString == null) {
        resp.sendError(HttpServletResponse.SC_MULTIPLE_CHOICES, "Token to cancel not specified");
    }
    final Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>();
    token.decodeFromUrlString(tokenString);

    try {
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                nn.cancelDelegationToken(token);
                return null;
            }
        });
    } catch (Exception e) {
        LOG.info("Exception while cancelling token. Re-throwing. ", e);
        resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
    }
}

From source file:org.apache.hadoop.hdfs.server.namenode.RenewDelegationTokenServlet.java

@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
        throws ServletException, IOException {
    final UserGroupInformation ugi;
    final ServletContext context = getServletContext();
    final Configuration conf = (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
    try {
        ugi = getUGI(req, conf);
    } catch (IOException ioe) {
        LOG.info("Request for token received with no authentication from " + req.getRemoteAddr(), ioe);
        resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unable to identify or authenticate user");
        return;
    }
    final NameNode nn = (NameNode) context.getAttribute("name.node");
    String tokenString = req.getParameter(TOKEN);
    if (tokenString == null) {
        resp.sendError(HttpServletResponse.SC_MULTIPLE_CHOICES, "Token to renew not specified");
    }
    final Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>();
    token.decodeFromUrlString(tokenString);

    try {
        long result = ugi.doAs(new PrivilegedExceptionAction<Long>() {
            public Long run() throws Exception {
                return nn.renewDelegationToken(token);
            }
        });
        PrintStream os = new PrintStream(resp.getOutputStream());
        os.println(result);
        os.close();
    } catch (Exception e) {
        // transfer exception over the http
        String exceptionClass = e.getClass().getCanonicalName();
        String exceptionMsg = e.getLocalizedMessage();
        String strException = exceptionClass + ";" + exceptionMsg;
        LOG.info("Exception while renewing token. Re-throwing. s=" + strException, e);

        resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, strException);
    }
}

From source file:org.apache.axis2.jaxws.util.ClassLoaderUtils.java

/**
 * Return the class for this name
 *
 * @return Class
 */
public static Class forName(final String className) throws ClassNotFoundException {
    Class cl = null;
    try {
        cl = (Class) AccessController.doPrivileged(new PrivilegedExceptionAction() {
            public Object run() throws ClassNotFoundException {
                return Class.forName(className);
            }
        });
    } catch (PrivilegedActionException e) {
        if (log.isDebugEnabled()) {
            log.debug("Exception thrown from AccessController: " + e.getMessage(), e);
        }
        throw (ClassNotFoundException) e.getException();
    }

    return cl;
}

From source file:org.apache.hadoop.hbase.security.token.TokenUtil.java

/**
 * Obtain an authentication token for the given user and add it to the
 * user's credentials.
 * @param conf The configuration for connecting to the cluster
 * @param user The user for whom to obtain the token
 * @throws IOException If making a remote call to the {@link TokenProvider} fails
 * @throws InterruptedException If executing as the given user is interrupted
 */
public static void obtainAndCacheToken(final Configuration conf, UserGroupInformation user)
        throws IOException, InterruptedException {
    try {
        Token<AuthenticationTokenIdentifier> token = user
                .doAs(new PrivilegedExceptionAction<Token<AuthenticationTokenIdentifier>>() {
                    public Token<AuthenticationTokenIdentifier> run() throws Exception {
                        return obtainToken(conf);
                    }
                });

        if (token == null) {
            throw new IOException("No token returned for user " + user.getUserName());
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Obtained token " + token.getKind().toString() + " for user " + user.getUserName());
        }
        user.addToken(token);
    } catch (IOException ioe) {
        throw ioe;
    } catch (InterruptedException ie) {
        throw ie;
    } catch (RuntimeException re) {
        throw re;
    } catch (Exception e) {
        throw new UndeclaredThrowableException(e,
                "Unexpected exception obtaining token for user " + user.getUserName());
    }
}

From source file:org.apache.axis2.deployment.ModuleBuilder.java

private void loadModuleClass(AxisModule module, String moduleClassName) throws DeploymentException {
    Class moduleClass;

    try {
        if ((moduleClassName != null) && !"".equals(moduleClassName)) {
            moduleClass = Loader.loadClass(module.getModuleClassLoader(), moduleClassName);
            final Class fmoduleClass = moduleClass;
            final AxisModule fmodule = module;
            try {
                AccessController.doPrivileged(new PrivilegedExceptionAction() {
                    public Object run() throws IllegalAccessException, InstantiationException {
                        Module new_module = (Module) fmoduleClass.newInstance();
                        fmodule.setModule(new_module);
                        return null;
                    }
                });
            } catch (PrivilegedActionException e) {
                throw e.getException();
            }
        }
    } catch (Exception e) {
        throw new DeploymentException(e.getMessage(), e);
    }
}

From source file:org.apache.blur.hive.BlurHiveMRLoaderOutputCommitter.java

private void finishBulkJob(JobContext context, final boolean apply) throws IOException {
    final Configuration configuration = context.getConfiguration();
    PrivilegedExceptionAction<Void> action = new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            String workingPathStr = configuration.get(BlurConstants.BLUR_BULK_UPDATE_WORKING_PATH);
            Path workingPath = new Path(workingPathStr);
            Path tmpDir = new Path(workingPath, "tmp");
            FileSystem fileSystem = tmpDir.getFileSystem(configuration);
            String loadId = configuration.get(BlurSerDe.BLUR_MR_LOAD_ID);
            Path loadPath = new Path(tmpDir, loadId);

            if (apply) {
                Path newDataPath = new Path(workingPath, "new");
                Path dst = new Path(newDataPath, loadId);
                if (!fileSystem.rename(loadPath, dst)) {
                    LOG.error("Could not move data from src [" + loadPath + "] to dst [" + dst + "]");
                    throw new IOException(
                            "Could not move data from src [" + loadPath + "] to dst [" + dst + "]");
                }

                TableDescriptor tableDescriptor = BlurOutputFormat.getTableDescriptor(configuration);
                String connectionStr = configuration.get(BlurSerDe.BLUR_CONTROLLER_CONNECTION_STR);
                BulkTableUpdateCommand bulkTableUpdateCommand = new BulkTableUpdateCommand();
                bulkTableUpdateCommand.setAutoLoad(true);
                bulkTableUpdateCommand.setTable(tableDescriptor.getName());
                bulkTableUpdateCommand.setWaitForDataBeVisible(true);

                Configuration config = new Configuration(false);
                config.addResource(HDFS_SITE_XML);
                config.addResource(YARN_SITE_XML);
                config.addResource(MAPRED_SITE_XML);

                bulkTableUpdateCommand.addExtraConfig(config);
                if (bulkTableUpdateCommand.run(BlurClient.getClient(connectionStr)) != 0) {
                    throw new IOException("Unknown error occured duing load.");
                }
            } else {
                fileSystem.delete(loadPath, true);
            }
            return null;
        }
    };
    UserGroupInformation userGroupInformation = BlurHiveOutputFormat.getUGI(configuration);
    try {
        userGroupInformation.doAs(action);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}

From source file:com.trendmicro.hdfs.webdav.test.TestPropfindSimple.java

@BeforeClass
public static void setup() throws Exception {
    Configuration conf = minicluster.getConfiguration();
    conf.set("hadoop.proxyuser." + UserGroupInformation.getCurrentUser().getShortUserName() + ".groups",
            "users");
    conf.set("hadoop.proxyuser." + UserGroupInformation.getCurrentUser().getShortUserName() + ".hosts",
            "localhost");
    conf.set("hadoop.webdav.authentication.type", "simple");
    conf.setBoolean("hadoop.webdav.authentication.simple.anonymous.allowed", true);

    minicluster.startMiniCluster(gatewayUser);
    LOG.info("Gateway started on port " + minicluster.getGatewayPort());

    FsPermission.setUMask(conf, new FsPermission((short) 0));

    FileSystem fs = minicluster.getTestFileSystem();
    Path path = new Path("/test");
    assertTrue(fs.mkdirs(path, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)));
    fs.setOwner(path, ownerUser.getShortUserName(), ownerUser.getGroupNames()[0]);

    ownerUser.doAs(new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            FileSystem fs = minicluster.getTestFileSystem();
            for (Path dir : publicDirPaths) {
                assertTrue(
                        fs.mkdirs(dir, new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.NONE)));
            }
            for (Path dir : privateDirPaths) {
                assertTrue(fs.mkdirs(dir, new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE)));
            }
            for (Path path : publicFilePaths) {
                FSDataOutputStream os = fs.create(path,
                        new FsPermission(FsAction.ALL, FsAction.READ, FsAction.NONE), true, 4096, (short) 1,
                        65536, null);
                assertNotNull(os);
                os.write(testPublicData.getBytes());
                os.close();
            }
            for (Path path : privateFilePaths) {
                FSDataOutputStream os = fs.create(path,
                        new FsPermission(FsAction.ALL, FsAction.READ, FsAction.NONE), true, 4096, (short) 1,
                        65536, null);
                assertNotNull(os);
                os.write(testPrivateData.getBytes());
                os.close();
            }
            return null;
        }
    });

}

From source file:org.apache.hadoop.mapred.JobTrackerHAServiceProtocol.java

private FileSystem createFileSystem(final Configuration conf) throws IOException, InterruptedException {
    ACLsManager aclsManager = new ACLsManager(conf, null, null);
    return aclsManager.getMROwner().doAs(new PrivilegedExceptionAction<FileSystem>() {
        public FileSystem run() throws IOException {
            return FileSystem.get(conf);
        }
    });
}

From source file:org.apache.hadoop.hdfs.server.namenode.GetDelegationTokenServlet.java

@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
        throws ServletException, IOException {
    final UserGroupInformation ugi;
    final ServletContext context = getServletContext();
    final Configuration conf = (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
    try {
        ugi = getUGI(req, conf);
    } catch (IOException ioe) {
        LOG.info("Request for token received with no authentication from " + req.getRemoteAddr(), ioe);
        resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unable to identify or authenticate user");
        return;
    }
    LOG.info("Sending token: {" + ugi.getUserName() + "," + req.getRemoteAddr() + "}");
    final NameNode nn = (NameNode) context.getAttribute("name.node");
    String renewer = req.getParameter(RENEWER);
    final String renewerFinal = (renewer == null) ? req.getUserPrincipal().getName() : renewer;

    DataOutputStream dos = null;
    try {
        dos = new DataOutputStream(resp.getOutputStream());
        final DataOutputStream dosFinal = dos; // for doAs block
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws IOException {
                final Credentials ts = DelegationTokenSecretManager.createCredentials(nn, ugi, renewerFinal);
                ts.write(dosFinal);
                dosFinal.close();
                return null;
            }
        });

    } catch (Exception e) {
        LOG.info("Exception while sending token. Re-throwing. ", e);
        resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
    } finally {
        if (dos != null)
            dos.close();
    }
}