Example usage for java.security PrivilegedExceptionAction PrivilegedExceptionAction

Introduction

This page collects usage examples for java.security.PrivilegedExceptionAction, showing how the interface is implemented (typically as an anonymous class) and passed to AccessController.doPrivileged or to a doAs-style method such as Hadoop's UserGroupInformation.doAs.

Prototype

public interface PrivilegedExceptionAction<T> {
    T run() throws Exception;
}
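
The examples below all follow the same basic pattern: implement run() in an anonymous PrivilegedExceptionAction, hand it to AccessController.doPrivileged (or to a doAs-style wrapper), and then handle the checked exception that run() may throw. Here is a minimal, self-contained sketch of that pattern; the class name PrivilegedFileRead and the method readFirstLine are illustrative only and are not taken from any of the projects listed below.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;

public class PrivilegedFileRead {

    // Reads the first line of a file inside a privileged block, so code
    // further up the call stack is not included in the permission check.
    public static String readFirstLine(final String path) throws IOException {
        try {
            return AccessController.doPrivileged(new PrivilegedExceptionAction<String>() {
                @Override
                public String run() throws IOException {
                    return Files.readAllLines(Paths.get(path), StandardCharsets.UTF_8).get(0);
                }
            });
        } catch (PrivilegedActionException e) {
            // doPrivileged wraps any checked exception thrown by run() in a
            // PrivilegedActionException; unwrap the original IOException here.
            throw (IOException) e.getException();
        }
    }
}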

Usage

From source file:org.nebulaframework.deployment.classloading.GridArchiveClassLoader.java

/**
 * Attempts to find the given Class within the {@code GridArchive}. If
 * found (either as a direct class file or within a {@code .jar} library
 * inside the {@code .nar} file), returns the Class instance for it.
 * @return the {@code Class<?>} instance for the class to be loaded
 * 
 * @throws ClassNotFoundException if unable to find the class
 */
@Override
public Class<?> findClass(String name) throws ClassNotFoundException {
    try {

        // Convert class name to file name
        final String fileName = name.replaceAll("\\.", "/") + ".class";

        // Search in Archive | Exception if failed
        byte[] bytes = AccessController.doPrivileged(new PrivilegedExceptionAction<byte[]>() {

            @Override
            public byte[] run() throws IOException, ClassNotFoundException {
                return findInArchive(fileName);
            }

        });

        // If found, define class and return
        return defineClass(name, bytes, 0, bytes.length, REMOTE_CODESOURCE);

    } catch (Exception e) {
        throw new ClassNotFoundException("Unable to locate class", e);
    }
}

From source file:org.apache.hadoop.hbase.master.procedure.TruncateTableProcedure.java

private boolean preTruncate(final MasterProcedureEnv env) throws IOException, InterruptedException {
    final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
    if (cpHost != null) {
        final TableName tableName = getTableName();
        user.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                cpHost.preTruncateTableHandler(tableName);
                return null;
            }
        });
    }
    return true;
}

From source file:org.apache.hadoop.hive.ql.txn.compactor.Initiator.java

private CompactionType checkForCompaction(final CompactionInfo ci, final ValidTxnList txns,
        final StorageDescriptor sd, final String runAs) throws IOException, InterruptedException {
    // If it's marked as too many aborted, we already know we need to compact
    if (ci.tooManyAborts) {
        LOG.debug("Found too many aborted transactions for " + ci.getFullPartitionName() + ", "
                + "initiating major compaction");
        return CompactionType.MAJOR;
    }
    if (runJobAsSelf(runAs)) {
        return determineCompactionType(ci, txns, sd);
    } else {
        LOG.info("Going to initiate as user " + runAs);
        UserGroupInformation ugi = UserGroupInformation.createProxyUser(runAs,
                UserGroupInformation.getLoginUser());
        return ugi.doAs(new PrivilegedExceptionAction<CompactionType>() {
            @Override
            public CompactionType run() throws Exception {
                return determineCompactionType(ci, txns, sd);
            }
        });
    }
}

From source file:org.apache.carbondata.core.util.CarbonUtil.java

/**
 * Deletes the given folders and files recursively, running as the login user.
 *
 * @param path the files or folders to delete
 * @throws IOException if a file or folder cannot be deleted
 * @throws InterruptedException if the privileged action is interrupted
 */
public static void deleteFoldersAndFiles(final File... path) throws IOException, InterruptedException {
    UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
            for (int i = 0; i < path.length; i++) {
                deleteRecursive(path[i]);
            }
            return null;
        }
    });
}

From source file:org.apache.hadoop.hdfs.TestDistributedFileSystem.java

@Test
public void testFileChecksum() throws Exception {
    ((Log4JLogger) HftpFileSystem.LOG).getLogger().setLevel(Level.ALL);

    final long seed = RAN.nextLong();
    System.out.println("seed=" + seed);
    RAN.setSeed(seed);

    final Configuration conf = getTestConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
    conf.set("slave.host.name", "localhost");

    final MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
    final FileSystem hdfs = cluster.getFileSystem();

    final String nnAddr = conf.get("dfs.http.address");
    final UserGroupInformation current = UserGroupInformation.getCurrentUser();
    final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(current.getShortUserName() + "x",
            new String[] { "user" });

    try {
        ((DistributedFileSystem) hdfs).getFileChecksum(new Path("/test/TestNonExistingFile"));
        fail("Expecting FileNotFoundException");
    } catch (FileNotFoundException e) {
        assertTrue("Not throwing the intended exception message",
                e.getMessage().contains("File does not exist: /test/TestNonExistingFile"));
    }

    try {
        Path path = new Path("/test/TestExistingDir/");
        hdfs.mkdirs(path);
        ((DistributedFileSystem) hdfs).getFileChecksum(path);
        fail("Expecting FileNotFoundException");
    } catch (FileNotFoundException e) {
        assertTrue("Not throwing the intended exception message",
                e.getMessage().contains("File does not exist: /test/TestExistingDir"));
    }

    //hftp
    final String hftpuri = "hftp://" + nnAddr;
    System.out.println("hftpuri=" + hftpuri);
    final FileSystem hftp = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
        @Override
        public FileSystem run() throws Exception {
            return new Path(hftpuri).getFileSystem(conf);
        }
    });

    //webhdfs
    final String webhdfsuri = WebHdfsFileSystem.SCHEME + "://" + nnAddr;
    System.out.println("webhdfsuri=" + webhdfsuri);
    final FileSystem webhdfs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
        @Override
        public FileSystem run() throws Exception {
            return new Path(webhdfsuri).getFileSystem(conf);
        }
    });

    final Path dir = new Path("/filechecksum");
    final int block_size = 1024;
    final int buffer_size = conf.getInt("io.file.buffer.size", 4096);
    conf.setInt("io.bytes.per.checksum", 512);

    //try different number of blocks
    for (int n = 0; n < 5; n++) {
        //generate random data
        final byte[] data = new byte[RAN.nextInt(block_size / 2 - 1) + n * block_size + 1];
        RAN.nextBytes(data);
        System.out.println("data.length=" + data.length);

        //write data to a file
        final Path foo = new Path(dir, "foo" + n);
        {
            final FSDataOutputStream out = hdfs.create(foo, false, buffer_size, (short) 2, block_size);
            out.write(data);
            out.close();
        }

        //compute checksum
        final FileChecksum hdfsfoocs = hdfs.getFileChecksum(foo);
        System.out.println("hdfsfoocs=" + hdfsfoocs);

        //hftp
        final FileChecksum hftpfoocs = hftp.getFileChecksum(foo);
        System.out.println("hftpfoocs=" + hftpfoocs);

        final Path qualified = new Path(hftpuri + dir, "foo" + n);
        final FileChecksum qfoocs = hftp.getFileChecksum(qualified);
        System.out.println("qfoocs=" + qfoocs);

        //webhdfs
        final FileChecksum webhdfsfoocs = webhdfs.getFileChecksum(foo);
        System.out.println("webhdfsfoocs=" + webhdfsfoocs);

        final Path webhdfsqualified = new Path(webhdfsuri + dir, "foo" + n);
        final FileChecksum webhdfs_qfoocs = webhdfs.getFileChecksum(webhdfsqualified);
        System.out.println("webhdfs_qfoocs=" + webhdfs_qfoocs);

        //write another file
        final Path bar = new Path(dir, "bar" + n);
        {
            final FSDataOutputStream out = hdfs.create(bar, false, buffer_size, (short) 2, block_size);
            out.write(data);
            out.close();
        }

        { //verify checksum
            final FileChecksum barcs = hdfs.getFileChecksum(bar);
            final int barhashcode = barcs.hashCode();
            assertEquals(hdfsfoocs.hashCode(), barhashcode);
            assertEquals(hdfsfoocs, barcs);

            //hftp
            assertEquals(hftpfoocs.hashCode(), barhashcode);
            assertEquals(hftpfoocs, barcs);

            assertEquals(qfoocs.hashCode(), barhashcode);
            assertEquals(qfoocs, barcs);

            //webhdfs
            assertEquals(webhdfsfoocs.hashCode(), barhashcode);
            assertEquals(webhdfsfoocs, barcs);

            assertEquals(webhdfs_qfoocs.hashCode(), barhashcode);
            assertEquals(webhdfs_qfoocs, barcs);
        }

        hdfs.setPermission(dir, new FsPermission((short) 0));
        { //test permission error on hftp 
            try {
                hftp.getFileChecksum(qualified);
                fail();
            } catch (IOException ioe) {
                FileSystem.LOG.info("GOOD: getting an exception", ioe);
            }
        }

        { //test permission error on webhdfs 
            try {
                webhdfs.getFileChecksum(webhdfsqualified);
                fail();
            } catch (IOException ioe) {
                FileSystem.LOG.info("GOOD: getting an exception", ioe);
            }
        }
        hdfs.setPermission(dir, new FsPermission((short) 0777));
    }
    cluster.shutdown();
}

From source file:com.thinkbiganalytics.kerberos.TestKerberosKinit.java

private void testHiveJdbcConnection(final String configResources, final String keytab,
        final String realUserPrincipal, final String proxyUser, final String hiveHostName) throws Exception {

    final Configuration configuration = TestKerberosKinit.createConfigurationFromList(configResources);
    UserGroupInformation realugi = TestKerberosKinit.generateKerberosTicket(configuration, keytab,
            realUserPrincipal);

    System.out.println(" ");
    System.out.println("Sucessfully got a kerberos ticket in the JVM");

    HiveConnection realUserConnection = (HiveConnection) realugi
            .doAs(new PrivilegedExceptionAction<Connection>() {
                public Connection run() {
                    Connection connection;
                    try {
                        Class.forName(DRIVER_NAME);
                        String url = hiveHostName;
                        if (proxyUser != null) {
                            url = url + ";hive.server2.proxy.user=" + proxyUser;
                        }
                        System.out.println("Hive URL: " + url);
                        connection = DriverManager.getConnection(url);

                        Class.forName(DRIVER_NAME);

                        System.out.println("creating statement");
                        Statement stmt = connection.createStatement();

                        String sql = "show databases";
                        ResultSet res = stmt.executeQuery(sql);
                        System.out.println(" \n");
                        System.out.println("Executing the Hive Query:");
                        System.out.println(" ");

                        System.out.println("List of Databases");
                        while (res.next()) {
                            System.out.println(res.getString(1));
                        }

                    } catch (Exception e) {
                        throw new RuntimeException("Error creating connection with proxy user", e);
                    }
                    return connection;
                }
            });

}

From source file:org.apache.hadoop.crypto.key.kms.server.KMS.java

@GET
@Path(KMSRESTConstants.KEYS_METADATA_RESOURCE)
@Produces(MediaType.APPLICATION_JSON)
public Response getKeysMetadata(@QueryParam(KMSRESTConstants.KEY) List<String> keyNamesList) throws Exception {
    try {
        LOG.trace("Entering getKeysMetadata method.");
        KMSWebApp.getAdminCallsMeter().mark();
        UserGroupInformation user = HttpUserGroupInformation.get();
        final String[] keyNames = keyNamesList.toArray(new String[keyNamesList.size()]);
        assertAccess(KMSACLs.Type.GET_METADATA, user, KMSOp.GET_KEYS_METADATA);

        KeyProvider.Metadata[] keysMeta = user.doAs(new PrivilegedExceptionAction<KeyProvider.Metadata[]>() {
            @Override
            public KeyProvider.Metadata[] run() throws Exception {
                return provider.getKeysMetadata(keyNames);
            }
        });

        Object json = KMSServerJSONUtils.toJSON(keyNames, keysMeta);
        kmsAudit.ok(user, KMSOp.GET_KEYS_METADATA, "");
        LOG.trace("Exiting getKeysMetadata method.");
        return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
    } catch (Exception e) {
        LOG.debug("Exception in getKeysmetadata.", e);
        throw e;
    }
}

From source file:org.apache.hadoop.hbase.master.procedure.DeleteTableProcedure.java

private boolean preDelete(final MasterProcedureEnv env) throws IOException, InterruptedException {
    final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
    if (cpHost != null) {
        final TableName tableName = this.tableName;
        user.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                cpHost.preDeleteTableHandler(tableName);
                return null;
            }
        });
    }
    return true;
}

From source file:org.apache.axis2.jaxws.description.impl.URIResolverImpl.java

private InputStream openStream_doPriv(final URL streamURL) throws IOException {
    try {
        return (InputStream) AccessController.doPrivileged(new PrivilegedExceptionAction() {
            public Object run() throws IOException {
                return streamURL.openStream();
            }
        });
    } catch (PrivilegedActionException e) {
        throw (IOException) e.getException();
    }
}

From source file:org.apache.hadoop.hbase.quotas.TestSuperUserQuotaPermissions.java

private <T> T doAsUser(UserGroupInformation ugi, Callable<T> task) throws Exception {
    return ugi.doAs(new PrivilegedExceptionAction<T>() {
        public T run() throws Exception {
            return task.call();
        }
    });
}