Example usage for java.security.PrivilegedAction

List of usage examples for java.security.PrivilegedAction

Introduction

On this page you can find usage examples of java.security.PrivilegedAction.

Prototype

public interface PrivilegedAction<T> {
    T run();
}
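
As a quick orientation before the examples below, here is a minimal sketch of the usual pattern: an anonymous PrivilegedAction is passed to AccessController.doPrivileged, which calls run() and returns its result. The class name and the system property used here are purely illustrative; note also that AccessController and the rest of the Security Manager API are deprecated for removal in recent JDK releases.

import java.security.AccessController;
import java.security.PrivilegedAction;

public class PrivilegedActionSketch {
    public static void main(String[] args) {
        // Run the property lookup as a privileged action so that only this
        // code's own protection domain is consulted, not its callers'.
        String javaHome = AccessController.doPrivileged(new PrivilegedAction<String>() {
            @Override
            public String run() {
                return System.getProperty("java.home");
            }
        });
        System.out.println("java.home = " + javaHome);
    }
}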

Usage
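
Several of the examples below execute the action under a JAAS Subject with Subject.doAs rather than with AccessController.doPrivileged. The following is a minimal, self-contained sketch of that pattern; it assumes an empty Subject purely for illustration, whereas the real examples obtain the Subject from a login (for example Kerberos) context.

import java.security.PrivilegedAction;
import javax.security.auth.Subject;

public class SubjectDoAsSketch {
    public static void main(String[] args) {
        // In real code the Subject comes from a LoginContext; an empty
        // Subject keeps this sketch self-contained.
        Subject subject = new Subject();
        String user = Subject.doAs(subject, new PrivilegedAction<String>() {
            @Override
            public String run() {
                return System.getProperty("user.name");
            }
        });
        System.out.println("user.name = " + user);
    }
}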

From source file: tools.xor.util.ClassUtil.java

/**
 * Creates a new instance of the given class via the no-arg constructor,
 * invoking the constructor as a privileged action if it is protected or
 * private.
 * 
 * @param c given class
 * @return a new instance of the given class via the no-arg constructor
 * @throws Exception if the instance cannot be created
 */
public static Object newInstanceAsPrivileged(final Class<?> c) throws Exception {

    try {
        return c.newInstance();

    } catch (Exception e) {
        return AccessController.doPrivileged(new PrivilegedAction<Object>() {
            public Object run() {
                try {
                    final Constructor<?> constructor = c.getDeclaredConstructor();
                    constructor.setAccessible(true);
                    return constructor.newInstance();
                } catch (Exception e) {
                    throw ClassUtil.wrapRun(e);
                }
            }
        });
    }
}

From source file: org.eclipse.gemini.blueprint.config.internal.adapter.OsgiServiceLifecycleListenerAdapter.java

public void bind(final Object service, final Map properties) throws Exception {
    boolean trace = log.isTraceEnabled();
    if (trace)
        log.trace("Invoking bind method for service " + ObjectUtils.identityToString(service) + " with props="
                + properties);

    if (!initialized)
        retrieveTarget();

    boolean isSecurityEnabled = (System.getSecurityManager() != null);
    AccessControlContext acc = null;

    if (isSecurityEnabled) {
        acc = SecurityUtils.getAccFrom(beanFactory);
    }

    // first call interface method (if it exists)
    if (isLifecycleListener) {
        if (trace)
            log.trace("Invoking listener interface methods");

        try {
            if (isSecurityEnabled) {
                AccessController.doPrivileged(new PrivilegedExceptionAction<Object>() {
                    public Object run() throws Exception {
                        ((OsgiServiceLifecycleListener) target).bind(service, properties);
                        return null;
                    }
                }, acc);
            } else {
                ((OsgiServiceLifecycleListener) target).bind(service, properties);
            }
        } catch (Exception ex) {
            if (ex instanceof PrivilegedActionException) {
                ex = ((PrivilegedActionException) ex).getException();
            }
            log.warn("standard bind method on [" + target.getClass().getName() + "] threw exception", ex);
        }
    }

    if (isSecurityEnabled) {
        AccessController.doPrivileged(new PrivilegedAction<Object>() {
            public Object run() {
                CustomListenerAdapterUtils.invokeCustomMethods(target, bindMethods, service, properties);
                invokeCustomServiceReferenceMethod(target, bindReference, service);
                return null;
            }
        }, acc);
    } else {
        CustomListenerAdapterUtils.invokeCustomMethods(target, bindMethods, service, properties);
        invokeCustomServiceReferenceMethod(target, bindReference, service);
    }
}

From source file: org.apache.ranger.hive.client.HiveClient.java

public void close() {
    Subject.doAs(getLoginSubject(), new PrivilegedAction<Void>() {
        public Void run() {
            close(con);
            return null;
        }
    });
}

From source file: org.echocat.nodoodle.classloading.FileClassLoader.java

@Override
protected synchronized Enumeration<URL> findResources(final String name) throws IOException {
    ensureNotClosed();
    final Iterable<URL> iterable = doPrivileged(new PrivilegedAction<Iterable<URL>>() {
        @Override
        public Iterable<URL> run() {
            final Collection<URL> result = new ArrayList<URL>();
            for (File directory : _directories) {
                final File file = new File(directory, name);
                if (file.isFile()) {
                    result.add(new DirectoryResource(directory, file).getResourceUrl());
                }
            }
            for (JarFile jarFile : _jarFiles) {
                final JarEntry jarEntry = jarFile.getJarEntry(name);
                if (jarEntry != null) {
                    result.add(new JarResource(jarFile, jarEntry).getResourceUrl());
                }
            }
            return result;
        }
    }, _acc);
    //noinspection unchecked
    return new IteratorEnumeration(iterable.iterator());
}

From source file: org.apache.hadoop.mapred.pipes.SubmitterToAccels.java

@Override
public int run(String[] args) throws Exception {
    CommandLineParser cli = new CommandLineParser();
    if (args.length == 0) {
        cli.printUsage();
        return 1;
    }

    cli.addOption("input", false, "input path to the maps", "path");
    cli.addOption("output", false, "output path from the reduces", "path");

    cli.addOption("cpubin", false, "URI to application cpu executable", "class");
    cli.addOption("gpubin", false, "URI to application gpu executable", "class");

    Parser parser = cli.createParser();
    try {
        GenericOptionsParser genericParser = new GenericOptionsParser(getConf(), args);
        CommandLine results = parser.parse(cli.options, genericParser.getRemainingArgs());
        JobConf job = new JobConf(getConf());

        if (results.hasOption("input")) {
            FileInputFormat.setInputPaths(job, (String) results.getOptionValue("input"));
        }
        if (results.hasOption("output")) {
            FileOutputFormat.setOutputPath(job, new Path((String) results.getOptionValue("output")));
        }
        if (results.hasOption("cpubin")) {
            setCPUExecutable(job, (String) results.getOptionValue("cpubin"));
        }
        if (results.hasOption("gpubin")) {
            setGPUExecutable(job, (String) results.getOptionValue("gpubin"));
        }
        // if they gave us a jar file, include it into the class path
        String jarFile = job.getJar();
        if (jarFile != null) {
            final URL[] urls = new URL[] { FileSystem.getLocal(job).pathToFile(new Path(jarFile)).toURL() };
            //FindBugs complains that creating a URLClassLoader should be
            //in a doPrivileged() block. 
            ClassLoader loader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
                public ClassLoader run() {
                    return new URLClassLoader(urls);
                }
            });
            job.setClassLoader(loader);
        }
        runJob(job);
        return 0;
    } catch (ParseException pe) {
        LOG.info("Error :" + pe);
        cli.printUsage();
        return 1;
    }
}

From source file: org.apache.ranger.hbase.client.HBaseClient.java

public List<String> getTableList(final String tableNameMatching) {
    List<String> ret = null;
    final String errMsg = " You can still save the repository and start creating "
            + "policies, but you would not be able to use autocomplete for "
            + "resource names. Check xa_portal.log for more info.";

    subj = getLoginSubject();

    if (subj != null) {
        ClassLoader prevCl = Thread.currentThread().getContextClassLoader();
        try {
            Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());

            ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {

                @Override
                public List<String> run() {

                    List<String> tableList = new ArrayList<String>();
                    HBaseAdmin admin = null;
                    try {

                        Configuration conf = HBaseConfiguration.create();
                        admin = new HBaseAdmin(conf);
                        for (HTableDescriptor htd : admin.listTables(tableNameMatching)) {
                            tableList.add(htd.getNameAsString());
                        }
                    } catch (ZooKeeperConnectionException zce) {
                        String msgDesc = "getTableList: Unable to connect to `ZooKeeper` "
                                + "using given config parameters.";
                        HadoopException hdpException = new HadoopException(msgDesc, zce);
                        hdpException.generateResponseDataMap(false, getMessage(zce), msgDesc + errMsg, null,
                                null);
                        throw hdpException;

                    } catch (MasterNotRunningException mnre) {
                        String msgDesc = "getTableList: Looks like `Master` is not running, "
                                + "so couldn't check that running HBase is available or not, "
                                + "Please try again later.";
                        HadoopException hdpException = new HadoopException(msgDesc, mnre);
                        hdpException.generateResponseDataMap(false, getMessage(mnre), msgDesc + errMsg, null,
                                null);
                        throw hdpException;

                    } catch (IOException io) {
                        String msgDesc = "Unable to get HBase table List for [repository:"
                                + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching
                                + "].";
                        HadoopException hdpException = new HadoopException(msgDesc, io);
                        hdpException.generateResponseDataMap(false, getMessage(io), msgDesc + errMsg, null,
                                null);
                        throw hdpException;
                    } catch (Throwable e) {
                        String msgDesc = "Unable to get HBase table List for [repository:"
                                + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching
                                + "].";
                        LOG.error(msgDesc);
                        HadoopException hdpException = new HadoopException(msgDesc, e);
                        hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + errMsg, null,
                                null);
                        throw hdpException;
                    } finally {
                        if (admin != null) {
                            try {
                                admin.close();
                            } catch (IOException e) {
                                LOG.error("Unable to close HBase connection ["
                                        + getConfigHolder().getDatasourceName() + "]", e);
                            }
                        }
                    }
                    return tableList;
                }

            });
        } finally {
            Thread.currentThread().setContextClassLoader(prevCl);
        }
    }
    return ret;
}

From source file: org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint.java

@Override
public void secureBulkLoadHFiles(RpcController controller, SecureBulkLoadHFilesRequest request,
        RpcCallback<SecureBulkLoadHFilesResponse> done) {
    final List<Pair<byte[], String>> familyPaths = new ArrayList<Pair<byte[], String>>();
    for (ClientProtos.BulkLoadHFileRequest.FamilyPath el : request.getFamilyPathList()) {
        familyPaths.add(new Pair(el.getFamily().toByteArray(), el.getPath()));
    }
    final Token userToken = new Token(request.getFsToken().getIdentifier().toByteArray(),
            request.getFsToken().getPassword().toByteArray(), new Text(request.getFsToken().getKind()),
            new Text(request.getFsToken().getService()));
    final String bulkToken = request.getBulkToken();
    User user = getActiveUser();
    final UserGroupInformation ugi = user.getUGI();
    if (userToken != null) {
        ugi.addToken(userToken);
    } else if (userProvider.isHadoopSecurityEnabled()) {
        //we allow this to pass through in "simple" security mode
        //for mini cluster testing
        ResponseConverter.setControllerException(controller,
                new DoNotRetryIOException("User token cannot be null"));
        return;
    }

    HRegion region = env.getRegion();
    boolean bypass = false;
    if (region.getCoprocessorHost() != null) {
        try {
            bypass = region.getCoprocessorHost().preBulkLoadHFile(familyPaths);
        } catch (IOException e) {
            ResponseConverter.setControllerException(controller, e);
            done.run(null);
            return;
        }
    }
    boolean loaded = false;
    if (!bypass) {
        // Get the target fs (HBase region server fs) delegation token
        // Since we have checked the permission via 'preBulkLoadHFile', now let's give
        // the 'request user' necessary token to operate on the target fs.
        // After this point the 'doAs' user will hold two tokens, one for the source fs
        // ('request user'), another for the target fs (HBase region server principal).
        FsDelegationToken targetfsDelegationToken = new FsDelegationToken(userProvider, "renewer");
        try {
            targetfsDelegationToken.acquireDelegationToken(fs);
        } catch (IOException e) {
            ResponseConverter.setControllerException(controller, e);
            done.run(null);
            return;
        }
        Token<?> targetFsToken = targetfsDelegationToken.getUserToken();
        if (targetFsToken != null
                && (userToken == null || !targetFsToken.getService().equals(userToken.getService()))) {
            ugi.addToken(targetFsToken);
        }

        loaded = ugi.doAs(new PrivilegedAction<Boolean>() {
            @Override
            public Boolean run() {
                FileSystem fs = null;
                try {
                    Configuration conf = env.getConfiguration();
                    fs = FileSystem.get(conf);
                    for (Pair<byte[], String> el : familyPaths) {
                        Path stageFamily = new Path(bulkToken, Bytes.toString(el.getFirst()));
                        if (!fs.exists(stageFamily)) {
                            fs.mkdirs(stageFamily);
                            fs.setPermission(stageFamily, PERM_ALL_ACCESS);
                        }
                    }
                    //We call bulkLoadHFiles as requesting user
                    //To enable access prior to staging
                    return env.getRegion().bulkLoadHFiles(familyPaths, true,
                            new SecureBulkLoadListener(fs, bulkToken, conf));
                } catch (Exception e) {
                    LOG.error("Failed to complete bulk load", e);
                }
                return false;
            }
        });
    }
    if (region.getCoprocessorHost() != null) {
        try {
            loaded = region.getCoprocessorHost().postBulkLoadHFile(familyPaths, loaded);
        } catch (IOException e) {
            ResponseConverter.setControllerException(controller, e);
            done.run(null);
            return;
        }
    }
    done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(loaded).build());
}

From source file: org.apache.roller.planet.business.jpa.JPAPersistenceStrategy.java

/**
 * Get the context class loader associated with the current thread. This is
 * done in a doPrivileged block because it is a security-sensitive operation.
 * @return the current thread's context class loader.
 */
protected static ClassLoader getContextClassLoader() {
    return (ClassLoader) AccessController.doPrivileged(new PrivilegedAction() {
        public Object run() {
            return Thread.currentThread().getContextClassLoader();
        }
    });
}

From source file: org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.java

public void run() {
    if (UserGroupInformation.isSecurityEnabled()) {
        UserGroupInformation ugi = null;
        try {
            ugi = UserGroupInformation.getLoginUser();
        } catch (IOException e) {
            LOG.error(StringUtils.stringifyException(e));
            e.printStackTrace();
            Runtime.getRuntime().exit(-1);
        }
        ugi.doAs(new PrivilegedAction<Object>() {
            @Override
            public Object run() {
                doWork();
                return null;
            }
        });
    } else {
        doWork();
    }
}

From source file: org.apache.roller.weblogger.business.jpa.JPAPersistenceStrategy.java

/**
 * Get the context class loader associated with the current thread. This is
 * done in a doPrivileged block because it is a security-sensitive operation.
 * @return the current thread's context class loader.
 */
private static ClassLoader getContextClassLoader() {
    return (ClassLoader) AccessController.doPrivileged(new PrivilegedAction() {
        public Object run() {
            return Thread.currentThread().getContextClassLoader();
        }
    });
}