List of usage examples for the java.security.PrivilegedAction interface (anonymous-subclass usage with AccessController.doPrivileged, Subject.doAsPrivileged, and UserGroupInformation.doAs)
From source file:org.apache.axis2.jaxws.description.builder.converter.JavaClassToDBCConverter.java
/**
 * Adds any checked exceptions (i.e. declared on a method via a throws clause)
 * to the list of classes for which a DBC needs to be built.
 *
 * @param rootClass the class whose public methods are scanned for declared
 *                  exception types
 */
private void establishExceptionClasses(final Class rootClass) {
    // Reflective method lookup may require runtime permissions, so it runs in a
    // privileged block. The generic PrivilegedAction<Method[]> avoids both the
    // raw type and the cast of the original version.
    Method[] methods = AccessController.doPrivileged(new PrivilegedAction<Method[]>() {
        public Method[] run() {
            return rootClass.getMethods();
        }
    });
    for (Method method : methods) {
        // Each declared (checked) exception type needs its own DBC; iterating an
        // empty array is a no-op, so no explicit length guard is needed.
        for (Class checkedException : method.getExceptionTypes()) {
            classes.add(checkedException);
        }
    }
}
From source file:de.ingrid.usermanagement.jetspeed.IngridPermissionManager.java
public boolean checkPermission(Subject subject, final Permission permission) { try {/*from w w w .java 2 s.c o m*/ //Subject.doAs(subject, new PrivilegedAction() Subject.doAsPrivileged(subject, new PrivilegedAction() { public Object run() { AccessController.checkPermission(permission); return null; } }, null); } catch (Exception e) { return false; } return true; }
From source file:org.apache.hadoop.yarn.server.resourcemanager.security.TestAMRMTokens.java
private ApplicationMasterProtocol createRMClient(final MockRM rm, final Configuration conf, final YarnRPC rpc, UserGroupInformation currentUser) { return currentUser.doAs(new PrivilegedAction<ApplicationMasterProtocol>() { @Override//from ww w . j a v a 2s .c o m public ApplicationMasterProtocol run() { return (ApplicationMasterProtocol) rpc.getProxy(ApplicationMasterProtocol.class, rm.getApplicationMasterService().getBindAddress(), conf); } }); }
From source file:Tcpbw100.java
public void run_test() { // The Java security model considers calling a method that opens a socket // from JavaScript to be a privileged action. By using // java.security.privilegedAction here, we can grant JavaScript the // same expanded privileges as the signed applet to open a socket. AccessController.doPrivileged(new PrivilegedAction() { public Object run() { pub_errmsg = "Test in progress."; runtest();/*from ww w . ja va 2 s.co m*/ return null; } }); }
From source file:org.apache.hadoop.tools.mapred.TestCopyMapper.java
/**
 * Verifies that CopyMapper can copy a file the remote user "guest" can read
 * (read-only source permissions) into a world-writable target directory.
 * All filesystem interactions are performed as "guest" via doAs.
 */
@Test(timeout = 40000)
public void testCopyReadableFiles() {
    try {
        deleteState();
        createSourceData();
        try {
            UsersGroups.addUser("guest");
        } catch (UserAlreadyExistsException e) {
            // The user may already exist from a previous test run; that is fine.
        }
        UserGroupInformation tmpUser = UserGroupInformation.createRemoteUser("guest");
        final CopyMapper copyMapper = new CopyMapper();
        // Build the mapper context as "guest" so it is bound to that user.
        final Mapper<Text, CopyListingFileStatus, Text, Text>.Context context = tmpUser
                .doAs(new PrivilegedAction<Mapper<Text, CopyListingFileStatus, Text, Text>.Context>() {
                    @Override
                    public Mapper<Text, CopyListingFileStatus, Text, Text>.Context run() {
                        try {
                            StubContext stubContext = new StubContext(getConfiguration(), null, 0);
                            return stubContext.getContext();
                        } catch (Exception e) {
                            LOG.error("Exception encountered ", e);
                            throw new RuntimeException(e);
                        }
                    }
                });
        touchFile(SOURCE_PATH + "/src/file");
        mkdirs(TARGET_PATH);
        // Source file: read-only for everyone; target dir: 0777 (short 511).
        cluster.getFileSystem().setPermission(new Path(SOURCE_PATH + "/src/file"),
                new FsPermission(FsAction.READ, FsAction.READ, FsAction.READ));
        cluster.getFileSystem().setPermission(new Path(TARGET_PATH), new FsPermission((short) 511));
        // Obtain a FileSystem handle as "guest" so the copy runs with guest's rights.
        final FileSystem tmpFS = tmpUser.doAs(new PrivilegedAction<FileSystem>() {
            @Override
            public FileSystem run() {
                try {
                    return FileSystem.get(configuration);
                } catch (IOException e) {
                    LOG.error("Exception encountered ", e);
                    Assert.fail("Test failed: " + e.getMessage());
                    throw new RuntimeException("Test ought to fail here");
                }
            }
        });
        // Run the actual map() as "guest"; the copy is expected to succeed.
        tmpUser.doAs(new PrivilegedAction<Integer>() {
            @Override
            public Integer run() {
                try {
                    copyMapper.setup(context);
                    copyMapper.map(new Text("/src/file"),
                            new CopyListingFileStatus(
                                    tmpFS.getFileStatus(new Path(SOURCE_PATH + "/src/file"))),
                            context);
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
                return null;
            }
        });
    } catch (Exception e) {
        LOG.error("Exception encountered ", e);
        Assert.fail("Test failed: " + e.getMessage());
    }
}
From source file:org.apache.catalina.cluster.session.DeltaSession.java
/**
 * Return the <code>HttpSession</code> for which this object is the facade.
 * The facade is created lazily on first access; under a security manager the
 * construction is wrapped in a privileged block.
 */
public HttpSession getSession() {
    if (facade != null) {
        return facade;
    }
    if (System.getSecurityManager() == null) {
        facade = new DeltaSessionFacade(this);
    } else {
        final DeltaSession fsession = this;
        PrivilegedAction buildFacade = new PrivilegedAction() {
            public Object run() {
                return new DeltaSessionFacade(fsession);
            }
        };
        facade = (DeltaSessionFacade) AccessController.doPrivileged(buildFacade);
    }
    return facade;
}
From source file:org.apache.axiom.om.util.StAXUtils.java
/**
 * Creates a new, immutable XMLInputFactory, optionally resolving the StAX
 * implementation through the given class loader. The whole creation runs in
 * a privileged block because it may read system properties and load classes.
 *
 * @param classLoader   context class loader to use during factory creation,
 *                      or null to keep the current one
 * @param configuration optional extra configuration applied to the factory;
 *                      may be null
 */
private static XMLInputFactory newXMLInputFactory(final ClassLoader classLoader,
        final StAXParserConfiguration configuration) {
    return (XMLInputFactory) AccessController.doPrivileged(new PrivilegedAction() {
        public Object run() {
            // Temporarily swap the context class loader so XMLInputFactory.newInstance()
            // discovers the implementation through the requested loader; the original
            // loader is restored in the finally block below.
            ClassLoader savedClassLoader;
            if (classLoader == null) {
                savedClassLoader = null;
            } else {
                savedClassLoader = Thread.currentThread().getContextClassLoader();
                Thread.currentThread().setContextClassLoader(classLoader);
            }
            try {
                XMLInputFactory factory = XMLInputFactory.newInstance();
                // Woodstox by default creates coalescing parsers. Even if this violates
                // the StAX specs, for compatibility with Woodstox, we always enable the
                // coalescing mode. Note that we need to do that before loading
                // XMLInputFactory.properties so that this setting can be overridden.
                factory.setProperty(XMLInputFactory.IS_COALESCING, Boolean.TRUE);
                // Apply user-provided property overrides, if the properties file exists.
                Map props = loadFactoryProperties("XMLInputFactory.properties");
                if (props != null) {
                    for (Iterator it = props.entrySet().iterator(); it.hasNext();) {
                        Map.Entry entry = (Map.Entry) it.next();
                        factory.setProperty((String) entry.getKey(), entry.getValue());
                    }
                }
                StAXDialect dialect = StAXDialectDetector.getDialect(factory.getClass());
                if (configuration != null) {
                    factory = configuration.configure(factory, dialect);
                }
                // Normalize and wrap the factory so callers get a thread-safe,
                // unmodifiable instance.
                return new ImmutableXMLInputFactory(
                        dialect.normalize(dialect.makeThreadSafe(factory)));
            } finally {
                if (savedClassLoader != null) {
                    Thread.currentThread().setContextClassLoader(savedClassLoader);
                }
            }
        }
    });
}
From source file:it.crs4.pydoop.pipes.Submitter.java
/**
 * Command-line entry point: parses the pipes submitter options, configures a
 * JobConf accordingly, and submits the job.
 *
 * @param args command-line arguments
 * @return 0 on success, 1 on usage error or parse failure
 * @throws Exception if job configuration or submission fails
 */
@Override
public int run(String[] args) throws Exception {
    CommandLineParser cli = new CommandLineParser();
    if (args.length == 0) {
        cli.printUsage();
        return 1;
    }
    // Declare all supported options before parsing.
    cli.addOption("input", false, "input path to the maps", "path");
    cli.addOption("output", false, "output path from the reduces", "path");
    cli.addOption("jar", false, "job jar file", "path");
    cli.addOption("inputformat", false, "java classname of InputFormat", "class");
    //cli.addArgument("javareader", false, "is the RecordReader in Java");
    cli.addOption("map", false, "java classname of Mapper", "class");
    cli.addOption("partitioner", false, "java classname of Partitioner", "class");
    cli.addOption("reduce", false, "java classname of Reducer", "class");
    cli.addOption("writer", false, "java classname of OutputFormat", "class");
    cli.addOption("program", false, "URI to application executable", "class");
    cli.addOption("reduces", false, "number of reduces", "num");
    cli.addOption("jobconf", false,
            "\"n1=v1,n2=v2,..\" (Deprecated) Optional. Add or override a JobConf property.",
            "key=val");
    cli.addOption("lazyOutput", false, "Optional. Create output lazily", "boolean");
    Parser parser = cli.createParser();
    try {
        // Let Hadoop's generic options (-D, -fs, ...) be consumed first; only the
        // remaining args are parsed against the submitter's own options.
        GenericOptionsParser genericParser = new GenericOptionsParser(getConf(), args);
        CommandLine results = parser.parse(cli.options, genericParser.getRemainingArgs());
        JobConf job = new JobConf(getConf());
        if (results.hasOption("input")) {
            FileInputFormat.setInputPaths(job, results.getOptionValue("input"));
        }
        if (results.hasOption("output")) {
            FileOutputFormat.setOutputPath(job, new Path(results.getOptionValue("output")));
        }
        if (results.hasOption("jar")) {
            job.setJar(results.getOptionValue("jar"));
        }
        // Supplying a Java class for any pipeline stage also flags that stage as
        // Java-side (as opposed to the external pipes executable).
        if (results.hasOption("inputformat")) {
            setIsJavaRecordReader(job, true);
            job.setInputFormat(getClass(results, "inputformat", job, InputFormat.class));
        }
        if (results.hasOption("javareader")) {
            setIsJavaRecordReader(job, true);
        }
        if (results.hasOption("map")) {
            setIsJavaMapper(job, true);
            job.setMapperClass(getClass(results, "map", job, Mapper.class));
        }
        if (results.hasOption("partitioner")) {
            job.setPartitionerClass(getClass(results, "partitioner", job, Partitioner.class));
        }
        if (results.hasOption("reduce")) {
            setIsJavaReducer(job, true);
            job.setReducerClass(getClass(results, "reduce", job, Reducer.class));
        }
        if (results.hasOption("reduces")) {
            job.setNumReduceTasks(Integer.parseInt(results.getOptionValue("reduces")));
        }
        if (results.hasOption("writer")) {
            setIsJavaRecordWriter(job, true);
            job.setOutputFormat(getClass(results, "writer", job, OutputFormat.class));
        }
        if (results.hasOption("lazyOutput")) {
            if (Boolean.parseBoolean(results.getOptionValue("lazyOutput"))) {
                LazyOutputFormat.setOutputFormatClass(job, job.getOutputFormat().getClass());
            }
        }
        if (results.hasOption("program")) {
            setExecutable(job, results.getOptionValue("program"));
        }
        if (results.hasOption("jobconf")) {
            LOG.warn("-jobconf option is deprecated, please use -D instead.");
            // Parse the deprecated comma-separated "key=value" list.
            String options = results.getOptionValue("jobconf");
            StringTokenizer tokenizer = new StringTokenizer(options, ",");
            while (tokenizer.hasMoreTokens()) {
                String keyVal = tokenizer.nextToken().trim();
                String[] keyValSplit = keyVal.split("=");
                job.set(keyValSplit[0], keyValSplit[1]);
            }
        }
        // if they gave us a jar file, include it into the class path
        String jarFile = job.getJar();
        if (jarFile != null) {
            final URL[] urls = new URL[] {
                    FileSystem.getLocal(job).pathToFile(new Path(jarFile)).toURL() };
            //FindBugs complains that creating a URLClassLoader should be
            //in a doPrivileged() block.
            ClassLoader loader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
                public ClassLoader run() {
                    return new URLClassLoader(urls);
                }
            });
            job.setClassLoader(loader);
        }
        runJob(job);
        return 0;
    } catch (ParseException pe) {
        LOG.info("Error : " + pe);
        cli.printUsage();
        return 1;
    }
}
From source file:org.codehaus.groovy.grails.web.pages.GroovyPagesTemplateEngine.java
/**
 * Establishes whether a Groovy page is reloadable. A GSP is only reloadable
 * in the development environment.
 *
 * @param resource The Resource to check.
 * @param meta The current GroovyPageMetaInfo instance
 * @return true if it is reloadable
 */
private boolean isGroovyPageReloadable(final Resource resource, GroovyPageMetaInfo meta) {
    if (!isReloadEnabled()) {
        return false;
    }
    // The meta-info decides whether to reload; the action simply hands it the
    // resource to inspect (possibly under elevated privileges).
    PrivilegedAction<Resource> resourceSupplier = new PrivilegedAction<Resource>() {
        public Resource run() {
            return resource;
        }
    };
    return meta.shouldReload(resourceSupplier);
}
From source file:org.apache.hadoop.tools.distcp2.mapred.TestCopyMapper.java
/**
 * Verifies that CopyMapper fails with an AccessControlException when the
 * remote user "guest" attempts to overwrite a target file it cannot write
 * (both source and target are read-only, and attribute preservation is on).
 */
@Test
public void testFailCopyWithAccessControlException() {
    try {
        deleteState();
        createSourceData();
        UserGroupInformation tmpUser = UserGroupInformation.createRemoteUser("guest");
        final CopyMapper copyMapper = new CopyMapper();
        // Build the stub context as "guest" so the mapper runs bound to that user.
        final StubContext stubContext = tmpUser.doAs(new PrivilegedAction<StubContext>() {
            @Override
            public StubContext run() {
                try {
                    return new StubContext(getConfiguration(), null, 0);
                } catch (Exception e) {
                    LOG.error("Exception encountered ", e);
                    throw new RuntimeException(e);
                }
            }
        });
        // Request preservation of every file attribute, which forces the mapper to
        // touch target permissions and thus trip the access control check.
        EnumSet<DistCpOptions.FileAttribute> preserveStatus =
                EnumSet.allOf(DistCpOptions.FileAttribute.class);
        final Mapper<Text, FileStatus, Text, Text>.Context context = stubContext.getContext();
        context.getConfiguration().set(DistCpConstants.CONF_LABEL_PRESERVE_STATUS,
                DistCpUtils.packAttributes(preserveStatus));
        touchFile(SOURCE_PATH + "/src/file");
        // Pre-create the target file so the copy must overwrite it.
        OutputStream out = cluster.getFileSystem().create(new Path(TARGET_PATH + "/src/file"));
        out.write("hello world".getBytes());
        out.close();
        // Make both source and target read-only for everyone.
        cluster.getFileSystem().setPermission(new Path(SOURCE_PATH + "/src/file"),
                new FsPermission(FsAction.READ, FsAction.READ, FsAction.READ));
        cluster.getFileSystem().setPermission(new Path(TARGET_PATH + "/src/file"),
                new FsPermission(FsAction.READ, FsAction.READ, FsAction.READ));
        // Obtain a FileSystem handle as "guest" so the copy runs with guest's rights.
        final FileSystem tmpFS = tmpUser.doAs(new PrivilegedAction<FileSystem>() {
            @Override
            public FileSystem run() {
                try {
                    return FileSystem.get(configuration);
                } catch (IOException e) {
                    LOG.error("Exception encountered ", e);
                    Assert.fail("Test failed: " + e.getMessage());
                    throw new RuntimeException("Test ought to fail here");
                }
            }
        });
        // Run the actual map() as "guest" and expect an AccessControlException,
        // either thrown directly or wrapped as the cause of another exception.
        tmpUser.doAs(new PrivilegedAction<Integer>() {
            @Override
            public Integer run() {
                try {
                    copyMapper.setup(context);
                    copyMapper.map(new Text("/src/file"),
                            tmpFS.getFileStatus(new Path(SOURCE_PATH + "/src/file")), context);
                    Assert.fail("Didn't expect the file to be copied");
                } catch (AccessControlException ignore) {
                    // Expected outcome: the copy was denied.
                } catch (Exception e) {
                    if (e.getCause() == null || !(e.getCause() instanceof AccessControlException)) {
                        throw new RuntimeException(e);
                    }
                }
                return null;
            }
        });
    } catch (Exception e) {
        LOG.error("Exception encountered ", e);
        Assert.fail("Test failed: " + e.getMessage());
    }
}