List of usage examples for java.security.PrivilegedAction
PrivilegedAction
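Most of the examples below follow the same pattern: wrap the sensitive call in a PrivilegedAction (or a PrivilegedExceptionAction when checked exceptions are involved) and hand it to AccessController.doPrivileged, or to a framework method such as User.runAs or Subject.doAs. The following is a minimal, self-contained sketch of that pattern; the class and method names are illustrative only and do not come from any of the projects listed below.

import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;

public class PrivilegedActionSketch {

    // PrivilegedAction<T> is used when run() throws no checked exceptions.
    static String readUserDir() {
        return AccessController.doPrivileged(new PrivilegedAction<String>() {
            public String run() {
                return System.getProperty("user.dir");
            }
        });
    }

    // PrivilegedExceptionAction<T> is used when run() may throw a checked
    // exception; doPrivileged wraps it in PrivilegedActionException.
    static String readFirstLine(final java.nio.file.Path path) throws Exception {
        try {
            return AccessController.doPrivileged(new PrivilegedExceptionAction<String>() {
                public String run() throws java.io.IOException {
                    return java.nio.file.Files.readAllLines(path).get(0);
                }
            });
        } catch (PrivilegedActionException e) {
            throw e.getException();
        }
    }
}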
From source file:org.apache.hadoop.hbase.security.TestUser.java
@Test
public void testRunAs() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    final User user = User.createUserForTesting(conf, "testuser", new String[] { "foo" });
    final PrivilegedExceptionAction<String> action = new PrivilegedExceptionAction<String>() {
        public String run() throws IOException {
            User u = User.getCurrent();
            return u.getName();
        }
    };

    String username = user.runAs(action);
    assertEquals("Current user within runAs() should match", "testuser", username);

    // ensure the next run is correctly set
    User user2 = User.createUserForTesting(conf, "testuser2", new String[] { "foo" });
    String username2 = user2.runAs(action);
    assertEquals("Second username should match second user", "testuser2", username2);

    // check the exception version
    username = user.runAs(new PrivilegedExceptionAction<String>() {
        public String run() throws Exception {
            return User.getCurrent().getName();
        }
    });
    assertEquals("User name in runAs() should match", "testuser", username);

    // verify that nested contexts work
    user2.runAs(new PrivilegedExceptionAction<Object>() {
        public Object run() throws IOException, InterruptedException {
            String nestedName = user.runAs(action);
            assertEquals("Nest name should match nested user", "testuser", nestedName);
            assertEquals("Current name should match current user", "testuser2", User.getCurrent().getName());
            return null;
        }
    });

    username = user.runAs(new PrivilegedAction<String>() {
        String result = null;

        @Override
        public String run() {
            try {
                return User.getCurrent().getName();
            } catch (IOException e) {
                result = "empty";
            }
            return result;
        }
    });
    assertEquals("Current user within runAs() should match", "testuser", username);
}
From source file:com.agimatec.validation.jsr303.util.SecureActions.java
public static Method[] getDeclaredMethods(final Class<?> clazz) {
    return run(new PrivilegedAction<Method[]>() {
        public Method[] run() {
            return clazz.getDeclaredMethods();
        }
    });
}
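The run(...) helper used above is not shown on this page. A plausible implementation, given as an assumption rather than the actual Agimatec code, simply delegates to AccessController.doPrivileged:

// Hypothetical helper; the real SecureActions.run(...) may differ.
private static <T> T run(PrivilegedAction<T> action) {
    return AccessController.doPrivileged(action);
}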
From source file:org.apache.axis2.jaxws.ClientConfigurationFactory.java
private static String getProperty_doPriv(final String property) {
    return (String) AccessController.doPrivileged(new PrivilegedAction() {
        public Object run() {
            try {
                return System.getProperty(property);
            } catch (Throwable t) {
                return null;
            }
        }
    });
}
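The example above uses the raw PrivilegedAction type, which forces the cast to String. With the generic form the cast disappears; this is an alternative sketch, not the Axis2 source:

// Hypothetical generic variant of the same lookup.
private static String getPropertySketch(final String property) {
    return AccessController.doPrivileged(new PrivilegedAction<String>() {
        public String run() {
            try {
                return System.getProperty(property);
            } catch (Throwable t) {
                return null;
            }
        }
    });
}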
From source file:org.javascool.polyfilewriter.Gateway.java
/**
 * Returns the user's home directory, using System.getProperty("user.home").
 *
 * @return The path to the directory
 */
public String getHomeDirectory() throws Exception {
    assertSafeUsage();
    try {
        return AccessController.doPrivileged(new PrivilegedAction<String>() {
            public String run() {
                return System.getProperty("user.home");
            }
        });
    } catch (Exception e) {
        popException(e);
        throw e;
    }
}
From source file:org.castor.jaxb.CastorJAXBContextFactory.java
/**
 * Registers the {@link CastorJAXBContextFactory} as the default JAXB provider.
 */
public static void registerContextFactory() {
    AccessController.doPrivileged(new PrivilegedAction<Object>() {
        public Object run() {
            System.setProperty(JAXBCONTEXT_PROPERTY_NAME, CASTOR_JAXBCONTEXT_FACTORY);
            return null;
        }
    });
}
From source file:org.eclipse.gemini.blueprint.extender.internal.blueprint.event.EventAdminDispatcher.java
public void beforeRefresh(final BlueprintEvent event) {
    if (dispatcher != null) {
        try {
            if (System.getSecurityManager() != null) {
                AccessController.doPrivileged(new PrivilegedAction<Object>() {
                    public Object run() {
                        dispatcher.beforeRefresh(event);
                        return null;
                    }
                });
            } else {
                dispatcher.beforeRefresh(event);
            }
        } catch (Throwable th) {
            log.warn("Cannot dispatch event " + event, th);
        }
    }
}
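The guard on System.getSecurityManager() skips the doPrivileged call entirely when no security manager is installed. The same idea can be factored into a small utility; this is a hedged sketch, not part of the Gemini Blueprint code:

// Hypothetical helper: run directly unless a SecurityManager is present.
static <T> T runPrivileged(PrivilegedAction<T> action) {
    if (System.getSecurityManager() != null) {
        return AccessController.doPrivileged(action);
    }
    return action.run();
}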
From source file:com.linkedin.drelephant.ElephantRunner.java
@Override public void run() { logger.info("Dr.elephant has started"); try {// w ww .j av a2 s.co m _hadoopSecurity = new HadoopSecurity(); _hadoopSecurity.doAs(new PrivilegedAction<Void>() { @Override public Void run() { HDFSContext.load(); loadGeneralConfiguration(); loadAnalyticJobGenerator(); ElephantContext.init(); // Initialize the metrics registries. MetricsController.init(); logger.info("executor num is " + _executorNum); if (_executorNum < 1) { throw new RuntimeException("Must have at least 1 worker thread."); } ThreadFactory factory = new ThreadFactoryBuilder().setNameFormat("dr-el-executor-thread-%d") .build(); _threadPoolExecutor = new ThreadPoolExecutor(_executorNum, _executorNum, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>(), factory); while (_running.get() && !Thread.currentThread().isInterrupted()) { _analyticJobGenerator.updateResourceManagerAddresses(); lastRun = System.currentTimeMillis(); logger.info("Fetching analytic job list..."); try { _hadoopSecurity.checkLogin(); } catch (IOException e) { logger.info("Error with hadoop kerberos login", e); //Wait for a while before retry waitInterval(_retryInterval); continue; } List<AnalyticJob> todos; try { todos = _analyticJobGenerator.fetchAnalyticJobs(); } catch (Exception e) { logger.error("Error fetching job list. Try again later...", e); //Wait for a while before retry waitInterval(_retryInterval); continue; } for (AnalyticJob analyticJob : todos) { _threadPoolExecutor.submit(new ExecutorJob(analyticJob)); } int queueSize = _threadPoolExecutor.getQueue().size(); MetricsController.setQueueSize(queueSize); logger.info("Job queue size is " + queueSize); //Wait for a while before next fetch waitInterval(_fetchInterval); } logger.info("Main thread is terminated."); return null; } }); } catch (Exception e) { logger.error(e.getMessage()); logger.error(ExceptionUtils.getStackTrace(e)); } }
From source file:com.srotya.collectd.storm.StormNimbusMetrics.java
@Override
public int read() {
    Gson gson = new Gson();
    login();
    for (String nimbus : nimbusAddresses) {
        Subject.doAs(subject, new PrivilegedAction<Void>() {
            @Override
            public Void run() {
                HttpGet request = new HttpGet(nimbus + "/api/v1/topology/summary");
                CloseableHttpClient client = builder.build();
                try {
                    HttpResponse response = client.execute(request, context);
                    if (response.getStatusLine().getStatusCode() == 200) {
                        HttpEntity entity = response.getEntity();
                        String result = EntityUtils.toString(entity);
                        JsonObject topologySummary = gson.fromJson(result, JsonObject.class);
                        List<String> ids = extractTopologyIds(
                                topologySummary.get("topologies").getAsJsonArray());
                        if (ids.isEmpty()) {
                            Collectd.logInfo("No storm topologies deployed");
                        }
                        for (String id : ids) {
                            PluginData pd = new PluginData();
                            pd.setPluginInstance(id);
                            pd.setTime(System.currentTimeMillis());
                            try {
                                pd.setHost(new URI(nimbus).getHost());
                            } catch (URISyntaxException e) {
                                continue;
                            }
                            ValueList values = new ValueList(pd);
                            fetchTopologyMetrics(nimbus, id, values, builder, gson);
                        }
                    } else {
                        Collectd.logError("Unable to fetch Storm metrics:" + response.getStatusLine() + "\t"
                                + EntityUtils.toString(response.getEntity()));
                    }
                    client.close();
                } catch (Exception e) {
                    e.printStackTrace();
                    Collectd.logError(
                            "Failed to fetch metrics from Nimbus:" + nimbus + "\treason:" + e.getMessage());
                }
                return null;
            }
        });
    }
    return 0;
}
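The subject passed to Subject.doAs above is set up by the login() call, which is not shown here. In a typical JAAS setup it would come from a LoginContext; the sketch below is an assumption about that setup, and the configuration name is a placeholder rather than anything from the collectd plugin:

import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;

class JaasLoginSketch {
    // "StormClient" is a hypothetical JAAS login configuration name.
    static Subject login() throws LoginException {
        LoginContext lc = new LoginContext("StormClient");
        lc.login();
        return lc.getSubject();
    }
}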
From source file:org.apache.ranger.services.storm.client.StormClient.java
public List<String> getTopologyList(final String topologyNameMatching, final List<String> stormTopologyList) {
    LOG.debug("Getting Storm topology list for topologyNameMatching : " + topologyNameMatching);
    final String errMsg = errMessage;

    List<String> ret = new ArrayList<String>();

    PrivilegedAction<ArrayList<String>> topologyListGetter = new PrivilegedAction<ArrayList<String>>() {
        @Override
        public ArrayList<String> run() {
            ArrayList<String> lret = new ArrayList<String>();
            String url = stormUIUrl + TOPOLOGY_LIST_API_ENDPOINT;

            Client client = null;
            ClientResponse response = null;

            try {
                client = Client.create();
                WebResource webResource = client.resource(url);
                response = webResource.accept(EXPECTED_MIME_TYPE).get(ClientResponse.class);

                LOG.debug("getTopologyList():calling " + url);

                if (response != null) {
                    LOG.debug("getTopologyList():response.getStatus()= " + response.getStatus());
                    if (response.getStatus() == 200) {
                        String jsonString = response.getEntity(String.class);
                        Gson gson = new GsonBuilder().setPrettyPrinting().create();
                        TopologyListResponse topologyListResponse = gson.fromJson(jsonString,
                                TopologyListResponse.class);
                        if (topologyListResponse != null) {
                            if (topologyListResponse.getTopologyList() != null) {
                                for (Topology topology : topologyListResponse.getTopologyList()) {
                                    String topologyName = topology.getName();
                                    if (stormTopologyList != null && stormTopologyList.contains(topologyName)) {
                                        continue;
                                    }
                                    LOG.debug("getTopologyList():Found topology " + topologyName);
                                    LOG.debug("getTopologyList():topology Name=[" + topology.getName()
                                            + "], topologyNameMatching=[" + topologyNameMatching
                                            + "], existingStormTopologyList=[" + stormTopologyList + "]");
                                    if (topologyName != null) {
                                        if (topologyNameMatching == null || topologyNameMatching.isEmpty()
                                                || FilenameUtils.wildcardMatch(topology.getName(),
                                                        topologyNameMatching + "*")) {
                                            LOG.debug("getTopologyList():Adding topology " + topologyName);
                                            lret.add(topologyName);
                                        }
                                    }
                                }
                            }
                        }
                    } else {
                        LOG.info("getTopologyList():response.getStatus()= " + response.getStatus()
                                + " for URL " + url + ", so returning null list");
                        String jsonString = response.getEntity(String.class);
                        LOG.info(jsonString);
                        lret = null;
                    }
                } else {
                    String msgDesc = "Unable to get a valid response for " + "expected mime type : ["
                            + EXPECTED_MIME_TYPE + "] URL : " + url + " - got null response.";
                    LOG.error(msgDesc);
                    HadoopException hdpException = new HadoopException(msgDesc);
                    hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
                    throw hdpException;
                }
            } catch (HadoopException he) {
                throw he;
            } catch (Throwable t) {
                String msgDesc = "Exception while getting Storm TopologyList." + " URL : " + url;
                HadoopException hdpException = new HadoopException(msgDesc, t);
                LOG.error(msgDesc, t);
                hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
                throw hdpException;
            } finally {
                if (response != null) {
                    response.close();
                }
                if (client != null) {
                    client.destroy();
                }
            }
            return lret;
        }
    };

    try {
        ret = executeUnderKerberos(this.userName, this.password, this.lookupPrincipal, this.lookupKeytab,
                this.nameRules, topologyListGetter);
    } catch (IOException e) {
        LOG.error("Unable to get Topology list from [" + stormUIUrl + "]", e);
    }
    return ret;
}
From source file:org.getobjects.foundation.kvc.KVCWrapper.java
@SuppressWarnings("unchecked") private static Method[] getPublicDeclaredMethods(Class _class) { Method methods[] = declaredMethodCache.get(_class); if (methods != null) return methods; final Class fclz = _class; methods = (Method[]) AccessController.doPrivileged(new PrivilegedAction() { public Object run() { return fclz.getMethods(); }/*from w w w. java 2 s. c o m*/ }); for (int i = 0; i < methods.length; i++) { Method method = methods[i]; int j = method.getModifiers(); if (!Modifier.isPublic(j)) methods[i] = null; } declaredMethodCache.put(_class, methods); return methods; }