List of usage examples for java.security.PrivilegedExceptionAction
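Every example on this page uses one of two shapes: the JDK pattern, in which AccessController.doPrivileged(...) runs the action and wraps any checked exception thrown by run() in a PrivilegedActionException, and the Hadoop pattern, in which UserGroupInformation.doAs(...) runs the action as a particular user. As a reference point before the project examples, here is a minimal self-contained sketch of the JDK pattern; the PrivilegedLoadSketch class and loadClass helper are illustrative names, not taken from any of the source files below.

import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;

public class PrivilegedLoadSketch {

    // Load a class inside a privileged block, rethrowing the original
    // checked exception instead of the PrivilegedActionException wrapper.
    static Class<?> loadClass(final String className) throws ClassNotFoundException {
        try {
            return AccessController.doPrivileged(new PrivilegedExceptionAction<Class<?>>() {
                public Class<?> run() throws ClassNotFoundException {
                    return Class.forName(className);
                }
            });
        } catch (PrivilegedActionException e) {
            // getException() returns the checked exception thrown by run()
            throw (ClassNotFoundException) e.getException();
        }
    }

    public static void main(String[] args) throws Exception {
        System.out.println(loadClass("java.util.ArrayList"));
    }
}

The cast in the catch block is safe because getException() returns exactly the checked exception that run() threw.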
From source file:com.trendmicro.hdfs.webdav.HDFSResource.java
@Override
public DavResourceIterator getMembers() {
    List<DavResource> list = new ArrayList<DavResource>();
    try {
        // list the directory contents as the WebDAV session user
        FileStatus[] stat = user.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
            public FileStatus[] run() throws Exception {
                return FileSystem.get(conf).listStatus(path);
            }
        });
        if (stat != null) {
            for (FileStatus s : stat) {
                Path p = s.getPath();
                DavResourceLocator resourceLocator = locator.getFactory().createResourceLocator(
                        locator.getPrefix(), locator.getWorkspacePath(), p.toString(), false);
                try {
                    HDFSResource resource = (HDFSResource) factory.createResource(resourceLocator,
                            getSession());
                    resource.user = this.user;
                    list.add(resource);
                } catch (DavException ex) {
                    LOG.warn("Exception adding resource '" + p.toUri().getPath() + "' to iterator");
                }
            }
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
    return new DavResourceIteratorImpl(list);
}
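The user.doAs(...) call above is Hadoop's UserGroupInformation.doAs, which executes the action under the given user's credentials so that HDFS permission checks see that user rather than the process owner. A minimal standalone sketch of the same pattern follows, assuming a Hadoop client classpath and a reachable file system; the class name, the user "someuser", and the path "/tmp" are illustrative only.

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class DoAsListingSketch {
    public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();
        // Impersonate a (possibly remote) user for the duration of the action.
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser("someuser");
        FileStatus[] listing = ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
            @Override
            public FileStatus[] run() throws Exception {
                return FileSystem.get(conf).listStatus(new Path("/tmp"));
            }
        });
        for (FileStatus status : listing) {
            System.out.println(status.getPath());
        }
    }
}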
From source file:org.apache.axis2.jaxws.runtime.description.marshal.impl.AnnotationBuilder.java
/**
 * Return the class for this name
 *
 * @return Class
 */
static Class forName(final String className, final boolean initialize, final ClassLoader classloader)
        throws ClassNotFoundException {
    // NOTE: This method must remain protected because it uses AccessController
    Class cl = null;
    try {
        cl = (Class) AccessController.doPrivileged(new PrivilegedExceptionAction() {
            public Object run() throws ClassNotFoundException {
                // Class.forName does not support primitives
                Class cls = ClassUtils.getPrimitiveClass(className);
                if (cls == null) {
                    cls = Class.forName(className, initialize, classloader);
                }
                return cls;
            }
        });
    } catch (PrivilegedActionException e) {
        /* An exception should NOT be logged. Depending on the JAXWS scenario, certain classes
         * may or may not exist. Putting an exception in the log will confuse programmers who
         * are servicing the product
         * if (log.isDebugEnabled()) {
         *     log.debug("Exception thrown from AccessController: " + e);
         * }
         */
        throw (ClassNotFoundException) e.getException();
    }
    return cl;
}
From source file:org.apache.hadoop.hive.metastore.HiveMetaStoreClientPreCatalog.java
public HiveMetaStoreClientPreCatalog(Configuration conf, HiveMetaHookLoader hookLoader, Boolean allowEmbedded)
        throws MetaException {
    this.hookLoader = hookLoader;
    if (conf == null) {
        conf = MetastoreConf.newMetastoreConf();
        this.conf = conf;
    } else {
        this.conf = new Configuration(conf);
    }
    version = MetastoreConf.getBoolVar(conf, ConfVars.HIVE_IN_TEST) ? TEST_VERSION : VERSION;
    filterHook = loadFilterHooks();
    uriResolverHook = loadUriResolverHook();
    fileMetadataBatchSize = MetastoreConf.getIntVar(conf, ConfVars.BATCH_RETRIEVE_OBJECTS_MAX);

    String msUri = MetastoreConf.getVar(conf, ConfVars.THRIFT_URIS);
    localMetaStore = MetastoreConf.isEmbeddedMetaStore(msUri);
    if (localMetaStore) {
        if (!allowEmbedded) {
            throw new MetaException("Embedded metastore is not allowed here. Please configure "
                    + ConfVars.THRIFT_URIS.toString() + "; it is currently set to [" + msUri + "]");
        }
        client = callEmbeddedMetastore(this.conf);
        isConnected = true;
        snapshotActiveConf();
        return;
    }

    // get the number of retries
    retries = MetastoreConf.getIntVar(conf, ConfVars.THRIFT_CONNECTION_RETRIES);
    retryDelaySeconds = MetastoreConf.getTimeVar(conf, ConfVars.CLIENT_CONNECT_RETRY_DELAY, TimeUnit.SECONDS);

    // user wants file store based configuration
    if (MetastoreConf.getVar(conf, ConfVars.THRIFT_URIS) != null) {
        resolveUris();
    } else {
        LOG.error("NOT getting uris from conf");
        throw new MetaException("MetaStoreURIs not found in conf file");
    }

    // If HADOOP_PROXY_USER is set in env or property,
    // then need to create a metastore client that proxies as that user.
    String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
    String proxyUser = System.getenv(HADOOP_PROXY_USER);
    if (proxyUser == null) {
        proxyUser = System.getProperty(HADOOP_PROXY_USER);
    }
    // if HADOOP_PROXY_USER is set, create a DelegationToken using the real user
    if (proxyUser != null) {
        LOG.info(HADOOP_PROXY_USER + " is set. Using delegation " + "token for HiveMetaStore connection.");
        try {
            // open the connection as the real (login) user, then fetch a
            // delegation token for the proxy user
            UserGroupInformation.getLoginUser().getRealUser().doAs(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    open();
                    return null;
                }
            });
            String delegationTokenPropString = "DelegationTokenForHiveMetaStoreServer";
            String delegationTokenStr = getDelegationToken(proxyUser, proxyUser);
            SecurityUtils.setTokenStr(UserGroupInformation.getCurrentUser(), delegationTokenStr,
                    delegationTokenPropString);
            MetastoreConf.setVar(this.conf, ConfVars.TOKEN_SIGNATURE, delegationTokenPropString);
            close();
        } catch (Exception e) {
            LOG.error("Error while setting delegation token for " + proxyUser, e);
            if (e instanceof MetaException) {
                throw (MetaException) e;
            } else {
                throw new MetaException(e.getMessage());
            }
        }
    }
    // finally open the store
    open();
}
From source file:com.hurence.logisland.service.hbase.HBase_1_1_2_ClientService.java
protected Connection createConnection(final ControllerServiceInitializationContext context)
        throws IOException, InterruptedException {
    final String configFiles = context.getPropertyValue(HADOOP_CONF_FILES).asString();
    final Configuration hbaseConfig = getConfigurationFromFiles(configFiles);

    // override with any properties that are provided
    if (context.getPropertyValue(ZOOKEEPER_QUORUM).isSet()) {
        hbaseConfig.set(HBASE_CONF_ZK_QUORUM, context.getPropertyValue(ZOOKEEPER_QUORUM).asString());
    }
    if (context.getPropertyValue(ZOOKEEPER_CLIENT_PORT).isSet()) {
        hbaseConfig.set(HBASE_CONF_ZK_PORT, context.getPropertyValue(ZOOKEEPER_CLIENT_PORT).asString());
    }
    if (context.getPropertyValue(ZOOKEEPER_ZNODE_PARENT).isSet()) {
        hbaseConfig.set(HBASE_CONF_ZNODE_PARENT, context.getPropertyValue(ZOOKEEPER_ZNODE_PARENT).asString());
    }
    if (context.getPropertyValue(HBASE_CLIENT_RETRIES).isSet()) {
        hbaseConfig.setInt(HBASE_CONF_CLIENT_RETRIES,
                context.getPropertyValue(HBASE_CLIENT_RETRIES).asInteger());
    }

    // add any dynamic properties to the HBase configuration
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        final PropertyDescriptor descriptor = entry.getKey();
        if (descriptor.isDynamic()) {
            hbaseConfig.set(descriptor.getName(), entry.getValue());
        }
    }

    if (SecurityUtil.isSecurityEnabled(hbaseConfig)) {
        final String principal = context.getPropertyValue(kerberosProperties.getKerberosPrincipal()).asString();
        final String keyTab = context.getPropertyValue(kerberosProperties.getKerberosKeytab()).asString();
        getLogger().info("HBase Security Enabled, logging in as principal {} with keytab {}",
                new Object[] { principal, keyTab });
        ugi = SecurityUtil.loginKerberos(hbaseConfig, principal, keyTab);
        getLogger().info("Successfully logged in as principal {} with keytab {}",
                new Object[] { principal, keyTab });
        // open the HBase connection under the Kerberos-authenticated identity
        return ugi.doAs(new PrivilegedExceptionAction<Connection>() {
            @Override
            public Connection run() throws Exception {
                return ConnectionFactory.createConnection(hbaseConfig);
            }
        });
    } else {
        getLogger().info("Simple Authentication");
        return ConnectionFactory.createConnection(hbaseConfig);
    }
}
From source file:org.apache.carbondata.core.util.CarbonUtil.java
public static void deleteFoldersAndFiles(final CarbonFile... file) throws IOException, InterruptedException {
    // run the recursive delete as the current login user
    UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            for (int i = 0; i < file.length; i++) {
                deleteRecursive(file[i]);
            }
            return null;
        }
    });
}
From source file:org.apache.bsf.BSFManager.java
/**
 * Compile the given expression of the given language into the given
 * <tt>CodeBuffer</tt>.
 *
 * @param lang     language identifier
 * @param source   (context info) the source of this expression (e.g., filename)
 * @param lineNo   (context info) the line number in source for expr
 * @param columnNo (context info) the column number in source for expr
 * @param expr     the expression to compile
 * @param cb       code buffer to compile into
 *
 * @exception BSFException if any error while compiling the expression
 */
public void compileExpr(String lang, String source, int lineNo, int columnNo, Object expr, CodeBuffer cb)
        throws BSFException {
    logger.debug("BSFManager:compileExpr");

    final BSFEngine e = loadScriptingEngine(lang);
    final String sourcef = source;
    final int lineNof = lineNo, columnNof = columnNo;
    final Object exprf = expr;
    final CodeBuffer cbf = cb;

    try {
        AccessController.doPrivileged(new PrivilegedExceptionAction() {
            public Object run() throws Exception {
                e.compileExpr(sourcef, lineNof, columnNof, exprf, cbf);
                return null;
            }
        });
    } catch (PrivilegedActionException prive) {
        logger.error("Exception :", prive);
        throw (BSFException) prive.getException();
    }
}
From source file:org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.java
private void preCreate(final MasterProcedureEnv env) throws IOException, InterruptedException {
    if (!getTableName().isSystemTable()) {
        ProcedureSyncWait.getMasterQuotaManager(env).checkNamespaceTableAndRegionQuota(getTableName(),
                newRegions.size());
    }
    final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
    if (cpHost != null) {
        final HRegionInfo[] regions = newRegions == null ? null
                : newRegions.toArray(new HRegionInfo[newRegions.size()]);
        // invoke the coprocessor pre-create hook as the requesting user
        user.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                cpHost.preCreateTableHandler(hTableDescriptor, regions);
                return null;
            }
        });
    }
}
From source file:org.apache.hadoop.hive.metastore.HiveMetaStoreClient.java
public HiveMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader, Boolean allowEmbedded)
        throws MetaException {
    this.hookLoader = hookLoader;
    if (conf == null) {
        conf = new HiveConf(HiveMetaStoreClient.class);
        this.conf = conf;
    } else {
        this.conf = new HiveConf(conf);
    }
    version = HiveConf.getBoolVar(conf, ConfVars.HIVE_IN_TEST) ? TEST_VERSION : VERSION;
    filterHook = loadFilterHooks();
    fileMetadataBatchSize = HiveConf.getIntVar(conf, HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_OBJECTS_MAX);

    String msUri = conf.getVar(ConfVars.METASTOREURIS);
    localMetaStore = HiveConfUtil.isEmbeddedMetaStore(msUri);
    if (localMetaStore) {
        if (!allowEmbedded) {
            throw new MetaException("Embedded metastore is not allowed here. Please configure "
                    + ConfVars.METASTOREURIS.varname + "; it is currently set to [" + msUri + "]");
        }
        // instantiate the metastore server handler directly instead of connecting
        // through the network
        if (conf.getBoolVar(ConfVars.METASTORE_FASTPATH)) {
            client = new HiveMetaStore.HMSHandler("hive client", this.conf, true);
            fastpath = true;
        } else {
            client = HiveMetaStore.newRetryingHMSHandler("hive client", this.conf, true);
        }
        isConnected = true;
        snapshotActiveConf();
        return;
    } else {
        if (conf.getBoolVar(ConfVars.METASTORE_FASTPATH)) {
            throw new RuntimeException("You can't set hive.metastore.fastpath to true when you're "
                    + "talking to the thrift metastore service. You must run the metastore locally.");
        }
    }

    // get the number of retries
    retries = HiveConf.getIntVar(conf, HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES);
    retryDelaySeconds = conf.getTimeVar(ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY, TimeUnit.SECONDS);

    // user wants file store based configuration
    if (conf.getVar(HiveConf.ConfVars.METASTOREURIS) != null) {
        String metastoreUrisString[] = conf.getVar(HiveConf.ConfVars.METASTOREURIS).split(",");
        metastoreUris = new URI[metastoreUrisString.length];
        try {
            int i = 0;
            for (String s : metastoreUrisString) {
                URI tmpUri = new URI(s);
                if (tmpUri.getScheme() == null) {
                    throw new IllegalArgumentException("URI: " + s + " does not have a scheme");
                }
                metastoreUris[i++] = tmpUri;
            }
            // make metastore URIs random
            List uriList = Arrays.asList(metastoreUris);
            Collections.shuffle(uriList);
            metastoreUris = (URI[]) uriList.toArray();
        } catch (IllegalArgumentException e) {
            throw (e);
        } catch (Exception e) {
            MetaStoreUtils.logAndThrowMetaException(e);
        }
    } else {
        LOG.error("NOT getting uris from conf");
        throw new MetaException("MetaStoreURIs not found in conf file");
    }

    // If HADOOP_PROXY_USER is set in env or property,
    // then need to create a metastore client that proxies as that user.
    String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
    String proxyUser = System.getenv(HADOOP_PROXY_USER);
    if (proxyUser == null) {
        proxyUser = System.getProperty(HADOOP_PROXY_USER);
    }
    // if HADOOP_PROXY_USER is set, create a DelegationToken using the real user
    if (proxyUser != null) {
        LOG.info(HADOOP_PROXY_USER + " is set. Using delegation " + "token for HiveMetaStore connection.");
        try {
            // open the connection as the real (login) user, then fetch a
            // delegation token for the proxy user
            UserGroupInformation.getLoginUser().getRealUser().doAs(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    open();
                    return null;
                }
            });
            String delegationTokenPropString = "DelegationTokenForHiveMetaStoreServer";
            String delegationTokenStr = getDelegationToken(proxyUser, proxyUser);
            Utils.setTokenStr(UserGroupInformation.getCurrentUser(), delegationTokenStr,
                    delegationTokenPropString);
            this.conf.setVar(ConfVars.METASTORE_TOKEN_SIGNATURE, delegationTokenPropString);
            close();
        } catch (Exception e) {
            LOG.error("Error while setting delegation token for " + proxyUser, e);
            if (e instanceof MetaException) {
                throw (MetaException) e;
            } else {
                throw new MetaException(e.getMessage());
            }
        }
    }
    // finally open the store
    open();
}
From source file:org.apache.axis2.jaxws.spi.handler.BaseHandlerResolver.java
/**
 * Return the class for this name
 *
 * @return Class
 */
private static Class forName(final String className, final boolean initialize, final ClassLoader classLoader)
        throws ClassNotFoundException {
    // NOTE: This method must remain protected because it uses AccessController
    Class cl = null;
    try {
        cl = (Class) AccessController.doPrivileged(new PrivilegedExceptionAction() {
            public Object run() throws ClassNotFoundException {
                try {
                    if (log.isDebugEnabled()) {
                        log.debug("HandlerResolverImpl attempting to load Class: " + className);
                    }
                    return Class.forName(className, initialize, classLoader);
                } catch (Throwable e) {
                    // TODO Should the exception be swallowed?
                    if (log.isDebugEnabled()) {
                        log.debug("HandlerResolverImpl cannot load the following class Throwable Exception Occured: "
                                + className);
                    }
                    throw new ClassNotFoundException(
                            "HandlerResolverImpl cannot load the following class Throwable Exception Occured:"
                                    + className);
                }
            }
        });
    } catch (PrivilegedActionException e) {
        if (log.isDebugEnabled()) {
            log.debug("Exception thrown from AccessController: " + e);
        }
        throw (ClassNotFoundException) e.getException();
    }
    return cl;
}
From source file:org.apache.hadoop.hdfs.server.namenode.TestPathBasedCacheRequests.java
private static long addAsUnprivileged(final PathBasedCacheDirective directive) throws Exception {
    return unprivilegedUser.doAs(new PrivilegedExceptionAction<Long>() {
        @Override
        public Long run() throws IOException {
            DistributedFileSystem myDfs = (DistributedFileSystem) FileSystem.get(conf);
            return myDfs.addPathBasedCacheDirective(directive);
        }
    });
}