List of usage examples for java.lang.reflect.InvocationTargetException.getTargetException()

public Throwable getTargetException()

Returns the exception thrown by the invoked method or constructor. Since Java 1.4, getCause() is the preferred way to obtain the same information.
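A minimal, self-contained sketch of the usual pattern (Integer.parseInt is chosen here only as an illustrative reflective target): the method is invoked reflectively, and getTargetException() unwraps the exception the invoked method actually threw from the InvocationTargetException wrapper.

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

public class UnwrapExample {
    public static void main(String[] args) throws Exception {
        // Integer.parseInt(String) serves as an arbitrary reflective target.
        Method parseInt = Integer.class.getMethod("parseInt", String.class);
        try {
            parseInt.invoke(null, "not a number");
        } catch (InvocationTargetException e) {
            // The invoked method threw; recover and report the real cause.
            Throwable target = e.getTargetException();
            System.err.println("Invoked method failed with: " + target);
        }
    }
}

Most of the examples below apply the same idea: catch the wrapper, unwrap the target, and rethrow it either as-is or converted to a checked exception such as IOException.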
From source file: org.apache.flink.runtime.fs.hdfs.HadoopRecoverableFsDataOutputStream.java

static void truncate(FileSystem hadoopFs, Path file, long length) throws IOException {
    if (truncateHandle != null) {
        try {
            truncateHandle.invoke(hadoopFs, file, length);
        } catch (InvocationTargetException e) {
            ExceptionUtils.rethrowIOException(e.getTargetException());
        } catch (Throwable t) {
            throw new IOException(
                    "Truncation of file failed because of access/linking problems with Hadoop's truncate call. "
                            + "This is most likely a dependency conflict or class loading problem.");
        }
    } else {
        throw new IllegalStateException("Truncation handle has not been initialized");
    }
}
From source file: org.apache.hadoop.hbase.ipc.ScheduleHBaseServer.java

@Override
public Writable call(Writable param, long receivedTime) throws IOException {
    try {
        Invocation call = (Invocation) param;
        if (call.getMethodName() == null) {
            throw new IOException("Could not find requested method, the usual "
                    + "cause is a version mismatch between client and server.");
        }

        Method method = implementation.getMethod(call.getMethodName(), call.getParameterClasses());

        long startTime = System.currentTimeMillis();
        Object value = method.invoke(instance, call.getParameters());
        /**
         * do with openScanner option, added by ScheduleHBaseServer
         */
        if (call.getMethodName().endsWith("openScanner")) {
            this.initScannerPri(call, value);
        }
        int processingTime = (int) (System.currentTimeMillis() - startTime);
        int qTime = (int) (startTime - receivedTime);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Served: " + call.getMethodName() + " queueTime= " + qTime + " procesingTime= "
                    + processingTime);
        }
        rpcMetrics.rpcQueueTime.inc(qTime);
        rpcMetrics.rpcProcessingTime.inc(processingTime);
        rpcMetrics.inc(call.getMethodName(), processingTime);

        return new HbaseObjectWritable(method.getReturnType(), value);
    } catch (InvocationTargetException e) {
        Throwable target = e.getTargetException();
        if (target instanceof IOException) {
            throw (IOException) target;
        }
        IOException ioe = new IOException(target.toString());
        ioe.setStackTrace(target.getStackTrace());
        throw ioe;
    } catch (Throwable e) {
        IOException ioe = new IOException(e.toString());
        ioe.setStackTrace(e.getStackTrace());
        throw ioe;
    }
}
From source file: org.apache.hadoop.hbase.master.HMaster.java

/**
 * Utility for constructing an instance of the passed HMaster class.
 * @param masterClass
 * @param conf
 * @return HMaster instance.
 */
public static HMaster constructMaster(Class<? extends HMaster> masterClass, final Configuration conf,
        final CoordinatedStateManager cp) {
    try {
        Constructor<? extends HMaster> c = masterClass.getConstructor(Configuration.class,
                CoordinatedStateManager.class);
        return c.newInstance(conf, cp);
    } catch (InvocationTargetException ite) {
        Throwable target = ite.getTargetException() != null ? ite.getTargetException() : ite;
        if (target.getCause() != null)
            target = target.getCause();
        throw new RuntimeException("Failed construction of Master: " + masterClass.toString(), target);
    } catch (Exception e) {
        throw new RuntimeException("Failed construction of Master: " + masterClass.toString()
                + ((e.getCause() != null) ? e.getCause().getMessage() : ""), e);
    }
}
From source file: org.apache.hadoop.hbase.thrift.HbaseHandlerMetricsProxy.java

@Override
public Object invoke(Object proxy, Method m, Object[] args) throws Throwable {
    Object result;
    try {
        long start = now();
        result = m.invoke(handler, args);
        long processTime = now() - start;
        metrics.incMethodTime(m.getName(), processTime);
    } catch (InvocationTargetException e) {
        throw e.getTargetException();
    } catch (Exception e) {
        throw new RuntimeException("unexpected invocation exception: " + e.getMessage());
    }
    return result;
}
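This entry, like the HiveMind and ODE examples further down, unwraps the target exception inside a dynamic proxy so that callers see the exception the real implementation threw rather than the reflection wrapper. A minimal sketch of that proxy pattern, using an ordinary List as a stand-in delegate (all names here are illustrative, not taken from the sources above):

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.List;

public class PassThroughProxyExample {
    public static void main(String[] args) {
        List<String> target = new ArrayList<>();
        InvocationHandler handler = (proxy, method, methodArgs) -> {
            try {
                return method.invoke(target, methodArgs);
            } catch (InvocationTargetException e) {
                // Rethrow what the real method threw, not the reflection wrapper.
                throw e.getTargetException();
            }
        };
        @SuppressWarnings("unchecked")
        List<String> proxy = (List<String>) Proxy.newProxyInstance(
                PassThroughProxyExample.class.getClassLoader(),
                new Class<?>[] { List.class }, handler);
        try {
            proxy.get(0); // the real list throws IndexOutOfBoundsException
        } catch (IndexOutOfBoundsException e) {
            System.err.println("Caught original exception: " + e);
        }
    }
}

Because the target exception here is unchecked, the proxy can propagate it directly; checked exceptions would have to be declared by the proxied interface to avoid an UndeclaredThrowableException.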
From source file: org.apache.hadoop.hbase.util.JVMClusterUtil.java

/**
 * Creates a {@link RegionServerThread}.
 * Call 'start' on the returned thread to make it run.
 * @param c Configuration to use.
 * @param cp consensus provider to use
 * @param hrsc Class to create.
 * @param index Used distinguishing the object returned.
 * @throws IOException
 * @return Region server added.
 */
public static JVMClusterUtil.RegionServerThread createRegionServerThread(final Configuration c,
        CoordinatedStateManager cp, final Class<? extends HRegionServer> hrsc, final int index)
        throws IOException {
    HRegionServer server;
    try {
        Constructor<? extends HRegionServer> ctor = hrsc.getConstructor(Configuration.class,
                CoordinatedStateManager.class);
        ctor.setAccessible(true);
        server = ctor.newInstance(c, cp);
    } catch (InvocationTargetException ite) {
        Throwable target = ite.getTargetException();
        throw new RuntimeException("Failed construction of RegionServer: " + hrsc.toString()
                + ((target.getCause() != null) ? target.getCause().getMessage() : ""), target);
    } catch (Exception e) {
        IOException ioe = new IOException();
        ioe.initCause(e);
        throw ioe;
    }
    return new JVMClusterUtil.RegionServerThread(server, index);
}
From source file: org.apache.hadoop.hbase.util.JVMClusterUtil.java

/**
 * Creates a {@link MasterThread}.
 * Call 'start' on the returned thread to make it run.
 * @param c Configuration to use.
 * @param cp consensus provider to use
 * @param hmc Class to create.
 * @param index Used distinguishing the object returned.
 * @throws IOException
 * @return Master added.
 */
public static JVMClusterUtil.MasterThread createMasterThread(final Configuration c, CoordinatedStateManager cp,
        final Class<? extends HMaster> hmc, final int index) throws IOException {
    HMaster server;
    try {
        server = hmc.getConstructor(Configuration.class, CoordinatedStateManager.class).newInstance(c, cp);
    } catch (InvocationTargetException ite) {
        Throwable target = ite.getTargetException();
        throw new RuntimeException("Failed construction of Master: " + hmc.toString()
                + ((target.getCause() != null) ? target.getCause().getMessage() : ""), target);
    } catch (Exception e) {
        IOException ioe = new IOException();
        ioe.initCause(e);
        throw ioe;
    }
    return new JVMClusterUtil.MasterThread(server, index);
}
From source file: org.apache.hadoop.hive.common.JavaUtils.java

public static void closeClassLoader(ClassLoader loader) throws IOException {
    if (loader instanceof Closeable) {
        ((Closeable) loader).close();
    } else if (SUN_MISC_UTIL_RELEASE != null && loader instanceof URLClassLoader) {
        PrintStream outputStream = System.out;
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        PrintStream newOutputStream = new PrintStream(byteArrayOutputStream);
        try {
            // SUN_MISC_UTIL_RELEASE.invoke prints to System.out
            // So we're changing the outputstream for that call,
            // and setting it back to original System.out when we're done
            System.setOut(newOutputStream);
            SUN_MISC_UTIL_RELEASE.invoke(null, loader);
            String output = byteArrayOutputStream.toString("UTF8");
            LOG.debug(output);
        } catch (InvocationTargetException e) {
            if (e.getTargetException() instanceof IOException) {
                throw (IOException) e.getTargetException();
            }
            throw new IOException(e.getTargetException());
        } catch (Exception e) {
            throw new IOException(e);
        } finally {
            System.setOut(outputStream);
            newOutputStream.close();
        }
    }
    LogFactory.release(loader);
}
From source file: org.apache.hivemind.examples.impl.ProxyLoggingInvocationHandler.java

public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
    boolean debug = _log.isDebugEnabled();

    if (debug)
        LoggingUtils.entry(_log, method.getName(), args);

    try {
        Object result = method.invoke(_inner, args);

        if (debug) {
            if (method.getReturnType() == void.class)
                LoggingUtils.voidExit(_log, method.getName());
            else
                LoggingUtils.exit(_log, method.getName(), result);
        }

        return result;
    } catch (InvocationTargetException ex) {
        Throwable targetException = ex.getTargetException();

        if (debug)
            LoggingUtils.exception(_log, method.getName(), targetException);

        throw targetException;
    }
}
From source file: org.apache.ode.utils.LoggingInterceptor.java

public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
    try {
        if (method.getDeclaringClass() == DataSource.class && "getConnection".equals(method.getName())) {
            Connection conn = (Connection) method.invoke(_delegate, args);
            print("getConnection (tx=" + conn.getTransactionIsolation() + ")");
            return Proxy.newProxyInstance(_delegate.getClass().getClassLoader(),
                    new Class[] { Connection.class }, new LoggingInterceptor<Connection>(conn, _log));
        } else if (method.getDeclaringClass() == Connection.class
                && Statement.class.isAssignableFrom(method.getReturnType())) {
            Statement stmt = (Statement) method.invoke(_delegate, args);
            print(method, args);
            return Proxy.newProxyInstance(_delegate.getClass().getClassLoader(),
                    new Class[] { method.getReturnType() }, new LoggingInterceptor<Statement>(stmt, _log));
        } else {
            print(method, args);
            return method.invoke(_delegate, args);
        }
    } catch (InvocationTargetException e) {
        throw e.getTargetException();
    }
}
From source file: org.apache.openjpa.persistence.EntityManagerFactoryImpl.java

/**
 * Create a store-specific facade for the given fetch configuration.
 * If no facade class exists, we use the default {@link FetchPlan}.
 */
FetchPlan toFetchPlan(Broker broker, FetchConfiguration fetch) {
    if (fetch == null)
        return null;

    if (fetch instanceof DelegatingFetchConfiguration)
        fetch = ((DelegatingFetchConfiguration) fetch).getInnermostDelegate();

    try {
        if (_plan == null) {
            Class storeType = (broker == null) ? null
                    : broker.getStoreManager().getInnermostDelegate().getClass();
            Class cls = _factory.getConfiguration().getStoreFacadeTypeRegistry()
                    .getImplementation(FetchPlan.class, storeType, FetchPlanImpl.class);
            _plan = cls.getConstructor(FetchConfiguration.class);
        }
        return _plan.newInstance(fetch);
    } catch (InvocationTargetException ite) {
        throw PersistenceExceptions.toPersistenceException(ite.getTargetException());
    } catch (Exception e) {
        throw PersistenceExceptions.toPersistenceException(e);
    }
}