Example usage for org.objectweb.asm Opcodes ASM5


Introduction

This page collects example usages of the org.objectweb.asm Opcodes.ASM5 field, drawn from open-source projects.

Prototype

int ASM5

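ASM5 is the API version constant for the ASM 5.x visitor API. Every ClassVisitor, MethodVisitor, AnnotationVisitor, or adapter subclass passes such a constant to its superclass constructor to declare which visitor API it was written against, as all of the examples below do. A minimal, self-contained sketch of the pattern (the ExampleVisitor class name and the printed message are illustrative only):

import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;

import java.io.IOException;

public class ExampleVisitor {
    public static void main(String[] args) throws IOException {
        // Read the bytecode of java.lang.Runnable from the classpath
        ClassReader cr = new ClassReader("java.lang.Runnable");
        // Opcodes.ASM5 tells ASM which visitor API version this anonymous visitor implements
        cr.accept(new ClassVisitor(Opcodes.ASM5) {
            @Override
            public MethodVisitor visitMethod(int access, String name, String desc,
                    String signature, String[] exceptions) {
                System.out.println("Found method: " + name + desc);
                return super.visitMethod(access, name, desc, signature, exceptions);
            }
        }, ClassReader.SKIP_CODE | ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);
    }
}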

Usage

From source file:co.cask.cdap.app.runtime.spark.SparkRunnerClassLoader.java

License:Apache License

/**
 * Rewrites the constructors that do not delegate to another constructor with the given {@link ConstructorRewriter}
 * and defines the class.
 *
 * @param classType type of the class to be defined
 * @param byteCodeStream {@link InputStream} for reading the original bytecode of the class
 * @param rewriter a {@link ConstructorRewriter} for rewriting the constructor
 * @return a defined Class
 */
private Class<?> rewriteConstructorAndDefineClass(final Type classType, InputStream byteCodeStream,
        final ConstructorRewriter rewriter) throws IOException {
    ClassReader cr = new ClassReader(byteCodeStream);
    ClassWriter cw = new ClassWriter(0);

    cr.accept(new ClassVisitor(Opcodes.ASM5, cw) {

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc, String signature,
                String[] exceptions) {
            // Call super so that the method signature is registered with the ClassWriter (parent)
            MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);

            // We only attempt to rewrite constructors
            if (!"<init>".equals(name)) {
                return mv;
            }

            return new AdviceAdapter(Opcodes.ASM5, mv, access, name, desc) {

                boolean calledThis;

                @Override
                public void visitMethodInsn(int opcode, String owner, String name, String desc, boolean itf) {
                    // See if this constructor calls another constructor of the same class (this(..)).
                    calledThis = calledThis || (opcode == Opcodes.INVOKESPECIAL
                            && Type.getObjectType(owner).equals(classType) && name.equals("<init>")
                            && Type.getReturnType(desc).equals(Type.VOID_TYPE));
                    super.visitMethodInsn(opcode, owner, name, desc, itf);
                }

                @Override
                protected void onMethodExit(int opcode) {
                    if (calledThis) {
                        // For constructors that call this(), we don't need to generate a call to SparkContextCache
                        return;
                    }
                    // Add a call to SparkContextCache.setContext() for the normal method return path
                    if (opcode == RETURN) {
                        rewriter.onMethodExit(this);
                    }
                }
            };
        }
    }, ClassReader.EXPAND_FRAMES);

    byte[] byteCode = cw.toByteArray();
    return defineClass(classType.getClassName(), byteCode, 0, byteCode.length);
}
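The ConstructorRewriter type used above is a CDAP class whose definition is not included in this excerpt. Judging from the call rewriter.onMethodExit(this), where this is the AdviceAdapter (a GeneratorAdapter subclass), it is presumably a small callback interface along the following lines; this is a sketch of its assumed shape, not the actual CDAP declaration.

import org.objectweb.asm.commons.GeneratorAdapter;

/**
 * Assumed shape of the callback used above: it receives a generator positioned at a
 * constructor's normal exit point and emits whatever extra instructions are needed there.
 */
interface ConstructorRewriter {
    void onMethodExit(GeneratorAdapter generatorAdapter);
}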

From source file:co.cask.cdap.app.runtime.spark.SparkRunnerClassLoader.java

License:Apache License

/**
 * Defines a class by rewriting all calls to {@link System#setProperty(String, String)} to
 * {@link SparkRuntimeEnv#setProperty(String, String)}.
 *
 * @param name name of the class to define
 * @param byteCodeStream {@link InputStream} for reading in the original bytecode.
 * @return a defined class
 */
private Class<?> rewriteSetPropertiesAndDefineClass(String name, InputStream byteCodeStream)
        throws IOException {
    final Type systemType = Type.getType(System.class);
    ClassReader cr = new ClassReader(byteCodeStream);
    ClassWriter cw = new ClassWriter(0);

    cr.accept(new ClassVisitor(Opcodes.ASM5, cw) {
        @Override
        public MethodVisitor visitMethod(int access, String name, String desc, String signature,
                String[] exceptions) {
            MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
            return new MethodVisitor(Opcodes.ASM5, mv) {
                @Override
                public void visitMethodInsn(int opcode, String owner, String name, String desc, boolean itf) {
                    // If we see a call to System.setProperty, change it to SparkRuntimeEnv.setProperty
                    if (opcode == Opcodes.INVOKESTATIC && name.equals("setProperty")
                            && owner.equals(systemType.getInternalName())) {
                        super.visitMethodInsn(opcode, SPARK_RUNTIME_ENV_TYPE.getInternalName(), name, desc,
                                false);
                    } else {
                        super.visitMethodInsn(opcode, owner, name, desc, itf);
                    }
                }
            };
        }
    }, ClassReader.EXPAND_FRAMES);

    byte[] byteCode = cw.toByteArray();
    return defineClass(name, byteCode, 0, byteCode.length);
}
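When developing rewrites like the two above, it can help to sanity-check the generated bytecode before calling defineClass. One option is ASM's CheckClassAdapter from the asm-util artifact; the helper below is a debugging sketch (the BytecodeSanityCheck class and verifyByteCode method names are made up for this example), not part of the original source.

import org.objectweb.asm.ClassReader;
import org.objectweb.asm.util.CheckClassAdapter;

import java.io.PrintWriter;

final class BytecodeSanityCheck {

    /**
     * Runs ASM's basic verifier over rewritten bytecode and prints any problems
     * to stderr. Intended for debugging rewrites, not for production use.
     */
    static void verifyByteCode(byte[] byteCode) {
        CheckClassAdapter.verify(new ClassReader(byteCode), false, new PrintWriter(System.err));
    }
}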

From source file:co.cask.cdap.app.runtime.spark.SparkRunnerClassLoader.java

License:Apache License

/**
 * Defines the akka.remote.Remoting class by rewriting usages of scala.concurrent.ExecutionContext.Implicits.global
 * to Remoting.system().dispatcher() in the shutdown() method, fixing the Akka thread/PermGen leak described in
 * https://github.com/akka/akka/issues/17729
 */
private Class<?> defineAkkaRemoting(String name, InputStream byteCodeStream)
        throws IOException, ClassNotFoundException {
    final Type dispatcherReturnType = determineAkkaDispatcherReturnType();
    if (dispatcherReturnType == null) {
        LOG.warn("Failed to determine ActorSystem.dispatcher() return type. "
                + "No rewriting of akka.remote.Remoting class. ClassLoader leakage might happen in SDK.");
        return findClass(name);
    }

    ClassReader cr = new ClassReader(byteCodeStream);
    ClassWriter cw = new ClassWriter(0);

    cr.accept(new ClassVisitor(Opcodes.ASM5, cw) {
        @Override
        public MethodVisitor visitMethod(int access, String name, String desc, String signature,
                String[] exceptions) {
            // Call super so that the method signature is registered with the ClassWriter (parent)
            MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);

            // Only rewrite the shutdown() method
            if (!"shutdown".equals(name)) {
                return mv;
            }

            return new MethodVisitor(Opcodes.ASM5, mv) {
                @Override
                public void visitMethodInsn(int opcode, String owner, String name, String desc, boolean itf) {
                    // Detect a use of "import scala.concurrent.ExecutionContext.Implicits.global",
                    // which translates to Java code as
                    // scala.concurrent.ExecutionContext$Implicits$.MODULE$.global()
                    // hence as bytecode
                    // GETSTATIC scala/concurrent/ExecutionContext$Implicits$.MODULE$ :
                    //           Lscala/concurrent/ExecutionContext$Implicits$;
                    // INVOKEVIRTUAL scala/concurrent/ExecutionContext$Implicits$.global
                    //           ()Lscala/concurrent/ExecutionContextExecutor;
                    if (opcode == Opcodes.INVOKEVIRTUAL && "global".equals(name)
                            && "scala/concurrent/ExecutionContext$Implicits$".equals(owner)
                            && Type.getMethodDescriptor(EXECUTION_CONTEXT_EXECUTOR_TYPE).equals(desc)) {
                        // Discard the GETSTATIC result from the stack by popping it
                        super.visitInsn(Opcodes.POP);
                        // Make the equivalent of "import system.dispatcher", which translates to Java code as
                        // this.system().dispatcher()
                        // hence as bytecode
                        // ALOAD 0 (load this)
                        // INVOKEVIRTUAL akka/remote/Remoting.system ()Lakka/actor/ExtendedActorSystem;
                        // INVOKEVIRTUAL akka/actor/ExtendedActorSystem.dispatcher ()Lscala/concurrent/ExecutionContextExecutor;
                        Type extendedActorSystemType = Type.getObjectType("akka/actor/ExtendedActorSystem");
                        super.visitVarInsn(Opcodes.ALOAD, 0);
                        super.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "akka/remote/Remoting", "system",
                                Type.getMethodDescriptor(extendedActorSystemType), false);
                        super.visitMethodInsn(Opcodes.INVOKEVIRTUAL, extendedActorSystemType.getInternalName(),
                                "dispatcher", Type.getMethodDescriptor(dispatcherReturnType), false);
                    } else {
                        // For other instructions, just call parent to deal with it
                        super.visitMethodInsn(opcode, owner, name, desc, itf);
                    }
                }
            };
        }
    }, ClassReader.EXPAND_FRAMES);

    byte[] byteCode = cw.toByteArray();
    return defineClass(name, byteCode, 0, byteCode.length);
}

From source file:co.cask.cdap.app.runtime.spark.SparkRunnerClassLoader.java

License:Apache License

/**
 * Find the return type of the ActorSystem.dispatcher() method. It is ExecutionContextExecutor in
 * Akka 2.3 (Spark 1.2+) and ExecutionContext in Akka 2.2 (Spark < 1.2, which CDAP does not support;
 * however, Spark 1.5 in CDH 5.6 still ships Akka 2.2 instead of 2.3).
 *
 * @return the return type of the ActorSystem.dispatcher() method or {@code null} if no such method
 */
@Nullable
private Type determineAkkaDispatcherReturnType() {
    try (InputStream is = openResource("akka/actor/ActorSystem.class")) {
        if (is == null) {
            return null;
        }
        final AtomicReference<Type> result = new AtomicReference<>();
        ClassReader cr = new ClassReader(is);
        cr.accept(new ClassVisitor(Opcodes.ASM5) {
            @Override
            public MethodVisitor visitMethod(int access, String name, String desc, String signature,
                    String[] exceptions) {
                if (name.equals("dispatcher") && Type.getArgumentTypes(desc).length == 0) {
                    // Expected to be either ExecutionContext (akka 2.2, only in CDH spark)
                    // or ExecutionContextExecutor (akka 2.3, for open source, HDP spark).
                    Type returnType = Type.getReturnType(desc);
                    if (returnType.equals(EXECUTION_CONTEXT_TYPE)
                            || returnType.equals(EXECUTION_CONTEXT_EXECUTOR_TYPE)) {
                        result.set(returnType);
                    } else {
                        LOG.warn("Unsupported return type of ActorSystem.dispatcher(): {}",
                                returnType.getClassName());
                    }
                }
                return super.visitMethod(access, name, desc, signature, exceptions);
            }
        }, ClassReader.SKIP_DEBUG | ClassReader.SKIP_CODE | ClassReader.SKIP_FRAMES);
        return result.get();
    } catch (IOException e) {
        LOG.warn("Failed to determine ActorSystem dispatcher() return type.", e);
        return null;
    }
}
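The EXECUTION_CONTEXT_TYPE and EXECUTION_CONTEXT_EXECUTOR_TYPE constants compared against above are declared elsewhere in SparkRunnerClassLoader and are not shown in this excerpt. From the way they are used, they presumably look roughly like the following field-level reconstruction (not the original declarations):

// Assumed definitions, inferred from how the constants are compared in determineAkkaDispatcherReturnType()
private static final Type EXECUTION_CONTEXT_TYPE =
        Type.getObjectType("scala/concurrent/ExecutionContext");
private static final Type EXECUTION_CONTEXT_EXECUTOR_TYPE =
        Type.getObjectType("scala/concurrent/ExecutionContextExecutor");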

From source file:co.cask.cdap.app.runtime.spark.SparkRunnerClassLoader.java

License:Apache License

/**
 * Defines the org.apache.spark.deploy.yarn.Client class, rewriting its createConfArchive method to
 * work around the SPARK-13441 bug.
 */
private Class<?> defineClient(String name, InputStream createConfArchive)
        throws IOException, ClassNotFoundException {
    // We only need to rewrite if listing the files of either HADOOP_CONF_DIR or YARN_CONF_DIR returns null.
    boolean needRewrite = false;
    for (String env : ImmutableList.of("HADOOP_CONF_DIR", "YARN_CONF_DIR")) {
        String value = System.getenv(env);
        if (value != null) {
            File path = new File(value);
            if (path.isDirectory() && path.listFiles() == null) {
                needRewrite = true;
                break;
            }
        }
    }

    // If rewrite is not needed
    if (!needRewrite) {
        return findClass(name);
    }

    ClassReader cr = new ClassReader(createConfArchive);
    ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
    cr.accept(new ClassVisitor(Opcodes.ASM5, cw) {
        @Override
        public MethodVisitor visitMethod(final int access, final String name, final String desc,
                String signature, String[] exceptions) {
            MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);

            // Only rewrite the createConfArchive method
            if (!"createConfArchive".equals(name)) {
                return mv;
            }

            // Check if it's a recognizable return type.
            // Spark 1.5+ return type is File
            boolean isReturnFile = Type.getReturnType(desc).equals(Type.getType(File.class));
            Type optionType = Type.getObjectType("scala/Option");
            if (!isReturnFile) {
                // Spark 1.4 return type is Option<File>
                if (!Type.getReturnType(desc).equals(optionType)) {
                    // Unknown type. Not going to modify the code.
                    return mv;
                }
            }

            // Generate this for Spark 1.5+
            // return SparkRuntimeUtils.createConfArchive(this.sparkConf, SPARK_CONF_FILE,
            //                                            LOCALIZED_CONF_DIR, LOCALIZED_CONF_DIR_ZIP);
            // Generate this for Spark 1.4
            // return Option.apply(SparkRuntimeUtils.createConfArchive(this.sparkConf, SPARK_CONF_FILE,
            //                                                         LOCALIZED_CONF_DIR, LOCALIZED_CONF_DIR_ZIP));
            GeneratorAdapter mg = new GeneratorAdapter(mv, access, name, desc);

            // load this.sparkConf to the stack
            mg.loadThis();
            mg.getField(Type.getObjectType("org/apache/spark/deploy/yarn/Client"), "sparkConf",
                    SPARK_CONF_TYPE);

            // push three constants to the stack
            mg.visitLdcInsn(SPARK_CONF_FILE);
            mg.visitLdcInsn(LOCALIZED_CONF_DIR);
            mg.visitLdcInsn(LOCALIZED_CONF_DIR_ZIP);

            // call SparkRuntimeUtils.createConfArchive, which returns a File and leaves it on the stack
            Type stringType = Type.getType(String.class);
            mg.invokeStatic(SPARK_RUNTIME_UTILS_TYPE, new Method("createConfArchive", Type.getType(File.class),
                    new Type[] { SPARK_CONF_TYPE, stringType, stringType, stringType }));
            if (isReturnFile) {
                // Spark 1.5+ return type is File, hence just return the File from the stack
                mg.returnValue();
                mg.endMethod();
            } else {
                // Spark 1.4 return type is Option<File>
                // return Option.apply(<file from stack>);
                // where the file is actually just popped from the stack
                mg.invokeStatic(optionType,
                        new Method("apply", optionType, new Type[] { Type.getType(Object.class) }));
                mg.checkCast(optionType);
                mg.returnValue();
                mg.endMethod();
            }

            return null;
        }
    }, ClassReader.EXPAND_FRAMES);

    byte[] byteCode = cw.toByteArray();
    return defineClass(name, byteCode, 0, byteCode.length);
}
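Note the return null at the end of visitMethod: it tells the ClassReader not to replay the original createConfArchive instructions, while the replacement body has already been written through the GeneratorAdapter (which wraps the MethodVisitor obtained from super.visitMethod). Because the ClassWriter was created with COMPUTE_MAXS, the visitMaxs values emitted by mg.endMethod() are recomputed automatically.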

From source file:co.cask.cdap.explore.service.ExploreServiceUtils.java

License:Apache License

@VisibleForTesting
static File rewriteHiveAuthFactory(File sourceJar, File targetJar) throws IOException {
    try (JarFile input = new JarFile(sourceJar);
            JarOutputStream output = new JarOutputStream(new FileOutputStream(targetJar))) {
        String hiveAuthFactoryPath = HIVE_AUTHFACTORY_CLASS_NAME.replace('.', '/') + ".class";

        Enumeration<JarEntry> sourceEntries = input.entries();
        while (sourceEntries.hasMoreElements()) {
            JarEntry entry = sourceEntries.nextElement();
            output.putNextEntry(new JarEntry(entry.getName()));

            try (InputStream entryInputStream = input.getInputStream(entry)) {
                if (!hiveAuthFactoryPath.equals(entry.getName())) {
                    ByteStreams.copy(entryInputStream, output);
                    continue;
                }

                try {
                    // Rewrite the bytecode of HiveAuthFactory.loginFromKeytab method to a no-op method
                    ClassReader cr = new ClassReader(entryInputStream);
                    ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
                    cr.accept(new ClassVisitor(Opcodes.ASM5, cw) {
                        @Override
                        public MethodVisitor visitMethod(final int access, final String name, final String desc,
                                String signature, String[] exceptions) {
                            MethodVisitor methodVisitor = super.visitMethod(access, name, desc, signature,
                                    exceptions);
                            if (!"loginFromKeytab".equals(name)) {
                                return methodVisitor;
                            }
                            GeneratorAdapter adapter = new GeneratorAdapter(methodVisitor, access, name, desc);
                            adapter.returnValue();

                            // Call visitMaxs with 0 so that COMPUTE_MAXS on the ClassWriter will compute the right values.
                            adapter.visitMaxs(0, 0);
                            return new MethodVisitor(Opcodes.ASM5) {
                            };
                        }
                    }, 0);
                    output.write(cw.toByteArray());
                } catch (Exception e) {
                    throw new IOException("Unable to generate HiveAuthFactory class", e);
                }
            }
        }

        return targetJar;
    }
}
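The trick here is the combination of the two visitors: the GeneratorAdapter immediately writes a bare return plus visitMaxs(0, 0) into the ClassWriter, and visitMethod then returns an empty MethodVisitor(Opcodes.ASM5) {} that is not chained to the writer, so the original loginFromKeytab instructions are silently dropped. The result is a no-op method with the original signature.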

From source file:co.cask.cdap.internal.app.runtime.adapter.PluginTest.java

License:Apache License

private static File generateClass(Class<?> fromClass, final String className, File directory)
        throws IOException {
    // Generate a class dynamically using ASM from another class, but use a different class name,
    // so that it won't be in the test class path.
    try (InputStream byteCode = fromClass.getClassLoader()
            .getResourceAsStream(Type.getInternalName(fromClass) + ".class")) {
        ClassReader reader = new ClassReader(byteCode);
        ClassWriter writer = new ClassWriter(0);
        reader.accept(new ClassVisitor(Opcodes.ASM5, writer) {
            @Override
            public void visit(int version, int access, String name, String signature, String superName,
                    String[] interfaces) {
                super.visit(version, access, className.replace('.', '/'), signature, superName, interfaces);
            }
        }, 0);

        File target = new File(directory, className.replace('.', File.separatorChar) + ".class");
        DirUtils.mkdirs(target.getParentFile());
        Files.write(writer.toByteArray(), target);
        return target;
    }
}

From source file:co.cask.cdap.internal.app.runtime.artifact.ArtifactInspector.java

License:Apache License

/**
 * Detects if a class is annotated with {@link Plugin} without loading the class.
 *
 * @param className name of the class
 * @param classLoader ClassLoader for loading the class file of the given class
 * @return true if the given class is annotated with {@link Plugin}
 */
private boolean isPlugin(String className, ClassLoader classLoader) {
    try (InputStream is = classLoader.getResourceAsStream(className.replace('.', '/') + ".class")) {
        if (is == null) {
            return false;
        }

        // Use ASM to inspect the class bytecode to see if it is annotated with @Plugin
        final boolean[] isPlugin = new boolean[1];
        ClassReader cr = new ClassReader(is);
        cr.accept(new ClassVisitor(Opcodes.ASM5) {
            @Override
            public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
                if (Plugin.class.getName().equals(Type.getType(desc).getClassName()) && visible) {
                    isPlugin[0] = true;
                }
                return super.visitAnnotation(desc, visible);
            }
        }, ClassReader.SKIP_CODE | ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);

        return isPlugin[0];
    } catch (IOException e) {
        // If the class file cannot be opened, then it cannot be a plugin
        LOG.warn("Failed to open class file for {}", className, e);
        return false;
    }
}
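Because only the class-level annotation is needed, the reader is invoked with SKIP_CODE | SKIP_DEBUG | SKIP_FRAMES, which skips method bodies, debug information, and stack map frames entirely and keeps the scan cheap; the same flag combination is used in determineAkkaDispatcherReturnType() above.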

From source file:co.cask.cdap.internal.app.runtime.batch.distributed.MapReduceContainerHelper.java

License:Apache License

/**
 * Rewrites the TwillLauncher bytecode as described
 * in {@link #saveLauncher(Configuration, File, List)}.
 *
 * @param hConf the hadoop configuration
 * @param sourceByteCode the original bytecode of the TwillLauncher
 * @param output output stream for writing the modified bytecode.
 * @throws IOException
 */
private static void rewriteLauncher(Configuration hConf, InputStream sourceByteCode, OutputStream output)
        throws IOException {
    URI frameworkURI = getFrameworkURI(hConf);
    if (frameworkURI == null) {
        ByteStreams.copy(sourceByteCode, output);
        return;
    }

    // It is localized as an archive and, due to TWILL-144, a suffix is added to its name. We reverse that effect
    // by creating an extra symlink as the first statement in the TwillLauncher.main() method.
    String ext = Paths.getExtension(frameworkURI.getPath());
    if (ext.isEmpty()) {
        ByteStreams.copy(sourceByteCode, output);
        return;
    }

    final String sourceName = frameworkURI.getFragment();
    final String targetName = sourceName + "." + ext;

    ClassReader cr = new ClassReader(sourceByteCode);
    ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
    cr.accept(new ClassVisitor(Opcodes.ASM5, cw) {

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc, String signature,
                String[] exceptions) {
            MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
            if (!name.equals("main")) {
                return mv;
            }
            Type[] argTypes = Type.getArgumentTypes(desc);
            if (argTypes.length != 1) {
                return mv;
            }
            Type argType = argTypes[0];
            if (argType.getSort() != Type.ARRAY
                    || !String.class.getName().equals(argType.getElementType().getClassName())) {
                return mv;
            }

            return new AdviceAdapter(Opcodes.ASM5, mv, access, name, desc) {
                @Override
                protected void onMethodEnter() {
                    visitLdcInsn(sourceName);
                    visitLdcInsn(targetName);
                    invokeStatic(Type.getType(MapReduceContainerSymLinker.class),
                            Methods.getMethod(void.class, "symlink", String.class, String.class));
                }
            };
        }
    }, ClassReader.EXPAND_FRAMES);

    output.write(cw.toByteArray());
}
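Here AdviceAdapter.onMethodEnter() injects the symlink call before any of the original main() instructions run. Note that Methods.getMethod appears to be a helper from the surrounding codebase rather than part of ASM (ASM's org.objectweb.asm.commons.Method.getMethod takes a java.lang.reflect.Method or a textual signature); it presumably builds the commons Method descriptor for the static MapReduceContainerSymLinker.symlink(String, String) call.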

From source file:co.cask.cdap.internal.app.runtime.service.http.HttpHandlerGenerator.java

License:Apache License

/**
 * Inspects the given type and copies/rewrites its handler methods into the newly generated class.
 *
 * @param delegateType The user handler type
 * @param inspectType The type that needs to be inspected. It's either the delegateType or one of its parents
 */
private void inspectHandler(final TypeToken<?> delegateType, final TypeToken<?> inspectType,
        final String pathPrefix, final Type classType, final ClassWriter classWriter,
        final List<Class<?>> preservedClasses) throws IOException {
    Class<?> rawType = inspectType.getRawType();

    // Visit the delegate class, copying and rewriting each handler method so that its body simply delegates
    try (InputStream sourceBytes = rawType.getClassLoader()
            .getResourceAsStream(Type.getInternalName(rawType) + ".class")) {
        ClassReader classReader = new ClassReader(sourceBytes);
        classReader.accept(new ClassVisitor(Opcodes.ASM5) {

            // Only need to visit @Path at the class level if we are inspecting the user handler class
            private final boolean inspectDelegate = delegateType.equals(inspectType);
            private boolean visitedPath = !inspectDelegate;

            @Override
            public void visit(int version, int access, String name, String signature, String superName,
                    String[] interfaces) {
                super.visit(version, access, name, signature, superName, interfaces);
            }

            @Override
            public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
                // Copy the class annotation if it is @Path. Only do it once
                Type type = Type.getType(desc);
                if (inspectDelegate && type.equals(Type.getType(Path.class))) {
                    visitedPath = true;
                    AnnotationVisitor annotationVisitor = classWriter.visitAnnotation(desc, visible);
                    return new AnnotationVisitor(Opcodes.ASM5, annotationVisitor) {
                        @Override
                        public void visit(String name, Object value) {
                            // "value" is the key for the Path annotation string.
                            if (name.equals("value")) {
                                super.visit(name, pathPrefix + value.toString());
                            } else {
                                super.visit(name, value);
                            }
                        }
                    };

                } else {
                    return super.visitAnnotation(desc, visible);
                }
            }

            @Override
            public MethodVisitor visitMethod(int access, String name, String desc, String signature,
                    String[] exceptions) {
                // Create a class-level annotation with the prefix, if the user has not specified any class-level
                // annotation.
                if (!visitedPath) {
                    String pathDesc = Type.getType(Path.class).getDescriptor();
                    AnnotationVisitor annotationVisitor = classWriter.visitAnnotation(pathDesc, true);
                    annotationVisitor.visit("value", pathPrefix);
                    annotationVisitor.visitEnd();
                    visitedPath = true;
                }

                // Copy the method if it is public and annotated with one of the HTTP request methods
                MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
                if (!Modifier.isPublic(access)) {
                    return mv;
                }
                return new HandlerMethodVisitor(delegateType, mv, desc, signature, access, name, exceptions,
                        classType, classWriter, preservedClasses);
            }
        }, ClassReader.SKIP_DEBUG);
    }
}