Source code for io.reappt.adapters.kafka.KafkaAdapter: a bridge adapter that
connects a Diffusion (Reappt) session to an IBM Message Hub (Kafka) service.

/*
 * Copyright (C) 2016 Push Technology Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package io.reappt.adapters.kafka;

import com.pushtechnology.diffusion.client.Diffusion;
import com.pushtechnology.diffusion.client.session.Session;
import io.reappt.adapters.kafka.utils.RESTUtils;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import java.io.IOException;
import java.net.ServerSocket;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Kafka Publisher
 */
/**
 * Kafka Publisher.
 *
 * <p>Bridges a Diffusion (Reappt) session with an IBM Message Hub (Kafka)
 * service: an {@code OutboundKafkaAdapter} forwards Diffusion updates to a
 * per-server Kafka topic, and an {@code InboundKafkaAdapter} republishes Kafka
 * messages into Diffusion. Connection details are parsed from the Cloud
 * Foundry {@code VCAP_SERVICES} environment variable when present, falling
 * back to system-property defaults.
 */
public class KafkaAdapter {

    private static final Logger log = LoggerFactory.getLogger(KafkaAdapter.class);

    /** Kafka topic for this adapter, made unique per Reappt server via the URL hash. */
    private final String serverTopicPartition;

    // These are just defaults, the actual values should be parsed from VCAP_SERVICES
    private String bootstrap = "kafka01-prod01.messagehub.services.us-south.bluemix.net:9093";
    private String apiKey = System.getProperty("messagehub.apikey", "XXX");
    private String restUrl = "https://kafka-rest-prod01.messagehub.services.us-south.bluemix.net:443";
    private String user = System.getProperty("messagehub.user", "YYY");
    private String password = System.getProperty("messagehub.password", "ZZZ");
    private String reapptUser = System.getProperty("reappt.user", "UUU");
    private String reapptPassword = System.getProperty("reappt.password", "PPP");
    private String reapptUrl = System.getProperty("reappt.url", "wss://rushinganotherHera.us.reappt.io:443");

    /** Republishes Kafka messages into Diffusion. */
    private final InboundKafkaAdapter consumer;

    /** Forwards Diffusion updates to Kafka. */
    private final OutboundKafkaAdapter producer;

    /**
     * The Kafka topic to which all Diffusion information will be published.
     * The topic messages are further partitioned by Diffusion topic path.
     */
    private static final String KAFKA_DIFFUSION_TOPIC = "Diffusion";

    public static final String DIFFUSION_KAFKA_ROOT_TOPIC = "Kafka";

    /**
     * Consumer group used for publication and subscription in kafka
     */
    public static final String DIFFUSION_KAFKA_CONSUMER_GROUP = "Diffusion";

    /** Connected Diffusion session shared by both sub-adapters. */
    private final Session session;

    /** Socket held open purely so Cloud Foundry sees the app as alive; may be null. */
    private ServerSocket livenessSocket;

    /**
     * Entry point: builds the adapter, starts both directions of the bridge,
     * then blocks forever keeping the JVM alive.
     */
    public static void main(String[] args)
            throws InterruptedException, IOException, KeyManagementException, NoSuchAlgorithmException {
        KafkaAdapter adapter = new KafkaAdapter();
        adapter.start();
        adapter.listen();
        // All real work happens on the adapter/session threads; just park here.
        while (true) {
            Thread.sleep(1000);
        }
    }

    /**
     * Hacky way of telling cloudfoundry that the application is alive. There doesn't
     * appear to be a way of specifying --no-route to the maven plugin.
     *
     * <p>Binds a server socket on the port named by the {@code PORT} environment
     * variable and blocks in {@code accept()} until the first connection.
     *
     * @throws IOException if the socket cannot be bound or accept fails
     */
    private void listen() throws IOException {
        try {
            int port = Integer.parseInt(System.getenv("PORT"));
            livenessSocket = new ServerSocket(port);
            livenessSocket.accept();
        } catch (NumberFormatException ignored) {
            // PORT absent or malformed: not running under Cloud Foundry, so don't listen.
        }
    }

    /**
     * Parses service credentials, installs the JAAS configuration Message Hub
     * requires, connects the Diffusion session, and constructs both
     * sub-adapters (they are not started until {@link #start()}).
     *
     * @throws NoSuchAlgorithmException if the TLS context cannot be created
     * @throws KeyManagementException   if the TLS context cannot be initialised
     */
    public KafkaAdapter() throws NoSuchAlgorithmException, KeyManagementException {
        parseVcapServices(System.getenv("VCAP_SERVICES"));
        // Kafka or message hub insists that this property is set, even though we ignore it
        System.setProperty("java.security.auth.login.config", "make_kafka_happy");
        // Install our own Configuration implementation
        final Map<String, String> options = new HashMap<>();
        options.put("serviceName", "kafka");
        options.put("username", user);
        options.put("password", password);

        Configuration.setConfiguration(new Configuration() {
            @Override
            public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
                // Same Message Hub login entry regardless of the requested name.
                return new AppConfigurationEntry[] {
                        new AppConfigurationEntry("com.ibm.messagehub.login.MessageHubLoginModule",
                                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options) };
            }
        });

        this.session = createSession(this.reapptUrl, this.reapptUser, this.reapptPassword);

        // Unique per Reappt server so multiple deployments can share one Kafka.
        serverTopicPartition = KAFKA_DIFFUSION_TOPIC + "-" + reapptUrl.hashCode();

        this.producer = new OutboundKafkaAdapter(session, bootstrap, serverTopicPartition);
        this.consumer = new InboundKafkaAdapter(session, bootstrap);
    }

    /**
     * Opens a Diffusion session over TLS.
     *
     * @param url            websocket URL of the Reappt server
     * @param reapptUser     principal to authenticate as
     * @param reapptPassword credential for the principal
     * @return a connected session
     * @throws KeyManagementException   if the TLS context cannot be initialised
     * @throws NoSuchAlgorithmException if TLSv1.2 is unavailable
     */
    private Session createSession(String url, String reapptUser, String reapptPassword)
            throws KeyManagementException, NoSuchAlgorithmException {

        // For the purpose of the test we accept every certificate.
        // SECURITY NOTE: this disables server certificate validation entirely
        // (vulnerable to man-in-the-middle) — do not use in production.
        TrustManager trustManager = new X509TrustManager() {
            public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
            }

            public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
            }

            public X509Certificate[] getAcceptedIssuers() {
                return new X509Certificate[0];
            }
        };

        // TLSv1 is obsolete and widely disabled; require TLSv1.2 instead.
        final SSLContext context = SSLContext.getInstance("TLSv1.2");
        context.init(null, new TrustManager[] { trustManager }, null);

        return Diffusion.sessions().connectionTimeout(10000).errorHandler(new Session.ErrorHandler.Default())
                .principal(reapptUser).password(reapptPassword).reconnectionTimeout(5000).sslContext(context)
                .open(url);
    }

    /**
     * Ensures the Kafka topic exists, starts both sub-adapters, and registers
     * a shutdown hook so the bridge is torn down cleanly on JVM exit.
     */
    protected void start() {
        createMessageHubTopic(serverTopicPartition);

        producer.start();
        consumer.start();

        // Make sure we clean up at shutdown
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                KafkaAdapter.this.stop();
            }
        });
    }

    /**
     * Stop the Kafka producer and consumer, close the Diffusion session and
     * the liveness socket. Best-effort: failures are logged, never rethrown,
     * because this runs from the shutdown hook.
     */
    protected void stop() {
        try {
            producer.stop();
            consumer.stop();
            session.close();
            if (livenessSocket != null) {
                livenessSocket.close();
            }
        } catch (Throwable thr) {
            log.error("Failed to shutdown adapter cleanly", thr);
        }
    }

    /**
     * The message hub way of creating topics since auto creation is not supported.
     *
     * @param topicName name of the topic to create (single partition)
     */
    private void createMessageHubTopic(String topicName) {
        RESTUtils restApi = new RESTUtils(restUrl, apiKey);
        // Build the body with org.json so the topic name is properly escaped
        // rather than concatenated into a raw JSON string.
        String body = new JSONObject().put("name", topicName).put("partitions", 1).toString();
        // Create a topic, ignore a 422 response - this means that the
        // topic name already exists.
        restApi.post("/admin/topics", body, new int[] { 422 });
    }

    /**
     * Overrides the default credentials from the Cloud Foundry
     * {@code VCAP_SERVICES} JSON document, when bound services are present.
     * Reads the first "messagehub" and "push-reappt" service instances.
     *
     * @param vcapServices raw JSON from the environment, or null when not
     *                     running under Cloud Foundry (no-op in that case)
     */
    private void parseVcapServices(String vcapServices) {
        if (vcapServices == null)
            return;
        JSONObject jsonObject = new JSONObject(vcapServices);
        if (jsonObject.has("messagehub")) {
            JSONObject messagehub = jsonObject.getJSONArray("messagehub").getJSONObject(0);
            JSONObject creds = messagehub.getJSONObject("credentials");
            apiKey = creds.getString("api_key");
            bootstrap = (String) creds.getJSONArray("kafka_brokers_sasl").get(0);
            restUrl = creds.getString("kafka_rest_url");
            user = creds.getString("user");
            password = creds.getString("password");
        }
        if (jsonObject.has("push-reappt")) {
            JSONObject reappt = jsonObject.getJSONArray("push-reappt").getJSONObject(0);
            JSONObject creds = reappt.getJSONObject("credentials");
            reapptUrl = "wss://" + creds.getString("host") + ":443";
            reapptUser = creds.getString("principal");
            reapptPassword = creds.getString("credentials");
        }
    }
}