Java tutorial
/*
 * Copyright (C) 2016 Push Technology Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.reappt.adapters.kafka;

import static com.pushtechnology.diffusion.client.Diffusion.authenticationResult;

import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.pushtechnology.diffusion.client.callbacks.ErrorReason;
import com.pushtechnology.diffusion.client.content.Content;
import com.pushtechnology.diffusion.client.details.SessionDetails;
import com.pushtechnology.diffusion.client.features.RegisteredHandler;
import com.pushtechnology.diffusion.client.features.Topics;
import com.pushtechnology.diffusion.client.features.control.clients.AuthenticationControl;
import com.pushtechnology.diffusion.client.features.control.clients.ClientControl;
import com.pushtechnology.diffusion.client.security.authentication.AuthenticationResult;
import com.pushtechnology.diffusion.client.session.Session;
import com.pushtechnology.diffusion.client.session.SessionId;
import com.pushtechnology.diffusion.client.topics.details.TopicDetails;
import com.pushtechnology.diffusion.client.types.Credentials;
import com.pushtechnology.diffusion.client.types.UpdateContext;

/**
 * The outbound side of Diffusion/Reappt <-> kafka, publishing Reappt data to kafka
 */
public class OutboundKafkaAdapter {

    private static Logger log = LoggerFactory.getLogger(OutboundKafkaAdapter.class);

    private final Session session;
    private final String serverTopicPartition;
    private final String kafkaBootstrapUrl;
    private KafkaProducer<String, String> producer;

    public static final String CLIENTS_TOPIC = "Diffusion/clients";

    /**
     * The root Diffusion topic from which all subscriptions will be published to kafka
     */
    //public static final String DIFFUSION_KAFKA_SUBSCRIPTION_TOPIC = KafkaAdapter.DIFFUSION_KAFKA_ROOT_TOPIC + "/inbound";

    public OutboundKafkaAdapter(Session session, String bootstrapUrl, String serverTopicPartition) {
        this.session = session;
        this.serverTopicPartition = serverTopicPartition;
        this.kafkaBootstrapUrl = bootstrapUrl;
    }

    private Topics.CompletionCallback callback = new Topics.CompletionCallback() {
        @Override
        public void onComplete() {
            log.info("onComplete()");
        }

        @Override
        public void onDiscard() {
            log.info("onDiscard()");
        }
    };

    private Topics.TopicStream topicStream = new Topics.TopicStream() {
        @Override
        public void onSubscription(String s, TopicDetails topicDetails) {
            log.info("onSubscription({})", s);
        }

        @Override
        public void onUnsubscription(String s, Topics.UnsubscribeReason unsubscribeReason) {
            log.info("onUnsubscription({})", s);
        }

        @Override
        public void onTopicUpdate(String topicPath, Content content, UpdateContext updateContext) {
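            // Each Diffusion topic update becomes one Kafka record on the configured
            // serverTopicPartition topic, keyed by the Diffusion topic path, with the
            // update content carried as the record value.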
log.info("Publishing \"{}\" from \"{}\" to kafka", content.asString(), topicPath); ProducerRecord<String, String> record = new ProducerRecord<>(serverTopicPartition, topicPath, content.asString()); producer.send(record); } @Override public void onClose() { log.info("onClose()"); } @Override public void onError(ErrorReason errorReason) { log.warn("onError({})", errorReason.toString()); } }; private ClientControl.SessionPropertiesListener propertiesListener = new ClientControl.SessionPropertiesListener.Default() { @Override public void onSessionOpen(SessionId sessionId, Map<String, String> properties) { properties = new HashMap<>(properties); properties.put("SessionEvent", "open"); log.info("Publishing \"{}\" from session \"{}\" to kafka", properties, sessionId); ProducerRecord<String, String> record = new ProducerRecord<>(serverTopicPartition, CLIENTS_TOPIC, new JSONObject(properties).toString()); producer.send(record); } @Override public void onSessionEvent(SessionId sessionId, EventType eventType, Map<String, String> properties, Map<String, String> previousValues) { if (eventType == EventType.UPDATED) { properties = new HashMap<>(properties); properties.put("SessionEvent", "update"); log.info("Publishing updated \"{}\" from session \"{}\" to kafka", properties, sessionId); ProducerRecord<String, String> record = new ProducerRecord<>(serverTopicPartition, CLIENTS_TOPIC, new JSONObject(properties).toString()); producer.send(record); } } @Override public void onSessionClose(SessionId sessionId, Map<String, String> properties, ClientControl.CloseReason closeReason) { properties = new HashMap<>(properties); properties.put("SessionEvent", "close"); log.info("Publishing \"{}\" from session \"{}\" to kafka", properties, sessionId); ProducerRecord<String, String> record = new ProducerRecord<>(serverTopicPartition, CLIENTS_TOPIC, new JSONObject(properties).toString()); producer.send(record); } }; private AuthenticationControl.ControlAuthenticationHandler authenticationHandler = new AuthenticationControl.ControlAuthenticationHandler() { @Override public void authenticate(final String principal, Credentials credentials, final SessionDetails sessionDetails, Callback callback) { log.info("ControlAuthenticationHandler.authenticate({})", principal); Map<String, String> props = new HashMap<>(); log.info("getAdditionalProperties({})", principal); props.put("Latitude", Float.toString(sessionDetails.getLocation().getCoordinates().getLatitude())); props.put("Longitude", Float.toString(sessionDetails.getLocation().getCoordinates().getLongitude())); log.info("adding {} to {}", props, principal); callback.allow(authenticationResult().withProperties(props).withRoles("ADMINISTRATOR")); } @Override public void onActive(RegisteredHandler registeredHandler) { log.info("ControlAuthenticationHandler.onActive()"); } @Override public void onClose() { log.info("ControlAuthenticationHandler.onClose()"); } }; public void start() { Properties kprops = new Properties(); kprops.put("acks", "1"); kprops.put("bootstrap.servers", kafkaBootstrapUrl); kprops.put("security.protocol", "SASL_SSL"); kprops.put("group.id", KafkaAdapter.DIFFUSION_KAFKA_CONSUMER_GROUP); producer = new KafkaProducer<>(kprops, new StringSerializer(), new StringSerializer()); final Topics topicsFeature = session.feature(Topics.class); topicsFeature.addFallbackTopicStream(topicStream); ClientControl clientControl = session.feature(ClientControl.class); AuthenticationControl authControl = session.feature(AuthenticationControl.class); 
        authControl.setAuthenticationHandler("before-system-handler",
            EnumSet.allOf(SessionDetails.DetailType.class), authenticationHandler);

        // Listen to client events and publish to kafka
        clientControl.setSessionPropertiesListener(propertiesListener,
            Session.ALL_FIXED_PROPERTIES, Session.ALL_USER_PROPERTIES);

        // Get everything except the Diffusion publisher and the synthetic kafka topic
        topicsFeature.subscribe("?(?!(Diffusion|" + KafkaAdapter.DIFFUSION_KAFKA_ROOT_TOPIC + ")).*//", callback);
    }

    /**
     * Stop the Kafka producer
     */
    public void stop() {
        producer.close();
    }
}
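To try the adapter out, open a Diffusion/Reappt session, hand it to the constructor together with the Kafka bootstrap address and the name of the Kafka topic to publish to, and call start(). The sketch below shows one way to wire this up; the service URL, credentials, bootstrap address and Kafka topic name are placeholders rather than values taken from this project.

package io.reappt.adapters.kafka;

import com.pushtechnology.diffusion.client.Diffusion;
import com.pushtechnology.diffusion.client.session.Session;

// Hypothetical example class, not part of the adapter itself.
public class OutboundKafkaAdapterExample {

    public static void main(String[] args) {
        // Open a session against the Diffusion/Reappt service. The principal needs
        // permission to register authentication handlers and session properties
        // listeners; the URL and credentials here are placeholders.
        Session session = Diffusion.sessions()
            .principal("control")
            .password("password")
            .open("wss://example.reappt.io:443");

        // Publish all topic updates and session events to the placeholder
        // "diffusion.outbound" Kafka topic via the placeholder bootstrap address.
        OutboundKafkaAdapter adapter =
            new OutboundKafkaAdapter(session, "kafka.example.com:9093", "diffusion.outbound");
        adapter.start();

        // stop() only closes the Kafka producer, so close the Diffusion session
        // separately when shutting down.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            adapter.stop();
            session.close();
        }));
    }
}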