Java tutorial: basic Kafka consumer example
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package kafka.examples.consumer;

import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.ArgumentParserException;
import net.sourceforge.argparse4j.inf.Namespace;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Arrays;
import java.util.Collections;
import java.util.Properties;

import static net.sourceforge.argparse4j.impl.Arguments.store;

public class BasicConsumerExample {

    public static void main(String[] args) {
        ArgumentParser parser = argParser();

        try {
            /* parse args */
            Namespace res = parser.parseArgs(args);
            String brokerList = res.getString("bootstrap.servers");
            String topic = res.getString("topic");
            String serializer = res.getString("serializer"); // parsed but not used in this basic example

            Properties consumerConfig = new Properties();
            consumerConfig.put("group.id", "my-group");
            consumerConfig.put("bootstrap.servers", brokerList);
            consumerConfig.put("auto.offset.reset", "earliest");
            consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                    "org.apache.kafka.common.serialization.ByteArrayDeserializer");
            consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                    "org.apache.kafka.common.serialization.ByteArrayDeserializer");

            KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(consumerConfig);
            consumer.subscribe(Collections.singletonList(topic));

            while (true) {
                // Poll with a 1 second timeout, print each record, then commit the consumed offsets.
                ConsumerRecords<byte[], byte[]> records = consumer.poll(1000);
                for (ConsumerRecord<byte[], byte[]> record : records) {
                    System.out.printf(
                            "Received message: topic = %s, partition = %d, offset = %d, key = %s, value = %s%n",
                            record.topic(), record.partition(), record.offset(),
                            deserialize(record.key()), deserialize(record.value()));
                }
                consumer.commitSync();
            }
        } catch (ArgumentParserException e) {
            if (args.length == 0) {
                parser.printHelp();
                System.exit(0);
            } else {
                parser.handleError(e);
                System.exit(1);
            }
        }
    }

    @SuppressWarnings("unchecked")
    private static <V> V deserialize(final byte[] objectData) {
        // Records are expected to carry Java-serialized objects, so decode them with commons-lang3.
        return (V) org.apache.commons.lang3.SerializationUtils.deserialize(objectData);
    }

    /**
     * Get the command-line argument parser.
     */
    private static ArgumentParser argParser() {
        ArgumentParser parser = ArgumentParsers
                .newArgumentParser("basic-consumer")
                .defaultHelp(true)
                .description("This example demonstrates kafka consumer capabilities");

        parser.addArgument("--bootstrap.servers").action(store())
                .required(true)
                .type(String.class)
                .metavar("BROKER-LIST")
                .help("comma separated broker list");

        parser.addArgument("--topic").action(store())
                .required(true)
                .type(String.class)
                .metavar("TOPIC")
                .help("consume messages from this topic");

        parser.addArgument("--serializer").action(store())
                .required(false)
                .setDefault("byte")
                .type(String.class)
                .choices(Arrays.asList("byte", "kryo"))
                .metavar("BYTE/KRYO")
                .help("use byte array or kryo serializer");

        return parser;
    }
}
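For completeness, here is a minimal, hypothetical companion producer sketch (not part of the original example) showing one way to publish messages that the consumer above can decode: keys and values are Java-serialized with commons-lang3 SerializationUtils and sent as raw byte arrays. The class name, broker address, topic name, and message payloads below are placeholder assumptions, not part of the Kafka examples.

package kafka.examples.producer;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class BasicProducerExample {

    public static void main(String[] args) {
        // Placeholder broker address and topic; adjust for your environment.
        String brokerList = "localhost:9092";
        String topic = "my-topic";

        Properties producerConfig = new Properties();
        producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
        producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.ByteArraySerializer");
        producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.ByteArraySerializer");

        try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(producerConfig)) {
            for (int i = 0; i < 10; i++) {
                // Java-serialize the key and value so the consumer's deserialize() helper can read them back.
                byte[] key = org.apache.commons.lang3.SerializationUtils.serialize("key-" + i);
                byte[] value = org.apache.commons.lang3.SerializationUtils.serialize("message-" + i);
                producer.send(new ProducerRecord<>(topic, key, value));
            }
            producer.flush();
        }
    }
}

With messages published this way, the consumer can be run against the same topic using its own command-line options, for example (classpath setup omitted, broker address is a placeholder): java kafka.examples.consumer.BasicConsumerExample --bootstrap.servers localhost:9092 --topic my-topic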