Note: This is a historical article that I originally published on another technical platform and have reposted here on CSDN.

Apache Kafka is an open-source distributed event streaming platform and a popular high-performance message queue in current system development; thousands of companies use it for high-performance data pipelines, streaming analytics, data integration, and mission-critical applications.

Kafka works well as a replacement for more traditional message brokers. Message brokers are used for a variety of reasons, such as decoupling processing from data producers and buffering unprocessed messages. Compared with most messaging systems, Kafka offers better throughput, built-in partitioning, replication, and fault tolerance, which makes it a good solution for large-scale message processing applications.
Java utility class
This utility class, built on the Kafka client, provides basic message sending and listening functionality.
pom.xml

<!-- Kafka integration -->
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-streams</artifactId>
    <version>2.2.2</version>
</dependency>
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.2.2</version>
</dependency>
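The pom.xml above also pulls in spring-kafka, although the utility class below talks to the kafka-clients API directly. For completeness, here is a minimal sketch of the same send/listen flow done the Spring way, assuming a Spring Boot application (with the corresponding starter) where spring.kafka.bootstrap-servers is configured; the class name, topic name "test", and group id "consumer-45" are only illustrative:

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

@Component
public class SpringKafkaExample {

    private final KafkaTemplate<String, String> kafkaTemplate;

    public SpringKafkaExample(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    /** Send a message to the "test" topic. */
    public void send(String msg) {
        kafkaTemplate.send("test", msg);
    }

    /** Consume messages from the "test" topic. */
    @KafkaListener(topics = "test", groupId = "consumer-45")
    public void listen(ConsumerRecord<String, String> record) {
        System.out.printf("topic:%s, offset:%d, message:%s%n",
                record.topic(), record.offset(), record.value());
    }
}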
KafkaUtils.java
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.Future;

/**
 * Description: Kafka utility class providing message sending and listening.
 */
public class KafkaUtils {

    /**
     * Get an initialized KafkaStreamServer instance.
     * @return
     */
    public static KafkaStreamServer buildServer() {
        return new KafkaStreamServer();
    }

    /**
     * Get an initialized KafkaStreamClient instance.
     * @return
     */
    public static KafkaStreamClient buildClient() {
        return new KafkaStreamClient();
    }

    public static class KafkaStreamServer {
        KafkaProducer<String, String> kafkaProducer = null;

        private KafkaStreamServer() {}

        /**
         * Create the producer configuration and connect to the broker.
         * @param host
         * @param port
         * @return
         */
        public KafkaStreamServer createKafkaStreamServer(String host, int port) {
            String bootstrapServers = String.format("%s:%d", host, port);
            if (kafkaProducer != null) {
                return this;
            }
            Properties properties = new Properties();
            // Kafka address; separate multiple addresses with commas
            properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
            properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            kafkaProducer = new KafkaProducer<>(properties);
            return this;
        }

        /**
         * Send a producer message to the Kafka broker.
         * @param topic
         * @param msg
         * @return
         */
        public Future<RecordMetadata> sendMsg(String topic, String msg) {
            ProducerRecord<String, String> record = new ProducerRecord<>(topic, msg);
            Future<RecordMetadata> future = kafkaProducer.send(record);
            System.out.println("Message sent: " + msg);
            return future;
        }

        /**
         * Close the Kafka connection.
         */
        public void close() {
            if (kafkaProducer != null) {
                kafkaProducer.flush();
                kafkaProducer.close();
                kafkaProducer = null;
            }
        }
    }

    public static class KafkaStreamClient {
        KafkaConsumer<String, String> kafkaConsumer = null;

        private KafkaStreamClient() {}

        /**
         * Create the consumer configuration and connect to the broker.
         * @param host
         * @param port
         * @param groupId
         * @return
         */
        public KafkaStreamClient createKafkaStreamClient(String host, int port, String groupId) {
            String bootstrapServers = String.format("%s:%d", host, port);
            if (kafkaConsumer != null) {
                return this;
            }
            Properties properties = new Properties();
            // Kafka address; separate multiple addresses with commas
            properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
            properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
            properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
            properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
            kafkaConsumer = new KafkaConsumer<>(properties);
            return this;
        }

        /**
         * Poll messages as a consumer and pass each record to the HeaderInterface callback.
         * @param topic
         * @param headerInterface
         */
        public void pollMsg(String topic, HeaderInterface headerInterface) {
            kafkaConsumer.subscribe(Collections.singletonList(topic));
            while (true) {
                ConsumerRecords<String, String> records = kafkaConsumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    try {
                        headerInterface.execute(record);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }

        /**
         * Close the Kafka connection.
         */
        public void close() {
            if (kafkaConsumer != null) {
                kafkaConsumer.close();
                kafkaConsumer = null;
            }
        }
    }

    @FunctionalInterface
    interface HeaderInterface {
        void execute(ConsumerRecord<String, String> record);
    }

    /**
     * Test example.
     * @param args
     * @throws InterruptedException
     */
    public static void main(String[] args) throws InterruptedException {
        // Producer: send messages
//        KafkaStreamServer kafkaStreamServer = KafkaUtils.buildServer().createKafkaStreamServer("127.0.0.1", 9092);
//        int i = 0;
//        while (i < 10) {
//            String msg = "Hello," + new Random().nextInt(100);
//            kafkaStreamServer.sendMsg("test", msg);
//            i++;
//            Thread.sleep(100);
//        }
//        kafkaStreamServer.close();
//        System.out.println("Sending finished");

        System.out.println("Receiving messages");
        KafkaStreamClient kafkaStreamClient = KafkaUtils.buildClient().createKafkaStreamClient("127.0.0.1", 9092, "consumer-45");
        kafkaStreamClient.pollMsg("test", new HeaderInterface() {
            @Override
            public void execute(ConsumerRecord<String, String> record) {
                System.out.println(String.format("topic:%s, offset:%d, message:%s", record.topic(), record.offset(), record.value()));
            }
        });
    }
}
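Note that pollMsg() blocks in an infinite loop, so running the producer and the consumer in the same process requires putting one of them on its own thread. Below is a minimal usage sketch under that assumption; the broker address 127.0.0.1:9092, the topic "test", and the group id "consumer-45" are taken from the example above, while the demo class and its thread handling are illustrative and not part of the utility. It assumes the class lives in the same package as KafkaUtils, since HeaderInterface is package-private.

public class KafkaUtilsDemo {
    public static void main(String[] args) throws Exception {
        // Consumer on a background daemon thread; pollMsg() never returns.
        Thread consumerThread = new Thread(() ->
                KafkaUtils.buildClient()
                        .createKafkaStreamClient("127.0.0.1", 9092, "consumer-45")
                        .pollMsg("test", rec -> System.out.printf(
                                "topic:%s, offset:%d, message:%s%n",
                                rec.topic(), rec.offset(), rec.value())));
        consumerThread.setDaemon(true);
        consumerThread.start();

        // Producer in the foreground: send ten messages, then close.
        KafkaUtils.KafkaStreamServer server =
                KafkaUtils.buildServer().createKafkaStreamServer("127.0.0.1", 9092);
        for (int i = 0; i < 10; i++) {
            server.sendMsg("test", "Hello," + i);
            Thread.sleep(100);
        }
        server.close();

        // Give the consumer a moment to drain before the JVM exits.
        Thread.sleep(3000);
    }
}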