springboot配置kafka生产者和消费者详解：在原有依赖下新添加以下kafka依赖jar包
<!--kafka-->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>1.1.1.RELEASE</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.10</artifactId>
<version>0.10.0.1</version>
</dependency>
application.properties:
1 #原始数据kafka读取
sumer.servers=IP:9092,IP:9092(kafka消费集ip+port端⼝)
able.automit=true(是否⾃动提交)
sumer.session.timeout=20000(连接超时时间)
sumer.automit.interval=100
sumer.set=latest(实时⽣产,实时消费,不会从头开始消费)
pic=result(消费的topic)
up.id=test(消费组)
urrency=10(设置消费线程数)
10
11 #协议转换后存储kafka
12 kafka.producer.servers=IP:9092,IP:9092(kafka⽣产集ip+port端⼝)
13 pic=result(⽣产的topic)
14 ies=0
15 kafka.producer.batch.size=4096
16 kafka.producer.linger=1
17 kafka.=40960
springboot生产者配置:
1package fig;
2
3import org.apache.kafka.clients.producer.ProducerConfig;
4import org.apache.kafkamon.serialization.StringSerializer;
5import org.springframework.beans.factory.annotation.Value;
6import t.annotation.Bean;
7import t.annotation.Configuration;
8import org.springframework.kafka.annotation.EnableKafka;
9import org.DefaultKafkaProducerFactory;
10import org.KafkaTemplate;
11import org.ProducerFactory;
12
13import java.util.HashMap;
14import java.util.Map;
15
16/**
17* kafka⽣产配置
18* @author Lvjiapeng
19*
20*/
21 @Configuration
22 @EnableKafka
23public class KafkaProducerConfig {
24 @Value("${kafka.producer.servers}")
25private String servers;
26 @Value("${ies}")
27private int retries;
28 @Value("${kafka.producer.batch.size}")
29private int batchSize;
30 @Value("${kafka.producer.linger}")
31private int linger;
32 @Value("${kafka.}")
33private int bufferMemory;
34
35public Map<String, Object> producerConfigs() {
36 Map<String, Object> props = new HashMap<>();
37 props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
38 props.put(ProducerConfig.RETRIES_CONFIG, retries);
39 props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize);
40 props.put(ProducerConfig.LINGER_MS_CONFIG, linger);
41 props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory);
42 props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
43 props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
44return props;
45 }
46
47public ProducerFactory<String, String> producerFactory() {
48return new DefaultKafkaProducerFactory<>(producerConfigs());
49 }
50
51 @Bean
52public KafkaTemplate<String, String> kafkaTemplate() {
53return new KafkaTemplate<String, String>(producerFactory());
54 }
55 }
springboot消费者配置:
1package fig;
2
3import org.apache.sumer.ConsumerConfig;
4import org.apache.kafkamon.serialization.StringDeserializer;
5import org.springframework.beans.factory.annotation.Value;
6import t.annotation.Bean;
7import t.annotation.Configuration;
8import org.springframework.kafka.annotation.EnableKafka;
9import org.fig.ConcurrentKafkaListenerContainerFactory;
10import org.fig.KafkaListenerContainerFactory;
11import org.ConsumerFactory;
12import org.DefaultKafkaConsumerFactory;
13import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
14
springboot切换log4j215import java.util.HashMap;
16import java.util.Map;
17
18/**
19* kafka消费者配置
20* @author Lvjiapeng
21*
22*/
23 @Configuration
24 @EnableKafka
25public class KafkaConsumerConfig {
26
27 @Value("${sumer.servers}")
28private String servers;
29 @Value("${able.automit}")
30private boolean enableAutoCommit;
31 @Value("${sumer.session.timeout}")
32private String sessionTimeout;
33 @Value("${sumer.automit.interval}")
34private String autoCommitInterval;
35 @Value("${up.id}")
36private String groupId;
37 @Value("${sumer.set}")
38private String autoOffsetReset;
39 @Value("${urrency}")
40private int concurrency;
41
42 @Bean
43public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
44 ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
45 factory.setConsumerFactory(consumerFactory());
46 factory.setConcurrency(concurrency);
ContainerProperties().setPollTimeout(1500);
48return factory;
49 }
50
51public ConsumerFactory<String, String> consumerFactory() {
52return new DefaultKafkaConsumerFactory<>(consumerConfigs());
53 }
54
55
56public Map<String, Object> consumerConfigs() {
57 Map<String, Object> propsMap = new HashMap<>();
58 propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
59 propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
60 propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitInterval);
61 propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sessionTimeout);
62 propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
63 propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
64 propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
65 propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
66return propsMap;
67 }
68/**
69* kafka监听
70* @return
71*/
72 @Bean
73public RawDataListener listener() {
74return new RawDataListener();
75 }
76
77 }
生产者测试:
1package ller;
2
3import org.springframework.beans.factory.annotation.Autowired;
4import org.KafkaTemplate;
5import org.springframework.stereotype.Controller;
6import org.springframework.web.bind.annotation.RequestMapping;
7import org.springframework.web.bind.annotation.RequestMethod;
8
9import javax.servlet.http.HttpServletRequest;
10import javax.servlet.http.HttpServletResponse;
11import java.io.IOException;
12
13 @RequestMapping(value = "/kafka")
14 @Controller
15public class ProducerController {
16 @Autowired
17private KafkaTemplate kafkaTemplate;
18
19 @RequestMapping(value = "/producer",method = RequestMethod.GET)
20public void consume(HttpServletRequest request, HttpServletResponse response) throws IOException{
21 String value = "{\"code\":200,\"dataVersion\":\"17q1\",\"message\":\"\",\"id\":\"364f79f28eea48eefeca8c85477a10d3\",\"source\":\"didi\",\"tripList\":[{\"subTripList\":[{\"startTimeStamp\":1519879598,\"schemeList\":[{\"distance\":0.0,\"ids\":\"9466 22for (int i = 1; i<=500; i++){
23 kafkaTemplate.send("result",value);
24 }
25 }
26 }
消费者测试:
1import net.sf.json.JSONObject;
2import org.apache.sumer.ConsumerRecord;
3import org.apache.log4j.Logger;
4import org.springframework.beans.factory.annotation.Autowired;
5import org.springframework.kafka.annotation.KafkaListener;
6import org.springframework.stereotype.Component;
7
8import java.io.IOException;
9import java.util.List;
10
11/**
12* kafka监听
13* @author shangzz
14*
15*/
16 @Component
17public class RawDataListener {
18 Logger Logger(RawDataListener.class);
19 @Autowired
20private MatchRoadService matchRoadService;
21
22/**
23* 实时获取kafka数据(⽣产⼀条,监听⽣产topic⾃动消费⼀条)
24* @param record
25* @throws IOException
26*/
27 @KafkaListener(topics = {"${pic}"})
28public void listen(ConsumerRecord<?, ?> record) throws IOException {
29 String value = (String) record.value();
30 System.out.println(value);
31 }
32
33 }
总结:
①生产者环境类配置好以后，@Autowired自动注入KafkaTemplate类，使用send方法生产消息
②消费者环境类配置好以后，方法头前使用@KafkaListener(topics = {"${kafka.consumer.topic}"})注解监听topic并传入ConsumerRecord&lt;?, ?&gt; record对象即可自动消费topic
③相关kafka配置只需在application.properties照葫芦画瓢添加，修改或者删除配置并在环境配置类中做出相应修改即可
版权声明:本站内容均来自互联网,仅供演示用,请勿用于商业和其他非法用途。如果侵犯了您的权益请与我们联系QQ:729038198,我们将在24小时内删除。
发表评论