This change adds a Kafka channel to the im service. I had just been reading up on Spring Cloud Stream, so it was a good chance to put it into practice.

Adding the Kafka channel

  • pom.xml

Add the Spring Cloud Stream dependencies:

<dependency>
    <groupId>org.springframework.cloud</groupId>
    <artifactId>spring-cloud-stream</artifactId>
</dependency>

<dependency>
    <groupId>org.springframework.cloud</groupId>
    <artifactId>spring-cloud-stream-binder-kafka</artifactId>
</dependency>

Since Spring Cloud is involved, the versions can be managed centrally through spring-cloud-dependencies (the 2021.0.x release train pairs with Spring Boot 2.7.x):

<dependencyManagement>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-dependencies</artifactId>
            <version>2.7.18</version>
            <type>pom</type>
            <scope>import</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-dependencies</artifactId>
            <version>2021.0.8</version>
            <type>pom</type>
            <scope>import</scope>
        </dependency>
    </dependencies>
</dependencyManagement>

  • application.yml

Configure the Stream-related items in the yml (note that they live under the spring.cloud prefix):

server:
  port: 18080
cus:
  ws:
    exclude-receiver-info-flag: true
    receiver-excludes-himself-flag: true
    # switch the communication channel to stream
    communication-type: stream
spring:
  cloud:
    function:
      # function bean(s) exposed to Spring Cloud Stream
      definition: listener
    stream:
      kafka:
        binder:
          # Kafka connection info
          brokers: ${KAFKA_BROKERS:127.0.0.1:9092}
          # allow topics to be created automatically
          auto-create-topics: true
      bindings:
        # binding name follows the <function name>-in-<index> convention, so it matches the listener bean
        listener-in-0:
          # topic
          destination: TEST_TOPIC


  • StreamSendExecutor

The Kafka producer. Messages go out through StreamBridge; since no output binding named TEST_TOPIC is configured, Spring Cloud Stream resolves it as a dynamic destination and publishes to a topic of that name.

package com.example.im.infra.executor.send.stream;

import com.example.im.infra.constant.ImConstants;
import com.example.im.infra.executor.send.AbstractBaseSendExecutor;
import com.example.im.infra.executor.send.dto.MessageInfo;
import com.example.im.infra.executor.send.dto.ScopeOfSendingEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.stream.function.StreamBridge;
import org.springframework.stereotype.Component;

/**
 * @author PC
 * Message-queue send executor
 */
@Component
public class StreamSendExecutor extends AbstractBaseSendExecutor {
    private final static Logger logger = LoggerFactory.getLogger(StreamSendExecutor.class);

    private final StreamBridge streamBridge;

    @Autowired
    public StreamSendExecutor(StreamBridge streamBridge) {
        this.streamBridge = streamBridge;
    }

    @Override
    public String getCommunicationType() {
        return ImConstants.CommunicationType.STREAM;
    }

    @Override
    public void sendToUser(String sendUserName, String message) {
        MessageInfo messageInfo = new MessageInfo();
        messageInfo.setSendUserName(sendUserName);
        messageInfo.setMessage(message);
        messageInfo.setScopeOfSending(ScopeOfSendingEnum.USER);
        logger.debug("send to user stream websocket, topic is " + "TEST_TOPIC");
        streamBridge.send("TEST_TOPIC", messageInfo);
    }

    @Override
    public void sendToAll(String sendUserName, String message) {
        MessageInfo messageInfo = new MessageInfo();
        messageInfo.setSendUserName(sendUserName);
        messageInfo.setMessage(message);
        messageInfo.setScopeOfSending(ScopeOfSendingEnum.ALL);
        logger.debug("send to user stream websocket, topic is " + "TEST_TOPIC");
        streamBridge.send("TEST_TOPIC", messageInfo);
    }
}
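
The MessageInfo DTO carried on the topic is defined elsewhere in the im project and is not shown in this post. A minimal sketch of the shape the code above relies on (an assumption — the real class and enum may carry extra fields, codes or Jackson annotations):

package com.example.im.infra.executor.send.dto;

/**
 * Minimal sketch of the payload published to TEST_TOPIC (assumed fields only).
 */
public class MessageInfo {
    private String sendUserName;
    private String message;
    private ScopeOfSendingEnum scopeOfSending;

    public String getSendUserName() { return sendUserName; }
    public void setSendUserName(String sendUserName) { this.sendUserName = sendUserName; }

    public String getMessage() { return message; }
    public void setMessage(String message) { this.message = message; }

    public ScopeOfSendingEnum getScopeOfSending() { return scopeOfSending; }
    public void setScopeOfSending(ScopeOfSendingEnum scopeOfSending) { this.scopeOfSending = scopeOfSending; }
}

/**
 * Sketch of the sending-scope enum used above; the real one lives in its own file.
 */
enum ScopeOfSendingEnum {
    USER("user"),
    ALL("all");

    private final String scopeCode;

    ScopeOfSendingEnum(String scopeCode) {
        this.scopeCode = scopeCode;
    }

    public String getScopeCode() {
        return scopeCode;
    }
}
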
  • StreamMessageListener

The Kafka consumer. The bean name listener must match both the spring.cloud.function.definition entry and the listener-in-0 binding configured above.

package com.example.im.infra.executor.send.stream;

import com.example.im.infra.executor.send.DefaultSendExecutor;
import com.example.im.infra.executor.send.dto.MessageInfo;
import com.example.im.infra.util.JsonUtils;
import com.fasterxml.jackson.core.type.TypeReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.messaging.Message;
import org.springframework.stereotype.Component;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import java.nio.charset.StandardCharsets;
import java.util.function.Function;

/**
 * @author PC
 * Message-queue listener
 */
@Component
public class StreamMessageListener {
    private final static Logger logger = LoggerFactory.getLogger(StreamMessageListener.class);

    private DefaultSendExecutor defaultSendExecutor;

    @Autowired
    public void setDefaultSendExecutor(DefaultSendExecutor defaultSendExecutor) {
        this.defaultSendExecutor = defaultSendExecutor;
    }

    @Bean
    public Function<Flux<Message<byte[]>>, Mono<Void>> listener() {
        return messageInfoFlux -> messageInfoFlux.map(message -> {
            String messageJson = new String(message.getPayload(), StandardCharsets.UTF_8);
            MessageInfo messageInfo = JsonUtils.toObjectByTypeReference(messageJson, new TypeReference<MessageInfo>() {
            });
            switch (messageInfo.getScopeOfSending()) {
                case USER:
                    defaultSendExecutor.sendToUser(messageInfo.getSendUserName(), messageInfo.getMessage());
                    break;
                case ALL:
                    defaultSendExecutor.sendToAll(messageInfo.getSendUserName(), messageInfo.getMessage());
                    break;
                default:
                    //Normally this cannot happen unless ScopeOfSending has been overridden; an extension point for custom sending scopes could be added later
                    logger.warn("invalid sending range:" + messageInfo.getScopeOfSending().getScopeCode());
                    break;
            }
            return messageInfo;
        }).then();
    }
}
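
For reference, Spring Cloud Stream can also bind a plain imperative consumer instead of the reactive Function above. A sketch of an equivalent variant inside the same class (my own illustration, not code from the im project; it would replace the reactive listener() bean rather than coexist with it, and additionally needs a java.util.function.Consumer import):

    @Bean
    public Consumer<Message<byte[]>> listener() {
        return message -> {
            String messageJson = new String(message.getPayload(), StandardCharsets.UTF_8);
            MessageInfo messageInfo = JsonUtils.toObjectByTypeReference(messageJson, new TypeReference<MessageInfo>() {
            });
            // same dispatch as the reactive version, handled one record at a time
            switch (messageInfo.getScopeOfSending()) {
                case USER:
                    defaultSendExecutor.sendToUser(messageInfo.getSendUserName(), messageInfo.getMessage());
                    break;
                case ALL:
                    defaultSendExecutor.sendToAll(messageInfo.getSendUserName(), messageInfo.getMessage());
                    break;
                default:
                    logger.warn("invalid sending range:" + messageInfo.getScopeOfSending().getScopeCode());
                    break;
            }
        };
    }
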

Testing

test2 sends a message to test1, and test1 receives it.

A message published directly to the Kafka topic is also delivered to test1.
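
That second check can be reproduced with any Kafka client. A minimal sketch using the plain kafka-clients producer (already on the classpath via the Kafka binder); the class name and the JSON payload are my assumptions, and the enum value is expected to deserialize by name, which depends on how JsonUtils configures Jackson:

package com.example.im.test;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * Publishes a MessageInfo-shaped JSON payload straight to TEST_TOPIC, bypassing the im service.
 */
public class DirectTopicSendTest {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        String payload = "{\"sendUserName\":\"test2\",\"message\":\"hello from kafka\",\"scopeOfSending\":\"ALL\"}";
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // block until the broker acknowledges the record
            producer.send(new ProducerRecord<>("TEST_TOPIC", payload)).get();
        }
    }
}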

