# Apache Kafka Support

### Overview

Spring Integration for Apache Kafka is based on the [Spring for Apache Kafka project](https://projects.spring.io/spring-kafka/).

You need to include this dependency in your project:

Maven

```
<dependency>
    <groupId>org.springframework.integration</groupId>
    <artifactId>spring-integration-kafka</artifactId>
    <version>5.5.9</version>
</dependency>
```

Gradle

```
compile "org.springframework.integration:spring-integration-kafka:5.5.9"
```

It provides the following components:

* [Outbound Channel Adapter](#kafka-outbound)

* [Message-Driven Channel Adapter](#kafka-inbound)

* [Inbound Channel Adapter](#kafka-inbound-pollable)

* [Outbound Gateway](#kafka-outbound-gateway)

* [Inbound Gateway](#kafka-inbound-gateway)

* [Channels Backed by Apache Kafka Topics](#kafka-channels)

### Outbound Channel Adapter

The outbound channel adapter is used to publish messages from a Spring Integration channel to Apache Kafka topics. The channel is defined in the application context and then wired into the application that sends messages to Apache Kafka. Sender applications can publish to Apache Kafka by using Spring Integration messages, which are internally converted to Kafka records by the outbound channel adapter, as follows:

* The payload of the Spring Integration message is used to populate the payload of the Kafka record.

* By default, the `kafka_messageKey` header of the Spring Integration message is used to populate the key of the Kafka record.

You can customize the target topic and partition for publishing the message through the `kafka_topic` and `kafka_partitionId` headers, respectively.

In addition, the `<int-kafka:outbound-channel-adapter>` provides the ability to extract the key, the target topic, and the target partition by applying SpEL expressions on the outbound message. To that end, it supports three pairs of mutually exclusive attributes:

* `topic` and `topic-expression`

* `message-key` and `message-key-expression`

* `partition-id` and `partition-id-expression`

These let you specify `topic`, `message-key`, and `partition-id`, respectively, as static values on the adapter or to dynamically evaluate their values at runtime against the request message.
> The `KafkaHeaders` interface (provided by `spring-kafka`) contains constants used for interacting with headers. The `messageKey` and `topic` default headers now require a `kafka_` prefix. When migrating from an earlier version that used the old headers, you need to specify `message-key-expression="headers['messageKey']"` and `topic-expression="headers['topic']"` on the `<int-kafka:outbound-channel-adapter>`. Alternatively, you can change the headers upstream to the new headers by using a `<header-enricher>` or a `MessageBuilder`. If you use constant values, you can also configure them on the adapter by using `topic` and `message-key`.
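For example, a sending application might set those headers on the message itself, using the `KafkaHeaders` constants. The following is a minimal sketch, assuming a `toKafka` `MessageChannel` that is wired to the adapter (the channel name and values are illustrative):

```
// Illustrative only: KafkaHeaders.TOPIC, KafkaHeaders.MESSAGE_KEY and KafkaHeaders.PARTITION_ID
// are the kafka_topic, kafka_messageKey and kafka_partitionId headers, respectively.
Message<String> message = MessageBuilder.withPayload("some payload")
        .setHeader(KafkaHeaders.TOPIC, "someTopic")
        .setHeader(KafkaHeaders.MESSAGE_KEY, "someKey")
        .setHeader(KafkaHeaders.PARTITION_ID, 0)
        .build();
toKafka.send(message);
```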
NOTE: If the adapter is configured with a topic or message key (either with a constant or an expression), those are used and the corresponding header is ignored. If you wish the header to override the configuration, you need to configure it in an expression, such as the following:

```
topic-expression="headers['topic'] != null ? headers['topic'] : 'myTopic'"
```

The adapter requires a `KafkaTemplate`, which, in turn, requires a suitably configured `KafkaProducerFactory`.

If a `send-failure-channel` (`sendFailureChannel`) is provided and a send failure (sync or async) is received, an `ErrorMessage` is sent to the channel. The payload is a `KafkaSendFailureException` with `failedMessage`, `record` (the `ProducerRecord`), and `cause` properties. You can override the `DefaultErrorMessageStrategy` by setting the `error-message-strategy` property.

If a `send-success-channel` (`sendSuccessChannel`) is provided, a message with a payload of type `org.apache.kafka.clients.producer.RecordMetadata` is sent after a successful send.
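If you do route failures, a simple handler on that channel can unpack the exception. The following is a minimal sketch, assuming a channel named `failures` is configured as the `send-failure-channel`:

```
// Illustrative only: the ErrorMessage payload is a KafkaSendFailureException.
@ServiceActivator(inputChannel = "failures")
public void handleSendFailure(ErrorMessage errorMessage) {
    KafkaSendFailureException failure = (KafkaSendFailureException) errorMessage.getPayload();
    Message<?> failedMessage = failure.getFailedMessage(); // the original Spring Integration message
    ProducerRecord<?, ?> record = failure.getRecord();     // the record that could not be sent
    Throwable cause = failure.getCause();
    // log, re-route, or dead-letter as appropriate
}
```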
> If your application uses transactions and the same channel adapter is used to publish messages where the transaction is started by a listener container, as well as to publish where there is no existing transaction, you must configure a `transactionIdPrefix` on the `KafkaTemplate` to override the prefix used by the container or transaction manager. The prefix used by container-initiated transactions (the producer factory or transaction manager property) must be the same on all application instances. The prefix used for producer-only transactions must be unique on all application instances.

You can configure a `flushExpression`, which must resolve to a boolean value. Flushing after sending several messages might be useful if you are using the `linger.ms` and `batch.size` Kafka producer properties; the expression should evaluate to `Boolean.TRUE` on the last message, and an incomplete batch is sent immediately. By default, the expression looks for a `Boolean` value in the `KafkaIntegrationHeaders.FLUSH` header (`kafka_flush`). The flush occurs if the value is `true` and not if it is `false` or the header is absent.
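As a minimal sketch of that default behavior (the `toKafka` channel name is again an assumption), a sender could set the `kafka_flush` header only on the last message of a burst:

```
// Illustrative only: KafkaIntegrationHeaders.FLUSH is the kafka_flush header.
Message<String> lastMessage = MessageBuilder.withPayload("last payload of the burst")
        .setHeader(KafkaIntegrationHeaders.FLUSH, Boolean.TRUE)
        .build();
toKafka.send(lastMessage); // the producer is flushed after this send
```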
The `KafkaProducerMessageHandler.sendTimeoutExpression` default has changed from 10 seconds to the `delivery.timeout.ms` Kafka producer property `+ 5000` so that the actual Kafka error after a timeout is propagated to the application, instead of a timeout generated by this framework. This has been changed for consistency, because you may get unexpected behavior (Spring may time out the send while it is actually, eventually, successful). IMPORTANT: that timeout is 120 seconds by default, so you may wish to reduce it to get more timely failures.

#### Java Configuration

The following example shows how to configure the outbound channel adapter for Apache Kafka with Java:

```
@Bean
@ServiceActivator(inputChannel = "toKafka")
public MessageHandler handler() throws Exception {
    KafkaProducerMessageHandler<String, String> handler =
            new KafkaProducerMessageHandler<>(kafkaTemplate());
    handler.setTopicExpression(new LiteralExpression("someTopic"));
    handler.setMessageKeyExpression(new LiteralExpression("someKey"));
    handler.setSuccessChannel(successes());
    handler.setFailureChannel(failures());
    return handler;
}

@Bean
public KafkaTemplate<String, String> kafkaTemplate() {
    return new KafkaTemplate<>(producerFactory());
}

@Bean
public ProducerFactory<String, String> producerFactory() {
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokerAddress);
    // set more properties
    return new DefaultKafkaProducerFactory<>(props);
}
```

#### Java DSL Configuration

The following example shows how to configure the outbound channel adapter for Apache Kafka with the Spring Integration Java DSL:

```
@Bean
public ProducerFactory<Integer, String> producerFactory() {
    return new DefaultKafkaProducerFactory<>(KafkaTestUtils.producerProps(embeddedKafka));
}

@Bean
public IntegrationFlow sendToKafkaFlow() {
    return f -> f
            .split(p -> Stream.generate(() -> p).limit(101).iterator(), null)
            .publishSubscribeChannel(c -> c
                    .subscribe(sf -> sf.handle(
                            kafkaMessageHandler(producerFactory(), TEST_TOPIC1)
                                    .timestampExpression("T(Long).valueOf('1487694048633')"),
                            e -> e.id("kafkaProducer1")))
                    .subscribe(sf -> sf.handle(
                            kafkaMessageHandler(producerFactory(), TEST_TOPIC2)
                                    .timestamp(m -> 1487694048644L),
                            e -> e.id("kafkaProducer2")))
            );
}

@Bean
public DefaultKafkaHeaderMapper mapper() {
    return new DefaultKafkaHeaderMapper();
}

private KafkaProducerMessageHandlerSpec<Integer, String, ?> kafkaMessageHandler(
        ProducerFactory<Integer, String> producerFactory, String topic) {
    return Kafka
            .outboundChannelAdapter(producerFactory)
            .messageKey(m -> m
                    .getHeaders()
                    .get(IntegrationMessageHeaderAccessor.SEQUENCE_NUMBER))
            .headerMapper(mapper())
            .partitionId(m -> 10)
            .topicExpression("headers[kafka_topic] ?: '" + topic + "'")
            .configureKafkaTemplate(t -> t.id("kafkaTemplate:" + topic));
}
```

#### XML Configuration

The following example shows how to configure the Kafka outbound channel adapter with XML:

```
<int-kafka:outbound-channel-adapter id="kafkaOutboundChannelAdapter"
                                    kafka-template="template"
                                    auto-startup="false"
                                    channel="inputToKafka"
                                    topic="foo"
                                    sync="false"
                                    message-key-expression="'bar'"
                                    send-failure-channel="failures"
                                    send-success-channel="successes"
                                    error-message-strategy="ems"
                                    partition-id-expression="2">
</int-kafka:outbound-channel-adapter>

<bean id="template" class="org.springframework.kafka.core.KafkaTemplate">
    <constructor-arg>
        <bean class="org.springframework.kafka.core.DefaultKafkaProducerFactory">
            <constructor-arg>
                <map>
                    <entry key="bootstrap.servers" value="localhost:9092" />
                    <!-- more producer properties -->
                </map>
            </constructor-arg>
        </bean>
    </constructor-arg>
</bean>
```

### Message-Driven Channel Adapter

The `KafkaMessageDrivenChannelAdapter` (`<int-kafka:message-driven-channel-adapter>`) uses a `spring-kafka` `KafkaMessageListenerContainer` or `ConcurrentMessageListenerContainer`.

Also the `mode` attribute is available. It can accept values of `record` or `batch` (default: `record`). For `record` mode, each message payload is converted from a single `ConsumerRecord`. For `batch` mode, the payload is a list of objects that are converted from all the `ConsumerRecord` instances returned by the consumer poll. As with the batched `@KafkaListener`, the `KafkaHeaders.RECEIVED_MESSAGE_KEY`, `KafkaHeaders.RECEIVED_PARTITION_ID`, `KafkaHeaders.RECEIVED_TOPIC`, and `KafkaHeaders.OFFSET` headers are also lists, with positions corresponding to the position in the payload.

Received messages have certain headers populated. See the [`KafkaHeaders` class](https://docs.spring.io/spring-kafka/api/org/springframework/kafka/support/KafkaHeaders.html) for more information.

> The `Consumer` object (in the `kafka_consumer` header) is not thread-safe. You must invoke its methods only on the thread that calls the listener within the adapter. If you hand off the message to another thread, you must not call its methods.

When a `retry-template` is provided, delivery failures are retried according to its retry policy. An `error-channel` is not allowed in this case. A `recovery-callback` can be used to handle the error when retries are exhausted. In most cases, this is an `ErrorMessageSendingRecoverer` that sends the `ErrorMessage` to a channel.

When building an `ErrorMessage` (for use in the `error-channel` or `recovery-callback`), you can customize the error message by setting the `error-message-strategy` property. By default, a `RawRecordHeaderErrorMessageStrategy` is used, to provide access to the converted message as well as the raw `ConsumerRecord`.

#### Java Configuration

The following example shows how to configure a message-driven channel adapter with Java:

```
@Bean
public KafkaMessageDrivenChannelAdapter<String, String>
            adapter(KafkaMessageListenerContainer<String, String> container) {
    KafkaMessageDrivenChannelAdapter<String, String> kafkaMessageDrivenChannelAdapter =
            new KafkaMessageDrivenChannelAdapter<>(container, ListenerMode.record);
    kafkaMessageDrivenChannelAdapter.setOutputChannel(received());
    return kafkaMessageDrivenChannelAdapter;
}

@Bean
public KafkaMessageListenerContainer<String, String> container() throws Exception {
    ContainerProperties properties = new ContainerProperties(this.topic);
    // set more properties
    return new KafkaMessageListenerContainer<>(consumerFactory(), properties);
}

@Bean
public ConsumerFactory<String, String> consumerFactory() {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokerAddress);
    // set more properties
    return new DefaultKafkaConsumerFactory<>(props);
}
```

#### Java DSL Configuration

The following example shows how to configure a message-driven channel adapter with the Spring Integration Java DSL:

```
@Bean
public IntegrationFlow topic1ListenerFromKafkaFlow() {
    return IntegrationFlows
            .from(Kafka.messageDrivenChannelAdapter(consumerFactory(),
                    KafkaMessageDrivenChannelAdapter.ListenerMode.record, TEST_TOPIC1)
                    .configureListenerContainer(c ->
                            c.ackMode(AbstractMessageListenerContainer.AckMode.MANUAL)
                                    .id("topic1ListenerContainer"))
                    .recoveryCallback(new ErrorMessageSendingRecoverer(errorChannel(),
                            new RawRecordHeaderErrorMessageStrategy()))
                    .retryTemplate(new RetryTemplate())
                    .filterInRetry(true))
            .filter(Message.class, m ->
                            m.getHeaders().get(KafkaHeaders.RECEIVED_MESSAGE_KEY, Integer.class) < 101,
                    f -> f.throwExceptionOnRejection(true))
            .<String, String>transform(String::toUpperCase)
            .channel(c -> c.queue("listeningFromKafkaResults1"))
            .get();
}
```

You can also use the container factory that is used for `@KafkaListener` annotations to create `ConcurrentMessageListenerContainer` instances for other purposes. See [the Spring for Apache Kafka documentation](https://docs.spring.io/spring-kafka/docs/current/reference/html/) for an example.

With the Java DSL, the container does not have to be configured as a `@Bean`, because the DSL registers the container as a bean. The following example shows how to do so:

```
@Bean
public IntegrationFlow topic2ListenerFromKafkaFlow() {
    return IntegrationFlows
            .from(Kafka.messageDrivenChannelAdapter(kafkaListenerContainerFactory().createContainer(TEST_TOPIC2),
                    KafkaMessageDrivenChannelAdapter.ListenerMode.record)
                    .id("topic2Adapter"))
            ...
            .get();
}
```

Notice that, in this case, the adapter is given an `id` (`topic2Adapter`). The container is registered in the application context with the name `topic2Adapter.container`. If the adapter does not have an `id` property, the container's bean name is the container's fully qualified class name plus `#n`, where `n` is incremented for each container.

#### XML Configuration

The following example shows how to configure a message-driven channel adapter with XML:

```
<int-kafka:message-driven-channel-adapter
        id="kafkaListener"
        listener-container="container1"
        auto-startup="false"
        phase="100"
        send-timeout="5000"
        mode="record"
        retry-template="template"
        recovery-callback="callback"
        error-message-strategy="ems"
        channel="someChannel"
        error-channel="errorChannel" />

<bean id="container1" class="org.springframework.kafka.listener.KafkaMessageListenerContainer">
    <constructor-arg>
        <bean class="org.springframework.kafka.core.DefaultKafkaConsumerFactory">
            <constructor-arg>
                <map>
                    <entry key="bootstrap.servers" value="localhost:9092" />
                    ...
                </map>
            </constructor-arg>
        </bean>
    </constructor-arg>
    <constructor-arg>
        <bean class="org.springframework.kafka.listener.ContainerProperties">
            <constructor-arg name="topics" value="foo" />
        </bean>
    </constructor-arg>
</bean>
```

### Inbound Channel Adapter

The `KafkaMessageSource` provides a pollable channel adapter implementation.

#### Java Configuration

```
@InboundChannelAdapter(channel = "fromKafka", poller = @Poller(fixedDelay = "5000"))
@Bean
public KafkaMessageSource<String, String> source(ConsumerFactory<String, String> cf) {
    KafkaMessageSource<String, String> source = new KafkaMessageSource<>(cf, "myTopic");
    source.setGroupId("myGroupId");
    source.setClientId("myClientId");
    return source;
}
```

Refer to the javadocs for available properties.

By default, `max.poll.records` must be explicitly set in the consumer factory, or it is forced to 1 if the consumer factory is a `DefaultKafkaConsumerFactory`. You can set the property `allowMultiFetch` to `true` to override this behavior.

> You must poll the consumer within `max.poll.interval.ms` to avoid a rebalance. If you set `allowMultiFetch` to `true`, you must process all the retrieved records and poll again within `max.poll.interval.ms`.

Messages emitted by this adapter contain a header, `kafka_remainingRecords`, with a count of records remaining from the previous poll.

#### Java DSL Configuration

```
@Bean
public IntegrationFlow flow(ConsumerFactory<String, String> cf) {
    return IntegrationFlows.from(Kafka.inboundChannelAdapter(cf, "myTopic")
                    .groupId("myDslGroupId"), e -> e.poller(Pollers.fixedDelay(5000)))
            .handle(System.out::println)
            .get();
}
```

#### XML Configuration

```
<int-kafka:inbound-channel-adapter
        id="adapter1"
        consumer-factory="consumerFactory"
        topics="myTopic"
        group-id="myGroupId"
        client-id="myClientId"
        channel="inbound">
    <int:poller fixed-delay="5000"/>
</int-kafka:inbound-channel-adapter>
```

### Outbound Gateway

The outbound gateway is for request/reply operations. It differs from most Spring Integration gateways in that the sending thread does not block in the gateway, and the reply is processed on the reply listener container thread. If your code invokes the gateway behind a synchronous [Messaging Gateway](https://docs.spring.io/spring-integration/reference/html/messaging-endpoints-chapter.html#gateway), the user thread blocks there until the reply is received (or a timeout occurs).

> The gateway does not accept requests until the reply container has been assigned its topics and partitions. It is suggested that you add a `ConsumerRebalanceListener` to the template's reply container properties and wait for the `onPartitionsAssigned` call before sending messages to the gateway.

The `KafkaProducerMessageHandler` `sendTimeoutExpression` default is the `delivery.timeout.ms` Kafka producer property `+ 5000` so that the actual Kafka error after a timeout is propagated to the application, instead of a timeout generated by this framework. This has been changed for consistency, because you may get unexpected behavior (Spring may time out the send while it is actually, eventually, successful). IMPORTANT: that timeout is 120 seconds by default, so you may wish to reduce it to get more timely failures.

#### Java Configuration

The following example shows how to configure a gateway with Java:

```
@Bean
@ServiceActivator(inputChannel = "kafkaRequests", outputChannel = "kafkaReplies")
public KafkaProducerMessageHandler<String, String> outGateway(
        ReplyingKafkaTemplate<String, String, String> kafkaTemplate) {
    return new KafkaProducerMessageHandler<>(kafkaTemplate);
}
```

Refer to the javadocs for available properties.
Notice that the same class as the [outbound channel adapter](#kafka-outbound) is used, the only difference being that the `KafkaTemplate` passed into the constructor is a `ReplyingKafkaTemplate`. See [the Spring for Apache Kafka documentation](https://docs.spring.io/spring-kafka/docs/current/reference/html/) for more information.

The outbound topic, partition, key, and so on are determined in the same way as with the outbound adapter. The reply topic is determined as follows:

1. A message header named `KafkaHeaders.REPLY_TOPIC` (if present, it must have a `String` or `byte[]` value) is validated against the template's reply container's subscribed topics.

2. If the template's `replyContainer` is subscribed to only one topic, it is used.

You can also specify a `KafkaHeaders.REPLY_PARTITION` header to determine a specific partition to be used for replies. Again, this is validated against the template's reply container's subscriptions.
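If the reply container listens to more than one topic, the requesting flow can set the reply topic explicitly. The following is a minimal sketch, assuming a `kafkaReplyTopic` topic to which the template's reply container is subscribed:

```
// Illustrative only: the KafkaHeaders.REPLY_TOPIC header is validated against the
// reply container's subscriptions before the request is sent.
@Bean
public IntegrationFlow outboundGateFlowWithReplyTopic(
        ReplyingKafkaTemplate<String, String, String> kafkaTemplate) {

    return IntegrationFlows.from("kafkaRequests")
            .enrichHeaders(h -> h.header(KafkaHeaders.REPLY_TOPIC, "kafkaReplyTopic"))
            .handle(Kafka.outboundGateway(kafkaTemplate))
            .channel("kafkaReplies")
            .get();
}
```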
#### Java DSL Configuration

The following example shows how to configure an outbound gateway with the Java DSL:

```
@Bean
public IntegrationFlow outboundGateFlow(
        ReplyingKafkaTemplate<String, String, String> kafkaTemplate) {

    return IntegrationFlows.from("kafkaRequests")
            .handle(Kafka.outboundGateway(kafkaTemplate))
            .channel("kafkaReplies")
            .get();
}
```

Alternatively, you can also use a configuration similar to the following bean:

```
@Bean
public IntegrationFlow outboundGateFlow() {
    return IntegrationFlows.from("kafkaRequests")
            .handle(Kafka.outboundGateway(producerFactory(), replyContainer())
                    .configureKafkaTemplate(t -> t.replyTimeout(30_000)))
            .channel("kafkaReplies")
            .get();
}
```

#### XML Configuration

```
<int-kafka:outbound-gateway
        kafka-template="template"
        channel="kafkaRequests"
        topic="someTopic"
        message-key-expression="'someKey'"
        reply-channel="kafkaReplies"
        reply-timeout="30000"
        error-message-strategy="ems"
        send-failure-channel="failures"
        send-success-channel="successes"/>
```

### Inbound Gateway

The inbound gateway is for request/reply operations.

The following example shows how to configure an inbound gateway with Java:

```
@Bean
public KafkaInboundGateway<Integer, String, String> inboundGateway(
        AbstractMessageListenerContainer<Integer, String> container,
        KafkaTemplate<Integer, String> replyTemplate) {

    KafkaInboundGateway<Integer, String, String> gateway =
            new KafkaInboundGateway<>(container, replyTemplate);
    gateway.setRequestChannel(requests);
    gateway.setReplyChannel(replies);
    gateway.setReplyTimeout(30_000);
    return gateway;
}
```

Refer to the javadocs for available properties.

The following example shows how to configure a simple upper-case converter with the Java DSL:

```
@Bean
public IntegrationFlow serverGateway(
        ConcurrentMessageListenerContainer<Integer, String> container,
        KafkaTemplate<Integer, String> replyTemplate) {
    return IntegrationFlows
            .from(Kafka.inboundGateway(container, replyTemplate)
                    .replyTimeout(30_000))
            .<String, String>transform(String::toUpperCase)
            .get();
}
```

Alternatively, you could configure an upper-case converter by using code similar to the following:

```
@Bean
public IntegrationFlow serverGateway() {
    return IntegrationFlows
            .from(Kafka.inboundGateway(consumerFactory(), containerProperties(),
                    producerFactory())
                    .replyTimeout(30_000))
            .<String, String>transform(String::toUpperCase)
            .get();
}
```

You can also use the container factory that is used for `@KafkaListener` annotations to create `ConcurrentMessageListenerContainer` instances for other purposes. See [the Spring for Apache Kafka documentation](https://docs.spring.io/spring-kafka/docs/current/reference/html/) and the [Message-Driven Channel Adapter](#kafka-inbound) for examples.

#### XML Configuration

```
<int-kafka:inbound-gateway
        id="gateway1"
        listener-container="container1"
        kafka-template="template"
        auto-startup="false"
        phase="100"
        request-timeout="5000"
        request-channel="requests"
        reply-channel="replies"
        reply-timeout="30000"
        message-converter="messageConverter"
        payload-type="java.lang.String"
        error-message-strategy="ems"
        retry-template="retryTemplate"
        recovery-callback="recoveryCallback"/>
```

See the XML schema for a description of each property.

### Channels Backed by Apache Kafka Topics

Spring Integration has `MessageChannel` implementations backed by an Apache Kafka topic for persistence.

Each channel requires a `KafkaTemplate` for the sending side and either a listener container factory (for subscribable channels) or a `KafkaMessageSource` for a pollable channel.

#### Java DSL Configuration

```
@Bean
public IntegrationFlow flowWithSubscribable(KafkaTemplate<Integer, String> template,
        ConcurrentKafkaListenerContainerFactory<Integer, String> containerFactory) {

    return IntegrationFlows.from(...)
            ...
            .channel(Kafka.channel(template, containerFactory, "someTopic1").groupId("group1"))
            ...
            .get();
}

@Bean
public IntegrationFlow flowWithPubSub(KafkaTemplate<Integer, String> template,
        ConcurrentKafkaListenerContainerFactory<Integer, String> containerFactory) {

    return IntegrationFlows.from(...)
            ...
            .publishSubscribeChannel(pubSub(template, containerFactory),
                pubsub -> pubsub
                        .subscribe(subflow -> ...)
                        .subscribe(subflow -> ...))
            .get();
}

@Bean
public BroadcastCapableChannel pubSub(KafkaTemplate<Integer, String> template,
        ConcurrentKafkaListenerContainerFactory<Integer, String> containerFactory) {

    return Kafka.publishSubscribeChannel(template, containerFactory, "someTopic2")
            .groupId("group2")
            .get();
}

@Bean
public IntegrationFlow flowWithPollable(KafkaTemplate<Integer, String> template,
        KafkaMessageSource<Integer, String> source) {

    return IntegrationFlows.from(...)
            ...
            .channel(Kafka.pollableChannel(template, source, "someTopic3").groupId("group3"))
            .handle(..., e -> e.poller(...))
            ...
            .get();
}
```

#### Java Configuration

```
/**
 * Channel for a single subscriber.
 **/
@Bean
SubscribableKafkaChannel pointToPoint(KafkaTemplate<String, String> template,
        KafkaListenerContainerFactory factory) {

    SubscribableKafkaChannel channel =
            new SubscribableKafkaChannel(template, factory, "topicA");
    channel.setGroupId("group1");
    return channel;
}

/**
 * Channel for multiple subscribers.
 **/
@Bean
SubscribableKafkaChannel pubsub(KafkaTemplate<String, String> template,
        KafkaListenerContainerFactory factory) {

    SubscribableKafkaChannel channel =
            new SubscribableKafkaChannel(template, factory, "topicB", true);
    channel.setGroupId("group2");
    return channel;
}

/**
 * Pollable channel (topic is configured on the source).
 **/
@Bean
PollableKafkaChannel pollable(KafkaTemplate<String, String> template,
        KafkaMessageSource<String, String> source) {

    PollableKafkaChannel channel =
            new PollableKafkaChannel(template, source);
    channel.setGroupId("group3");
    return channel;
}
```

#### XML Configuration

```
<int-kafka:channel kafka-template="template" id="ptp" topic="ptpTopic" group-id="ptpGroup"/>

<int-kafka:publish-subscribe-channel kafka-template="template" id="pubSub" topic="pubSubTopic" group-id="pubSubGroup"/>

<int-kafka:pollable-channel kafka-template="template" id="pollable" message-source="source"/>
```

### Message Conversion

A `StringJsonMessageConverter` is provided. See [the Spring for Apache Kafka documentation](https://docs.spring.io/spring-kafka/docs/current/reference/html/) for more information.

When using this converter with a message-driven channel adapter, you can specify the type to which you want the incoming payload to be converted. This is achieved by setting the `payload-type` attribute (`payloadType` property) on the adapter. The following example shows how to do so in XML configuration:

```
<int-kafka:message-driven-channel-adapter
        id="kafkaListener"
        listener-container="container1"
        auto-startup="false"
        phase="100"
        send-timeout="5000"
        channel="nullChannel"
        message-converter="messageConverter"
        payload-type="com.example.Foo"
        error-channel="errorChannel" />

<bean id="messageConverter"
    class="org.springframework.kafka.support.converter.MessagingMessageConverter"/>
```

The following example shows how to set the `payload-type` attribute (`payloadType` property) on the adapter in Java configuration:

```
@Bean
public KafkaMessageDrivenChannelAdapter<String, String>
            adapter(KafkaMessageListenerContainer<String, String> container) {
    KafkaMessageDrivenChannelAdapter<String, String> kafkaMessageDrivenChannelAdapter =
            new KafkaMessageDrivenChannelAdapter<>(container, ListenerMode.record);
    kafkaMessageDrivenChannelAdapter.setOutputChannel(received());
    kafkaMessageDrivenChannelAdapter.setMessageConverter(converter());
    kafkaMessageDrivenChannelAdapter.setPayloadType(Foo.class);
    return kafkaMessageDrivenChannelAdapter;
}
```

### Null Payloads and Log Compaction 'Tombstone' Records

Spring Messaging `Message` objects cannot have `null` payloads. When you use the endpoints for Apache Kafka, `null` payloads (also known as tombstone records) are represented by a payload of type `KafkaNull`. See [the Spring for Apache Kafka documentation](https://docs.spring.io/spring-kafka/docs/current/reference/html/) for more information.
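On the producing side, a tombstone can therefore be emitted by sending a `KafkaNull` payload to an outbound endpoint. The following is a minimal sketch (the channel and topic names are assumptions):

```
// Illustrative only: the KafkaNull payload becomes a null record value (a tombstone)
// for the given key on a compacted topic.
Message<KafkaNull> tombstone = MessageBuilder.withPayload(KafkaNull.INSTANCE)
        .setHeader(KafkaHeaders.MESSAGE_KEY, "someKey")
        .setHeader(KafkaHeaders.TOPIC, "someCompactedTopic")
        .build();
toKafka.send(tombstone);
```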
The POJO methods for Spring Integration endpoints can use a true `null` value instead of `KafkaNull`. To do so, mark the parameter with `@Payload(required = false)`. The following example shows how to do so:

```
@ServiceActivator(inputChannel = "fromSomeKafkaInboundEndpoint")
public void in(@Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String key,
               @Payload(required = false) Customer customer) {
    // customer is null if a tombstone record
    ...
}
```

### Calling a Spring Integration Flow from a `KStream`

You can use a `MessagingTransformer` to invoke an integration flow from a `KStream`:

```
@Bean
public KStream kStream(StreamsBuilder kStreamBuilder,
        MessagingTransformer transformer) {
    KStream stream = kStreamBuilder.stream(STREAMING_TOPIC1);
    stream.mapValues((ValueMapper) String::toUpperCase)
            ...
            .transform(() -> transformer)
            .to(streamingTopic2);

    stream.print(Printed.toSysOut());

    return stream;
}

@Bean
@DependsOn("flow")
public MessagingTransformer transformer(
        MessagingFunction function) {

    MessagingMessageConverter converter = new MessagingMessageConverter();
    converter.setHeaderMapper(new SimpleKafkaHeaderMapper("*"));
    return new MessagingTransformer<>(function, converter);
}

@Bean
public IntegrationFlow flow() {
    return IntegrationFlows.from(MessagingFunction.class)
            ...
            .get();
}
```

When an integration flow starts from an interface, the proxy that is created has the name of the flow bean, appended with ".gateway", so this bean name can be used as a `@Qualifier` if needed.
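For example, the `flow` bean above produces a `MessagingFunction` proxy that can be injected by that derived bean name. The following is a minimal sketch of how the `transformer` bean could reference it explicitly (the qualifier value follows from the flow bean being named `flow`):

```
// Illustrative only: "flow.gateway" is the bean name of the proxy created for the
// IntegrationFlow bean named "flow".
@Bean
@DependsOn("flow")
public MessagingTransformer transformer(
        @Qualifier("flow.gateway") MessagingFunction function) {

    MessagingMessageConverter converter = new MessagingMessageConverter();
    converter.setHeaderMapper(new SimpleKafkaHeaderMapper("*"));
    return new MessagingTransformer<>(function, converter);
}
```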
### Performance Considerations for read/process/write Scenarios

Many applications consume from a topic, perform some processing and write to another topic. In most cases, if the write fails, the application would want to throw an exception so the incoming request can be retried and/or sent to a dead letter topic. This functionality is supported by the underlying message listener container, together with a suitably configured error handler. However, in order to support this, we need to block the listener thread until the success (or failure) of the write operation so that any exception can be thrown to the container. When consuming single records, this is achieved by setting the `sync` property on the outbound adapter. However, when consuming batches, using `sync` causes a significant performance degradation because the application would wait for the result of each send before sending the next message. You can also perform multiple sends and then wait for the results of those sends afterwards. This is achieved by adding a `futuresChannel` to the message handler. To enable the feature, add `KafkaIntegrationHeaders.FUTURE_TOKEN` to the outbound messages; this can then be used to correlate a `Future` to a particular sent message. Here is an example of how you might use this feature:

```
@SpringBootApplication
public class FuturesChannelApplication {

    public static void main(String[] args) {
        SpringApplication.run(FuturesChannelApplication.class, args);
    }

    @Bean
    IntegrationFlow inbound(ConsumerFactory<String, String> consumerFactory, Handler handler) {
        return IntegrationFlows.from(Kafka.messageDrivenChannelAdapter(consumerFactory,
                    ListenerMode.batch, "inTopic"))
                .handle(handler)
                .get();
    }

    @Bean
    IntegrationFlow outbound(KafkaTemplate<String, String> kafkaTemplate) {
        return IntegrationFlows.from(Gate.class)
                .enrichHeaders(h -> h
                        .header(KafkaHeaders.TOPIC, "outTopic")
                        .headerExpression(KafkaIntegrationHeaders.FUTURE_TOKEN, "headers[id]"))
                .handle(Kafka.outboundChannelAdapter(kafkaTemplate)
                        .futuresChannel("futures"))
                .get();
    }

    @Bean
    PollableChannel futures() {
        return new QueueChannel();
    }

}

@Component
@DependsOn("outbound")
class Handler {

    @Autowired
    Gate gate;

    @Autowired
    PollableChannel futures;

    public void handle(List<String> input) throws Exception {
        System.out.println(input);
        input.forEach(str -> this.gate.send(str.toUpperCase()));
        for (int i = 0; i < input.size(); i++) {
            Message<?> future = this.futures.receive(10000);
            ((Future<?>) future.getPayload()).get(10, TimeUnit.SECONDS);
        }
    }

}

interface Gate {

    void send(String out);

}
```