commit 6d632b6f4717d83c964c7e15cff2cdad7dcefbb0
Author: maojian <550076202@qq.com>
Date: Fri Dec 12 18:22:51 2025 +0800
nvps rag智能检索
diff --git a/.classpath b/.classpath
new file mode 100644
index 0000000..20b2042
--- /dev/null
+++ b/.classpath
@@ -0,0 +1,40 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/.project b/.project
new file mode 100644
index 0000000..c102fec
--- /dev/null
+++ b/.project
@@ -0,0 +1,23 @@
+
+
+ RagEngine
+
+
+
+
+
+ org.eclipse.jdt.core.javabuilder
+
+
+
+
+ org.eclipse.m2e.core.maven2Builder
+
+
+
+
+
+ org.eclipse.jdt.core.javanature
+ org.eclipse.m2e.core.maven2Nature
+
+
diff --git a/.settings/org.eclipse.core.resources.prefs b/.settings/org.eclipse.core.resources.prefs
new file mode 100644
index 0000000..839d647
--- /dev/null
+++ b/.settings/org.eclipse.core.resources.prefs
@@ -0,0 +1,5 @@
+eclipse.preferences.version=1
+encoding//src/main/java=UTF-8
+encoding//src/main/resources=UTF-8
+encoding//src/test/java=UTF-8
+encoding/=UTF-8
diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..71df522
--- /dev/null
+++ b/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,9 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.methodParameters=generate
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
+org.eclipse.jdt.core.compiler.compliance=1.8
+org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled
+org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
+org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore
+org.eclipse.jdt.core.compiler.release=disabled
+org.eclipse.jdt.core.compiler.source=1.8
diff --git a/.settings/org.eclipse.m2e.core.prefs b/.settings/org.eclipse.m2e.core.prefs
new file mode 100644
index 0000000..f897a7f
--- /dev/null
+++ b/.settings/org.eclipse.m2e.core.prefs
@@ -0,0 +1,4 @@
+activeProfiles=
+eclipse.preferences.version=1
+resolveWorkspaceProjects=true
+version=1
diff --git a/logs/ragEngineInfo.log b/logs/ragEngineInfo.log
new file mode 100644
index 0000000..cd9977d
--- /dev/null
+++ b/logs/ragEngineInfo.log
@@ -0,0 +1,242 @@
+2025-11-07 18:21:17.669 [main] 55 INFO com.bw.search.Application - Starting Application on maojian with PID 72232 (D:\eclipseWork\RagEngine\target\classes started by 55007 in D:\eclipseWork\RagEngine)
+2025-11-07 18:21:17.676 [main] 651 INFO com.bw.search.Application - No active profile set, falling back to default profiles: default
+2025-11-07 18:21:17.719 [background-preinit] 127 WARN o.s.h.converter.json.Jackson2ObjectMapperBuilder - For Jackson Kotlin classes support please add "com.fasterxml.jackson.module:jackson-module-kotlin" to the classpath
+2025-11-07 18:21:18.738 [main] 92 INFO o.s.boot.web.embedded.tomcat.TomcatWebServer - Tomcat initialized with port(s): 8001 (http)
+2025-11-07 18:21:18.744 [main] 173 INFO org.apache.coyote.http11.Http11NioProtocol - Initializing ProtocolHandler ["http-nio-8001"]
+2025-11-07 18:21:18.744 [main] 173 INFO org.apache.catalina.core.StandardService - Starting service [Tomcat]
+2025-11-07 18:21:18.744 [main] 173 INFO org.apache.catalina.core.StandardEngine - Starting Servlet engine: [Apache Tomcat/9.0.30]
+2025-11-07 18:21:18.816 [main] 173 INFO o.a.c.c.ContainerBase.[Tomcat].[localhost].[/nvps] - Initializing Spring embedded WebApplicationContext
+2025-11-07 18:21:18.816 [main] 284 INFO org.springframework.web.context.ContextLoader - Root WebApplicationContext: initialization completed in 1094 ms
+2025-11-07 18:21:19.120 [main] 171 INFO o.s.scheduling.concurrent.ThreadPoolTaskExecutor - Initializing ExecutorService 'applicationTaskExecutor'
+2025-11-07 18:21:19.302 [main] 171 INFO o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'taskScheduler'
+2025-11-07 18:21:19.307 [main] 58 INFO o.s.b.actuate.endpoint.web.EndpointLinksResolver - Exposing 2 endpoint(s) beneath base path '/actuator'
+2025-11-07 18:21:19.363 [main] 347 INFO org.apache.kafka.clients.admin.AdminClientConfig - AdminClientConfig values:
+ bootstrap.servers = [node-01:19092, node-02:19092, node-03:19092]
+ client.dns.lookup = default
+ client.id =
+ connections.max.idle.ms = 300000
+ metadata.max.age.ms = 300000
+ metric.reporters = []
+ metrics.num.samples = 2
+ metrics.recording.level = INFO
+ metrics.sample.window.ms = 30000
+ receive.buffer.bytes = 65536
+ reconnect.backoff.max.ms = 1000
+ reconnect.backoff.ms = 50
+ request.timeout.ms = 120000
+ retries = 5
+ retry.backoff.ms = 100
+ sasl.client.callback.handler.class = null
+ sasl.jaas.config = null
+ sasl.kerberos.kinit.cmd = /usr/bin/kinit
+ sasl.kerberos.min.time.before.relogin = 60000
+ sasl.kerberos.service.name = null
+ sasl.kerberos.ticket.renew.jitter = 0.05
+ sasl.kerberos.ticket.renew.window.factor = 0.8
+ sasl.login.callback.handler.class = null
+ sasl.login.class = null
+ sasl.login.refresh.buffer.seconds = 300
+ sasl.login.refresh.min.period.seconds = 60
+ sasl.login.refresh.window.factor = 0.8
+ sasl.login.refresh.window.jitter = 0.05
+ sasl.mechanism = GSSAPI
+ security.protocol = PLAINTEXT
+ send.buffer.bytes = 131072
+ ssl.cipher.suites = null
+ ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
+ ssl.endpoint.identification.algorithm = https
+ ssl.key.password = null
+ ssl.keymanager.algorithm = SunX509
+ ssl.keystore.location = null
+ ssl.keystore.password = null
+ ssl.keystore.type = JKS
+ ssl.protocol = TLS
+ ssl.provider = null
+ ssl.secure.random.implementation = null
+ ssl.trustmanager.algorithm = PKIX
+ ssl.truststore.location = null
+ ssl.truststore.password = null
+ ssl.truststore.type = JKS
+
+2025-11-07 18:21:19.416 [main] 117 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 2.3.1
+2025-11-07 18:21:19.417 [main] 118 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 18a913733fb71c01
+2025-11-07 18:21:19.417 [main] 119 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1762510879415
+2025-11-07 18:21:19.705 [main] 347 INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values:
+ allow.auto.create.topics = true
+ auto.commit.interval.ms = 1000
+ auto.offset.reset = earliest
+ bootstrap.servers = [node-01:19092, node-02:19092, node-03:19092]
+ check.crcs = true
+ client.dns.lookup = default
+ client.id =
+ client.rack =
+ connections.max.idle.ms = 540000
+ default.api.timeout.ms = 60000
+ enable.auto.commit = true
+ exclude.internal.topics = true
+ fetch.max.bytes = 52428800
+ fetch.max.wait.ms = 500
+ fetch.min.bytes = 1
+ group.id = test4
+ group.instance.id = null
+ heartbeat.interval.ms = 3000
+ interceptor.classes = []
+ internal.leave.group.on.close = true
+ isolation.level = read_uncommitted
+ key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer
+ max.partition.fetch.bytes = 1048576
+ max.poll.interval.ms = 300000
+ max.poll.records = 500
+ metadata.max.age.ms = 300000
+ metric.reporters = []
+ metrics.num.samples = 2
+ metrics.recording.level = INFO
+ metrics.sample.window.ms = 30000
+ partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor]
+ receive.buffer.bytes = 65536
+ reconnect.backoff.max.ms = 1000
+ reconnect.backoff.ms = 50
+ request.timeout.ms = 30000
+ retry.backoff.ms = 100
+ sasl.client.callback.handler.class = null
+ sasl.jaas.config = null
+ sasl.kerberos.kinit.cmd = /usr/bin/kinit
+ sasl.kerberos.min.time.before.relogin = 60000
+ sasl.kerberos.service.name = null
+ sasl.kerberos.ticket.renew.jitter = 0.05
+ sasl.kerberos.ticket.renew.window.factor = 0.8
+ sasl.login.callback.handler.class = null
+ sasl.login.class = null
+ sasl.login.refresh.buffer.seconds = 300
+ sasl.login.refresh.min.period.seconds = 60
+ sasl.login.refresh.window.factor = 0.8
+ sasl.login.refresh.window.jitter = 0.05
+ sasl.mechanism = GSSAPI
+ security.protocol = PLAINTEXT
+ send.buffer.bytes = 131072
+ session.timeout.ms = 10000
+ ssl.cipher.suites = null
+ ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
+ ssl.endpoint.identification.algorithm = https
+ ssl.key.password = null
+ ssl.keymanager.algorithm = SunX509
+ ssl.keystore.location = null
+ ssl.keystore.password = null
+ ssl.keystore.type = JKS
+ ssl.protocol = TLS
+ ssl.provider = null
+ ssl.secure.random.implementation = null
+ ssl.trustmanager.algorithm = PKIX
+ ssl.truststore.location = null
+ ssl.truststore.password = null
+ ssl.truststore.type = JKS
+ value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer
+
+2025-11-07 18:21:19.724 [main] 117 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 2.3.1
+2025-11-07 18:21:19.724 [main] 118 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 18a913733fb71c01
+2025-11-07 18:21:19.724 [main] 119 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1762510879724
+2025-11-07 18:21:19.724 [main] 964 INFO org.apache.kafka.clients.consumer.KafkaConsumer - [Consumer clientId=consumer-1, groupId=test4] Subscribed to topic(s): nvps_search_data
+2025-11-07 18:21:19.726 [main] 171 INFO o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService
+2025-11-07 18:21:19.737 [main] 173 INFO org.apache.coyote.http11.Http11NioProtocol - Starting ProtocolHandler ["http-nio-8001"]
+2025-11-07 18:21:19.745 [main] 204 INFO o.s.boot.web.embedded.tomcat.TomcatWebServer - Tomcat started on port(s): 8001 (http) with context path '/nvps'
+2025-11-07 18:21:19.747 [main] 61 INFO com.bw.search.Application - Started Application in 2.35 seconds (JVM running for 2.636)
+2025-11-07 18:21:20.058 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 261 INFO org.apache.kafka.clients.Metadata - [Consumer clientId=consumer-1, groupId=test4] Cluster ID: NyNtqsIDRIae-HJZGe2pww
+2025-11-07 18:21:20.087 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 728 INFO o.a.k.c.consumer.internals.AbstractCoordinator - [Consumer clientId=consumer-1, groupId=test4] Discovered group coordinator node-03:19092 (id: 2147483644 rack: null)
+2025-11-07 18:21:20.089 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 476 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Revoking previously assigned partitions []
+2025-11-07 18:21:20.089 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 279 INFO o.s.kafka.listener.KafkaMessageListenerContainer - test4: partitions revoked: []
+2025-11-07 18:21:20.089 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 505 INFO o.a.k.c.consumer.internals.AbstractCoordinator - [Consumer clientId=consumer-1, groupId=test4] (Re-)joining group
+2025-11-07 18:21:20.207 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 505 INFO o.a.k.c.consumer.internals.AbstractCoordinator - [Consumer clientId=consumer-1, groupId=test4] (Re-)joining group
+2025-11-07 18:21:20.748 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 469 INFO o.a.k.c.consumer.internals.AbstractCoordinator - [Consumer clientId=consumer-1, groupId=test4] Successfully joined group with generation 23
+2025-11-07 18:21:20.750 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 283 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting newly assigned partitions: nvps_search_data-7, nvps_search_data-8, nvps_search_data-1, nvps_search_data-2, nvps_search_data-0, nvps_search_data-5, nvps_search_data-6, nvps_search_data-3, nvps_search_data-4
+2025-11-07 18:21:20.789 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-7 to the committed offset FetchPosition{offset=0, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=node-03:19092 (id: 3 rack: null), epoch=0}}
+2025-11-07 18:21:20.789 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-8 to the committed offset FetchPosition{offset=1, offsetEpoch=Optional[0], currentLeader=LeaderAndEpoch{leader=node-01:19092 (id: 1 rack: null), epoch=0}}
+2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-1 to the committed offset FetchPosition{offset=0, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=node-03:19092 (id: 3 rack: null), epoch=0}}
+2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-2 to the committed offset FetchPosition{offset=3, offsetEpoch=Optional[0], currentLeader=LeaderAndEpoch{leader=node-01:19092 (id: 1 rack: null), epoch=0}}
+2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-0 to the committed offset FetchPosition{offset=1, offsetEpoch=Optional[0], currentLeader=LeaderAndEpoch{leader=node-02:19092 (id: 2 rack: null), epoch=0}}
+2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-5 to the committed offset FetchPosition{offset=0, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=node-01:19092 (id: 1 rack: null), epoch=0}}
+2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-6 to the committed offset FetchPosition{offset=0, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=node-02:19092 (id: 2 rack: null), epoch=0}}
+2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-3 to the committed offset FetchPosition{offset=1, offsetEpoch=Optional[0], currentLeader=LeaderAndEpoch{leader=node-02:19092 (id: 2 rack: null), epoch=0}}
+2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-4 to the committed offset FetchPosition{offset=0, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=node-03:19092 (id: 3 rack: null), epoch=0}}
+2025-11-07 18:21:20.947 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 279 INFO o.s.kafka.listener.KafkaMessageListenerContainer - test4: partitions assigned: [nvps_search_data-7, nvps_search_data-8, nvps_search_data-1, nvps_search_data-2, nvps_search_data-0, nvps_search_data-5, nvps_search_data-6, nvps_search_data-3, nvps_search_data-4]
+2025-11-07 18:21:41.726 [http-nio-8001-exec-2] 173 INFO o.a.c.c.ContainerBase.[Tomcat].[localhost].[/nvps] - Initializing Spring DispatcherServlet 'dispatcherServlet'
+2025-11-07 18:21:41.727 [http-nio-8001-exec-2] 525 INFO org.springframework.web.servlet.DispatcherServlet - Initializing Servlet 'dispatcherServlet'
+2025-11-07 18:21:41.733 [http-nio-8001-exec-2] 547 INFO org.springframework.web.servlet.DispatcherServlet - Completed initialization in 6 ms
+2025-11-07 18:21:41.881 [http-nio-8001-exec-2] 48 INFO com.bw.search.service.impl.RagSearchServiceImpl - 知识库检索开始-----
+2025-11-07 18:21:41.895 [http-nio-8001-exec-2] 28 INFO com.bw.search.utils.SpringBootKafka - 准备发送消息为:"{\"id\":\"id-1\",\"keyword\":\"Michael J. Aziz\"}"
+2025-11-07 18:21:41.902 [http-nio-8001-exec-2] 347 INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values:
+ acks = 1
+ batch.size = 16384
+ bootstrap.servers = [node-01:19092, node-02:19092, node-03:19092]
+ buffer.memory = 33554432
+ client.dns.lookup = default
+ client.id =
+ compression.type = none
+ connections.max.idle.ms = 540000
+ delivery.timeout.ms = 120000
+ enable.idempotence = false
+ interceptor.classes = []
+ key.serializer = class org.apache.kafka.common.serialization.StringSerializer
+ linger.ms = 0
+ max.block.ms = 60000
+ max.in.flight.requests.per.connection = 5
+ max.request.size = 1048576
+ metadata.max.age.ms = 300000
+ metric.reporters = []
+ metrics.num.samples = 2
+ metrics.recording.level = INFO
+ metrics.sample.window.ms = 30000
+ partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
+ receive.buffer.bytes = 32768
+ reconnect.backoff.max.ms = 1000
+ reconnect.backoff.ms = 50
+ request.timeout.ms = 30000
+ retries = 0
+ retry.backoff.ms = 100
+ sasl.client.callback.handler.class = null
+ sasl.jaas.config = null
+ sasl.kerberos.kinit.cmd = /usr/bin/kinit
+ sasl.kerberos.min.time.before.relogin = 60000
+ sasl.kerberos.service.name = null
+ sasl.kerberos.ticket.renew.jitter = 0.05
+ sasl.kerberos.ticket.renew.window.factor = 0.8
+ sasl.login.callback.handler.class = null
+ sasl.login.class = null
+ sasl.login.refresh.buffer.seconds = 300
+ sasl.login.refresh.min.period.seconds = 60
+ sasl.login.refresh.window.factor = 0.8
+ sasl.login.refresh.window.jitter = 0.05
+ sasl.mechanism = GSSAPI
+ security.protocol = PLAINTEXT
+ send.buffer.bytes = 131072
+ ssl.cipher.suites = null
+ ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
+ ssl.endpoint.identification.algorithm = https
+ ssl.key.password = null
+ ssl.keymanager.algorithm = SunX509
+ ssl.keystore.location = null
+ ssl.keystore.password = null
+ ssl.keystore.type = JKS
+ ssl.protocol = TLS
+ ssl.provider = null
+ ssl.secure.random.implementation = null
+ ssl.trustmanager.algorithm = PKIX
+ ssl.truststore.location = null
+ ssl.truststore.password = null
+ ssl.truststore.type = JKS
+ transaction.timeout.ms = 60000
+ transactional.id = null
+ value.serializer = class org.apache.kafka.common.serialization.StringSerializer
+
+2025-11-07 18:21:41.918 [http-nio-8001-exec-2] 117 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 2.3.1
+2025-11-07 18:21:41.919 [http-nio-8001-exec-2] 118 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 18a913733fb71c01
+2025-11-07 18:21:41.919 [http-nio-8001-exec-2] 119 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1762510901918
+2025-11-07 18:21:42.058 [kafka-producer-network-thread | producer-1] 261 INFO org.apache.kafka.clients.Metadata - [Producer clientId=producer-1] Cluster ID: NyNtqsIDRIae-HJZGe2pww
+2025-11-07 18:21:42.068 [http-nio-8001-exec-2] 116 INFO com.bw.search.service.impl.RagSearchServiceImpl - 知识获取中请稍后...
+2025-11-07 18:21:42.175 [kafka-producer-network-thread | producer-1] 41 INFO com.bw.search.utils.SpringBootKafka - nvps_know_base - 生产者 发送消息成功:SendResult [producerRecord=ProducerRecord(topic=nvps_know_base, partition=null, headers=RecordHeaders(headers = [], isReadOnly = true), key=null, value={"id":"id-1","keyword":"Michael J. Aziz"}, timestamp=null), recordMetadata=nvps_know_base-7@1]
+2025-11-07 18:21:43.085 [http-nio-8001-exec-2] 116 INFO com.bw.search.service.impl.RagSearchServiceImpl - 知识获取中请稍后...
+2025-11-07 18:21:44.092 [http-nio-8001-exec-2] 116 INFO com.bw.search.service.impl.RagSearchServiceImpl - 知识获取中请稍后...
+2025-11-07 18:21:45.106 [http-nio-8001-exec-2] 116 INFO com.bw.search.service.impl.RagSearchServiceImpl - 知识获取中请稍后...
+2025-11-07 18:21:45.770 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 35 INFO com.bw.search.process.ResultSendQueue - 消费知识:id-1
+2025-11-07 18:21:46.128 [http-nio-8001-exec-2] 51 INFO com.bw.search.service.impl.RagSearchServiceImpl - 知识库结果已获取:{"isLast":1,"promptRes":"已知信息:\n{\"college\":\"Bertoldi Group: Solid Mechanics\",\"phone\":\"(617) 496-3084\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\",\"Materials\",\"Materials\",\"Solid Mechanics\"],\"name\":\"Katia Bertoldi\",\"position\":\"William and Ami Kuan Danoff Professor of Applied Mechanics\",\"email\":\"bertoldi@seas.harvard.edu\"}\n{\"college\":\"\",\"phone\":\"(617) 495-6012\",\"researchFocus\":[\"Materials\",\"Robotics and Control\",\"Robotics and Control\",\"Materials\",\"Robotics and Control\",\"Design and Innovation\"],\"name\":\"Martin Bechthold\",\"position\":\"Kumagai Professor of Architectural Technology; Affiliate in Materials Science & Mechanical Engineering\",\"email\":\"mbechthold@gsd.harvard.edu\"}\n{\"college\":\"Anderson Group\",\"phone\":\"(617) 998-5550\",\"researchFocus\":[\"Atmospheric Chemistry\",\"Climate Change\",\"Solar Geoengineering\"],\"name\":\"James G. Anderson\",\"position\":\"Philip S. Weld Professor of Atmospheric Chemistry\",\"email\":\"anderson@huarp.harvard.edu\"}\n{\"college\":\"Markus Basan Lab\",\"phone\":\"\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\",\"Soft Matter\",\"Soft Matter\"],\"name\":\"Markus Thomas Basan\",\"position\":\"Assistant Professor of Systems Biology; Affiliate in Applied Physics\",\"email\":\"markus@hms.harvard.edu\"}\n{\"college\":\"\",\"phone\":\"\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\"],\"name\":\"William H. Bossert\",\"position\":\"David B. Arnold, Jr. Professor of Science, Emeritus\",\"email\":\"bossert@seas.harvard.edu\"}\n\n参考上述已知信息回答问题:\nMichael J. 
Aziz","ids":"[\"7dc72740-bbc3-11f0-9abc-00163e03f4d5\", \"7a5654fa-bbc3-11f0-9abc-00163e03f4d5\", \"783b6c46-bbc3-11f0-9abc-00163e03f4d5\", \"78ac3a02-bbc3-11f0-9abc-00163e03f4d5\", \"7d5e213c-bbc3-11f0-9abc-00163e03f4d5\"]","id":"id-1","keyword":"Michael J. Aziz"}
+2025-11-07 18:21:46.141 [http-nio-8001-exec-2] 98 INFO com.bw.search.service.impl.RagSearchServiceImpl - 模型请求信息:url:https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions,header:{"authorization":"Bearer sk-[REDACTED]","Content-Type":"application/json"},params:{"top_p":0.95,"stream":false,"max_tokens":8192,"temperature":0.7,"messages":[{"role":"user","content":"You are a professional academic profile writer.Task:You will receive multiple lines of text. Each line contains either:- A plain JSON object with researcher information, OR - A wrapper object that includes a \"value\" field containing the actual researcher JSON string.For each line, extract the researcher information and generate a concise academic biography in English (3–5 sentences). Avoid listing items mechanically — make it sound natural and professional. If some fields (like phone or email) are missing, just skip them. Output one biography paragraph per person, separated by blank lines.Input example:{\"college\":\"Aizenberg Biomineralization and Biomimetics Lab\",\"phone\":\"(617) 495-3558\",\"researchFocus\":[\"Biomaterials\",\"Soft Matter\",\"Surface and Interface Science\"],\"name\":\"Joanna Aizenberg\",\"position\":\"Amy Smith Berylson Professor of Materials Science and Professor of Chemistry & Chemical Biology\",\"email\":\"jaiz@seas.harvard.edu\"}{\"college\":\"Bertoldi Group: Solid Mechanics\",\"phone\":\"(617) 496-3084\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\",\"Materials\",\"Solid Mechanics\"],\"name\":\"Katia Bertoldi\",\"position\":\"William and Ami Kuan Danoff Professor of Applied Mechanics\",\"email\":\"bertoldi@seas.harvard.edu\"}Output format:Write one paragraph per person, separated by a blank line. 
Each paragraph should summarize their title, affiliation, and main research interests.Now generate the biographies for the following input:已知信息:\n{\"college\":\"Bertoldi Group: Solid Mechanics\",\"phone\":\"(617) 496-3084\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\",\"Materials\",\"Materials\",\"Solid Mechanics\"],\"name\":\"Katia Bertoldi\",\"position\":\"William and Ami Kuan Danoff Professor of Applied Mechanics\",\"email\":\"bertoldi@seas.harvard.edu\"}\n{\"college\":\"\",\"phone\":\"(617) 495-6012\",\"researchFocus\":[\"Materials\",\"Robotics and Control\",\"Robotics and Control\",\"Materials\",\"Robotics and Control\",\"Design and Innovation\"],\"name\":\"Martin Bechthold\",\"position\":\"Kumagai Professor of Architectural Technology; Affiliate in Materials Science & Mechanical Engineering\",\"email\":\"mbechthold@gsd.harvard.edu\"}\n{\"college\":\"Anderson Group\",\"phone\":\"(617) 998-5550\",\"researchFocus\":[\"Atmospheric Chemistry\",\"Climate Change\",\"Solar Geoengineering\"],\"name\":\"James G. Anderson\",\"position\":\"Philip S. Weld Professor of Atmospheric Chemistry\",\"email\":\"anderson@huarp.harvard.edu\"}\n{\"college\":\"Markus Basan Lab\",\"phone\":\"\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\",\"Soft Matter\",\"Soft Matter\"],\"name\":\"Markus Thomas Basan\",\"position\":\"Assistant Professor of Systems Biology; Affiliate in Applied Physics\",\"email\":\"markus@hms.harvard.edu\"}\n{\"college\":\"\",\"phone\":\"\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\"],\"name\":\"William H. Bossert\",\"position\":\"David B. Arnold, Jr. Professor of Science, Emeritus\",\"email\":\"bossert@seas.harvard.edu\"}\n\n参考上述已知信息回答问题:\nMichael J. Aziz"}],"model":"qwen-max-latest"}
+2025-11-07 18:22:00.732 [http-nio-8001-exec-2] 61 INFO com.bw.search.service.impl.RagSearchServiceImpl - 问答结果:{"choices":[{"message":{"role":"assistant","content":"Katia Bertoldi is the William and Ami Kuan Danoff Professor of Applied Mechanics at Harvard University, affiliated with the Bertoldi Group in Solid Mechanics. Her research focuses on modeling physical and biological phenomena, materials science, and solid mechanics, contributing significantly to the understanding of complex systems and their behaviors.\n\nMartin Bechthold holds the position of Kumagai Professor of Architectural Technology and is also affiliated with Materials Science and Mechanical Engineering at Harvard. His work centers on advancing materials innovation, robotics and control systems, and design principles, bridging architecture and engineering through cutting-edge technology.\n\nJames G. Anderson serves as the Philip S. Weld Professor of Atmospheric Chemistry in the Anderson Group at Harvard. His pioneering research explores atmospheric chemistry, climate change, and solar geoengineering, addressing critical environmental challenges with far-reaching implications for global sustainability.\n\nMarkus Thomas Basan is an Assistant Professor of Systems Biology with an affiliation in Applied Physics at Harvard Medical School. Leading the Markus Basan Lab, his research investigates the modeling of physical and biological systems, with a particular emphasis on soft matter, contributing to interdisciplinary approaches in life sciences.\n\nWilliam H. Bossert, the David B. Arnold, Jr. Professor of Science, Emeritus, has made significant contributions to the field of modeling physical and biological phenomena. His academic legacy continues to inspire advancements in scientific understanding, even as he steps back from active research.\n\nNo information is available in the provided data about Michael J. Aziz. 
Additional details would be needed to construct his academic biography."},"finish_reason":"stop","index":0,"logprobs":null}],"object":"chat.completion","usage":{"prompt_tokens":708,"completion_tokens":307,"total_tokens":1015},"created":1762510923,"system_fingerprint":null,"model":"qwen-max-latest","id":"chatcmpl-5751ed60-460e-4e8a-bb07-90cb997e9568"}
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..7ffe66c
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,237 @@
+
+ 4.0.0
+
+ org.springframework.boot
+ spring-boot-starter-parent
+ 2.2.4.RELEASE
+
+ com.bw
+ RagEngine
+ 0.0.1-SNAPSHOT
+ jar
+
+ RagEngine
+ http://maven.apache.org
+
+
+ UTF-8
+ 1.8
+ 1.8
+
+
+
+
+ junit
+ junit
+ 4.11
+ test
+
+
+ org.springframework.boot
+ spring-boot-starter-web
+
+
+
+ de.codecentric
+ spring-boot-admin-starter-client
+ 2.2.4
+
+
+ com.google.code.gson
+ gson
+ 2.8.8
+
+
+ org.springframework.boot
+ spring-boot-test
+
+
+
+ org.springframework
+ spring-test
+ 5.0.10.RELEASE
+ test
+
+
+ org.springframework.kafka
+ spring-kafka
+
+
+ commons-io
+ commons-io
+ 2.11.0
+
+
+
+ com.alibaba
+ fastjson
+ 2.0.17
+
+
+
+ com.mchange
+ c3p0
+ 0.9.5.5
+
+
+
+ com.squareup.okhttp3
+ okhttp
+ 4.9.3
+
+
+ org.apache.httpcomponents
+ httpclient
+ 4.5.3
+
+
+ commons-lang
+ commons-lang
+ 2.6
+
+
+
+ org.jetbrains.kotlin
+ kotlin-reflect
+ 1.6.21
+ runtime
+
+
+
+
+ org.apache.logging.log4j
+ log4j-api
+ 2.14.1
+
+
+ org.apache.logging.log4j
+ log4j-core
+ 2.14.1
+
+
+
+ org.projectlombok
+ lombok
+
+
+ cn.hutool
+ hutool-all
+ 5.8.5
+
+
+ junit
+ junit
+
+
+
+ p6spy
+ p6spy
+ 3.9.0
+
+
+
+ commons-collections
+ commons-collections
+ 3.2.2
+
+
+
+
+
+
+
+
+
+ maven-clean-plugin
+ 3.1.0
+
+
+
+ maven-resources-plugin
+ 3.0.2
+
+
+ maven-compiler-plugin
+ 3.8.0
+
+
+ maven-surefire-plugin
+ 2.22.1
+
+
+ maven-jar-plugin
+ 3.0.2
+
+
+ maven-install-plugin
+ 2.5.2
+
+
+ maven-deploy-plugin
+ 2.8.2
+
+
+
+ maven-site-plugin
+ 3.7.1
+
+
+ maven-project-info-reports-plugin
+ 3.0.0
+
+
+
+
+ org.springframework.boot
+ spring-boot-maven-plugin
+
+ com.bw.search.Application
+ ZIP
+
+
+ ${project.groupId}
+ ${project.artifactId}
+
+
+
+
+
+
+ repackage
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-dependency-plugin
+ 3.1.1
+
+
+ copy
+ package
+
+ copy-dependencies
+
+
+ jar
+ jar
+ runtime
+ ${project.build.directory}/libs
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/prompt b/prompt
new file mode 100644
index 0000000..45ba0db
--- /dev/null
+++ b/prompt
@@ -0,0 +1,46 @@
+You are a professional academic profile writer.
+
+### Language Requirement
+
+Determine the output language dynamically based on the keyword content.
+If the keyword is in Arabic, output Arabic.
+If the keyword is in Chinese, output Chinese.
+If in English, output English.
+If ambiguous, default to English.
+Use the exact same language for all generated biographies.
+
+### Task
+
+You will receive multiple lines of text.
+Each line contains either:
+ A plain JSON object with researcher information, OR
+ A wrapper object that includes a "value" field containing the actual researcher JSON string.
+
+For each line:
+ Extract the researcher introduction.
+ Generate an abstract summary biography (1–3 sentences).
+ Avoid mechanical listing — write naturally and academically.
+ If some fields (phone, email, etc.) are missing, simply omit them.
+ Output one biography paragraph per person.
+ Insert exactly one blank line between different people’s biographies.
+ Do not merge multiple people into one paragraph.
+
+Input Example:
+ {"college":"Aizenberg Biomineralization and Biomimetics Lab","phone":"(617) 495-3558","researchFocus":["Biomaterials","Soft Matter","Surface and Interface Science"],"name":"Joanna Aizenberg","position":"Amy Smith Berylson Professor of Materials Science and Professor of Chemistry & Chemical Biology","email":"jaiz@seas.harvard.edu"}
+{"college":"Bertoldi Group: Solid Mechanics","phone":"(617) 496-3084","researchFocus":["Modeling Physical/Biological Phenomena and Systems","Materials","Solid Mechanics"],"name":"Katia Bertoldi","position":"William and Ami Kuan Danoff Professor of Applied Mechanics","email":"bertoldi@seas.harvard.edu"}
+
+Output Format:
+ Name: biography.
+
+ (blank line)
+
+ Name: biography.
+
+Each biography separated by one empty line.
+Written entirely in the language inferred from the keyword.
+
+keyword (used to automatically detect the output language):
+${keyword}
+
+Now generate the biographies for the following input:
+${information}
\ No newline at end of file
diff --git a/prompt-test b/prompt-test
new file mode 100644
index 0000000..31ed8b2
--- /dev/null
+++ b/prompt-test
@@ -0,0 +1,39 @@
+You are a professional academic profile writer.
+
+Language Requirement
+
+Determine the output language dynamically based on the keyword content.
+If the keyword is in Arabic, output Arabic.
+If in English, output English.
+If ambiguous, default to English.
+Use the exact same language for all generated biographies.
+
+Task
+
+You will receive multiple lines of text.
+Each line contains either:
+A plain JSON object with researcher information, OR
+A wrapper object that includes a "value" field containing the actual researcher JSON string.
+For each line:
+Extract the researcher introduction.
+Generate an abstract summary biography (1–3 sentences).
+Avoid mechanical listing — write naturally and academically.
+If some fields (phone, email, etc.) are missing, simply omit them.
+Output one biography paragraph per person, separated by a blank line.
+
+Input Example
+{"college":"Aizenberg Biomineralization and Biomimetics Lab","phone":"(617) 495-3558","researchFocus":["Biomaterials","Soft Matter","Surface and Interface Science"],"name":"Joanna Aizenberg","position":"Amy Smith Berylson Professor of Materials Science and Professor of Chemistry & Chemical Biology","email":"jaiz@seas.harvard.edu
+"}
+{"college":"Bertoldi Group: Solid Mechanics","phone":"(617) 496-3084","researchFocus":["Modeling Physical/Biological Phenomena and Systems","Materials","Solid Mechanics"],"name":"Katia Bertoldi","position":"William and Ami Kuan Danoff Professor of Applied Mechanics","email":"bertoldi@seas.harvard.edu
+"}
+
+Output Format
+Name: biography.
+Paragraphs separated by a blank line
+Written entirely in the language inferred from keyword
+
+keyword(用于自动识别语种):
+${keyword}
+
+Now generate the biographies for the following input:
+${information}
\ No newline at end of file
diff --git a/src/main/java/com/bw/search/Application.java b/src/main/java/com/bw/search/Application.java
new file mode 100644
index 0000000..95e0ab7
--- /dev/null
+++ b/src/main/java/com/bw/search/Application.java
@@ -0,0 +1,25 @@
+package com.bw.search;
+
+
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.kafka.annotation.EnableKafka;
+import org.springframework.scheduling.annotation.EnableScheduling;
+
/**
 * Main entry point: boots the Spring application context with
 * scheduled tasks (@EnableScheduling) and Kafka listener support
 * (@EnableKafka) enabled.
 *
 * @author jian.mao
 * @date 2023-07-04
 */
@SpringBootApplication
@EnableScheduling
@EnableKafka
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
\ No newline at end of file
diff --git a/src/main/java/com/bw/search/cache/ConfigCache.java b/src/main/java/com/bw/search/cache/ConfigCache.java
new file mode 100644
index 0000000..3007954
--- /dev/null
+++ b/src/main/java/com/bw/search/cache/ConfigCache.java
@@ -0,0 +1,22 @@
+package com.bw.search.cache;
+
+import lombok.extern.slf4j.Slf4j;
+
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
+
/**
 * Shared in-memory store for knowledge-base results.
 *
 * <p>Entries are written by the Kafka consumer thread (see ResultSendQueue)
 * and read by web-request threads, so the maps must be thread-safe;
 * ConcurrentHashMap replaces the original unsynchronized HashMap.
 *
 * @author jian.mao
 * @date 2022-11-11
 */
public class ConfigCache {

    /** Startup flag. NOTE(review): read/written without synchronization — confirm single-threaded use or make volatile. */
    public static boolean isStart = true;
    /** Knowledge-base results keyed by id — consumed by the model-summary flow. */
    public static Map<String, Object> baseResult = new ConcurrentHashMap<>(16);
    /** Knowledge-base results keyed by id — consumed by the page-query flow. */
    public static Map<String, Object> searchResult = new ConcurrentHashMap<>(16);

    private ConfigCache() {
        // static holder; not instantiable
    }
}
diff --git a/src/main/java/com/bw/search/common/Res.java b/src/main/java/com/bw/search/common/Res.java
new file mode 100644
index 0000000..29460a6
--- /dev/null
+++ b/src/main/java/com/bw/search/common/Res.java
@@ -0,0 +1,61 @@
+package com.bw.search.common;
+
+/**
+ * 通用返回对象
+ *
+ * @author jian.mao
+ * @date 2025年9月17日
+ * @description
+ * @param
+ */
+
+public class Res {
+ private int resCode;
+ private String resMsg;
+ private T data;
+
+ public Res() {
+ }
+
+ public Res(int resCode, String resMsg, T data) {
+ this.resCode = resCode;
+ this.resMsg = resMsg;
+ this.data = data;
+ }
+
+ public static Res ok(T data) {
+ return new Res(ResponseCode.SUCCESS.code(), ResponseCode.SUCCESS.message(), data);
+ }
+
+ public static Res fail(String msg) {
+ return new Res(ResponseCode.FAIL.code(), msg, null);
+ }
+ public static Res checkError(T error) {
+ return new Res(ResponseCode.FAIL.code(), ResponseCode.CHECKERROR.message(), error);
+ }
+
+ // getter & setter
+ public int getResCode() {
+ return resCode;
+ }
+
+ public void setResCode(int resCode) {
+ this.resCode = resCode;
+ }
+
+ public String getResMsg() {
+ return resMsg;
+ }
+
+ public void setResMsg(String resMsg) {
+ this.resMsg = resMsg;
+ }
+
+ public T getData() {
+ return data;
+ }
+
+ public void setData(T data) {
+ this.data = data;
+ }
+}
diff --git a/src/main/java/com/bw/search/common/ResponseCode.java b/src/main/java/com/bw/search/common/ResponseCode.java
new file mode 100644
index 0000000..ee4c4c3
--- /dev/null
+++ b/src/main/java/com/bw/search/common/ResponseCode.java
@@ -0,0 +1,30 @@
+package com.bw.search.common;
+
/**
 * Response status codes shared by all API results.
 *
 * @author jian.mao
 * @date 2025-09-17
 */
public enum ResponseCode {

    /** Operation completed successfully. */
    SUCCESS(0, "success"),
    /** Generic failure. */
    FAIL(-1, "fail"),
    /** Request-parameter validation failure. */
    CHECKERROR(400, "paramsError");

    /** Numeric code returned to the client. */
    private final int code;
    /** Human-readable message returned to the client. */
    private final String message;

    ResponseCode(int code, String message) {
        this.code = code;
        this.message = message;
    }

    /** @return the numeric status code */
    public int code() {
        return this.code;
    }

    /** @return the status message */
    public String message() {
        return this.message;
    }
}
diff --git a/src/main/java/com/bw/search/config/EsConfig.java b/src/main/java/com/bw/search/config/EsConfig.java
new file mode 100644
index 0000000..2d6af1f
--- /dev/null
+++ b/src/main/java/com/bw/search/config/EsConfig.java
@@ -0,0 +1,16 @@
+package com.bw.search.config;
+
+import lombok.Data;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+
/**
 * Elasticsearch connection settings bound from "es.*" configuration
 * properties (es.host, es.username, es.password, es.index).
 * Getters/setters are generated by Lombok's {@code @Data}.
 */
@Data
@Component
@ConfigurationProperties(prefix = "es")
public class EsConfig {

    // ES endpoint host — presumably "host:port" or a full URL; confirm against application config
    private String host;
    private String username;
    private String password;
    // index name used by searches
    private String index;
}
diff --git a/src/main/java/com/bw/search/config/KafkaConfig.java b/src/main/java/com/bw/search/config/KafkaConfig.java
new file mode 100644
index 0000000..33e9060
--- /dev/null
+++ b/src/main/java/com/bw/search/config/KafkaConfig.java
@@ -0,0 +1,20 @@
+package com.bw.search.config;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Configuration;
+
/**
 * Kafka configuration: exposes the consumer topic name bound from the
 * "kafka.consumer.topic" property.
 *
 * @author jian.mao
 * @date 2023-07-06
 */
@Configuration
public class KafkaConfig {
    // Topic consumed by ResultSendQueue, which references this bean via
    // SpEL: @KafkaListener(topics = "#{kafkaConfig.getKafkaTopic()}")
    @Value("${kafka.consumer.topic}")
    private String kafkaTopic;

    /** @return the configured consumer topic name */
    public String getKafkaTopic() {
        return kafkaTopic;
    }
}
diff --git a/src/main/java/com/bw/search/controller/RagSearchController.java b/src/main/java/com/bw/search/controller/RagSearchController.java
new file mode 100644
index 0000000..a343dce
--- /dev/null
+++ b/src/main/java/com/bw/search/controller/RagSearchController.java
@@ -0,0 +1,65 @@
+package com.bw.search.controller;
+
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.MediaType;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.CrossOrigin;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.ResponseBody;
+import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
+
+import com.bw.search.common.Res;
+import com.bw.search.service.ModelStreamService;
+import com.bw.search.service.RagSearchService;
+
+import lombok.extern.slf4j.Slf4j;
+
+
+@Controller
+@CrossOrigin
+@RequestMapping("/api")
+@Slf4j
+public class RagSearchController {
+ @Autowired
+ private RagSearchService ragSearchService;
+ @Autowired
+ private ModelStreamService modelStreamService;
+ @PostMapping("/search")
+ @ResponseBody
+ public Res> search(@RequestBody String dataJson){
+ return ragSearchService.search(dataJson);
+ }
+
+ @PostMapping(value = "/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
+ public SseEmitter stream(@RequestBody String dataJson) {
+
+ SseEmitter emitter = new SseEmitter(0L);
+
+ modelStreamService.stream(dataJson, new ModelStreamService.StreamListener() {
+
+ @Override
+ public void onMessage(String text) {
+ try {
+ emitter.send(text);
+ } catch (Exception e) {
+ emitter.completeWithError(e);
+ }
+ }
+
+ @Override
+ public void onComplete() {
+ emitter.complete();
+ }
+
+ @Override
+ public void onError(Throwable t) {
+ emitter.completeWithError(t);
+ }
+ });
+
+ return emitter;
+ }
+}
\ No newline at end of file
diff --git a/src/main/java/com/bw/search/entity/Constants.java b/src/main/java/com/bw/search/entity/Constants.java
new file mode 100644
index 0000000..0864836
--- /dev/null
+++ b/src/main/java/com/bw/search/entity/Constants.java
@@ -0,0 +1,51 @@
+package com.bw.search.entity;
+
/**
 * String constants shared across the search service.
 *
 * @author jian.mao
 * @date 2025-11-06
 */
public final class Constants {

    private Constants() {
        // constants holder; not instantiable
    }

    /** Record id key. */
    public static final String ID = "id";
    /** Id list key. */
    public static final String IDS = "ids";

    /** Search keyword key. */
    public static final String KEYWORD = "keyword";

    /** Knowledge-base references key. */
    public static final String REFERENCES = "references";

    /** Chat-completion request parameter keys. */
    public static final String AUTHORIZATION = "authorization";
    public static final String TEMPERATURE = "temperature";
    public static final String TOP_P = "top_p";
    public static final String MODEL = "model";
    public static final String PROMPT = "prompt";
    public static final String MASK_SENSITIVE_INFO = "mask_sensitive_info";
    public static final String MAX_TOKENS = "max_tokens";
    public static final String CHOICES = "choices";
    public static final String CONTENT = "content";

    /** Chat message list key. */
    public static final String MESSAGES = "messages";
    /** Streaming flag key. */
    public static final String STREAM = "stream";
    /** Response description key. */
    public static final String MESSAGE = "message";
    /** Empty string constant. */
    public static final String EMPTY = "";
    /** Vector result key. */
    public static final String PROMPTRES = "promptRes";
    /** Chat role value "user". */
    public static final String USER = "user";
    /** Chat role key. */
    public static final String ROLE = "role";
    /** Keyword placeholder in the prompt template. */
    public static final String KEYWORD_MARK = "${keyword}";
    /** Personal-information placeholder in the prompt template. */
    public static final String INFORMATION_MARK = "${information}";
}
diff --git a/src/main/java/com/bw/search/entity/SearchResponse.java b/src/main/java/com/bw/search/entity/SearchResponse.java
new file mode 100644
index 0000000..2b51665
--- /dev/null
+++ b/src/main/java/com/bw/search/entity/SearchResponse.java
@@ -0,0 +1,24 @@
+package com.bw.search.entity;
+
+import java.util.List;
+
+import lombok.Data;
+
+@Data
+public class SearchResponse {
+
+ private String keyword;
+
+ private List ids;
+
+ public SearchResponse() {
+ }
+
+ public SearchResponse(String keyword, List ids) {
+ this.keyword = keyword;
+ this.ids = ids;
+ }
+
+
+
+}
diff --git a/src/main/java/com/bw/search/process/ResultSendQueue.java b/src/main/java/com/bw/search/process/ResultSendQueue.java
new file mode 100644
index 0000000..4ed76be
--- /dev/null
+++ b/src/main/java/com/bw/search/process/ResultSendQueue.java
@@ -0,0 +1,41 @@
+package com.bw.search.process;
+
+import java.util.Map;
+
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.stereotype.Component;
+
+import com.alibaba.fastjson.JSONObject;
+import com.bw.search.cache.ConfigCache;
+import com.bw.search.entity.Constants;
+
+import lombok.extern.slf4j.Slf4j;
+
+
+/**
+ * 接口响应结果操作类
+ * @author jian.mao
+ * @date 2023年7月6日
+ * @description
+ */
+@Component
+@Slf4j
+public class ResultSendQueue {
+ /**
+ * kafka读取结果写入队列
+ * @param message
+ */
+ @KafkaListener(topics = "#{kafkaConfig.getKafkaTopic()}")
+ public void consumeMessage(String message) {
+ // 处理接收到的消息逻辑
+ try {
+ Map result = JSONObject.parseObject(message);
+ String key = result.get(Constants.ID).toString();
+ ConfigCache.baseResult.put(key, result);
+ ConfigCache.searchResult.put(key, result);
+ log.info("消费知识:{}",key);
+ } catch (Exception e) {
+ log.error("结果集json转换失败,result:{},\n",message,e);
+ }
+ }
+}
diff --git a/src/main/java/com/bw/search/service/ModelStreamService.java b/src/main/java/com/bw/search/service/ModelStreamService.java
new file mode 100644
index 0000000..0071790
--- /dev/null
+++ b/src/main/java/com/bw/search/service/ModelStreamService.java
@@ -0,0 +1,21 @@
+package com.bw.search.service;
+
/**
 * Streaming model (GPT) service interface.
 *
 * @author jian.mao
 * @date 2025-11-20
 */
public interface ModelStreamService {

    /** Callbacks fired as the model stream progresses. */
    interface StreamListener {
        /** Called once per generated text chunk. */
        void onMessage(String text);
        /** Called exactly once when the stream completes normally. */
        void onComplete();
        /** Called when the stream fails. */
        void onError(Throwable t);
    }

    /**
     * Streams a GPT completion, delivering chunks to the listener.
     *
     * @param prompt   request payload — NOTE(review): the controller passes the
     *                 raw request JSON here, not a bare prompt; confirm naming
     * @param listener receives chunks and terminal events
     */
    void stream(String prompt, StreamListener listener);
}
\ No newline at end of file
diff --git a/src/main/java/com/bw/search/service/RagSearchService.java b/src/main/java/com/bw/search/service/RagSearchService.java
new file mode 100644
index 0000000..10c713b
--- /dev/null
+++ b/src/main/java/com/bw/search/service/RagSearchService.java
@@ -0,0 +1,19 @@
+package com.bw.search.service;
+
+import com.bw.search.common.Res;
+
+/**
+ * rag检索业务层接口
+ * @author jian.mao
+ * @date 2025年11月6日
+ * @description
+ */
+public interface RagSearchService {
+
+ /**
+ * 检索方法
+ * @param dataJson
+ * @return
+ */
+ public Res> search(String dataJson);
+}
diff --git a/src/main/java/com/bw/search/service/impl/ModelStreamServiceImpl.java b/src/main/java/com/bw/search/service/impl/ModelStreamServiceImpl.java
new file mode 100644
index 0000000..2ed4bcb
--- /dev/null
+++ b/src/main/java/com/bw/search/service/impl/ModelStreamServiceImpl.java
@@ -0,0 +1,252 @@
+package com.bw.search.service.impl;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Service;
+
+import com.alibaba.fastjson.JSONArray;
+import com.alibaba.fastjson.JSONObject;
+import com.bw.search.cache.ConfigCache;
+import com.bw.search.config.EsConfig;
+import com.bw.search.entity.Constants;
+import com.bw.search.service.ModelStreamService;
+import com.bw.search.utils.DateUtil;
+import com.bw.search.utils.FileUtil;
+import com.bw.search.utils.SpringBootKafka;
+
+import lombok.extern.slf4j.Slf4j;
+import okhttp3.Call;
+import okhttp3.Callback;
+import okhttp3.MediaType;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+import okhttp3.ResponseBody;
+import okio.BufferedSource;
+
+
+import com.alibaba.fastjson.JSON;
+import org.springframework.http.*;
+import org.springframework.util.Base64Utils;
+import org.springframework.web.client.RestTemplate;
+
+import javax.annotation.Resource;
+import java.nio.charset.StandardCharsets;
+
+
+@Service
+@Slf4j
+public class ModelStreamServiceImpl implements ModelStreamService {
+
+ @Value("${model.authorization}")
+ private String authorization;
+ @Value("${model.url}")
+ private String modelUrl;
+ @Value("${model.name}")
+ private String modelName;
+ @Autowired
+ private SpringBootKafka springBootKafka;
+ @Value("${kafka.producer.topic}")
+ private String topic;
+ @Value("${prompt.path}")
+ private String promptPath;
+
+ @Resource
+ private EsConfig esConfig;
+
+ private final RestTemplate restTemplate = new RestTemplate();
+
+ private final OkHttpClient client = new OkHttpClient.Builder()
+ .connectTimeout(30, TimeUnit.SECONDS)
+ .readTimeout(0, TimeUnit.SECONDS) // 流式接口,需要读超时为 0
+ .writeTimeout(30, TimeUnit.SECONDS)
+ .build();
+
+
+ @Override
+ public void stream(String dataJson, StreamListener listener) {
+ //转换对象
+ JSONObject parseObject = JSONObject.parseObject(dataJson);
+ String id = parseObject.getString(Constants.ID);
+// String keyword = parseObject.getString(Constants.KEWORD);
+ log.info("知识库检索开始-----");
+ springBootKafka.send(topic, dataJson);
+ Map knowResult = getKnowledge(id);
+ if(knowResult == null) {
+ log.error("知识库获取异常!");
+ return ;
+ }
+ log.info("知识库结果已获取:{}",JSONObject.toJSONString(knowResult));
+ Float temperature = 0.7f;
+ Float topP = 0.95f;
+ List