commit
6d632b6f47
32 changed files with 2823 additions and 0 deletions
-
40.classpath
-
23.project
-
5.settings/org.eclipse.core.resources.prefs
-
9.settings/org.eclipse.jdt.core.prefs
-
4.settings/org.eclipse.m2e.core.prefs
-
242logs/ragEngineInfo.log
-
237pom.xml
-
46prompt
-
39prompt-test
-
25src/main/java/com/bw/search/Application.java
-
22src/main/java/com/bw/search/cache/ConfigCache.java
-
61src/main/java/com/bw/search/common/Res.java
-
30src/main/java/com/bw/search/common/ResponseCode.java
-
16src/main/java/com/bw/search/config/EsConfig.java
-
20src/main/java/com/bw/search/config/KafkaConfig.java
-
65src/main/java/com/bw/search/controller/RagSearchController.java
-
51src/main/java/com/bw/search/entity/Constants.java
-
24src/main/java/com/bw/search/entity/SearchResponse.java
-
41src/main/java/com/bw/search/process/ResultSendQueue.java
-
21src/main/java/com/bw/search/service/ModelStreamService.java
-
19src/main/java/com/bw/search/service/RagSearchService.java
-
252src/main/java/com/bw/search/service/impl/ModelStreamServiceImpl.java
-
68src/main/java/com/bw/search/service/impl/RagSearchServiceImpl.java
-
177src/main/java/com/bw/search/utils/DateUtil.java
-
1007src/main/java/com/bw/search/utils/DownLoadUtil.java
-
60src/main/java/com/bw/search/utils/FileUtil.java
-
45src/main/java/com/bw/search/utils/SpringBootKafka.java
-
23src/main/java/com/bw/search/utils/ThrowMessageUtil.java
-
73src/main/resources/application.yml
-
36src/main/resources/logback-spring.xml
-
38src/test/java/com/bw/AppTest.java
-
4target/.gitignore
@ -0,0 +1,40 @@ |
|||||
|
<?xml version="1.0" encoding="UTF-8"?> |
||||
|
<classpath> |
||||
|
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources"> |
||||
|
<attributes> |
||||
|
<attribute name="maven.pomderived" value="true"/> |
||||
|
<attribute name="optional" value="true"/> |
||||
|
</attributes> |
||||
|
</classpathentry> |
||||
|
<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources"> |
||||
|
<attributes> |
||||
|
<attribute name="maven.pomderived" value="true"/> |
||||
|
<attribute name="test" value="true"/> |
||||
|
<attribute name="optional" value="true"/> |
||||
|
</attributes> |
||||
|
</classpathentry> |
||||
|
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8"> |
||||
|
<attributes> |
||||
|
<attribute name="maven.pomderived" value="true"/> |
||||
|
</attributes> |
||||
|
</classpathentry> |
||||
|
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER"> |
||||
|
<attributes> |
||||
|
<attribute name="maven.pomderived" value="true"/> |
||||
|
</attributes> |
||||
|
</classpathentry> |
||||
|
<classpathentry kind="src" output="target/classes" path="src/main/java"> |
||||
|
<attributes> |
||||
|
<attribute name="optional" value="true"/> |
||||
|
<attribute name="maven.pomderived" value="true"/> |
||||
|
</attributes> |
||||
|
</classpathentry> |
||||
|
<classpathentry kind="src" output="target/test-classes" path="src/test/java"> |
||||
|
<attributes> |
||||
|
<attribute name="optional" value="true"/> |
||||
|
<attribute name="maven.pomderived" value="true"/> |
||||
|
<attribute name="test" value="true"/> |
||||
|
</attributes> |
||||
|
</classpathentry> |
||||
|
<classpathentry kind="output" path="target/classes"/> |
||||
|
</classpath> |
||||
@ -0,0 +1,23 @@ |
|||||
|
<?xml version="1.0" encoding="UTF-8"?> |
||||
|
<projectDescription> |
||||
|
<name>RagEngine</name> |
||||
|
<comment></comment> |
||||
|
<projects> |
||||
|
</projects> |
||||
|
<buildSpec> |
||||
|
<buildCommand> |
||||
|
<name>org.eclipse.jdt.core.javabuilder</name> |
||||
|
<arguments> |
||||
|
</arguments> |
||||
|
</buildCommand> |
||||
|
<buildCommand> |
||||
|
<name>org.eclipse.m2e.core.maven2Builder</name> |
||||
|
<arguments> |
||||
|
</arguments> |
||||
|
</buildCommand> |
||||
|
</buildSpec> |
||||
|
<natures> |
||||
|
<nature>org.eclipse.jdt.core.javanature</nature> |
||||
|
<nature>org.eclipse.m2e.core.maven2Nature</nature> |
||||
|
</natures> |
||||
|
</projectDescription> |
||||
@ -0,0 +1,5 @@ |
|||||
|
eclipse.preferences.version=1 |
||||
|
encoding//src/main/java=UTF-8 |
||||
|
encoding//src/main/resources=UTF-8 |
||||
|
encoding//src/test/java=UTF-8 |
||||
|
encoding/<project>=UTF-8 |
||||
@ -0,0 +1,9 @@ |
|||||
|
eclipse.preferences.version=1 |
||||
|
org.eclipse.jdt.core.compiler.codegen.methodParameters=generate |
||||
|
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8 |
||||
|
org.eclipse.jdt.core.compiler.compliance=1.8 |
||||
|
org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled |
||||
|
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning |
||||
|
org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore |
||||
|
org.eclipse.jdt.core.compiler.release=disabled |
||||
|
org.eclipse.jdt.core.compiler.source=1.8 |
||||
@ -0,0 +1,4 @@ |
|||||
|
activeProfiles= |
||||
|
eclipse.preferences.version=1 |
||||
|
resolveWorkspaceProjects=true |
||||
|
version=1 |
||||
@ -0,0 +1,242 @@ |
|||||
|
2025-11-07 18:21:17.669 [main] 55 INFO com.bw.search.Application - Starting Application on maojian with PID 72232 (D:\eclipseWork\RagEngine\target\classes started by 55007 in D:\eclipseWork\RagEngine) |
||||
|
2025-11-07 18:21:17.676 [main] 651 INFO com.bw.search.Application - No active profile set, falling back to default profiles: default |
||||
|
2025-11-07 18:21:17.719 [background-preinit] 127 WARN o.s.h.converter.json.Jackson2ObjectMapperBuilder - For Jackson Kotlin classes support please add "com.fasterxml.jackson.module:jackson-module-kotlin" to the classpath |
||||
|
2025-11-07 18:21:18.738 [main] 92 INFO o.s.boot.web.embedded.tomcat.TomcatWebServer - Tomcat initialized with port(s): 8001 (http) |
||||
|
2025-11-07 18:21:18.744 [main] 173 INFO org.apache.coyote.http11.Http11NioProtocol - Initializing ProtocolHandler ["http-nio-8001"] |
||||
|
2025-11-07 18:21:18.744 [main] 173 INFO org.apache.catalina.core.StandardService - Starting service [Tomcat] |
||||
|
2025-11-07 18:21:18.744 [main] 173 INFO org.apache.catalina.core.StandardEngine - Starting Servlet engine: [Apache Tomcat/9.0.30] |
||||
|
2025-11-07 18:21:18.816 [main] 173 INFO o.a.c.c.ContainerBase.[Tomcat].[localhost].[/nvps] - Initializing Spring embedded WebApplicationContext |
||||
|
2025-11-07 18:21:18.816 [main] 284 INFO org.springframework.web.context.ContextLoader - Root WebApplicationContext: initialization completed in 1094 ms |
||||
|
2025-11-07 18:21:19.120 [main] 171 INFO o.s.scheduling.concurrent.ThreadPoolTaskExecutor - Initializing ExecutorService 'applicationTaskExecutor' |
||||
|
2025-11-07 18:21:19.302 [main] 171 INFO o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'taskScheduler' |
||||
|
2025-11-07 18:21:19.307 [main] 58 INFO o.s.b.actuate.endpoint.web.EndpointLinksResolver - Exposing 2 endpoint(s) beneath base path '/actuator' |
||||
|
2025-11-07 18:21:19.363 [main] 347 INFO org.apache.kafka.clients.admin.AdminClientConfig - AdminClientConfig values: |
||||
|
bootstrap.servers = [node-01:19092, node-02:19092, node-03:19092] |
||||
|
client.dns.lookup = default |
||||
|
client.id = |
||||
|
connections.max.idle.ms = 300000 |
||||
|
metadata.max.age.ms = 300000 |
||||
|
metric.reporters = [] |
||||
|
metrics.num.samples = 2 |
||||
|
metrics.recording.level = INFO |
||||
|
metrics.sample.window.ms = 30000 |
||||
|
receive.buffer.bytes = 65536 |
||||
|
reconnect.backoff.max.ms = 1000 |
||||
|
reconnect.backoff.ms = 50 |
||||
|
request.timeout.ms = 120000 |
||||
|
retries = 5 |
||||
|
retry.backoff.ms = 100 |
||||
|
sasl.client.callback.handler.class = null |
||||
|
sasl.jaas.config = null |
||||
|
sasl.kerberos.kinit.cmd = /usr/bin/kinit |
||||
|
sasl.kerberos.min.time.before.relogin = 60000 |
||||
|
sasl.kerberos.service.name = null |
||||
|
sasl.kerberos.ticket.renew.jitter = 0.05 |
||||
|
sasl.kerberos.ticket.renew.window.factor = 0.8 |
||||
|
sasl.login.callback.handler.class = null |
||||
|
sasl.login.class = null |
||||
|
sasl.login.refresh.buffer.seconds = 300 |
||||
|
sasl.login.refresh.min.period.seconds = 60 |
||||
|
sasl.login.refresh.window.factor = 0.8 |
||||
|
sasl.login.refresh.window.jitter = 0.05 |
||||
|
sasl.mechanism = GSSAPI |
||||
|
security.protocol = PLAINTEXT |
||||
|
send.buffer.bytes = 131072 |
||||
|
ssl.cipher.suites = null |
||||
|
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] |
||||
|
ssl.endpoint.identification.algorithm = https |
||||
|
ssl.key.password = null |
||||
|
ssl.keymanager.algorithm = SunX509 |
||||
|
ssl.keystore.location = null |
||||
|
ssl.keystore.password = null |
||||
|
ssl.keystore.type = JKS |
||||
|
ssl.protocol = TLS |
||||
|
ssl.provider = null |
||||
|
ssl.secure.random.implementation = null |
||||
|
ssl.trustmanager.algorithm = PKIX |
||||
|
ssl.truststore.location = null |
||||
|
ssl.truststore.password = null |
||||
|
ssl.truststore.type = JKS |
||||
|
|
||||
|
2025-11-07 18:21:19.416 [main] 117 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 2.3.1 |
||||
|
2025-11-07 18:21:19.417 [main] 118 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 18a913733fb71c01 |
||||
|
2025-11-07 18:21:19.417 [main] 119 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1762510879415 |
||||
|
2025-11-07 18:21:19.705 [main] 347 INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values: |
||||
|
allow.auto.create.topics = true |
||||
|
auto.commit.interval.ms = 1000 |
||||
|
auto.offset.reset = earliest |
||||
|
bootstrap.servers = [node-01:19092, node-02:19092, node-03:19092] |
||||
|
check.crcs = true |
||||
|
client.dns.lookup = default |
||||
|
client.id = |
||||
|
client.rack = |
||||
|
connections.max.idle.ms = 540000 |
||||
|
default.api.timeout.ms = 60000 |
||||
|
enable.auto.commit = true |
||||
|
exclude.internal.topics = true |
||||
|
fetch.max.bytes = 52428800 |
||||
|
fetch.max.wait.ms = 500 |
||||
|
fetch.min.bytes = 1 |
||||
|
group.id = test4 |
||||
|
group.instance.id = null |
||||
|
heartbeat.interval.ms = 3000 |
||||
|
interceptor.classes = [] |
||||
|
internal.leave.group.on.close = true |
||||
|
isolation.level = read_uncommitted |
||||
|
key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer |
||||
|
max.partition.fetch.bytes = 1048576 |
||||
|
max.poll.interval.ms = 300000 |
||||
|
max.poll.records = 500 |
||||
|
metadata.max.age.ms = 300000 |
||||
|
metric.reporters = [] |
||||
|
metrics.num.samples = 2 |
||||
|
metrics.recording.level = INFO |
||||
|
metrics.sample.window.ms = 30000 |
||||
|
partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] |
||||
|
receive.buffer.bytes = 65536 |
||||
|
reconnect.backoff.max.ms = 1000 |
||||
|
reconnect.backoff.ms = 50 |
||||
|
request.timeout.ms = 30000 |
||||
|
retry.backoff.ms = 100 |
||||
|
sasl.client.callback.handler.class = null |
||||
|
sasl.jaas.config = null |
||||
|
sasl.kerberos.kinit.cmd = /usr/bin/kinit |
||||
|
sasl.kerberos.min.time.before.relogin = 60000 |
||||
|
sasl.kerberos.service.name = null |
||||
|
sasl.kerberos.ticket.renew.jitter = 0.05 |
||||
|
sasl.kerberos.ticket.renew.window.factor = 0.8 |
||||
|
sasl.login.callback.handler.class = null |
||||
|
sasl.login.class = null |
||||
|
sasl.login.refresh.buffer.seconds = 300 |
||||
|
sasl.login.refresh.min.period.seconds = 60 |
||||
|
sasl.login.refresh.window.factor = 0.8 |
||||
|
sasl.login.refresh.window.jitter = 0.05 |
||||
|
sasl.mechanism = GSSAPI |
||||
|
security.protocol = PLAINTEXT |
||||
|
send.buffer.bytes = 131072 |
||||
|
session.timeout.ms = 10000 |
||||
|
ssl.cipher.suites = null |
||||
|
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] |
||||
|
ssl.endpoint.identification.algorithm = https |
||||
|
ssl.key.password = null |
||||
|
ssl.keymanager.algorithm = SunX509 |
||||
|
ssl.keystore.location = null |
||||
|
ssl.keystore.password = null |
||||
|
ssl.keystore.type = JKS |
||||
|
ssl.protocol = TLS |
||||
|
ssl.provider = null |
||||
|
ssl.secure.random.implementation = null |
||||
|
ssl.trustmanager.algorithm = PKIX |
||||
|
ssl.truststore.location = null |
||||
|
ssl.truststore.password = null |
||||
|
ssl.truststore.type = JKS |
||||
|
value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer |
||||
|
|
||||
|
2025-11-07 18:21:19.724 [main] 117 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 2.3.1 |
||||
|
2025-11-07 18:21:19.724 [main] 118 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 18a913733fb71c01 |
||||
|
2025-11-07 18:21:19.724 [main] 119 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1762510879724 |
||||
|
2025-11-07 18:21:19.724 [main] 964 INFO org.apache.kafka.clients.consumer.KafkaConsumer - [Consumer clientId=consumer-1, groupId=test4] Subscribed to topic(s): nvps_search_data |
||||
|
2025-11-07 18:21:19.726 [main] 171 INFO o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService |
||||
|
2025-11-07 18:21:19.737 [main] 173 INFO org.apache.coyote.http11.Http11NioProtocol - Starting ProtocolHandler ["http-nio-8001"] |
||||
|
2025-11-07 18:21:19.745 [main] 204 INFO o.s.boot.web.embedded.tomcat.TomcatWebServer - Tomcat started on port(s): 8001 (http) with context path '/nvps' |
||||
|
2025-11-07 18:21:19.747 [main] 61 INFO com.bw.search.Application - Started Application in 2.35 seconds (JVM running for 2.636) |
||||
|
2025-11-07 18:21:20.058 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 261 INFO org.apache.kafka.clients.Metadata - [Consumer clientId=consumer-1, groupId=test4] Cluster ID: NyNtqsIDRIae-HJZGe2pww |
||||
|
2025-11-07 18:21:20.087 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 728 INFO o.a.k.c.consumer.internals.AbstractCoordinator - [Consumer clientId=consumer-1, groupId=test4] Discovered group coordinator node-03:19092 (id: 2147483644 rack: null) |
||||
|
2025-11-07 18:21:20.089 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 476 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Revoking previously assigned partitions [] |
||||
|
2025-11-07 18:21:20.089 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 279 INFO o.s.kafka.listener.KafkaMessageListenerContainer - test4: partitions revoked: [] |
||||
|
2025-11-07 18:21:20.089 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 505 INFO o.a.k.c.consumer.internals.AbstractCoordinator - [Consumer clientId=consumer-1, groupId=test4] (Re-)joining group |
||||
|
2025-11-07 18:21:20.207 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 505 INFO o.a.k.c.consumer.internals.AbstractCoordinator - [Consumer clientId=consumer-1, groupId=test4] (Re-)joining group |
||||
|
2025-11-07 18:21:20.748 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 469 INFO o.a.k.c.consumer.internals.AbstractCoordinator - [Consumer clientId=consumer-1, groupId=test4] Successfully joined group with generation 23 |
||||
|
2025-11-07 18:21:20.750 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 283 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting newly assigned partitions: nvps_search_data-7, nvps_search_data-8, nvps_search_data-1, nvps_search_data-2, nvps_search_data-0, nvps_search_data-5, nvps_search_data-6, nvps_search_data-3, nvps_search_data-4 |
||||
|
2025-11-07 18:21:20.789 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-7 to the committed offset FetchPosition{offset=0, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=node-03:19092 (id: 3 rack: null), epoch=0}} |
||||
|
2025-11-07 18:21:20.789 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-8 to the committed offset FetchPosition{offset=1, offsetEpoch=Optional[0], currentLeader=LeaderAndEpoch{leader=node-01:19092 (id: 1 rack: null), epoch=0}} |
||||
|
2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-1 to the committed offset FetchPosition{offset=0, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=node-03:19092 (id: 3 rack: null), epoch=0}} |
||||
|
2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-2 to the committed offset FetchPosition{offset=3, offsetEpoch=Optional[0], currentLeader=LeaderAndEpoch{leader=node-01:19092 (id: 1 rack: null), epoch=0}} |
||||
|
2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-0 to the committed offset FetchPosition{offset=1, offsetEpoch=Optional[0], currentLeader=LeaderAndEpoch{leader=node-02:19092 (id: 2 rack: null), epoch=0}} |
||||
|
2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-5 to the committed offset FetchPosition{offset=0, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=node-01:19092 (id: 1 rack: null), epoch=0}} |
||||
|
2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-6 to the committed offset FetchPosition{offset=0, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=node-02:19092 (id: 2 rack: null), epoch=0}} |
||||
|
2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-3 to the committed offset FetchPosition{offset=1, offsetEpoch=Optional[0], currentLeader=LeaderAndEpoch{leader=node-02:19092 (id: 2 rack: null), epoch=0}} |
||||
|
2025-11-07 18:21:20.790 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 525 INFO o.a.k.c.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumer-1, groupId=test4] Setting offset for partition nvps_search_data-4 to the committed offset FetchPosition{offset=0, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=node-03:19092 (id: 3 rack: null), epoch=0}} |
||||
|
2025-11-07 18:21:20.947 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 279 INFO o.s.kafka.listener.KafkaMessageListenerContainer - test4: partitions assigned: [nvps_search_data-7, nvps_search_data-8, nvps_search_data-1, nvps_search_data-2, nvps_search_data-0, nvps_search_data-5, nvps_search_data-6, nvps_search_data-3, nvps_search_data-4] |
||||
|
2025-11-07 18:21:41.726 [http-nio-8001-exec-2] 173 INFO o.a.c.c.ContainerBase.[Tomcat].[localhost].[/nvps] - Initializing Spring DispatcherServlet 'dispatcherServlet' |
||||
|
2025-11-07 18:21:41.727 [http-nio-8001-exec-2] 525 INFO org.springframework.web.servlet.DispatcherServlet - Initializing Servlet 'dispatcherServlet' |
||||
|
2025-11-07 18:21:41.733 [http-nio-8001-exec-2] 547 INFO org.springframework.web.servlet.DispatcherServlet - Completed initialization in 6 ms |
||||
|
2025-11-07 18:21:41.881 [http-nio-8001-exec-2] 48 INFO com.bw.search.service.impl.RagSearchServiceImpl - 知识库检索开始----- |
||||
|
2025-11-07 18:21:41.895 [http-nio-8001-exec-2] 28 INFO com.bw.search.utils.SpringBootKafka - 准备发送消息为:"{\"id\":\"id-1\",\"keyword\":\"Michael J. Aziz\"}" |
||||
|
2025-11-07 18:21:41.902 [http-nio-8001-exec-2] 347 INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: |
||||
|
acks = 1 |
||||
|
batch.size = 16384 |
||||
|
bootstrap.servers = [node-01:19092, node-02:19092, node-03:19092] |
||||
|
buffer.memory = 33554432 |
||||
|
client.dns.lookup = default |
||||
|
client.id = |
||||
|
compression.type = none |
||||
|
connections.max.idle.ms = 540000 |
||||
|
delivery.timeout.ms = 120000 |
||||
|
enable.idempotence = false |
||||
|
interceptor.classes = [] |
||||
|
key.serializer = class org.apache.kafka.common.serialization.StringSerializer |
||||
|
linger.ms = 0 |
||||
|
max.block.ms = 60000 |
||||
|
max.in.flight.requests.per.connection = 5 |
||||
|
max.request.size = 1048576 |
||||
|
metadata.max.age.ms = 300000 |
||||
|
metric.reporters = [] |
||||
|
metrics.num.samples = 2 |
||||
|
metrics.recording.level = INFO |
||||
|
metrics.sample.window.ms = 30000 |
||||
|
partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner |
||||
|
receive.buffer.bytes = 32768 |
||||
|
reconnect.backoff.max.ms = 1000 |
||||
|
reconnect.backoff.ms = 50 |
||||
|
request.timeout.ms = 30000 |
||||
|
retries = 0 |
||||
|
retry.backoff.ms = 100 |
||||
|
sasl.client.callback.handler.class = null |
||||
|
sasl.jaas.config = null |
||||
|
sasl.kerberos.kinit.cmd = /usr/bin/kinit |
||||
|
sasl.kerberos.min.time.before.relogin = 60000 |
||||
|
sasl.kerberos.service.name = null |
||||
|
sasl.kerberos.ticket.renew.jitter = 0.05 |
||||
|
sasl.kerberos.ticket.renew.window.factor = 0.8 |
||||
|
sasl.login.callback.handler.class = null |
||||
|
sasl.login.class = null |
||||
|
sasl.login.refresh.buffer.seconds = 300 |
||||
|
sasl.login.refresh.min.period.seconds = 60 |
||||
|
sasl.login.refresh.window.factor = 0.8 |
||||
|
sasl.login.refresh.window.jitter = 0.05 |
||||
|
sasl.mechanism = GSSAPI |
||||
|
security.protocol = PLAINTEXT |
||||
|
send.buffer.bytes = 131072 |
||||
|
ssl.cipher.suites = null |
||||
|
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] |
||||
|
ssl.endpoint.identification.algorithm = https |
||||
|
ssl.key.password = null |
||||
|
ssl.keymanager.algorithm = SunX509 |
||||
|
ssl.keystore.location = null |
||||
|
ssl.keystore.password = null |
||||
|
ssl.keystore.type = JKS |
||||
|
ssl.protocol = TLS |
||||
|
ssl.provider = null |
||||
|
ssl.secure.random.implementation = null |
||||
|
ssl.trustmanager.algorithm = PKIX |
||||
|
ssl.truststore.location = null |
||||
|
ssl.truststore.password = null |
||||
|
ssl.truststore.type = JKS |
||||
|
transaction.timeout.ms = 60000 |
||||
|
transactional.id = null |
||||
|
value.serializer = class org.apache.kafka.common.serialization.StringSerializer |
||||
|
|
||||
|
2025-11-07 18:21:41.918 [http-nio-8001-exec-2] 117 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 2.3.1 |
||||
|
2025-11-07 18:21:41.919 [http-nio-8001-exec-2] 118 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 18a913733fb71c01 |
||||
|
2025-11-07 18:21:41.919 [http-nio-8001-exec-2] 119 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1762510901918 |
||||
|
2025-11-07 18:21:42.058 [kafka-producer-network-thread | producer-1] 261 INFO org.apache.kafka.clients.Metadata - [Producer clientId=producer-1] Cluster ID: NyNtqsIDRIae-HJZGe2pww |
||||
|
2025-11-07 18:21:42.068 [http-nio-8001-exec-2] 116 INFO com.bw.search.service.impl.RagSearchServiceImpl - 知识获取中请稍后... |
||||
|
2025-11-07 18:21:42.175 [kafka-producer-network-thread | producer-1] 41 INFO com.bw.search.utils.SpringBootKafka - nvps_know_base - 生产者 发送消息成功:SendResult [producerRecord=ProducerRecord(topic=nvps_know_base, partition=null, headers=RecordHeaders(headers = [], isReadOnly = true), key=null, value={"id":"id-1","keyword":"Michael J. Aziz"}, timestamp=null), recordMetadata=nvps_know_base-7@1] |
||||
|
2025-11-07 18:21:43.085 [http-nio-8001-exec-2] 116 INFO com.bw.search.service.impl.RagSearchServiceImpl - 知识获取中请稍后... |
||||
|
2025-11-07 18:21:44.092 [http-nio-8001-exec-2] 116 INFO com.bw.search.service.impl.RagSearchServiceImpl - 知识获取中请稍后... |
||||
|
2025-11-07 18:21:45.106 [http-nio-8001-exec-2] 116 INFO com.bw.search.service.impl.RagSearchServiceImpl - 知识获取中请稍后... |
||||
|
2025-11-07 18:21:45.770 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] 35 INFO com.bw.search.process.ResultSendQueue - 消费知识:id-1 |
||||
|
2025-11-07 18:21:46.128 [http-nio-8001-exec-2] 51 INFO com.bw.search.service.impl.RagSearchServiceImpl - 知识库结果已获取:{"isLast":1,"promptRes":"已知信息:\n{\"college\":\"Bertoldi Group: Solid Mechanics\",\"phone\":\"(617) 496-3084\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\",\"Materials\",\"Materials\",\"Solid Mechanics\"],\"name\":\"Katia Bertoldi\",\"position\":\"William and Ami Kuan Danoff Professor of Applied Mechanics\",\"email\":\"bertoldi@seas.harvard.edu\"}\n{\"college\":\"\",\"phone\":\"(617) 495-6012\",\"researchFocus\":[\"Materials\",\"Robotics and Control\",\"Robotics and Control\",\"Materials\",\"Robotics and Control\",\"Design and Innovation\"],\"name\":\"Martin Bechthold\",\"position\":\"Kumagai Professor of Architectural Technology; Affiliate in Materials Science & Mechanical Engineering\",\"email\":\"mbechthold@gsd.harvard.edu\"}\n{\"college\":\"Anderson Group\",\"phone\":\"(617) 998-5550\",\"researchFocus\":[\"Atmospheric Chemistry\",\"Climate Change\",\"Solar Geoengineering\"],\"name\":\"James G. Anderson\",\"position\":\"Philip S. Weld Professor of Atmospheric Chemistry\",\"email\":\"anderson@huarp.harvard.edu\"}\n{\"college\":\"Markus Basan Lab\",\"phone\":\"\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\",\"Soft Matter\",\"Soft Matter\"],\"name\":\"Markus Thomas Basan\",\"position\":\"Assistant Professor of Systems Biology; Affiliate in Applied Physics\",\"email\":\"markus@hms.harvard.edu\"}\n{\"college\":\"\",\"phone\":\"\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\"],\"name\":\"William H. Bossert\",\"position\":\"David B. Arnold, Jr. Professor of Science, Emeritus\",\"email\":\"bossert@seas.harvard.edu\"}\n\n参考上述已知信息回答问题:\nMichael J. 
Aziz","ids":"[\"7dc72740-bbc3-11f0-9abc-00163e03f4d5\", \"7a5654fa-bbc3-11f0-9abc-00163e03f4d5\", \"783b6c46-bbc3-11f0-9abc-00163e03f4d5\", \"78ac3a02-bbc3-11f0-9abc-00163e03f4d5\", \"7d5e213c-bbc3-11f0-9abc-00163e03f4d5\"]","id":"id-1","keyword":"Michael J. Aziz"} |
||||
|
2025-11-07 18:21:46.141 [http-nio-8001-exec-2] 98 INFO com.bw.search.service.impl.RagSearchServiceImpl - 模型请求信息:url:https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions,header:{"authorization":"Bearer sk-c5f56c1c8a6447b3a6c646a3f14085c4","Content-Type":"application/json"},params:{"top_p":0.95,"stream":false,"max_tokens":8192,"temperature":0.7,"messages":[{"role":"user","content":"You are a professional academic profile writer.Task:You will receive multiple lines of text. Each line contains either:- A plain JSON object with researcher information, OR - A wrapper object that includes a \"value\" field containing the actual researcher JSON string.For each line, extract the researcher information and generate a concise academic biography in English (3–5 sentences). Avoid listing items mechanically — make it sound natural and professional. If some fields (like phone or email) are missing, just skip them. Output one biography paragraph per person, separated by blank lines.Input example:{\"college\":\"Aizenberg Biomineralization and Biomimetics Lab\",\"phone\":\"(617) 495-3558\",\"researchFocus\":[\"Biomaterials\",\"Soft Matter\",\"Surface and Interface Science\"],\"name\":\"Joanna Aizenberg\",\"position\":\"Amy Smith Berylson Professor of Materials Science and Professor of Chemistry & Chemical Biology\",\"email\":\"jaiz@seas.harvard.edu\"}{\"college\":\"Bertoldi Group: Solid Mechanics\",\"phone\":\"(617) 496-3084\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\",\"Materials\",\"Solid Mechanics\"],\"name\":\"Katia Bertoldi\",\"position\":\"William and Ami Kuan Danoff Professor of Applied Mechanics\",\"email\":\"bertoldi@seas.harvard.edu\"}Output format:Write one paragraph per person, separated by a blank line. 
Each paragraph should summarize their title, affiliation, and main research interests.Now generate the biographies for the following input:已知信息:\n{\"college\":\"Bertoldi Group: Solid Mechanics\",\"phone\":\"(617) 496-3084\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\",\"Materials\",\"Materials\",\"Solid Mechanics\"],\"name\":\"Katia Bertoldi\",\"position\":\"William and Ami Kuan Danoff Professor of Applied Mechanics\",\"email\":\"bertoldi@seas.harvard.edu\"}\n{\"college\":\"\",\"phone\":\"(617) 495-6012\",\"researchFocus\":[\"Materials\",\"Robotics and Control\",\"Robotics and Control\",\"Materials\",\"Robotics and Control\",\"Design and Innovation\"],\"name\":\"Martin Bechthold\",\"position\":\"Kumagai Professor of Architectural Technology; Affiliate in Materials Science & Mechanical Engineering\",\"email\":\"mbechthold@gsd.harvard.edu\"}\n{\"college\":\"Anderson Group\",\"phone\":\"(617) 998-5550\",\"researchFocus\":[\"Atmospheric Chemistry\",\"Climate Change\",\"Solar Geoengineering\"],\"name\":\"James G. Anderson\",\"position\":\"Philip S. Weld Professor of Atmospheric Chemistry\",\"email\":\"anderson@huarp.harvard.edu\"}\n{\"college\":\"Markus Basan Lab\",\"phone\":\"\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\",\"Soft Matter\",\"Soft Matter\"],\"name\":\"Markus Thomas Basan\",\"position\":\"Assistant Professor of Systems Biology; Affiliate in Applied Physics\",\"email\":\"markus@hms.harvard.edu\"}\n{\"college\":\"\",\"phone\":\"\",\"researchFocus\":[\"Modeling Physical/Biological Phenomena and Systems\"],\"name\":\"William H. Bossert\",\"position\":\"David B. Arnold, Jr. Professor of Science, Emeritus\",\"email\":\"bossert@seas.harvard.edu\"}\n\n参考上述已知信息回答问题:\nMichael J. Aziz"}],"model":"qwen-max-latest"} |
||||
|
2025-11-07 18:22:00.732 [http-nio-8001-exec-2] 61 INFO com.bw.search.service.impl.RagSearchServiceImpl - 问答结果:{"choices":[{"message":{"role":"assistant","content":"Katia Bertoldi is the William and Ami Kuan Danoff Professor of Applied Mechanics at Harvard University, affiliated with the Bertoldi Group in Solid Mechanics. Her research focuses on modeling physical and biological phenomena, materials science, and solid mechanics, contributing significantly to the understanding of complex systems and their behaviors.\n\nMartin Bechthold holds the position of Kumagai Professor of Architectural Technology and is also affiliated with Materials Science and Mechanical Engineering at Harvard. His work centers on advancing materials innovation, robotics and control systems, and design principles, bridging architecture and engineering through cutting-edge technology.\n\nJames G. Anderson serves as the Philip S. Weld Professor of Atmospheric Chemistry in the Anderson Group at Harvard. His pioneering research explores atmospheric chemistry, climate change, and solar geoengineering, addressing critical environmental challenges with far-reaching implications for global sustainability.\n\nMarkus Thomas Basan is an Assistant Professor of Systems Biology with an affiliation in Applied Physics at Harvard Medical School. Leading the Markus Basan Lab, his research investigates the modeling of physical and biological systems, with a particular emphasis on soft matter, contributing to interdisciplinary approaches in life sciences.\n\nWilliam H. Bossert, the David B. Arnold, Jr. Professor of Science, Emeritus, has made significant contributions to the field of modeling physical and biological phenomena. His academic legacy continues to inspire advancements in scientific understanding, even as he steps back from active research.\n\nNo information is available in the provided data about Michael J. Aziz. 
Additional details would be needed to construct his academic biography."},"finish_reason":"stop","index":0,"logprobs":null}],"object":"chat.completion","usage":{"prompt_tokens":708,"completion_tokens":307,"total_tokens":1015},"created":1762510923,"system_fingerprint":null,"model":"qwen-max-latest","id":"chatcmpl-5751ed60-460e-4e8a-bb07-90cb997e9568"} |
||||
@ -0,0 +1,237 @@ |
|||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0" |
||||
|
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> |
||||
|
<modelVersion>4.0.0</modelVersion> |
||||
|
<parent> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-starter-parent</artifactId> |
||||
|
<version>2.2.4.RELEASE</version> |
||||
|
</parent> |
||||
|
<groupId>com.bw</groupId> |
||||
|
<artifactId>RagEngine</artifactId> |
||||
|
<version>0.0.1-SNAPSHOT</version> |
||||
|
<packaging>jar</packaging> |
||||
|
|
||||
|
<name>RagEngine</name> |
||||
|
<url>http://maven.apache.org</url> |
||||
|
|
||||
|
<properties> |
||||
|
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> |
||||
|
<maven.compiler.source>1.8</maven.compiler.source> |
||||
|
<maven.compiler.target>1.8</maven.compiler.target> |
||||
|
</properties> |
||||
|
|
||||
|
<dependencies> |
||||
|
<dependency> |
||||
|
<groupId>junit</groupId> |
||||
|
<artifactId>junit</artifactId> |
||||
|
<version>4.11</version> |
||||
|
<scope>test</scope> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-starter-web</artifactId> |
||||
|
</dependency> |
||||
|
<!-- |
||||
|
https://mvnrepository.com/artifact/de.codecentric/spring-boot-admin-starter-client --> |
||||
|
<dependency> |
||||
|
<groupId>de.codecentric</groupId> |
||||
|
<artifactId>spring-boot-admin-starter-client</artifactId> |
||||
|
<version>2.2.4</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>com.google.code.gson</groupId> |
||||
|
<artifactId>gson</artifactId> |
||||
|
<version>2.8.8</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-test</artifactId> |
||||
|
</dependency> |
||||
|
<!-- https://mvnrepository.com/artifact/org.springframework/spring-test --> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework</groupId> |
||||
|
<artifactId>spring-test</artifactId> |
||||
|
<version>5.0.10.RELEASE</version> |
||||
|
<scope>test</scope> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework.kafka</groupId> |
||||
|
<artifactId>spring-kafka</artifactId> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>commons-io</groupId> |
||||
|
<artifactId>commons-io</artifactId> |
||||
|
<version>2.11.0</version> <!-- 根据你的需求选择版本 --> |
||||
|
</dependency> |
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>com.alibaba</groupId> |
||||
|
<artifactId>fastjson</artifactId> |
||||
|
<version>2.0.17</version> |
||||
|
</dependency> |
||||
|
<!-- https://mvnrepository.com/artifact/com.mchange/c3p0 --> |
||||
|
<dependency> |
||||
|
<groupId>com.mchange</groupId> |
||||
|
<artifactId>c3p0</artifactId> |
||||
|
<version>0.9.5.5</version> |
||||
|
</dependency> |
||||
|
<!-- https://mvnrepository.com/artifact/com.squareup.okhttp3/okhttp --> |
||||
|
<dependency> |
||||
|
<groupId>com.squareup.okhttp3</groupId> |
||||
|
<artifactId>okhttp</artifactId> |
||||
|
<version>4.9.3</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.apache.httpcomponents</groupId> |
||||
|
<artifactId>httpclient</artifactId> |
||||
|
<version>4.5.3</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>commons-lang</groupId> |
||||
|
<artifactId>commons-lang</artifactId> |
||||
|
<version>2.6</version> |
||||
|
</dependency> |
||||
|
<!-- |
||||
|
https://mvnrepository.com/artifact/org.jetbrains.kotlin/kotlin-reflect --> |
||||
|
<dependency> |
||||
|
<groupId>org.jetbrains.kotlin</groupId> |
||||
|
<artifactId>kotlin-reflect</artifactId> |
||||
|
<version>1.6.21</version> |
||||
|
<scope>runtime</scope> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- Log4j 2 日志库 --> |
||||
|
<dependency> |
||||
|
<groupId>org.apache.logging.log4j</groupId> |
||||
|
<artifactId>log4j-api</artifactId> |
||||
|
<version>2.14.1</version> <!-- 或者你需要的其他版本 --> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.apache.logging.log4j</groupId> |
||||
|
<artifactId>log4j-core</artifactId> |
||||
|
<version>2.14.1</version> <!-- 或者你需要的其他版本 --> |
||||
|
</dependency> |
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>org.projectlombok</groupId> |
||||
|
<artifactId>lombok</artifactId> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>cn.hutool</groupId> |
||||
|
<artifactId>hutool-all</artifactId> |
||||
|
<version>5.8.5</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>junit</groupId> |
||||
|
<artifactId>junit</artifactId> |
||||
|
</dependency> |
||||
|
<!-- https://mvnrepository.com/artifact/p6spy/p6spy --> |
||||
|
<dependency> |
||||
|
<groupId>p6spy</groupId> |
||||
|
<artifactId>p6spy</artifactId> |
||||
|
<version>3.9.0</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>commons-collections</groupId> |
||||
|
<artifactId>commons-collections</artifactId> |
||||
|
<version>3.2.2</version> |
||||
|
</dependency> |
||||
|
|
||||
|
</dependencies> |
||||
|
|
||||
|
<build> |
||||
|
<!-- <pluginManagement> --><!-- lock down plugins versions to avoid using Maven defaults (may be |
||||
|
moved |
||||
|
to parent pom) --> |
||||
|
<plugins> |
||||
|
<!-- clean lifecycle, see |
||||
|
https://maven.apache.org/ref/current/maven-core/lifecycles.html#clean_Lifecycle --> |
||||
|
<plugin> |
||||
|
<artifactId>maven-clean-plugin</artifactId> |
||||
|
<version>3.1.0</version> |
||||
|
</plugin> |
||||
|
<!-- default lifecycle, jar packaging: see |
||||
|
https://maven.apache.org/ref/current/maven-core/default-bindings.html#Plugin_bindings_for_jar_packaging --> |
||||
|
<plugin> |
||||
|
<artifactId>maven-resources-plugin</artifactId> |
||||
|
<version>3.0.2</version> |
||||
|
</plugin> |
||||
|
<plugin> |
||||
|
<artifactId>maven-compiler-plugin</artifactId> |
||||
|
<version>3.8.0</version> |
||||
|
</plugin> |
||||
|
<plugin> |
||||
|
<artifactId>maven-surefire-plugin</artifactId> |
||||
|
<version>2.22.1</version> |
||||
|
</plugin> |
||||
|
<plugin> |
||||
|
<artifactId>maven-jar-plugin</artifactId> |
||||
|
<version>3.0.2</version> |
||||
|
</plugin> |
||||
|
<plugin> |
||||
|
<artifactId>maven-install-plugin</artifactId> |
||||
|
<version>2.5.2</version> |
||||
|
</plugin> |
||||
|
<plugin> |
||||
|
<artifactId>maven-deploy-plugin</artifactId> |
||||
|
<version>2.8.2</version> |
||||
|
</plugin> |
||||
|
<!-- site lifecycle, see |
||||
|
https://maven.apache.org/ref/current/maven-core/lifecycles.html#site_Lifecycle --> |
||||
|
<plugin> |
||||
|
<artifactId>maven-site-plugin</artifactId> |
||||
|
<version>3.7.1</version> |
||||
|
</plugin> |
||||
|
<plugin> |
||||
|
<artifactId>maven-project-info-reports-plugin</artifactId> |
||||
|
<version>3.0.0</version> |
||||
|
</plugin> |
||||
|
<!-- spring-boot-maven-plugin插件就是打包spring boot应用的 --> |
||||
|
|
||||
|
<plugin> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-maven-plugin</artifactId> |
||||
|
<configuration> |
||||
|
<mainClass>com.bw.search.Application</mainClass> |
||||
|
<layout>ZIP</layout> |
||||
|
<includes> |
||||
|
<include> |
||||
|
<groupId>${project.groupId}</groupId> |
||||
|
<artifactId>${project.artifactId}</artifactId> |
||||
|
</include> |
||||
|
</includes> |
||||
|
</configuration> |
||||
|
<executions> |
||||
|
<execution> |
||||
|
<goals> |
||||
|
<goal>repackage</goal> |
||||
|
</goals> |
||||
|
</execution> |
||||
|
</executions> |
||||
|
</plugin> |
||||
|
<plugin> |
||||
|
<groupId>org.apache.maven.plugins</groupId> |
||||
|
<artifactId>maven-dependency-plugin</artifactId> |
||||
|
<version>3.1.1</version> |
||||
|
<executions> |
||||
|
<execution> |
||||
|
<id>copy</id> |
||||
|
<phase>package</phase> |
||||
|
<goals> |
||||
|
<goal>copy-dependencies</goal> |
||||
|
</goals> |
||||
|
<configuration> |
||||
|
<type>jar</type> |
||||
|
<includeTypes>jar</includeTypes> |
||||
|
<includeScope>runtime</includeScope> |
||||
|
<outputDirectory>${project.build.directory}/libs</outputDirectory> |
||||
|
</configuration> |
||||
|
</execution> |
||||
|
</executions> |
||||
|
</plugin> |
||||
|
</plugins> |
||||
|
<!-- </pluginManagement> --> |
||||
|
</build> |
||||
|
</project> |
||||
@ -0,0 +1,46 @@ |
|||||
|
You are a professional academic profile writer. |
||||
|
|
||||
|
### Language Requirement |
||||
|
|
||||
|
Determine the output language dynamically based on the keyword content. |
||||
|
If the keyword is in Arabic, output Arabic. |
||||
|
If the keyword is in Chinese, output Chinese. |
||||
|
If in English, output English. |
||||
|
If ambiguous, default to English. |
||||
|
Use the exact same language for all generated biographies. |
||||
|
|
||||
|
Task |
||||
|
|
||||
|
You will receive multiple lines of text. |
||||
|
Each line contains either: |
||||
|
A plain JSON object with researcher information, OR |
||||
|
A wrapper object that includes a "value" field containing the actual researcher JSON string. |
||||
|
|
||||
|
For each line: |
||||
|
Extract the researcher introduction. |
||||
|
Generate an abstract summary biography (1–3 sentences). |
||||
|
Avoid mechanical listing — write naturally and academically. |
||||
|
If some fields (phone, email, etc.) are missing, simply omit them. |
||||
|
Output one biography paragraph per person. |
||||
|
Insert exactly one blank line between different people’s biographies. |
||||
|
Do not merge multiple people into one paragraph. |
||||
|
|
||||
|
Input Example: |
||||
|
{"college":"Aizenberg Biomineralization and Biomimetics Lab","phone":"(617) 495-3558","researchFocus":["Biomaterials","Soft Matter","Surface and Interface Science"],"name":"Joanna Aizenberg","position":"Amy Smith Berylson Professor of Materials Science and Professor of Chemistry & Chemical Biology","email":"jaiz@seas.harvard.edu"} |
||||
|
{"college":"Bertoldi Group: Solid Mechanics","phone":"(617) 496-3084","researchFocus":["Modeling Physical/Biological Phenomena and Systems","Materials","Solid Mechanics"],"name":"Katia Bertoldi","position":"William and Ami Kuan Danoff Professor of Applied Mechanics","email":"bertoldi@seas.harvard.edu"} |
||||
|
|
||||
|
Output Format: |
||||
|
Name: biography. |
||||
|
|
||||
|
(blank line) |
||||
|
|
||||
|
Name: biography. |
||||
|
|
||||
|
Each biography separated by one empty line. |
||||
|
Written entirely in the language inferred from the keyword. |
||||
|
|
||||
|
keyword(用于自动识别语种): |
||||
|
${keyword} |
||||
|
|
||||
|
Now generate the biographies for the following input: |
||||
|
${information} |
||||
@ -0,0 +1,39 @@ |
|||||
|
You are a professional academic profile writer. |
||||
|
|
||||
|
Language Requirement |
||||
|
|
||||
|
Determine the output language dynamically based on the keyword content. |
||||
|
If the keyword is in Arabic, output Arabic. |
||||
|
If in English, output English. |
||||
|
If ambiguous, default to English. |
||||
|
Use the exact same language for all generated biographies. |
||||
|
|
||||
|
Task |
||||
|
|
||||
|
You will receive multiple lines of text. |
||||
|
Each line contains either: |
||||
|
A plain JSON object with researcher information, OR |
||||
|
A wrapper object that includes a "value" field containing the actual researcher JSON string. |
||||
|
For each line: |
||||
|
Extract the researcher introduction. |
||||
|
Generate an abstract summary biography (1–3 sentences). |
||||
|
Avoid mechanical listing — write naturally and academically. |
||||
|
If some fields (phone, email, etc.) are missing, simply omit them. |
||||
|
Output one biography paragraph per person, separated by a blank line. |
||||
|
|
||||
|
Input Example |
||||
|
{"college":"Aizenberg Biomineralization and Biomimetics Lab","phone":"(617) 495-3558","researchFocus":["Biomaterials","Soft Matter","Surface and Interface Science"],"name":"Joanna Aizenberg","position":"Amy Smith Berylson Professor of Materials Science and Professor of Chemistry & Chemical Biology","email":"jaiz@seas.harvard.edu |
||||
|
"} |
||||
|
{"college":"Bertoldi Group: Solid Mechanics","phone":"(617) 496-3084","researchFocus":["Modeling Physical/Biological Phenomena and Systems","Materials","Solid Mechanics"],"name":"Katia Bertoldi","position":"William and Ami Kuan Danoff Professor of Applied Mechanics","email":"bertoldi@seas.harvard.edu |
||||
|
"} |
||||
|
|
||||
|
Output Format |
||||
|
Name: biography. |
||||
|
Paragraphs separated by a blank line |
||||
|
Written entirely in the language inferred from keyword |
||||
|
|
||||
|
keyword(用于自动识别语种): |
||||
|
${keyword} |
||||
|
|
||||
|
Now generate the biographies for the following input: |
||||
|
${information} |
||||
@ -0,0 +1,25 @@ |
|||||
|
package com.bw.search;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.scheduling.annotation.EnableScheduling;

/**
 * Application entry point.
 *
 * <p>Boots the Spring context with scheduled-task support and Kafka
 * listener support enabled (the knowledge-result consumer relies on
 * {@code @EnableKafka}).
 *
 * @author jian.mao
 * @date 2023年7月4日
 */
@SpringBootApplication
@EnableScheduling
@EnableKafka
public class Application {

    /**
     * JVM entry point; delegates to Spring Boot.
     *
     * @param args command-line arguments, passed through to Spring
     */
    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
||||
@ -0,0 +1,22 @@ |
|||||
|
package com.bw.search.cache; |
||||
|
|
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
|
||||
|
import java.util.HashMap; |
||||
|
import java.util.Map; |
||||
|
|
||||
|
/** |
||||
|
* @author jian.mao |
||||
|
* @date 2022年11月11日 |
||||
|
* @description 静态变量类 |
||||
|
*/ |
||||
|
@Slf4j |
||||
|
public class ConfigCache { |
||||
|
|
||||
|
/**启动条件**/ |
||||
|
public static boolean isStart = true; |
||||
|
/***知识库信息存储--模型总结使用***/ |
||||
|
public static Map<String, Object> baseResult = new HashMap<String, Object>(16); |
||||
|
/***知识库信息存储--页面查询结果使用***/ |
||||
|
public static Map<String, Object> searchResult = new HashMap<String, Object>(16); |
||||
|
} |
||||
@ -0,0 +1,61 @@ |
|||||
|
package com.bw.search.common; |
||||
|
|
||||
|
/** |
||||
|
* 通用返回对象 |
||||
|
* |
||||
|
* @author jian.mao |
||||
|
* @date 2025年9月17日 |
||||
|
* @description |
||||
|
* @param <T> |
||||
|
*/ |
||||
|
|
||||
|
public class Res<T> { |
||||
|
private int resCode; |
||||
|
private String resMsg; |
||||
|
private T data; |
||||
|
|
||||
|
public Res() { |
||||
|
} |
||||
|
|
||||
|
public Res(int resCode, String resMsg, T data) { |
||||
|
this.resCode = resCode; |
||||
|
this.resMsg = resMsg; |
||||
|
this.data = data; |
||||
|
} |
||||
|
|
||||
|
public static <T> Res<T> ok(T data) { |
||||
|
return new Res<T>(ResponseCode.SUCCESS.code(), ResponseCode.SUCCESS.message(), data); |
||||
|
} |
||||
|
|
||||
|
public static <T> Res<T> fail(String msg) { |
||||
|
return new Res<T>(ResponseCode.FAIL.code(), msg, null); |
||||
|
} |
||||
|
public static <T> Res<T> checkError(T error) { |
||||
|
return new Res<T>(ResponseCode.FAIL.code(), ResponseCode.CHECKERROR.message(), error); |
||||
|
} |
||||
|
|
||||
|
// getter & setter |
||||
|
public int getResCode() { |
||||
|
return resCode; |
||||
|
} |
||||
|
|
||||
|
public void setResCode(int resCode) { |
||||
|
this.resCode = resCode; |
||||
|
} |
||||
|
|
||||
|
public String getResMsg() { |
||||
|
return resMsg; |
||||
|
} |
||||
|
|
||||
|
public void setResMsg(String resMsg) { |
||||
|
this.resMsg = resMsg; |
||||
|
} |
||||
|
|
||||
|
public T getData() { |
||||
|
return data; |
||||
|
} |
||||
|
|
||||
|
public void setData(T data) { |
||||
|
this.data = data; |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,30 @@ |
|||||
|
package com.bw.search.common;

/**
 * API status codes paired with their default messages, used by {@code Res}
 * when building standard success / failure responses.
 *
 * @author jian.mao
 * @date 2025年9月17日
 */
public enum ResponseCode {

    // 0 = success, -1 = generic failure, 400 = request-parameter validation error
    SUCCESS(0, "success"),
    FAIL(-1, "fail"),
    CHECKERROR(400, "paramsError");

    private final int code;
    private final String message;

    ResponseCode(int code, String message) {
        this.code = code;
        this.message = message;
    }

    /** @return the numeric status code */
    public int code() {
        return code;
    }

    /** @return the default human-readable message */
    public String message() {
        return message;
    }
}
||||
@ -0,0 +1,16 @@ |
|||||
|
package com.bw.search.config;

import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

/**
 * Elasticsearch connection settings bound from {@code es.*} application
 * properties. Lombok {@code @Data} generates the getters used by the
 * search service when building {@code _search} requests.
 */
@Data
@Component
@ConfigurationProperties(prefix = "es")
public class EsConfig {

    // Base URL of the ES cluster — presumably includes the scheme, e.g. "http://host:9200"; TODO confirm
    private String host;
    // Basic-auth credentials sent with each ES request
    private String username;
    private String password;
    // Index name queried by the search service
    private String index;
}
||||
@ -0,0 +1,20 @@ |
|||||
|
package com.bw.search.config;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

/**
 * Kafka topic configuration.
 *
 * <p>Exposes the consumer topic as a bean property so the
 * {@code @KafkaListener} SpEL expression {@code #{kafkaConfig.getKafkaTopic()}}
 * can resolve it at startup.
 *
 * @author jian.mao
 * @date 2023年7月6日
 */
@Configuration
public class KafkaConfig {

    /** Topic the knowledge-result consumer subscribes to ({@code kafka.consumer.topic}). */
    @Value("${kafka.consumer.topic}")
    private String kafkaTopic;

    /** @return the configured consumer topic name */
    public String getKafkaTopic() {
        return kafkaTopic;
    }
}
||||
@ -0,0 +1,65 @@ |
|||||
|
package com.bw.search.controller; |
||||
|
|
||||
|
|
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.http.MediaType; |
||||
|
import org.springframework.stereotype.Controller; |
||||
|
import org.springframework.web.bind.annotation.CrossOrigin; |
||||
|
import org.springframework.web.bind.annotation.PostMapping; |
||||
|
import org.springframework.web.bind.annotation.RequestBody; |
||||
|
import org.springframework.web.bind.annotation.RequestMapping; |
||||
|
import org.springframework.web.bind.annotation.ResponseBody; |
||||
|
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter; |
||||
|
|
||||
|
import com.bw.search.common.Res; |
||||
|
import com.bw.search.service.ModelStreamService; |
||||
|
import com.bw.search.service.RagSearchService; |
||||
|
|
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
|
||||
|
|
||||
|
@Controller |
||||
|
@CrossOrigin |
||||
|
@RequestMapping("/api") |
||||
|
@Slf4j |
||||
|
public class RagSearchController { |
||||
|
@Autowired |
||||
|
private RagSearchService ragSearchService; |
||||
|
@Autowired |
||||
|
private ModelStreamService modelStreamService; |
||||
|
@PostMapping("/search") |
||||
|
@ResponseBody |
||||
|
public Res<?> search(@RequestBody String dataJson){ |
||||
|
return ragSearchService.search(dataJson); |
||||
|
} |
||||
|
|
||||
|
@PostMapping(value = "/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE) |
||||
|
public SseEmitter stream(@RequestBody String dataJson) { |
||||
|
|
||||
|
SseEmitter emitter = new SseEmitter(0L); |
||||
|
|
||||
|
modelStreamService.stream(dataJson, new ModelStreamService.StreamListener() { |
||||
|
|
||||
|
@Override |
||||
|
public void onMessage(String text) { |
||||
|
try { |
||||
|
emitter.send(text); |
||||
|
} catch (Exception e) { |
||||
|
emitter.completeWithError(e); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public void onComplete() { |
||||
|
emitter.complete(); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public void onError(Throwable t) { |
||||
|
emitter.completeWithError(t); |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
return emitter; |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,51 @@ |
|||||
|
package com.bw.search.entity;

/**
 * Shared string constants: JSON keys, chat-completion parameter names and
 * prompt-template placeholders.
 *
 * <p>Declared {@code final} with a private constructor so this utility class
 * cannot be instantiated or extended.
 *
 * @author jian.mao
 * @date 2025年11月6日
 */
public final class Constants {

    private Constants() {
        // constants holder — not instantiable
    }

    /** Request/document id key. */
    public static final String ID = "id";
    /** Key holding the serialized list of matched document ids. */
    public static final String IDS = "ids";

    /** Search keyword key. */
    public static final String KEYWORD = "keyword";

    /** Knowledge-base references key. */
    public static final String REFERENCES = "references";

    // --- chat-completion request/response keys ---
    public static final String AUTHORIZATION = "authorization";
    public static final String TEMPERATURE = "temperature";
    public static final String TOP_P = "top_p";
    public static final String MODEL = "model";
    public static final String PROMPT = "prompt";
    public static final String MASK_SENSITIVE_INFO = "mask_sensitive_info";
    public static final String MAX_TOKENS = "max_tokens";
    public static final String CHOICES = "choices";
    public static final String CONTENT = "content";

    /** Chat body key for the message list. */
    public static final String MESSAGES = "messages";
    /** Streaming flag key. */
    public static final String STREAM = "stream";
    /** Response description key. */
    public static final String MESSAGE = "message";
    /** Empty string constant. */
    public static final String EMPTY = "";
    /** Vector-result key. */
    public static final String PROMPTRES = "promptRes";
    /** Chat role value "user". */
    public static final String USER = "user";
    /** Chat role key. */
    public static final String ROLE = "role";
    /** Prompt-template placeholder replaced by the search keyword. */
    public static final String KEYWORD_MARK = "${keyword}";
    /** Prompt-template placeholder replaced by the researcher information. */
    public static final String INFORMATION_MARK = "${information}";
}
||||
@ -0,0 +1,24 @@ |
|||||
|
package com.bw.search.entity;

import java.util.List;

import lombok.Data;

/**
 * Search response payload: the query keyword plus the ids of matching
 * documents. Lombok {@code @Data} supplies getters/setters/equals/hashCode.
 */
@Data
public class SearchResponse {

    // The search keyword that produced this result
    private String keyword;

    // Ids of the matched knowledge documents — presumably ES "knowId" values; verify against caller
    private List<String> ids;

    /** No-arg constructor for serialization frameworks. */
    public SearchResponse() {
    }

    /**
     * @param keyword search keyword
     * @param ids     matched document ids
     */
    public SearchResponse(String keyword, List<String> ids) {
        this.keyword = keyword;
        this.ids = ids;
    }
}
||||
@ -0,0 +1,41 @@ |
|||||
|
package com.bw.search.process; |
||||
|
|
||||
|
import java.util.Map; |
||||
|
|
||||
|
import org.springframework.kafka.annotation.KafkaListener; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
import com.alibaba.fastjson.JSONObject; |
||||
|
import com.bw.search.cache.ConfigCache; |
||||
|
import com.bw.search.entity.Constants; |
||||
|
|
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
|
||||
|
|
||||
|
/** |
||||
|
* 接口响应结果操作类 |
||||
|
* @author jian.mao |
||||
|
* @date 2023年7月6日 |
||||
|
* @description |
||||
|
*/ |
||||
|
@Component |
||||
|
@Slf4j |
||||
|
public class ResultSendQueue { |
||||
|
/** |
||||
|
* kafka读取结果写入队列 |
||||
|
* @param message |
||||
|
*/ |
||||
|
@KafkaListener(topics = "#{kafkaConfig.getKafkaTopic()}") |
||||
|
public void consumeMessage(String message) { |
||||
|
// 处理接收到的消息逻辑 |
||||
|
try { |
||||
|
Map<String, Object> result = JSONObject.parseObject(message); |
||||
|
String key = result.get(Constants.ID).toString(); |
||||
|
ConfigCache.baseResult.put(key, result); |
||||
|
ConfigCache.searchResult.put(key, result); |
||||
|
log.info("消费知识:{}",key); |
||||
|
} catch (Exception e) { |
||||
|
log.error("结果集json转换失败,result:{},\n",message,e); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,21 @@ |
|||||
|
package com.bw.search.service;

/**
 * Streaming model (chat-completion) service interface.
 *
 * @author jian.mao
 * @date 2025年11月20日
 */
public interface ModelStreamService {

    /** Callbacks invoked as the model produces output. */
    interface StreamListener {
        /** Called for each content fragment received from the model. */
        void onMessage(String text);
        /** Called once when the stream finishes normally. */
        void onComplete();
        /** Called when the request or the stream fails. */
        void onError(Throwable t);
    }

    /**
     * Starts a streaming model call.
     *
     * @param prompt   raw JSON request body (contains at least the request id)
     * @param listener receives content fragments, completion and errors
     */
    void stream(String prompt, StreamListener listener);
}
||||
@ -0,0 +1,19 @@ |
|||||
|
package com.bw.search.service;

import com.bw.search.common.Res;

/**
 * RAG search service interface.
 *
 * @author jian.mao
 * @date 2025年11月6日
 */
public interface RagSearchService {

    /**
     * Runs a knowledge-base search.
     *
     * @param dataJson raw JSON request body
     * @return wrapped search result
     */
    public Res<?> search(String dataJson);
}
||||
@ -0,0 +1,252 @@ |
|||||
|
package com.bw.search.service.impl; |
||||
|
|
||||
|
import java.io.IOException; |
||||
|
import java.util.ArrayList; |
||||
|
import java.util.HashMap; |
||||
|
import java.util.List; |
||||
|
import java.util.Map; |
||||
|
import java.util.concurrent.TimeUnit; |
||||
|
|
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.beans.factory.annotation.Value; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import com.alibaba.fastjson.JSONArray; |
||||
|
import com.alibaba.fastjson.JSONObject; |
||||
|
import com.bw.search.cache.ConfigCache; |
||||
|
import com.bw.search.config.EsConfig; |
||||
|
import com.bw.search.entity.Constants; |
||||
|
import com.bw.search.service.ModelStreamService; |
||||
|
import com.bw.search.utils.DateUtil; |
||||
|
import com.bw.search.utils.FileUtil; |
||||
|
import com.bw.search.utils.SpringBootKafka; |
||||
|
|
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
import okhttp3.Call; |
||||
|
import okhttp3.Callback; |
||||
|
import okhttp3.MediaType; |
||||
|
import okhttp3.OkHttpClient; |
||||
|
import okhttp3.Request; |
||||
|
import okhttp3.RequestBody; |
||||
|
import okhttp3.Response; |
||||
|
import okhttp3.ResponseBody; |
||||
|
import okio.BufferedSource; |
||||
|
|
||||
|
|
||||
|
import com.alibaba.fastjson.JSON; |
||||
|
import org.springframework.http.*; |
||||
|
import org.springframework.util.Base64Utils; |
||||
|
import org.springframework.web.client.RestTemplate; |
||||
|
|
||||
|
import javax.annotation.Resource; |
||||
|
import java.nio.charset.StandardCharsets; |
||||
|
|
||||
|
|
||||
|
@Service |
||||
|
@Slf4j |
||||
|
public class ModelStreamServiceImpl implements ModelStreamService { |
||||
|
|
||||
|
@Value("${model.authorization}") |
||||
|
private String authorization; |
||||
|
@Value("${model.url}") |
||||
|
private String modelUrl; |
||||
|
@Value("${model.name}") |
||||
|
private String modelName; |
||||
|
@Autowired |
||||
|
private SpringBootKafka springBootKafka; |
||||
|
@Value("${kafka.producer.topic}") |
||||
|
private String topic; |
||||
|
@Value("${prompt.path}") |
||||
|
private String promptPath; |
||||
|
|
||||
|
@Resource |
||||
|
private EsConfig esConfig; |
||||
|
|
||||
|
private final RestTemplate restTemplate = new RestTemplate(); |
||||
|
|
||||
|
private final OkHttpClient client = new OkHttpClient.Builder() |
||||
|
.connectTimeout(30, TimeUnit.SECONDS) |
||||
|
.readTimeout(0, TimeUnit.SECONDS) // 流式接口,需要读超时为 0 |
||||
|
.writeTimeout(30, TimeUnit.SECONDS) |
||||
|
.build(); |
||||
|
|
||||
|
|
||||
|
@Override |
||||
|
public void stream(String dataJson, StreamListener listener) { |
||||
|
//转换对象 |
||||
|
JSONObject parseObject = JSONObject.parseObject(dataJson); |
||||
|
String id = parseObject.getString(Constants.ID); |
||||
|
// String keyword = parseObject.getString(Constants.KEWORD); |
||||
|
log.info("知识库检索开始-----"); |
||||
|
springBootKafka.send(topic, dataJson); |
||||
|
Map<String, Object> knowResult = getKnowledge(id); |
||||
|
if(knowResult == null) { |
||||
|
log.error("知识库获取异常!"); |
||||
|
return ; |
||||
|
} |
||||
|
log.info("知识库结果已获取:{}",JSONObject.toJSONString(knowResult)); |
||||
|
Float temperature = 0.7f; |
||||
|
Float topP = 0.95f; |
||||
|
List<Map<String, Object>> messages = new ArrayList<Map<String,Object>>(); |
||||
|
Map<String, Object> message = new HashMap<String, Object>(16); |
||||
|
//根据id获取源数据 |
||||
|
JSONObject searchRes = queryByIds(JSONObject.parseArray((String)knowResult.get(Constants.IDS), String.class)); |
||||
|
// log.info("数据关联结果:{}",searchRes.toJSONString()); |
||||
|
JSONObject hits = searchRes.getJSONObject("hits"); |
||||
|
//多个任务完整信息 |
||||
|
StringBuffer informations = new StringBuffer(); |
||||
|
List<Map<String, Object>> hitsList = (List<Map<String, Object>>) hits.get("hits"); |
||||
|
for (Map<String, Object> map : hitsList) { |
||||
|
Map<String, Object> _source = (Map<String, Object>) map.get("_source"); |
||||
|
informations.append(JSONObject.toJSONString(_source)+"\\n"); |
||||
|
} |
||||
|
String question = FileUtil.readPrompt(promptPath).replace(Constants.KEYWORD_MARK, (String)knowResult.get(Constants.KEYWORD)).replace(Constants.INFORMATION_MARK,informations.toString()) ; |
||||
|
message.put(Constants.CONTENT, question); |
||||
|
message.put(Constants.ROLE, Constants.USER); |
||||
|
messages.add(message); |
||||
|
log.info("question:{}",question);; |
||||
|
|
||||
|
Map<String, Object> headers = new HashMap<String, Object>(16); |
||||
|
Map<String, Object> params = new HashMap<String, Object>(16); |
||||
|
|
||||
|
params.put(Constants.MODEL, modelName); |
||||
|
params.put(Constants.MESSAGES, messages); |
||||
|
params.put(Constants.TEMPERATURE, temperature); |
||||
|
params.put(Constants.TOP_P, topP); |
||||
|
params.put(Constants.STREAM, true); |
||||
|
params.put(Constants.MAX_TOKENS, 8192); |
||||
|
|
||||
|
headers.put(Constants.AUTHORIZATION, authorization); |
||||
|
headers.put("Content-Type", "application/json"); |
||||
|
Request request = new Request.Builder().url(modelUrl) |
||||
|
.header(Constants.AUTHORIZATION, authorization).header("Content-Type", "application/json") |
||||
|
.post(RequestBody.create(MediaType.parse("application/json"), JSONObject.toJSONString(params))).build(); |
||||
|
|
||||
|
client.newCall(request).enqueue(new Callback() { |
||||
|
|
||||
|
@Override |
||||
|
public void onFailure(Call call, IOException e) { |
||||
|
listener.onError(e); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public void onResponse(Call call, Response response) { |
||||
|
// 使用 try-with-resources 确保 ResponseBody 自动关闭 |
||||
|
try (ResponseBody body = response.body()) { |
||||
|
if (body == null) { |
||||
|
listener.onError(new IOException("Response body is null")); |
||||
|
return; |
||||
|
} |
||||
|
|
||||
|
BufferedSource source = body.source(); |
||||
|
while (!source.exhausted()) { |
||||
|
String line = source.readUtf8Line(); |
||||
|
if (line == null || !line.startsWith("data:")) |
||||
|
continue; |
||||
|
|
||||
|
if ("data: [DONE]".equals(line)) { |
||||
|
listener.onComplete(); |
||||
|
break; |
||||
|
} |
||||
|
|
||||
|
String json = line.substring("data:".length()).trim(); |
||||
|
String content = extractContent(json); |
||||
|
log.info("流式内容:{}",content); |
||||
|
if (content != null && !content.isEmpty()) { |
||||
|
listener.onMessage(content); |
||||
|
} |
||||
|
} |
||||
|
} catch (Exception e) { |
||||
|
listener.onError(e); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
private String extractContent(String json) { |
||||
|
try { |
||||
|
// 使用 JSON 解析,安全可靠 |
||||
|
JSONObject obj = JSONObject.parseObject(json); |
||||
|
JSONArray choices = obj.getJSONArray("choices"); |
||||
|
if (choices != null && !choices.isEmpty()) { |
||||
|
JSONObject delta = choices.getJSONObject(0).getJSONObject("delta"); |
||||
|
if (delta != null && delta.containsKey("content")) { |
||||
|
return delta.getString("content"); |
||||
|
} |
||||
|
} |
||||
|
} catch (Exception ignored) {} |
||||
|
return null; |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* 获取知识库结果 |
||||
|
* 同步 |
||||
|
* @param chatId |
||||
|
* @return |
||||
|
*/ |
||||
|
private Map<String, Object> getKnowledge(String id) { |
||||
|
Map<String, Object> knowResult = null; |
||||
|
int retryTime = 30; |
||||
|
while(ConfigCache.isStart) { |
||||
|
if(retryTime > 0) { |
||||
|
if(ConfigCache.baseResult.containsKey(id)) { |
||||
|
knowResult = (Map<String, Object>) ConfigCache.baseResult.get(id); |
||||
|
break; |
||||
|
}else { |
||||
|
log.info("知识获取中请稍后...{}",retryTime); |
||||
|
DateUtil.sleep(1000); |
||||
|
} |
||||
|
retryTime --; |
||||
|
}else { |
||||
|
break; |
||||
|
} |
||||
|
} |
||||
|
//获取之后删除 |
||||
|
ConfigCache.baseResult.remove(id); |
||||
|
return knowResult; |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* 按 ID 批量查询 |
||||
|
*/ |
||||
|
public JSONObject queryByIds(List<String> ids) { |
||||
|
|
||||
|
// 1️⃣ 拼接 ES URL |
||||
|
String url = esConfig.getHost() |
||||
|
+ "/" + esConfig.getIndex() |
||||
|
+ "/_search"; |
||||
|
|
||||
|
// 2️⃣ 构建 terms 查询 |
||||
|
Map<String, Object> terms = new HashMap<>(); |
||||
|
terms.put("knowId", ids); |
||||
|
|
||||
|
Map<String, Object> query = new HashMap<>(); |
||||
|
query.put("terms", terms); |
||||
|
|
||||
|
Map<String, Object> body = new HashMap<>(); |
||||
|
body.put("query", query); |
||||
|
body.put("size", ids.size()); |
||||
|
|
||||
|
String jsonBody = JSON.toJSONString(body); |
||||
|
|
||||
|
// 3️⃣ 构建请求头(带 Basic Auth) |
||||
|
HttpHeaders headers = new HttpHeaders(); |
||||
|
headers.set("Content-Type", "application/json"); |
||||
|
|
||||
|
String auth = esConfig.getUsername() + ":" + esConfig.getPassword(); |
||||
|
String encodedAuth = Base64Utils |
||||
|
.encodeToString(auth.getBytes(StandardCharsets.UTF_8)); |
||||
|
headers.set("Authorization", "Basic " + encodedAuth); |
||||
|
|
||||
|
HttpEntity<String> requestEntity = |
||||
|
new HttpEntity<>(jsonBody, headers); |
||||
|
|
||||
|
// 4️⃣ 发起请求 |
||||
|
ResponseEntity<String> response = restTemplate |
||||
|
.postForEntity(url, requestEntity, String.class); |
||||
|
|
||||
|
// 5️⃣ 返回 JSON 结果 |
||||
|
return JSON.parseObject(response.getBody()); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,68 @@ |
|||||
|
package com.bw.search.service.impl; |
||||
|
|
||||
|
import java.util.Map; |
||||
|
|
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import com.alibaba.fastjson.JSONObject; |
||||
|
import com.bw.search.cache.ConfigCache; |
||||
|
import com.bw.search.common.Res; |
||||
|
import com.bw.search.entity.Constants; |
||||
|
import com.bw.search.entity.SearchResponse; |
||||
|
import com.bw.search.service.RagSearchService; |
||||
|
import com.bw.search.utils.DateUtil; |
||||
|
|
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
|
||||
|
@Service |
||||
|
@Slf4j |
||||
|
public class RagSearchServiceImpl implements RagSearchService { |
||||
|
|
||||
|
@Override |
||||
|
public Res<?> search(String dataJson) { |
||||
|
log.info("向量检索参数:{}",dataJson); |
||||
|
//转换对象 |
||||
|
JSONObject parseObject = JSONObject.parseObject(dataJson); |
||||
|
String id = parseObject.getString(Constants.ID); |
||||
|
Map<String, Object> knowResult = getKnowledge(id); |
||||
|
if(knowResult == null) { |
||||
|
log.error("向量检索失败!"); |
||||
|
return Res.fail("知识库获取失败!"); |
||||
|
} |
||||
|
log.info("知识库结果已获取:{}",JSONObject.toJSONString(knowResult)); |
||||
|
//响应体数据 |
||||
|
SearchResponse searchResponse = new SearchResponse(); |
||||
|
searchResponse.setIds(JSONObject.parseArray((String)knowResult.get(Constants.IDS), String.class)); |
||||
|
return Res.ok(searchResponse); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
|
||||
|
/** |
||||
|
* 获取知识库结果 |
||||
|
* 同步 |
||||
|
* @param chatId |
||||
|
* @return |
||||
|
*/ |
||||
|
private Map<String, Object> getKnowledge(String id) { |
||||
|
Map<String, Object> knowResult = null; |
||||
|
int retryTime = 30; |
||||
|
while(ConfigCache.isStart) { |
||||
|
if (retryTime > 0) { |
||||
|
if(ConfigCache.searchResult.containsKey(id)) { |
||||
|
knowResult = (Map<String, Object>) ConfigCache.searchResult.get(id); |
||||
|
break; |
||||
|
}else { |
||||
|
log.info("知识获取中请稍后...{}",retryTime); |
||||
|
DateUtil.sleep(1000); |
||||
|
} |
||||
|
retryTime --; |
||||
|
}else { |
||||
|
break; |
||||
|
} |
||||
|
} |
||||
|
//获取之后删除 |
||||
|
ConfigCache.searchResult.remove(id); |
||||
|
return knowResult; |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,177 @@ |
|||||
|
package com.bw.search.utils; |
||||
|
|
||||
|
|
||||
|
import java.math.BigInteger; |
||||
|
import java.security.MessageDigest; |
||||
|
import java.security.NoSuchAlgorithmException; |
||||
|
import java.text.ParseException; |
||||
|
import java.text.SimpleDateFormat; |
||||
|
import java.time.LocalDateTime; |
||||
|
import java.time.format.DateTimeFormatter; |
||||
|
import java.util.Date; |
||||
|
|
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
|
||||
|
import com.alibaba.fastjson.JSON; |
||||
|
import com.alibaba.fastjson.JSONObject; |
||||
|
|
||||
|
/** |
||||
|
* 日期工具类 |
||||
|
* |
||||
|
* @author jian.mao |
||||
|
* @date 2022年11月15日 |
||||
|
* @description |
||||
|
*/ |
||||
|
@Slf4j |
||||
|
public class DateUtil { |
||||
|
|
||||
|
/** |
||||
|
* @return |
||||
|
*/ |
||||
|
public static String getTimeStrForNow() { |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHH"); |
||||
|
return sdf.format(new Date()); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
public static String getTimeStrForDay(long time) { |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); |
||||
|
|
||||
|
return sdf.format(new Date(time * 1000)); |
||||
|
} |
||||
|
|
||||
|
public static String getTimeStrForDay() { |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); |
||||
|
|
||||
|
return sdf.format(new Date()); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
public static String getDateTime() { |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
String time = sdf.format(new Date()); |
||||
|
return time; |
||||
|
} |
||||
|
|
||||
|
public static String getDateTime(Long timestap) { |
||||
|
|
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
String time = sdf.format(new Date(timestap)); |
||||
|
return time; |
||||
|
} |
||||
|
|
||||
|
public static String getDate(Long timestap) { |
||||
|
|
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); |
||||
|
String time = sdf.format(new Date(timestap)); |
||||
|
return time; |
||||
|
} |
||||
|
|
||||
|
public static String getDateTimeForMonth() { |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMM"); |
||||
|
String time = sdf.format(new Date()); |
||||
|
return time; |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* 休眠 |
||||
|
* |
||||
|
* @param millis 毫秒 |
||||
|
*/ |
||||
|
public static void sleep(long millis) { |
||||
|
try { |
||||
|
Thread.sleep(millis); |
||||
|
} catch (InterruptedException e) { |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* 1. @Description:时间戳转时间 |
||||
|
* 2. @Author: ying.zhao |
||||
|
* 3. @Date: 2023/3/28 |
||||
|
*/ |
||||
|
|
||||
|
public static String timestampToDate(String time) { |
||||
|
int thirteen = 13; |
||||
|
int ten = 10; |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
// if (time.length() == thirteen) { |
||||
|
if (time.length() > ten) { |
||||
|
return sdf.format(new Date(Long.parseLong(time))); |
||||
|
} else { |
||||
|
return sdf.format(new Date(Integer.parseInt(time) * 1000L)); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
public static String parseCreated(String jsonTime){ |
||||
|
String formattedDateTime = getDateTime(); |
||||
|
try { |
||||
|
// 使用fastjson解析JSON数据 |
||||
|
JSONObject jsonObject = JSON.parseObject(jsonTime); |
||||
|
// 获取日期和时间的值 |
||||
|
JSONObject dateObject = jsonObject.getJSONObject("date"); |
||||
|
int day = dateObject.getIntValue("day"); |
||||
|
int month = dateObject.getIntValue("month"); |
||||
|
int year = dateObject.getIntValue("year"); |
||||
|
|
||||
|
JSONObject timeObject = jsonObject.getJSONObject("time"); |
||||
|
int hour = timeObject.getIntValue("hour"); |
||||
|
int minute = timeObject.getIntValue("minute"); |
||||
|
int second = timeObject.getIntValue("second"); |
||||
|
|
||||
|
// 创建LocalDateTime对象 |
||||
|
LocalDateTime dateTime = LocalDateTime.of(year, month, day, hour, minute, second); |
||||
|
|
||||
|
// 定义日期时间格式化器 |
||||
|
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); |
||||
|
|
||||
|
// 格式化日期时间 |
||||
|
formattedDateTime = dateTime.format(formatter); |
||||
|
} catch (Exception e) { |
||||
|
log.info("日期转换失败:{}",e); |
||||
|
} |
||||
|
return formattedDateTime; |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* 字符串转换日期 |
||||
|
* @param format |
||||
|
* @param date |
||||
|
* @return |
||||
|
*/ |
||||
|
public static Date strToDate(String format,String date){ |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat(format); |
||||
|
if (date == null || date.equals("")){ |
||||
|
return new Date(); |
||||
|
}else{ |
||||
|
Date ru = null; |
||||
|
try { |
||||
|
ru = sdf.parse(date); |
||||
|
} catch (ParseException e) { |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
return ru; |
||||
|
} |
||||
|
} |
||||
|
/** |
||||
|
* 日期格式话 |
||||
|
* @param format 日期格式 |
||||
|
* @param dater 要转换的日期,默认当前时间 |
||||
|
* @return |
||||
|
*/ |
||||
|
public static String FormatDate(String format,Date date){ |
||||
|
String fromatDate = null; |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat(format); |
||||
|
if (date == null){ |
||||
|
fromatDate = sdf.format(new Date()); |
||||
|
}else{ |
||||
|
fromatDate = sdf.format(date); |
||||
|
} |
||||
|
return fromatDate; |
||||
|
} |
||||
|
public static void main(String[] args) { |
||||
|
String time = timestampToDate("955814400000"); |
||||
|
System.out.println(time); |
||||
|
} |
||||
|
} |
||||
1007
src/main/java/com/bw/search/utils/DownLoadUtil.java
File diff suppressed because it is too large
View File
File diff suppressed because it is too large
View File
@ -0,0 +1,60 @@ |
|||||
|
package com.bw.search.utils; |
||||
|
|
||||
|
import java.io.File; |
||||
|
import java.io.FileWriter; |
||||
|
import java.io.IOException; |
||||
|
import java.util.List; |
||||
|
import java.util.Map; |
||||
|
import java.util.concurrent.LinkedBlockingDeque; |
||||
|
|
||||
|
import org.apache.commons.io.FileUtils; |
||||
|
|
||||
|
import com.alibaba.fastjson.JSONObject; |
||||
|
|
||||
|
/**
 * 文件工具类 — append-write and prompt-file reading helpers.
 *
 * @author jian.mao
 * @date 2023年7月14日
 * @description
 */
public class FileUtil {

    /**
     * 数据写入文件 — appends one line of data (with a trailing newline) to
     * the given file.
     *
     * @param path 文件路径 (target file path)
     * @param result 数据 (line to append)
     */
    public static void writeFile(String path,String result){
        // try-with-resources closes the writer even when the write throws
        // (the original skipped close() on exception, leaking the handle).
        try (FileWriter fw = new FileWriter(path, true)) {
            fw.write(result + "\n");
            fw.flush();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * 读取prompt — reads the prompt file and concatenates all of its lines
     * (no separators) into one string.
     *
     * @param path the prompt file path
     * @return the concatenated content; empty string when the file is
     *         missing or unreadable
     */
    public static String readPrompt(String path){
        StringBuilder prompt = new StringBuilder();
        File file = new File(path);
        if (file.exists()) {
            try {
                // java.nio replaces the commons-io FileUtils.readLines call;
                // iterating inside the try also fixes the NPE the original
                // hit when readLines threw and the loop ran over a null list.
                for (String line : Files.readAllLines(file.toPath(), StandardCharsets.UTF_8)) {
                    prompt.append(line);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return prompt.toString();
    }
}
||||
@ -0,0 +1,45 @@ |
|||||
|
package com.bw.search.utils; |
||||
|
|
||||
|
import com.alibaba.fastjson.JSONObject; |
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.kafka.core.KafkaTemplate; |
||||
|
import org.springframework.kafka.support.SendResult; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
import org.springframework.util.concurrent.ListenableFuture; |
||||
|
import org.springframework.util.concurrent.ListenableFutureCallback; |
||||
|
|
||||
|
/** |
||||
|
* @DESCRIPTION:SpringBootKafka 工具类 |
||||
|
*/ |
||||
|
@Slf4j |
||||
|
@Component |
||||
|
public class SpringBootKafka { |
||||
|
@Autowired |
||||
|
private KafkaTemplate<String, Object> kafkaTemplate; |
||||
|
/** |
||||
|
* 自定义topicKafkaTemplate |
||||
|
*/ |
||||
|
/** |
||||
|
* public static final String TOPIC = "companyBussTest"; |
||||
|
**/ |
||||
|
public void send(String topic, String message) { |
||||
|
String obj2String = JSONObject.toJSONString(message); |
||||
|
log.info("准备发送消息为:{}", obj2String); |
||||
|
//发送消息 |
||||
|
ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send(topic, message); |
||||
|
future.addCallback(new ListenableFutureCallback<SendResult<String, Object>>() { |
||||
|
@Override |
||||
|
public void onFailure(Throwable throwable) { |
||||
|
//发送失败的处理 |
||||
|
log.info(topic + " - 生产者 发送消息失败:" + throwable.getMessage()); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public void onSuccess(SendResult<String, Object> stringObjectSendResult) { |
||||
|
//成功的处理 |
||||
|
log.info(topic + " - 生产者 发送消息成功:" + stringObjectSendResult.toString()); |
||||
|
} |
||||
|
}); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,23 @@ |
|||||
|
package com.bw.search.utils; |
||||
|
|
||||
|
import java.io.PrintWriter; |
||||
|
import java.io.StringWriter; |
||||
|
|
||||
|
/**
 * Renders throwables as text.
 *
 * @author jian.mao
 * @date 2023年3月22日
 * @description
 */
public class ThrowMessageUtil {

    /**
     * Renders a throwable's full stack trace as a single string, exactly as
     * {@link Throwable#printStackTrace} would print it.
     *
     * @param t the throwable to render
     * @return the complete stack-trace text
     */
    public static String getErrmessage(Throwable t) {
        StringWriter buffer = new StringWriter();
        PrintWriter writer = new PrintWriter(buffer, true);
        t.printStackTrace(writer);
        return buffer.toString();
    }
}
||||
@ -0,0 +1,73 @@ |
|||||
|
logging: |
||||
|
level: |
||||
|
root: info |
||||
|
path: ../logs |
||||
|
server: |
||||
|
port: 8001 |
||||
|
servlet: |
||||
|
context-path: /nvps |
||||
|
tomcat: |
||||
|
uri-encoding: utf-8 |
||||
|
max-connections: 20000 |
||||
|
max-http-form-post-size: 1 |
||||
|
max-threads: 1000 |
||||
|
model: |
||||
|
url: https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions |
||||
|
    # NOTE(review): live API key committed to the repository — rotate this key and load it from an environment variable or secret store instead.
    authorization: Bearer sk-c5f56c1c8a6447b3a6c646a3f14085c4 |
||||
|
name: qwen-max-latest |
||||
|
spring: |
||||
|
kafka: |
||||
|
bootstrap-servers: node-01:19092,node-02:19092,node-03:19092 |
||||
|
producer: |
||||
|
retries: 0 |
||||
|
#当有多个消息需要被发送到同一个分区时,生产者会把它们放在同一个批次里。该参数指定了一个批次可以使用的内存大小,按照字节数计算。 |
||||
|
batch-size: 16384 |
||||
|
# 设置生产者内存缓冲区的大小。 |
||||
|
buffer-memory: 33554432 |
||||
|
# 键的序列化方式 |
||||
|
key-serializer: org.apache.kafka.common.serialization.StringSerializer |
||||
|
# 值的序列化方式 |
||||
|
value-serializer: org.apache.kafka.common.serialization.StringSerializer |
||||
|
# acks=0 : 生产者在成功写入消息之前不会等待任何来自服务器的响应。 |
||||
|
# acks=1 : 只要集群的首领节点收到消息,生产者就会收到一个来自服务器成功响应。 |
||||
|
# acks=all :只有当所有参与复制的节点全部收到消息时,生产者才会收到一个来自服务器的成功响应。 |
||||
|
acks: 1 |
||||
|
consumer: |
||||
|
# 自动提交的时间间隔 在spring boot 2.X 版本中这里采用的是值的类型为Duration 需要符合特定的格式,如1S,1M,2H,5D |
||||
|
auto-commit-interval: 1S |
||||
|
# 该属性指定了消费者在读取一个没有偏移量的分区或者偏移量无效的情况下该作何处理: |
||||
|
# latest(默认值)在偏移量无效的情况下,消费者将从最新的记录开始读取数据(在消费者启动之后生成的记录) |
||||
|
# earliest :在偏移量无效的情况下,消费者将从起始位置读取分区的记录 |
||||
|
auto-offset-reset: earliest |
||||
|
# 是否自动提交偏移量,默认值是true,为了避免出现重复数据和数据丢失,可以把它设置为false,然后手动提交偏移量 |
||||
|
enable-auto-commit: true |
||||
|
# 键的反序列化方式 |
||||
|
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer |
||||
|
# 值的反序列化方式 |
||||
|
value-deserializer: org.apache.kafka.common.serialization.StringDeserializer |
||||
|
#消费组 |
||||
|
group-id: test4 |
||||
|
#消费者并发线程数 |
||||
|
concurrency: 4 |
||||
|
#超时时间 |
||||
|
max-poll-interval-ms: 60000 |
||||
|
#listener: |
||||
|
# 在侦听器容器中运行的线程数。 |
||||
|
#concurrency: 5 |
||||
|
#listner负责ack,每调用一次,就立即commit |
||||
|
#ack-mode: manual_immediate |
||||
|
#missing-topics-fatal: false |
||||
|
es: |
||||
|
host: http://127.0.0.1:9200 |
||||
|
username: elastic |
||||
|
password: es@123 |
||||
|
index: nvps_person_info |
||||
|
|
||||
|
kafka: |
||||
|
consumer: |
||||
|
#自定义topic |
||||
|
topic: nvps_search_data |
||||
|
producer: |
||||
|
topic: nvps_know_base |
||||
|
prompt: |
||||
|
path: ./prompt |
||||
@ -0,0 +1,36 @@ |
|||||
|
<configuration> |
||||
|
<!-- 属性文件:在properties文件中找到对应的配置项 --> |
||||
|
<springProperty scope="context" name="logging.path" source="logging.path"/> |
||||
|
<springProperty scope="context" name="logging.level" source="logging.level.com.bfd"/> |
||||
|
<!-- 默认的控制台日志输出,一般生产环境都是后台启动,这个没太大作用 --> |
||||
|
<!-- <appender name="STDOUT" |
||||
|
class="ch.qos.logback.core.ConsoleAppender"> |
||||
|
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"> |
||||
|
<Pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %line %-5level %logger{50} - %msg%n</Pattern> |
||||
|
</encoder> |
||||
|
</appender> --> |
||||
|
|
||||
|
<appender name="GLMAPPER-LOGGERONE" |
||||
|
class="ch.qos.logback.core.rolling.RollingFileAppender"> |
||||
|
<append>true</append> |
||||
|
<filter class="ch.qos.logback.classic.filter.ThresholdFilter"> |
||||
|
<level>${logging.level}</level> |
||||
|
</filter> |
||||
|
<file> |
||||
|
${logging.path}/ragEngineInfo.log |
||||
|
</file> |
||||
|
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> |
||||
|
<FileNamePattern>${logging.path}/ragEngineInfo.log.%d{yyyy-MM-dd}</FileNamePattern> |
||||
|
<MaxHistory>7</MaxHistory> |
||||
|
</rollingPolicy> |
||||
|
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"> |
||||
|
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %line %-5level %logger{50} - %msg%n</pattern> |
||||
|
<charset>UTF-8</charset> |
||||
|
</encoder> |
||||
|
</appender> |
||||
|
|
||||
|
<root level="info"> |
||||
|
<appender-ref ref="GLMAPPER-LOGGERONE"/> |
||||
|
<!-- <appender-ref ref="STDOUT"/> --> |
||||
|
</root> |
||||
|
</configuration> |
||||
@ -0,0 +1,38 @@ |
|||||
|
package com.bw; |
||||
|
|
||||
|
import junit.framework.Test; |
||||
|
import junit.framework.TestCase; |
||||
|
import junit.framework.TestSuite; |
||||
|
|
||||
|
/** |
||||
|
* Unit test for simple App. |
||||
|
*/ |
||||
|
public class AppTest |
||||
|
extends TestCase |
||||
|
{ |
||||
|
/** |
||||
|
* Create the test case |
||||
|
* |
||||
|
* @param testName name of the test case |
||||
|
*/ |
||||
|
public AppTest( String testName ) |
||||
|
{ |
||||
|
super( testName ); |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* @return the suite of tests being tested |
||||
|
*/ |
||||
|
public static Test suite() |
||||
|
{ |
||||
|
return new TestSuite( AppTest.class ); |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* Rigourous Test :-) |
||||
|
*/ |
||||
|
public void testApp() |
||||
|
{ |
||||
|
assertTrue( true ); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,4 @@ |
|||||
|
/classes/ |
||||
|
/test-classes/ |
||||
|
/logs/ |
||||
|
/target/ |
||||
Write
Preview
Loading…
Cancel
Save
Reference in new issue