commit 5b753adbfc
82 changed files with 12795 additions and 0 deletions
.gitattributes | 2
.gitignore | 33
.mvn/wrapper/maven-wrapper.properties | 19
data/1.txt | 50
data/20250519100453.xls | BIN
data/subjectIds.txt | 142
etc/.properties | 68
etc/145redis.properties | 7
etc/1_kafka.properties | 18
etc/2_kafka.properties | 18
etc/3_kafka.properties | 18
etc/3xf3z9ecg4cuggc | 1
etc/4_kafka.properties | 20
etc/5_kafka.properties | 18
etc/db.properties | 10
etc/elasticsearch_1.properties | 10
etc/elasticsearch_2.properties | 10
etc/elasticsearch_3.properties | 14
etc/log4j.properties | 42
etc/logback.xml | 154
etc/posTag.txt | 36
etc/redis.properties | 7
etc/stopWords.txt | 1280
etc/stopWordsEN.txt | 891
foreground | 1
log/kafka.log | 0
mvnw | 259
mvnw.cmd | 149
pom.xml | 327
src/main/java/com/zyzs/otherdatasave/ExcelReader.java | 25
src/main/java/com/zyzs/otherdatasave/OtherDatasaveApplication.java | 146
src/main/java/com/zyzs/otherdatasave/bean/Clini.java | 38
src/main/java/com/zyzs/otherdatasave/bean/ClinikJ.java | 40
src/main/java/com/zyzs/otherdatasave/bean/Drug.java | 55
src/main/java/com/zyzs/otherdatasave/bean/Eqiupment.java | 27
src/main/java/com/zyzs/otherdatasave/bean/ExcelData.java | 129
src/main/java/com/zyzs/otherdatasave/bean/ExcelListener.java | 26
src/main/java/com/zyzs/otherdatasave/bean/Paper.java | 46
src/main/java/com/zyzs/otherdatasave/bean/Patent.java | 47
src/main/java/com/zyzs/otherdatasave/bean/Patentkj.java | 38
src/main/java/com/zyzs/otherdatasave/bean/Proj.java | 34
src/main/java/com/zyzs/otherdatasave/cache/CacheObject.java | 24
src/main/java/com/zyzs/otherdatasave/cache/Constants.java | 533
src/main/java/com/zyzs/otherdatasave/cache/ProjectCache.java | 29
src/main/java/com/zyzs/otherdatasave/cache/UpdateCacheThread.java | 63
src/main/java/com/zyzs/otherdatasave/config/AppConfig.java | 714
src/main/java/com/zyzs/otherdatasave/config/BFDApiConfig.java | 1116
src/main/java/com/zyzs/otherdatasave/config/ESConstants.java | 1092
src/main/java/com/zyzs/otherdatasave/service/QueryDrug.java | 97
src/main/java/com/zyzs/otherdatasave/service/QueryEq.java | 80
src/main/java/com/zyzs/otherdatasave/service/QueryPaper.java | 160
src/main/java/com/zyzs/otherdatasave/service/QueryPatent.java | 229
src/main/java/com/zyzs/otherdatasave/service/QueryProj.java | 98
src/main/java/com/zyzs/otherdatasave/service/Queryclini.java | 98
src/main/java/com/zyzs/otherdatasave/service/QuerycliniKJ.java | 52
src/main/java/com/zyzs/otherdatasave/service/Querykafka.java | 69
src/main/java/com/zyzs/otherdatasave/service/Queryorg.java | 147
src/main/java/com/zyzs/otherdatasave/util/AllKeys.java | 202
src/main/java/com/zyzs/otherdatasave/util/AllKeys6812.java | 255
src/main/java/com/zyzs/otherdatasave/util/AllKeys6813.java | 176
src/main/java/com/zyzs/otherdatasave/util/AllKeys6814.java | 53
src/main/java/com/zyzs/otherdatasave/util/AllKeys6815.java | 233
src/main/java/com/zyzs/otherdatasave/util/AllKeysPatent.java | 260
src/main/java/com/zyzs/otherdatasave/util/DataCheckUtil.java | 315
src/main/java/com/zyzs/otherdatasave/util/DateUtil.java | 936
src/main/java/com/zyzs/otherdatasave/util/EsUtils.java | 644
src/main/java/com/zyzs/otherdatasave/util/MfMD5Util.java | 53
src/main/java/com/zyzs/otherdatasave/util/Patentdub.java | 4
src/main/java/com/zyzs/otherdatasave/util/ReadLine.java | 234
src/main/java/com/zyzs/otherdatasave/worker/AbstractWorker.java | 87
src/main/java/com/zyzs/otherdatasave/worker/QueryDrugProducer.java | 41
src/main/java/com/zyzs/otherdatasave/worker/QueryEqProducer.java | 39
src/main/java/com/zyzs/otherdatasave/worker/QueryKfkaProducer.java | 41
src/main/java/com/zyzs/otherdatasave/worker/QueryPaperProducer.java | 40
src/main/java/com/zyzs/otherdatasave/worker/QueryPatentProducer.java | 41
src/main/java/com/zyzs/otherdatasave/worker/QueryProProducer.java | 39
src/main/java/com/zyzs/otherdatasave/worker/QuerycliniProducer.java | 40
src/main/java/com/zyzs/otherdatasave/worker/QueryorgProducer.java | 41
src/main/java/com/zyzs/otherdatasave/worker/Worker.java | 24
src/main/java/com/zyzs/otherdatasave/worker/readtxt.java | 25
src/main/resources/application.yml | 103
src/test/java/com/zyzs/otherdatasave/OtherDatasaveApplicationTests.java | 13
.gitattributes
@@ -0,0 +1,2 @@
/mvnw text eol=lf
*.cmd text eol=crlf
.gitignore
@@ -0,0 +1,33 @@
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/

### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache

### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr

### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/

### VS Code ###
.vscode/
.mvn/wrapper/maven-wrapper.properties
@@ -0,0 +1,19 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
wrapperVersion=3.3.2
distributionType=only-script
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip
data/1.txt
@@ -0,0 +1,50 @@
176 7 |
|||
104 2 |
|||
115 6 |
|||
101 3 |
|||
125 3 |
|||
141 3 |
|||
142 45 |
|||
122 2 |
|||
117 4 |
|||
1 13 |
|||
55 4 |
|||
95 7 |
|||
92 6 |
|||
94 7 |
|||
128 22 |
|||
98 9 |
|||
51 1 |
|||
24 1 |
|||
44 5 |
|||
26 4 |
|||
57 9 |
|||
56 2 |
|||
62 2 |
|||
61 4 |
|||
6 8 |
|||
65 1 |
|||
97 2 |
|||
4 1 |
|||
134 2 |
|||
17 1 |
|||
121 3 |
|||
102 5 |
|||
105 1 |
|||
11 2 |
|||
131 1 |
|||
164 2 |
|||
71 2 |
|||
46 2 |
|||
107 2 |
|||
16 1 |
|||
167 1 |
|||
72 1 |
|||
127 3 |
|||
165 1 |
|||
177 1 |
|||
84 1 |
|||
8 1 |
|||
124 1 |
|||
108 1 |
|||
66 1 |
|||
data/subjectIds.txt
@@ -0,0 +1,142 @@
54 津巴布韦 -17.8282 31.0489 |
|||
55 中国 39.904207 116.407396 |
|||
56 日本 35.689497 139.766944 |
|||
57 韩国 37.566579 126.977991 |
|||
58 朝鲜 39.035376 125.755509 |
|||
59 蒙古 40.843345 106.844373 |
|||
60 越南 21.023691 105.845209 |
|||
61 老挝 17.957396 102.614585 |
|||
62 柬埔寨 11.588607 104.995713 |
|||
63 泰国 13.717539 100.512403 |
|||
64 缅甸 16.787839 96.144222 |
|||
65 马来西亚 3.146372 101.686853 |
|||
66 新加坡 1.293145 103.850107 |
|||
67 印度尼西亚 -6.175079 106.809755 |
|||
68 文莱 1.522319 115.173294 |
|||
69 菲律宾 14.608786 121.041768 |
|||
70 东帝汶 -8.550825 125.727857 |
|||
71 印度 28.613939 77.209026 |
|||
72 巴基斯坦 33.724076 73.076466 |
|||
73 孟加拉国 23.712954 90.407989 |
|||
74 尼泊尔 27.713286 85.323592 |
|||
75 不丹 27.416667 89.647222 |
|||
76 斯里兰卡 6.925278 79.861092 |
|||
77 马尔代夫 4.665417 73.219835 |
|||
78 阿富汗 34.528131 69.195492 |
|||
79 伊朗 35.6892 51.389 |
|||
80 伊拉克 33.3167 44.3667 |
|||
81 叙利亚 33.5 36.3167 |
|||
82 约旦 31.95 35.9333 |
|||
83 黎巴嫩 33.8833 35.5167 |
|||
84 以色列 31.7667 35.2333 |
|||
85 巴勒斯坦 31.8917 35.2333 |
|||
86 沙特阿拉伯 24.6667 46.7167 |
|||
87 巴林 26.2275 50.5877 |
|||
88 卡塔尔 25.2833 51.5333 |
|||
89 阿联酋 24.4667 54.3667 |
|||
90 阿曼 23.6167 58.5833 |
|||
91 也门 15.35 44.1833 |
|||
92 格鲁吉亚 41.7151 44.8269 |
|||
93 亚美尼亚 40.1875 44.5153 |
|||
94 阿塞拜疆 40.4093 49.8671 |
|||
95 哈萨克斯坦 51.1667 71.4333 |
|||
96 吉尔吉斯斯坦 42.8788 74.6142 |
|||
97 塔吉克斯坦 38.5542 68.7825 |
|||
98 乌兹别克斯坦 41.2995 69.2401 |
|||
99 土库曼斯坦 37.95 58.4167 |
|||
100 英国 51.5074 -0.1278 |
|||
101 法国 48.8566 2.3522 |
|||
102 爱尔兰 53.3498 -6.2489 |
|||
103 荷兰 52.3702 4.8952 |
|||
104 比利时 50.85 4.3676 |
|||
105 卢森堡 49.6116 6.1296 |
|||
106 摩纳哥 43.7351 7.4246 |
|||
107 意大利 41.8919 12.4964 |
|||
108 西班牙 40.4168 -3.7038 |
|||
109 葡萄牙 38.7223 -9.1393 |
|||
110 希腊 37.9667 23.7167 |
|||
111 梵蒂冈 41.9028 12.4534 |
|||
112 圣马力诺 43.9361 12.4597 |
|||
113 马耳他 35.8997 14.5147 |
|||
114 安道尔 42.5063 1.5218 |
|||
115 德国 52.5167 13.3833 |
|||
116 奥地利 48.2085 16.3721 |
|||
117 瑞士 46.948 7.4481 |
|||
118 波兰 52.2297 21.0122 |
|||
119 捷克 50.0755 14.4378 |
|||
120 斯洛伐克 48.1489 17.1075 |
|||
121 匈牙利 47.4979 19.0512 |
|||
122 瑞典 59.3328 18.0649 |
|||
123 挪威 59.9138 10.7387 |
|||
124 丹麦 55.6761 12.5683 |
|||
125 芬兰 60.1699 24.9384 |
|||
126 冰岛 64.1333 -21.9333 |
|||
127 俄罗斯 55.7558 37.6173 |
|||
128 乌克兰 50.4501 30.5234 |
|||
129 白俄罗斯 53.9 27.5649 |
|||
130 摩尔多瓦 47.0281 28.8497 |
|||
131 罗马尼亚 44.4322 26.1025 |
|||
132 保加利亚 42.6975 23.3241 |
|||
133 塞尔维亚 44.804 20.4681 |
|||
134 克罗地亚 45.815 15.8781 |
|||
135 斯洛文尼亚 46.0511 14.5058 |
|||
136 波黑 43.85 18.4241 |
|||
137 黑山 42.4622 19.4342 |
|||
138 阿尔巴尼亚 41.3275 19.8189 |
|||
139 北马其顿 42.0005 21.4281 |
|||
140 列支敦士登 47.05 9.5167 |
|||
141 加拿大 45.4215 -75.7031 |
|||
142 美国 38.8951 -77.0369 |
|||
143 墨西哥 19.4326 -99.1332 |
|||
144 危地马拉 14.624 -90.52 |
|||
145 伯利兹 17.1833 -88.8333 |
|||
146 萨尔瓦多 13.6806 -88.8721 |
|||
147 洪都拉斯 14.0722 -90.1889 |
|||
148 尼加拉瓜 12.175 -86.2413 |
|||
149 哥斯达黎加 9.9111 -101.924 |
|||
150 巴拿马 9.5056 -79.5378 |
|||
151 古巴 23.1099 -82.3099 |
|||
152 牙买加 17.9678 -76.7822 |
|||
153 海地 18.5167 -72.2822 |
|||
154 多米尼加共和国 18.4606 -70.1036 |
|||
155 圣基茨和尼维斯 17.1 -62.2333 |
|||
156 安提瓜和巴布达 17.35 -62.9667 |
|||
157 多米尼克 15.4167 -61.5833 |
|||
158 圣卢西亚 14.9667 -61.5 |
|||
159 圣文森特和格林纳丁斯 17.9833 -76.7917 |
|||
160 巴巴多斯 16.9333 -62.2 |
|||
161 格林纳达 16.8 -62.0833 |
|||
162 特立尼达和多巴哥 14.65 -61.5 |
|||
163 巴哈马 25.0333 -76.9647 |
|||
164 巴西 -15.7822 -47.9097 |
|||
165 阿根廷 -34.6036 -58.4425 |
|||
166 哥伦比亚 4.6028 -74.0828 |
|||
167 秘鲁 -12.05 -77.0333 |
|||
168 委内瑞拉 10.5 -66.8667 |
|||
169 智利 -33.4489 -70.6594 |
|||
170 厄瓜多尔 -0.1 -78.5 |
|||
171 乌拉圭 -34.9 -57.5833 |
|||
172 巴拉圭 -25.2833 -54.5667 |
|||
173 玻利维亚 -19.02 -65.25 |
|||
174 圭亚那 -16.5 -68.1353 |
|||
175 苏里南 6.8 -58.1833 |
|||
176 澳大利亚 5.8833 -55.2333 |
|||
177 新西兰 -35.2809 149.1244 |
|||
178 巴布亚新几内亚 -41.2864 174.7762 |
|||
179 斐济 -18.1333 178.4333 |
|||
180 所罗门群岛 -9.4333 160.2 |
|||
181 瓦努阿图 -17.7333 168.3167 |
|||
182 萨摩亚 -13.8167 -171.7667 |
|||
183 基里巴斯 1.35 173 |
|||
184 汤加 -21.1333 -175.1833 |
|||
185 密克罗尼西亚联邦 7.5 158.17 |
|||
186 马绍尔群岛 7.0833 171.3833 |
|||
187 帕劳 7.5 134.5667 |
|||
188 瑙鲁 -8.52 179.2 |
|||
189 图瓦卢 -8.52 179.2 |
|||
190 法属圭亚那 4.9667 -52.3333 |
|||
191 科威特 29.3117 47.9968 |
|||
192 土耳其 39.9199 32.8538 |
|||
193 波多黎各 18.4663 -66.1055 |
|||
194 立陶宛 54.6872 25.2867 |
|||
195 拉托维亚 56.9496 24.1052 |
|||
etc/.properties
@@ -0,0 +1,68 @@
crawl.kfk.metadata.broker.list=172.18.1.92:9092 |
|||
crawl.kfk.metadata.broker.list=172.18.1.92:9092 |
|||
crawl.kfk.metadata.broker.list=172.18.1.92:9092 |
|||
crawl.kfk.metadata.broker.list=172.18.1.92:9092 |
|||
crawl.kfk.metadata.broker.list=172.18.1.119:9992 |
|||
crawl.kfk.metadata.broker.list=172.18.1.119:9992 |
|||
crawl.kfk.metadata.broker.list=172.18.1.119:9992 |
|||
crawl.kfk.metadata.broker.list=172.18.1.119:9992 |
|||
crawl.kfk.connectionTimeOut=50000 |
|||
crawl.kfk.connectionTimeOut=50000 |
|||
crawl.kfk.connectionTimeOut=50000 |
|||
crawl.kfk.zk.sessiontimeout.ms=50000 |
|||
crawl.kfk.zk.sessiontimeout.ms=50000 |
|||
crawl.kfk.zk.sessiontimeout.ms=50000 |
|||
crawl.kfk.kafka.serializer=kafka.serializer.StringEncoder |
|||
crawl.kfk.connectionTimeOut=50000 |
|||
crawl.kfk.kafka.serializer=kafka.serializer.StringEncoder |
|||
crawl.kfk.producer.acks=all |
|||
crawl.kfk.kafka.serializer=kafka.serializer.StringEncoder |
|||
crawl.kfk.zk.sessiontimeout.ms=50000 |
|||
crawl.kfk.producer.retries=0 |
|||
crawl.kfk.producer.acks=all |
|||
crawl.kfk.producer.batch.size=16384 |
|||
crawl.kfk.kafka.serializer=kafka.serializer.StringEncoder |
|||
crawl.kfk.producer.acks=all |
|||
crawl.kfk.producer.linger.ms=1 |
|||
crawl.kfk.producer.retries=0 |
|||
crawl.kfk.producer.buffer.memory=33554432 |
|||
crawl.kfk.producer.retries=0 |
|||
crawl.kfk.producer.acks=all |
|||
crawl.kfk.producer.max.request.size=10485760 |
|||
crawl.kfk.producer.batch.size=16384 |
|||
crawl.kfk.producer.batch.size=16384 |
|||
crawl.kfk.consumer.enable.auto.commit=true |
|||
crawl.kfk.producer.retries=0 |
|||
crawl.kfk.producer.linger.ms=1 |
|||
crawl.kfk.consumer.auto.commit.interval.ms=1000 |
|||
crawl.kfk.producer.batch.size=16384 |
|||
crawl.kfk.producer.buffer.memory=33554432 |
|||
crawl.kfk.consumer.session.timeout.ms=30000 |
|||
crawl.kfk.producer.linger.ms=1 |
|||
crawl.kfk.producer.max.request.size=10485760 |
|||
crawl.kfk.consumer.auto.offset.reset=earliest |
|||
crawl.kfk.producer.buffer.memory=33554432 |
|||
crawl.kfk.consumer.enable.auto.commit=true |
|||
crawl.kfk.consumer.thread.core.pool.size=100 |
|||
crawl.kfk.producer.max.request.size=10485760 |
|||
crawl.kfk.consumer.auto.commit.interval.ms=1000 |
|||
crawl.kfk.producer.linger.ms=1 |
|||
crawl.kfk.consumer.enable.auto.commit=true |
|||
crawl.kfk.consumer.thread.maximum.pool.size=100 |
|||
crawl.kfk.consumer.session.timeout.ms=30000 |
|||
crawl.kfk.producer.buffer.memory=33554432 |
|||
crawl.kfk.consumer.auto.offset.reset=earliest |
|||
crawl.kfk.consumer.auto.commit.interval.ms=1000 |
|||
crawl.kfk.producer.max.request.size=10485760 |
|||
crawl.kfk.consumer.session.timeout.ms=30000 |
|||
crawl.kfk.consumer.thread.core.pool.size=100 |
|||
crawl.kfk.consumer.enable.auto.commit=true |
|||
crawl.kfk.consumer.thread.maximum.pool.size=100 |
|||
crawl.kfk.consumer.auto.offset.reset=earliest |
|||
crawl.kfk.consumer.auto.commit.interval.ms=1000 |
|||
crawl.kfk.consumer.thread.core.pool.size=100 |
|||
crawl.kfk.consumer.session.timeout.ms=30000 |
|||
crawl.kfk.consumer.thread.maximum.pool.size=100 |
|||
crawl.kfk.consumer.auto.offset.reset=earliest |
|||
crawl.kfk.consumer.thread.core.pool.size=100 |
|||
crawl.kfk.consumer.thread.maximum.pool.size=100 |
|||
etc/145redis.properties
@@ -0,0 +1,7 @@
#@author ruining.he
#redis.model -> install model {single,codis}

#if model is codis,use config as below
redis.model=single
redis.ip=192.168.0.41
redis.port=6379
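For orientation, the redis.model/redis.ip/redis.port keys above describe a single-node Redis. A minimal connection sketch using Jedis is shown below; the choice of Jedis, the class name RedisSingleDemo, and the sample key are illustrative assumptions, not the repository's actual Redis client code.

import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;

// Minimal sketch: connect to the single-node Redis described by
// etc/145redis.properties (redis.model=single, redis.ip, redis.port).
public class RedisSingleDemo {
    public static void main(String[] args) {
        JedisPoolConfig poolConfig = new JedisPoolConfig();
        poolConfig.setMaxTotal(16);

        try (JedisPool pool = new JedisPool(poolConfig, "192.168.0.41", 6379);
             Jedis jedis = pool.getResource()) {
            jedis.set("otherdatasave:ping", "pong");           // sample key, hypothetical
            System.out.println(jedis.get("otherdatasave:ping"));
        }
    }
}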
etc/1_kafka.properties
@@ -0,0 +1,18 @@
#### Kafka Tool Parameters
crawl.kfk.metadata.broker.list=node-01:19092,node-02:19092,node-03:19092
crawl.kfk.connectionTimeOut=50000
crawl.kfk.zk.sessiontimeout.ms=50000
crawl.kfk.kafka.serializer=kafka.serializer.StringEncoder
crawl.kfk.producer.acks=all
crawl.kfk.producer.retries=0
crawl.kfk.producer.batch.size=16384
crawl.kfk.producer.linger.ms=1
crawl.kfk.producer.buffer.memory=33554432
crawl.kfk.producer.max.request.size=10485760
crawl.kfk.consumer.enable.auto.commit=true
crawl.kfk.consumer.auto.commit.interval.ms=1000
crawl.kfk.consumer.session.timeout.ms=30000
crawl.kfk.consumer.auto.offset.reset=latest
crawl.kfk.consumer.thread.core.pool.size=100
crawl.kfk.consumer.thread.maximum.pool.size=100

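The crawl.kfk.* keys above are a thin naming layer over standard Kafka client settings. The sketch below shows how such a file could be mapped onto a KafkaProducer; the CrawlKafkaProducerFactory class, the key-to-ProducerConfig mapping, and the topic name are assumptions for illustration, not the repository's Querykafka/worker code.

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

// Hypothetical helper: maps the crawl.kfk.* keys from etc/1_kafka.properties
// onto the standard Kafka producer configuration.
public class CrawlKafkaProducerFactory {

    public static KafkaProducer<String, String> fromFile(String path) throws IOException {
        Properties raw = new Properties();
        try (FileInputStream in = new FileInputStream(path)) {
            raw.load(in);
        }

        Properties cfg = new Properties();
        // crawl.kfk.metadata.broker.list -> bootstrap.servers
        cfg.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, raw.getProperty("crawl.kfk.metadata.broker.list"));
        cfg.put(ProducerConfig.ACKS_CONFIG, raw.getProperty("crawl.kfk.producer.acks", "all"));
        cfg.put(ProducerConfig.RETRIES_CONFIG, raw.getProperty("crawl.kfk.producer.retries", "0"));
        cfg.put(ProducerConfig.BATCH_SIZE_CONFIG, raw.getProperty("crawl.kfk.producer.batch.size", "16384"));
        cfg.put(ProducerConfig.LINGER_MS_CONFIG, raw.getProperty("crawl.kfk.producer.linger.ms", "1"));
        cfg.put(ProducerConfig.BUFFER_MEMORY_CONFIG, raw.getProperty("crawl.kfk.producer.buffer.memory", "33554432"));
        cfg.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, raw.getProperty("crawl.kfk.producer.max.request.size", "10485760"));
        cfg.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        cfg.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        return new KafkaProducer<>(cfg);
    }

    public static void main(String[] args) throws IOException {
        try (KafkaProducer<String, String> producer = fromFile("etc/1_kafka.properties")) {
            // "demo-topic" is a placeholder; real topic names are not visible in this diff.
            producer.send(new ProducerRecord<>("demo-topic", "key", "{\"hello\":\"world\"}"));
            producer.flush();
        }
    }
}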
etc/2_kafka.properties
@@ -0,0 +1,18 @@
#### Kafka Tool Parameters
crawl.kfk.metadata.broker.list=node-01:19092,node-02:19092,node-03:19092
crawl.kfk.connectionTimeOut=50000
crawl.kfk.zk.sessiontimeout.ms=50000
crawl.kfk.kafka.serializer=kafka.serializer.StringEncoder
crawl.kfk.producer.acks=all
crawl.kfk.producer.retries=0
crawl.kfk.producer.batch.size=16384
crawl.kfk.producer.linger.ms=1
crawl.kfk.producer.buffer.memory=33554432
crawl.kfk.producer.max.request.size=10485760
crawl.kfk.consumer.enable.auto.commit=true
crawl.kfk.consumer.auto.commit.interval.ms=1000
crawl.kfk.consumer.session.timeout.ms=30000
crawl.kfk.consumer.auto.offset.reset=latest
crawl.kfk.consumer.thread.core.pool.size=100
crawl.kfk.consumer.thread.maximum.pool.size=100

etc/3_kafka.properties
@@ -0,0 +1,18 @@
#### Kafka Tool Parameters
crawl.kfk.metadata.broker.list=172.17.20.241:9092
crawl.kfk.connectionTimeOut=50000
crawl.kfk.zk.sessiontimeout.ms=50000
crawl.kfk.kafka.serializer=kafka.serializer.StringEncoder
crawl.kfk.producer.acks=all
crawl.kfk.producer.retries=0
crawl.kfk.producer.batch.size=16384
crawl.kfk.producer.linger.ms=1
crawl.kfk.producer.buffer.memory=33554432
crawl.kfk.producer.max.request.size=10485760
crawl.kfk.consumer.enable.auto.commit=true
crawl.kfk.consumer.auto.commit.interval.ms=1000
crawl.kfk.consumer.session.timeout.ms=30000
crawl.kfk.consumer.auto.offset.reset=latest
crawl.kfk.consumer.thread.core.pool.size=100
crawl.kfk.consumer.thread.maximum.pool.size=100

etc/3xf3z9ecg4cuggc (1 line)
File diff suppressed because it is too large
etc/4_kafka.properties
@@ -0,0 +1,20 @@
#### Kafka Tool Parameters
#crawl.kfk.metadata.broker.list= 10.18.0.18:9092,10.18.0.6:9092,10.18.0.26:9092
crawl.kfk.metadata.broker.list=35.208.0.204:19092,35.209.211.105:19092,35.208.174.139:19092
crawl.kfk.connectionTimeOut=50000
crawl.kfk.zk.sessiontimeout.ms=50000
crawl.kfk.kafka.serializer=kafka.serializer.StringEncoder
crawl.kfk.producer.acks=all
crawl.kfk.producer.retries=0
crawl.kfk.producer.batch.size=16384
crawl.kfk.producer.linger.ms=1
crawl.kfk.producer.request.timeout.ms=90000
crawl.kfk.producer.buffer.memory=33554432
crawl.kfk.producer.max.request.size=10485760
crawl.kfk.consumer.enable.auto.commit=true
crawl.kfk.consumer.auto.commit.interval.ms=6000
crawl.kfk.consumer.session.timeout.ms=30000
crawl.kfk.consumer.auto.offset.reset=earliest
crawl.kfk.consumer.thread.core.pool.size=100
crawl.kfk.consumer.thread.maximum.pool.size=100

etc/5_kafka.properties
@@ -0,0 +1,18 @@
#### Kafka Tool Parameters
crawl.kfk.metadata.broker.list=node-01:19092,node-02:19092,node-03:19092
crawl.kfk.connectionTimeOut=50000
crawl.kfk.zk.sessiontimeout.ms=50000
crawl.kfk.kafka.serializer=kafka.serializer.StringEncoder
crawl.kfk.producer.acks=all
crawl.kfk.producer.retries=0
crawl.kfk.producer.batch.size=16384
crawl.kfk.producer.linger.ms=1
crawl.kfk.producer.buffer.memory=33554432
crawl.kfk.producer.max.request.size=10485760
crawl.kfk.consumer.enable.auto.commit=true
crawl.kfk.consumer.auto.commit.interval.ms=1000
crawl.kfk.consumer.session.timeout.ms=30000
crawl.kfk.consumer.auto.offset.reset=latest
crawl.kfk.consumer.thread.core.pool.size=100
crawl.kfk.consumer.thread.maximum.pool.size=100

etc/db.properties
@@ -0,0 +1,10 @@
logfile=DBConnectManager.log
drivers=com.mysql.jdbc.Driver
db_stat.maxconn=100
db_stat.url=jdbc:mysql://192.168.0.41:3306/intelligent_crawl?useUnicode=true&characterEncoding=utf-8
db_stat.user=root
db_stat.password=bw@2025
db_stat_alltask.maxconn=100
db_stat_alltask.url=jdbc:mysql://192.168.0.41:3306/intelligent_crawl?useUnicode=true&characterEncoding=utf-8
db_stat_alltask.user=root
db_stat_alltask.password=bw@2025
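The db_stat.* block above is a conventional JDBC description (driver, URL, credentials, max connections). Below is a minimal sketch of opening a connection from these keys using only the standard java.sql API; the DbStatConnectionDemo class is hypothetical and the repository's own DBConnectManager is not shown here.

import java.io.FileInputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;

// Minimal sketch: open a MySQL connection using the db_stat.* keys
// from etc/db.properties. Assumes the MySQL driver is on the classpath.
public class DbStatConnectionDemo {
    public static void main(String[] args) throws Exception {
        Properties p = new Properties();
        try (FileInputStream in = new FileInputStream("etc/db.properties")) {
            p.load(in);
        }

        Class.forName(p.getProperty("drivers"));   // com.mysql.jdbc.Driver
        String url = p.getProperty("db_stat.url");
        String user = p.getProperty("db_stat.user");
        String pass = p.getProperty("db_stat.password");

        try (Connection conn = DriverManager.getConnection(url, user, pass);
             Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery("SELECT 1")) {
            while (rs.next()) {
                System.out.println("connection ok: " + rs.getInt(1));
            }
        }
    }
}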
etc/elasticsearch_1.properties
@@ -0,0 +1,10 @@
crawl.elasticsearch.metadata.broker.list=172.18.1.145:9213,172.18.1.146:9213,172.18.1.147:9213
crawl.elasticsearch.socket.timeout.interval.ms=10000
crawl.elasticsearch.connect.timeout.ms=10000
crawl.elasticsearch.max.connect.num=100
crawl.elasticsearch.max.connect.per.route.num=100
crawl.elasticsearch.max.retry.timeout=300000
crawl.elasticsearch.producer.max.cache.num=2000
crawl.elasticsearch.producer.max.write.time=120000
crawl.elasticsearch.consumer.quary.num=2000
crawl.elasticsearch.consumer.scroll.time=3
etc/elasticsearch_2.properties
@@ -0,0 +1,10 @@
crawl.elasticsearch.metadata.broker.list=172.18.1.134:9201
crawl.elasticsearch.socket.timeout.interval.ms=60000
crawl.elasticsearch.connect.timeout.ms=50000
crawl.elasticsearch.max.connect.num=100
crawl.elasticsearch.max.connect.per.route.num=100
crawl.elasticsearch.max.retry.timeout=300000
crawl.elasticsearch.producer.max.cache.num=1000
crawl.elasticsearch.producer.max.write.time=100000
crawl.elasticsearch.consumer.quary.num=1000
crawl.elasticsearch.consumer.scroll.time=3
etc/elasticsearch_3.properties
@@ -0,0 +1,14 @@
crawl.elasticsearch.metadata.broker.list=10.8.0.18:9200
crawl.elasticsearch.socket.timeout.interval.ms=60000
crawl.elasticsearch.connect.timeout.ms=50000
crawl.elasticsearch.max.connect.num=100
crawl.elasticsearch.max.connect.per.route.num=100
crawl.elasticsearch.max.retry.timeout=300000
crawl.elasticsearch.producer.max.cache.num=1000
crawl.elasticsearch.producer.max.write.time=100000
crawl.elasticsearch.consumer.quary.num=1000
crawl.elasticsearch.consumer.scroll.time=3
crawl.elasticsearch.consumer.scroll.time=3
crawl.elasticsearch.username=elastic
crawl.elasticsearch.password=bw@123

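The crawl.elasticsearch.* keys above describe the cluster hosts, timeouts, connection-pool sizes, and (in this variant) basic-auth credentials. A minimal sketch of building an Elasticsearch low-level REST client from those values follows; the EsClientDemo class is an illustrative assumption, and the repository's EsUtils, which presumably does this for real, is not reproduced here.

import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Minimal sketch: build a low-level Elasticsearch REST client from the
// crawl.elasticsearch.* settings in etc/elasticsearch_3.properties.
public class EsClientDemo {
    public static void main(String[] args) throws Exception {
        HttpHost host = new HttpHost("10.8.0.18", 9200, "http");

        BasicCredentialsProvider credentials = new BasicCredentialsProvider();
        credentials.setCredentials(AuthScope.ANY,
                new UsernamePasswordCredentials("elastic", "bw@123"));

        RestClient client = RestClient.builder(host)
                .setRequestConfigCallback(rc -> rc
                        .setConnectTimeout(50000)      // crawl.elasticsearch.connect.timeout.ms
                        .setSocketTimeout(60000))      // crawl.elasticsearch.socket.timeout.interval.ms
                .setHttpClientConfigCallback(hc -> hc
                        .setDefaultCredentialsProvider(credentials)
                        .setMaxConnTotal(100)          // crawl.elasticsearch.max.connect.num
                        .setMaxConnPerRoute(100))      // crawl.elasticsearch.max.connect.per.route.num
                .build();

        Response response = client.performRequest(new Request("GET", "/"));
        System.out.println(response.getStatusLine());
        client.close();
    }
}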
etc/log4j.properties
@@ -0,0 +1,42 @@

log4j.rootLogger=debug

log4j.logger.com.bfd.mf.service = warn,SERVICE
log4j.logger.com.bfd.mf.datasave = warn,DATASAVE
#service log
log4j.appender.SERVICE=org.apache.log4j.DailyRollingFileAppender
log4j.appender.SERVICE.Threshold=warn
log4j.appender.SERVICE.File=../logs/service/service.log
log4j.appender.SERVICE.DatePattern='.'yyyy-MM-dd
log4j.appender.SERVICE.layout=org.apache.log4j.PatternLayout
log4j.appender.SERVICE.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%L) - %m%n

log4j.logger.com.bfd.mf.datasave = debug,DATASAVE
#datasave log
log4j.appender.DATASAVE=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DATASAVE.Threshold=OFF
log4j.appender.DATASAVE.File=../logs/datasave/datasave.log
log4j.appender.DATASAVE.DatePattern='.'yyyy-MM-dd
log4j.appender.DATASAVE.layout=org.apache.log4j.PatternLayout
log4j.appender.DATASAVE.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%L) - %m%n


log4j.logger.com.bfd.mf.service.extendType.ParentExctendType= error,ParentExctendType
#service ParentExctendType log
log4j.appender.ParentExctendType=org.apache.log4j.DailyRollingFileAppender
log4j.appender.ParentExctendType.Threshold=warn
log4j.appender.ParentExctendType.File=../logs/service/datatokafka.log
log4j.appender.ParentExctendType.DatePattern='.'yyyy-MM-dd
log4j.appender.ParentExctendType.layout=org.apache.log4j.PatternLayout
log4j.appender.ParentExctendType.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%L) - %m%n
log4j.logger.com.bfd.mf.service.extendType.ParentExctendType= DEBUG,ParentExctendType


log4j.logger.com.bfd.mf.service.extendType.ForegroundExtendType= warn,ForegroundExtendType
#service ForegroundExtendType log
log4j.appender.ForegroundExtendType=org.apache.log4j.DailyRollingFileAppender
log4j.appender.ForegroundExtendType.Threshold=warn
log4j.appender.ForegroundExtendType.File=../logs/service/sourcedata.log
log4j.appender.ForegroundExtendType.DatePattern='.'yyyy-MM-dd
log4j.appender.ForegroundExtendType.layout=org.apache.log4j.PatternLayout
log4j.appender.ForegroundExtendType.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%L) - %m%n
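The log4j configuration above routes loggers under com.bfd.mf.service and com.bfd.mf.datasave to daily-rolling files. A short sketch of how a class in one of those packages would pick up the SERVICE appender is given below; ExampleService is a hypothetical class, not code from this commit.

package com.bfd.mf.service;

import org.apache.log4j.Logger;

// Hypothetical class: anything under com.bfd.mf.service inherits the
// SERVICE appender configured above (../logs/service/service.log,
// daily rolling, logger level warn).
public class ExampleService {
    private static final Logger LOG = Logger.getLogger(ExampleService.class);

    public void run() {
        LOG.debug("dropped: the com.bfd.mf.service logger level is warn");
        LOG.warn("written to ../logs/service/service.log");
    }

    public static void main(String[] args) {
        new ExampleService().run();
    }
}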
etc/logback.xml
@@ -0,0 +1,154 @@
<?xml version="1.0" encoding="UTF-8"?> |
|||
<configuration debug="true"> |
|||
<!-- 应用名称 --> |
|||
<property name="APP_NAME" value="logdatastream" /> |
|||
<!--日志文件的保存路径,首先查找系统属性-Dlog.dir,如果存在就使用其;否则,在当前目录下创建名为logs目录做日志存放的目录 --> |
|||
<property name="LOG_HOME" value="${log.dir:-logs}/${APP_NAME}" /> |
|||
<!-- 日志输出格式 --> |
|||
<property name="ENCODER_PATTERN" value="%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{80} - %msg%n" /> |
|||
<contextName>${APP_NAME}</contextName> |
|||
|
|||
<!-- 控制台日志:输出全部日志到控制台 --> |
|||
<!-- <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> |
|||
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"> |
|||
<Pattern>${ENCODER_PATTERN}</Pattern> |
|||
</encoder> |
|||
</appender> --> |
|||
|
|||
<!-- 文件日志:输出全部日志到文件 --> |
|||
<!--<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
|||
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> |
|||
<fileNamePattern>${LOG_HOME}/output.log.%d{yyyy-MM-dd}</fileNamePattern> |
|||
<maxHistory>7</maxHistory> |
|||
</rollingPolicy> |
|||
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"> |
|||
<pattern>${ENCODER_PATTERN}</pattern> |
|||
</encoder> |
|||
</appender>--> |
|||
|
|||
|
|||
<!-- 独立输出的同步日志 --> |
|||
|
|||
<!-- <appender name="SYNC_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
|||
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> |
|||
<fileNamePattern>${LOG_HOME}/sync.log.%d{yyyy-MM-dd}</fileNamePattern> |
|||
<maxHistory>7</maxHistory> |
|||
</rollingPolicy> |
|||
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"> |
|||
<pattern>${ENCODER_PATTERN}</pattern> |
|||
</encoder> |
|||
</appender>--> |
|||
|
|||
|
|||
<!-- 错误日志:用于将错误日志输出到独立文件 --> |
|||
<appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
|||
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> |
|||
<fileNamePattern>${LOG_HOME}/error.log.%d{yyyy-MM-dd}</fileNamePattern> |
|||
<maxHistory>7</maxHistory> |
|||
</rollingPolicy> |
|||
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"> |
|||
<pattern>${ENCODER_PATTERN}</pattern> |
|||
</encoder> |
|||
<filter class="ch.qos.logback.classic.filter.ThresholdFilter"> |
|||
<level>WARN</level> |
|||
</filter> |
|||
</appender> |
|||
|
|||
<!-- 独立输出的同步日志 --> |
|||
<appender name="INFO" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
|||
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> |
|||
<fileNamePattern>${LOG_HOME}/info.log.%d{yyyy-MM-dd}</fileNamePattern> |
|||
<maxHistory>7</maxHistory> |
|||
</rollingPolicy> |
|||
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"> |
|||
<pattern>${ENCODER_PATTERN}</pattern> |
|||
</encoder> |
|||
</appender> |
|||
|
|||
<logger name="log.sync" level="INFO" addtivity="true"> |
|||
<appender-ref ref="INFO" /> |
|||
</logger> |
|||
|
|||
<root> |
|||
<level value="INFO" /> |
|||
<!-- <appender-ref ref="STDOUT" /> |
|||
<appender-ref ref="FILE" />--> |
|||
<appender-ref ref="ERROR_FILE" /> |
|||
<appender-ref ref="INFO" /> |
|||
</root> |
|||
|
|||
</configuration> |
|||
|
|||
|
|||
<!--<?xml version="1.0" encoding="UTF-8"?>--> |
|||
<!--<configuration debug="true"> <!– 应用名称 –>--> |
|||
<!--<property name="APP_NAME" value="logtest" />--> |
|||
|
|||
<!--<!–日志文件的保存路径,首先查找系统属性-Dlog.dir,如果存在就使用其;否则,在当前目录下创建名为logs目录做日志存放的目 |
|||
录 –>--> |
|||
<!--<property name="LOG_HOME" value="${log.dir:-logs}/${APP_NAME}" /> <!– 日志输出格式 –>--> |
|||
<!--<property name="ENCODER_PATTERN" value="%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{80} - %msg%n" />--> |
|||
<!--<contextName>${APP_NAME}</contextName>--> |
|||
<!--<!– 控制台日志:输出全部日志到控制台 –>--> |
|||
<!--<appender name="STDOUT" class="com.bfd.mf.SearchApplication">--> |
|||
<!--<encoder class="com.bfd.mf.SearchApplication">--> |
|||
<!--<Pattern>${ENCODER_PATTERN}</Pattern>--> |
|||
<!--</encoder>--> |
|||
<!--</appender>--> |
|||
<!--<!– 文件日志:输出全部日志到文件 –>--> |
|||
<!--<appender name="FILE" class="com.bfd.mf.SearchApplication">--> |
|||
<!--<rollingPolicy class="com.bfd.mf.SearchApplication">--> |
|||
<!--<fileNamePattern>${LOG_HOME}/output.%d{yyyy-MM-dd}.log</fileNamePattern>--> |
|||
<!--<maxHistory>7</maxHistory>--> |
|||
<!--</rollingPolicy>--> |
|||
<!--<encoder class="com.bfd.mf.SearchApplication">--> |
|||
<!--<pattern>${ENCODER_PATTERN}</pattern>--> |
|||
<!--</encoder>--> |
|||
<!--</appender>--> |
|||
<!--<!– 错误日志:用于将错误日志输出到独立文件 –>--> |
|||
<!--<appender name="ERROR_FILE" class="com.bfd.mf.SearchApplication">--> |
|||
<!--<rollingPolicy class="com.bfd.mf.SearchApplication">--> |
|||
<!--<fileNamePattern>${LOG_HOME}/error.%d{yyyy-MM-dd}.log</fileNamePattern>--> |
|||
<!--<maxHistory>7</maxHistory>--> |
|||
<!--</rollingPolicy>--> |
|||
<!--<encoder class="com.bfd.mf.SearchApplication">--> |
|||
<!--<pattern>${ENCODER_PATTERN}</pattern>--> |
|||
<!--</encoder>--> |
|||
<!--<filter class="com.bfd.mf.SearchApplication">--> |
|||
<!--<level>WARN</level>--> |
|||
<!--</filter>--> |
|||
<!--</appender>--> |
|||
<!--<!– 独立输出的同步日志 –>--> |
|||
<!--<appender name="SYNC_FILE" class="com.bfd.mf.SearchApplication">--> |
|||
<!--<rollingPolicy class="com.bfd.mf.SearchApplication">--> |
|||
<!--<fileNamePattern>${LOG_HOME}/sync.%d{yyyy-MM-dd}.log</fileNamePattern>--> |
|||
<!--<maxHistory>7</maxHistory>--> |
|||
<!--</rollingPolicy>--> |
|||
<!--<encoder class="com.bfd.mf.SearchApplication">--> |
|||
<!--<pattern>${ENCODER_PATTERN}</pattern>--> |
|||
<!--</encoder>--> |
|||
<!--</appender>--> |
|||
<!--<logger name="log.sync" level="DEBUG" addtivity="true">--> |
|||
<!--<appender-ref ref="SYNC_FILE" />--> |
|||
<!--</logger>--> |
|||
<!--<root>--> |
|||
<!--<level value="DEBUG" />--> |
|||
<!--<appender-ref ref="STDOUT" />--> |
|||
<!--<appender-ref ref="FILE" />--> |
|||
<!--<appender-ref ref="ERROR_FILE" />--> |
|||
<!--</root>--> |
|||
<!--</configuration>--> |
|||
|
|||
<!--<!–<configuration>–>--> |
|||
<!--<!–<property name="LOG_PATTERN" value="%date{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n" |
|||
/>–>--> |
|||
<!--<!–<property name="FILE_PATH" value="D:/logs/course03/demo.%d{yyyy-MM-dd}.%i.log" />–>--> |
|||
<!--<!–</configuration>–>--> |
|||
<!--<!–<configuration>–>--> |
|||
<!--<!–<appender name="CONSOLE" class="com.bfd.mf.SearchApplication">–>--> |
|||
<!--<!–<encoder>–>--> |
|||
<!--<!–<!– 按照上面配置的LOG_PATTERN来打印日志 –>–>--> |
|||
<!--<!–<pattern>${LOG_PATTERN}</pattern>–>--> |
|||
<!--<!–</encoder>–>--> |
|||
<!--<!–</appender>–>--> |
|||
<!--<!–</configuration>–>--> |
|||
etc/posTag.txt
@@ -0,0 +1,36 @@
u
ud
ude1
ude2
ude3
udeng
udh
ug
uguo
uj
ul
ule
ulian
uls
usuo
uv
uyy
uz
uzhe
uzhi
w
wb
wd
wf
wh
wj
wky
wkz
wm
wn
wp
ws
wt
ww
wyy
wyz
etc/redis.properties
@@ -0,0 +1,7 @@
#@author ruining.he
#redis.model -> install model {single,codis}

#if model is codis,use config as below
redis.model=single
redis.ip=192.168.0.41
redis.port=6379
etc/stopWords.txt (1280 lines)
File diff suppressed because it is too large
etc/stopWordsEN.txt
@@ -0,0 +1,891 @@
'd |
|||
'll |
|||
'm |
|||
're |
|||
's |
|||
't |
|||
've |
|||
ZT |
|||
ZZ |
|||
a |
|||
a's |
|||
able |
|||
about |
|||
above |
|||
abst |
|||
accordance |
|||
according |
|||
accordingly |
|||
across |
|||
act |
|||
actually |
|||
added |
|||
adj |
|||
adopted |
|||
affected |
|||
affecting |
|||
affects |
|||
after |
|||
afterwards |
|||
again |
|||
against |
|||
ah |
|||
ain't |
|||
all |
|||
allow |
|||
allows |
|||
almost |
|||
alone |
|||
along |
|||
already |
|||
also |
|||
although |
|||
always |
|||
am |
|||
among |
|||
amongst |
|||
an |
|||
and |
|||
announce |
|||
another |
|||
any |
|||
anybody |
|||
anyhow |
|||
anymore |
|||
anyone |
|||
anything |
|||
anyway |
|||
anyways |
|||
anywhere |
|||
apart |
|||
apparently |
|||
appear |
|||
appreciate |
|||
appropriate |
|||
approximately |
|||
are |
|||
area |
|||
areas |
|||
aren |
|||
aren't |
|||
arent |
|||
arise |
|||
around |
|||
as |
|||
aside |
|||
ask |
|||
asked |
|||
asking |
|||
asks |
|||
associated |
|||
at |
|||
auth |
|||
available |
|||
away |
|||
awfully |
|||
b |
|||
back |
|||
backed |
|||
backing |
|||
backs |
|||
be |
|||
became |
|||
because |
|||
become |
|||
becomes |
|||
becoming |
|||
been |
|||
before |
|||
beforehand |
|||
began |
|||
begin |
|||
beginning |
|||
beginnings |
|||
begins |
|||
behind |
|||
being |
|||
beings |
|||
believe |
|||
below |
|||
beside |
|||
besides |
|||
best |
|||
better |
|||
between |
|||
beyond |
|||
big |
|||
biol |
|||
both |
|||
brief |
|||
briefly |
|||
but |
|||
by |
|||
c |
|||
c'mon |
|||
c's |
|||
ca |
|||
came |
|||
can |
|||
can't |
|||
cannot |
|||
cant |
|||
case |
|||
cases |
|||
cause |
|||
causes |
|||
certain |
|||
certainly |
|||
changes |
|||
clear |
|||
clearly |
|||
co |
|||
com |
|||
come |
|||
comes |
|||
concerning |
|||
consequently |
|||
consider |
|||
considering |
|||
contain |
|||
containing |
|||
contains |
|||
corresponding |
|||
could |
|||
couldn't |
|||
couldnt |
|||
course |
|||
currently |
|||
d |
|||
date |
|||
definitely |
|||
describe |
|||
described |
|||
despite |
|||
did |
|||
didn't |
|||
differ |
|||
different |
|||
differently |
|||
discuss |
|||
do |
|||
does |
|||
doesn't |
|||
doing |
|||
don't |
|||
done |
|||
down |
|||
downed |
|||
downing |
|||
downs |
|||
downwards |
|||
due |
|||
during |
|||
e |
|||
each |
|||
early |
|||
ed |
|||
edu |
|||
effect |
|||
eg |
|||
eight |
|||
eighty |
|||
either |
|||
else |
|||
elsewhere |
|||
end |
|||
ended |
|||
ending |
|||
ends |
|||
enough |
|||
entirely |
|||
especially |
|||
et |
|||
et-al |
|||
etc |
|||
even |
|||
evenly |
|||
ever |
|||
every |
|||
everybody |
|||
everyone |
|||
everything |
|||
everywhere |
|||
ex |
|||
exactly |
|||
example |
|||
except |
|||
f |
|||
face |
|||
faces |
|||
fact |
|||
facts |
|||
far |
|||
felt |
|||
few |
|||
ff |
|||
fifth |
|||
find |
|||
finds |
|||
first |
|||
five |
|||
fix |
|||
followed |
|||
following |
|||
follows |
|||
for |
|||
former |
|||
formerly |
|||
forth |
|||
found |
|||
four |
|||
from |
|||
full |
|||
fully |
|||
further |
|||
furthered |
|||
furthering |
|||
furthermore |
|||
furthers |
|||
g |
|||
gave |
|||
general |
|||
generally |
|||
get |
|||
gets |
|||
getting |
|||
give |
|||
given |
|||
gives |
|||
giving |
|||
go |
|||
goes |
|||
going |
|||
gone |
|||
good |
|||
goods |
|||
got |
|||
gotten |
|||
great |
|||
greater |
|||
greatest |
|||
greetings |
|||
group |
|||
grouped |
|||
grouping |
|||
groups |
|||
h |
|||
had |
|||
hadn't |
|||
happens |
|||
hardly |
|||
has |
|||
hasn't |
|||
have |
|||
haven't |
|||
having |
|||
he |
|||
he's |
|||
hed |
|||
hello |
|||
help |
|||
hence |
|||
her |
|||
here |
|||
here's |
|||
hereafter |
|||
hereby |
|||
herein |
|||
heres |
|||
hereupon |
|||
hers |
|||
herself |
|||
hes |
|||
hi |
|||
hid |
|||
high |
|||
higher |
|||
highest |
|||
him |
|||
himself |
|||
his |
|||
hither |
|||
home |
|||
hopefully |
|||
how |
|||
howbeit |
|||
however |
|||
hundred |
|||
i |
|||
i'd |
|||
i'll |
|||
i'm |
|||
i've |
|||
id |
|||
ie |
|||
if |
|||
ignored |
|||
im |
|||
immediate |
|||
immediately |
|||
importance |
|||
important |
|||
in |
|||
inasmuch |
|||
inc |
|||
include |
|||
indeed |
|||
index |
|||
indicate |
|||
indicated |
|||
indicates |
|||
information |
|||
inner |
|||
insofar |
|||
instead |
|||
interest |
|||
interested |
|||
interesting |
|||
interests |
|||
into |
|||
invention |
|||
inward |
|||
is |
|||
isn't |
|||
it |
|||
it'd |
|||
it'll |
|||
it's |
|||
itd |
|||
its |
|||
itself |
|||
j |
|||
just |
|||
k |
|||
keep |
|||
keeps |
|||
kept |
|||
keys |
|||
kg |
|||
kind |
|||
km |
|||
knew |
|||
know |
|||
known |
|||
knows |
|||
l |
|||
large |
|||
largely |
|||
last |
|||
lately |
|||
later |
|||
latest |
|||
latter |
|||
latterly |
|||
least |
|||
less |
|||
lest |
|||
let |
|||
let's |
|||
lets |
|||
like |
|||
liked |
|||
likely |
|||
line |
|||
little |
|||
long |
|||
longer |
|||
longest |
|||
look |
|||
looking |
|||
looks |
|||
ltd |
|||
m |
|||
made |
|||
mainly |
|||
make |
|||
makes |
|||
making |
|||
man |
|||
many |
|||
may |
|||
maybe |
|||
me |
|||
mean |
|||
means |
|||
meantime |
|||
meanwhile |
|||
member |
|||
members |
|||
men |
|||
merely |
|||
mg |
|||
might |
|||
million |
|||
miss |
|||
ml |
|||
more |
|||
moreover |
|||
most |
|||
mostly |
|||
mr |
|||
mrs |
|||
much |
|||
mug |
|||
must |
|||
my |
|||
myself |
|||
n |
|||
n't |
|||
na |
|||
name |
|||
namely |
|||
nay |
|||
nd |
|||
near |
|||
nearly |
|||
necessarily |
|||
necessary |
|||
need |
|||
needed |
|||
needing |
|||
needs |
|||
neither |
|||
never |
|||
nevertheless |
|||
new |
|||
newer |
|||
newest |
|||
next |
|||
nine |
|||
ninety |
|||
no |
|||
nobody |
|||
non |
|||
none |
|||
nonetheless |
|||
noone |
|||
nor |
|||
normally |
|||
nos |
|||
not |
|||
noted |
|||
nothing |
|||
novel |
|||
now |
|||
nowhere |
|||
number |
|||
numbers |
|||
o |
|||
obtain |
|||
obtained |
|||
obviously |
|||
of |
|||
off |
|||
often |
|||
oh |
|||
ok |
|||
okay |
|||
old |
|||
older |
|||
oldest |
|||
omitted |
|||
on |
|||
once |
|||
one |
|||
ones |
|||
only |
|||
onto |
|||
open |
|||
opened |
|||
opening |
|||
opens |
|||
or |
|||
ord |
|||
order |
|||
ordered |
|||
ordering |
|||
orders |
|||
other |
|||
others |
|||
otherwise |
|||
ought |
|||
our |
|||
ours |
|||
ourselves |
|||
out |
|||
outside |
|||
over |
|||
overall |
|||
owing |
|||
own |
|||
p |
|||
page |
|||
pages |
|||
part |
|||
parted |
|||
particular |
|||
particularly |
|||
parting |
|||
parts |
|||
past |
|||
per |
|||
perhaps |
|||
place |
|||
placed |
|||
places |
|||
please |
|||
plus |
|||
point |
|||
pointed |
|||
pointing |
|||
points |
|||
poorly |
|||
possible |
|||
possibly |
|||
potentially |
|||
pp |
|||
predominantly |
|||
present |
|||
presented |
|||
presenting |
|||
presents |
|||
presumably |
|||
previously |
|||
primarily |
|||
probably |
|||
problem |
|||
problems |
|||
promptly |
|||
proud |
|||
provides |
|||
put |
|||
puts |
|||
q |
|||
que |
|||
quickly |
|||
quite |
|||
qv |
|||
r |
|||
ran |
|||
rather |
|||
rd |
|||
re |
|||
readily |
|||
really |
|||
reasonably |
|||
recent |
|||
recently |
|||
ref |
|||
refs |
|||
regarding |
|||
regardless |
|||
regards |
|||
related |
|||
relatively |
|||
research |
|||
respectively |
|||
resulted |
|||
resulting |
|||
results |
|||
right |
|||
room |
|||
rooms |
|||
run |
|||
s |
|||
said |
|||
same |
|||
saw |
|||
say |
|||
saying |
|||
says |
|||
sec |
|||
second |
|||
secondly |
|||
seconds |
|||
section |
|||
see |
|||
seeing |
|||
seem |
|||
seemed |
|||
seeming |
|||
seems |
|||
seen |
|||
sees |
|||
self |
|||
selves |
|||
sensible |
|||
sent |
|||
serious |
|||
seriously |
|||
seven |
|||
several |
|||
shall |
|||
she |
|||
she'll |
|||
shed |
|||
shes |
|||
should |
|||
shouldn't |
|||
show |
|||
showed |
|||
showing |
|||
shown |
|||
showns |
|||
shows |
|||
side |
|||
sides |
|||
significant |
|||
significantly |
|||
similar |
|||
similarly |
|||
since |
|||
six |
|||
slightly |
|||
small |
|||
smaller |
|||
smallest |
|||
so |
|||
some |
|||
somebody |
|||
somehow |
|||
someone |
|||
somethan |
|||
something |
|||
sometime |
|||
sometimes |
|||
somewhat |
|||
somewhere |
|||
soon |
|||
sorry |
|||
specifically |
|||
specified |
|||
specify |
|||
specifying |
|||
state |
|||
states |
|||
still |
|||
stop |
|||
strongly |
|||
sub |
|||
substantially |
|||
successfully |
|||
such |
|||
sufficiently |
|||
suggest |
|||
sup |
|||
sure |
|||
t |
|||
t's |
|||
take |
|||
taken |
|||
taking |
|||
tell |
|||
tends |
|||
th |
|||
than |
|||
thank |
|||
thanks |
|||
thanx |
|||
that |
|||
that'll |
|||
that's |
|||
that've |
|||
thats |
|||
the |
|||
their |
|||
theirs |
|||
them |
|||
themselves |
|||
then |
|||
thence |
|||
there |
|||
there'll |
|||
there's |
|||
there've |
|||
thereafter |
|||
thereby |
|||
thered |
|||
therefore |
|||
therein |
|||
thereof |
|||
therere |
|||
theres |
|||
thereto |
|||
thereupon |
|||
these |
|||
they |
|||
they'd |
|||
they'll |
|||
they're |
|||
they've |
|||
theyd |
|||
theyre |
|||
thing |
|||
things |
|||
think |
|||
thinks |
|||
third |
|||
this |
|||
thorough |
|||
thoroughly |
|||
those |
|||
thou |
|||
though |
|||
thoughh |
|||
thought |
|||
thoughts |
|||
thousand |
|||
three |
|||
throug |
|||
through |
|||
throughout |
|||
thru |
|||
thus |
|||
til |
|||
tip |
|||
to |
|||
today |
|||
together |
|||
too |
|||
took |
|||
toward |
|||
towards |
|||
tried |
|||
tries |
|||
truly |
|||
try |
|||
trying |
|||
ts |
|||
turn |
|||
turned |
|||
turning |
|||
turns |
|||
twice |
|||
two |
|||
u |
|||
un |
|||
under |
|||
unfortunately |
|||
unless |
|||
unlike |
|||
unlikely |
|||
until |
|||
unto |
|||
up |
|||
upon |
|||
ups |
|||
us |
|||
use |
|||
used |
|||
useful |
|||
usefully |
|||
usefulness |
|||
uses |
|||
using |
|||
usually |
|||
uucp |
|||
v |
|||
value |
|||
various |
|||
very |
|||
via |
|||
viz |
|||
vol |
|||
vols |
|||
vs |
|||
w |
|||
want |
|||
wanted |
|||
wanting |
|||
wants |
|||
was |
|||
wasn't |
|||
way |
|||
ways |
|||
we |
|||
we'd |
|||
we'll |
|||
we're |
|||
we've |
|||
wed |
|||
welcome |
|||
well |
|||
wells |
|||
went |
|||
were |
|||
weren't |
|||
what |
|||
what'll |
|||
what's |
|||
whatever |
|||
whats |
|||
when |
|||
whence |
|||
whenever |
|||
where |
|||
where's |
|||
whereafter |
|||
whereas |
|||
whereby |
|||
wherein |
|||
wheres |
|||
whereupon |
|||
wherever |
|||
whether |
|||
which |
|||
while |
|||
whim |
|||
whither |
|||
who |
|||
who'll |
|||
who's |
|||
whod |
|||
whoever |
|||
whole |
|||
whom |
|||
whomever |
|||
whos |
|||
whose |
|||
why |
|||
widely |
|||
will |
|||
willing |
|||
wish |
|||
with |
|||
within |
|||
without |
|||
won't |
|||
wonder |
|||
words |
|||
work |
|||
worked |
|||
working |
|||
works |
|||
world |
|||
would |
|||
wouldn't |
|||
www |
|||
x |
|||
y |
|||
year |
|||
years |
|||
yes |
|||
yet |
|||
you |
|||
you'd |
|||
you'll |
|||
you're |
|||
you've |
|||
youd |
|||
young |
|||
younger |
|||
youngest |
|||
your |
|||
youre |
|||
yours |
|||
yourself |
|||
yourselves |
|||
z |
|||
zero |
|||
zt |
|||
zz |
|||
foreground (1 line)
File diff suppressed because it is too large
mvnw
@@ -0,0 +1,259 @@
#!/bin/sh |
|||
# ---------------------------------------------------------------------------- |
|||
# Licensed to the Apache Software Foundation (ASF) under one |
|||
# or more contributor license agreements. See the NOTICE file |
|||
# distributed with this work for additional information |
|||
# regarding copyright ownership. The ASF licenses this file |
|||
# to you under the Apache License, Version 2.0 (the |
|||
# "License"); you may not use this file except in compliance |
|||
# with the License. You may obtain a copy of the License at |
|||
# |
|||
# http://www.apache.org/licenses/LICENSE-2.0 |
|||
# |
|||
# Unless required by applicable law or agreed to in writing, |
|||
# software distributed under the License is distributed on an |
|||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
|||
# KIND, either express or implied. See the License for the |
|||
# specific language governing permissions and limitations |
|||
# under the License. |
|||
# ---------------------------------------------------------------------------- |
|||
|
|||
# ---------------------------------------------------------------------------- |
|||
# Apache Maven Wrapper startup batch script, version 3.3.2 |
|||
# |
|||
# Optional ENV vars |
|||
# ----------------- |
|||
# JAVA_HOME - location of a JDK home dir, required when download maven via java source |
|||
# MVNW_REPOURL - repo url base for downloading maven distribution |
|||
# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven |
|||
# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output |
|||
# ---------------------------------------------------------------------------- |
|||
|
|||
set -euf |
|||
[ "${MVNW_VERBOSE-}" != debug ] || set -x |
|||
|
|||
# OS specific support. |
|||
native_path() { printf %s\\n "$1"; } |
|||
case "$(uname)" in |
|||
CYGWIN* | MINGW*) |
|||
[ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")" |
|||
native_path() { cygpath --path --windows "$1"; } |
|||
;; |
|||
esac |
|||
|
|||
# set JAVACMD and JAVACCMD |
|||
set_java_home() { |
|||
# For Cygwin and MinGW, ensure paths are in Unix format before anything is touched |
|||
if [ -n "${JAVA_HOME-}" ]; then |
|||
if [ -x "$JAVA_HOME/jre/sh/java" ]; then |
|||
# IBM's JDK on AIX uses strange locations for the executables |
|||
JAVACMD="$JAVA_HOME/jre/sh/java" |
|||
JAVACCMD="$JAVA_HOME/jre/sh/javac" |
|||
else |
|||
JAVACMD="$JAVA_HOME/bin/java" |
|||
JAVACCMD="$JAVA_HOME/bin/javac" |
|||
|
|||
if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then |
|||
echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2 |
|||
echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2 |
|||
return 1 |
|||
fi |
|||
fi |
|||
else |
|||
JAVACMD="$( |
|||
'set' +e |
|||
'unset' -f command 2>/dev/null |
|||
'command' -v java |
|||
)" || : |
|||
JAVACCMD="$( |
|||
'set' +e |
|||
'unset' -f command 2>/dev/null |
|||
'command' -v javac |
|||
)" || : |
|||
|
|||
if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then |
|||
echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2 |
|||
return 1 |
|||
fi |
|||
fi |
|||
} |
|||
|
|||
# hash string like Java String::hashCode |
|||
hash_string() { |
|||
str="${1:-}" h=0 |
|||
while [ -n "$str" ]; do |
|||
char="${str%"${str#?}"}" |
|||
h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296)) |
|||
str="${str#?}" |
|||
done |
|||
printf %x\\n $h |
|||
} |
|||
|
|||
verbose() { :; } |
|||
[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; } |
|||
|
|||
die() { |
|||
printf %s\\n "$1" >&2 |
|||
exit 1 |
|||
} |
|||
|
|||
trim() { |
|||
# MWRAPPER-139: |
|||
# Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds. |
|||
# Needed for removing poorly interpreted newline sequences when running in more |
|||
# exotic environments such as mingw bash on Windows. |
|||
printf "%s" "${1}" | tr -d '[:space:]' |
|||
} |
|||
|
|||
# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties |
|||
while IFS="=" read -r key value; do |
|||
case "${key-}" in |
|||
distributionUrl) distributionUrl=$(trim "${value-}") ;; |
|||
distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;; |
|||
esac |
|||
done <"${0%/*}/.mvn/wrapper/maven-wrapper.properties" |
|||
[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in ${0%/*}/.mvn/wrapper/maven-wrapper.properties" |
|||
|
|||
case "${distributionUrl##*/}" in |
|||
maven-mvnd-*bin.*) |
|||
MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ |
|||
case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in |
|||
*AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;; |
|||
:Darwin*x86_64) distributionPlatform=darwin-amd64 ;; |
|||
:Darwin*arm64) distributionPlatform=darwin-aarch64 ;; |
|||
:Linux*x86_64*) distributionPlatform=linux-amd64 ;; |
|||
*) |
|||
echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2 |
|||
distributionPlatform=linux-amd64 |
|||
;; |
|||
esac |
|||
distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip" |
|||
;; |
|||
maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;; |
|||
*) MVN_CMD="mvn${0##*/mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;; |
|||
esac |
|||
|
|||
# apply MVNW_REPOURL and calculate MAVEN_HOME |
|||
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash> |
|||
[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}" |
|||
distributionUrlName="${distributionUrl##*/}" |
|||
distributionUrlNameMain="${distributionUrlName%.*}" |
|||
distributionUrlNameMain="${distributionUrlNameMain%-bin}" |
|||
MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}" |
|||
MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")" |
|||
|
|||
exec_maven() { |
|||
unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || : |
|||
exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD" |
|||
} |
|||
|
|||
if [ -d "$MAVEN_HOME" ]; then |
|||
verbose "found existing MAVEN_HOME at $MAVEN_HOME" |
|||
exec_maven "$@" |
|||
fi |
|||
|
|||
case "${distributionUrl-}" in |
|||
*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;; |
|||
*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;; |
|||
esac |
|||
|
|||
# prepare tmp dir |
|||
if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then |
|||
clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; } |
|||
trap clean HUP INT TERM EXIT |
|||
else |
|||
die "cannot create temp dir" |
|||
fi |
|||
|
|||
mkdir -p -- "${MAVEN_HOME%/*}" |
|||
|
|||
# Download and Install Apache Maven |
|||
verbose "Couldn't find MAVEN_HOME, downloading and installing it ..." |
|||
verbose "Downloading from: $distributionUrl" |
|||
verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName" |
|||
|
|||
# select .zip or .tar.gz |
|||
if ! command -v unzip >/dev/null; then |
|||
distributionUrl="${distributionUrl%.zip}.tar.gz" |
|||
distributionUrlName="${distributionUrl##*/}" |
|||
fi |
|||
|
|||
# verbose opt |
|||
__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR='' |
|||
[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v |
|||
|
|||
# normalize http auth |
|||
case "${MVNW_PASSWORD:+has-password}" in |
|||
'') MVNW_USERNAME='' MVNW_PASSWORD='' ;; |
|||
has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;; |
|||
esac |
|||
|
|||
if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then |
|||
verbose "Found wget ... using wget" |
|||
wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl" |
|||
elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then |
|||
verbose "Found curl ... using curl" |
|||
curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl" |
|||
elif set_java_home; then |
|||
verbose "Falling back to use Java to download" |
|||
javaSource="$TMP_DOWNLOAD_DIR/Downloader.java" |
|||
targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName" |
|||
cat >"$javaSource" <<-END |
|||
public class Downloader extends java.net.Authenticator |
|||
{ |
|||
protected java.net.PasswordAuthentication getPasswordAuthentication() |
|||
{ |
|||
return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() ); |
|||
} |
|||
public static void main( String[] args ) throws Exception |
|||
{ |
|||
setDefault( new Downloader() ); |
|||
java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() ); |
|||
} |
|||
} |
|||
END |
|||
# For Cygwin/MinGW, switch paths to Windows format before running javac and java |
|||
verbose " - Compiling Downloader.java ..." |
|||
"$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java" |
|||
verbose " - Running Downloader.java ..." |
|||
"$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")" |
|||
fi |
|||
|
|||
# If specified, validate the SHA-256 sum of the Maven distribution zip file |
|||
if [ -n "${distributionSha256Sum-}" ]; then |
|||
distributionSha256Result=false |
|||
if [ "$MVN_CMD" = mvnd.sh ]; then |
|||
echo "Checksum validation is not supported for maven-mvnd." >&2 |
|||
echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2 |
|||
exit 1 |
|||
elif command -v sha256sum >/dev/null; then |
|||
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c >/dev/null 2>&1; then |
|||
distributionSha256Result=true |
|||
fi |
|||
elif command -v shasum >/dev/null; then |
|||
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then |
|||
distributionSha256Result=true |
|||
fi |
|||
else |
|||
echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2 |
|||
echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2 |
|||
exit 1 |
|||
fi |
|||
if [ $distributionSha256Result = false ]; then |
|||
echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2 |
|||
echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." >&2 |
|||
exit 1 |
|||
fi |
|||
fi |
|||
|
|||
# unzip and move |
|||
if command -v unzip >/dev/null; then |
|||
unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip" |
|||
else |
|||
tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar" |
|||
fi |
|||
printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/mvnw.url" |
|||
mv -- "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME" |
|||
|
|||
clean || : |
|||
exec_maven "$@" |
|||
mvnw.cmd
@@ -0,0 +1,149 @@
<# : batch portion |
|||
@REM ---------------------------------------------------------------------------- |
|||
@REM Licensed to the Apache Software Foundation (ASF) under one |
|||
@REM or more contributor license agreements. See the NOTICE file |
|||
@REM distributed with this work for additional information |
|||
@REM regarding copyright ownership. The ASF licenses this file |
|||
@REM to you under the Apache License, Version 2.0 (the |
|||
@REM "License"); you may not use this file except in compliance |
|||
@REM with the License. You may obtain a copy of the License at |
|||
@REM |
|||
@REM http://www.apache.org/licenses/LICENSE-2.0 |
|||
@REM |
|||
@REM Unless required by applicable law or agreed to in writing, |
|||
@REM software distributed under the License is distributed on an |
|||
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
|||
@REM KIND, either express or implied. See the License for the |
|||
@REM specific language governing permissions and limitations |
|||
@REM under the License. |
|||
@REM ---------------------------------------------------------------------------- |
|||
|
|||
@REM ---------------------------------------------------------------------------- |
|||
@REM Apache Maven Wrapper startup batch script, version 3.3.2 |
|||
@REM |
|||
@REM Optional ENV vars |
|||
@REM MVNW_REPOURL - repo url base for downloading maven distribution |
|||
@REM MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven |
|||
@REM MVNW_VERBOSE - true: enable verbose log; others: silence the output |
|||
@REM ---------------------------------------------------------------------------- |
|||
|
|||
@IF "%__MVNW_ARG0_NAME__%"=="" (SET __MVNW_ARG0_NAME__=%~nx0) |
|||
@SET __MVNW_CMD__= |
|||
@SET __MVNW_ERROR__= |
|||
@SET __MVNW_PSMODULEP_SAVE=%PSModulePath% |
|||
@SET PSModulePath= |
|||
@FOR /F "usebackq tokens=1* delims==" %%A IN (`powershell -noprofile "& {$scriptDir='%~dp0'; $script='%__MVNW_ARG0_NAME__%'; icm -ScriptBlock ([Scriptblock]::Create((Get-Content -Raw '%~f0'))) -NoNewScope}"`) DO @( |
|||
IF "%%A"=="MVN_CMD" (set __MVNW_CMD__=%%B) ELSE IF "%%B"=="" (echo %%A) ELSE (echo %%A=%%B) |
|||
) |
|||
@SET PSModulePath=%__MVNW_PSMODULEP_SAVE% |
|||
@SET __MVNW_PSMODULEP_SAVE= |
|||
@SET __MVNW_ARG0_NAME__= |
|||
@SET MVNW_USERNAME= |
|||
@SET MVNW_PASSWORD= |
|||
@IF NOT "%__MVNW_CMD__%"=="" (%__MVNW_CMD__% %*) |
|||
@echo Cannot start maven from wrapper >&2 && exit /b 1 |
|||
@GOTO :EOF |
|||
: end batch / begin powershell #> |
|||
|
|||
$ErrorActionPreference = "Stop" |
|||
if ($env:MVNW_VERBOSE -eq "true") { |
|||
$VerbosePreference = "Continue" |
|||
} |
|||
|
|||
# calculate distributionUrl, requires .mvn/wrapper/maven-wrapper.properties |
|||
$distributionUrl = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionUrl |
|||
if (!$distributionUrl) { |
|||
Write-Error "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties" |
|||
} |
|||
|
|||
switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) { |
|||
"maven-mvnd-*" { |
|||
$USE_MVND = $true |
|||
$distributionUrl = $distributionUrl -replace '-bin\.[^.]*$',"-windows-amd64.zip" |
|||
$MVN_CMD = "mvnd.cmd" |
|||
break |
|||
} |
|||
default { |
|||
$USE_MVND = $false |
|||
$MVN_CMD = $script -replace '^mvnw','mvn' |
|||
break |
|||
} |
|||
} |
|||
|
|||
# apply MVNW_REPOURL and calculate MAVEN_HOME |
|||
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash> |
|||
if ($env:MVNW_REPOURL) { |
|||
$MVNW_REPO_PATTERN = if ($USE_MVND) { "/org/apache/maven/" } else { "/maven/mvnd/" } |
|||
$distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace '^.*'+$MVNW_REPO_PATTERN,'')" |
|||
} |
|||
$distributionUrlName = $distributionUrl -replace '^.*/','' |
|||
$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$','' |
|||
$MAVEN_HOME_PARENT = "$HOME/.m2/wrapper/dists/$distributionUrlNameMain" |
|||
if ($env:MAVEN_USER_HOME) { |
|||
$MAVEN_HOME_PARENT = "$env:MAVEN_USER_HOME/wrapper/dists/$distributionUrlNameMain" |
|||
} |
|||
$MAVEN_HOME_NAME = ([System.Security.Cryptography.MD5]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join '' |
|||
$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME" |
|||
|
|||
if (Test-Path -Path "$MAVEN_HOME" -PathType Container) { |
|||
Write-Verbose "found existing MAVEN_HOME at $MAVEN_HOME" |
|||
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD" |
|||
exit $? |
|||
} |
|||
|
|||
if (! $distributionUrlNameMain -or ($distributionUrlName -eq $distributionUrlNameMain)) { |
|||
Write-Error "distributionUrl is not valid, must end with *-bin.zip, but found $distributionUrl" |
|||
} |
|||
|
|||
# prepare tmp dir |
|||
$TMP_DOWNLOAD_DIR_HOLDER = New-TemporaryFile |
|||
$TMP_DOWNLOAD_DIR = New-Item -Itemtype Directory -Path "$TMP_DOWNLOAD_DIR_HOLDER.dir" |
|||
$TMP_DOWNLOAD_DIR_HOLDER.Delete() | Out-Null |
|||
trap { |
|||
if ($TMP_DOWNLOAD_DIR.Exists) { |
|||
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null } |
|||
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" } |
|||
} |
|||
} |
|||
|
|||
New-Item -Itemtype Directory -Path "$MAVEN_HOME_PARENT" -Force | Out-Null |
|||
|
|||
# Download and Install Apache Maven |
|||
Write-Verbose "Couldn't find MAVEN_HOME, downloading and installing it ..." |
|||
Write-Verbose "Downloading from: $distributionUrl" |
|||
Write-Verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName" |
|||
|
|||
$webclient = New-Object System.Net.WebClient |
|||
if ($env:MVNW_USERNAME -and $env:MVNW_PASSWORD) { |
|||
$webclient.Credentials = New-Object System.Net.NetworkCredential($env:MVNW_USERNAME, $env:MVNW_PASSWORD) |
|||
} |
|||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 |
|||
$webclient.DownloadFile($distributionUrl, "$TMP_DOWNLOAD_DIR/$distributionUrlName") | Out-Null |
|||
|
|||
# If specified, validate the SHA-256 sum of the Maven distribution zip file |
|||
$distributionSha256Sum = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionSha256Sum |
|||
if ($distributionSha256Sum) { |
|||
if ($USE_MVND) { |
|||
Write-Error "Checksum validation is not supported for maven-mvnd. `nPlease disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." |
|||
} |
|||
Import-Module $PSHOME\Modules\Microsoft.PowerShell.Utility -Function Get-FileHash |
|||
if ((Get-FileHash "$TMP_DOWNLOAD_DIR/$distributionUrlName" -Algorithm SHA256).Hash.ToLower() -ne $distributionSha256Sum) { |
|||
Write-Error "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised. If you updated your Maven version, you need to update the specified distributionSha256Sum property." |
|||
} |
|||
} |
|||
|
|||
# unzip and move |
|||
Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null |
|||
Rename-Item -Path "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" -NewName $MAVEN_HOME_NAME | Out-Null |
|||
try { |
|||
Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null |
|||
} catch { |
|||
if (! (Test-Path -Path "$MAVEN_HOME" -PathType Container)) { |
|||
Write-Error "fail to move MAVEN_HOME" |
|||
} |
|||
} finally { |
|||
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null } |
|||
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" } |
|||
} |
|||
|
|||
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD" |
|||
@ -0,0 +1,327 @@ |
|||
<?xml version="1.0" encoding="UTF-8"?> |
|||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
|||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> |
|||
<modelVersion>4.0.0</modelVersion> |
|||
<parent> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-starter-parent</artifactId> |
|||
<version>2.0.0.RELEASE</version> |
|||
<relativePath/> <!-- lookup parent from repository --> |
|||
</parent> |
|||
<groupId>com.zyzs</groupId> |
|||
<artifactId>OtherDatasave</artifactId> |
|||
<version>0.0.1-SNAPSHOT</version> |
|||
<name>OtherDatasave</name> |
|||
<description>Demo project for Spring Boot</description> |
|||
<url/> |
|||
<licenses> |
|||
<license/> |
|||
</licenses> |
|||
<developers> |
|||
<developer/> |
|||
</developers> |
|||
<scm> |
|||
<connection/> |
|||
<developerConnection/> |
|||
<tag/> |
|||
<url/> |
|||
</scm> |
|||
<properties> |
|||
<source>1.8</source> |
|||
<start-class>com.zyzs.otherdatasave.OtherDatasaveApplication</start-class> |
|||
<es.version>6.0.0</es.version> |
|||
<spring-boot-version>2.0.0.RELEASE</spring-boot-version> |
|||
<springframework.boot.version>2.0.0.RELEASE</springframework.boot.version> |
|||
<springframework.version>5.0.16.RELEASE</springframework.version> |
|||
<logstash.version>4.4</logstash.version> |
|||
</properties> |
|||
<dependencies> |
|||
<dependency> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-starter</artifactId> |
|||
</dependency> |
|||
|
|||
<dependency> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-starter-test</artifactId> |
|||
<scope>test</scope> |
|||
</dependency> |
|||
|
|||
|
|||
|
|||
<dependency> |
|||
<groupId>org.junit.jupiter</groupId> |
|||
<artifactId>junit-jupiter</artifactId> |
|||
<version>5.9.2</version> <!-- use the latest version number --> |
|||
<scope>test</scope> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.springframework</groupId> |
|||
<artifactId>spring-webmvc</artifactId> |
|||
<version>${springframework.version}</version> |
|||
</dependency> |
|||
|
|||
|
|||
|
|||
<dependency> |
|||
<groupId>org.springframework</groupId> |
|||
<artifactId>spring-core</artifactId> |
|||
<version>${springframework.version}</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-starter-logging</artifactId> |
|||
</dependency> |
|||
|
|||
<dependency> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-starter-web</artifactId> |
|||
<version>${springframework.boot.version}</version> |
|||
<exclusions> |
|||
<exclusion> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-starter-json</artifactId> |
|||
</exclusion> |
|||
<exclusion> |
|||
<groupId>org.springframework</groupId> |
|||
<artifactId>spring-web</artifactId> |
|||
</exclusion> |
|||
<exclusion> |
|||
<groupId>org.springframework</groupId> |
|||
<artifactId>spring-webmvc</artifactId> |
|||
</exclusion> |
|||
</exclusions> |
|||
</dependency> |
|||
|
|||
<dependency> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-starter</artifactId> |
|||
<version>2.0.0.RELEASE</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-starter-data-jpa</artifactId> |
|||
<version>2.0.0.RELEASE</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-devtools</artifactId> |
|||
<version>2.0.0.RELEASE</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-starter-test</artifactId> |
|||
<version>2.0.0.RELEASE</version> |
|||
</dependency> |
|||
|
|||
<dependency> |
|||
<groupId>com.fasterxml.jackson.core</groupId> |
|||
<artifactId>jackson-databind</artifactId> |
|||
<version>2.9.3</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>commons-lang</groupId> |
|||
<artifactId>commons-lang</artifactId> |
|||
<version>2.6</version> |
|||
<scope>compile</scope> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.apache.commons</groupId> |
|||
<artifactId>commons-pool2</artifactId> |
|||
</dependency> |
|||
|
|||
<!-- Bring in Redis and exclude the Lettuce client --> |
|||
<dependency> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-starter-data-redis</artifactId> |
|||
|
|||
<exclusions> |
|||
<exclusion> |
|||
<groupId>io.lettuce</groupId> |
|||
<artifactId>lettuce-core</artifactId> |
|||
</exclusion> |
|||
</exclusions> |
|||
</dependency> |
|||
|
|||
<!-- Use the Jedis client instead --> |
|||
<dependency> |
|||
<groupId>redis.clients</groupId> |
|||
<artifactId>jedis</artifactId> |
|||
<version>3.7.0</version> |
|||
</dependency> |
|||
|
|||
|
|||
<dependency> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-starter-data-redis</artifactId> |
|||
<version>2.4.11</version> |
|||
</dependency> |
|||
|
|||
<dependency> |
|||
<groupId>com.alibaba</groupId> |
|||
<artifactId>easyexcel</artifactId> |
|||
<version>3.2.0</version> |
|||
</dependency> |
|||
|
|||
|
|||
<dependency> |
|||
<groupId>mysql</groupId> |
|||
<artifactId>mysql-connector-java</artifactId> |
|||
<version>8.0.29</version> |
|||
</dependency> |
|||
|
|||
<dependency> |
|||
<groupId>org.apache.commons</groupId> |
|||
<artifactId>commons-pool2</artifactId> |
|||
<version>2.5.0</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.apache.commons</groupId> |
|||
<artifactId>commons-lang3</artifactId> |
|||
<version>3.7</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>commons-lang</groupId> |
|||
<artifactId>commons-lang</artifactId> |
|||
<version>2.6</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>commons-io</groupId> |
|||
<artifactId>commons-io</artifactId> |
|||
<version>2.4</version> |
|||
</dependency> |
|||
|
|||
<dependency> |
|||
<groupId>com.google.guava</groupId> |
|||
<artifactId>guava</artifactId> |
|||
<version>19.0</version> |
|||
</dependency> |
|||
|
|||
<dependency> |
|||
<groupId>com.alibaba</groupId> |
|||
<artifactId>fastjson</artifactId> |
|||
<version>1.2.6</version> |
|||
</dependency> |
|||
|
|||
<dependency> |
|||
|
|||
<groupId>com.bfd</groupId> |
|||
<artifactId>elastiUtilsHigh</artifactId> |
|||
<version>7.17.4</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.elasticsearch</groupId> |
|||
<artifactId>elasticsearch</artifactId> |
|||
<version>7.17.4</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.elasticsearch.client</groupId> |
|||
<artifactId>transport</artifactId> |
|||
<version>7.17.4</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.elasticsearch.plugin</groupId> |
|||
<artifactId>transport-netty4-client</artifactId> |
|||
<version>7.17.4</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.elasticsearch.client</groupId> |
|||
<artifactId>elasticsearch-rest-high-level-client</artifactId> |
|||
<version>7.17.4</version> |
|||
</dependency> |
|||
|
|||
|
|||
<dependency> |
|||
<groupId>com.squareup.okhttp3</groupId> |
|||
<artifactId>okhttp</artifactId> |
|||
<version>3.9.1</version> |
|||
</dependency> |
|||
|
|||
<dependency> |
|||
<groupId>org.apache.kafka</groupId> |
|||
<artifactId>kafka-clients</artifactId> |
|||
<version>0.10.1.0</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>com.wandoulabs.jodis</groupId> |
|||
<artifactId>jodis</artifactId> |
|||
<version>0.1.2</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.jsoup</groupId> |
|||
<artifactId>jsoup</artifactId> |
|||
<version>1.8.3</version> |
|||
</dependency> |
|||
|
|||
<dependency> |
|||
<groupId>bfd</groupId> |
|||
<artifactId>utils</artifactId> |
|||
<version>3.0.0</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>com.bfd</groupId> |
|||
<artifactId>elastiUtils</artifactId> |
|||
<version>0.0.2-SNAPSHOT</version> |
|||
</dependency> |
|||
<!-- <dependency>--> |
|||
<!-- <groupId>kafka-utils</groupId>--> |
|||
<!-- <artifactId>kafka</artifactId>--> |
|||
<!-- <version>0.10</version>--> |
|||
<!-- </dependency>--> |
|||
|
|||
<dependency> |
|||
<groupId>kafka-utils</groupId> |
|||
<artifactId>kafka</artifactId> |
|||
<version>0.10</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>org.projectlombok</groupId> |
|||
<artifactId>lombok</artifactId> |
|||
<version>1.18.4</version> |
|||
<scope>provided</scope> |
|||
</dependency> |
|||
|
|||
<dependency> |
|||
<groupId>com.bfd.javaxmail</groupId> |
|||
<artifactId>mail</artifactId> |
|||
<version>0.0.1</version> |
|||
</dependency> |
|||
<dependency> |
|||
<groupId>com.bfd.commontools</groupId> |
|||
<artifactId>commontools</artifactId> |
|||
<version>0.0.2</version> |
|||
<!--<systemPath>BfdRedisTools-1.0.0.jar</systemPath>--> |
|||
</dependency> |
|||
|
|||
|
|||
</dependencies> |
|||
<build> |
|||
<plugins> |
|||
<plugin> |
|||
<groupId>org.apache.maven.plugins</groupId> |
|||
<artifactId>maven-compiler-plugin</artifactId> |
|||
<configuration> |
|||
<source>1.8</source> |
|||
<target>1.8</target> |
|||
</configuration> |
|||
</plugin> |
|||
<plugin> |
|||
<groupId>org.springframework.boot</groupId> |
|||
<artifactId>spring-boot-maven-plugin</artifactId> |
|||
<version>2.0.0.RELEASE</version> |
|||
<configuration> |
|||
<mainClass>com.zyzs.otherdatasave.OtherDatasaveApplication</mainClass> |
|||
</configuration> |
|||
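<!-- repackage rebuilds the jar as an executable "fat" jar whose entry point is the mainClass configured above --> |
|||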
<executions> |
|||
<execution> |
|||
<goals> |
|||
<goal>repackage</goal> |
|||
</goals> |
|||
</execution> |
|||
</executions> |
|||
</plugin> |
|||
</plugins> |
|||
</build> |
|||
|
|||
|
|||
</project> |
|||
@ -0,0 +1,25 @@ |
|||
package com.zyzs.otherdatasave; |
|||
|
|||
import com.alibaba.excel.EasyExcel; |
|||
import com.zyzs.otherdatasave.bean.ExcelData; |
|||
import com.zyzs.otherdatasave.bean.ExcelListener; |
|||
|
|||
import java.util.List; |
|||
|
|||
public class ExcelReader { |
|||
|
|||
public static void main(String[] args) { |
|||
|
|||
String excelFilePath = "data/20250519100453.xls"; |
|||
|
|||
// Read the Excel file |
|||
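// ExcelListener collects each parsed row via invoke(); doReadSync() parses sheet 0 synchronously and returns the mapped rows |
|||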
List<ExcelData> excelDataList = EasyExcel.read(excelFilePath, ExcelData.class, new ExcelListener()) |
|||
.sheet(0) |
|||
.doReadSync(); |
|||
// Print the rows that were read |
|||
for (ExcelData data : excelDataList) { |
|||
System.out.println("Field1: " + data.getField1() + ", Field2: " + data.getField2()); |
|||
|
|||
} |
|||
} |
|||
} |
|||
@ -0,0 +1,146 @@ |
|||
package com.zyzs.otherdatasave; |
|||
|
|||
import com.bfd.crawler.elasti.ElastiProducerHigh; |
|||
import com.bfd.crawler.kafka7.KfkConsumer; |
|||
import com.bfd.crawler.utils.JsonUtils; |
|||
import com.zyzs.otherdatasave.cache.Constants; |
|||
import com.zyzs.otherdatasave.config.AppConfig; |
|||
import com.zyzs.otherdatasave.service.*; |
|||
import com.zyzs.otherdatasave.util.ReadLine; |
|||
import com.zyzs.otherdatasave.worker.*; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.beans.factory.annotation.Autowired; |
|||
import org.springframework.boot.SpringApplication; |
|||
import org.springframework.boot.autoconfigure.SpringBootApplication; |
|||
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; |
|||
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration; |
|||
import org.springframework.context.ConfigurableApplicationContext; |
|||
|
|||
import java.io.File; |
|||
import java.util.HashMap; |
|||
import java.util.List; |
|||
import java.util.Map; |
|||
|
|||
@SpringBootApplication(exclude={DataSourceAutoConfiguration.class, HibernateJpaAutoConfiguration.class}) |
|||
public class OtherDatasaveApplication { |
|||
@Autowired |
|||
private AppConfig config; |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(OtherDatasaveApplication.class); |
|||
|
|||
|
|||
@Autowired |
|||
private QueryPaperProducer queryPaper; |
|||
@Autowired |
|||
private QuerycliniProducer queryclini; |
|||
@Autowired |
|||
private QueryDrugProducer queryDrug; |
|||
|
|||
@Autowired |
|||
private QueryProProducer queryProj; |
|||
|
|||
@Autowired |
|||
private QueryPatentProducer queryPatent; |
|||
|
|||
|
|||
@Autowired |
|||
private QueryEqProducer queryEqProducer; |
|||
|
|||
@Autowired |
|||
private QueryorgProducer queryorgProducer; |
|||
|
|||
|
|||
@Autowired |
|||
private QueryKfkaProducer queryKfkaProducer; |
|||
|
|||
|
|||
|
|||
|
|||
public static void main(String[] args) { |
|||
ConfigurableApplicationContext context = SpringApplication.run(OtherDatasaveApplication.class, args); |
|||
context.getBean(OtherDatasaveApplication.class).start(); |
|||
} |
|||
|
|||
private void start() { |
|||
|
|||
config.verify(); |
|||
LOGGER.info("App config:{}.", config.toString()); |
|||
// |
|||
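// Start the Kafka reader threads, one call per data type; the arguments (assumed from usage) are: target queue, topic name from config, reader-thread count, consumer group id, and a mode flag |
|||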
KfkConsumer.startReadThread(Constants.getClini(), config.getReadcliniTopic(), 9, config.getReadcliniGroupid(),1); |
|||
KfkConsumer.startReadThread(Constants.getProj(), config.getReadprojTopic(), 9, config.getReadprojgroupid(),1); |
|||
KfkConsumer.startReadThread(Constants.getDrug(), config.getReaddrugTopic(), 9, config.getReaddrugGroupid(),1); |
|||
KfkConsumer.startReadThread(Constants.getPatent(), config.getReadpatentTopic(), 9, config.getReadpatentGroupid(),1); |
|||
KfkConsumer.startReadThread(Constants.getPaper(), config.getReadpaperTopic(), 9, config.getReadpaperGroupid(),1); |
|||
|
|||
KfkConsumer.startReadThread(Constants.getEquipment(), config.getReadeqTopic(), 9, config.getReadeqGroupid(),1); |
|||
|
|||
queryPatent.start(); |
|||
queryclini.start(); |
|||
queryProj.start(); |
|||
queryEqProducer.start(); |
|||
queryPaper.start(); |
|||
queryDrug.start(); |
|||
|
|||
|
|||
|
|||
// KfkConsumer.startReadThread(Constants.getClini(), "0522", 9, "123456",1); |
|||
// |
|||
// queryKfkaProducer.start(); |
|||
|
|||
|
|||
|
|||
// KfkConsumer.startReadThread(Constants.getClini(), "newa123", 9, "123",1); |
|||
// |
|||
// queryKfkaProducer.start(); |
|||
|
|||
|
|||
|
|||
} |
|||
//Write data back to ES: 306812 laboratory, index csci_laboratory |
|||
//csci_org |
|||
// private void start() { |
|||
// |
|||
// config.verify(); |
|||
// LOGGER.info("App config:{}.", config.toString()); |
|||
// KfkConsumer.startReadThread(Constants.getEquipment(), "csci_org", 9,"0798",1); |
|||
// queryorgProducer.start(); |
|||
|
|||
// KfkConsumer.startReadThread(Constants.getEquipment(), "csci_laboratory", 9,"0677",1); |
|||
|
|||
// |
|||
// List<String> list = ReadLine.readLine(new File("data/subjectIds.txt")); |
|||
// for (String line : list) { |
|||
// |
|||
// String id= line.trim().split("\\s+")[0]; |
|||
// String name =line.trim().split("\\s+")[1]; |
|||
// String lat =line.trim().split("\\s+")[2]; |
|||
// String lon =line.trim().split("\\s+")[3]; |
|||
// |
|||
// |
|||
// Map cou=new HashMap<>(); |
|||
// Map location=new HashMap<>(); |
|||
// cou.put("countryId", id); |
|||
// cou.put("countryName", name); |
|||
// cou.put("_id_", id); |
|||
// location.put("lat",lat); |
|||
// location.put("lon",lon); |
|||
// cou.put("location", location); |
|||
// System.out.println(JsonUtils.toJSONString(cou)); |
|||
// ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "countries_location","_doc" ); |
|||
// elastiProducer.sendMessageToEs(JsonUtils.toJSONString(cou)); |
|||
// String a[10]; |
|||
// if (line.contains("\t")) { |
|||
// System.out.println("分隔符是:[\t]"); |
|||
// a = line.split("\t"); |
|||
// } else if (line.contains(" ")) { |
|||
// a = line.trim().split("\\s+"); |
|||
// } |
|||
// |
|||
|
|||
// } |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,38 @@ |
|||
package com.zyzs.otherdatasave.bean; |
|||
|
|||
import lombok.Data; |
|||
|
|||
import java.util.List; |
|||
|
|||
@Data |
|||
public class Clini { |
|||
private String desc; |
|||
private String title; |
|||
private String regNum; |
|||
private String regDate; |
|||
private String regStatus; |
|||
private String regTitle; |
|||
private String scientificName; |
|||
private String source; |
|||
private String responsibleUnit; |
|||
private String studyType; |
|||
private String phase; |
|||
private String diseases; |
|||
private String studyDesign; |
|||
private String purpose; |
|||
private String projectFunding; |
|||
private String country; |
|||
private String imtime; |
|||
private List measures; |
|||
private List recruitment; |
|||
private List avatarPath; |
|||
private String docId; |
|||
private String dataId; |
|||
private String _id_; |
|||
private String enSource; |
|||
private String docType ; |
|||
private long createTime; |
|||
private String createTimeStr; |
|||
private String isShow; |
|||
private String crawlUrl; |
|||
} |
|||
@ -0,0 +1,40 @@ |
|||
package com.zyzs.otherdatasave.bean; |
|||
|
|||
import lombok.Data; |
|||
|
|||
import java.util.Date; |
|||
import java.util.List; |
|||
|
|||
@Data |
|||
public class ClinikJ { |
|||
|
|||
|
|||
|
|||
|
|||
private String id; |
|||
private Date create_time; |
|||
private Date update_time; |
|||
private byte del; |
|||
private String classify; |
|||
private String subject; |
|||
private String image; |
|||
private String url; |
|||
private String tag; |
|||
private String title; |
|||
private String summary; |
|||
private String keywords; |
|||
private String country; |
|||
private List variable; |
|||
|
|||
|
|||
|
|||
|
|||
private String title_raw; |
|||
private String summary_raw; |
|||
private String keywords_raw; |
|||
private String country_raw; |
|||
private List file_list; |
|||
private String trial_registration_date; |
|||
private String trial_intervention; |
|||
private String trial_intervention_raw; |
|||
} |
|||
@ -0,0 +1,55 @@ |
|||
package com.zyzs.otherdatasave.bean; |
|||
|
|||
|
|||
import lombok.Data; |
|||
|
|||
import java.util.List; |
|||
|
|||
@Data |
|||
public class Drug { |
|||
private String drugName; |
|||
private String genericName; |
|||
private String ingredients; |
|||
private String indication; |
|||
private String strength; |
|||
private String dosage; |
|||
private String route; |
|||
private String sideEffect; |
|||
private String marketingStatus; |
|||
private String company; |
|||
private String country; |
|||
private String accessionNum; |
|||
private String accessionDate; |
|||
private String storageConditions; |
|||
private List drugInteractions; |
|||
private List attachmentInstructions; |
|||
private List attachmentInstructionsfilePath; |
|||
private String submission; |
|||
private String actionType; |
|||
private String submissionClassification; |
|||
private String reviewPriority; |
|||
private String lettersReviewsLabels; |
|||
private String notes ; |
|||
private String structure; |
|||
private List structurefilePath; |
|||
private long createTime; |
|||
private String createTimeStr; |
|||
private long crawlTime; |
|||
private String crawlUrl; |
|||
private String keywords; |
|||
private String weight; |
|||
private String chemicalFormula; |
|||
private String docId; |
|||
private String dataId; |
|||
private String _id_; |
|||
private String isShow; |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,27 @@ |
|||
package com.zyzs.otherdatasave.bean; |
|||
|
|||
import lombok.Data; |
|||
|
|||
import java.util.List; |
|||
|
|||
@Data |
|||
public class Eqiupment { |
|||
|
|||
private long crawlTime; |
|||
private String crawlUrl; |
|||
private String cate; |
|||
private String title; |
|||
private String introduction_short; |
|||
private List img; |
|||
private List imgPath; |
|||
private String content; |
|||
private String forwardcontent; |
|||
private String docId; |
|||
private String dataId; |
|||
private String _id_; |
|||
private long createTime; |
|||
private String createTimeStr; |
|||
|
|||
private String isShow; |
|||
|
|||
} |
|||
@ -0,0 +1,129 @@ |
|||
package com.zyzs.otherdatasave.bean; |
|||
|
|||
import com.alibaba.excel.annotation.ExcelProperty; |
|||
|
|||
public class ExcelData { |
|||
|
|||
@ExcelProperty("项目名称") |
|||
private String name; |
|||
|
|||
// @ExcelProperty("项目名称") |
|||
// private String othername; |
|||
// |
|||
// @ExcelProperty("缩写名称") |
|||
// private String othername; |
|||
// @ExcelProperty("翻译名称(英文)") |
|||
// private String othername; |
|||
// |
|||
// @ExcelProperty("翻译名称(中文)") |
|||
// private String othername; |
|||
// |
|||
// @ExcelProperty("项目编号") |
|||
// private String id; |
|||
// @ExcelProperty("其他编号") |
|||
// private String field1; |
|||
// @ExcelProperty("子项目编号") |
|||
// private String field1; |
|||
// @ExcelProperty("国别") |
|||
// private String country; |
|||
// @ExcelProperty("语种") |
|||
// private String field2; |
|||
// |
|||
// @ExcelProperty("资助来源") |
|||
// private String fundingAgency; |
|||
// @ExcelProperty("管理机构") |
|||
// private String field1; |
|||
// |
|||
// @ExcelProperty("项目类型") |
|||
// private String field2; |
|||
// @ExcelProperty("学科分类") |
|||
// private String fieldsSubject; |
|||
// |
|||
// @ExcelProperty("项目开始年") |
|||
// private String startyear; |
|||
// |
|||
// @ExcelProperty("特色分类") |
|||
// private String topics; |
|||
// |
|||
// @ExcelProperty("项目负责人") |
|||
// private String author; |
|||
// |
|||
// @ExcelProperty("项目参加人") |
|||
// private String author; |
|||
// |
|||
// @ExcelProperty("主持机构") |
|||
// private String Institution; |
|||
// |
|||
// @ExcelProperty("参与机构") |
|||
// private String Institutaion; |
|||
// |
|||
// @ExcelProperty("批准年") |
|||
// private String field1; |
|||
// |
|||
// |
|||
// @ExcelProperty("开始日期") |
|||
// private String starttime; |
|||
// @ExcelProperty("结束日期") |
|||
// private String endtime; |
|||
// |
|||
// @ExcelProperty("资助经费") |
|||
// private String funding; |
|||
// @ExcelProperty("经费单位") |
|||
// private String field1; |
|||
// |
|||
// |
|||
// @ExcelProperty("项目网址") |
|||
// private String field1; |
|||
// |
|||
// |
|||
// @ExcelProperty("项目摘要") |
|||
// private String summary; |
|||
// |
|||
// |
|||
// @ExcelProperty("原文摘要") |
|||
// private String summary; |
|||
// |
|||
// @ExcelProperty("结题摘要") |
|||
// private String summary; |
|||
// |
|||
// @ExcelProperty("翻译摘要") |
|||
// private String summary; |
|||
// |
|||
// @ExcelProperty("英文关键词") |
|||
// String field1; |
|||
// |
|||
// @ExcelProperty("来源关键词") |
|||
// private String field2; |
|||
// |
|||
// @ExcelProperty("翻译关键词(英文") |
|||
// private String field2; |
|||
// |
|||
// @ExcelProperty("翻译关键词(中文") |
|||
// private String field2; |
|||
// |
|||
@ExcelProperty("抽取关键词") |
|||
private String field2; |
|||
|
|||
|
|||
@ExcelProperty("欧盟资助金额") |
|||
private String field1; |
|||
|
|||
|
|||
|
|||
public String getField1() { |
|||
return field1; |
|||
} |
|||
|
|||
public void setField1(String field1) { |
|||
this.field1 = field1; |
|||
} |
|||
|
|||
public String getField2() { |
|||
return field2; |
|||
} |
|||
|
|||
public void setField2(String field2) { |
|||
this.field2 = field2; |
|||
} |
|||
|
|||
} |
|||
@ -0,0 +1,26 @@ |
|||
package com.zyzs.otherdatasave.bean; |
|||
|
|||
import com.alibaba.excel.context.AnalysisContext; |
|||
import com.alibaba.excel.event.AnalysisEventListener; |
|||
|
|||
import java.util.ArrayList; |
|||
import java.util.List; |
|||
|
|||
public class ExcelListener extends AnalysisEventListener<ExcelData> { |
|||
|
|||
private final List<ExcelData> excelDataList = new ArrayList<>(); |
|||
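// invoke() is called once per parsed row, so all rows end up accumulated in this list |
|||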
|
|||
@Override |
|||
public void invoke(ExcelData data, AnalysisContext context) { |
|||
excelDataList.add(data); |
|||
} |
|||
|
|||
@Override |
|||
public void doAfterAllAnalysed(AnalysisContext context) { |
|||
// Operations to run after all rows have been read can be added here |
|||
} |
|||
|
|||
public List<ExcelData> getExcelDataList() { |
|||
return excelDataList; |
|||
} |
|||
} |
|||
@ -0,0 +1,46 @@ |
|||
package com.zyzs.otherdatasave.bean; |
|||
|
|||
import jdk.nashorn.internal.ir.LiteralNode; |
|||
import lombok.Data; |
|||
|
|||
import java.util.ArrayList; |
|||
import java.util.List; |
|||
|
|||
@Data |
|||
public class Paper { |
|||
private String title; |
|||
private int citedCountTotal; |
|||
private List authors; |
|||
private List source; |
|||
private String pubDate; |
|||
private String doi; |
|||
private String classify; |
|||
private String keywords; |
|||
private String summary; |
|||
private String topics; |
|||
private String fieldsSubject; |
|||
private String references; |
|||
private String docId; |
|||
private String dataId; |
|||
private String _id_; |
|||
private String country ; |
|||
private String crawlUrl; |
|||
private String enSource; |
|||
private String docType; |
|||
private String avatarPath; |
|||
private String translatetitle; |
|||
private String translatekeywords; |
|||
private String translatesummary; |
|||
private long createTime; |
|||
private String createTimeStr; |
|||
private String isshow; |
|||
private String content; |
|||
private List filePath; |
|||
private List filePathSize; |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,47 @@ |
|||
package com.zyzs.otherdatasave.bean; |
|||
|
|||
|
|||
import lombok.Data; |
|||
|
|||
import java.util.List; |
|||
|
|||
@Data |
|||
public class Patent { |
|||
private String title; |
|||
private List inventor; |
|||
private String summary; |
|||
private String keywords; |
|||
private String num; |
|||
private List patentee; |
|||
private List fieldsSubject; |
|||
private String patentNum; |
|||
private String patentTime; |
|||
private String applyNum; |
|||
private String applyTime; |
|||
private String classify; |
|||
private String country; |
|||
private int citedCountTotal; |
|||
private String claims; |
|||
private List imagePath; |
|||
private String docId; |
|||
private String dataId; |
|||
private String _id_; |
|||
private String enSource; |
|||
private String avatarPath; |
|||
private String translatetitle; |
|||
private List translateinventor; |
|||
private String translatesummary; |
|||
private String translatekeywords; |
|||
private String translatepatentee; |
|||
private List translatefieldsSubject; |
|||
private String translateClassify; |
|||
private String translatecountry; |
|||
private String translateclaims; |
|||
private long createTime; |
|||
private String createTimeStr; |
|||
private String docType; |
|||
private String crawlTimeStr; |
|||
private String crawlUrl; |
|||
private String ishow; |
|||
|
|||
} |
|||
@ -0,0 +1,38 @@ |
|||
package com.zyzs.otherdatasave.bean; |
|||
|
|||
|
|||
import lombok.Data; |
|||
|
|||
import java.util.Date; |
|||
import java.util.List; |
|||
|
|||
@Data |
|||
public class Patentkj { |
|||
|
|||
private String id; |
|||
private Date create_time; |
|||
private Date update_time; |
|||
private byte del; |
|||
private String classify; |
|||
private String subject; |
|||
private String image; |
|||
private String url; |
|||
private String tag; |
|||
private String title; |
|||
private String summary; |
|||
private String keywords; |
|||
private String country; |
|||
private List variable; |
|||
|
|||
|
|||
|
|||
|
|||
private String title_raw; |
|||
private String summary_raw; |
|||
private String keywords_raw; |
|||
private String country_raw; |
|||
private List file_list; |
|||
private Date patent_pub_date; |
|||
private long patent_cited_count; |
|||
|
|||
} |
|||
@ -0,0 +1,34 @@ |
|||
package com.zyzs.otherdatasave.bean; |
|||
|
|||
|
|||
import lombok.Data; |
|||
|
|||
import java.util.List; |
|||
|
|||
@Data |
|||
public class Proj { |
|||
|
|||
private String name; |
|||
private String pubTime; |
|||
private String fundingAgency; |
|||
private String cycle; |
|||
private String funding; |
|||
private List projectInformation; |
|||
private List fundingInformation; |
|||
private List organs ; |
|||
private String dataId; |
|||
private String _id_; |
|||
private String country ; |
|||
private String enSource; |
|||
private String docType ; |
|||
private String docId; |
|||
private long createTime; |
|||
private String createTimeStr; |
|||
private String keywords; |
|||
private String isShow; |
|||
private String crawlUrl; |
|||
|
|||
|
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,24 @@ |
|||
package com.zyzs.otherdatasave.cache; |
|||
|
|||
import java.util.Date; |
|||
|
|||
public class CacheObject { |
|||
|
|||
private String res; |
|||
private long timeStamp; |
|||
public String getRes() { |
|||
return res; |
|||
} |
|||
public void setRes(String res) { |
|||
this.res = res; |
|||
} |
|||
public long getTimeStamp() { |
|||
return timeStamp; |
|||
} |
|||
public void setTimeStamp(long timeStamp) { |
|||
this.timeStamp = timeStamp; |
|||
} |
|||
public static void main(String[] args) { |
|||
System.out.println(new Date().getTime()); |
|||
} |
|||
} |
|||
@ -0,0 +1,533 @@ |
|||
package com.zyzs.otherdatasave.cache; |
|||
|
|||
|
|||
import java.util.Date; |
|||
import java.util.HashMap; |
|||
import java.util.List; |
|||
import java.util.Map; |
|||
import java.util.concurrent.ArrayBlockingQueue; |
|||
import java.util.concurrent.ConcurrentHashMap; |
|||
import java.util.concurrent.LinkedBlockingDeque; |
|||
|
|||
/** |
|||
* @author 35707 |
|||
* |
|||
*/ |
|||
public final class Constants { |
|||
|
|||
private static LinkedBlockingDeque<HashMap<String, Object>> inputQueue = new LinkedBlockingDeque<HashMap<String, Object>>(1000); |
|||
|
|||
public static ArrayBlockingQueue<String> content = new ArrayBlockingQueue<>(8000); |
|||
public static ArrayBlockingQueue<String> comment = new ArrayBlockingQueue<>(8000); |
|||
public static ArrayBlockingQueue<String> fans = new ArrayBlockingQueue<>(8000); |
|||
public static ArrayBlockingQueue<String> follow = new ArrayBlockingQueue<>(8000); |
|||
public static ArrayBlockingQueue<String> user = new ArrayBlockingQueue<>(8000); |
|||
|
|||
|
|||
public static Map<String,Object> maps = new HashMap<>(8000); |
|||
|
|||
|
|||
public static ArrayBlockingQueue<String> getClini() { |
|||
return clini; |
|||
} |
|||
|
|||
public static void setClini(ArrayBlockingQueue<String> clini) { |
|||
Constants.clini = clini; |
|||
} |
|||
|
|||
public static ArrayBlockingQueue<String> getProj() { |
|||
return proj; |
|||
} |
|||
|
|||
public static void setProj(ArrayBlockingQueue<String> proj) { |
|||
Constants.proj = proj; |
|||
} |
|||
|
|||
public static ArrayBlockingQueue<String> getDrug() { |
|||
return drug; |
|||
} |
|||
|
|||
public static void setDrug(ArrayBlockingQueue<String> drug) { |
|||
Constants.drug = drug; |
|||
} |
|||
|
|||
public static ArrayBlockingQueue<String> getPatent() { |
|||
return patent; |
|||
} |
|||
|
|||
public static void setPatent(ArrayBlockingQueue<String> patent) { |
|||
Constants.patent = patent; |
|||
} |
|||
|
|||
public static ArrayBlockingQueue<String> getPaper() { |
|||
return paper; |
|||
} |
|||
|
|||
public static void setPaper(ArrayBlockingQueue<String> paper) { |
|||
Constants.paper = paper; |
|||
} |
|||
|
|||
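// Bounded hand-off queues (capacity 8000), one per data type: the Kafka reader threads fill them, and the Query*Producer workers started in OtherDatasaveApplication presumably drain them |
|||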
public static ArrayBlockingQueue<String> clini = new ArrayBlockingQueue<>(8000); |
|||
public static ArrayBlockingQueue<String> proj = new ArrayBlockingQueue<>(8000); |
|||
public static ArrayBlockingQueue<String> drug = new ArrayBlockingQueue<>(8000); |
|||
public static ArrayBlockingQueue<String> patent = new ArrayBlockingQueue<>(8000); |
|||
public static ArrayBlockingQueue<String> paper = new ArrayBlockingQueue<>(8000); |
|||
public static ArrayBlockingQueue<String> equipment = new ArrayBlockingQueue<>(8000); |
|||
|
|||
|
|||
public static ArrayBlockingQueue<String> getEquipment() { |
|||
return equipment; |
|||
} |
|||
|
|||
public static void setEquipment(ArrayBlockingQueue<String> equipment) { |
|||
Constants.equipment = equipment; |
|||
} |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
public static ArrayBlockingQueue<String> getOutQueue() { |
|||
return outQueue; |
|||
} |
|||
|
|||
public static void setOutQueue(ArrayBlockingQueue<String> outQueue) { |
|||
Constants.outQueue = outQueue; |
|||
} |
|||
|
|||
private static ArrayBlockingQueue<String> outQueue = new ArrayBlockingQueue<>(5000); |
|||
private static ArrayBlockingQueue<String> userInputQueue = new ArrayBlockingQueue<String>(5000); |
|||
|
|||
private static ArrayBlockingQueue<String> InputQueue = new ArrayBlockingQueue<String>(5000); |
|||
|
|||
private static ArrayBlockingQueue<String> textInputQueue = new ArrayBlockingQueue<String>(5000); |
|||
|
|||
|
|||
|
|||
private static LinkedBlockingDeque<HashMap<String, Object>> hdfsInputQueue = new LinkedBlockingDeque<HashMap<String, Object>>(1000); |
|||
private static ArrayBlockingQueue<HashMap<String, Object>> errorRecordQueue = new ArrayBlockingQueue<HashMap<String, Object>>(1000); |
|||
|
|||
private static Map<String, ArrayBlockingQueue<String>> sysInputMap = new HashMap<String, ArrayBlockingQueue<String>>(); |
|||
|
|||
private static Map<String, Thread> readInputMap = new HashMap<String, Thread>(); |
|||
|
|||
private static final String log4jPath = "../etc/log4j.properties"; |
|||
private static final String confPath = "../etc/config.properties"; |
|||
private static final String dbConfPath = "../etc/dbconfig.xml"; |
|||
|
|||
|
|||
private static int kfkInputReader = 9; |
|||
private static int dedupWorkerNum = 5; |
|||
private static int textWorkerNum = 5; |
|||
private static int kfkOuputWorkerNum = 5; |
|||
private static int saverWorkerNum = 5; |
|||
private static int sentiSwitch = 0; |
|||
private static int keywordsSwitch = 0; |
|||
private static int dedupSwitch = 0; |
|||
private static int readQueueSize = 10; |
|||
public static final String KafkaTopic = "kafka_topic"; |
|||
public static final String kfkaddr = "kafka_addr"; |
|||
public static final String subjectId = "subject_id"; |
|||
|
|||
private static String sentiUrl = ""; |
|||
private static String keywordUrl = ""; |
|||
private static String dedupIceAddr = ""; |
|||
private static String iceDupServerName = ""; |
|||
private static String zkAddr = "127.0.0.1:2181"; |
|||
private static String zkPath = "/crawler"; |
|||
private static int iceErrorTimes = 5; |
|||
private static int iceTimeOut = 5000; |
|||
|
|||
|
|||
private static String dedupOutTopic = "kafka30_crawl_alert"; |
|||
private static String textOutputTopic = "textOutDefault"; |
|||
private static String textInputGroupID = "test1"; |
|||
private static int keywordNumber = 5; |
|||
private static String errorSavePath = "./errorbak/"; |
|||
private static String errorMsgTopic = "errorMessageDefaultTopic"; |
|||
private static String wechatURL = "http://172.18.1.181:8412/sendwechatalarm/"; |
|||
private static boolean sysControlSwitch = true; |
|||
public static long updatetime = new Date().getTime()/1000; |
|||
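// class-load time in seconds since the epoch |
|||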
private static String manualSites = "Taobao,Tmall,weixin"; |
|||
|
|||
|
|||
|
|||
// private static Map<String, Object> filterCache = new HashMap<String, Object>(); |
|||
private static Map<String, List<Object>> dedupRuleCache = new HashMap<String, List<Object>>(); //[time,content] |
|||
private static Map<String, Map<String, List<String>>> textConfigCache = new HashMap<String, Map<String, List<String>>>(); //{keywords:[time,content],senti:[content]} |
|||
private static Map<String, Object> basicCache = new HashMap<>(); |
|||
private static Map<String, String> kfkGroupCache = new HashMap<String, String>(); |
|||
private static Map<String, String> isDedupCache = new HashMap<String, String>(); |
|||
private static Map<String, String> isSendIncrementCache = new HashMap<String, String>(); |
|||
private static Map<String, String> alertCacheSingle = new ConcurrentHashMap<String, String>(); |
|||
private static Map<String, Object> alertCacheSinglemysql = new ConcurrentHashMap<String, Object>(); |
|||
|
|||
public static Map<String, Object> getRuleCache() { |
|||
return ruleCache; |
|||
} |
|||
|
|||
public static void setRuleCache(Map<String, Object> ruleCache) { |
|||
Constants.ruleCache = ruleCache; |
|||
} |
|||
|
|||
private static Map<String, Object> ruleCache = new HashMap<String, Object>(); |
|||
|
|||
|
|||
private static Map<String, Integer> email_cooldown = new HashMap<String, Integer>(); |
|||
private static Map<String, Integer> siteList = new HashMap<String, Integer>(); |
|||
|
|||
private static Map<String,List<String>> emailList = new HashMap<String, List<String>>(); |
|||
private static Map<String, List<Map<String, String>>> subjectTaskMap = new HashMap<>(); |
|||
|
|||
public static Map<String, List<Map<String, String>>> getFinalsubjectTaskMap() { |
|||
return finalsubjectTaskMap; |
|||
} |
|||
|
|||
public static void setFinalsubjectTaskMap(Map<String, List<Map<String, String>>> finalsubjectTaskMap) { |
|||
Constants.finalsubjectTaskMap = finalsubjectTaskMap; |
|||
} |
|||
|
|||
private static Map<String, List<Map<String, String>>> finalsubjectTaskMap = new HashMap<>(); |
|||
|
|||
public static Map<String, List<Map<String, String>>> getSubjectTaskMap() { |
|||
return subjectTaskMap; |
|||
} |
|||
|
|||
public static void setSubjectTaskMap(Map<String, List<Map<String, String>>> subjectTaskMap) { |
|||
Constants.subjectTaskMap = subjectTaskMap; |
|||
} |
|||
|
|||
final public static int TYPE_DATAERR = 1; |
|||
final public static int TYPE_DATATIMEOUTALERT = 2; |
|||
final public static int TYPE_DATATIMEOUT = 3; |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
public static String getManualSites() { |
|||
return manualSites; |
|||
} |
|||
public static void setManualSites(String manualSites) { |
|||
Constants.manualSites = manualSites; |
|||
} |
|||
public static Map<String, Integer> getSiteList() { |
|||
return siteList; |
|||
} |
|||
public static void setSiteList(Map<String, Integer> siteList) { |
|||
Constants.siteList = siteList; |
|||
} |
|||
public static String getWechatURL() { |
|||
return wechatURL; |
|||
} |
|||
public static void setWechatURL(String wechatURL) { |
|||
Constants.wechatURL = wechatURL; |
|||
} |
|||
public static Map<String, Integer> getEmail_cooldown() { |
|||
return email_cooldown; |
|||
} |
|||
public static void setEmail_cooldown(Map<String, Integer> email_cooldown) { |
|||
Constants.email_cooldown = email_cooldown; |
|||
} |
|||
public static Map<String, List<String>> getEmailList() { |
|||
return emailList; |
|||
} |
|||
public static void setEmailList(Map<String, List<String>> emailList) { |
|||
Constants.emailList = emailList; |
|||
} |
|||
public static Map<String, String> getAlertCacheSingle() { |
|||
return alertCacheSingle; |
|||
} |
|||
public static void setAlertCacheSingle(Map<String, String> alertCacheSingle) { |
|||
Constants.alertCacheSingle = alertCacheSingle; |
|||
} |
|||
public static String getErrorMsgTopic() { |
|||
return errorMsgTopic; |
|||
} |
|||
public static void setErrorMsgTopic(String errorMsgTopic) { |
|||
Constants.errorMsgTopic = errorMsgTopic; |
|||
} |
|||
|
|||
public static Map<String, String> getIsSendIncrementCache() { |
|||
return isSendIncrementCache; |
|||
} |
|||
public static void setIsSendIncrementCache( |
|||
Map<String, String> isSendIncrementCache) { |
|||
Constants.isSendIncrementCache = isSendIncrementCache; |
|||
} |
|||
public static ArrayBlockingQueue<String> getFollow() { |
|||
return follow; |
|||
} |
|||
|
|||
public static void setFollow(ArrayBlockingQueue<String> follow) { |
|||
Constants.follow = follow; |
|||
} |
|||
public static String getTextInputGroupID() { |
|||
return textInputGroupID; |
|||
} |
|||
public static void setTextInputGroupID(String textInputGroupID) { |
|||
Constants.textInputGroupID = textInputGroupID; |
|||
} |
|||
public static Map<String, String> getIsDedupCache() { |
|||
return isDedupCache; |
|||
} |
|||
public static void setIsDedupCache(Map<String, String> isDedupCache) { |
|||
Constants.isDedupCache = isDedupCache; |
|||
} |
|||
public static int getReadQueueSize() { |
|||
return readQueueSize; |
|||
} |
|||
public static void setReadQueueSize(int readQueueSize) { |
|||
Constants.readQueueSize = readQueueSize; |
|||
} |
|||
public static Map<String, Map<String, List<String>>> getTextConfigCache() { |
|||
return textConfigCache; |
|||
} |
|||
public static void setTextConfigCache(Map<String, Map<String, List<String>>> textConfigCache) { |
|||
Constants.textConfigCache = textConfigCache; |
|||
} |
|||
public static int getKeywordNumber() { |
|||
return keywordNumber; |
|||
} |
|||
public static void setKeywordNumber(int keywordNumber) { |
|||
Constants.keywordNumber = keywordNumber; |
|||
} |
|||
public static String getIceDupServerName() { |
|||
return iceDupServerName; |
|||
} |
|||
public static void setIceDupServerName(String iceDupServerName) { |
|||
Constants.iceDupServerName = iceDupServerName; |
|||
} |
|||
public static int getIceTimeOut() { |
|||
return iceTimeOut; |
|||
} |
|||
public static void setIceTimeOut(int iceTimeOut) { |
|||
Constants.iceTimeOut = iceTimeOut; |
|||
} |
|||
public static void setIceErrorTimes(int iceErrorTimes) { |
|||
Constants.iceErrorTimes = iceErrorTimes; |
|||
} |
|||
public static int getIceErrorTimes() { |
|||
return iceErrorTimes; |
|||
} |
|||
public static Map<String, List<Object>> getDedupRuleCache() { |
|||
return dedupRuleCache; |
|||
} |
|||
public static void setDedupRuleCache(Map<String, List<Object>> dedupRuleCache) { |
|||
Constants.dedupRuleCache = dedupRuleCache; |
|||
} |
|||
public static LinkedBlockingDeque<HashMap<String, Object>> getInputQueue() { |
|||
return inputQueue; |
|||
} |
|||
public static void setInputQueue( |
|||
LinkedBlockingDeque<HashMap<String, Object>> inputQueue) { |
|||
Constants.inputQueue = inputQueue; |
|||
} |
|||
|
|||
public static ArrayBlockingQueue<String> getTextInputQueue() { |
|||
return textInputQueue; |
|||
} |
|||
public static void setTextInputQueue( |
|||
ArrayBlockingQueue<String> textInputQueue) { |
|||
Constants.textInputQueue = textInputQueue; |
|||
} |
|||
public static LinkedBlockingDeque<HashMap<String, Object>> getHdfsInputQueue() { |
|||
return hdfsInputQueue; |
|||
} |
|||
public static void setHdfsInputQueue( |
|||
LinkedBlockingDeque<HashMap<String, Object>> hdfsInputQueue) { |
|||
Constants.hdfsInputQueue = hdfsInputQueue; |
|||
} |
|||
public static ArrayBlockingQueue<HashMap<String, Object>> getErrorRecordQueue() { |
|||
return errorRecordQueue; |
|||
} |
|||
public static void setErrorRecordQueue( |
|||
ArrayBlockingQueue<HashMap<String, Object>> errorRecordQueue) { |
|||
Constants.errorRecordQueue = errorRecordQueue; |
|||
} |
|||
public static Map<String, ArrayBlockingQueue<String>> getSysInputMap() { |
|||
return sysInputMap; |
|||
} |
|||
public static void setSysInputMap( |
|||
Map<String, ArrayBlockingQueue<String>> sysInputMap) { |
|||
Constants.sysInputMap = sysInputMap; |
|||
} |
|||
public static int getDedupWorkerNum() { |
|||
return dedupWorkerNum; |
|||
} |
|||
public static void setDedupWorkerNum(int dedupWorkerNum) { |
|||
Constants.dedupWorkerNum = dedupWorkerNum; |
|||
} |
|||
public static int getTextWorkerNum() { |
|||
return textWorkerNum; |
|||
} |
|||
public static void setTextWorkerNum(int textWorkerNum) { |
|||
Constants.textWorkerNum = textWorkerNum; |
|||
} |
|||
public static int getKfkOuputWorkerNum() { |
|||
return kfkOuputWorkerNum; |
|||
} |
|||
public static void setKfkOuputWorkerNum(int kfkOuputWorkerNum) { |
|||
Constants.kfkOuputWorkerNum = kfkOuputWorkerNum; |
|||
} |
|||
public static int getSaverWorkerNum() { |
|||
return saverWorkerNum; |
|||
} |
|||
public static void setSaverWorkerNum(int saverWorkerNum) { |
|||
Constants.saverWorkerNum = saverWorkerNum; |
|||
} |
|||
public static int getSentiSwitch() { |
|||
return sentiSwitch; |
|||
} |
|||
public static void setSentiSwitch(int sentiSwitch) { |
|||
Constants.sentiSwitch = sentiSwitch; |
|||
} |
|||
public static int getKeywordsSwitch() { |
|||
return keywordsSwitch; |
|||
} |
|||
public static void setKeywordsSwitch(int keywordsSwitch) { |
|||
Constants.keywordsSwitch = keywordsSwitch; |
|||
} |
|||
public static int getDedupSwitch() { |
|||
return dedupSwitch; |
|||
} |
|||
public static void setDedupSwitch(int dedupSwitch) { |
|||
Constants.dedupSwitch = dedupSwitch; |
|||
} |
|||
public static String getSentiUrl() { |
|||
return sentiUrl; |
|||
} |
|||
public static void setSentiUrl(String sentiUrl) { |
|||
Constants.sentiUrl = sentiUrl; |
|||
} |
|||
public static String getKeywordUrl() { |
|||
return keywordUrl; |
|||
} |
|||
public static void setKeywordUrl(String keywordUrl) { |
|||
Constants.keywordUrl = keywordUrl; |
|||
} |
|||
public static String getDedupIceAddr() { |
|||
return dedupIceAddr; |
|||
} |
|||
public static void setDedupIceAddr(String dedupIceAddr) { |
|||
Constants.dedupIceAddr = dedupIceAddr; |
|||
} |
|||
public static String getDedupOutTopic() { |
|||
return dedupOutTopic; |
|||
} |
|||
public static void setDedupOutTopic(String dedupOutTopic) { |
|||
Constants.dedupOutTopic = dedupOutTopic; |
|||
} |
|||
public static String getTextOutputTopic() { |
|||
return textOutputTopic; |
|||
} |
|||
public static void setTextOutputTopic(String textOutputTopic) { |
|||
Constants.textOutputTopic = textOutputTopic; |
|||
} |
|||
public static String getErrorSavePath() { |
|||
return errorSavePath; |
|||
} |
|||
public static void setErrorSavePath(String errorSavePath) { |
|||
Constants.errorSavePath = errorSavePath; |
|||
} |
|||
public static boolean isSysControlSwitch() { |
|||
return sysControlSwitch; |
|||
} |
|||
public static void setSysControlSwitch(boolean sysControlSwitch) { |
|||
Constants.sysControlSwitch = sysControlSwitch; |
|||
} |
|||
// public static Map<String, Object> getFilterCache() { |
|||
// return filterCache; |
|||
// } |
|||
// public static void setFilterCache(Map<String, Object> filterCache) { |
|||
// Constants.filterCache = filterCache; |
|||
// } |
|||
public static Map<String, Object> getBasicCache() { |
|||
return basicCache; |
|||
} |
|||
public static void setBasicCache(Map<String, Object> inoutCache) { |
|||
Constants.basicCache = inoutCache; |
|||
} |
|||
public static Map<String, String> getKfkGroupCache() { |
|||
return kfkGroupCache; |
|||
} |
|||
public static void setKfkGroupCache(Map<String, String> kfkGroupCache) { |
|||
Constants.kfkGroupCache = kfkGroupCache; |
|||
} |
|||
public static String getLog4jpath() { |
|||
return log4jPath; |
|||
} |
|||
public static String getConfpath() { |
|||
return confPath; |
|||
} |
|||
public static String getDbconfpath() { |
|||
return dbConfPath; |
|||
} |
|||
public static String getZkAddr() { |
|||
return zkAddr; |
|||
} |
|||
public static void setZkAddr(String zkAddr) { |
|||
Constants.zkAddr = zkAddr; |
|||
} |
|||
public static String getZkPath() { |
|||
return zkPath; |
|||
} |
|||
public static void setZkPath(String zkPath) { |
|||
Constants.zkPath = zkPath; |
|||
} |
|||
public static Map<String, Thread> getReadInputMap() { |
|||
return readInputMap; |
|||
} |
|||
public static void setReadInputMap(Map<String, Thread> readInputMap) { |
|||
Constants.readInputMap = readInputMap; |
|||
} |
|||
public static int getKfkInputReader() { |
|||
return kfkInputReader; |
|||
} |
|||
public static void setKfkInputReader(int kfkInputReader) { |
|||
Constants.kfkInputReader = kfkInputReader; |
|||
} |
|||
|
|||
public static ArrayBlockingQueue<String> getContent() { |
|||
return content; |
|||
} |
|||
|
|||
public static void setContent(ArrayBlockingQueue<String> content) { |
|||
Constants.content = content; |
|||
} |
|||
|
|||
public static ArrayBlockingQueue<String> getComment() { |
|||
return comment; |
|||
} |
|||
|
|||
public static void setComment(ArrayBlockingQueue<String> comment) { |
|||
Constants.comment = comment; |
|||
} |
|||
|
|||
public static ArrayBlockingQueue<String> getFans() { |
|||
return fans; |
|||
} |
|||
|
|||
public static void setFans(ArrayBlockingQueue<String> fans) { |
|||
Constants.fans = fans; |
|||
} |
|||
|
|||
public static ArrayBlockingQueue<String> getUser() { |
|||
return user; |
|||
} |
|||
|
|||
public static void setUser(ArrayBlockingQueue<String> user) { |
|||
Constants.user = user; |
|||
} |
|||
|
|||
|
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,29 @@ |
|||
package com.zyzs.otherdatasave.cache; |
|||
|
|||
|
|||
|
|||
import com.alibaba.fastjson.JSON; |
|||
|
|||
import java.util.Map; |
|||
import java.util.concurrent.ConcurrentHashMap; |
|||
|
|||
|
|||
public class ProjectCache { |
|||
public static Map<String, Object> proj2Topic = new ConcurrentHashMap<String, Object>(); |
|||
|
|||
|
|||
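// Static initializer: start the cache refresh thread, give it about 2 seconds, then signal it to stop (the refresh logic in UpdateCacheThread is currently commented out) |
|||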
static{ |
|||
UpdateCacheThread update = new UpdateCacheThread(); |
|||
new Thread(update).start(); |
|||
try { |
|||
Thread.currentThread().sleep(2000); |
|||
} catch (InterruptedException e) { |
|||
e.printStackTrace(); |
|||
} |
|||
update.setFlag(false); |
|||
} |
|||
public static void main(String[] args) { |
|||
ProjectCache o = new ProjectCache(); |
|||
System.out.println("proj3topic:"+ JSON.toJSONString(proj2Topic)); |
|||
} |
|||
} |
|||
@ -0,0 +1,63 @@ |
|||
package com.zyzs.otherdatasave.cache; |
|||
|
|||
import org.apache.log4j.Logger; |
|||
|
|||
import java.util.Date; |
|||
|
|||
|
|||
public class UpdateCacheThread implements Runnable{ |
|||
private static final Logger log = Logger.getLogger(UpdateCacheThread.class); |
|||
private volatile boolean flag = true; |
|||
|
|||
public boolean isFlag() { |
|||
return flag; |
|||
} |
|||
|
|||
public void setFlag(boolean flag) { |
|||
this.flag = flag; |
|||
} |
|||
|
|||
@Override |
|||
public void run() { |
|||
// while(flag){ |
|||
// ConfigClient client = new ConfigClient(); |
|||
// String bizName = "ALL"; |
|||
// Map<String, String> configName = new HashMap<String, String>(); |
|||
// configName.put("req", "project"); |
|||
// String configJson = JsonUtils.toJSONString(configName); |
|||
//// String rs = client.getConfig(bizName, configJson); |
|||
// String rs = client.getConfig(bizName, configJson); |
|||
// try { |
|||
// Map<String, Object> data = (Map<String, Object>)JSON.parseObject(rs); |
|||
// List<Map<String, Object>> projs = (List<Map<String, Object>>)data.get("data"); |
|||
// ProjectCache.proj2Topic.clear(); |
|||
// log.info("-----------follow is proj2topic----------------"); |
|||
// for(Map<String, Object> proj:projs){ |
|||
// log.info(proj.get("projName")+":"+proj.get("kfkTopic")); |
|||
// ProjectCache.proj2Topic.put(proj.get("projName").toString(), proj.get("kfkTopic")); |
|||
// } |
|||
// } catch (Exception e) { |
|||
// e.printStackTrace(); |
|||
// } |
|||
// try { |
|||
// Thread.currentThread().sleep(5*1000); |
|||
// } catch (InterruptedException e) { |
|||
// e.printStackTrace(); |
|||
// } |
|||
// } |
|||
// |
|||
} |
|||
|
|||
public static void main(String[] args) { |
|||
UpdateCacheThread uct = new UpdateCacheThread(); |
|||
new Thread(uct).start(); |
|||
Date begin = new Date(); |
|||
try { |
|||
Thread.currentThread().sleep(1000*20); |
|||
} catch (InterruptedException e) { |
|||
e.printStackTrace(); |
|||
} |
|||
uct.setFlag(false); |
|||
} |
|||
|
|||
} |
|||
@ -0,0 +1,714 @@ |
|||
package com.zyzs.otherdatasave.config; |
|||
|
|||
import org.springframework.boot.context.properties.ConfigurationProperties; |
|||
import org.springframework.context.annotation.Configuration; |
|||
import org.springframework.util.Assert; |
|||
|
|||
import java.util.List; |
|||
import java.util.Map; |
|||
|
|||
@Configuration |
|||
@ConfigurationProperties(prefix = "worker") |
|||
public class AppConfig { |
|||
// Parameters we define ourselves |
|||
|
|||
// Parameters read from the configuration file |
|||
private static final String CONFIG_ES_CLUSTER_NAME = "name"; |
|||
private static final String CONFIG_ES_ADDRESS = "address"; |
|||
private static final String CONFIG_ES_SOURCE_UPPER = "upper"; |
|||
private static final String CONFIG_ES_SOURCE_STANDBY = "standby"; |
|||
private static final String CONFIG_ES_BULK_THREAD_COUNT = "bulk-thread-count"; |
|||
private static final String CONFIG_ES_BULK_RATE = "bulk-rate"; |
|||
private static final String CONFIG_ES_BULK_SIZE = "bulk-size"; |
|||
|
|||
|
|||
// public double getErrpercentage() { |
|||
// return errpercentage; |
|||
// } |
|||
// |
|||
// public void setErrpercentage(double errpercentage) { |
|||
// this.errpercentage = errpercentage; |
|||
// } |
|||
// |
|||
// private double errpercentage=0.1; |
|||
|
|||
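// The fields below are bound from application properties under the "worker" prefix via Spring relaxed binding, e.g. worker.version (example property name assumed) |
|||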
private String version; |
|||
private Boolean enableTest; |
|||
private Integer testThreadCount; |
|||
private Long testTaskId; |
|||
|
|||
private String readUserTopic; |
|||
private String readUserGroupid; |
|||
|
|||
|
|||
|
|||
// private String readContentTopic; |
|||
// private String readContentgroupid; |
|||
// private String readCommentTopic; |
|||
// private String readCommentGroupid; |
|||
// private String readFansTopic; |
|||
// private String readFansGroupid; |
|||
// private String readFollowTopic; |
|||
// private String readFollowGroupid; |
|||
|
|||
|
|||
private String readcliniGroupid; |
|||
private String readprojTopic; |
|||
private String readprojgroupid; |
|||
|
|||
private String readdrugTopic; |
|||
private String readdrugGroupid; |
|||
private String readpatentTopic; |
|||
private String readpatentGroupid; |
|||
private String readpaperTopic; |
|||
private String readpaperGroupid; |
|||
|
|||
private String readeqTopic; |
|||
|
|||
public String getReadeqTopic() { |
|||
return readeqTopic; |
|||
} |
|||
|
|||
public void setReadeqTopic(String readeqTopic) { |
|||
this.readeqTopic = readeqTopic; |
|||
} |
|||
|
|||
public String getReadeqGroupid() { |
|||
return readeqGroupid; |
|||
} |
|||
|
|||
public void setReadeqGroupid(String readeqGroupid) { |
|||
this.readeqGroupid = readeqGroupid; |
|||
} |
|||
|
|||
private String readeqGroupid; |
|||
|
|||
|
|||
|
|||
public String getReadcliniTopic() { |
|||
return readcliniTopic; |
|||
} |
|||
|
|||
public void setReadcliniTopic(String readcliniTopic) { |
|||
this.readcliniTopic = readcliniTopic; |
|||
} |
|||
|
|||
private String readcliniTopic; |
|||
|
|||
public String getReadcliniGroupid() { |
|||
return readcliniGroupid; |
|||
} |
|||
|
|||
public void setReadcliniGroupid(String readcliniGroupid) { |
|||
this.readcliniGroupid = readcliniGroupid; |
|||
} |
|||
|
|||
public String getReadprojTopic() { |
|||
return readprojTopic; |
|||
} |
|||
|
|||
public void setReadprojTopic(String readprojTopic) { |
|||
this.readprojTopic = readprojTopic; |
|||
} |
|||
|
|||
public String getReadprojgroupid() { |
|||
return readprojgroupid; |
|||
} |
|||
|
|||
public void setReadprojgroupid(String readprojgroupid) { |
|||
this.readprojgroupid = readprojgroupid; |
|||
} |
|||
|
|||
public String getReaddrugTopic() { |
|||
return readdrugTopic; |
|||
} |
|||
|
|||
public void setReaddrugTopic(String readdrugTopic) { |
|||
this.readdrugTopic = readdrugTopic; |
|||
} |
|||
|
|||
public String getReaddrugGroupid() { |
|||
return readdrugGroupid; |
|||
} |
|||
|
|||
public void setReaddrugGroupid(String readdrugGroupid) { |
|||
this.readdrugGroupid = readdrugGroupid; |
|||
} |
|||
|
|||
public String getReadpatentTopic() { |
|||
return readpatentTopic; |
|||
} |
|||
|
|||
public void setReadpatentTopic(String readpatentTopic) { |
|||
this.readpatentTopic = readpatentTopic; |
|||
} |
|||
|
|||
public String getReadpatentGroupid() { |
|||
return readpatentGroupid; |
|||
} |
|||
|
|||
public void setReadpatentGroupid(String readpatentGroupid) { |
|||
this.readpatentGroupid = readpatentGroupid; |
|||
} |
|||
|
|||
public String getReadpaperTopic() { |
|||
return readpaperTopic; |
|||
} |
|||
|
|||
public void setReadpaperTopic(String readpaperTopic) { |
|||
this.readpaperTopic = readpaperTopic; |
|||
} |
|||
|
|||
public String getReadpaperGroupid() { |
|||
return readpaperGroupid; |
|||
} |
|||
|
|||
public void setReadpaperGroupid(String readpaperGroupid) { |
|||
this.readpaperGroupid = readpaperGroupid; |
|||
} |
|||
|
|||
private String aligofasturl; |
|||
private Boolean isDownloadAvatar; |
|||
private Boolean isDownloadImage; |
|||
private Boolean isDownloadFile; |
|||
private Boolean isDownloadVideo; |
|||
private Boolean isDownloadforwardVideo; |
|||
|
|||
public Boolean getDownloadforwardVideo() { |
|||
return isDownloadforwardVideo; |
|||
} |
|||
|
|||
public void setDownloadforwardVideo(Boolean downloadforwardVideo) { |
|||
isDownloadforwardVideo = downloadforwardVideo; |
|||
} |
|||
|
|||
public Boolean getDownloadforwardImage() { |
|||
return isDownloadforwardImage; |
|||
} |
|||
|
|||
public void setDownloadforwardImage(Boolean downloadforwardImage) { |
|||
isDownloadforwardImage = downloadforwardImage; |
|||
} |
|||
|
|||
|
|||
private Boolean isDownloadforwardImage; |
|||
|
|||
public Boolean getDownloadAvatar() { |
|||
return isDownloadAvatar; |
|||
} |
|||
|
|||
public void setDownloadAvatar(Boolean downloadAvatar) { |
|||
isDownloadAvatar = downloadAvatar; |
|||
} |
|||
|
|||
public Boolean getDownloadImage() { |
|||
return isDownloadImage; |
|||
} |
|||
|
|||
public void setDownloadImage(Boolean downloadImage) { |
|||
isDownloadImage = downloadImage; |
|||
} |
|||
|
|||
public Boolean getDownloadFile() { |
|||
return isDownloadFile; |
|||
} |
|||
|
|||
public void setDownloadFile(Boolean downloadFile) { |
|||
isDownloadFile = downloadFile; |
|||
} |
|||
|
|||
public Boolean getDownloadVideo() { |
|||
return isDownloadVideo; |
|||
} |
|||
|
|||
public void setDownloadVideo(Boolean downloadVideo) { |
|||
isDownloadVideo = downloadVideo; |
|||
} |
|||
|
|||
|
|||
|
|||
public String getAligofasturl() { |
|||
return aligofasturl; |
|||
} |
|||
|
|||
public void setAligofasturl(String aligofasturl) { |
|||
this.aligofasturl = aligofasturl; |
|||
} |
|||
|
|||
public String getGofasturl() { |
|||
return gofasturl; |
|||
} |
|||
|
|||
public void setGofasturl(String gofasturl) { |
|||
this.gofasturl = gofasturl; |
|||
} |
|||
|
|||
private String gofasturl; |
|||
|
|||
|
|||
|
|||
public String getWriteTopic() { |
|||
return writeTopic; |
|||
} |
|||
|
|||
public void setWriteTopic(String writeTopic) { |
|||
this.writeTopic = writeTopic; |
|||
} |
|||
|
|||
private String writeTopic; |
|||
|
|||
|
|||
private List<String> analysisTopic; |
|||
private String analysisGroup; |
|||
// private Boolean enableAnalysisProducer; |
|||
// private Boolean enableAnalysisConsumer; |
|||
// private Integer analysisProducerThreadCount; |
|||
// private Integer analysisConsumerThreadCount; |
|||
private Boolean enableStatisticsProducer; // status of the offline statistics query service |
|||
private Boolean enableQueryProducer; // status of the offline data query service |
|||
private Boolean enableBacktraceProducer; // status of the offline data pull (backtrace) service (L'Oréal) |
|||
private Integer statisticsProducerThreadCount; // thread count of the offline statistics query service |
|||
private Integer queryProducerThreadCount; |
|||
private Integer queryUserProducerThreadCount; |
|||
|
|||
public Integer getQueryUserProducerThreadCount() { |
|||
return queryUserProducerThreadCount; |
|||
} |
|||
|
|||
public void setQueryUserProducerThreadCount(Integer queryUserProducerThreadCount) { |
|||
this.queryUserProducerThreadCount = queryUserProducerThreadCount; |
|||
} |
|||
|
|||
public Integer getQueryConetnteProducerThreadCount() { |
|||
return queryConetnteProducerThreadCount; |
|||
} |
|||
|
|||
public void setQueryConetnteProducerThreadCount(Integer queryConetnteProducerThreadCount) { |
|||
this.queryConetnteProducerThreadCount = queryConetnteProducerThreadCount; |
|||
} |
|||
|
|||
public Integer getQueryfansProducerThreadCount() { |
|||
return queryfansProducerThreadCount; |
|||
} |
|||
|
|||
public void setQueryfansProducerThreadCount(Integer queryfansProducerThreadCount) { |
|||
this.queryfansProducerThreadCount = queryfansProducerThreadCount; |
|||
} |
|||
|
|||
private Integer queryConetnteProducerThreadCount; |
|||
|
|||
public Integer getQueryConmmentProducerThreadCount() { |
|||
return queryConmmentProducerThreadCount; |
|||
} |
|||
|
|||
public void setQueryConmmentProducerThreadCount(Integer queryConmmentProducerThreadCount) { |
|||
this.queryConmmentProducerThreadCount = queryConmmentProducerThreadCount; |
|||
} |
|||
|
|||
private Integer queryConmmentProducerThreadCount; |
|||
private Integer queryfansProducerThreadCount; |
|||
|
|||
public Integer getQueryfollowProducerThreadCount() { |
|||
return queryfollowProducerThreadCount; |
|||
} |
|||
|
|||
public void setQueryfollowProducerThreadCount(Integer queryfollowProducerThreadCount) { |
|||
this.queryfollowProducerThreadCount = queryfollowProducerThreadCount; |
|||
} |
|||
|
|||
private Integer queryfollowProducerThreadCount; |
|||
|
|||
|
|||
|
|||
private Integer backtraceProducerThreadCount; |
|||
// private Boolean enableCompany; |
|||
// private Integer companyThreadCount; |
|||
// private Boolean enableCompanyProducer; |
|||
// private Boolean enableCompanyConsumer; |
|||
// private Integer companyProducerThreadCount; |
|||
// private Integer companyConsumerThreadCount; |
|||
// private Boolean enableZombie; |
|||
private Integer periodS; |
|||
private String ruleRest; |
|||
private String commentRest; |
|||
private Integer ruleRestConcurrency; |
|||
private Integer contentLimit; |
|||
private Integer failureUpper; |
|||
private Map<String, Object> esNormal; |
|||
private Map<String, Object> esMini; |
|||
|
|||
|
|||
public Integer getPeriodS() { |
|||
return periodS; |
|||
} |
|||
|
|||
public void setPeriodS(Integer periodS) { |
|||
this.periodS = periodS; |
|||
} |
|||
|
|||
public Integer getQueryProducerThreadCount() { |
|||
return queryProducerThreadCount; |
|||
} |
|||
|
|||
public void setQueryProducerThreadCount(Integer queryProducerThreadCount) { |
|||
this.queryProducerThreadCount = queryProducerThreadCount; |
|||
} |
|||
|
|||
public Integer getBacktraceProducerThreadCount() { |
|||
return backtraceProducerThreadCount; |
|||
} |
|||
|
|||
public void setBacktraceProducerThreadCount(Integer backtraceProducerThreadCount) { |
|||
this.backtraceProducerThreadCount = backtraceProducerThreadCount; |
|||
} |
|||
|
|||
public Boolean getEnableQueryProducer() { |
|||
return enableQueryProducer; |
|||
} |
|||
|
|||
public void setEnableQueryProducer(Boolean enableQueryProducer) { |
|||
this.enableQueryProducer = enableQueryProducer; |
|||
} |
|||
|
|||
public Boolean getEnableBacktraceProducer() { |
|||
return enableBacktraceProducer; |
|||
} |
|||
|
|||
public void setEnableBacktraceProducer(Boolean enableBacktraceProducer) { |
|||
this.enableBacktraceProducer = enableBacktraceProducer; |
|||
} |
|||
|
|||
public Boolean getEnableStatisticsProducer() { |
|||
return enableStatisticsProducer; |
|||
} |
|||
|
|||
public void setEnableStatisticsProducer(Boolean enableStatisticsProducer) { |
|||
this.enableStatisticsProducer = enableStatisticsProducer; |
|||
} |
|||
|
|||
public Integer getStatisticsProducerThreadCount() { |
|||
return statisticsProducerThreadCount; |
|||
} |
|||
|
|||
public void setStatisticsProducerThreadCount(Integer statisticsProducerThreadCount) { |
|||
this.statisticsProducerThreadCount = statisticsProducerThreadCount; |
|||
} |
|||
|
|||
public String getVersion() { |
|||
return version; |
|||
} |
|||
|
|||
public void setVersion(String version) { |
|||
this.version = version; |
|||
} |
|||
|
|||
public Boolean getEnableTest() { |
|||
return enableTest; |
|||
} |
|||
|
|||
public void setEnableTest(Boolean enableTest) { |
|||
this.enableTest = enableTest; |
|||
} |
|||
|
|||
public Integer getTestThreadCount() { |
|||
return testThreadCount; |
|||
} |
|||
|
|||
public void setTestThreadCount(Integer testThreadCount) { |
|||
this.testThreadCount = testThreadCount; |
|||
} |
|||
|
|||
public Long getTestTaskId() { |
|||
return testTaskId; |
|||
} |
|||
|
|||
public void setTestTaskId(Long testTaskId) { |
|||
this.testTaskId = testTaskId; |
|||
} |
|||
|
|||
|
|||
public List<String> getAnalysisTopic() { |
|||
return analysisTopic; |
|||
} |
|||
|
|||
public void setAnalysisTopic(List<String> analysisTopic) { |
|||
this.analysisTopic = analysisTopic; |
|||
} |
|||
|
|||
public String getAnalysisGroup() { |
|||
return analysisGroup; |
|||
} |
|||
|
|||
public void setAnalysisGroup(String analysisGroup) { |
|||
this.analysisGroup = analysisGroup; |
|||
} |
|||
|
|||
|
|||
public String getRuleRest() { |
|||
return ruleRest; |
|||
} |
|||
|
|||
public void setRuleRest(String ruleRest) { |
|||
this.ruleRest = ruleRest; |
|||
} |
|||
|
|||
public Integer getRuleRestConcurrency() { |
|||
return ruleRestConcurrency; |
|||
} |
|||
|
|||
public void setRuleRestConcurrency(Integer ruleRestConcurrency) { |
|||
this.ruleRestConcurrency = ruleRestConcurrency; |
|||
} |
|||
|
|||
public Integer getContentLimit() { |
|||
return contentLimit; |
|||
} |
|||
|
|||
public void setContentLimit(Integer contentLimit) { |
|||
this.contentLimit = contentLimit; |
|||
} |
|||
|
|||
public Integer getFailureUpper() { |
|||
return failureUpper; |
|||
} |
|||
|
|||
public void setFailureUpper(Integer failureUpper) { |
|||
this.failureUpper = failureUpper; |
|||
} |
|||
|
|||
public Map<String, Object> getEsNormal() { |
|||
return esNormal; |
|||
} |
|||
|
|||
public void setEsNormal(Map<String, Object> esNormal) { |
|||
this.esNormal = esNormal; |
|||
} |
|||
|
|||
public String esNormalClusterName() { |
|||
return (String) esNormal.get(CONFIG_ES_CLUSTER_NAME); |
|||
} |
|||
|
|||
public String[] esNormalAddress() { |
|||
return ((String) esNormal.get(CONFIG_ES_ADDRESS)).split(","); |
|||
} |
|||
|
|||
// public Long esNormalUpper() { |
|||
// String upper = (String) esNormal.get(CONFIG_ES_SOURCE_UPPER); |
|||
// |
|||
// return Instant.parse(upper).getMillis(); |
|||
// } |
|||
|
|||
public String esNormalStandby() { |
|||
return (String) esNormal.get(CONFIG_ES_SOURCE_STANDBY); |
|||
} |
|||
|
|||
public Map<String, Object> getEsMini() { |
|||
return esMini; |
|||
} |
|||
|
|||
public void setEsMini(Map<String, Object> esMini) { |
|||
this.esMini = esMini; |
|||
} |
|||
|
|||
public String esMiniClusterName() { |
|||
return (String) esMini.get(CONFIG_ES_CLUSTER_NAME); |
|||
} |
|||
|
|||
public String[] esMiniAddress() { |
|||
return ((String) esMini.get(CONFIG_ES_ADDRESS)).split(","); |
|||
} |
|||
|
|||
public Integer esMiniBulkThreadCount() { |
|||
Integer count = Integer.parseInt(esMini.get(CONFIG_ES_BULK_THREAD_COUNT).toString()); |
|||
return count; |
|||
} |
|||
|
|||
public Integer esMiniBulkRate() { |
|||
Integer rate = Integer.parseInt(esMini.get(CONFIG_ES_BULK_RATE).toString()); |
|||
return rate; |
|||
} |
|||
|
|||
public Integer esMiniBulkSize() { |
|||
Integer size = Integer.parseInt(esMini.get(CONFIG_ES_BULK_SIZE).toString()); |
|||
return size; |
|||
} |
|||
|
|||
|
|||
public void verify() { |
|||
|
|||
Assert.hasLength(version, "Config version must not be empty"); |
|||
// if (enableTest) { |
|||
// Assert.isTrue(testThreadCount > 0, "Config testThreadCount must gt 0"); |
|||
// Assert.isTrue(testTaskId > 0, "Config testTaskId must gt 0"); |
|||
// } |
|||
// |
|||
// if(enableStatisticsProducer){ |
|||
// Assert.isTrue(statisticsProducerThreadCount > 0, "Config statisticsProducerThreadCount must gt 0"); |
|||
// } |
|||
// if(enableQueryProducer){ |
|||
// Assert.isTrue(queryProducerThreadCount > 0, "Config statisticsProducerThreadCount must gt 0"); |
|||
// } |
|||
// if(enableStatisticsProducer){ |
|||
// Assert.isTrue(backtraceProducerThreadCount > 0, "Config statisticsProducerThreadCount must gt 0"); |
|||
// } |
|||
// if (enableAnalysisProducer) { |
|||
// Assert.isTrue(analysisProducerThreadCount > 0, "Config analysisProducerThreadCount must gt 0"); |
|||
// Assert.notEmpty(analysisTopic, "Config analysisTopic must not be empty."); |
|||
// } |
|||
// if (enableAnalysisConsumer) { |
|||
// Assert.isTrue(analysisConsumerThreadCount > 0, "Config analysisConsumerThreadCount must gt 0"); |
|||
// Assert.hasLength(analysisGroup, "Config analysisGroup must not be empty."); |
|||
// } |
|||
// if (enableCompany) { |
|||
// Assert.isTrue(companyThreadCount > 0, "Config companyThreadCount must gt 0"); |
|||
// } |
|||
// if (enableCompanyProducer) { |
|||
// Assert.isTrue(companyProducerThreadCount > 0, "Config companyProducerThreadCount must gt 0"); |
|||
// |
|||
// } |
|||
// if (enableCompanyConsumer) { |
|||
// Assert.isTrue(companyConsumerThreadCount > 0, "Config companyConsumerThreadCount must gt 0"); |
|||
// |
|||
// } |
|||
// Assert.isTrue(periodS > 0, "Config periodS must gt 0"); |
|||
// Assert.hasLength(ruleRest, "Config ruleRest must not be empty"); |
|||
// Assert.isTrue(ruleRestConcurrency > 0, "Config ruleRestConcurrency must gt 0"); |
|||
// Assert.isTrue(contentLimit > 0, "Config contentLimit must gt 0"); |
|||
// Assert.isTrue(failureUpper > 0, "Config failureUpper must gt 0"); |
|||
// Assert.notEmpty(esNormal, "Config esSource must not be empty"); |
|||
// Assert.notEmpty(esMini, "Config esTarget must not be empty"); |
|||
} |
|||
|
|||
public String getCommentRest() { |
|||
return commentRest; |
|||
} |
|||
|
|||
public void setCommentRest(String commentRest) { |
|||
this.commentRest = commentRest; |
|||
} |
|||
|
|||
// public Map<String, Object> getRedis() { |
|||
// return redis; |
|||
// } |
|||
// public void setRedis(Map<String, Object> redis) { |
|||
// this.redis = redis; |
|||
// } |
|||
|
|||
// public String redisModel() { |
|||
// return (String) redis.get(REDIS_MODEL); |
|||
// } |
|||
// public String redisZkadress() { |
|||
// return (String) redis.get(REDIS_ZKADRESS); |
|||
// } |
|||
// public String redisZksessiontimeoutms() { |
|||
// return (String) redis.get(REDIS_ZKSESSIONTIMEOUTMS); |
|||
// } |
|||
// public String redisProxypath() { |
|||
// return (String) redis.get(REDIS_PORT); |
|||
// } |
|||
// public String redisAddress() { |
|||
// return (String) redis.get(REDIS_ADDRESS); |
|||
// } |
|||
// public String redisPort() { |
|||
// return (String) redis.get(REDIS_PORT); |
|||
// } |
|||
public String getReadUserTopic() { |
|||
return readUserTopic; |
|||
} |
|||
|
|||
public void setReadUserTopic(String readUserTopic) { |
|||
this.readUserTopic = readUserTopic; |
|||
} |
|||
|
|||
public String getReadUserGroupid() { |
|||
return readUserGroupid; |
|||
} |
|||
|
|||
public void setReadUserGroupid(String readUserGroupid) { |
|||
this.readUserGroupid = readUserGroupid; |
|||
} |
|||
// |
|||
// public String getReadContentTopic() { |
|||
// return readContentTopic; |
|||
// } |
|||
// |
|||
// public void setReadContentTopic(String readContentTopic) { |
|||
// this.readContentTopic = readContentTopic; |
|||
// } |
|||
// |
|||
// public String getReadContentgroupid() { |
|||
// return readContentgroupid; |
|||
// } |
|||
// |
|||
// public void setReadContentgroupid(String readContentgroupid) { |
|||
// this.readContentgroupid = readContentgroupid; |
|||
// } |
|||
// |
|||
// public String getReadCommentTopic() { |
|||
// return readCommentTopic; |
|||
// } |
|||
// |
|||
// public void setReadCommentTopic(String readCommentTopic) { |
|||
// this.readCommentTopic = readCommentTopic; |
|||
// } |
|||
// |
|||
// public String getReadCommentGroupid() { |
|||
// return readCommentGroupid; |
|||
// } |
|||
// |
|||
// public void setReadCommentGroupid(String readCommentGroupid) { |
|||
// this.readCommentGroupid = readCommentGroupid; |
|||
// } |
|||
// |
|||
// public String getReadFansTopic() { |
|||
// return readFansTopic; |
|||
// } |
|||
// |
|||
// public void setReadFansTopic(String readFansTopic) { |
|||
// this.readFansTopic = readFansTopic; |
|||
// } |
|||
// |
|||
// public String getReadFansGroupid() { |
|||
// return readFansGroupid; |
|||
// } |
|||
// |
|||
// public void setReadFansGroupid(String readFansGroupid) { |
|||
// this.readFansGroupid = readFansGroupid; |
|||
// } |
|||
// |
|||
// public String getReadFollowTopic() { |
|||
// return readFollowTopic; |
|||
// } |
|||
// |
|||
// public void setReadFollowTopic(String readFollowTopic) { |
|||
// this.readFollowTopic = readFollowTopic; |
|||
// } |
|||
// |
|||
// public String getReadFollowGroupid() { |
|||
// return readFollowGroupid; |
|||
// } |
|||
// |
|||
// public void setReadFollowGroupid(String readFollowGroupid) { |
|||
// this.readFollowGroupid = readFollowGroupid; |
|||
// } |
|||
|
|||
// @Override |
|||
// public String toString() { |
|||
// return "AppConfig{" + |
|||
// ", aligofasturl='" + aligofasturl + '\'' + |
|||
// ", isDownloadAvatar=" + isDownloadAvatar + |
|||
// ", isDownloadImage=" + isDownloadImage + |
|||
// ", isDownloadFile=" + isDownloadFile + |
|||
// ", isDownloadVideo=" + isDownloadVideo + |
|||
// ", isDownloadforwardVideo=" + isDownloadforwardVideo + |
|||
// ", isDownloadforwardImage=" + isDownloadforwardImage + |
|||
// ", gofasturl='" + gofasturl + '\'' + |
|||
// ", writeTopic='" + writeTopic + '\'' + |
|||
// ", queryUserProducerThreadCount=" + queryUserProducerThreadCount + |
|||
// ", queryConetnteProducerThreadCount=" + queryConetnteProducerThreadCount + |
|||
// ", queryConmmentProducerThreadCount=" + queryConmmentProducerThreadCount + |
|||
// ", queryfansProducerThreadCount=" + queryfansProducerThreadCount + |
|||
// ", queryfollowProducerThreadCount=" + queryfollowProducerThreadCount + |
|||
// '}'; |
|||
// } |
|||
} |
|||
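For reference, esNormal and esMini are bound as untyped Maps, so the typed accessors above are the only place where the key names ("name", "address", the bulk settings) and value formats are checked. The sketch below shows the value shapes those accessors expect; the host list and numbers are illustrative assumptions, not values copied from etc/.properties.

package com.zyzs.otherdatasave.config;

import java.util.HashMap;
import java.util.Map;

// Sketch only: the value shapes AppConfig expects inside the es-mini map.
public class AppConfigEsMiniExample {

    public static void main(String[] args) {
        Map<String, Object> esMini = new HashMap<>();
        esMini.put("name", "es-cluster-demo");                // CONFIG_ES_CLUSTER_NAME
        esMini.put("address", "10.0.0.1:9300,10.0.0.2:9300"); // CONFIG_ES_ADDRESS, comma separated
        esMini.put("bulk-thread-count", 4);                   // parsed via Integer.parseInt(...toString())
        esMini.put("bulk-rate", 1000);
        esMini.put("bulk-size", 500);

        // Same parsing the accessors above perform.
        String[] hosts = ((String) esMini.get("address")).split(",");
        int bulkThreads = Integer.parseInt(esMini.get("bulk-thread-count").toString());
        System.out.println(hosts.length + " hosts, " + bulkThreads + " bulk threads");
    }
}

Because the values arrive as Object, numeric settings survive whether the configuration supplies them as numbers or as strings, which is why the accessors go through toString() before parsing.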
1116
src/main/java/com/zyzs/otherdatasave/config/BFDApiConfig.java
File diff suppressed because it is too large
1092
src/main/java/com/zyzs/otherdatasave/config/ESConstants.java
File diff suppressed because it is too large
@@ -0,0 +1,97 @@ |
|||
package com.zyzs.otherdatasave.service; |
|||
|
|||
import com.alibaba.fastjson.JSONObject; |
|||
import com.bfd.crawler.elasti.ElastiProducerHigh; |
|||
import com.bfd.crawler.utils.JsonUtils; |
|||
import com.zyzs.otherdatasave.bean.Drug; |
|||
import com.zyzs.otherdatasave.cache.Constants; |
|||
import com.zyzs.otherdatasave.util.DataCheckUtil; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.stereotype.Service; |
|||
|
|||
import java.util.*; |
|||
|
|||
import static com.zyzs.otherdatasave.util.MfMD5Util.GetMD5Code; |
|||
|
|||
@Service |
|||
public class QueryDrug { |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QueryDrug.class); |
|||
public void query() { |
|||
String inputMessage = Constants.getDrug().poll(); // poll --> returns null if the queue is empty |
|||
if (Objects.isNull(inputMessage)) { |
|||
return; |
|||
} |
|||
Map<String, Object> messageMap = new HashMap<>(); |
|||
if (inputMessage.length()>10) { |
|||
try { |
|||
messageMap = JsonUtils.parseObject(inputMessage); |
|||
Drug dru=new Drug(); |
|||
dru.setDrugName((String) messageMap.get("drugName")); |
|||
dru.setGenericName((String) messageMap.get("genericName")); |
|||
dru.setIngredients((String) messageMap.get("ingredients")); |
|||
dru.setIndication((String) messageMap.get("indication")); |
|||
dru.setStrength((String) messageMap.get("strength")); |
|||
dru.setDosage((String) messageMap.get("dosage")); |
|||
dru.setRoute((String) messageMap.get("route")); |
|||
dru.setSideEffect((String) messageMap.get("sideEffect")); |
|||
dru.setMarketingStatus((String) messageMap.get("marketingStatus")); |
|||
dru.setCompany((String) messageMap.get("company")); |
|||
dru.setCountry((String) messageMap.get("country")); |
|||
dru.setAccessionNum((String) messageMap.get("accessionNum")); |
|||
dru.setAccessionDate((String) messageMap.get("accessionDate")); |
|||
dru.setStorageConditions((String) messageMap.get("storageConditions")); |
|||
dru.setDrugInteractions((List) messageMap.get("drugInteractions")); |
|||
List attachmentInstructions=new ArrayList(); |
|||
String attachmentInstructionsa= messageMap.get("attachmentInstructions").toString(); |
|||
if(attachmentInstructionsa.length()>5){ |
|||
attachmentInstructions.add(messageMap.get("attachmentInstructions").toString()); |
|||
} |
|||
dru.setAttachmentInstructions(attachmentInstructions); |
|||
List attachmentInstructionsPath=new ArrayList(); |
|||
String attachmentInstructionsfilePath = (String) messageMap.get("attachmentInstructionsfilePath"); |
|||
if (attachmentInstructionsfilePath.length()>5){ |
|||
attachmentInstructionsPath.add((String) messageMap.get("attachmentInstructionsfilePath")); |
|||
} |
|||
dru.setAttachmentInstructions(attachmentInstructionsPath); // note: this overwrites the instructions list set above with the file-path list |
|||
dru.setSubmission((String) messageMap.get("submission")); |
|||
dru.setActionType((String) messageMap.get("actionType")); |
|||
dru.setSubmissionClassification((String) messageMap.get("submissionClassification")); |
|||
dru.setReviewPriority((String) messageMap.get("reviewPriority")); |
|||
dru.setLettersReviewsLabels((String) messageMap.get("lettersReviewsLabels")); |
|||
dru.setNotes((String) messageMap.get("notes")); |
|||
long dateTimenow = System.currentTimeMillis(); |
|||
dru.setCreateTime(dateTimenow); |
|||
String createTimeStr= DataCheckUtil.getCurrentTime(dateTimenow); |
|||
dru.setCreateTimeStr(createTimeStr); |
|||
dru.setKeywords((String) messageMap.get("keywords")); |
|||
dru.setCrawlUrl((String) messageMap.get("crawlUrl")); |
|||
dru.setWeight((String) messageMap.get("weight")); |
|||
dru.setChemicalFormula((String) messageMap.get("chemicalFormula")); |
|||
dru.setCrawlUrl((String) messageMap.get("crawlUrl")); |
|||
String docid = GetMD5Code((String) messageMap.get("drugName")+(String) messageMap.get("strength")); |
|||
dru.setDocId(docid); |
|||
dru.setDataId(docid); |
|||
dru.set_id_(docid); |
|||
dru.setIsShow("20250512"); |
|||
LOGGER.info("Parse QueryDrug={}", JSONObject.toJSON(dru)); |
|||
ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_drug_crb","_doc" ); |
|||
elastiProducer.sendMessageToEs(JsonUtils.toJSONString(dru)); |
|||
|
|||
} catch (Exception e) { |
|||
LOGGER.error("ERROR QueryDrug={}", JSONObject.toJSON(messageMap)); |
|||
e.printStackTrace(); |
|||
} |
|||
} |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
} |
|||
} |
|||
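Like the other services in this package, QueryDrug derives docId, dataId and _id_ from an MD5 of a natural key (drugName plus strength here; crawlUrl, doi or registNum elsewhere), so re-consuming the same Kafka message rewrites the same Elasticsearch document instead of creating a duplicate. A small standalone illustration of that idempotency follows, using java.security.MessageDigest rather than the project's MfMD5Util, whose exact output format is not shown in this diff.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Sketch: the same natural key always yields the same document id, so repeated sends overwrite one ES document.
public class DocIdSketch {

    static String md5Hex(String s) throws NoSuchAlgorithmException {
        byte[] digest = MessageDigest.getInstance("MD5").digest(s.getBytes(StandardCharsets.UTF_8));
        StringBuilder sb = new StringBuilder();
        for (byte b : digest) {
            sb.append(String.format("%02x", b)); // lower-case hex, two characters per byte
        }
        return sb.toString();
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        String first = md5Hex("Aspirin" + "100mg");
        String second = md5Hex("Aspirin" + "100mg");
        System.out.println(first.equals(second)); // true: identical input, identical _id_
    }
}

The trade-off of keying on a natural value is that a corrected key (for example a fixed strength value) produces a new document rather than updating the old one.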
@@ -0,0 +1,80 @@ |
|||
package com.zyzs.otherdatasave.service; |
|||
|
|||
import com.alibaba.fastjson.JSONObject; |
|||
import com.bfd.crawler.elasti.ElastiProducerHigh; |
|||
import com.bfd.crawler.utils.JsonUtils; |
|||
import com.zyzs.otherdatasave.bean.Eqiupment; |
|||
import com.zyzs.otherdatasave.bean.Proj; |
|||
import com.zyzs.otherdatasave.cache.Constants; |
|||
import com.zyzs.otherdatasave.util.DataCheckUtil; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.stereotype.Service; |
|||
|
|||
import java.util.*; |
|||
|
|||
import static com.zyzs.otherdatasave.util.MfMD5Util.GetMD5Code; |
|||
|
|||
@Service |
|||
public class QueryEq { |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QueryEq.class); |
|||
public void query() { |
|||
String inputMessage = Constants.getEquipment().poll(); // poll --> returns null if the queue is empty |
|||
if (Objects.isNull(inputMessage)) { |
|||
return; |
|||
} |
|||
Map<String, Object> messageMap = new HashMap<>(); |
|||
if (inputMessage.length()>10) { |
|||
try { |
|||
messageMap = JsonUtils.parseObject(inputMessage); |
|||
} catch (Exception e) { |
|||
e.printStackTrace(); |
|||
} |
|||
try { |
|||
Eqiupment eq=new Eqiupment(); |
|||
eq.setCrawlUrl((String) messageMap.get("crawlUrl")); |
|||
eq.setCate((String) messageMap.get("cate")); |
|||
eq.setTitle((String) messageMap.get("title")); |
|||
eq.setIntroduction_short((String) messageMap.get("introduction_short")); |
|||
if(!messageMap.get("img").toString().isEmpty()){ |
|||
String img=(String) messageMap.get("img"); |
|||
List imga=new ArrayList(); |
|||
imga.add(img); |
|||
eq.setImg(imga); |
|||
} |
|||
if(!messageMap.get("imgPath").toString().isEmpty()){ |
|||
String img=(String) messageMap.get("imgPath"); |
|||
img="/group"+img.split("/group")[1]; |
|||
List imga=new ArrayList(); |
|||
imga.add(img); |
|||
eq.setImgPath(imga); |
|||
} |
|||
eq.setContent((String) messageMap.get("content")); |
|||
eq.setForwardcontent((String) messageMap.get("forwardcontent")); |
|||
String docid = GetMD5Code((String) messageMap.get("crawlUrl")); |
|||
eq.setDocId(docid); |
|||
eq.setDataId(docid); |
|||
eq.set_id_(docid); |
|||
long dateTimenow = System.currentTimeMillis(); |
|||
eq.setCreateTime(dateTimenow); |
|||
String createTimeStr= DataCheckUtil.getCurrentTime(dateTimenow); |
|||
eq.setCreateTimeStr(createTimeStr); |
|||
eq.setCrawlUrl((String) messageMap.get("crawlUrl")); |
|||
LOGGER.info("Parse QueryEq={}", JSONObject.toJSON(eq)); |
|||
ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_equipment_crb","_doc" ); |
|||
elastiProducer.sendMessageToEs(JsonUtils.toJSONString(eq)); |
|||
} catch (Exception e) { |
|||
// LOGGER.error("Parse ErrorQueryEq={}", JSONObject.toJSON(inputMessage)); |
|||
e.printStackTrace(); |
|||
} |
|||
|
|||
} |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
} |
|||
} |
|||
@@ -0,0 +1,160 @@ |
|||
package com.zyzs.otherdatasave.service; |
|||
|
|||
import com.alibaba.fastjson.JSONObject; |
|||
import com.bfd.crawler.elasti.ElastiProducerHigh; |
|||
import com.bfd.crawler.utils.JsonUtils; |
|||
import com.zyzs.otherdatasave.bean.Clini; |
|||
import com.zyzs.otherdatasave.bean.Paper; |
|||
import com.zyzs.otherdatasave.cache.Constants; |
|||
import com.zyzs.otherdatasave.util.DataCheckUtil; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.stereotype.Service; |
|||
|
|||
import javax.annotation.PostConstruct; |
|||
import java.time.LocalDateTime; |
|||
import java.time.format.DateTimeFormatter; |
|||
import java.util.*; |
|||
|
|||
import static com.zyzs.otherdatasave.util.MfMD5Util.GetMD5Code; |
|||
@Service |
|||
public class QueryPaper { |
|||
|
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QueryPaper.class); |
|||
@PostConstruct |
|||
public void init() { |
|||
|
|||
} |
|||
|
|||
public void query() { |
|||
String inputMessage = Constants.getPaper().poll(); // poll --> returns null if the queue is empty |
|||
if (Objects.isNull(inputMessage)) { |
|||
return; |
|||
} |
|||
Map<String, Object> messageMap = new HashMap<>(); |
|||
if (inputMessage.length()>10) { |
|||
try { |
|||
try { |
|||
messageMap = JsonUtils.parseObject(inputMessage); |
|||
} catch (Exception e) { |
|||
e.printStackTrace(); |
|||
} |
|||
Paper paper=new Paper(); |
|||
paper.setTitle((String) messageMap.get("title")); |
|||
paper.setCrawlUrl((String) messageMap.get("crawlUrl")); |
|||
paper.setContent((String) messageMap.get("content")); |
|||
|
|||
// |
|||
// List<String> authorList = new ArrayList<>(); |
|||
// String author=(String) messageMap.get("author"); |
|||
// if(author.contains(";")){ |
|||
// // 如果包含分号,则按分号分割 |
|||
// authorList = Arrays.asList(author.split(";")); |
|||
// }else if (author.contains(",")){ |
|||
// // 如果包含分号,则按分号分割 |
|||
// authorList = Arrays.asList(author.split(",")); |
|||
// } |
|||
// if(authorList.size()>0){ |
|||
// List authors=new ArrayList<>(); |
|||
// for (String name:authorList){ |
|||
// Map ma=new HashMap(); |
|||
// ma.put("id",""); |
|||
// ma.put("name",name); |
|||
// // 组织信息 |
|||
// Map or =new HashMap(); |
|||
// or.put("id",""); |
|||
// or.put("name",""); |
|||
// List organs=new ArrayList<>(); |
|||
// organs.add(or); |
|||
// ma.put("organs",organs); |
|||
// authors.add(ma); |
|||
// } |
|||
// paper.setAuthors(authors); |
|||
// } |
|||
|
|||
if (messageMap.containsKey("filePath")){ |
|||
List<String> file = (List) messageMap.get("filePath"); |
|||
List<String> cleanedList = new ArrayList<>(); |
|||
for (String url : file) { |
|||
// use replaceAll to strip the internal host prefix so only the relative file path is kept |
|||
String cleanedUrl = url.replaceAll("http://192.168.0.41:8081", ""); |
|||
cleanedList.add(cleanedUrl); |
|||
} |
|||
paper.setFilePath(cleanedList); |
|||
paper.setFilePathSize((List) messageMap.get("filePathSize")); |
|||
} |
|||
|
|||
List<Map> authors= (List<Map>) messageMap.get("authors"); |
|||
if(authors.size()>0){ |
|||
List authorsall=new ArrayList<>(); |
|||
for (Map<String,Object>keyValueMap : authors){ |
|||
for (Map.Entry<String, Object> entry : keyValueMap.entrySet()) { |
|||
String key = entry.getKey(); // the key (used as the author name) |
|||
Object value = entry.getValue(); // the value (used as the organization name) |
|||
Map ma=new HashMap(); |
|||
ma.put("id",""); |
|||
ma.put("name",key); |
|||
// organization info |
|||
Map or =new HashMap(); |
|||
or.put("id",""); |
|||
or.put("name",value); |
|||
List organs=new ArrayList<>(); |
|||
organs.add(or); |
|||
ma.put("organs",organs); |
|||
authorsall.add(ma); |
|||
paper.setAuthors(authorsall); |
|||
} |
|||
} |
|||
} |
|||
|
|||
|
|||
// source info |
|||
Map source=new HashMap(); |
|||
source.put("id", ""); |
|||
source.put("name", messageMap.get("crawlUrl")); |
|||
source.put("type", ""); |
|||
List sourcelist=new ArrayList<>(); |
|||
sourcelist.add(source); |
|||
paper.setSource(sourcelist); |
|||
|
|||
String inputDate = (String) messageMap.get("pubDate"); |
|||
String outputFormat = "yyyy-MM-dd"; |
|||
// define the input format |
|||
DateTimeFormatter inputFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); |
|||
// parse the input string into a LocalDateTime |
|||
LocalDateTime dateTime = LocalDateTime.parse(inputDate, inputFormatter); |
|||
// format it into the desired output pattern |
|||
String formattedDate = dateTime.format(DateTimeFormatter.ofPattern(outputFormat)); |
|||
paper.setPubDate(formattedDate); |
|||
paper.setDoi((String) messageMap.get("doi")); |
|||
paper.setClassify((String) messageMap.get("classify")); |
|||
paper.setKeywords((String) messageMap.get("keywords")); |
|||
paper.setSummary((String) messageMap.get("summary")); |
|||
paper.setTopics((String) messageMap.get("topics")); |
|||
paper.setFieldsSubject((String) messageMap.get("fieldsSubject")); |
|||
paper.setReferences((String) messageMap.get("references")); |
|||
String docid = GetMD5Code((String) messageMap.get("doi")); |
|||
paper.setDocId(docid); |
|||
paper.setDataId(docid); |
|||
paper.set_id_(docid); |
|||
paper.setCountry((String) messageMap.get("country")); |
|||
|
|||
paper.setTranslatetitle(""); |
|||
paper.setTranslatekeywords (""); |
|||
paper.setTranslatesummary(""); |
|||
paper.setIsshow("20250520"); |
|||
long dateTimenow = System.currentTimeMillis(); |
|||
paper.setCreateTime(dateTimenow); |
|||
String createTimeStr=DataCheckUtil.getCurrentTime(dateTimenow); |
|||
paper.setCreateTimeStr(createTimeStr); |
|||
LOGGER.info("Parse Paper={}", JSONObject.toJSON(paper)); |
|||
ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_paper_csci","_doc" ); |
|||
elastiProducer.sendMessageToEs(JsonUtils.toJSONString(paper)); |
|||
} |
|||
catch (Exception e) { |
|||
LOGGER.info("Parse PaperError={}", JSONObject.toJSON(inputMessage)); |
|||
e.printStackTrace(); |
|||
} |
|||
} |
|||
} |
|||
} |
|||
@@ -0,0 +1,229 @@ |
|||
package com.zyzs.otherdatasave.service; |
|||
|
|||
import com.alibaba.fastjson.JSONObject; |
|||
import com.bfd.crawler.elasti.ElastiProducerHigh; |
|||
import com.bfd.crawler.utils.JsonUtils; |
|||
import com.zyzs.otherdatasave.bean.Patent; |
|||
import com.zyzs.otherdatasave.cache.Constants; |
|||
import com.zyzs.otherdatasave.util.DataCheckUtil; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.stereotype.Service; |
|||
|
|||
import java.util.*; |
|||
|
|||
import static com.zyzs.otherdatasave.util.MfMD5Util.GetMD5Code; |
|||
|
|||
@Service |
|||
public class QueryPatent { |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QueryPatent.class); |
|||
|
|||
public void query() { |
|||
String inputMessage = Constants.getPatent().poll(); // poll --> returns null if the queue is empty |
|||
if (Objects.isNull(inputMessage)) { |
|||
return; |
|||
} |
|||
Map<String, Object> messageMap = new HashMap<>(); |
|||
if (inputMessage.length()>10) { |
|||
try { |
|||
messageMap = JsonUtils.parseObject(inputMessage); |
|||
Patent patent=new Patent(); |
|||
patent.setTitle((String) messageMap.get("title")); |
|||
String inv=(String) messageMap.get("inventor"); |
|||
List<String> invList=new ArrayList(); |
|||
if(inv.contains(";")){ |
|||
// if it contains a semicolon, split on semicolons |
|||
invList = Arrays.asList(inv.split(";")); |
|||
}else if (inv.contains(",")){ |
|||
// otherwise, if it contains a comma, split on commas |
|||
invList = Arrays.asList(inv.split(",")); |
|||
} |
|||
if(invList.size()>0){ |
|||
List inventor=new ArrayList(); |
|||
for (String invname:invList){ |
|||
Map ma=new HashMap(); |
|||
ma.put("id",""); |
|||
ma.put("name",invname); |
|||
// organization info |
|||
Map or =new HashMap(); |
|||
or.put("id",""); |
|||
or.put("name",""); |
|||
List organs=new ArrayList<>(); |
|||
organs.add(or); |
|||
ma.put("organs",organs); |
|||
inventor.add(ma); |
|||
patent.setInventor(inventor); |
|||
} |
|||
} |
|||
patent.setSummary((String) messageMap.get("summary")); |
|||
patent.setKeywords((String) messageMap.get("keywords")); |
|||
patent.setNum((String) messageMap.get("num")); |
|||
String patentee=(String) messageMap.get("patentee"); |
|||
List<String> patenteeList=new ArrayList(); |
|||
if(patentee.contains(";")){ |
|||
// if it contains a semicolon, split on semicolons |
|||
patenteeList = Arrays.asList(patentee.split(";")); |
|||
}else if (patentee.contains(",")){ |
|||
// otherwise, if it contains a comma, split on commas |
|||
patenteeList = Arrays.asList(patentee.split(",")); |
|||
}else { |
|||
patenteeList.add(patentee); |
|||
} |
|||
if(patenteeList.size()>0){ |
|||
List inventor=new ArrayList(); |
|||
for (String invname:patenteeList){ |
|||
Map ma=new HashMap(); |
|||
ma.put("id",""); |
|||
ma.put("name",invname.trim()); |
|||
// organization info |
|||
Map or =new HashMap(); |
|||
or.put("id",""); |
|||
or.put("name",""); |
|||
List organs=new ArrayList<>(); |
|||
organs.add(or); |
|||
ma.put("organs",organs); |
|||
inventor.add(ma); |
|||
patent.setPatentee(inventor); |
|||
} |
|||
} |
|||
patent.setPatentNum((String) messageMap.get("patentNum")); |
|||
patent.setPatentTime(DataCheckUtil.convertString((String) messageMap.get("patentTime"))); |
|||
patent.setApplyNum((String) messageMap.get("applyNum")); |
|||
patent.setApplyTime(DataCheckUtil.convertString((String) messageMap.get("applyTime"))); |
|||
patent.setClassify((String) messageMap.get("classify")); |
|||
patent.setCountry((String) messageMap.get("country")); |
|||
patent.setClaims((String) messageMap.get("claims")); |
|||
patent.setCrawlUrl((String) messageMap.get("crawlUrl")); |
|||
patent.setCrawlTimeStr("2025-05-31 00:00:00"); |
|||
patent.setIshow("20250512"); |
|||
try { |
|||
patent.setCitedCountTotal((int) messageMap.get("citedCountTotal")); |
|||
} catch (Exception e) { |
|||
// e.printStackTrace(); |
|||
} |
|||
patent.setImagePath(new ArrayList()); |
|||
String docid = GetMD5Code((String) messageMap.get("num")); |
|||
patent.setDocId(docid); |
|||
// patent.setDocType("四级实验室"); |
|||
patent.set_id_(docid); |
|||
// patent.setEnSource((String) messageMap.get("keywords")); |
|||
long dateTimenow = System.currentTimeMillis(); |
|||
patent.setCreateTime(dateTimenow); |
|||
String createTimeStr= DataCheckUtil.getCurrentTime(dateTimenow); |
|||
patent.setCreateTimeStr(createTimeStr); |
|||
LOGGER.info("Parse Patent={}", JSONObject.toJSON(patent)); |
|||
ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_patent_csci","_doc" ); |
|||
elastiProducer.sendMessageToEs(JsonUtils.toJSONString(patent)); |
|||
} catch (Exception e) { |
|||
LOGGER.error("Parse ErrotPatent={}", JSONObject.toJSON(messageMap)); |
|||
e.printStackTrace(); |
|||
} |
|||
} |
|||
} |
|||
|
|||
|
|||
public void querykeji() { |
|||
String inputMessage = Constants.getPatent().poll(); // poll --> returns null if the queue is empty |
|||
if (Objects.isNull(inputMessage)) { |
|||
return; |
|||
} |
|||
Map<String, Object> messageMap = new HashMap<>(); |
|||
if (inputMessage.length()>10) { |
|||
try { |
|||
messageMap = JsonUtils.parseObject(inputMessage); |
|||
Patent patent=new Patent(); |
|||
patent.setTitle((String) messageMap.get("title")); |
|||
String inv=(String) messageMap.get("inventor"); |
|||
List<String> invList=new ArrayList(); |
|||
if(inv.contains(";")){ |
|||
// if it contains a semicolon, split on semicolons |
|||
invList = Arrays.asList(inv.split(";")); |
|||
}else if (inv.contains(",")){ |
|||
// otherwise, if it contains a comma, split on commas |
|||
invList = Arrays.asList(inv.split(",")); |
|||
} |
|||
if(invList.size()>0){ |
|||
List inventor=new ArrayList(); |
|||
for (String invname:invList){ |
|||
Map ma=new HashMap(); |
|||
ma.put("id",""); |
|||
ma.put("name",invname); |
|||
// organization info |
|||
Map or =new HashMap(); |
|||
or.put("id",""); |
|||
or.put("name",""); |
|||
List organs=new ArrayList<>(); |
|||
organs.add(or); |
|||
ma.put("organs",organs); |
|||
inventor.add(ma); |
|||
patent.setInventor(inventor); |
|||
} |
|||
} |
|||
patent.setSummary((String) messageMap.get("summary")); |
|||
patent.setKeywords((String) messageMap.get("keywords")); |
|||
patent.setNum((String) messageMap.get("num")); |
|||
String patentee=(String) messageMap.get("patentee"); |
|||
List<String> patenteeList=new ArrayList(); |
|||
if(patentee.contains(";")){ |
|||
// if it contains a semicolon, split on semicolons |
|||
patenteeList = Arrays.asList(patentee.split(";")); |
|||
}else if (patentee.contains(",")){ |
|||
// otherwise, if it contains a comma, split on commas |
|||
patenteeList = Arrays.asList(patentee.split(",")); |
|||
}else { |
|||
patenteeList.add(patentee); |
|||
} |
|||
if(patenteeList.size()>0){ |
|||
List inventor=new ArrayList(); |
|||
for (String invname:patenteeList){ |
|||
Map ma=new HashMap(); |
|||
ma.put("id",""); |
|||
ma.put("name",invname.trim()); |
|||
// organization info |
|||
Map or =new HashMap(); |
|||
or.put("id",""); |
|||
or.put("name",""); |
|||
List organs=new ArrayList<>(); |
|||
organs.add(or); |
|||
ma.put("organs",organs); |
|||
inventor.add(ma); |
|||
patent.setPatentee(inventor); |
|||
} |
|||
} |
|||
patent.setPatentNum((String) messageMap.get("patentNum")); |
|||
patent.setPatentTime(DataCheckUtil.convertString((String) messageMap.get("patentTime"))); |
|||
patent.setApplyNum((String) messageMap.get("applyNum")); |
|||
patent.setApplyTime(DataCheckUtil.convertString((String) messageMap.get("applyTime"))); |
|||
patent.setClassify((String) messageMap.get("classify")); |
|||
patent.setCountry((String) messageMap.get("country")); |
|||
patent.setClaims((String) messageMap.get("claims")); |
|||
patent.setCrawlUrl((String) messageMap.get("crawlUrl")); |
|||
patent.setCrawlTimeStr("2025-05-31 00:00:00"); |
|||
patent.setIshow("20250512"); |
|||
try { |
|||
patent.setCitedCountTotal((int) messageMap.get("citedCountTotal")); |
|||
} catch (Exception e) { |
|||
// e.printStackTrace(); |
|||
} |
|||
patent.setImagePath(new ArrayList()); |
|||
String docid = GetMD5Code((String) messageMap.get("num")); |
|||
patent.setDocId(docid); |
|||
// patent.setDocType("四级实验室"); |
|||
patent.set_id_(docid); |
|||
// patent.setEnSource((String) messageMap.get("keywords")); |
|||
long dateTimenow = System.currentTimeMillis(); |
|||
patent.setCreateTime(dateTimenow); |
|||
String createTimeStr= DataCheckUtil.getCurrentTime(dateTimenow); |
|||
patent.setCreateTimeStr(createTimeStr); |
|||
LOGGER.info("Parse Patent={}", JSONObject.toJSON(patent)); |
|||
ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_patent_csci","_doc" ); |
|||
elastiProducer.sendMessageToEs(JsonUtils.toJSONString(patent)); |
|||
} catch (Exception e) { |
|||
LOGGER.error("Parse ErrotPatent={}", JSONObject.toJSON(messageMap)); |
|||
e.printStackTrace(); |
|||
} |
|||
} |
|||
} |
|||
|
|||
|
|||
} |
|||
@@ -0,0 +1,98 @@ |
|||
package com.zyzs.otherdatasave.service; |
|||
|
|||
import com.alibaba.fastjson.JSONObject; |
|||
import com.bfd.crawler.elasti.ElastiProducerHigh; |
|||
import com.bfd.crawler.utils.JsonUtils; |
|||
import com.zyzs.otherdatasave.bean.Clini; |
|||
import com.zyzs.otherdatasave.bean.Proj; |
|||
import com.zyzs.otherdatasave.cache.Constants; |
|||
import com.zyzs.otherdatasave.util.DataCheckUtil; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.stereotype.Service; |
|||
|
|||
import java.util.*; |
|||
|
|||
import static com.zyzs.otherdatasave.util.MfMD5Util.GetMD5Code; |
|||
|
|||
@Service |
|||
public class QueryProj { |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QueryProj.class); |
|||
public void query() { |
|||
String inputMessage = Constants.getDrug().poll(); // poll --> returns null if the queue is empty (note: this project service reads from the drug queue) |
|||
if (Objects.isNull(inputMessage)) { |
|||
return; |
|||
} |
|||
Map<String, Object> messageMap = new HashMap<>(); |
|||
if (inputMessage.length()>10) { |
|||
try { |
|||
messageMap = JsonUtils.parseObject(inputMessage); |
|||
Map data= (Map) messageMap.get("data"); |
|||
Proj pro=new Proj(); |
|||
pro.setName((String) data.get("title")); |
|||
pro.setPubTime((String) data.get("projectStartTime")); |
|||
pro.setFundingAgency((String) data.get("sponsor")); |
|||
pro.setCycle(""); |
|||
pro.setCountry((String) data.get("country")); |
|||
|
|||
pro.setFunding((String) data.get("projectFunding")); |
|||
Map info=new HashMap(); |
|||
info.put("id",(String) data.get("projectNum")); |
|||
info.put("author",(String) data.get("projectLeader")); |
|||
info.put("starttime",(String) data.get("projectStartTime")); |
|||
info.put("endtime",(String) data.get("projectEndTime")); |
|||
info.put("Institution",(String) data.get("sponsorPart")); |
|||
info.put("summary",(String) data.get("brief")); |
|||
List infomation=new ArrayList(); |
|||
infomation.add(info); |
|||
pro.setProjectInformation(infomation); |
|||
|
|||
Map funin=new HashMap(); |
|||
funin.put("agency",(String) data.get("sponsor")); |
|||
funin.put("funding",(String) data.get("projectFunding")); |
|||
funin.put("country",(String) data.get("country")); |
|||
List funlist=new ArrayList(); |
|||
funlist.add(funin); |
|||
pro.setFundingInformation(funlist); |
|||
|
|||
Map org=new HashMap(); |
|||
org.put("id",""); |
|||
org.put("name",data.get("relatedProject")); |
|||
List orglist=new ArrayList(); |
|||
orglist.add(org); |
|||
pro.setOrgans(orglist); |
|||
String kewords= (String) messageMap.get("keyword"); |
|||
kewords=kewords.replace("+", " "); |
|||
pro.setKeywords(kewords); |
|||
String docid = GetMD5Code((String) data.get("projectNum")); |
|||
pro.setDocId(docid); |
|||
pro.setDataId(docid); |
|||
pro.set_id_(docid); |
|||
pro.setEnSource(""); |
|||
pro.setDocType(""); |
|||
pro.setDocType(""); |
|||
pro.setCreateTimeStr("20250512"); |
|||
pro.setCrawlUrl("crawlUrl"); |
|||
|
|||
long dateTimenow = System.currentTimeMillis(); |
|||
pro.setCreateTime(dateTimenow); |
|||
String createTimeStr= DataCheckUtil.getCurrentTime(dateTimenow); |
|||
pro.setCreateTimeStr(createTimeStr); |
|||
LOGGER.info("Parse QueryProj={}", JSONObject.toJSON(pro)); |
|||
ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_project_csci","_doc" ); |
|||
elastiProducer.sendMessageToEs(JsonUtils.toJSONString(pro)); |
|||
} catch (Exception e) { |
|||
LOGGER.error("Parse ErrotProj={}", JSONObject.toJSON(messageMap)); |
|||
e.printStackTrace(); |
|||
} |
|||
|
|||
} |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
} |
|||
} |
|||
@@ -0,0 +1,98 @@ |
|||
package com.zyzs.otherdatasave.service; |
|||
|
|||
import com.alibaba.fastjson.JSONObject; |
|||
import com.bfd.crawler.elasti.ElastiProducerHigh; |
|||
import com.bfd.crawler.utils.JsonUtils; |
|||
import com.zyzs.otherdatasave.bean.Clini; |
|||
|
|||
import com.zyzs.otherdatasave.cache.Constants; |
|||
import com.zyzs.otherdatasave.util.DataCheckUtil; |
|||
import com.zyzs.otherdatasave.worker.QuerycliniProducer; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.stereotype.Service; |
|||
import java.util.*; |
|||
import static com.zyzs.otherdatasave.util.MfMD5Util.GetMD5Code; |
|||
|
|||
@Service |
|||
public class Queryclini { |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(Queryclini.class); |
|||
public void query() { |
|||
String inputMessage = Constants.getClini().poll(); // poll --> returns null if the queue is empty |
|||
if (Objects.isNull(inputMessage)) { |
|||
return; |
|||
} |
|||
Map<String, Object> messageMap = new HashMap<>(); |
|||
if (inputMessage.length()>10) { |
|||
try { |
|||
messageMap = JsonUtils.parseObject(inputMessage); |
|||
Clini cl=new Clini(); |
|||
cl.setCountry((String) messageMap.get("country")); |
|||
cl.setDesc((String) messageMap.get("title")); |
|||
cl.setRegNum((String) messageMap.get("registNum")); |
|||
String regtime=messageMap.get("registTime").toString().trim(); |
|||
// regtime = regtime.substring(0, regtime.length() - 1); |
|||
cl.setRegDate(DataCheckUtil.convertString(regtime)); |
|||
cl.setRegStatus((String) messageMap.get("registStatus")); |
|||
cl.setTitle((String) messageMap.get("registTitle")); |
|||
cl.setScientificName((String) messageMap.get("fullTitle")); |
|||
cl.setSource((String) messageMap.get("sponsor")); |
|||
cl.setResponsibleUnit((String) messageMap.get("sponsorPart")); |
|||
cl.setStudyType((String) messageMap.get("studyType")); |
|||
cl.setPhase((String) messageMap.get("phase")); |
|||
cl.setDiseases((String) messageMap.get("disease")); |
|||
cl.setStudyDesign((String) messageMap.get("studyDesign")); |
|||
cl.setPurpose((String) messageMap.get("studyObjective")); |
|||
// cl.setProjectFunding((String) messageMap.get("fullTitle")); // research funding |
|||
cl.setCountry((String) messageMap.get("country")); |
|||
cl.setImtime((String) messageMap.get("studyStartDate")); |
|||
// sample details (inclusion/exclusion criteria, interventions, outcome measures) |
|||
List measure=new ArrayList(); |
|||
Map meas=new HashMap(); |
|||
meas.put("inclusion",(String) messageMap.get("inclusionCriteria")); |
|||
meas.put("exclusion",(String) messageMap.get("exclusionCriteria")); |
|||
meas.put("interventions",(String) messageMap.get("intervention")); |
|||
meas.put("measuring",(String) messageMap.get("primaryOutcome")); |
|||
measure.add(meas); |
|||
cl.setMeasures(measure); |
|||
// recruitment status |
|||
List recru=new ArrayList(); |
|||
Map recruitment=new HashMap(); |
|||
recruitment.put("condition",(String) messageMap.get("currentStatus")); |
|||
recruitment.put("size", (String) messageMap.get("enrollment")); |
|||
recruitment.put("country", (String) messageMap.get("country")); |
|||
recruitment.put("tagTime", (String) messageMap.get("tagTime")); |
|||
recru.add(recruitment); |
|||
cl.setRecruitment(recru); |
|||
cl.setAvatarPath(new ArrayList()); |
|||
String docid = GetMD5Code((String) messageMap.get("registNum")); |
|||
cl.setDocId(docid); |
|||
cl.setDataId(docid); |
|||
cl.set_id_(docid); |
|||
cl.setEnSource(""); |
|||
cl.setDocType(""); |
|||
long dateTimenow = System.currentTimeMillis(); |
|||
cl.setCreateTime(dateTimenow); |
|||
String createTimeStr= DataCheckUtil.getCurrentTime(dateTimenow); |
|||
cl.setCreateTimeStr(createTimeStr); |
|||
cl.setIsShow("20250512"); |
|||
|
|||
cl.setCrawlUrl((String) messageMap.get("crawlUrl")); |
|||
LOGGER.info("Parse Queryclini={}", JSONObject.toJSON(cl)); |
|||
ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_clini_csci","_doc" ); |
|||
elastiProducer.sendMessageToEs(JsonUtils.toJSONString(cl)); |
|||
} catch (Exception e) { |
|||
LOGGER.error("Parse Errotclini={}", JSONObject.toJSON(messageMap)); |
|||
e.printStackTrace(); |
|||
} |
|||
|
|||
} |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
} |
|||
} |
|||
@@ -0,0 +1,52 @@ |
|||
package com.zyzs.otherdatasave.service; |
|||
|
|||
import com.alibaba.fastjson.JSONObject; |
|||
import com.bfd.crawler.elasti.ElastiProducerHigh; |
|||
import com.bfd.crawler.utils.JsonUtils; |
|||
import com.zyzs.otherdatasave.bean.Clini; |
|||
import com.zyzs.otherdatasave.bean.ClinikJ; |
|||
import com.zyzs.otherdatasave.cache.Constants; |
|||
import com.zyzs.otherdatasave.util.DataCheckUtil; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.stereotype.Service; |
|||
|
|||
import java.util.*; |
|||
|
|||
import static com.zyzs.otherdatasave.util.MfMD5Util.GetMD5Code; |
|||
|
|||
@Service |
|||
public class QuerycliniKJ { |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QuerycliniKJ.class); |
|||
public void query() { |
|||
String inputMessage = Constants.getClini().poll(); // poll --> returns null if the queue is empty |
|||
if (Objects.isNull(inputMessage)) { |
|||
return; |
|||
} |
|||
Map<String, Object> messageMap = new HashMap<>(); |
|||
if (inputMessage.length()>10) { |
|||
try { |
|||
messageMap = JsonUtils.parseObject(inputMessage); |
|||
ClinikJ cl = new ClinikJ(); |
|||
cl.setCountry_raw((String) messageMap.get("country")); |
|||
cl.setTitle_raw((String) messageMap.get("title")); |
|||
cl.setSummary_raw((String) messageMap.get("summary")); |
|||
cl.setTrial_intervention_raw((String) messageMap.get("intervention")); |
|||
cl.setTrial_registration_date((String) messageMap.get("registTime")); |
|||
cl.setUrl((String) messageMap.get("crawlUrl")); |
|||
|
|||
|
|||
} catch (Exception e) { |
|||
e.printStackTrace(); |
|||
} |
|||
|
|||
} |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
} |
|||
} |
|||
@@ -0,0 +1,69 @@ |
|||
package com.zyzs.otherdatasave.service; |
|||
|
|||
import com.alibaba.fastjson.JSONObject; |
|||
import com.bfd.crawler.elasti.ElastiProducerHigh; |
|||
import com.bfd.crawler.utils.JsonUtils; |
|||
import com.zyzs.otherdatasave.bean.Clini; |
|||
import com.zyzs.otherdatasave.cache.Constants; |
|||
import com.zyzs.otherdatasave.util.AllKeys; |
|||
import com.zyzs.otherdatasave.util.DataCheckUtil; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.stereotype.Service; |
|||
|
|||
import java.util.*; |
|||
|
|||
import static com.zyzs.otherdatasave.util.MfMD5Util.GetMD5Code; |
|||
|
|||
@Service |
|||
public class Querykafka { |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(Querykafka.class); |
|||
public void query() { |
|||
String inputMessage = Constants.getClini().poll(); // poll --> returns null if the queue is empty |
|||
|
|||
if (Objects.isNull(inputMessage)) { |
|||
return; |
|||
} |
|||
|
|||
Map<String, Object> messageMap = new HashMap<>(); |
|||
Map<String, Object> readmessageMap = new HashMap<>(); |
|||
if (inputMessage.length()>10) { |
|||
try { |
|||
|
|||
messageMap= AllKeys.getMap(); |
|||
readmessageMap = JsonUtils.parseObject(inputMessage); |
|||
readmessageMap.put("enSource", readmessageMap.get("enSource").toString().trim()); |
|||
readmessageMap.put("_id_", readmessageMap.get("_id").toString()); |
|||
readmessageMap.remove("_id"); |
|||
messageMap.putAll(readmessageMap); |
|||
messageMap.put("isnew","0522"); |
|||
|
|||
// |
|||
// if ( ! Constants.equipment.contains(messageMap.get("url").toString())){ |
|||
// Constants.equipment.put(messageMap.get("url").toString()); |
|||
// |
|||
// ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_news_crb","_doc" ); |
|||
// elastiProducer.sendMessageToEs(JsonUtils.toJSONString(messageMap)); |
|||
// } |
|||
|
|||
|
|||
System.out.println(messageMap.get("isshow").toString()); |
|||
|
|||
ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_306296","_doc" ); |
|||
elastiProducer.sendMessageToEs(JsonUtils.toJSONString(messageMap)); |
|||
|
|||
} catch (Exception e) { |
|||
// LOGGER.error("Parse Errotclini={}", JSONObject.toJSON(messageMap)); |
|||
e.printStackTrace(); |
|||
} |
|||
|
|||
} |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
} |
|||
} |
|||
@@ -0,0 +1,147 @@ |
|||
package com.zyzs.otherdatasave.service; |
|||
|
|||
import com.alibaba.fastjson.JSONObject; |
|||
import com.bfd.crawler.elasti.ElastiProducerHigh; |
|||
import com.bfd.crawler.utils.JsonUtils; |
|||
import com.zyzs.otherdatasave.bean.Eqiupment; |
|||
import com.zyzs.otherdatasave.cache.Constants; |
|||
import com.zyzs.otherdatasave.util.*; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.stereotype.Service; |
|||
|
|||
import java.util.*; |
|||
|
|||
import static com.zyzs.otherdatasave.util.MfMD5Util.GetMD5Code; |
|||
|
|||
@Service |
|||
public class Queryorg { |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(Queryorg.class); |
|||
public void query() { |
|||
String inputMessage = Constants.getEquipment().poll(); // poll --> returns null if the queue is empty |
|||
if (Objects.isNull(inputMessage)) { |
|||
return; |
|||
} |
|||
Map<String, Object> messageMap = new HashMap<>(); |
|||
|
|||
Map<String, Object> readmessageMap = new HashMap<>(); |
|||
|
|||
|
|||
Map<String, Object> readmessageMap6812 = new HashMap<>(); |
|||
if (inputMessage.length()>10) { |
|||
try { |
|||
messageMap= AllKeys.getMap(); |
|||
// readmessageMap6812= AllKeys6812.getMap(); |
|||
|
|||
|
|||
readmessageMap6812= AllKeys6813.getMap(); |
|||
|
|||
|
|||
readmessageMap=JsonUtils.parseObject(inputMessage); |
|||
long dateTimenow = System.currentTimeMillis(); |
|||
String createTimeStr= DataCheckUtil.getCurrentTime(dateTimenow); |
|||
messageMap.put("createTime", dateTimenow); |
|||
messageMap.put("createTimeStr", createTimeStr); |
|||
|
|||
|
|||
|
|||
|
|||
String docid = GetMD5Code((String) messageMap.get("content")); |
|||
messageMap.put("dataId",docid); |
|||
messageMap.put("docId","zyzs_gov_"+docid); |
|||
messageMap.put("_id_",docid); |
|||
|
|||
// messageMap.put("crawlDataFlagType","keyword:"+messageMap.get("title")); |
|||
|
|||
|
|||
messageMap.putAll(readmessageMap); |
|||
messageMap.put("pubTimeStr", DateUtil.getbeforeHour()); |
|||
|
|||
|
|||
} catch (Exception e) { |
|||
e.printStackTrace(); |
|||
} |
|||
            try {
                // lab (306812) sink kept for reference:
                // ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_306812", "_doc");
                // elastiProducer.sendMessageToEs(JsonUtils.toJSONString(messageMap));

                String ensource = messageMap.get("enSource").toString().trim();
                if (ensource.contains("n")) {
                    if (readmessageMap6812.containsKey(ensource)) {
                        String taskid = readmessageMap6812.get(ensource).toString();
                        messageMap.put("taskId", taskid);
                        messageMap.put("crawlDataFlag", "keyword:" + messageMap.get("title"));
                        System.out.println(JsonUtils.toJSONString(messageMap));
                        ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_306813", "_doc");
                        elastiProducer.sendMessageToEs(JsonUtils.toJSONString(messageMap));
                        // alternative sink kept for reference:
                        // ElastiProducerHigh elastiProducer2 = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_306815", "_doc");
                        // elastiProducer2.sendMessageToEs(JsonUtils.toJSONString(messageMap));
                    }
                }

                // REWU: alternative routing that matches on title against a "taskId###enSource###source"
                // map (see AllKeys6815), kept for reference:
                // String ensource = messageMap.get("title").toString().trim();
                // String taskid = readmessageMap6812.get(ensource).toString();
                // // e.g. map.put("Dr. Lucía Ramírez", "1303702###sjwszz###世界卫生组织");
                // String taskId = taskid.split("###")[0];
                // String enSource = taskid.split("###")[1];
                // String source = taskid.split("###")[2];
                // messageMap.put("crawlDataFlag", "keyword:" + messageMap.get("source"));
                // messageMap.put("taskId", taskId);
                // messageMap.put("enSource", enSource);
                // messageMap.put("source", source);
                // System.out.println(JsonUtils.toJSONString(messageMap));
                // ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_306815", "_doc");
                // elastiProducer.sendMessageToEs(JsonUtils.toJSONString(messageMap));

                // Alternative routing that matches on title against a "taskId###enSource" map,
                // writing to cl_special_1.0_306814, kept for reference:
                // String ensource = messageMap.get("title").toString();
                // if (readmessageMap6812.containsKey(ensource)) {
                //     String taskid = readmessageMap6812.get(ensource).toString();
                //     messageMap.put("crawlDataFlag", "keyword:" + ensource);
                //     messageMap.put("taskId", taskid.split("###")[0]);
                //     messageMap.put("enSource", taskid.split("###")[1]);
                //     // ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_306812", "_doc");
                //     // elastiProducer.sendMessageToEs(JsonUtils.toJSONString(messageMap));
                //     ElastiProducerHigh elastiProducer = ElastiProducerHigh.getInstance(1, 3, "cl_special_1.0_306814", "_doc");
                //     elastiProducer.sendMessageToEs(JsonUtils.toJSONString(messageMap));
                // }
            } catch (Exception e) {
                LOGGER.error("Parse ErrorQueryEq={}", JSONObject.toJSON(inputMessage));
                e.printStackTrace();
            }
        }
    }
}
|||
@@ -0,0 +1,202 @@
|||
package com.zyzs.otherdatasave.util; |
|||
|
|||
import java.util.*; |
|||
|
|||
public class AllKeys { |
|||
private static Map<String, Object> map = new HashMap<>();
|||
|
|||
public static Map<String, Object> getMap() { |
|||
return map; |
|||
} |
|||
|
|||
public static void setMap(Map<String, Object> map) { |
|||
AllKeys.map = map; |
|||
} |
|||
|
|||
static { |
|||
map.put("_id_",""); |
|||
map.put("age",""); |
|||
map.put("area",""); |
|||
map.put("attitudesCount",""); |
|||
map.put("attr",""); |
|||
map.put("author",""); |
|||
map.put("authorId",""); |
|||
map.put("authorLevel",""); |
|||
map.put("authornickname",""); |
|||
map.put("availability",0); |
|||
map.put("avatar",""); |
|||
map.put("brand",""); |
|||
map.put("brandId",""); |
|||
map.put("cate",""); |
|||
map.put("channel",""); |
|||
map.put("city",""); |
|||
map.put("collectCount",0); |
|||
map.put("commentId",""); |
|||
map.put("commentScore",0); |
|||
map.put("commentsCount",0); |
|||
map.put("commentUrl",""); |
|||
map.put("content",""); |
|||
map.put("contentLength",0); |
|||
map.put("contentSimHash",""); |
|||
map.put("contentTag",""); |
|||
map.put("country",""); |
|||
map.put("crawlDataFlag",""); |
|||
map.put("crawlDate",new Date ()); |
|||
map.put("crawlDay",0L); |
|||
map.put("crawlTime",0L); |
|||
map.put("crawlTimeStr",""); |
|||
map.put("createDate",new Date ()); |
|||
map.put("createDay",0L); |
|||
map.put("createTime",0L); |
|||
map.put("createTimeStr",""); |
|||
map.put("dataCount",0); |
|||
map.put("dataId",""); |
|||
map.put("docId",""); |
|||
map.put("docType",""); |
|||
map.put("downCnt",0); |
|||
map.put("egc",0); |
|||
map.put("enSource",""); |
|||
map.put("expression",new ArrayList<>()); |
|||
map.put("extension",""); |
|||
map.put("fansCount",""); |
|||
map.put("favorCnt",0); |
|||
map.put("filePath",new ArrayList<>()); |
|||
map.put("imagePath",new ArrayList<>()); |
|||
map.put("videoPath",new ArrayList<>()); |
|||
map.put("filePathSize",new ArrayList<>()); |
|||
map.put("imagePathSize",new ArrayList<>()); |
|||
map.put("videoPathSize",new ArrayList<>()); |
|||
map.put("finalPhrase",""); |
|||
map.put("firstListBrand",""); |
|||
map.put("fiveListBrand",""); |
|||
map.put("forumScore",""); |
|||
map.put("forwardAttitudesCount",0); |
|||
map.put("forwardAuthor",""); |
|||
map.put("forwardAvatar",""); |
|||
map.put("forwardCommentsCount",0); |
|||
map.put("forwardContent",""); |
|||
map.put("forwardImgs",""); |
|||
map.put("forwardPostSource",""); |
|||
map.put("forwardPubTime",0L); |
|||
map.put("forwardQuoteCount",0); |
|||
map.put("forwardUrl",""); |
|||
map.put("forwardUserId",""); |
|||
map.put("forwardUserType",0); |
|||
map.put("forwardUserUrl",""); |
|||
map.put("fourListBrand",""); |
|||
map.put("friendsCount",""); |
|||
map.put("getSource",""); |
|||
map.put("hashTag",new ArrayList<>()); |
|||
map.put("hlKeywords",new ArrayList<>()); |
|||
map.put("impression",""); |
|||
map.put("isDownload",false); |
|||
map.put("isVip",0); |
|||
map.put("language",""); |
|||
map.put("lastModifiedTime",0L); |
|||
map.put("listBrand",""); |
|||
map.put("location",""); |
|||
map.put("nomorprice",0); |
|||
map.put("opinions",new ArrayList<>()); |
|||
map.put("originalPhrase",""); |
|||
map.put("otherSourceJson",""); |
|||
map.put("pageCommentCount",0); |
|||
map.put("pageTranspondCount",0); |
|||
map.put("pageType",""); |
|||
map.put("pgc",0); |
|||
map.put("pictureList",""); |
|||
map.put("places",new ArrayList<>()); |
|||
map.put("postCount",""); |
|||
map.put("postId",""); |
|||
map.put("postSource",""); |
|||
map.put("price",0); |
|||
map.put("primary",1); |
|||
map.put("productParameter",""); |
|||
map.put("projectName",""); |
|||
map.put("promotionInfo",""); |
|||
map.put("province",""); |
|||
map.put("pubDate",DateUtil.TgetbeforeHour()); |
|||
map.put("pubDay",DateUtil.getday()); |
|||
map.put("pubTime",DateUtil.getbeforonecurr()); |
|||
map.put("pubTimeStr", DateUtil.getbeforeHour()); |
|||
map.put("quoteCount",0); |
|||
map.put("readCount",0); |
|||
map.put("resolution",""); |
|||
map.put("secondListBrand",""); |
|||
map.put("sex",""); |
|||
map.put("sign",""); |
|||
map.put("siteId",""); |
|||
map.put("skuProperties",""); |
|||
map.put("smallImgs",""); |
|||
map.put("source",""); |
|||
map.put("sysAbstract",""); |
|||
map.put("sysKeywords",""); |
|||
map.put("sysSentiment",0.0); |
|||
map.put("threeListBrand",""); |
|||
map.put("thumbnails",""); |
|||
map.put("title",""); |
|||
map.put("titleLength",0); |
|||
map.put("titleSimHash",""); |
|||
map.put("translateContent",""); |
|||
map.put("translateTitle",""); |
|||
map.put("ugc",0); |
|||
map.put("url",""); |
|||
map.put("urlHash",""); |
|||
map.put("userType",""); |
|||
map.put("userUrl",""); |
|||
map.put("videoTime",""); |
|||
map.put("videoUrl",""); |
|||
map.put("avatarPath",""); |
|||
map.put("viewCnt",0); |
|||
map.put("channelNum",""); |
|||
map.put("crawlDataFlagType",""); |
|||
map.put("primaryPost",""); |
|||
map.put("dns",""); |
|||
map.put("asrText",""); |
|||
map.put("ocrText",new ArrayList<>()); |
|||
map.put("srcfilePath",new ArrayList<>()); |
|||
map.put("srcimagePath",new ArrayList<>()); |
|||
map.put("srcvideoPath",new ArrayList<>()); |
|||
map.put("hasOCR",0); |
|||
map.put("hasASR",0); |
|||
map.put("asrLength",0); |
|||
map.put("ocrLength",0); |
|||
map.put("translateTitleLength",""); |
|||
map.put("translateContentLength",""); |
|||
map.put("hasTrans",0); |
|||
map.put("goodrate",0); |
|||
map.put("generalrate",0); |
|||
map.put("poorrate",0); |
|||
map.put("processtime",new HashMap<>()); |
|||
map.put("tag",""); |
|||
map.put("mentionAccountUrl",new ArrayList<>()); |
|||
map.put("mentionAccount",new ArrayList<>()); |
|||
map.put("userTypeContent",""); |
|||
map.put("mentionTopic",new ArrayList<>()); |
|||
map.put("mentionTopicUrl",new ArrayList<>()); |
|||
map.put("groupRules",new ArrayList<>()); |
|||
map.put("otherInfoJson",""); |
|||
map.put("members",new ArrayList<>()); |
|||
map.put("postLikeCount",-1); |
|||
map.put("postPubTimeStr",""); |
|||
map.put("postReadCount",-1); |
|||
map.put("postAttitudesCount",-1); |
|||
map.put("postAuthor",""); |
|||
map.put("postAvatar",""); |
|||
map.put("postCommentsCount",-1); |
|||
map.put("postContent",""); |
|||
map.put("postTitle",""); |
|||
map.put("postImgs",""); |
|||
map.put("postPostSource",""); |
|||
map.put("postPubTime",0L); |
|||
map.put("postQuoteCount",-1); |
|||
map.put("postUrl",""); |
|||
map.put("postUserId",""); |
|||
map.put("postUserType",""); |
|||
map.put("contentHtml",""); |
|||
map.put("isJson","0"); |
|||
map.put("isshow",""); |
|||
map.put("keywords",""); |
|||
|
|||
|
|||
} |
|||
} |
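// NOTE (added): this class only supplies default values for every field expected by the target ES
// mapping; services overlay the parsed message on top of these defaults before indexing. Because
// getMap() hands out the shared static instance, callers should copy it before mutating, e.g.
// (illustrative sketch):
//
//     Map<String, Object> doc = new HashMap<>(AllKeys.getMap()); // defensive copy of the defaults
//     doc.putAll(JsonUtils.parseObject(rawJson));                // message fields override defaults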
|||
@@ -0,0 +1,255 @@
|||
package com.zyzs.otherdatasave.util; |
|||
|
|||
import java.util.ArrayList; |
|||
import java.util.Date; |
|||
import java.util.HashMap; |
|||
import java.util.Map; |
|||
|
|||
public class AllKeys6812 { |
|||
private static Map<String, Object> map = new HashMap<>();
|||
|
|||
public static Map<String, Object> getMap() { |
|||
return map; |
|||
} |
|||
|
|||
public static void setMap(Map<String, Object> map) { |
|||
AllKeys6812.map = map; |
|||
} |
|||
|
|||
static { |
|||
map.put("nadlydw",1302568); |
|||
map.put("nkuleuven",1302569); |
|||
map.put("ncharite",1302570); |
|||
map.put("nflxd",1302571); |
|||
map.put("nxjdx",1302572); |
|||
map.put("nsvl",1302573); |
|||
map.put("ncnm",1302574); |
|||
map.put("nwrai",1302575); |
|||
map.put("nwashington",1302576); |
|||
map.put("nutsouthwestern",1302577); |
|||
map.put("nunm",1302578); |
|||
map.put("ndhsgov",1302579); |
|||
map.put("ncuhcbl",1302580); |
|||
map.put("nwwwcdcgov",1302581); |
|||
map.put("ncfdc",1302582); |
|||
map.put("nfolkhalsomyndigheten",1302583); |
|||
map.put("nunil",1302584); |
|||
map.put("nuogb",1302585); |
|||
map.put("nphenghl",1302586); |
|||
map.put("nwnbl",1302587); |
|||
map.put("ncrlaanscfsidk",1302588); |
|||
map.put("nvrcofcvsckmok",1302589); |
|||
map.put("nbckmsoek",1302590); |
|||
map.put("nsccpeehmohk",1302591); |
|||
map.put("nupsok",1302592); |
|||
map.put("nlmaog",1302593); |
|||
map.put("nvlss",1302594); |
|||
map.put("ncdphntb",1302595); |
|||
map.put("nktsrsc",1302596); |
|||
map.put("nkpl",1302597); |
|||
map.put("ncrl",1302598); |
|||
map.put("nashdec",1302599); |
|||
map.put("napsm",1302600); |
|||
map.put("nsvlotroa",1302601); |
|||
map.put("nadapdrl4lilkls",1302602); |
|||
map.put("nrvlzvl5lbgghgsgs",1302603); |
|||
map.put("ndrsldsfcsouk",1302604); |
|||
map.put("nlrsvl",1302605); |
|||
map.put("nprslsdscsouk",1302606); |
|||
map.put("nkrtslsdscsouk",1302607); |
|||
map.put("nkrslsdscsouk",1302608); |
|||
map.put("ncrslsdscsouk",1302609); |
|||
map.put("ncrslsdscsouk",1302610); |
|||
map.put("nnphcomhuk",1302611); |
|||
map.put("nvrlecomhuk",1302612); |
|||
map.put("nvrlecomhuk",1302613); |
|||
map.put("ndrlecomhuk",1302614); |
|||
map.put("nzrlecomhuk",1302615); |
|||
map.put("nlrlcomhuk",1302616); |
|||
map.put("ntrlcomhuk",1302617); |
|||
map.put("nkrlecomhuk",1302618); |
|||
map.put("nkrlecomhuk",1302619); |
|||
map.put("norlsdscsouk",1302620); |
|||
map.put("ncrlotncdcandmk",1302621); |
|||
map.put("nlrdvl",1302622); |
|||
map.put("ngrl",1302623); |
|||
map.put("ntrl",1302624); |
|||
map.put("ngrl",1302625); |
|||
map.put("nsrl",1302626); |
|||
map.put("nlrl",1302627); |
|||
map.put("nncdcapmh",1302628); |
|||
map.put("nfnfscma",1302629); |
|||
map.put("nnrl",1302630); |
|||
map.put("nurdl",1302631); |
|||
map.put("nnarl",1302632); |
|||
map.put("nardl",1302633); |
|||
map.put("nfrdl",1302634); |
|||
map.put("nunhl",1302635); |
|||
map.put("ncvdl",1302636); |
|||
map.put("ncphrl",1302637); |
|||
map.put("nvbdl",1302638); |
|||
map.put("nslcvl",1302639); |
|||
map.put("nnpha",1302640); |
|||
map.put("nusamruk",1302641); |
|||
map.put("nusacshpk65thmb",1302642); |
|||
map.put("nusammck",1302643); |
|||
map.put("nbp8d837tbsk",1302644); |
|||
map.put("nkabsk",1302645); |
|||
map.put("nmst",1302646); |
|||
map.put("nmgosabsk",1302647); |
|||
map.put("nusacshc",1302648); |
|||
map.put("nkcrbchlab",1302649); |
|||
map.put("nnbtc",1302650); |
|||
map.put("nnclela",1302651); |
|||
map.put("nnahl",1302652); |
|||
map.put("npheoc",1302653); |
|||
map.put("nnbtc",1302654); |
|||
map.put("nusammce",1302655); |
|||
map.put("nusphceg",1302656); |
|||
map.put("nccrl",1302657); |
|||
map.put("nzddlraddlma",1302658); |
|||
map.put("nswdlwhhgaf",1302659); |
|||
map.put("nnarrl",1302660); |
|||
map.put("nardl",1302661); |
|||
map.put("nrdl",1302662); |
|||
map.put("nsim",1302663); |
|||
map.put("ntrl",1302664); |
|||
map.put("nnrlnltldtsc",1302665); |
|||
map.put("ntafimsoiusadoh",1302666); |
|||
map.put("nshmsm",1302667); |
|||
map.put("nftmm",1302668); |
|||
map.put("ncusm",1302669); |
|||
map.put("nncgeabmohesri",1302670); |
|||
map.put("nqsmi",1302671); |
|||
map.put("nifmdmfalt",1302672); |
|||
map.put("nivccmfalt",1302673); |
|||
map.put("ncbcoutaccdc",1302674); |
|||
map.put("nusacdmdri",1302675); |
|||
map.put("nllnl",1302676); |
|||
map.put("nlanl",1302677); |
|||
map.put("nsnl",1302678); |
|||
map.put("ncdpncehlsous",1302679); |
|||
map.put("nbadeidcwbyrc",1302680); |
|||
map.put("ndbvrc",1302681); |
|||
map.put("nnadc",1302682); |
|||
map.put("nlstf",1302683); |
|||
map.put("nnsocdbtdl",1302684); |
|||
map.put("nllnl",1302685); |
|||
map.put("ncdpnceadzvd",1302686); |
|||
map.put("nfsrldw",1302687); |
|||
map.put("nsprl",1302688); |
|||
map.put("nievidimcqphrid",1302689); |
|||
map.put("nbfgl",1302690); |
|||
map.put("nnhlntlrb",1302691); |
|||
map.put("ncuhfid",1302692); |
|||
map.put("nfvpl",1302693); |
|||
map.put("nfihtwlbv",1302694); |
|||
map.put("nutibatuhtl",1302695); |
|||
map.put("nhmbl",1302696); |
|||
map.put("ndaftml",1302697); |
|||
map.put("nucdnvrl",1302698); |
|||
map.put("nhsephld",1302699); |
|||
map.put("nsrl",1302700); |
|||
map.put("nanscaid",1302701); |
|||
map.put("ntrl",1302702); |
|||
map.put("nhrl",1302703); |
|||
map.put("nvdrvhl",1302704); |
|||
map.put("nreuhlslidclnmrl",1302705); |
|||
map.put("nreuhidcnmrl",1302706); |
|||
map.put("nnphsl",1302707); |
|||
map.put("nnhl",1302708); |
|||
map.put("npimbl",1302709); |
|||
map.put("nmdhdl",1302710); |
|||
map.put("nlfbd",1302711); |
|||
map.put("nbdribel",1302712); |
|||
map.put("nrnvafsabsl",1302713); |
|||
map.put("nuhg",1302714); |
|||
map.put("nancdcprlcb",1302715); |
|||
map.put("ncdluhiddfm",1302716); |
|||
map.put("nlailibmckg",1302717); |
|||
map.put("nrlfhpmoh",1302718); |
|||
map.put("nbdtl",1302719); |
|||
map.put("nfi",1302720); |
|||
map.put("napha",1302721); |
|||
map.put("nukhsa",1302722); |
|||
map.put("nibsc",1302723); |
|||
map.put("npi",1302724); |
|||
map.put("nhsaukc",1302725); |
|||
map.put("ncbac",1302726); |
|||
map.put("nusaidmri",1302727); |
|||
map.put("nfdcri",1302728); |
|||
map.put("ncrfrml",1302729); |
|||
map.put("ngncl",1302730); |
|||
map.put("nbsatloljmj",1302731); |
|||
map.put("ngsuhpc",1302732); |
|||
map.put("nneidl",1302733); |
|||
map.put("nuadc",1302734); |
|||
map.put("nnmrc",1302735); |
|||
map.put("nnrl",1302736); |
|||
map.put("nnsocnbacldb",1302737); |
|||
map.put("ncrfrml",1302738); |
|||
map.put("nfdiocr",1302739); |
|||
map.put("nfanratbarc",1302740); |
|||
map.put("npiadc",1302741); |
|||
map.put("ncwbyadeidrc",1302742); |
|||
map.put("ndabvrc",1302743); |
|||
map.put("nnhsql",1302744); |
|||
map.put("nqhfss",1302745); |
|||
map.put("naeidabr",1302746); |
|||
map.put("nfadlsp",1302747); |
|||
map.put("ncnml",1302748); |
|||
map.put("nnhlblfadpachvricaas",1302749); |
|||
map.put("nnkhlbpeca4l",1302750); |
|||
map.put("njml",1302751); |
|||
map.put("npinvril",1302752); |
|||
map.put("nbsl",1302753); |
|||
map.put("nbsl4leidzdcninfnl",1302754); |
|||
map.put("nhplsphasbl4l",1302755); |
|||
map.put("nsl",1302756); |
|||
map.put("nlsuhlocmvabed",1302757); |
|||
map.put("nsrcvl",1302758); |
|||
map.put("nvios",1302759); |
|||
map.put("ncrlotanscfsidk",1302760); |
|||
map.put("ncrl",1302761); |
|||
map.put("ncphrl",1302762); |
|||
map.put("nuamruetd",1302763); |
|||
map.put("nafrioms",1302764); |
|||
map.put("nunmrusp",1302765); |
|||
map.put("nadfmaidi",1302766); |
|||
map.put("nccrl",1302767); |
|||
map.put("nabr",1302768); |
|||
map.put("npibr",1302769); |
|||
map.put("nim",1302770); |
|||
map.put("nivb",1302771); |
|||
map.put("nbacc",1302772); |
|||
map.put("nrlcp",1302773); |
|||
map.put("nmnpritclo",1302774); |
|||
map.put("nlnibsa",1302775); |
|||
map.put("nrikenbsl4japan",1302776); |
|||
map.put("nivubaa",1302777); |
|||
map.put("nsdlusamriid",1302778); |
|||
map.put("nlospgrcvirb",1302779); |
|||
map.put("nvdlivugusa",1302780); |
|||
map.put("nnivnz",1302781); |
|||
map.put("nnivisrael",1302782); |
|||
map.put("nbiadab",1302783); |
|||
map.put("nuvrc",1302784); |
|||
map.put("nivbch",1302785); |
|||
map.put("nivrjbr",1302786); |
|||
map.put("nniphe-netherlands",1302787); |
|||
map.put("niitd",1302788); |
|||
map.put("ntbdcrcfidcr",1302789); |
|||
map.put("nivlpt",1302790); |
|||
map.put("ndib",1302791); |
|||
map.put("ntbsrvc",1302792); |
|||
map.put("nicbr",1302793); |
|||
map.put("niidbarcelona",1302794); |
|||
map.put("nkiv",1302795); |
|||
map.put("nmrcrscvirb",1302796); |
|||
map.put("nlvrvcrg",1302797); |
|||
map.put("niiddmarseille",1302798); |
|||
map.put("nnslh",1302799); |
|||
map.put("nlvrsl",1302800); |
|||
map.put("nusnmrcasiaz",1302801); |
|||
} |
|||
} |
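// NOTE (added): several keys in the static block above are inserted more than once (for example
// "ncrslsdscsouk", "nvrlecomhuk", "nkrlecomhuk", "nnbtc", "nllnl", "ngrl" and "ntrl"). In a HashMap
// the later put silently replaces the earlier one, so only the last taskId survives; worth checking
// whether the colliding entries were meant to use distinct keys.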
|||
@@ -0,0 +1,176 @@
|||
package com.zyzs.otherdatasave.util; |
|||
|
|||
import java.util.HashMap; |
|||
import java.util.Map; |
|||
|
|||
public class AllKeys6813 { |
|||
private static Map<String, Object> map = new HashMap<>();
|||
|
|||
public static Map<String, Object> getMap() { |
|||
return map; |
|||
} |
|||
|
|||
public static void setMap(Map<String, Object> map) { |
|||
AllKeys6813.map = map; |
|||
} |
|||
|
|||
static { |
|||
map.put("numontpellier", 1303116); |
|||
map.put("nkdca", 1303117); |
|||
map.put("nweizmann", 1303118); |
|||
map.put("nnicd", 1303119); |
|||
map.put("nniovi", 1303120); |
|||
map.put("nastar", 1303121); |
|||
map.put("nplymouth", 1303122); |
|||
map.put("nvniiv", 1303123); |
|||
map.put("ncsiro", 1303124); |
|||
map.put("nfiocruz", 1303125); |
|||
map.put("niovmg", 1303126); |
|||
map.put("nniidj", 1303127); |
|||
map.put("nivubaa", 1303128); |
|||
map.put("nusamriid", 1303129); |
|||
map.put("nrsrcfvb", 1303130); |
|||
map.put("nrkig", 1303131); |
|||
map.put("nivugusa", 1303132); |
|||
map.put("niib", 1303133); |
|||
map.put("nnniv", 1303134); |
|||
map.put("ninv", 1303135); |
|||
map.put("naDifbu", 1303136); |
|||
map.put("nuvrc", 1303137); |
|||
map.put("nbivc", 1303138); |
|||
map.put("nrjivb", 1303139); |
|||
map.put("nnih", 1303140); |
|||
map.put("npbrc", 1303141); |
|||
map.put("nnniphe", 1303142); |
|||
map.put("nsiva", 1303143); |
|||
map.put("nfniid", 1303144); |
|||
map.put("nmanchester", 1303145); |
|||
map.put("npasteur", 1303146); |
|||
map.put("niss", 1303147); |
|||
map.put("nuvrcs", 1303148); |
|||
map.put("npmrc", 1303149); |
|||
map.put("nszu", 1303150); |
|||
map.put("ncrvrc", 1303151); |
|||
map.put("nmedicina", 1303152); |
|||
map.put("nritmgovph", 1303153); |
|||
map.put("nsstdk", 1303154); |
|||
map.put("nconicetgovar", 1303155); |
|||
map.put("ncvrc", 1303156); |
|||
map.put("nsaioid", 1303157); |
|||
map.put("nnvigoth", 1303158); |
|||
map.put("nicmrgov", 1303159); |
|||
map.put("nbioids", 1303160); |
|||
map.put("ninspmp", 1303161); |
|||
map.put("nvector", 1303162); |
|||
map.put("nnihgokr", 1303163); |
|||
map.put("nleibniz", 1303164); |
|||
map.put("ninserm", 1303165); |
|||
map.put("nbnitm", 1303166); |
|||
map.put("nerasmusmc", 1303167); |
|||
map.put("nhrboicrf", 1303168); |
|||
map.put("nmi", 1303169); |
|||
map.put("nsioi", 1303170); |
|||
map.put("nvpupiog", 1303171); |
|||
map.put("ndomnkuoams", 1303172); |
|||
map.put("ndrde", 1303173); |
|||
map.put("ngmodiom", 1303174); |
|||
map.put("nivoum", 1303175); |
|||
map.put("nflifrioah", 1303176); |
|||
map.put("nbniotm", 1303177); |
|||
map.put("nrkicbtp", 1303178); |
|||
map.put("ngeiobmav", 1303179); |
|||
map.put("nsnrcnvi", 1303180); |
|||
map.put("ndgamnrbc", 1303181); |
|||
map.put("nffsa", 1303182); |
|||
map.put("nfdfmmc", 1303183); |
|||
map.put("ndncbbr", 1303184); |
|||
map.put("nbiopharm", 1303185); |
|||
map.put("nnrcc", 1303186); |
|||
map.put("ndrdcv", 1303187); |
|||
map.put("ndrdccsd", 1303188); |
|||
map.put("ncnip", 1303189); |
|||
map.put("npic", 1303190); |
|||
map.put("ndimsaimbas", 1303191); |
|||
map.put("nbnivrfdr", 1303192); |
|||
map.put("nfurj", 1303193); |
|||
map.put("nbabi", 1303194); |
|||
map.put("nrrpcem", 1303195); |
|||
map.put("nnbcepteptadmod", 1303196); |
|||
map.put("nabdrddstg", 1303197); |
|||
map.put("nacdp", 1303198); |
|||
map.put("nafrims", 1303199); |
|||
map.put("nkecvi", 1303200); |
|||
map.put("nnibmscua", 1303201); |
|||
map.put("nspcseem", 1303202); |
|||
map.put("nttdtnmrcusm", 1303203); |
|||
map.put("nadfmidi", 1303204); |
|||
map.put("ntabtnmrc", 1303205); |
|||
map.put("nusnmru2", 1303206); |
|||
map.put("nimsusaf", 1303207); |
|||
map.put("nuppsmohk", 1303208); |
|||
map.put("niibrd", 1303209); |
|||
map.put("nibrc", 1303210); |
|||
map.put("nniidlsh", 1303211); |
|||
map.put("niibr", 1303212); |
|||
map.put("nnzniv", 1303213); |
|||
map.put("nbifgh", 1303214); |
|||
map.put("nivuhm", 1303215); |
|||
map.put("ntnvi", 1303216); |
|||
map.put("nlvi", 1303217); |
|||
map.put("nugvi", 1303218); |
|||
map.put("nusamriid", 1303219); |
|||
map.put("ncrcidr", 1303220); |
|||
map.put("nniphren", 1303221); |
|||
map.put("nkiv", 1303222); |
|||
map.put("nirim", 1303223); |
|||
map.put("nvbri", 1303224); |
|||
map.put("nsrcvb", 1303225); |
|||
map.put("nliv", 1303226); |
|||
map.put("nimppdg", 1303227); |
|||
map.put("ndbi", 1303228); |
|||
map.put("nnclel", 1303229); |
|||
map.put("npil", 1303230); |
|||
map.put("nviuba", 1303231); |
|||
map.put("nusaafaaa", 1303232); |
|||
map.put("nwrrari", 1303233); |
|||
map.put("ntamch", 1303234); |
|||
map.put("nwraig", 1303235); |
|||
map.put("nnmru2ppd", 1303236); |
|||
map.put("namruk", 1303237); |
|||
map.put("nmrc", 1303238); |
|||
map.put("ntfsibra", 1303239); |
|||
map.put("nrmdcrin48", 1303240); |
|||
map.put("nfriFcrpwsbrmpri", 1303241); |
|||
map.put("nncdpmha", 1303242); |
|||
map.put("ndsnldmeri", 1303243); |
|||
map.put("nulfvmnvi", 1303244); |
|||
map.put("nulmfimi", 1303245); |
|||
map.put("nbdptladas", 1303246); |
|||
map.put("nlieh", 1303247); |
|||
map.put("nnphcmhu", 1303248); |
|||
map.put("nobnildvse", 1303249); |
|||
map.put("nnscecvmnaasu", 1303250); |
|||
map.put("nnvisva", 1303251); |
|||
map.put("nnildvseu", 1303252); |
|||
map.put("nrtcaddviinaasu", 1303253); |
|||
map.put("nimv", 1303254); |
|||
map.put("nvai", 1303255); |
|||
map.put("nashdhecc", 1303256); |
|||
map.put("ntimvbg", 1303257); |
|||
map.put("ntrsckg", 1303258); |
|||
map.put("nncidctg", 1303259); |
|||
map.put("nrlcphr", 1303260); |
|||
map.put("nnrcevi", 1303261); |
|||
map.put("nrimsek", 1303262); |
|||
map.put("nncbmesk", 1303263); |
|||
map.put("nuhg", 1303264); |
|||
map.put("nndmc", 1303265); |
|||
map.put("nnvrcvcsckma", 1303266); |
|||
map.put("nnphasl", 1303267); |
|||
map.put("nwraruik", 1303268); |
|||
map.put("naidb", 1303269); |
|||
map.put("nnicsa", 1303270); |
|||
map.put("ntdtnmrc", 1303271); |
|||
map.put("ncncead", 1303272); |
|||
} |
|||
} |
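// NOTE (added): this is the enSource -> taskId lookup table loaded by Queryorg (via
// readmessageMap6812 = AllKeys6813.getMap()); a hit routes the document into cl_special_1.0_306813.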
|||
@@ -0,0 +1,53 @@
|||
package com.zyzs.otherdatasave.util; |
|||
|
|||
import java.util.HashMap; |
|||
import java.util.Map; |
|||
|
|||
public class AllKeys6814 { |
|||
private static Map<String, Object> map = new HashMap<>();
|||
|
|||
public static Map<String, Object> getMap() { |
|||
return map; |
|||
} |
|||
|
|||
public static void setMap(Map<String, Object> map) { |
|||
AllKeys6814.map = map; |
|||
} |
|||
|
|||
static { |
|||
map.put("美海军陆战队化生事件快速反应部队(CBIRF)", "1303275###mhjlzdhssjfybd"); |
|||
map.put("美国国民警卫队化生放核事件加强反应部队", "1303276###mggmjwdhsfhsjjqfybd"); |
|||
map.put("华盛顿州国民警卫队化生放核事件加强反应部队", "1303277###hsdzgmjwdhsfhsjjqfybd"); |
|||
map.put("联合支助处(德国)", "1303278###lhzzc"); |
|||
map.put("德国联邦国防军放射生物研究所", "1303279###dglbgfjfsswyjs"); |
|||
map.put("德国陆军(德国联邦国防军陆军)", "1303280###dglj"); |
|||
map.put("德国联邦国防军微生物研究所", "1303281###dglbgfjwswyjs"); |
|||
map.put("德国联邦国防军", "1303282###dglbgfj"); |
|||
map.put("BBK(联邦民防与灾害援助局)", "1303283###lbmfyzhyzj"); |
|||
map.put("CBRN 防御营 750", "1303284###cbrnfyy750"); |
|||
map.put("北约应急部队", "1303285###byyjbd"); |
|||
map.put("北约组织", "1303286###byzz"); |
|||
map.put("德国联邦国防军 ABC 防御司令部", "1303287###dglbgfj"); |
|||
map.put("ABC 防御第 750 营", "1303288###abcfyd750y"); |
|||
map.put("ABC 防御第 7 营", "1303289###abcfyd7y"); |
|||
map.put("ABC 防御连(eFP 战斗群)", "1303290###abcfyl"); |
|||
map.put("ABC 防御第 1 团", "1303291###abcfyd1t"); |
|||
map.put("日本自卫队", "1303292###rbzwd"); |
|||
map.put("陆上自卫队", "1303293###lszwd"); |
|||
map.put("海上自卫队", "1303294###hszwd"); |
|||
map.put("航空自卫队", "1303295###hkzwd"); |
|||
map.put("对特殊武器卫生队", "1303296###dtswqwsd"); |
|||
map.put("第七化学防护队", "1303297###dqhxfhd"); |
|||
map.put("中央即时连队", "1303298###zyjsld"); |
|||
map.put("第 10 特殊武器防护队", "1303299###d10tswqfhd"); |
|||
map.put("化学科(陆上自卫队)", "1303300###hxk"); |
|||
map.put("日本自卫队联合作战司令部", "1303301###rbzwdlhzzslb"); |
|||
map.put("第 3 特殊武器防护队", "1303302###d3tswqfhd"); |
|||
map.put("第 8 特殊武器防护队", "1303303###d8tswqfhd"); |
|||
map.put("中央特种武器防护队", "1303304###zytzwqfhd"); |
|||
map.put("第 14 特殊武器防护队", "1303305###d14tswqfhd"); |
|||
map.put("第五化学防护队", "1303306###d5hxfhd"); |
|||
map.put("第 11 特殊武器防护队", "1303307###d11tswqfhd"); |
|||
map.put("第 15 旅核生物化学部队", "1303308###d15lhswhxbd"); |
|||
} |
|||
} |
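// NOTE (added): values here pack two fields as "taskId###pinyinAbbreviation"
// (e.g. "1303275###mhjlzdhssjfybd"); consumers split on "###" before use, as in the
// commented-out routing alternatives in Queryorg.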
|||
@@ -0,0 +1,233 @@
|||
package com.zyzs.otherdatasave.util; |
|||
|
|||
import java.util.HashMap; |
|||
import java.util.Map; |
|||
|
|||
public class AllKeys6815 { |
|||
private static Map<String, Object> map = new HashMap<>();
|||
|
|||
public static Map<String, Object> getMap() { |
|||
return map; |
|||
} |
|||
|
|||
public static void setMap(Map<String, Object> map) { |
|||
AllKeys6815.map = map; |
|||
} |
|||
|
|||
static { |
|||
map.put("克里斯托弗·巴特兰(Christophe Barraud)", "1303524###ldswfywyh###两党生物防御委员会数据采集"); |
|||
map.put("玛丽·杜邦(Marie Dupont)", "1303525###ldswfywyh###两党生物防御委员会数据采集"); |
|||
map.put("Dr. Kieran Moore", "1303526###ldswfywyh###两党生物防御委员会数据采集"); |
|||
map.put("Dr. Peter Donnelly", "1303527###ldswfywyh###两党生物防御委员会数据采集"); |
|||
map.put("Dr. Robert Kadlec", "1303528###mgkxy###美国科学院"); |
|||
map.put("Dr. Scott Weaver", "1303529###mgkxy###美国科学院"); |
|||
map.put("池荣美(Ji Young-mi)", "1303530###sjwszz###世界卫生组织"); |
|||
map.put("李尚熙(Lee Sang-hee)", "1303531###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Shmuel Shapira", "1303532###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Ronen Hazan", "1303533###sjwszz###世界卫生组织"); |
|||
map.put("Prof. Dr. Andreas Plückthun", "1303534###sjwszz###世界卫生组织"); |
|||
map.put("Prof. Dr. Michael Hall", "1303535###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Rochelle Walensky", "1303536###mgkxy###美国科学院"); |
|||
map.put("Dr. Anthony Fauci", "1303537###mgkxy###美国科学院"); |
|||
map.put("Mariano Esteban教授", "1303538###sjwszz###世界卫生组织"); |
|||
map.put("José María Valpuesta教授", "1303539###sjwszz###世界卫生组织"); |
|||
map.put("李跃志教授", "1303540###sjwszz###世界卫生组织"); |
|||
map.put("张华教授", "1303541###swkxaqzx###生命科学安全中心"); |
|||
map.put("Lynn Morris教授", "1303542###swkxaqzx###生命科学安全中心"); |
|||
map.put("Dr. Johan Neyts", "1303543###swkxaqzx###生命科学安全中心"); |
|||
map.put("Dr. Anne-Mieke Vandamme", "1303544###swkxaqzx###生命科学安全中心"); |
|||
map.put("Dr. Priya Abraham", "1303545###swkxaqzx###生命科学安全中心"); |
|||
map.put("Dr. A. C. Mishra", "1303546###swkxaqzx###生命科学安全中心"); |
|||
map.put("Dr. P. G. K. Pillai", "1303547###swkxaqzx###生命科学安全中心"); |
|||
map.put("Dr. M. S. Chadha", "1303548###swkxaqzx###生命科学安全中心"); |
|||
map.put("Dr. Tan Eng Kiong", "1303549###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Lim Poh Lian", "1303550###swaqaqyydxh###生物安全安全与应对协会"); |
|||
map.put("Dr. Wang Linfa", "1303551###swaqaqyydxh###生物安全安全与应对协会"); |
|||
map.put("Dr. Paul Nurse", "1303552###swaqaqyydxh###生物安全安全与应对协会"); |
|||
map.put("Dr. Jennifer Doudna", "1303553###swaqaqyydxh###生物安全安全与应对协会"); |
|||
map.put("Prof. Lothar H. Wieler", "1303554###swaqaqyydxh###生物安全安全与应对协会"); |
|||
map.put("Dr. Sandra Ciesek", "1303555###swaqaqyydxh###生物安全安全与应对协会"); |
|||
map.put("马克休托夫·里纳特·阿米罗维奇", "1303556###swaqaqyydxh###生物安全安全与应对协会"); |
|||
map.put("谢尔盖·涅恰耶夫", "1303557###sjwszz###世界卫生组织"); |
|||
map.put("邱香果(Xiangguo Qiu)", "1303558###sjwszz###世界卫生组织"); |
|||
map.put("程克定(Keding Cheng)", "1303559###swaqyswfyjh###生物安全与生物防御计划"); |
|||
map.put("弗兰克·普卢默(Frank Plummer)", "1303560###swaqyswfyjh###生物安全与生物防御计划"); |
|||
map.put("马修·吉尔摩(Matthew Gilmour)", "1303561###swaqyswfyjh###生物安全与生物防御计划"); |
|||
map.put("澳大利亚联邦科学与工业研究组织(CSIRO)", "1303562###swaqyswfyjh###生物安全与生物防御计划"); |
|||
map.put("澳大利亚动物健康实验室(AAHL)", "1303563###swaqyswfyjh###生物安全与生物防御计划"); |
|||
map.put("Dr. Ricardo Trindade", "1303564###swaqyswfyjh###生物安全与生物防御计划"); |
|||
map.put("Dr. Marly Babinski", "1303565###swaqyswfyjh###生物安全与生物防御计划"); |
|||
map.put("Dr. Wilson Teixeira", "1303566###swaqyswfyjh###生物安全与生物防御计划"); |
|||
map.put("Elisabeth", "1303567###swaqyswfyjh###生物安全与生物防御计划"); |
|||
map.put("Dr. Tetsuya Mizutani", "1303568###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Hiroshi Kida", "1303569###sjwszz###世界卫生组织"); |
|||
map.put("Dr. John Smith", "1303570###mgkxy###美国科学院"); |
|||
map.put("Dr. Emily Johnson", "1303571###mgkxy###美国科学院"); |
|||
map.put("Dr. William Patrick", "1303572###mgkxy###美国科学院"); |
|||
map.put("Dr. Deborah Watson", "1303573###mgkxy###美国科学院"); |
|||
map.put("Dr. Heinz Feldmann", "1303574###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Elke Mühlberger", "1303575###swaqhzyjyzx###生物安全合作与教育中心"); |
|||
map.put("Dr. Lucile Guitton", "1303576###swaqhzyjyzx###生物安全合作与教育中心"); |
|||
map.put("Dr. François Barre-Sinoussi", "1303577###swaqhzyjyzx###生物安全合作与教育中心"); |
|||
map.put("Dr. Claire Luby", "1303578###swaqhzyjyzx###生物安全合作与教育中心"); |
|||
map.put("雅克·格兰仕", "1303579###swaqhzyjyzx###生物安全合作与教育中心"); |
|||
map.put("袁志明", "1303580###swaqhzyjyzx###生物安全合作与教育中心"); |
|||
map.put("石正丽", "1303581###swaqhzyjyzx###生物安全合作与教育中心"); |
|||
map.put("所长", "1303582###swaqhzyjyzx###生物安全合作与教育中心"); |
|||
map.put("病毒学部门负责人", "1303583###swaqhzyjyzx###生物安全合作与教育中心"); |
|||
map.put("细菌学部门负责人", "1303584###swaqhzyjyzx###生物安全合作与教育中心"); |
|||
map.put("Dr. Juan Pérez", "1303585###sjwszz###世界卫生组织"); |
|||
map.put("Prof. Ana González", "1303586###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Carlos Rodríguez", "1303587###sjwszz###世界卫生组织"); |
|||
map.put("所长", "1303588###mgkxy###美国科学院"); |
|||
map.put("高级病毒学家", "1303589###mgkxy###美国科学院"); |
|||
map.put("生物防御专家", "1303590###mgkxy###美国科学院"); |
|||
map.put("拉伊萨·马卡罗娃(Raisa Makarova)", "1303591###sjwszz###世界卫生组织"); |
|||
map.put("安德烈·克拉斯诺夫(Andrey Krasnov)", "1303592###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Joseph DeRisi", "1303593###mgkxy###美国科学院"); |
|||
map.put("Professor Sharon Peacock", "1303594###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Matthew Gilmour", "1303595###swaqyjs###生物安全研究所"); |
|||
map.put("Dr. Gary Kobinger", "1303596###swaqyjs###生物安全研究所"); |
|||
map.put("洛塔尔·威勒(Lothar H. Wieler)", "1303597###swaqyjs###生物安全研究所"); |
|||
map.put("克里斯托弗·哈斯尔曼(Christopher Haasmann)", "1303598###swaqyjs###生物安全研究所"); |
|||
map.put("卡特琳·鲍曼(Katrin Baumann)", "1303599###swaqyjs###生物安全研究所"); |
|||
map.put("Dr. Deborah Middleton", "1303600###swaqyjs###生物安全研究所"); |
|||
map.put("Dr. David Williams", "1303601###swaqyjs###生物安全研究所"); |
|||
map.put("Dr. George Smith", "1303602###mgkxy###美国科学院"); |
|||
map.put("Dr. Emily Johnson", "1303603###mgkxy###美国科学院"); |
|||
map.put("Dr. Robert Liu", "1303604###mgkxy###美国科学院"); |
|||
map.put("Dr. Siti Rahmawati", "1303605###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Agus Salim", "1303606###swaqlm###生物安全联盟"); |
|||
map.put("Dr. Nia Kurniawati", "1303607###swaqlm###生物安全联盟"); |
|||
map.put("Dr. Emily Johnson", "1303608###swaqlm###生物安全联盟"); |
|||
map.put("Dr. William Smith", "1303609###swaqlm###生物安全联盟"); |
|||
map.put("Dr. Jennifer Lee", "1303610###swaqlm###生物安全联盟"); |
|||
map.put("Dr. Robert Thompson", "1303611###swaqlm###生物安全联盟"); |
|||
map.put("Dr. Yael Cohen", "1303612###swaqlm###生物安全联盟"); |
|||
map.put("Dr. David Ben-Haim", "1303613###swaqlm###生物安全联盟"); |
|||
map.put("Prof. Miriam Shmulovitz", "1303614###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Omar Al-Mansouri", "1303615###swaqyjjjh###生物安全研究基金会"); |
|||
map.put("Prof. Noura Al-Hamadi", "1303616###swaqyjjjh###生物安全研究基金会"); |
|||
map.put("Dr. Khaled Al-Zahrani", "1303617###swaqyjjjh###生物安全研究基金会"); |
|||
map.put("Dr. Ivan Petrov", "1303618###swaqyjjjh###生物安全研究基金会"); |
|||
map.put("Prof. Maria Ivchenko", "1303619###swaqyjjjh###生物安全研究基金会"); |
|||
map.put("Dr. Andriy Kovalchuk", "1303620###swaqyjjjh###生物安全研究基金会"); |
|||
map.put("Dr. Zhang Wei", "1303621###swaqyjjjh###生物安全研究基金会"); |
|||
map.put("Prof. Li Hua", "1303622###swaqyjjjh###生物安全研究基金会"); |
|||
map.put("Dr. Wang Lei", "1303623###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Jari Lehtinen", "1303624###sjwszz###世界卫生组织"); |
|||
map.put("Prof. Maria Nieminen", "1303625###swaqzx###生物安全中心"); |
|||
map.put("Dr. Marko Rautio", "1303626###swaqzx###生物安全中心"); |
|||
map.put("Dr. Pedro Silva", "1303627###swaqzx###生物安全中心"); |
|||
map.put("Prof. Ana Costa", "1303628###swaqzx###生物安全中心"); |
|||
map.put("Dr. Roberto Almeida", "1303629###swaqzx###生物安全中心"); |
|||
map.put("Dr. Kjersti A. K. Sørensen", "1303630###swaqzx###生物安全中心"); |
|||
map.put("Prof. Bjørn H. L. Hansen", "1303631###swaqzx###生物安全中心"); |
|||
map.put("Dr. Line M. Jacobsen", "1303632###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Laura De Vries", "1303633###sjwszz###世界卫生组织"); |
|||
map.put("Prof. David J. van der Meer", "1303634###swaqfyxm###生物安全防御项目"); |
|||
map.put("Dr. Katrien B. Hillewaert", "1303635###swaqfyxm###生物安全防御项目"); |
|||
map.put("Dr. Imran Khan", "1303636###swaqfyxm###生物安全防御项目"); |
|||
map.put("Prof. Ayesha Zafar", "1303637###swaqfyxm###生物安全防御项目"); |
|||
map.put("Dr. Faisal Malik", "1303638###swaqfyxm###生物安全防御项目"); |
|||
map.put("Dr. Els Bruggeman", "1303639###swaqfyxm###生物安全防御项目"); |
|||
map.put("Prof. Jan Vennema", "1303640###swaqfyxm###生物安全防御项目"); |
|||
map.put("Dr. Petra de Gier", "1303641###swaqfyxm###生物安全防御项目"); |
|||
map.put("Dr. Michael Huber", "1303642###swaqfyxm###生物安全防御项目"); |
|||
map.put("Prof. Stéphanie Démoulin", "1303643###swaqfyxm###生物安全防御项目"); |
|||
map.put("Dr. Antoine Sauvage", "1303644###sjwszz###世界卫生组织"); |
|||
map.put("Prof. Rebecca McCauley", "1303645###adlyswaqyjs###澳大利亚生物安全研究所"); |
|||
map.put("Dr. Peter Zhang", "1303646###adlyswaqyjs###澳大利亚生物安全研究所"); |
|||
map.put("Dr. Sarah Williams", "1303647###adlyswaqyjs###澳大利亚生物安全研究所"); |
|||
map.put("Prof. David Anderson", "1303648###mgkxy###美国科学院"); |
|||
map.put("Dr. Sarah Jenkins", "1303649###mgkxy###美国科学院"); |
|||
map.put("Dr. John Carter", "1303650###mgkxy###美国科学院"); |
|||
map.put("Dr. Michael Harris", "1303651###mgkxy###美国科学院"); |
|||
map.put("Dr. Emily Taylor", "1303652###mgkxy###美国科学院"); |
|||
map.put("Dr. Jean-Luc Dubois", "1303653###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Isabelle Frossard", "1303654###swaqyjhfyzx###生物安全预警和反应中心"); |
|||
map.put("Dr. Anna Korhonen", "1303655###swaqyjhfyzx###生物安全预警和反应中心"); |
|||
map.put("Dr. Marko Virtanen", "1303656###swaqyjhfyzx###生物安全预警和反应中心"); |
|||
map.put("Dr. Peter Smith", "1303657###swaqyjhfyzx###生物安全预警和反应中心"); |
|||
map.put("Dr. Emma Johnson", "1303658###swaqyjhfyzx###生物安全预警和反应中心"); |
|||
map.put("Dr. Sarah Williams", "1303659###swaqyjhfyzx###生物安全预警和反应中心"); |
|||
map.put("Dr. James Thompson", "1303660###swaqyjhfyzx###生物安全预警和反应中心"); |
|||
map.put("郭巍", "1303661###swaqyjhfyzx###生物安全预警和反应中心"); |
|||
map.put("张晶", "1303662###swaqyjhfyzx###生物安全预警和反应中心"); |
|||
map.put("Dr. Nadia Benhammou", "1303663###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Mohamed Khellaf", "1303664###swaqhswfxgl###生物安全和生物风险管理"); |
|||
map.put("Dr. Laura Tontodonati", "1303665###swaqhswfxgl###生物安全和生物风险管理"); |
|||
map.put("Dr. Marco Ferretti", "1303666###swaqhswfxgl###生物安全和生物风险管理"); |
|||
map.put("教授A", "1303667###swaqhswfxgl###生物安全和生物风险管理"); |
|||
map.put("教授B", "1303668###swaqhswfxgl###生物安全和生物风险管理"); |
|||
map.put("Anna Kowalska", "1303669###swaqhswfxgl###生物安全和生物风险管理"); |
|||
map.put("Piotr Nowak", "1303670###swaqhswfxgl###生物安全和生物风险管理"); |
|||
map.put("Jan Novak", "1303671###swaqhswfxgl###生物安全和生物风险管理"); |
|||
map.put("Marta Vancova", "1303672###researchgate###ResearchGate"); |
|||
map.put("Petr Smejkal", "1303673###researchgate###ResearchGate"); |
|||
map.put("Lucia Polakova", "1303674###researchgate###ResearchGate"); |
|||
map.put("Carlos Mendez", "1303675###researchgate###ResearchGate"); |
|||
map.put("Ana López", "1303676###researchgate###ResearchGate"); |
|||
map.put("Luis Pérez", "1303677###googlescholar###Google Scholar"); |
|||
map.put("María Fernández", "1303678###googlescholar###Google Scholar"); |
|||
map.put("Maria Silva", "1303679###googlescholar###Google Scholar"); |
|||
map.put("José Martins", "1303680###googlescholar###Google Scholar"); |
|||
map.put("Ana Costa", "1303681###googlescholar###Google Scholar"); |
|||
map.put("Luís Ferreira", "1303682###googlescholar###Google Scholar"); |
|||
map.put("Dr. Michael J. Katze", "1303683###mgkxy###美国科学院"); |
|||
map.put("Dr. Deborah L. Bonner", "1303684###mgkxy###美国科学院"); |
|||
map.put("Dr. Jennifer M. Gommerman", "1303685###mgkxy###美国科学院"); |
|||
map.put("Dr. Luis A. Rodriguez", "1303686###mgkxy###美国科学院"); |
|||
map.put("Dr. Celia Carlos", "1303687###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Eva Cutiongco-Dela Paz", "1303688###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Hans Jacobsen", "1303689###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Lars M. Petersen", "1303690###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Maria Andersen", "1303691###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Erik Nystrøm", "1303692###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Tan Chee Kheong", "1303693###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Lim Mei Ling", "1303694###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Wong Sze Wah", "1303695###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Lee Kai Fong", "1303696###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Carlos Guzmán", "1303697###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Maria Fernandez", "1303698###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Juan Pérez", "1303699###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Ana María López", "1303700###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Carlos Restrepo", "1303701###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Lucía Ramírez", "1303702###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Thandeka Moyo", "1303703###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Sipho Dlamini", "1303704###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Carla van der Merwe", "1303705###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Ananya Srisakdi", "1303706###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Supachai Sittisart", "1303707###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Praphat Jantrapirom", "1303708###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Antti Karjalainen", "1303709###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Jari Lappalainen", "1303710###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Mika Nieminen", "1303711###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Arun K. Singh", "1303712###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Priya Yadav", "1303713###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Rajesh Kumar", "1303714###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Maria González", "1303715###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Xavier Carrillo", "1303716###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Pilar Rodríguez", "1303717###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Alejandro Gómez", "1303718###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Laura Martínez", "1303719###sjwszz###世界卫生组织"); |
|||
map.put("Dr. José Hernández", "1303720###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Alexei Smirnov", "1303721###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Natalia Petrovna", "1303722###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Ivan Pavlov", "1303723###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Ji Hyun Lee", "1303724###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Sung Ho Kim", "1303725###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Soojung Park", "1303726###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Martin Müller", "1303727###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Julia Schneider", "1303728###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Thomas Richter", "1303729###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Ahmed Al-Fahad", "1303730###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Fatimah Al-Saif", "1303731###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Saeed Al-Dosari", "1303732###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Didier Raoult", "1303733###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Sophie Dufresne", "1303734###sjwszz###世界卫生组织"); |
|||
map.put("Dr. Jean-Luc Valleron", "1303735###sjwszz###世界卫生组织"); |
|||
map.put("Rolf Horstmann", "1303736###sjwszz###世界卫生组织"); |
|||
map.put("Raoul Tan", "1303737###sjwszz###世界卫生组织"); |
|||
} |
|||
} |
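// NOTE (added): values here pack three fields as "taskId###enSource###source"
// (e.g. "1303702###sjwszz###世界卫生组织"). An illustrative unpacking (names are only for the sketch):
//
//     String[] parts = AllKeys6815.getMap().get(author).toString().split("###");
//     String taskId   = parts[0];   // numeric task id
//     String enSource = parts[1];   // pinyin abbreviation of the source
//     String source   = parts[2];   // human-readable source name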
|||
@@ -0,0 +1,260 @@
|||
package com.zyzs.otherdatasave.util; |
|||
|
|||
import java.util.HashMap; |
|||
import java.util.Map; |
|||
|
|||
public class AllKeysPatent { |
|||
private static Map<String, Object> map = new HashMap<>();
|||
|
|||
public static Map<String, Object> getMap() { |
|||
return map; |
|||
} |
|||
|
|||
public static void setMap(Map<String, Object> map) { |
|||
AllKeysPatent.map = map; |
|||
} |
|||
|
|||
static { |
|||
map.put("Montpellier Institute of Virology, France","numontpellier"); |
|||
map.put("Korea National Institute of Infectious Diseases (KCDC)","nkdca"); |
|||
map.put("Israel Institute of Life Sciences","nweizmann"); |
|||
map.put("South African National Institute of Infectious Diseases","nnicd"); |
|||
map.put("National Institute of Virology, India","nniovi"); |
|||
map.put("Singapore Biomedical Research Centre (BMRC)","nastar"); |
|||
map.put("Francis Crick Institute, UK","nplymouth"); |
|||
map.put("Vino Itzberg Institute of Virology, Russia","nvniiv"); |
|||
map.put("Australian Biosafety Research Centre (ABSL-4)","ncsiro"); |
|||
map.put("Fernando von Beren Institute, Brazil","nfiocruz"); |
|||
map.put("Institute of Virology, Marburg, Germany","niovmg"); |
|||
map.put("National Institute of Infectious Diseases, Japan","nniidj"); |
|||
map.put("Institute of Virology, University of Buenos Aires, Argentina","nivubaa"); |
|||
map.put("USAMRIID","nusamriid"); |
|||
map.put("Russian State Research Center for Virology and Biotechnology","nrsrcfvb"); |
|||
map.put("Robert Koch Institute, Germany","nrkig"); |
|||
map.put("Institute of Virology, University of Georgia, USA","nivugusa"); |
|||
map.put("Indonesia Institute of Biosafety","niib"); |
|||
map.put("New Zealand National Institute of Virology","nnniv"); |
|||
map.put("Israel National Institute of Virology","ninv"); |
|||
map.put("Abu Dhabi Institute for Biosafety, United Arab Emirates","naDifb"); |
|||
map.put("Ukrainian Virology Research Center","nuvrc"); |
|||
map.put("Beijing Institute of Virology, China","nbivc"); |
|||
map.put("Rio de Janeiro Institute of Virology, Brazil","nrjivb"); |
|||
map.put("Norwegian Institute of Public Health","nnih"); |
|||
map.put("Pakistan Biosafety Research Center","npbrc"); |
|||
map.put("Netherlands National Institute of Public Health and Environment","nnniphe"); |
|||
map.put("Sydney Institute of Virology, Australia","nsiva"); |
|||
map.put("Finnish National Institute of Infectious Diseases","nfniid"); |
|||
map.put("Victoria Institute of Virology, UK","nmanchester"); |
|||
map.put("Pasteur Institute in Algeria","npasteur"); |
|||
map.put("Italian Institute of Tropical Diseases","niss"); |
|||
map.put("Uppsala Virology Research Center, Sweden","nuvrcs"); |
|||
map.put("Polish Medical Research Center","npmrc"); |
|||
map.put("Czech Republic Infectious Disease Research Center","nsz"); |
|||
map.put("Costa Rica Virology Research Center","ncrvrc"); |
|||
map.put("Lisbon Institute of Virology, Portugal","nmedicina"); |
|||
map.put("Philippine Institute of Tropical Medicine","nritmgovph"); |
|||
map.put("Danish Institute for Biosafety","nsstdk"); |
|||
map.put("Argentine National Vaccine Research Center","nconicetgovar"); |
|||
map.put("Columbia Virology Research Center","ncvrc"); |
|||
map.put("South African Institute of Infectious Diseases","nsaioid"); |
|||
map.put("Thailand Biosafety and Vaccine Research Center","nnvigoth"); |
|||
map.put("Indian Biosafety Research Center","nicmrgov"); |
|||
map.put("Barcelona Institute of Infectious Diseases, Spain","nbioids"); |
|||
map.put("Mexican Institute of Public Health","ninspmp"); |
|||
map.put("Russian Institute of Virology and Biosafety","nvector"); |
|||
map.put("Korea Virus Research Institute","nnihgokr"); |
|||
map.put("Leibniz Virus Research Center in Germany","nleibniz"); |
|||
map.put("Marseille Institute of Infectious Diseases, France","ninserm"); |
|||
map.put("Bernhard Nocht Institute of Tropical Medicine (BNITM)","nbnitm"); |
|||
map.put("Erasmus University Medical Center (Erasmus MC)","nerasmusmc"); |
|||
map.put("Health Research Board of Ireland (HRB) Clinical Research Facility","nhrboicrf"); |
|||
map.put("Marine Institute of Ireland","nmi"); |
|||
map.put("Serum Institute of India","nsioi"); |
|||
map.put("Vaccine Production Unit, Pasteur Institute of Greece","nvpupiog"); |
|||
map.put("Department of Microbiology, National and Kapodistrian University of Athens Medical School","ndomnkuoams"); |
|||
map.put("Defence Research and Development Establishment (DRDE)","ndrde"); |
|||
map.put("German Ministry of Defense Institute of Microbiology","ngmodiom"); |
|||
map.put("Institute of Virology, University of Marburg","nivoum"); |
|||
map.put("Fritz Lipmann Institute (Federal Research Institute for Animal Health)","nflifrioah"); |
|||
map.put("Bernhard Nocht Institute for Tropical Medicine","nbniotm"); |
|||
map.put("Robert Koch Institute (RKI) Center for Biological Threats and Special Pathogens","nrkicbtp"); |
|||
map.put("G. Eliava Institute of Bacteriophages, Microbiology and Virology","ngeiobmav"); |
|||
map.put("Swiss National Reference Centre for Novel Virus Infection","nsnrcnvi"); |
|||
map.put("DGA Maîtrise NRBC","ndgamnrbc"); |
|||
map.put("Finnish Food Safety Authority (Ruokavirasto)","nffsa"); |
|||
map.put("Finnish Defence Forces Military Medical Centre","nfdfmmc"); |
|||
map.put("Danish National Centre for Biosafety and Biological Readiness (CBB)","ndncbbr"); |
|||
map.put("BIOPHARM","nbiopharm"); |
|||
map.put("National Research Council of Canada","nnrcc"); |
|||
map.put("Defence Research and Development Canada - Valcartier","ndrdcv"); |
|||
map.put("Defence Research and Development Centre of Canada - Saffield Division (DRDC Suffield)","ndrdccsd"); |
|||
map.put("Cambodia National Institute of Public Health","ncnip"); |
|||
map.put("Pastel Institute Cambodia (IPC)","npic"); |
|||
map.put("Department of Infectious Microbiology, Stephan Angeloff Institute of Microbiology, Bulgarian Academy of Sciences","ndimsaimbas"); |
|||
map.put("Bulgarian National Institute of Veterinary Research for Diagnostic Research(NDVRMI)","nbnivrfdr"); |
|||
map.put("Federal University of Rio de Janeiro","nfurj"); |
|||
map.put("Brazil Army Biological Institute","nbabi"); |
|||
map.put("Republican Research and Practical Center for Epidemiology and Microbiology (RRPCEM)","nrrpcem"); |
|||
map.put("NBC & Environmental Protection Technology Division, Armaments and Defence Technology Agency, Austrian Federal Ministry of Defence","nnbcepteptadmod"); |
|||
map.put("Australia Biological Defence Research Division, Defence Science and Technology Group (DSTG)","nabdrddstg"); |
|||
map.put("Australian Centre for Disease Preparedness (ACDP)","nacdp"); |
|||
map.put("Armed Forces Medical Research Institute (AFRIMS)","nafrims"); |
|||
map.put("Kharkiv Experimental and Clinical Veterinary Institute (IEKVM)","nkecvi"); |
|||
map.put("National Institute of Biotechnology and Microbial Strain Scientific Control,Ukraine","nnibmscua"); |
|||
map.put("Scientific and Practical Center for Expertise and Monitoring in Hygiene and Epidemiology of the Ministry of Health of Kazakhstan (SPCSEEM)","nspcseem"); |
|||
map.put("The Third Division of the Naval Medical Research Center of the United States Military","nttdtnmrcusm"); |
|||
map.put("Australian Defence Force Malaria and Infectious Disease Institute","nadfmidi"); |
|||
map.put("The Asian Branch of the United States Naval Medical Research Center","ntabtnmrc"); |
|||
map.put("U.S. Naval Medical Research Unit Two","nusnmru2"); |
|||
map.put("Institute of Medical Sciences of the United States Armed Forces","nimsusaf"); |
|||
map.put("Uralsk Plague Prevention Station of the Ministry of Health of Kazakhstan","nuppsmohk"); |
|||
map.put("Indonesian Institute of Biosecurity Research and Development","niibrd"); |
|||
map.put(" India Biosafety Research Centre","nibrc"); |
|||
map.put("National Institute for Infectious Diseases, Lazzaro Spallanzani Hospital","nniidlsh"); |
|||
map.put("Israel Institute for Biological Research,IIBR","niibr"); |
|||
map.put(" New Zealand National Institute of Virology","nnzniv"); |
|||
map.put(" Barcelona Institute for Global Health (ISGlobal)","nbifgh"); |
|||
map.put("Institute of Virology, Ukrainian Health Ministry","nivuhm"); |
|||
map.put(" Thailand National Vaccine Institute","ntnvi"); |
|||
map.put(" Lisbon Virology Institute","nlvi"); |
|||
map.put(" University of Georgia Virology Institute","nugvi"); |
|||
map.put(" United States Army Medical Research Institute of Infectious Diseases (USAMRIID)","nusam"); |
|||
map.put(" Czech Republic Centre for Infectious Diseases Research","ncrcidr"); |
|||
map.put("National Institute of Public Health and the Environment, Netherlands","nniphren"); |
|||
map.put(" Korea Institute of Virology","nkiv"); |
|||
map.put("Institut de Recherche en Infectiologie de Marseille","nirim"); |
|||
map.put(" Virology and Biosafety Research Institute","nvbri"); |
|||
map.put("Russian State Research Center for Virology and Biotechnology","nsrcvb"); |
|||
map.put(" Leibniz Institute for Virology","nliv"); |
|||
map.put("Institute of Microbiology Prof. Paulo de Góes","nimppdg"); |
|||
map.put(" Danish Biosafety Institute","ndbi"); |
|||
map.put("National Center for Laboratory and Epidemiology, Laos","nnclel"); |
|||
map.put("Pasteur Institute of Laos","npil"); |
|||
map.put(" Virology Institute of the University of Buenos Aires","nviuba"); |
|||
map.put("The United States Air Force Academy of Aeronautics and Astronautics","nusaafaaa"); |
|||
map.put("Walter Reed Army Research Institute","nwrrari"); |
|||
map.put("Tripler Army Medical Center, Hawaii","ntamch"); |
|||
map.put("Waltridge Army Research Institute Georgia Unit (USAMRD-G)","nwraig"); |
|||
map.put("U.S. Naval Medical Research Unit Two, Phnom Penh Detachment","nnmru2ppd"); |
|||
map.put("U.S. Army Medical Research Unit Kenya","namruk"); |
|||
map.put("Mbarara Medical Research Center","nmrc"); |
|||
map.put("THE FOOD SAFETY INSPECTION BODY OF THE REPUBLIC OF ARMENIA","ntfsibra"); |
|||
map.put("Russian Ministry of Defense Central Research Institute No. 48","nrmdcrin48"); |
|||
map.put("Federal Research Institution: Federal Consumer Rights and Personnel Welfare Supervision Bureau Rostov Microbiology and Parasitology Research Institute","nfriFcrpwsbrmpri"); |
|||
map.put("National Center for Disease Control and Prevention, Ministry of Health, Armenia","nncdpmha"); |
|||
map.put("DSO National Laboratory, Defense Medical and Environmental Research Institute","ndsnldmeri"); |
|||
map.put("University of Ljubljana Faculty of Veterinary Medicine, National Veterinary Institute","nulfvmnvi"); |
|||
map.put("University of Ljubljana Medical Faculty Institute of Microbiology and Immunology","nulmfimi"); |
|||
map.put("Biomedical Division of Protection Technology Laboratory (a department of ARMSCOR SOC)","nbdptladas"); |
|||
map.put("Lviv Institute of Epidemiology and Hygiene","nlieh"); |
|||
map.put("National Public Health Center of the Ministry of Health of Ukraine","nnphcmh"); |
|||
map.put("Odessa Branch of the National Institute of Laboratory Diagnostics and Veterinary Sanitary Expertise","nobnildvse"); |
|||
map.put("National Scientific Center of Experimental and Clinical Veterinary Medicine of the National Academy of Agricultural Sciences of Ukraine","nnscecvmnaas"); |
|||
map.put("National Veterinary Institute SVA","nnvisva"); |
|||
map.put("National Institute of Laboratory Diagnostics and Veterinary Sanitary Expertise,Ukraine","nnildvse"); |
|||
map.put("Research and Training Center for Animal Disease Diagnosis of the Veterinary Institute of the National Academy of Agricultural Sciences (RTCADD),Ukraine","nrtcaddviinaas"); |
|||
map.put("Institute of Medical Virology","nimv"); |
|||
map.put("Institute of Virology and Immunology","nvai"); |
|||
map.put("Azerbaijan State Housing Department Hygiene and Epidemiology Control Center","nashdhecc"); |
|||
map.put("Tbilisi Institute of Microbiology, Virology and Bacteriophage, Georgia","ntimvbg"); |
|||
map.put("Threat Reduction Support Center, Kutaisi, Georgia","ntrsckg"); |
|||
map.put("National Center for Infectious Disease Control in Tbilisi, Georgia","nncidctg"); |
|||
map.put("Richard Lugar Center for Public Health Research, Georgia","nrlcphr"); |
|||
map.put("National Reference Center for Emerging Viral Infections","nnrcevi"); |
|||
map.put("Research Institute of the Ministry of Science and Education of Kazakhstan (RIBSP)","nrimsek"); |
|||
map.put("National Biotechnology Center (NCB) of the Ministry of Education and Science of Kazakhstan","nncbmesk"); |
|||
map.put("University Hospital Galway","nuhg"); |
|||
map.put("National Defense Medical College","nndmc"); |
|||
map.put("National Veterinary Reference Center of the Veterinary Control and Supervision Committee of the Ministry of Agriculture of Kazakhstan","nnvrcvcsckma"); |
|||
map.put("National Public Health Authority of Sierra Leone","nnphasl"); |
|||
map.put("Walter Reed Army Research Institute Unit in Kenya (USAMRD-K)","nwraruik"); |
|||
map.put(" Abu Dhabi Institute of Biosafety","naidb"); |
|||
map.put("The Sixth Division of the United States Naval Medical Research Center","ntdtnmrc"); |
|||
map.put("Canada National Center for Exotic Animal Diseases","ncncead"); |
|||
map.put("Finnish Defense Research Agency (FDRA)","nfdra"); |
|||
map.put("Viral Zoonoses Group, Department of Virology","nvzgdov"); |
|||
map.put("French Army Institute of Biomedical Research (IRBA)","nfaiobr"); |
|||
map.put("France Military Biomedical Research Institute","nfmbri"); |
|||
map.put("France Nuclear, Biological, Chemical and Radiological Control Center of the General Administration of Weapons and Equipment","nfnbcarccotga"); |
|||
map.put("German Defense Institute – ABC-Schutz (Bundeswehr Institute for Protection Technology and Nuclear, Biological and Chemical Protection)","ngdias"); |
|||
// map.put("National Institute for Infectious Diseases "L. Spallanzani" (INMI) IRCCS","nnifidls"); |
|||
map.put("Istituto Zooprofilattico Sperimentale dell'Abruzzo e del Molise (IZSAM)","nizsdaedm"); |
|||
map.put("Scientific Department of the Army Medical Center","nsdotamc"); |
|||
map.put("Istituto Superiore di Sanità (ISS) – National Institute of Health","nisdsnioh"); |
|||
map.put("Istituto Zooprofilattico Sperimentale della Lombardia e dell’Emilia-Romagna (IZSLER) – Pavia Section","nizsdlederp"); |
|||
map.put("Istituto Zooprofilattico Sperimentale delle Venezie (IZSVE)","nizsdv"); |
|||
map.put("Nuclear, Biological and Chemical (NBC) Joint Logistic Technical Centre","nnbacjltc"); |
|||
// map.put("National Society of Infectious Diseases "Lazzaro Spallanzani" (INMI)","nnsoidls"); |
|||
map.put("Canada National Reference Centre for Exotic Animal Diseases and National Reference Centre for Animal Brucellosis","ncnrcfeadanrc"); |
|||
map.put("ANTHRAX REFERENCE INSTITUTE OF ITALY OF ISTITUTO ZOOPROFILATTICO SPERIMENTALE OF PUGLIA AND BASILICATA","narioizs"); |
|||
map.put("Murayama Annex National Institute for Infectious Diseases","nmanifd"); |
|||
map.put("RIKEN Tsukuba Institute","nrti"); |
|||
map.put("Acquisition, Technology & Logistics Agency Ground System Research Center","natlagsrc"); |
|||
map.put("National Veterinary Reference Center, Regional Branch of the Committee for Veterinary Control and Supervision of the Ministry of Agriculture of Kazakhstan","nnvrcrbotcfvc"); |
|||
map.put("Biosafety Research Institute of the Committee of Science, Ministry of Education and Science of Kazakhstan (Bacterial and Viral Infectious Disease Monitoring, BSL-3)","nbriotcosmoeasa"); |
|||
map.put("Biosafety Research Institute of the Committee of Science, Ministry of Education and Science of Kazakhstan (Microbial Specimen Collection)","nbriotcosmoeasb"); |
|||
map.put("Biosafety Research Institute of the Committee of Science, Ministry of Education and Science of Kazakhstan","nbriotcosmoeasc"); |
|||
map.put("Aikimbayev National Center for High-Risk Infections, Kazakhstan","nancfhrik"); |
|||
map.put("National Influenza Center","nic"); |
|||
map.put("Kenya Veterinary Vaccines Production Institute","nkvvpi"); |
|||
map.put("Veterinary Diagnostic and Identification Center of the Ministry of Agriculture of the Kyrgyz Republic","nvdaicotmoaotkr"); |
|||
map.put("National Quarantine and High-Risk Infectious Diseases Center of the Ministry of Health of the Kyrgyz Republic","nnqahridcotmohotk"); |
|||
map.put("Duyisheyev Kyrgyz Veterinary Science Research Institute of the Ministry of Education and Science of the Kyrgyz Republic","ndkvsriotmoeas"); |
|||
map.put("Latvian Biomedical Research and Study Centre","nlbrasc"); |
|||
map.put("Veterinary Research Institute","nvri"); |
|||
map.put("Public Health Research Institute","nphri"); |
|||
map.put("National Institute for Public Health and the Environment (RIVM)","nnifphate"); |
|||
map.put("Netherlands Organization for Applied Scientific Research (TNO) - Nuclear, Chemical, and Biological Defense Division","nnofasrncab"); |
|||
map.put("Netherlands Organization for Applied Scientific Research (TNO) - Nuclear, Chemical, and Biological Defense","nnofasrncaba"); |
|||
map.put("Wageningen Bioveterinary Research Institute (WBVR)","nwbri"); |
|||
map.put("National Veterinary Research Institute","nnvri"); |
|||
map.put("Norwegian Defence Research Establishment (FFI)","ndre"); |
|||
map.put("Norwegian Institute of Public Health (FHI)","nnioph"); |
|||
map.put("Biological Research Center (BRC), Defence Science and Technology Organization (DESTO)","nbrcdsato"); |
|||
map.put("National Institute for Public Health - National Health Institute BSL3 NIPH-NIH","nifphnhi"); |
|||
map.put("Quarantine, Invasion, and Transgenic Organism Research Center","nqiatorc"); |
|||
map.put("Karol Kaczkowski Military Institute of Health and Epidemiology","nkkmiohae"); |
|||
map.put("Institute of Animal Health","nioah"); |
|||
map.put("Department of Plant Pharmacology and Pharmaceutical Technology (DFGTF)","ndoppapt"); |
|||
map.put("Faculty of Science, University of Lisbon (FCUL)","nfosuol"); |
|||
map.put("Institute of Chemical and Biological Technology, New Lisbon University (ITQB-NOVA)","niocabtnlu"); |
|||
map.put("Institute of Health and Tropical Medicine, New Lisbon University","niohatmnlu"); |
|||
map.put("Institute of Life and Health Sciences","niolahs"); |
|||
map.put("Military Medical Research Center","nmmrc"); |
|||
map.put("National Institute for Infectious Diseases, Bucharest","nnifidb"); |
|||
map.put("National Medical Military Research and Development Institute","nnmmradi"); |
|||
map.put("Institute of Public Health in Health and Veterinary Sciences","niophihavs"); |
|||
map.put("Veterinary Biologics and Drug Control Institute","nvbadci"); |
|||
map.put("National Pasteur Institute (Bucharest Branch)","nnpi"); |
|||
// map.put("Federal Budget Institution "Russian Ministry of Defense Central Research Institute No. 48"","nfbirmodcri"); |
|||
// map.put("Federal Budget Institution "Russian Ministry of Defense Central Research Institute No. 48" Branch (Kirov)","nfbirmodcria"); |
|||
// map.put("Federal Budget Institution "Russian Ministry of Defense Central Research Institute No. 48" Branch (Yekaterinburg)","nfbirmodcrib"); |
|||
map.put("Russian Ministry of Defense National Military Medical Scientific Test Institute","nfbirmodcric"); |
|||
// map.put("Federal Research Institution: Federal Consumer Rights and Personnel Welfare Supervision Bureau "Vector" National Virus and Biotechnology Science Center","nfbirmodcrid"); |
|||
map.put("Federal Health Institution: Federal Consumer Rights and Personnel Welfare Supervision Bureau Volga Plague Research Institute","nfbirmodcrie"); |
|||
map.put("Federal Research Institution: National Applied Microbiology and Biotechnology Science Center","nfbirmodcrif"); |
|||
map.put("Federal Health Institution: Federal Consumer Rights and Personnel Welfare Supervision Bureau Siberian and Far Eastern Irkutsk Plague Research Institute","nfbirmodcrih"); |
|||
// map.put("Federal Health Institution: Federal Consumer Rights and Personnel Welfare Supervision Bureau "Microbe" Plague Research Institute","nfbirmodcriai"); |
|||
map.put("Federal Health Institution: Federal Consumer Rights and Personnel Welfare Supervision Bureau Rostov-on-Don Plague Research Institute","nfbirmodcrij"); |
|||
map.put("Federal Health Institution: Federal Consumer Rights and Personnel Welfare Supervision Bureau Stavropol Plague Research Institute","nfbirmodcrio"); |
|||
map.put("Federal Research Institution: Federal Research Center Russian Ministry of Health Gamaleya Institute of Epidemiology and Microbiology","nfbirmodcrip"); |
|||
// map.put("Federal Research Institution: Federal Research Center Russian Ministry of Health Gamaleya Institute of Epidemiology and Microbiology, Branch "Ivanov Virology Institute"","nfbirmodcriqa"); |
|||
map.put("Federal Research Institution: Russian Ministry of Health Smorodin Flu Research Institute","nfbirmodcrir"); |
|||
map.put("Federal Budget Institution: Federal Animal and Plant Health Supervision Bureau All-Russian Animal Health Protection Center","nfbirmodcris"); |
|||
//map.put("Federal Research Institution: Russian Ministry of Agriculture "Federal Toxicology, Radiation, and Biological Safety Center","nfbirmodcrit"); |
|||
// map.put("Federal Budget Institution: Russian Ministry of Health Gamaleya Institute of Epidemiology and Microbiology "Gamaleya" Branch","nfbirmodcriq"); |
|||
map.put("Federal National Budget Science Institution: Russian Academy of Sciences Chumakov Federal Immunobiological Preparation Research and Development Science Center","nfbirmodcriu"); |
|||
map.put("CBRN Defense and Safety Department, Swedish Defense Research Agency (FOI)","ncdasdsdra"); |
|||
map.put("Air Force Research Laboratory (AFRL), 711 HPW","nafrl"); |
|||
map.put("Argonne National Laboratory (ANL","nanl"); |
|||
map.put("Northwest Pacific National Laboratory (PNNL)","nnpnl"); |
|||
map.put("The Food and Drug Administration (FDA) White Oak Campus","ntfadawqc"); |
|||
map.put("University Park Campus of the Food and Drug Administration (FDA","nupcotfada"); |
|||
map.put("Food and Drug Administration (FDA) Moffitt Campus","nfadamc"); |
|||
map.put("A scientific research institution for eliminating invasive weeds","nasrifriw"); |
|||
map.put("Poultry Research Laboratory of Southeast Region","nprlossr"); |
|||
map.put("Pacific Northwest National Laboratory (PNNL)","npnnl"); |
|||
map.put("Centers for Disease Control and Prevention, National Center for Environmental Health (NCEH), Laboratory Science Division (DLS)","ncfdcapncfeh"); |
|||
map.put("Flower and nursery plant research at the Beltzville Agricultural Research Center (BARC)","nfanpratbarc"); |
|||
} |
|||
} |
|||
@ -0,0 +1,315 @@ |
|||
package com.zyzs.otherdatasave.util; |
|||
|
|||
import org.apache.commons.lang3.StringUtils; |
|||
import org.apache.log4j.Logger; |
|||
|
|||
import java.text.ParseException; |
|||
import java.text.SimpleDateFormat; |
|||
import java.time.LocalDateTime; |
|||
import java.time.format.DateTimeFormatter; |
|||
import java.util.Date; |
|||
import java.util.regex.Matcher; |
|||
import java.util.regex.Pattern; |
|||
|
|||
|
|||
public class DataCheckUtil { |
|||
|
|||
public static Pattern datePattrn = Pattern.compile("^\\d{4}\\-\\d{2}\\-\\d{2}\\s\\d{2}\\:\\d{2}:\\d{2}$"); |
|||
|
|||
public static Pattern dayPattrn = Pattern.compile("^\\d{2,4}\\-\\d{1,2}\\-\\d{1,2}$"); |
|||
|
|||
private static SimpleDateFormat ddf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
|
|||
public static Pattern p = Pattern.compile("\\s+"); |
|||
|
|||
private static final Logger LOG = Logger.getLogger(DataCheckUtil.class); |
|||
|
|||
public static String chechData2(String dataStr){ |
|||
dataStr = dataStr.replace("Z",""); |
|||
dataStr = checkData(dataStr); |
|||
Matcher matcher = datePattrn.matcher(dataStr); |
|||
if(!matcher.find()){ |
|||
System.out.println("格式错误,使用当前时间 : " + dataStr); |
|||
dataStr = DateUtil.getDateTime(); |
|||
}else{ |
|||
dataStr = matcher.group(0); |
|||
} |
|||
return dataStr; |
|||
} |
|||
|
|||
public static String checkData(String dataStr){ |
|||
SimpleDateFormat ddf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
if(StringUtils.isBlank(dataStr)){ |
|||
return ddf.format(new Date()); |
|||
} |
|||
if(dataStr.contains("-:")){ |
|||
dataStr = dataStr.replace("-:",":"); |
|||
} |
|||
if(dataStr.contains(":-")){ |
|||
dataStr = dataStr.replace(":-",":"); |
|||
} |
|||
|
|||
Matcher matcher = datePattrn.matcher(dataStr); |
|||
|
|||
if(!matcher.find()){ |
|||
dataStr = dataStr.trim(); |
|||
if(!p.matcher(dataStr).find()){ |
|||
if(!dayPattrn.matcher(dataStr).find()){ |
|||
return ddf.format(new Date()); |
|||
} |
|||
} |
|||
|
|||
String[] dates = dataStr.split("\\s+"); |
|||
String years = ""; |
|||
String times = ""; |
|||
if(dates.length == 2){ |
|||
years = dates[0]; |
|||
times = dates[1]; |
|||
}else{ |
|||
years = dates[0]; |
|||
} |
|||
|
|||
if(years.contains("/")){ |
|||
years = years.replace("/", "-"); |
|||
} |
|||
String[] yearStr = years.split("-"); |
|||
String yms = "" ; |
|||
if(yearStr.length == 3){ |
|||
String year = yearStr[0]; |
|||
String month = yearStr[1]; |
|||
String day = yearStr[2]; |
|||
if(year.length() == 2){ |
|||
year = "20"+year; |
|||
} |
|||
if(month.length() == 1){ |
|||
month = "0"+month; |
|||
} |
|||
if(day.length() == 1){ |
|||
day = "0"+day; |
|||
} |
|||
yms = year+"-"+month+"-"+day; |
|||
} |
|||
|
|||
String hms = ""; |
|||
if(StringUtils.isBlank(times)){ |
|||
hms = "00:00:00"; |
|||
}else{ |
|||
times = times.replace("/", ":"); |
|||
if(times.contains(":")){ |
|||
String[] timeStr = times.split(":"); |
|||
if( timeStr.length >= 3 ){ |
|||
String hours = timeStr[0]; |
|||
String mins = timeStr[1]; |
|||
String s = timeStr[2]; |
|||
|
|||
if(hours.length() == 1){ |
|||
hours = "0"+hours; |
|||
} |
|||
if(mins.length() == 1){ |
|||
mins = "0"+mins; |
|||
} |
|||
if(s.length() == 1){ |
|||
s = "0"+s; |
|||
} |
|||
hms = hours+":"+mins+":"+s; |
|||
}else if(timeStr.length == 2){ |
|||
String hours = timeStr[0]; |
|||
String mins = timeStr[1]; |
|||
String s = "00"; |
|||
if(hours.length() == 1){ |
|||
hours = "0"+hours; |
|||
} |
|||
if(mins.length() == 1){ |
|||
mins = "0"+mins; |
|||
} |
|||
hms = hours+":"+mins+":"+s; |
|||
} else { |
|||
String hours = timeStr[0]; |
|||
String mins = "00" ; |
|||
String s = "00"; |
|||
if(hours.length() == 1){ |
|||
hours = "0"+hours; |
|||
} |
|||
hms = hours+":"+mins+":"+s; |
|||
} |
|||
}else{ |
|||
if(isNum(times) && times.length()==2){ |
|||
hms = times+":00:00"; |
|||
}else if(isNum(times) && times.length()==1){ |
|||
hms = "0"+times+":00:00"; |
|||
}else{ |
|||
hms = "00:00:00" ; |
|||
} |
|||
} |
|||
} |
|||
if(StringUtils.isBlank(yms)){ |
|||
return ddf.format(new Date()); |
|||
} |
|||
if(yms != "" || hms != ""){ |
|||
return yms+" "+hms; |
|||
} |
|||
} |
|||
return dataStr ; |
|||
} |
|||
|
|||
private static boolean isNum(String time){ |
|||
Pattern p = Pattern.compile("\\d+"); |
|||
if(p.matcher(time).find()){ |
|||
return true ; |
|||
} |
|||
return false ; |
|||
} |
|||
|
|||
public static String convertString(String inputDate){ |
|||
|
|||
String outputFormat = "yyyy-MM-dd"; |
|||
// Define the input format |
|||
DateTimeFormatter inputFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); |
|||
// Parse the input string into a LocalDateTime |
|||
LocalDateTime dateTime = LocalDateTime.parse(inputDate, inputFormatter); |
|||
// Format it with the desired output pattern |
|||
String formattedDate = dateTime.format(DateTimeFormatter.ofPattern(outputFormat)); |
|||
return formattedDate ; |
|||
} |
|||
|
|||
|
|||
|
|||
public static String convertStringTotime(String datetime){ |
|||
if(StringUtils.isBlank(datetime)){ |
|||
return DateUtil.getDateTime(System.currentTimeMillis()); |
|||
} |
|||
String creationTime = ""; |
|||
if(datetime.length() == 13){ |
|||
creationTime = DateUtil.getDateTime(Long.valueOf(datetime)); |
|||
}else{ |
|||
creationTime = DateUtil.getDateTime(Long.valueOf(datetime) *1000); |
|||
} |
|||
return creationTime ; |
|||
|
|||
} |
|||
|
|||
public static long convertStringToLong(String datetime){ |
|||
if(StringUtils.isBlank(datetime)){ |
|||
return System.currentTimeMillis(); |
|||
} |
|||
long creationTime ; |
|||
if(datetime.length() == 13){ |
|||
creationTime = Long.valueOf(datetime); |
|||
}else{ |
|||
creationTime = Long.valueOf(datetime) *1000; |
|||
} |
|||
return creationTime ; |
|||
} |
|||
|
|||
public static long convertTimeTotime(String datetime){ |
|||
if(StringUtils.isBlank(datetime)){ |
|||
return System.currentTimeMillis() / 1000; |
|||
} |
|||
long creationTime ; |
|||
if(datetime.length() == 13){ |
|||
creationTime = Long.valueOf(datetime) / 1000; |
|||
}else{ |
|||
creationTime = Long.valueOf(datetime) ; |
|||
} |
|||
return creationTime ; |
|||
|
|||
} |
|||
|
|||
|
|||
public static long convertDateTotime(String datetime){ |
|||
if(StringUtils.isBlank(datetime)){ |
|||
return System.currentTimeMillis() / 1000; |
|||
} |
|||
long creationTime = 0; |
|||
try { |
|||
SimpleDateFormat ddf1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
creationTime = Long.valueOf(ddf1.parse(datetime).getTime()) / 1000; |
|||
} catch (Exception e) { |
|||
// TODO Auto-generated catch block |
|||
e.printStackTrace(); |
|||
} |
|||
return creationTime ; |
|||
|
|||
} |
|||
|
|||
public static String getCurrentTime(){ |
|||
long dateTime = System.currentTimeMillis() ; |
|||
SimpleDateFormat ddf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
return ddf.format(new Date(dateTime)); |
|||
} |
|||
|
|||
public static String getCurrentTime(long dateTime){ |
|||
SimpleDateFormat ddf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
return ddf.format(new Date(dateTime)); |
|||
} |
|||
|
|||
public static String getDate(long dateTime){ |
|||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX"); |
|||
return sdf.format(new Date(dateTime)); |
|||
} |
|||
|
|||
public static String getDate(String dateTime){ |
|||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX"); |
|||
SimpleDateFormat ddf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
try { |
|||
Date date = ddf.parse(dateTime) ; |
|||
return sdf.format(date); |
|||
} catch (ParseException e) { |
|||
// TODO Auto-generated catch block |
|||
e.printStackTrace(); |
|||
LOG.error("DataCheckUtil getDate() err data:"+dateTime); |
|||
|
|||
} |
|||
return sdf.format(new Date()); |
|||
} |
|||
|
|||
public static long getDay(long dateTime){ |
|||
try{ |
|||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); |
|||
String dayStr = sdf.format(new Date(dateTime)); |
|||
Date date = sdf.parse(dayStr); |
|||
return date.getTime(); |
|||
}catch(Exception e){ |
|||
e.printStackTrace(); |
|||
LOG.error("DataCheckUtil getDay() err data:"+dateTime); |
|||
} |
|||
return 0; |
|||
} |
|||
|
|||
public static long getDay(String dateTime){ |
|||
try{ |
|||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); |
|||
Date date = sdf.parse(dateTime); |
|||
return date.getTime(); |
|||
}catch(Exception e){ |
|||
e.printStackTrace(); |
|||
LOG.error("DataCheckUtil getDay2() err data:"+dateTime); |
|||
} |
|||
return 0; |
|||
} |
|||
|
|||
|
|||
// public static void main(String[] args) { |
|||
// //System.out.println(checkData("")); |
|||
// /*System.out.println(System.currentTimeMillis()); |
|||
// System.out.println(Calendar.getInstance().getTimeInMillis() / 1000); |
|||
// System.out.println(new Date().getTime() / 1000); |
|||
// System.out.println(DateUtil.getDateTime((System.currentTimeMillis() / 1000) * 1000)); |
|||
// System.out.println(convertStringTotime("1558077405")); |
|||
// System.out.println(convertTimeTotime(null));*/ |
|||
// //System.out.println(DateUtil.getTimeMillis("2019-03-01 01:01:01")); |
|||
// |
|||
// /*String aa = DataCheckUtil.convertStringTotime("1563245342"); |
|||
// System.out.println(aa);*/ |
|||
// /*SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); |
|||
// try { |
|||
// Date date = sdf.parse("2019-03-01"); |
|||
// System.out.println(date.getTime()); |
|||
// } catch (ParseException e) { |
|||
// // TODO Auto-generated catch block |
|||
// e.printStackTrace(); |
|||
// }*/ |
|||
// System.out.println(getDate("2019-03-01 01:01:01")); |
|||
// } |
|||
|
|||
} |
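// ---------------------------------------------------------------------------
// Usage sketch (editor addition, not part of the original commit). The demo class name is
// hypothetical; it only illustrates how DataCheckUtil normalizes loose date strings: slashes are
// converted to dashes, single-digit fields are zero-padded, a missing time becomes 00:00:00, and
// convertStringTotime treats 13-digit values as epoch milliseconds and everything else as seconds.
class DataCheckUtilDemo {
    public static void main(String[] args) {
        System.out.println(DataCheckUtil.checkData("2019/3/1 7:5"));          // expected: 2019-03-01 07:05:00
        System.out.println(DataCheckUtil.checkData("2019-3-1"));              // expected: 2019-03-01 00:00:00
        System.out.println(DataCheckUtil.convertStringTotime("1558077405"));  // epoch seconds -> local date-time
    }
}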
|||
@ -0,0 +1,936 @@ |
|||
package com.zyzs.otherdatasave.util; |
|||
|
|||
import org.apache.commons.logging.Log; |
|||
import org.apache.commons.logging.LogFactory; |
|||
|
|||
import java.text.ParseException; |
|||
import java.text.ParsePosition; |
|||
import java.text.SimpleDateFormat; |
|||
import java.util.Calendar; |
|||
import java.util.Date; |
|||
import java.util.GregorianCalendar; |
|||
|
|||
|
|||
/** |
|||
* Date handling utility class. |
|||
*/ |
|||
|
|||
public class DateUtil { |
|||
//~ Static fields/initializers ============================================= |
|||
|
|||
|
|||
private static Log log = LogFactory.getLog(DateUtil.class); |
|||
private static String defaultDatePattern = null; |
|||
private static String timePattern = "HH:mm"; |
|||
public static final String TS_FORMAT = DateUtil.getDatePattern() + " HH:mm:ss.S"; |
|||
private static Calendar cale = Calendar.getInstance(); |
|||
|
|||
|
|||
//~ Methods ================================================================ |
|||
|
|||
public DateUtil(){ |
|||
} |
|||
|
|||
/** |
|||
* Returns the server's current date and time as a yyyy-MM-dd HH:mm:ss string. |
|||
*/ |
|||
public static String getDateTime(){ |
|||
try{ |
|||
SimpleDateFormat datetime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
return datetime.format(Calendar.getInstance().getTime()); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getDateTime():" + e.getMessage()); |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Formats the given epoch-millisecond timestamp as a yyyy-MM-dd HH:mm:ss string. |
|||
*/ |
|||
public static String getDateTime(long date){ |
|||
try{ |
|||
SimpleDateFormat datetime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
return datetime.format(new Date(date)); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getDateTime():" + e.getMessage()); |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the server's current date as a yyyy-MM-dd string. |
|||
*/ |
|||
public static String getDate(){ |
|||
try{ |
|||
SimpleDateFormat date = new SimpleDateFormat("yyyy-MM-dd"); |
|||
return date.format(Calendar.getInstance().getTime()); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getDate():" + e.getMessage()); |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the server's current time as an HH:mm:ss string. |
|||
*/ |
|||
public static String getTime(){ |
|||
String temp = ""; |
|||
try{ |
|||
SimpleDateFormat time = new SimpleDateFormat("HH:mm:ss"); |
|||
temp += time.format(cale.getTime()); |
|||
return temp; |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getTime():" + e.getMessage()); |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the current hour of day (0-23). |
|||
*/ |
|||
public static int getHour(){ |
|||
int temp = 0; |
|||
try{ |
|||
temp = Calendar.getInstance().get(Calendar.HOUR_OF_DAY); |
|||
return temp; |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getTime():" + e.getMessage()); |
|||
return 0; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Parses the given yyyy-MM-dd HH:mm:ss string and returns its hour of day. |
|||
*/ |
|||
@SuppressWarnings("static-access") |
|||
public static int getHour(String fromdate){ |
|||
try{ |
|||
SimpleDateFormat date = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
GregorianCalendar gCal = new GregorianCalendar(); |
|||
Date datetime = date.parse(fromdate) ; |
|||
gCal.setTime(datetime) ; |
|||
return gCal.get(gCal.HOUR_OF_DAY); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.addDay():" + e.toString()); |
|||
return 0; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the current date-time minus one hour, formatted as yyyy-MM-dd HH:mm:ss. |
|||
*/ |
|||
public static String getbeforeHour(){ |
|||
try{ |
|||
Calendar calendar = Calendar.getInstance(); |
|||
calendar.setTime(new Date()); |
|||
calendar.set(Calendar.HOUR, calendar.get(Calendar.HOUR) - 1);// subtract one hour from the current time |
|||
SimpleDateFormat date = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
date.format(calendar.getTime()); |
|||
return date.format(calendar.getTime()); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.addDay():" + e.toString()); |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
|
|||
|
|||
/** |
|||
* Returns the current date-time minus five minutes (despite the name), formatted as yyyy-MM-dd HH:mm:ss. |
|||
*/ |
|||
public static String TgetbeforeHour(){ |
|||
try{ |
|||
// Create a SimpleDateFormat defining the date-time pattern |
|||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
// Optionally set a time zone; the system default is used otherwise |
|||
//sdf.setTimeZone(TimeZone.getTimeZone("GMT")); |
|||
// Get the current time via a Calendar instance |
|||
Calendar calendar = Calendar.getInstance(); |
|||
// Subtract five minutes |
|||
calendar.add(Calendar.MINUTE, -5); |
|||
// Format the Calendar value with the SimpleDateFormat |
|||
String formattedDate = sdf.format(calendar.getTime()); |
|||
|
|||
return formattedDate; |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.addDay():" + e.toString()); |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
|
|||
public static int getMinute(){ |
|||
int temp = 0; |
|||
try{ |
|||
temp = Calendar.getInstance().get(Calendar.MINUTE); |
|||
return temp; |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getTime():" + e.getMessage()); |
|||
return 0; |
|||
} |
|||
} |
|||
|
|||
|
|||
/** |
|||
* Default start date for statistics queries, |
|||
* i.e. January 1st of the current year. |
|||
*/ |
|||
public static String getStartDate(){ |
|||
try{ |
|||
return getYear() + "-01-01"; |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getStartDate():" + e.getMessage()); |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Default end date for statistics queries (today's date). |
|||
*/ |
|||
public static String getEndDate(){ |
|||
try{ |
|||
return getDate(); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getEndDate():" + e.getMessage()); |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
|
|||
/** |
|||
* Returns the year of the server's current date. |
|||
*/ |
|||
public static String getYear(){ |
|||
try{ |
|||
// Calendar returns an int, so convert it to a String |
|||
return String.valueOf(cale.get(Calendar.YEAR)); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getYear():" + e.getMessage()); |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the month of the server's current date, zero-padded to two digits. |
|||
*/ |
|||
public static String getMonth(){ |
|||
try{ |
|||
// Use a DecimalFormat to zero-pad the month |
|||
java.text.DecimalFormat df = new java.text.DecimalFormat(); |
|||
df.applyPattern("00"); |
|||
return df.format((cale.get(Calendar.MONTH) + 1)); |
|||
//return String.valueOf(cale.get(Calendar.MONTH) + 1); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getMonth():" + e.getMessage()); |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the day of month of the server's current date. |
|||
*/ |
|||
public static String getDay(){ |
|||
try{ |
|||
return String.valueOf(cale.get(Calendar.DAY_OF_MONTH)); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getDay():" + e.getMessage()); |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the difference in days between two yyyy-MM-dd dates; |
|||
* the first date is expected to be later than the second. |
|||
*/ |
|||
public static int getDays(String date1,String date2){ |
|||
int margin; |
|||
try{ |
|||
SimpleDateFormat date = new SimpleDateFormat("yyyy-MM-dd"); |
|||
ParsePosition pos = new ParsePosition(0); |
|||
ParsePosition pos1 = new ParsePosition(0); |
|||
Date dt1 = date.parse(date1,pos); |
|||
Date dt2 = date.parse(date2,pos1); |
|||
long l = dt1.getTime() - dt2.getTime(); |
|||
margin = (int)(l / (24 * 60 * 60 * 1000)); |
|||
return margin; |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getDays():" + e.toString()); |
|||
return 0; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the difference in hours between two yyyy-MM-dd HH:mm:ss date-times; |
|||
* the first date is expected to be later than the second. |
|||
*/ |
|||
public static int getHours(String date1,String date2){ |
|||
int margin; |
|||
try{ |
|||
SimpleDateFormat datetime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
ParsePosition pos = new ParsePosition(0); |
|||
ParsePosition pos1 = new ParsePosition(0); |
|||
Date dt1 = datetime.parse(date1,pos); |
|||
Date dt2 = datetime.parse(date2,pos1); |
|||
long l = dt1.getTime() - dt2.getTime(); |
|||
margin = (int)(l / ( 60 * 60 * 1000)); |
|||
return margin; |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getHours():" + e.toString()); |
|||
return 0; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the difference in minutes between two yyyy-MM-dd HH:mm:ss date-times; |
|||
* the first date is expected to be later than the second. |
|||
*/ |
|||
public static int getMinutes(String date1,String date2){ |
|||
int margin; |
|||
try{ |
|||
SimpleDateFormat datetime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
ParsePosition pos = new ParsePosition(0); |
|||
ParsePosition pos1 = new ParsePosition(0); |
|||
Date dt1 = datetime.parse(date1,pos); |
|||
Date dt2 = datetime.parse(date2,pos1); |
|||
long l = dt1.getTime() - dt2.getTime(); |
|||
margin = (int)(l / ( 60 * 1000)); |
|||
return margin; |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getMinutes():" + e.toString()); |
|||
return 0; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Parses the given yyyy-MM-dd HH:mm:ss string and returns its minute field. |
|||
*/ |
|||
@SuppressWarnings("static-access") |
|||
public static int getMinutes(String fromdate){ |
|||
try{ |
|||
SimpleDateFormat date = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
GregorianCalendar gCal = new GregorianCalendar(); |
|||
Date datetime = date.parse(fromdate) ; |
|||
gCal.setTime(datetime) ; |
|||
return gCal.get(gCal.MINUTE); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.addDay():" + e.toString()); |
|||
return 0; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the difference in seconds between two yyyy-MM-dd HH:mm:ss date-times; |
|||
* the first date is expected to be later than the second. |
|||
*/ |
|||
public static int getSeconds(String date1,String date2){ |
|||
int margin; |
|||
try{ |
|||
SimpleDateFormat datetime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
Date dt1 = datetime.parse(date1); |
|||
Date dt2 = datetime.parse(date2); |
|||
long dateintvlong = dt1.getTime() - dt2.getTime(); |
|||
margin = (int)(dateintvlong /1000); |
|||
return margin; |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getSeconds():" + e.toString()); |
|||
return 0; |
|||
} |
|||
} |
|||
|
|||
|
|||
/** |
|||
* Returns the difference in days (as a double) between two yyyy-MM-dd HH:mm:ss date-times; |
|||
* the first date is expected to be later than the second. |
|||
*/ |
|||
public static double getDoubledays(String date1,String date2){ |
|||
double margin; |
|||
try{ |
|||
SimpleDateFormat datetime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
ParsePosition pos = new ParsePosition(0); |
|||
ParsePosition pos1 = new ParsePosition(0); |
|||
Date dt1 = datetime.parse(date1,pos); |
|||
Date dt2 = datetime.parse(date2,pos1); |
|||
long l = dt1.getTime() - dt2.getTime(); |
|||
margin = (l / (24 * 60 * 60 * 1000.00)); |
|||
return margin; |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getMargin():" + e.toString()); |
|||
return 0; |
|||
} |
|||
} |
|||
|
|||
|
|||
/** |
|||
* Returns the difference in months between two yyyy-MM-dd dates. |
|||
*/ |
|||
public static int getMonthMargin(String date1,String date2){ |
|||
int margin; |
|||
try{ |
|||
margin = (Integer.parseInt(date2.substring(0,4)) - Integer.parseInt(date1.substring(0,4)))* 12; |
|||
margin += (Integer.parseInt(date2.substring(4,7).replaceAll("-0","-")) - Integer.parseInt(date1.substring(4,7).replaceAll("-0","-"))); |
|||
return margin; |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getMargin():" + e.toString()); |
|||
return 0; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the given date plus i days, as yyyy-MM-dd. |
|||
*/ |
|||
public static String addDay(String fromdate,int i){ |
|||
try{ |
|||
SimpleDateFormat date = new SimpleDateFormat("yyyy-MM-dd"); |
|||
GregorianCalendar gCal = new GregorianCalendar(Integer.parseInt(fromdate.substring(0,4)),Integer.parseInt(fromdate.substring(5,7))-1,Integer.parseInt(fromdate.substring(8,10))); |
|||
gCal.add(GregorianCalendar.DATE,i); |
|||
return date.format(gCal.getTime()); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.addDay():" + e.toString()); |
|||
return getDate(); |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the current date plus i days, as yyyy-MM-dd. |
|||
*/ |
|||
public static String addDay(int i){ |
|||
try{ |
|||
SimpleDateFormat date = new SimpleDateFormat("yyyy-MM-dd"); |
|||
Calendar cale = Calendar.getInstance() ; |
|||
cale.add(Calendar.DAY_OF_MONTH, i) ; |
|||
return date.format(cale.getTime()); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.addDay():" + e.toString()); |
|||
return getDate(); |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the given date plus i months, as yyyy-MM-dd. |
|||
*/ |
|||
public static String addMonth(String fromdate,int i){ |
|||
try{ |
|||
SimpleDateFormat date = new SimpleDateFormat("yyyy-MM-dd"); |
|||
GregorianCalendar gCal = new GregorianCalendar(Integer.parseInt(fromdate.substring(0,4)),Integer.parseInt(fromdate.substring(5,7))-1,Integer.parseInt(fromdate.substring(8,10))); |
|||
gCal.add(GregorianCalendar.MONTH,i); |
|||
return date.format(gCal.getTime()); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.addMonth():" + e.toString()); |
|||
return getDate(); |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the given date plus i years, as yyyy-MM-dd. |
|||
*/ |
|||
public static String addYear(String fromdate,int i){ |
|||
try{ |
|||
SimpleDateFormat date = new SimpleDateFormat("yyyy-MM-dd"); |
|||
GregorianCalendar gCal = new GregorianCalendar(Integer.parseInt(fromdate.substring(0,4)),Integer.parseInt(fromdate.substring(5,7))-1,Integer.parseInt(fromdate.substring(8,10))); |
|||
gCal.add(GregorianCalendar.YEAR,i); |
|||
return date.format(gCal.getTime()); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.addYear():" + e.toString()); |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
|
|||
/** |
|||
* Returns the number of days in the given month of the given year. |
|||
*/ |
|||
public static int getMaxDay(String year,String month){ |
|||
int day = 0; |
|||
try{ |
|||
int iyear = Integer.parseInt(year); |
|||
int imonth = Integer.parseInt(month); |
|||
if(imonth == 1 || imonth == 3 || imonth == 5 || imonth == 7 || imonth == 8 || imonth == 10 || imonth == 12){ |
|||
day = 31; |
|||
} else if(imonth == 4 || imonth == 6 || imonth == 9 || imonth == 11){ |
|||
day = 30; |
|||
} else if((0 == (iyear % 4)) && (0 != (iyear % 100)) || (0 == (iyear % 400))){ |
|||
day = 29; |
|||
} else{ |
|||
day = 28; |
|||
} |
|||
return day; |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getMonthDay():" + e.toString()); |
|||
return 1; |
|||
} |
|||
} |
|||
|
|||
|
|||
|
|||
/** |
|||
* Parses a loosely formatted date string and shifts it by Span on the given Calendar field. |
|||
*/ |
|||
@SuppressWarnings("static-access") |
|||
public String rollDate(String orgDate,int Type,int Span){ |
|||
try{ |
|||
String temp = ""; |
|||
int iyear,imonth,iday; |
|||
int iPos = 0; |
|||
char seperater = '-'; |
|||
if(orgDate == null || orgDate.length() < 6){ |
|||
return ""; |
|||
} |
|||
|
|||
iPos = orgDate.indexOf(seperater); |
|||
if(iPos > 0){ |
|||
iyear = Integer.parseInt(orgDate.substring(0,iPos)); |
|||
temp = orgDate.substring(iPos + 1); |
|||
} else{ |
|||
iyear = Integer.parseInt(orgDate.substring(0,4)); |
|||
temp = orgDate.substring(4); |
|||
} |
|||
|
|||
iPos = temp.indexOf(seperater); |
|||
if(iPos > 0){ |
|||
imonth = Integer.parseInt(temp.substring(0,iPos)); |
|||
temp = temp.substring(iPos + 1); |
|||
} else{ |
|||
imonth = Integer.parseInt(temp.substring(0,2)); |
|||
temp = temp.substring(2); |
|||
} |
|||
|
|||
imonth--; |
|||
if(imonth < 0 || imonth > 11){ |
|||
imonth = 0; |
|||
} |
|||
|
|||
iday = Integer.parseInt(temp); |
|||
if(iday < 1 || iday > 31) |
|||
iday = 1; |
|||
|
|||
Calendar orgcale = Calendar.getInstance(); |
|||
orgcale.set(iyear,imonth,iday); |
|||
temp = this.rollDate(orgcale,Type,Span); |
|||
return temp; |
|||
}catch(Exception e){ |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
public static String rollDate(Calendar cal,int Type,int Span){ |
|||
try{ |
|||
SimpleDateFormat date = new SimpleDateFormat("yyyy-MM-dd"); |
|||
String temp = ""; |
|||
Calendar rolcale; |
|||
rolcale = cal; |
|||
rolcale.add(Type,Span); |
|||
temp = date.format(rolcale.getTime()); |
|||
return temp; |
|||
}catch(Exception e){ |
|||
return ""; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* |
|||
* Returns the default date pattern (yyyy-MM-dd). |
|||
* |
|||
*/ |
|||
public static synchronized String getDatePattern() { |
|||
defaultDatePattern = "yyyy-MM-dd"; |
|||
return defaultDatePattern; |
|||
} |
|||
|
|||
/** |
|||
* Formats the given date with the default pattern, e.g. yyyy-MM-dd. |
|||
*/ |
|||
public static final String getDate(Date aDate) { |
|||
SimpleDateFormat df = null; |
|||
String returnValue = ""; |
|||
|
|||
if (aDate != null) { |
|||
df = new SimpleDateFormat(getDatePattern()); |
|||
returnValue = df.format(aDate); |
|||
} |
|||
|
|||
return (returnValue); |
|||
} |
|||
|
|||
|
|||
|
|||
/** |
|||
* Returns the time portion of the given date, using the default time pattern (HH:mm). |
|||
*/ |
|||
public static String getTimeNow(Date theTime) { |
|||
return getDateTime(timePattern, theTime); |
|||
} |
|||
|
|||
/** |
|||
* Returns a Calendar set to the current date (time truncated to the default date pattern). |
|||
*/ |
|||
public Calendar getToday() throws ParseException { |
|||
Date today = new Date(); |
|||
SimpleDateFormat df = new SimpleDateFormat(getDatePattern()); |
|||
String todayAsString = df.format(today); |
|||
Calendar cal = new GregorianCalendar(); |
|||
cal.setTime(convertStringToDate(todayAsString)); |
|||
return cal; |
|||
} |
|||
|
|||
/** |
|||
* Formats the given date with the specified pattern. |
|||
*/ |
|||
public static final String getDateTime(String aMask, Date aDate) { |
|||
SimpleDateFormat df = null; |
|||
String returnValue = ""; |
|||
|
|||
if (aDate == null) { |
|||
log.error("aDate is null!"); |
|||
} else { |
|||
df = new SimpleDateFormat(aMask); |
|||
returnValue = df.format(aDate); |
|||
} |
|||
return (returnValue); |
|||
} |
|||
|
|||
/** |
|||
* Formats the given date with the default date pattern. |
|||
*/ |
|||
public static final String convertDateToString(Date aDate) { |
|||
return getDateTime(getDatePattern(), aDate); |
|||
} |
|||
|
|||
|
|||
/** |
|||
* Parses a date string using the specified pattern. |
|||
* @param aMask the date pattern, e.g. yyyy-MM-dd |
|||
* @param strDate the date string to parse |
|||
*/ |
|||
|
|||
public static final Date convertStringToDate(String aMask, String strDate) |
|||
throws ParseException { |
|||
SimpleDateFormat df = null; |
|||
Date date = null; |
|||
df = new SimpleDateFormat(aMask); |
|||
|
|||
if (log.isDebugEnabled()) { |
|||
log.debug("converting '" + strDate + "' to date with mask '" |
|||
+ aMask + "'"); |
|||
} |
|||
try { |
|||
date = df.parse(strDate); |
|||
} catch (ParseException pe) { |
|||
log.error("ParseException: " + pe); |
|||
throw pe; |
|||
} |
|||
return (date); |
|||
} |
|||
|
|||
/** |
|||
* Parses a date string using the default date pattern. |
|||
*/ |
|||
public static Date convertStringToDate(String strDate) |
|||
throws ParseException { |
|||
Date aDate = null; |
|||
|
|||
try { |
|||
if (log.isDebugEnabled()) { |
|||
log.debug("converting date with pattern: " + getDatePattern()); |
|||
} |
|||
aDate = convertStringToDate(getDatePattern(), strDate); |
|||
} catch (ParseException pe) { |
|||
log.error("Could not convert '" + strDate |
|||
+ "' to a date, throwing exception"); |
|||
throw new ParseException(pe.getMessage(), |
|||
pe.getErrorOffset()); |
|||
|
|||
} |
|||
|
|||
return aDate; |
|||
} |
|||
|
|||
/** |
|||
* Returns the current date-time formatted with the default SimpleDateFormat pattern. |
|||
*/ |
|||
public static String getSimpleDateFormat(){ |
|||
SimpleDateFormat formatter=new SimpleDateFormat(); |
|||
String NDateTime=formatter.format(new Date()); |
|||
return NDateTime; |
|||
} |
|||
|
|||
/** |
|||
* Compares two date strings in yyyy-MM-dd HH:mm:ss format. |
|||
* @param last the first date string to compare |
|||
* @param now the second date string to compare |
|||
* @return true if last is before now, false if last is after now (false when equal) |
|||
*/ |
|||
public static boolean compareTo(String last, String now) { |
|||
try { |
|||
SimpleDateFormat formatter = new SimpleDateFormat( |
|||
"yyyy-MM-dd HH:mm:ss"); |
|||
Date temp1 = formatter.parse(last); |
|||
Date temp2 = formatter.parse(now); |
|||
if (temp1.after(temp2)) |
|||
return false; |
|||
else if (temp1.before(temp2)) |
|||
return true; |
|||
} catch (ParseException e) { |
|||
log.debug(e.getMessage()); |
|||
} |
|||
return false; |
|||
} |
|||
|
|||
|
|||
/** |
|||
* Compares two date strings in yyyy-MM-dd format. |
|||
* @param last the first date string to compare |
|||
* @param now the second date string to compare |
|||
* @return true if last is before or equal to now, false otherwise |
|||
*/ |
|||
public static boolean compareToForBBS(String last, String now) { |
|||
try { |
|||
if(last.equals(now)) |
|||
return true; |
|||
SimpleDateFormat formatter = new SimpleDateFormat( |
|||
"yyyy-MM-dd"); |
|||
Date temp1 = formatter.parse(last); |
|||
Date temp2 = formatter.parse(now); |
|||
if (temp1.after(temp2)) |
|||
return false; |
|||
else if (temp1.before(temp2)) |
|||
return true; |
|||
} catch (ParseException e) { |
|||
log.debug(e.getMessage()); |
|||
} |
|||
return false; |
|||
} |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
/** |
|||
* Sets the time of the given query date to the start of the day (00:00:00). |
|||
* @return |
|||
*/ |
|||
@SuppressWarnings("deprecation") |
|||
public static Date addStartTime(Date param) { |
|||
Date date = param; |
|||
try{ |
|||
date.setHours(0); |
|||
date.setMinutes(0); |
|||
date.setSeconds(0); |
|||
return date; |
|||
}catch(Exception ex){ |
|||
return date; |
|||
} |
|||
} |
|||
|
|||
|
|||
|
|||
/** |
|||
* Sets the time of the given query date to the end of the day (23:59:00). |
|||
* @return |
|||
*/ |
|||
@SuppressWarnings("deprecation") |
|||
public static Date addEndTime(Date param) { |
|||
Date date = param; |
|||
try{ |
|||
date.setHours(23); |
|||
date.setMinutes(59); |
|||
date.setSeconds(0); |
|||
return date; |
|||
}catch(Exception ex){ |
|||
return date; |
|||
} |
|||
} |
|||
|
|||
|
|||
|
|||
/** |
|||
* Returns the number of days in the given month of the current year. |
|||
* @return the total number of days in that month |
|||
*/ |
|||
@SuppressWarnings("deprecation") |
|||
public static String getMonthLastDay(int month) |
|||
{ |
|||
Date date=new Date(); |
|||
int[][] day={{0,31,28,31,30,31,30,31,31,30,31,30,31}, |
|||
{0,31,29,31,30,31,30,31,31,30,31,30,31}}; |
|||
int year=date.getYear()+1900; |
|||
if(year%4==0 && year%100!=0 || year%400==0) |
|||
{ |
|||
return day[1][month]+""; |
|||
} |
|||
else |
|||
{ |
|||
return day[0][month]+""; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Returns the number of days in the given month of the given year. |
|||
* @return the total number of days in that month |
|||
*/ |
|||
public static String getMonthLastDay(int year,int month) |
|||
{ |
|||
int[][] day={{0,31,28,31,30,31,30,31,31,30,31,30,31}, |
|||
{0,31,29,31,30,31,30,31,31,30,31,30,31}}; |
|||
if(year%4==0 && year%100!=0 || year%400==0) |
|||
{ |
|||
return day[1][month]+""; |
|||
} |
|||
else |
|||
{ |
|||
return day[0][month]+""; |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Builds a timestamp string from the current time. |
|||
* @return |
|||
*/ |
|||
@SuppressWarnings("deprecation") |
|||
public static String getTimestamp(){ |
|||
Date date=new Date(); |
|||
String timestamp=""+(date.getYear()+1900)+date.getMonth()+date.getDate()+date.getMinutes()+date.getSeconds()+date.getTime(); |
|||
return timestamp; |
|||
} |
|||
/** |
|||
* Builds a timestamp string from the given time. |
|||
* @return |
|||
*/ |
|||
@SuppressWarnings("deprecation") |
|||
public static String getTimestamp(Date date){ |
|||
String timestamp=""+(date.getYear()+1900)+date.getMonth()+date.getDate()+date.getMinutes()+date.getSeconds()+date.getTime(); |
|||
return timestamp; |
|||
} |
|||
|
|||
|
|||
public static Date getDate(String time) { |
|||
Date date = new Date(); |
|||
try { |
|||
SimpleDateFormat datetime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
date = datetime.parse(time); |
|||
} catch (ParseException e) { |
|||
e.printStackTrace(); |
|||
} |
|||
return date; |
|||
} |
|||
|
|||
|
|||
public static long getTimeMillis(String datetime){ |
|||
long timemillis = 0 ; |
|||
Calendar cal = Calendar.getInstance(); |
|||
Date date = getDate(datetime) ; |
|||
cal.setTime(date) ; |
|||
timemillis = cal.getTimeInMillis() ; |
|||
return timemillis ; |
|||
} |
|||
public static long getcurr(){ |
|||
Date date = new Date(); |
|||
Long l_date = date.getTime(); |
|||
return l_date; |
|||
} |
|||
// Returns the epoch-millisecond timestamp of one hour ago |
|||
public static long getbeforonecurr(){ |
|||
try { |
|||
Date date = new Date(); |
|||
Long l_date = System.currentTimeMillis(); |
|||
return l_date-60*60*1000; |
|||
} catch (Exception e) { |
|||
return 0L; |
|||
// e.printStackTrace(); |
|||
} |
|||
} |
|||
|
|||
public static long getday(){ |
|||
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); |
|||
Calendar calendar = Calendar.getInstance(); |
|||
try { |
|||
return dateFormat.parse(dateFormat.format(calendar.getTime())).getTime(); |
|||
} catch (ParseException e) { |
|||
return 0L; |
|||
} |
|||
} |
|||
|
|||
|
|||
|
|||
|
|||
public static long getsmallSec(String datetime1,String datetime2){ |
|||
long time1 = 0 ; |
|||
long time2 = 0 ; |
|||
long time = 0 ; |
|||
if(datetime1!=null){ |
|||
time1 = getTimeMillis(datetime1) ; |
|||
} |
|||
if(datetime2!=null){ |
|||
time2 = getTimeMillis(datetime2) ; |
|||
} |
|||
if(time1==0){ |
|||
time = time2 ; |
|||
}else if(time2==0){ |
|||
time = time1 ; |
|||
}else if(time1>time2){ |
|||
time = time2 ; |
|||
}else{ |
|||
time = time1 ; |
|||
} |
|||
time = time/1000 ; |
|||
return time ; |
|||
} |
|||
|
|||
/** |
|||
* Returns the current date-time with the given Calendar field shifted by n. |
|||
* @param calendarField the Calendar field to modify |
|||
* @param calc "add" or "sub" |
|||
* @param n the amount to add or subtract |
|||
* @return the adjusted date-time as a yyyy-MM-dd HH:mm:ss string |
|||
*/ |
|||
public static String getDatetimeNfieldgap(int calendarField,String calc,int n){ |
|||
try{ |
|||
SimpleDateFormat datetime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
|||
Calendar cal = Calendar.getInstance() ; |
|||
int value = cal.get(calendarField) ; |
|||
if(calc.equals("add")){ |
|||
cal.set(calendarField, value+n) ; |
|||
}else if(calc.equals("sub")){ |
|||
cal.set(calendarField, value-n); |
|||
} |
|||
return datetime.format(cal.getTime()); |
|||
} catch(Exception e){ |
|||
log.debug("DateUtil.getDay():" + e.getMessage()); |
|||
return "" ; |
|||
} |
|||
} |
|||
|
|||
@SuppressWarnings("static-access") |
|||
public static void sleep(int seconds){ |
|||
if(seconds<1) |
|||
return; |
|||
try { |
|||
Thread.currentThread().sleep(seconds*1000); |
|||
} catch (InterruptedException e) { |
|||
e.printStackTrace(); |
|||
} |
|||
} |
|||
|
|||
@SuppressWarnings("static-access") |
|||
public static void sleepLong(long millisecond){ |
|||
if(millisecond<1) |
|||
return; |
|||
try { |
|||
Thread.currentThread().sleep(millisecond); |
|||
} catch (InterruptedException e) { |
|||
e.printStackTrace(); |
|||
} |
|||
} |
|||
|
|||
public static void main(String[] args) { |
|||
System.out.println(TgetbeforeHour()); |
|||
} |
|||
} |
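// ---------------------------------------------------------------------------
// Usage sketch (editor addition, not part of the original commit). DateUtilDemo is a hypothetical
// class showing a few of the helpers above. Note that getYear()/getMonth()/getDay() read the
// static `cale` Calendar, which is captured once when the class is loaded, so they do not track "now".
class DateUtilDemo {
    public static void main(String[] args) {
        System.out.println(DateUtil.getDateTime());                        // current date-time, yyyy-MM-dd HH:mm:ss
        System.out.println(DateUtil.addDay("2019-03-01", 10));             // expected: 2019-03-11
        System.out.println(DateUtil.getDays("2019-03-11", "2019-03-01"));  // expected: 10
        System.out.println(DateUtil.getMonthLastDay(2020, 2));             // expected: 29 (leap year)
    }
}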
|||
@ -0,0 +1,644 @@ |
|||
package com.zyzs.otherdatasave.util; |
|||
|
|||
import com.alibaba.fastjson.JSON; |
|||
import com.alibaba.fastjson.JSONObject; |
|||
import com.google.common.collect.Lists; |
|||
import com.google.common.collect.Maps; |
|||
import org.apache.http.HttpHost; |
|||
import org.apache.http.auth.AuthScope; |
|||
import org.apache.http.auth.UsernamePasswordCredentials; |
|||
import org.apache.http.impl.client.BasicCredentialsProvider; |
|||
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; |
|||
import org.apache.http.impl.nio.reactor.IOReactorConfig; |
|||
import org.elasticsearch.action.search.*; |
|||
import org.elasticsearch.client.RequestOptions; |
|||
import org.elasticsearch.client.RestClient; |
|||
import org.elasticsearch.client.RestClientBuilder; |
|||
import org.elasticsearch.client.RestHighLevelClient; |
|||
import org.elasticsearch.client.indices.GetIndexRequest; |
|||
import org.elasticsearch.common.Strings; |
|||
import org.elasticsearch.core.TimeValue; |
|||
import org.elasticsearch.index.query.BoolQueryBuilder; |
|||
import org.elasticsearch.index.query.QueryBuilder; |
|||
import org.elasticsearch.index.query.QueryBuilders; |
|||
import org.elasticsearch.index.query.WrapperQueryBuilder; |
|||
import org.elasticsearch.search.Scroll; |
|||
import org.elasticsearch.search.SearchHit; |
|||
import org.elasticsearch.search.aggregations.AggregationBuilder; |
|||
import org.elasticsearch.search.aggregations.AggregationBuilders; |
|||
import org.elasticsearch.search.aggregations.bucket.terms.Terms; |
|||
import org.elasticsearch.search.builder.SearchSourceBuilder; |
|||
import org.joda.time.LocalDateTime; |
|||
import org.joda.time.format.DateTimeFormat; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
|
|||
import java.io.IOException; |
|||
import java.util.ArrayList; |
|||
import java.util.HashMap; |
|||
import java.util.List; |
|||
import java.util.Map; |
|||
import java.util.function.Consumer; |
|||
|
|||
|
|||
public abstract class EsUtils { |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(EsUtils.class); |
|||
private static final Map<String, RestHighLevelClient> CLIENT_MAP = Maps.newHashMap(); |
|||
private static final String DOCS = "docs"; |
|||
|
|||
public static void registerCluster(String clusterName, String[] brokers) { |
|||
RestClientBuilder builder; |
|||
RestHighLevelClient restHighLevelClient; |
|||
HttpHost[] httpHosts = new HttpHost[brokers.length]; |
|||
for(int i = 0 ; i < brokers.length ; i++){ |
|||
String[] brokerIpHost = brokers[i].split(":"); |
|||
httpHosts[i] = new HttpHost(brokerIpHost[0],Integer.valueOf(brokerIpHost[1]),"http"); |
|||
} |
|||
final BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); |
|||
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials("elastic", "baifendian123")); |
|||
builder= RestClient.builder(httpHosts) |
|||
.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { |
|||
@Override |
|||
public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { |
|||
return httpClientBuilder |
|||
.setDefaultCredentialsProvider(credentialsProvider) |
|||
.setMaxConnTotal(100) |
|||
.setMaxConnPerRoute(100) |
|||
.setDefaultIOReactorConfig( |
|||
IOReactorConfig.custom() |
|||
.setIoThreadCount(Runtime.getRuntime().availableProcessors()) |
|||
.build() |
|||
); |
|||
} |
|||
}); |
|||
|
|||
restHighLevelClient = new RestHighLevelClient(builder); |
|||
CLIENT_MAP.put(clusterName, restHighLevelClient); |
|||
} |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
/** |
|||
* Scroll-queries the given indices and streams the hits to the consumer in batches of at most `size`. |
|||
*/ |
|||
public static void scrollQuery(String clusterName, String []indices, String type, |
|||
QueryBuilder queryBuilder, Integer size, int minutes, |
|||
Consumer<List<JSONObject>> consumer) { |
|||
try { |
|||
RestHighLevelClient client=getClient(clusterName); |
|||
SearchResponse response = null; |
|||
// Scroll keep-alive is hard-coded to 3 minutes (the "minutes" parameter is not used) |
|||
|
|||
|
|||
// // Initial SearchRequest |
|||
// SearchRequest searchRequest = new SearchRequest(indexName); |
|||
// searchRequest.scroll(scroll); |
|||
// SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); |
|||
// searchSourceBuilder.query(QueryBuilders.matchAllQuery()); // just an example query; replace it with whatever you need |
|||
// searchRequest.source(searchSourceBuilder); |
|||
// |
|||
|
|||
Scroll scroll = new Scroll(TimeValue.timeValueMinutes(3)); |
|||
|
|||
SearchRequest searchRequest = new SearchRequest(indices); |
|||
searchRequest.scroll(scroll); |
|||
|
|||
// Build a SearchSourceBuilder with the query and page size |
|||
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); |
|||
searchSourceBuilder.query(queryBuilder).size(size);// any query builder can be supplied here |
|||
// Attach the source builder to the SearchRequest |
|||
searchRequest.source(searchSourceBuilder); |
|||
|
|||
long s = System.currentTimeMillis(); |
|||
// Execute the initial search |
|||
try { |
|||
response = client.search(searchRequest, RequestOptions.DEFAULT); |
|||
|
|||
} catch (IOException e) { |
|||
e.printStackTrace(); |
|||
} |
|||
long e = System.currentTimeMillis(); |
|||
LOGGER.error("First query es, size:{}, took:{} ms.indices:{}", |
|||
response.getHits().getHits().length, (e - s),indices); |
|||
|
|||
|
|||
// Grab the scroll id |
|||
String scrollId = response.getScrollId(); |
|||
while (response.getHits().getHits().length > 0) { |
|||
List<JSONObject> dataList = Lists.newLinkedList(); |
|||
for (SearchHit hit : response.getHits().getHits()) { |
|||
dataList.add(JSON.parseObject(hit.getSourceAsString())); |
|||
} |
|||
consumer.accept(dataList); |
|||
if (dataList.size() < size) { |
|||
break; |
|||
} |
|||
|
|||
long s1 = System.currentTimeMillis(); |
|||
// Fetch the next batch with the scroll id |
|||
SearchScrollRequest searchScrollRequest = new SearchScrollRequest(scrollId); |
|||
searchScrollRequest.scroll(scroll); |
|||
try { |
|||
response = client.searchScroll(searchScrollRequest,RequestOptions.DEFAULT); |
|||
} catch (IOException ioException) { |
|||
ioException.printStackTrace(); |
|||
} |
|||
scrollId = response.getScrollId(); |
|||
long e1 = System.currentTimeMillis(); |
|||
LOGGER.error("Query es, size:{}, took:{} ms,scrollIds:{},indices:{}", |
|||
response.getHits().getHits().length, (e1 - s1),scrollId,indices); |
|||
} |
|||
// Clear the scroll context |
|||
ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); |
|||
clearScrollRequest.addScrollId(scrollId); |
|||
ClearScrollResponse clearScrollResponse = client.clearScroll(clearScrollRequest, RequestOptions.DEFAULT); |
|||
boolean succeeded = clearScrollResponse.isSucceeded(); |
|||
} catch (IOException ioException) { |
|||
ioException.printStackTrace(); |
|||
} |
|||
} |
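// Usage sketch (editor addition, not part of the original commit): the cluster alias, broker
// address and index name below are placeholders. registerCluster must be called once so that
// getClient() can resolve the cluster name. Note that the `minutes` parameter of scrollQuery is
// currently unused; the scroll keep-alive is hard-coded to 3 minutes above.
private static void scrollQueryExample() {
    registerCluster("demoCluster", new String[]{"127.0.0.1:9200"});
    QueryBuilder query = QueryBuilders.matchAllQuery();
    scrollQuery("demoCluster", new String[]{"cl_index_item"}, DOCS, query, 500, 3,
            batch -> LOGGER.info("Fetched a batch of {} documents.", batch.size()));
}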
|||
|
|||
/** |
|||
* Queries the first [size] documents matching the given condition. |
|||
* @return null when there are no hits, otherwise the list of matching documents |
|||
*/ |
|||
public static List<JSONObject> query(String clusterName, String index, final QueryBuilder queryBuilder, int size) { |
|||
RestHighLevelClient restHighLevelClient=getClient(clusterName); |
|||
SearchRequest searchRequest = new SearchRequest(index); |
|||
// Build a SearchSourceBuilder with the query and page size |
|||
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); |
|||
searchSourceBuilder.query(queryBuilder) |
|||
.size(size);// any query builder can be supplied here |
|||
searchSourceBuilder.trackTotalHits(true); |
|||
// Attach the source builder to the SearchRequest |
|||
searchRequest.source(searchSourceBuilder); |
|||
// Execute the search request |
|||
SearchResponse searchResponse = null; |
|||
|
|||
|
|||
try { |
|||
searchResponse = restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT); |
|||
} catch (IOException e) { |
|||
e.printStackTrace(); |
|||
} |
|||
List<JSONObject> dataList = new ArrayList<>(); |
|||
if (searchResponse.getHits().getTotalHits().value > 0) { |
|||
SearchHit[] hits = searchResponse.getHits().getHits(); |
|||
for (int i = 0; i < hits.length; i++) { |
|||
JSONObject data = new JSONObject(); |
|||
data.putAll(hits[i].getSourceAsMap()); |
|||
data.put("subjectId", hits[i].getIndex() |
|||
.replace("cl_major_", "") |
|||
.replace("cl_subject_", "") |
|||
.replace("cl_special_1.0_", "")); |
|||
dataList.add(data); |
|||
} |
|||
return dataList; |
|||
} |
|||
|
|||
|
|||
|
|||
// try { |
|||
// searchResponse = restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT); |
|||
// } catch (IOException e) { |
|||
// e.printStackTrace(); |
|||
// } |
|||
// SearchResponse response = client.prepareSearch() |
|||
// .setIndices(index) |
|||
// .setIndicesOptions(IndicesOptions.fromOptions(true, true, |
|||
// true, false)) |
|||
// .setSize(size) |
|||
// .setFrom(0) |
|||
// .setQuery(queryBuilder) |
|||
// .execute().actionGet(); |
|||
// if (response.getHits().totalHits > 0) { |
|||
// List<JSONObject> dataList = Lists.newLinkedList(); |
|||
// SearchHit[] hits = response.getHits().getHits(); |
|||
// for (int i = 0; i < hits.length; i++) { |
|||
// JSONObject data = new JSONObject(); |
|||
// data.putAll(hits[i].getSourceAsMap()); |
|||
// dataList.add(data); |
|||
// } |
|||
// return dataList; |
|||
// } |
|||
|
|||
return null; |
|||
} |
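// Usage sketch (editor addition, not part of the original commit): the cluster alias, index name
// and field/value are placeholders. query() returns at most `size` hits, injects a "subjectId"
// field derived from the index name (cl_major_ / cl_subject_ / cl_special_1.0_ prefixes stripped),
// and returns null when nothing matches.
private static void queryExample() {
    BoolQueryBuilder condition = QueryBuilders.boolQuery()
            .must(QueryBuilders.termQuery("docType", "paper"));
    List<JSONObject> docs = query("demoCluster", "cl_major_123456", condition, 10);
    if (docs != null) {
        docs.forEach(doc -> LOGGER.info("subjectId={}", doc.getString("subjectId")));
    }
}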
|||
|
|||
/** |
|||
* Builds the list of index names covered by the given time range. |
|||
* |
|||
* @param startMills start time (ms) |
|||
* @param endMils end time (ms) |
|||
* @return |
|||
*/ |
|||
public static String[] getIndices(String prefix, String separator, |
|||
long startMills, long endMils, String pattern) { |
|||
List<String> indexList = Lists.newArrayList(); |
|||
LocalDateTime start = new LocalDateTime(startMills); |
|||
LocalDateTime end = new LocalDateTime(endMils); |
|||
for (LocalDateTime dt = start; dt.isBefore(end); dt = dt.plusDays(1)) { |
|||
String dtStr = dt.toString(DateTimeFormat.forPattern(pattern)); |
|||
String index = new StringBuilder() |
|||
.append(prefix) |
|||
.append(separator) |
|||
.append(dtStr) |
|||
.toString(); |
|||
indexList.add(index); |
|||
} |
|||
|
|||
String[] indices = new String[indexList.size()]; |
|||
indices = indexList.toArray(indices); |
|||
return indices; |
|||
} |
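// Usage sketch (editor addition, not part of the original commit): prefix, separator and date
// pattern are placeholders. For the range below this expands to cl_index-2019.03.01 through
// cl_index-2019.03.04 (the end day is exclusive).
private static void indicesExample() {
    String[] indices = getIndices("cl_index", "-",
            DateUtil.getTimeMillis("2019-03-01 00:00:00"),
            DateUtil.getTimeMillis("2019-03-05 00:00:00"),
            "yyyy.MM.dd");
    LOGGER.info("Expanded indices: {}", String.join(",", indices));
}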
|||
|
|||
/** |
|||
* Builds the list of index names covered by the given time range (including standby and yearly indices). |
|||
* @param startMills start time (ms) |
|||
* @param endMils end time (ms) |
|||
* @return |
|||
*/ |
|||
public static String[] getIndices(String prefix, String separator, |
|||
long startMills, long endMils, |
|||
String pattern, Long upperMills, |
|||
String standbyIndex,Long year) { |
|||
List<String> indexList = Lists.newArrayList(); |
|||
LocalDateTime start = new LocalDateTime(startMills); |
|||
LocalDateTime end = new LocalDateTime(endMils); |
|||
LocalDateTime upper = new LocalDateTime(upperMills); |
|||
if (start.isBefore(upper)) { |
|||
indexList.add(standbyIndex); |
|||
start = upper; |
|||
} |
|||
if(startMills < year){ |
|||
for (LocalDateTime dt = start; dt.isEqual(end) || dt.isBefore(end); dt = dt.plusYears(1)) { |
|||
String dtStr = dt.toString(DateTimeFormat.forPattern("YYYY")); |
|||
String index = new StringBuilder() |
|||
.append(prefix) |
|||
.append(separator) |
|||
.append(dtStr) |
|||
.toString(); |
|||
indexList.add(index); |
|||
} |
|||
start = new LocalDateTime(year); |
|||
} |
|||
for (LocalDateTime dt = start; dt.isEqual(end) || dt.isBefore(end); dt = dt.plusDays(1)) { |
|||
String dtStr = dt.toString(DateTimeFormat.forPattern(pattern)); |
|||
String index = new StringBuilder() |
|||
.append(prefix) |
|||
.append(separator) |
|||
.append(dtStr) |
|||
.toString(); |
|||
indexList.add(index); |
|||
} |
|||
// Only pull main posts (always add the cl_index_item index) |
|||
indexList.add("cl_index_item"); |
|||
String[] indices = new String[indexList.size()]; |
|||
indices = indexList.toArray(indices); |
|||
return indices; |
|||
} |
|||
|
|||
// public static void main(String[] args) { |
|||
// String prefix = "cl_aaa_"; |
|||
// String separator = "-"; |
|||
// long startMills = 1083340800000L; |
|||
// long endMils = 1556640000000L; //1556640000 1546272000000L |
|||
// String pattern = AppConfig.DATE_FORMAT; |
|||
// Long upperMills = 946656000L; |
|||
// String standbyIndex = "cl_index_0"; |
|||
// String [] indexs = {}; |
|||
// if(startMills < 1546272000000L){ |
|||
// LocalDateTime start = new LocalDateTime(startMills); |
|||
// LocalDateTime end = new LocalDateTime(endMils); |
|||
// LocalDateTime upper = new LocalDateTime(upperMills); |
|||
// for (LocalDateTime dt = start; dt.isEqual(end) || dt.isBefore(end); dt = dt.plusYears(1)) { |
|||
// String dtStr = dt.toString(DateTimeFormat.forPattern("YYYY")); |
|||
// String index = new StringBuilder() |
|||
// .append(prefix) |
|||
// .append(separator) |
|||
// .append(dtStr) |
|||
// .toString(); |
|||
// System.out.println("*** "+ index); |
|||
// //indexs.add(index); |
|||
// } |
|||
// } |
|||
// startMills = 1546272000000L; |
|||
// indexs = getIndices(prefix,separator,startMills,endMils,pattern,upperMills,standbyIndex); |
|||
// for (int i = 0 ; i < indexs.length ; i ++){ |
|||
// System.out.println(indexs[i]); |
|||
// } |
|||
// } |
|||
|
|||
/** |
|||
* Returns an index that is guaranteed to exist: |
|||
* if indexName already exists it is returned, otherwise it is created first. |
|||
*/ |
|||
// public static String getOrCreateIndex(String clusterName, String indexName, String type, |
|||
// int shard, int replica, String mappingFile) { |
|||
// try { |
|||
// if (!EsUtils.exists(clusterName, indexName)) { |
|||
// byte[] bytes = Files.readAllBytes(Paths.get(mappingFile)); |
|||
// String mappingDef = new String(bytes); |
|||
// boolean flag = EsUtils.createIndex(clusterName, indexName, type, |
|||
// shard, replica, mappingDef); |
|||
// if (!flag) { |
|||
// throw new RuntimeException("Create index " + indexName + " error."); |
|||
// } |
|||
// } |
|||
// } catch (Exception e) { |
|||
// throw new RuntimeException(e); |
|||
// } |
|||
// return indexName; |
|||
// } |
|||
|
|||
/** |
|||
* Indexes a single document. |
|||
*/ |
|||
// public static String index(String clusterName, String indexName, String type, final JSONObject data, String idField) { |
|||
// indexName = indexName.replace("cl_major_","cl_special_1.0_"); |
|||
// TransportClient client = getClient(clusterName); |
|||
// IndexResponse response = client.prepareIndex(indexName, type) |
|||
// .setSource(data, XContentType.JSON) |
|||
// .setId(data.getString(idField)) |
|||
// .get(); |
|||
// // 并且创建好别名 |
|||
// String aliasName = indexName.replace("cl_special_1.0_","cl_major_"); |
|||
// client.admin().indices().prepareAliases().addAlias(indexName,aliasName).execute().actionGet(); |
|||
// return response.getId(); |
|||
// } |
|||
|
|||
/**
 * Index a list of documents (by date), one request per document.
 */
|||
// public static void index(String clusterName, String indexName, String type, final List<JSONObject> dataList, String idField) { |
|||
// if (CollectionUtils.isEmpty(dataList)) { |
|||
// return; |
|||
// } |
|||
// TransportClient client = getClient(clusterName); |
|||
// for (int i = 0; i < dataList.size(); i++) { |
|||
// JSONObject data = dataList.get(i); |
|||
// client.prepareIndex(indexName, type) |
|||
// .setSource(data, XContentType.JSON) |
|||
// .setId(data.getString(idField)) |
|||
// .get(); |
|||
// } |
|||
// } |
|||
|
|||
/**
 * Bulk-index documents.
 */
|||
// public static boolean bulkIndex(String clusterName, final List<BulkItem> bulkItemList, String idField) { |
|||
// if (CollectionUtils.isEmpty(bulkItemList)) { |
|||
// return true; |
|||
// } |
|||
// TransportClient client = getClient(clusterName); |
|||
// BulkRequestBuilder rb = client.prepareBulk(); |
|||
// for (BulkItem item : bulkItemList) { |
|||
// rb.add(client.prepareIndex(item.getIndexName(), item.getType(), item.getData().getString(idField)) |
|||
// .setSource(item.getData(), XContentType.JSON)); |
|||
// } |
|||
// BulkResponse response = rb.get(); |
|||
// LOGGER.info("Bulk index, size:{}.", bulkItemList.size()); |
|||
// return response.hasFailures(); |
|||
// } |
|||
|
|||
/**
 * Check whether an index exists.
 */
|||
// public static Boolean exists(String clusterName, String indexName) { |
|||
// TransportClient client = getClient(clusterName); |
|||
// IndicesExistsRequest request = new IndicesExistsRequest() |
|||
// .indices(indexName); |
|||
// IndicesExistsResponse response = client.admin().indices().exists(request).actionGet(); |
|||
// return response.isExists(); |
|||
// } |
|||
|
|||
/**
 * Create an index.
 */
|||
// public static Boolean createIndex(String clusterName, String indexName, String type, |
|||
// Integer shardCount, Integer replicaCount, String mappingDef) { |
|||
// TransportClient client = getClient(clusterName); |
|||
// CreateIndexRequest request = new CreateIndexRequest(indexName); |
|||
// request.settings(Settings.builder() |
|||
// .put("index.number_of_shards", shardCount) |
|||
// .put("index.number_of_replicas", replicaCount) |
|||
// .put("index.refresh_interval", 2, TimeUnit.SECONDS) |
|||
// .put("index.analysis.filter.shingle_filter.type", "shingle") |
|||
// .put("index.analysis.filter.shingle_filter.min_shingle_size", 2) |
|||
// .put("index.analysis.filter.shingle_filter.max_shingle_size", 2) |
|||
// .put("index.analysis.filter.shingle_filter.output_unigrams", false) |
|||
// .put("index.analysis.analyzer.shingle_analyzer.type", "custom") |
|||
// .put("index.analysis.analyzer.shingle_analyzer.tokenizer", "ik_smart") |
|||
// .putArray("index.analysis.analyzer.shingle_analyzer.filter", "lowercase", "shingle_filter") |
|||
// ); |
|||
// |
|||
// request.mapping(type, mappingDef, XContentType.JSON); |
|||
// CreateIndexResponse createIndexResponse = client.admin().indices().create(request).actionGet(); |
|||
// boolean acknowledged = createIndexResponse.isAcknowledged(); |
|||
// boolean shardsAcknowledged = createIndexResponse.isShardsAcked(); |
|||
// if (acknowledged && shardsAcknowledged) { |
|||
// return true; |
|||
// } |
|||
// return false; |
|||
// } |
|||
|
|||
/**
 * Delete an index.
 */
|||
|
|||
// public static Boolean deleteIndex(String clusterName, String indexName) { |
|||
// TransportClient client = getClient(clusterName); |
|||
// DeleteIndexRequest request = new DeleteIndexRequest() |
|||
// .indices(indexName); |
|||
// AcknowledgedResponse response = client.admin().indices().delete(request).actionGet(); |
|||
// return response.isAcknowledged(); |
|||
// } |
|||
private static RestHighLevelClient getClient(String clusterName) { |
|||
return CLIENT_MAP.get(clusterName); |
|||
} |
|||
public static BulkItem buildBulkItem(String indexName, String type, final JSONObject data) { |
|||
return new BulkItem() |
|||
.setIndexName(indexName) |
|||
.setType(type) |
|||
.setData(data); |
|||
} |
|||
|
|||
public static class BulkItem { |
|||
String indexName; |
|||
String type; |
|||
JSONObject data; |
|||
|
|||
public String getIndexName() { |
|||
return indexName; |
|||
} |
|||
|
|||
public BulkItem setIndexName(String indexName) { |
|||
this.indexName = indexName; |
|||
return this; |
|||
} |
|||
|
|||
public String getType() { |
|||
return type; |
|||
} |
|||
|
|||
public BulkItem setType(String type) { |
|||
this.type = type; |
|||
return this; |
|||
} |
|||
|
|||
public JSONObject getData() { |
|||
return data; |
|||
} |
|||
|
|||
public BulkItem setData(JSONObject data) { |
|||
this.data = data; |
|||
return this; |
|||
} |
|||
} |
|||
|
|||
|
|||
public static boolean indexExists(String clusterName,String index){ |
|||
RestHighLevelClient client=getClient(clusterName); |
|||
GetIndexRequest getIndexRequest = new GetIndexRequest(index); |
|||
try { |
|||
return client.indices().exists(getIndexRequest, RequestOptions.DEFAULT); |
|||
} catch (IOException e) { |
|||
e.printStackTrace(); |
|||
} |
|||
return false; |
|||
} |
|||
|
|||
public static AggregationBuilder getSubjectChannelAB(String tag) {
    // tag (e.g. docType) is the channel-type field to aggregate on; the aggregation
    // is registered under the name tag + "Tag", which callers must use to read it back.
    AggregationBuilder aggregationBuilder = AggregationBuilders
            .terms(tag + "Tag").field(tag)
            .size(1000)
            .minDocCount(0);
    // .order(BucketOrder.count(true)) could be re-enabled to sort buckets by document count.
    return aggregationBuilder;
}
|||
|
|||
public static Terms queryTag(String clusterName, String[] index,
                             QueryBuilder queryBuilder,
                             AggregationBuilder aggregationBuilder, String tag) {
    RestHighLevelClient client = getClient(clusterName);

    SearchRequest searchRequest = new SearchRequest(index);
    // Build a SearchSourceBuilder carrying the query and the aggregation
    // (the aggregation returns the top buckets for the tagged field).
    SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
    searchSourceBuilder.query(queryBuilder);
    searchSourceBuilder.aggregation(aggregationBuilder);
    // Attach the source to the request; without this the query and aggregation are never sent.
    searchRequest.source(searchSourceBuilder);

    // Execute the search request.
    SearchResponse searchResponse = null;
    try {
        searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
    } catch (IOException e) {
        e.printStackTrace();
    }
|||
//// |
|||
//// SearchRequestBuilder requestBuilder = client.prepareSearch() |
|||
//// .setIndices(index) |
|||
//// .setIndicesOptions(IndicesOptions.fromOptions(true, true, |
|||
//// true, false)) |
|||
//// .setTypes("docs"); |
|||
//// requestBuilder.addAggregation(aggregationBuilder); |
|||
//// requestBuilder.setQuery(queryBuilder); |
|||
//// System.out.println("***** queryTag : " + requestBuilder.toString()); |
|||
// SearchResponse response = requestBuilder.execute().actionGet(); |
|||
// The name passed here must match the one the aggregation was registered under (e.g. tag + "Tag").
Terms aggregation = searchResponse.getAggregations().get(tag);
|||
return aggregation; |
|||
} |
|||
|
|||
public static Long queryCount(String clusterName, String index, BoolQueryBuilder qb) {
    RestHighLevelClient client = getClient(clusterName);
    // Create a SearchRequest for the target index.
    SearchRequest searchRequest = new SearchRequest(index);
    // Create a SearchSourceBuilder and set the query; trackTotalHits(true) makes
    // Elasticsearch report the exact hit count instead of capping it at 10,000.
    SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
    searchSourceBuilder.query(qb);
    searchSourceBuilder.trackTotalHits(true);
    // Attach the source to the request.
    searchRequest.source(searchSourceBuilder);
    // Execute the search request.
    SearchResponse searchResponse = null;
    try {
        searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
    } catch (IOException e) {
        e.printStackTrace();
    }
    Long count = searchResponse.getHits().getTotalHits().value;

    return count;
}
|||
|
|||
public static Map<String,Long> parseTerms(Terms result) { |
|||
Map<String,Long> resultMap = new HashMap<>(); |
|||
if (result.getBuckets().size() > 0) { |
|||
for (Terms.Bucket bucket : result.getBuckets()) { |
|||
long count = bucket.getDocCount(); |
|||
if(null != bucket.getKey()) { |
|||
resultMap.put(bucket.getKey().toString(), count); |
|||
} |
|||
} |
|||
} |
|||
return resultMap; |
|||
} |
|||
|
|||
/**
 * Assemble the query result for the crawlDataFlag field, grouped into url / account / keyword counts.
 */
|||
public static Map<String,Long> getResultMap(Map<String, Long> termsMap) { |
|||
Map<String,Long> resultMap = new HashMap<>(); |
|||
Long urlCount = 0L; |
|||
Long accountCount = 0L; |
|||
Long keywordCount = 0L; |
|||
for (Map.Entry<String, Long> entry : termsMap.entrySet()) { |
|||
if(entry.getKey().startsWith("url:")){ |
|||
urlCount = urlCount + entry.getValue(); |
|||
} |
|||
if(entry.getKey().startsWith("account:")){ |
|||
accountCount = accountCount + entry.getValue(); |
|||
} |
|||
if(entry.getKey().startsWith("keyword:")){ |
|||
keywordCount = keywordCount + entry.getValue(); |
|||
} |
|||
} |
|||
resultMap.put("url", urlCount); |
|||
resultMap.put("account", accountCount); |
|||
resultMap.put("keyword", keywordCount); |
|||
return resultMap; |
|||
} |
|||
|
|||
public static BoolQueryBuilder getBoolQueryBuilderFromSqlStr(String str) { |
|||
LOGGER.info("[EsUtils] getBoolQueryBuilderFromSqlStr ..."); |
|||
BoolQueryBuilder qb = QueryBuilders.boolQuery(); |
|||
if (Strings.isNullOrEmpty(str) || !str.startsWith("BoolQueryBuilder=")) { |
|||
return qb; |
|||
} |
|||
// str = str.replaceAll("\t", " "); |
|||
int start = str.indexOf("{"); |
|||
String jsonStr = str.substring(start); |
|||
JSONObject json = (JSONObject) JSONObject.parse(jsonStr); |
|||
if (null == json) { |
|||
return qb; |
|||
} |
|||
WrapperQueryBuilder wrapper = QueryBuilders.wrapperQuery(jsonStr); |
|||
qb.must(wrapper); |
|||
return qb; |
|||
} |
|||
} |
|||
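A minimal usage sketch of the active EsUtils helpers above; the cluster, index and field names are placeholders and it assumes a client was registered in CLIENT_MAP at startup:

package com.zyzs.otherdatasave.util;

import java.util.Map;

import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;

// Illustrative only: "cl_cluster", "cl_index_item" and "docType" are placeholder names.
public class EsUtilsUsageSketch {
    public static void main(String[] args) {
        // Count documents matching a term query and break them down by channel.
        BoolQueryBuilder qb = QueryBuilders.boolQuery()
                .must(QueryBuilders.termQuery("crawlDataFlag", "keyword:example"));
        if (EsUtils.indexExists("cl_cluster", "cl_index_item")) {
            Long total = EsUtils.queryCount("cl_cluster", "cl_index_item", qb);
            // The aggregation is registered as "docTypeTag", so it is read back under that name.
            Terms channels = EsUtils.queryTag("cl_cluster", new String[]{"cl_index_item"},
                    qb, EsUtils.getSubjectChannelAB("docType"), "docTypeTag");
            Map<String, Long> byChannel = EsUtils.parseTerms(channels);
            System.out.println("total=" + total + ", byChannel=" + byChannel);
        }
    }
}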
@ -0,0 +1,53 @@ |
|||
package com.zyzs.otherdatasave.util; |
|||
|
|||
import java.security.MessageDigest; |
|||
import java.security.NoSuchAlgorithmException; |
|||
|
|||
public class MfMD5Util { |
|||
|
|||
private final static String[] strDigits = {"0", "1", "2", "3", "4", "5", |
|||
"6", "7", "8", "9", "a", "b", "c", "d", "e", "f"}; |
|||
|
|||
public MfMD5Util() { |
|||
} |
|||
|
|||
private static String byteToArrayString(byte bByte) { |
|||
int iRet = bByte; |
|||
// System.out.println("iRet="+iRet); |
|||
if (iRet < 0) { |
|||
iRet += 256; |
|||
} |
|||
int iD1 = iRet / 16; |
|||
int iD2 = iRet % 16; |
|||
return strDigits[iD1] + strDigits[iD2]; |
|||
} |
|||
|
|||
private static String byteToNum(byte bByte) { |
|||
int iRet = bByte; |
|||
System.out.println("iRet1=" + iRet); |
|||
if (iRet < 0) { |
|||
iRet += 256; |
|||
} |
|||
return String.valueOf(iRet); |
|||
} |
|||
|
|||
private static String byteToString(byte[] bByte) { |
|||
StringBuffer sBuffer = new StringBuffer(); |
|||
for (int i = 0; i < bByte.length; i++) { |
|||
sBuffer.append(byteToArrayString(bByte[i])); |
|||
} |
|||
return sBuffer.toString(); |
|||
} |
|||
|
|||
public static String GetMD5Code(String strObj) {
    String resultString = null;
    try {
        MessageDigest md = MessageDigest.getInstance("MD5");
        // Hex-encode the 16-byte MD5 digest of the input string.
        resultString = byteToString(md.digest(strObj.getBytes()));
    } catch (NoSuchAlgorithmException ex) {
        ex.printStackTrace();
    }
    return resultString;
}
|||
} |
|||
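A minimal usage sketch of the helper above; the URL is a placeholder:

package com.zyzs.otherdatasave.util;

// Illustrative only: derive a stable 32-character lowercase hex id from an arbitrary string.
public class MfMD5UtilUsageSketch {
    public static void main(String[] args) {
        String docId = MfMD5Util.GetMD5Code("https://example.org/doc/1");
        System.out.println(docId);
    }
}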
@ -0,0 +1,4 @@ |
|||
package com.zyzs.otherdatasave.util; |
|||
|
|||
public class Patentdub { |
|||
} |
|||
@ -0,0 +1,234 @@ |
|||
package com.zyzs.otherdatasave.util; |
|||
|
|||
|
|||
import javax.imageio.ImageIO; |
|||
import javax.imageio.ImageReader; |
|||
import javax.imageio.stream.FileImageInputStream; |
|||
import javax.imageio.stream.ImageInputStream; |
|||
import java.awt.image.BufferedImage; |
|||
import java.io.*; |
|||
import java.util.ArrayList; |
|||
import java.util.Iterator; |
|||
import java.util.List; |
|||
|
|||
public class ReadLine { |
|||
|
|||
public static List<String> readLine( File fileName){ |
|||
List<String> list = new ArrayList<String> (); |
|||
String line; |
|||
try { |
|||
InputStreamReader read = new InputStreamReader(new FileInputStream(fileName), "utf-8"); |
|||
BufferedReader reader = new BufferedReader(read); |
|||
while ((line = reader.readLine()) != null) { |
|||
try { |
|||
if (line.length() > 0) { |
|||
list.add(line); |
|||
} |
|||
} catch (Exception e) { |
|||
e.printStackTrace(); |
|||
} |
|||
} |
|||
return list; |
|||
}catch (UnsupportedEncodingException e) { |
|||
e.printStackTrace(); |
|||
return null; |
|||
} catch (FileNotFoundException e) { |
|||
e.printStackTrace(); |
|||
return null; |
|||
} catch (IOException e) { |
|||
e.printStackTrace(); |
|||
return null; |
|||
} |
|||
} |
|||
|
|||
|
|||
// public static List<JSONObject> readLine(File fileName){ |
|||
// List<JSONObject> list = new ArrayList<JSONObject> (); |
|||
// String line; |
|||
// try { |
|||
// InputStreamReader read = new InputStreamReader(new FileInputStream(fileName), "utf-8"); |
|||
// BufferedReader reader = new BufferedReader(read); |
|||
// while ((line = reader.readLine()) != null) { |
|||
// try { |
|||
// if (line.length() > 0) { |
|||
// list.add(line); |
|||
// } |
|||
// } catch (Exception e) { |
|||
// e.printStackTrace(); |
|||
// } |
|||
// } |
|||
// return list; |
|||
// }catch (UnsupportedEncodingException e) { |
|||
// e.printStackTrace(); |
|||
// return null; |
|||
// } catch (FileNotFoundException e) { |
|||
// e.printStackTrace(); |
|||
// return null; |
|||
// } catch (IOException e) { |
|||
// e.printStackTrace(); |
|||
// return null; |
|||
// } |
|||
// } |
|||
|
|||
// Read the whole content of a file into a single string.
public static String readFile(String path) {
    File file = new File(path);
    StringBuilder result = new StringBuilder();
    try {
        // BufferedReader over a UTF-8 InputStreamReader to read the file.
        BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF-8"));
        String s = null;
        // readLine() reads one line at a time; a line separator is prepended before each line.
        while ((s = br.readLine()) != null) {
            result.append(System.lineSeparator()).append(s);
        }
        br.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return result.toString();
}
|||
|
|||
|
|||
public static void readFiles(File file){ |
|||
if (file.exists()) { |
|||
System.err.println("exist"); |
|||
try { |
|||
FileInputStream fis = new FileInputStream(file); |
|||
InputStreamReader isr = new InputStreamReader(fis, "UTF-8"); |
|||
BufferedReader br = new BufferedReader(isr); |
|||
String line; |
|||
while((line = br.readLine()) != null){ |
|||
System.out.println(line); |
|||
} |
|||
br.close(); |
|||
isr.close(); |
|||
fis.close(); |
|||
} catch (FileNotFoundException e) { |
|||
e.printStackTrace(); |
|||
} catch (UnsupportedEncodingException e) { |
|||
e.printStackTrace(); |
|||
} catch (IOException e) { |
|||
e.printStackTrace(); |
|||
} |
|||
} |
|||
} |
|||
|
|||
|
|||
public static String getResolution1(File file) throws IOException { |
|||
BufferedImage image = ImageIO.read(file); |
|||
return image.getWidth() + "x" + image.getHeight(); |
|||
} |
|||
|
|||
|
|||
// public static String getResolution(File file){ |
|||
// Encoder encoder = new Encoder(); |
|||
// try { |
|||
// MultimediaInfo m = encoder.getInfo(file); |
|||
// int height = m.getVideo().getSize().getHeight(); |
|||
// int width = m.getVideo().getSize().getWidth(); |
|||
// System.out.println("width:"+width); |
|||
// System.out.println("height:" + height); |
|||
// FileInputStream fis = new FileInputStream(source); |
|||
// FileChannel fc = fis.getChannel(); |
|||
// BigDecimal fileSize = new BigDecimal(fc.size()); |
|||
// String size = fileSize.divide(new BigDecimal(1048576), 2, RoundingMode.HALF_UP) + "MB"; |
|||
// System.out.println("size:" + size); |
|||
// long duration = m.getDuration()/1000; |
|||
// System.out.println("duration:" + duration + "s"); |
|||
// } catch (Exception e) { |
|||
// e.printStackTrace(); |
|||
// } |
|||
// } |
|||
|
|||
public static String getImageDim(String path) { |
|||
String result = null; |
|||
String suffix = getFileSuffix(path); |
|||
// Look up ImageReaders that can decode files with the given suffix.
|||
Iterator<ImageReader> iter = ImageIO.getImageReadersBySuffix(suffix); |
|||
// System.out.println(ImageIO.getImageReadersBySuffix(suffix)); |
|||
if (iter.hasNext()) { |
|||
ImageReader reader = iter.next(); |
|||
try { |
|||
ImageInputStream stream = new FileImageInputStream(new File(path)); |
|||
reader.setInput(stream); |
|||
int width = reader.getWidth(reader.getMinIndex()); |
|||
int height = reader.getHeight(reader.getMinIndex()); |
|||
result = width + "×" + height; |
|||
} catch (IOException e) { |
|||
e.printStackTrace(); |
|||
} finally { |
|||
reader.dispose(); |
|||
} |
|||
} |
|||
// System.out.println("getImageDim:" + result); |
|||
return result; |
|||
} |
|||
|
|||
private static String getFileSuffix(final String path) { |
|||
String result = null; |
|||
if (path != null) { |
|||
result = ""; |
|||
if (path.lastIndexOf('.') != -1) { |
|||
result = path.substring(path.lastIndexOf('.')); |
|||
if (result.startsWith(".")) { |
|||
result = result.substring(1); |
|||
} |
|||
} |
|||
} |
|||
// System.out.println("getFileSuffix:" + result); |
|||
return result; |
|||
} |
|||
|
|||
|
|||
// public static String videosize(String video) { |
|||
// File source = new File(video); |
|||
// Encoder encoder = new Encoder(); |
|||
// try { |
|||
// it.sauronsoftware.jave.MultimediaInfo m = encoder.getInfo(source); |
|||
// return m.getVideo().getSize().getHeight() + "×" + m.getVideo().getSize().getWidth(); |
|||
// } catch (Exception e) { |
|||
// e.printStackTrace(); |
|||
// return null; |
|||
// } |
|||
// } |
|||
|
|||
|
|||
|
|||
// public static String getVideoTime (String path){ |
|||
// File source = new File(path); |
|||
// Encoder encoder = new Encoder(); |
|||
// File[] file = source.listFiles(); |
|||
// long sum =0; |
|||
// for (File file2 : file) { |
|||
// try { |
|||
// MultimediaInfo m = encoder.getInfo(file2); |
|||
// long ls = m.getDuration()/1000; // ls is the duration in seconds
|||
// sum += ls; |
|||
// } catch (Exception e) { |
|||
// e.printStackTrace(); |
|||
// } |
|||
// } |
|||
// double sum1 = (double)sum; |
|||
// double sum2 = sum1/3600; // converted to hours
|||
// System.out.println(sum2); |
|||
// return sum2+""; |
|||
// } |
|||
// |
|||
|
|||
|
|||
// public static byte[] readFile(String path){ |
|||
// try { |
|||
// FileInputStream fileInputStream = new FileInputStream(path); |
|||
// BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(fileInputStream)); |
|||
// String line = null; |
|||
// while ((line = bufferedReader.readLine()) != null) { |
|||
// System.out.println(line); |
|||
// } |
|||
// fileInputStream.close(); |
|||
// }catch (Exception e){ |
|||
// e.printStackTrace(); |
|||
// } |
|||
// } |
|||
|
|||
|
|||
|
|||
} |
|||
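A minimal usage sketch of the ReadLine helpers above; the file paths are placeholders:

package com.zyzs.otherdatasave.util;

import java.io.File;
import java.util.List;

// Illustrative only: "data/1.txt" and "data/sample.jpg" are placeholder paths.
public class ReadLineUsageSketch {
    public static void main(String[] args) {
        // Read a UTF-8 text file line by line (empty lines are skipped by readLine).
        List<String> lines = ReadLine.readLine(new File("data/1.txt"));
        for (String line : lines) {
            String[] parts = line.trim().split("\t"); // tab-separated key/value, as in readtxt later in this diff
            System.out.println(parts[0]);
        }
        // Report an image's dimensions, e.g. "800×600".
        System.out.println(ReadLine.getImageDim("data/sample.jpg"));
    }
}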
@ -0,0 +1,87 @@ |
|||
package com.zyzs.otherdatasave.worker; |
|||
|
|||
import com.google.common.util.concurrent.ThreadFactoryBuilder; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
|
|||
import java.util.concurrent.LinkedBlockingQueue; |
|||
import java.util.concurrent.ThreadFactory; |
|||
import java.util.concurrent.ThreadPoolExecutor; |
|||
import java.util.concurrent.TimeUnit; |
|||
|
|||
public abstract class AbstractWorker implements Worker { |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractWorker.class); |
|||
|
|||
protected volatile boolean running = true; |
|||
protected static final Integer TASK_MAX_COUNT = 10; |
|||
protected ThreadPoolExecutor executor; |
|||
|
|||
@Override |
|||
public void start() { |
|||
init(); |
|||
int threadCount = getThreadCount(); |
|||
// System.out.println("threadCount===="+threadCount); |
|||
for (int i = 0; i < threadCount; i++) { |
|||
executor.submit(() -> { |
|||
while (running) { |
|||
try { |
|||
work(null); |
|||
} catch (Exception e) { |
|||
LOGGER.error("Work error due to [{}].", e.getMessage(), e); |
|||
} |
|||
} |
|||
}); |
|||
} |
|||
} |
|||
|
|||
/**
 * Build the executor using the thread count loaded from the configuration file.
 */
|||
@Override |
|||
public void init() { |
|||
executor = buildExecutor(getThreadCount(), TASK_MAX_COUNT, getThreadNameFormat()); |
|||
|
|||
} |
|||
|
|||
/**
 * Thread pool size.
 */
|||
|
|||
protected abstract Integer getThreadCount(); |
|||
|
|||
/**
 * Thread name format.
 */
|||
protected abstract String getThreadNameFormat(); |
|||
|
|||
/**
 * Implemented by subclasses to perform the actual work.
 */
|||
protected abstract void work(String json); |
|||
|
|||
@Override |
|||
public void stop() { |
|||
running = false; |
|||
executor.shutdown(); |
|||
} |
|||
|
|||
@Override |
|||
public boolean isStopped() { |
|||
return !running && executor.isShutdown(); |
|||
} |
|||
|
|||
public static ThreadPoolExecutor buildExecutor(int threadCount, int queueSize, String threadNameFormat) { |
|||
ThreadFactory factory = new ThreadFactoryBuilder() |
|||
.setDaemon(true) |
|||
.setNameFormat(threadNameFormat) |
|||
.build(); |
|||
|
|||
ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(threadCount, threadCount, |
|||
0, TimeUnit.MILLISECONDS, |
|||
new LinkedBlockingQueue<>(queueSize), factory); |
|||
threadPoolExecutor.setRejectedExecutionHandler((r, executor) -> { |
|||
LOGGER.error("Executor error."); |
|||
}); |
|||
return threadPoolExecutor; |
|||
} |
|||
} |
|||
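The lifecycle is: start() builds the executor and spawns getThreadCount() daemon threads that loop calling work() until stop() flips the running flag. A self-contained sketch of a worker driven by hand (DemoWorker is an assumed name, not one of this project's producers, which are Spring beans with injected config):

package com.zyzs.otherdatasave.worker;

// Illustrative only: a hand-started worker, bypassing Spring.
public class DemoWorker extends AbstractWorker {

    @Override
    protected Integer getThreadCount() {
        return 1;
    }

    @Override
    protected String getThreadNameFormat() {
        return "DemoWorker-%d";
    }

    @Override
    protected void work(String json) {
        // One unit of work per loop iteration; sleep so the demo does not spin.
        try {
            Thread.sleep(1000L);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    public static void main(String[] args) throws InterruptedException {
        DemoWorker worker = new DemoWorker();
        worker.start();   // spawns getThreadCount() daemon threads, each looping work(null)
        Thread.sleep(3000L);
        worker.stop();    // sets running = false and shuts the executor down
        System.out.println("stopped=" + worker.isStopped());
    }
}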
@ -0,0 +1,41 @@ |
|||
package com.zyzs.otherdatasave.worker; |
|||
|
|||
|
|||
|
|||
import com.zyzs.otherdatasave.config.AppConfig; |
|||
import com.zyzs.otherdatasave.service.QueryDrug; |
|||
import com.zyzs.otherdatasave.service.QueryPaper; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.beans.factory.annotation.Autowired; |
|||
import org.springframework.stereotype.Component; |
|||
|
|||
@Component |
|||
public class QueryDrugProducer extends AbstractWorker{ |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QueryDrugProducer.class); |
|||
|
|||
@Autowired |
|||
private AppConfig config; |
|||
|
|||
|
|||
@Override |
|||
protected Integer getThreadCount() { |
|||
|
|||
return config.getQueryConetnteProducerThreadCount(); |
|||
} |
|||
|
|||
@Override |
|||
protected String getThreadNameFormat() { |
|||
return "QueryConentProducer-%d"; |
|||
} |
|||
@Autowired |
|||
private QueryDrug queryService; |
|||
|
|||
@Override |
|||
protected void work(String json) { |
|||
queryService.query(); |
|||
|
|||
} |
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,39 @@ |
|||
package com.zyzs.otherdatasave.worker; |
|||
|
|||
import com.zyzs.otherdatasave.config.AppConfig; |
|||
import com.zyzs.otherdatasave.service.QueryEq; |
|||
import com.zyzs.otherdatasave.service.QueryProj; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.beans.factory.annotation.Autowired; |
|||
import org.springframework.stereotype.Component; |
|||
|
|||
@Component |
|||
public class QueryEqProducer extends AbstractWorker{ |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QueryEqProducer.class); |
|||
|
|||
@Autowired |
|||
private AppConfig config; |
|||
|
|||
|
|||
@Override |
|||
protected Integer getThreadCount() { |
|||
|
|||
return config.getQueryConetnteProducerThreadCount(); |
|||
} |
|||
|
|||
@Override |
|||
protected String getThreadNameFormat() { |
|||
return "QueryConentProducer-%d"; |
|||
} |
|||
@Autowired |
|||
private QueryEq queryEq; |
|||
|
|||
@Override |
|||
protected void work(String json) { |
|||
queryEq.query(); |
|||
|
|||
} |
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,41 @@ |
|||
package com.zyzs.otherdatasave.worker; |
|||
|
|||
|
|||
|
|||
import com.zyzs.otherdatasave.config.AppConfig; |
|||
import com.zyzs.otherdatasave.service.Queryclini; |
|||
import com.zyzs.otherdatasave.service.Querykafka; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.beans.factory.annotation.Autowired; |
|||
import org.springframework.stereotype.Component; |
|||
|
|||
@Component |
|||
public class QueryKfkaProducer extends AbstractWorker{ |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QueryKfkaProducer.class); |
|||
|
|||
@Autowired |
|||
private AppConfig config; |
|||
|
|||
|
|||
@Override |
|||
protected Integer getThreadCount() { |
|||
|
|||
return config.getQueryConetnteProducerThreadCount(); |
|||
} |
|||
|
|||
@Override |
|||
protected String getThreadNameFormat() { |
|||
return "QueryConentProducer-%d"; |
|||
} |
|||
@Autowired |
|||
private Querykafka querykafka; |
|||
|
|||
@Override |
|||
protected void work(String json) { |
|||
querykafka.query(); |
|||
|
|||
} |
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,40 @@ |
|||
package com.zyzs.otherdatasave.worker; |
|||
|
|||
|
|||
|
|||
import com.zyzs.otherdatasave.config.AppConfig; |
|||
import com.zyzs.otherdatasave.service.QueryPaper; |
|||
import com.zyzs.otherdatasave.service.Queryclini; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.beans.factory.annotation.Autowired; |
|||
import org.springframework.stereotype.Component; |
|||
|
|||
@Component |
|||
public class QueryPaperProducer extends AbstractWorker{ |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QueryPaperProducer.class); |
|||
|
|||
@Autowired |
|||
private AppConfig config; |
|||
|
|||
|
|||
@Override |
|||
protected Integer getThreadCount() { |
|||
System.out.println(config.getQueryConetnteProducerThreadCount()); |
|||
return config.getQueryConetnteProducerThreadCount(); |
|||
} |
|||
|
|||
@Override |
|||
protected String getThreadNameFormat() { |
|||
return "QueryConentProducer-%d"; |
|||
} |
|||
@Autowired |
|||
private QueryPaper queryPaper; |
|||
|
|||
@Override |
|||
protected void work(String json) { |
|||
queryPaper.query(); |
|||
} |
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,41 @@ |
|||
package com.zyzs.otherdatasave.worker; |
|||
|
|||
|
|||
|
|||
import com.zyzs.otherdatasave.config.AppConfig; |
|||
import com.zyzs.otherdatasave.service.QueryPatent; |
|||
import com.zyzs.otherdatasave.service.Queryclini; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.beans.factory.annotation.Autowired; |
|||
import org.springframework.stereotype.Component; |
|||
|
|||
@Component |
|||
public class QueryPatentProducer extends AbstractWorker{ |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QueryPatentProducer.class); |
|||
|
|||
@Autowired |
|||
private AppConfig config; |
|||
|
|||
|
|||
@Override |
|||
protected Integer getThreadCount() { |
|||
|
|||
return config.getQueryConetnteProducerThreadCount(); |
|||
} |
|||
|
|||
@Override |
|||
protected String getThreadNameFormat() { |
|||
return "QueryConentProducer-%d"; |
|||
} |
|||
@Autowired |
|||
private QueryPatent queryPatent; |
|||
|
|||
@Override |
|||
protected void work(String json) { |
|||
queryPatent.query(); |
|||
|
|||
} |
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,39 @@ |
|||
package com.zyzs.otherdatasave.worker; |
|||
|
|||
import com.zyzs.otherdatasave.config.AppConfig; |
|||
import com.zyzs.otherdatasave.service.QueryPatent; |
|||
import com.zyzs.otherdatasave.service.QueryProj; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.beans.factory.annotation.Autowired; |
|||
import org.springframework.stereotype.Component; |
|||
|
|||
@Component |
|||
public class QueryProProducer extends AbstractWorker{ |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QueryProProducer.class); |
|||
|
|||
@Autowired |
|||
private AppConfig config; |
|||
|
|||
|
|||
@Override |
|||
protected Integer getThreadCount() { |
|||
|
|||
return config.getQueryConetnteProducerThreadCount(); |
|||
} |
|||
|
|||
@Override |
|||
protected String getThreadNameFormat() { |
|||
return "QueryConentProducer-%d"; |
|||
} |
|||
@Autowired |
|||
private QueryProj queryProj; |
|||
|
|||
@Override |
|||
protected void work(String json) { |
|||
queryProj.query(); |
|||
|
|||
} |
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,40 @@ |
|||
package com.zyzs.otherdatasave.worker; |
|||
|
|||
|
|||
|
|||
import com.zyzs.otherdatasave.config.AppConfig; |
|||
import com.zyzs.otherdatasave.service.Queryclini; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.beans.factory.annotation.Autowired; |
|||
import org.springframework.stereotype.Component; |
|||
|
|||
@Component |
|||
public class QuerycliniProducer extends AbstractWorker{ |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QuerycliniProducer.class); |
|||
|
|||
@Autowired |
|||
private AppConfig config; |
|||
|
|||
|
|||
@Override |
|||
protected Integer getThreadCount() { |
|||
|
|||
return config.getQueryConetnteProducerThreadCount(); |
|||
} |
|||
|
|||
@Override |
|||
protected String getThreadNameFormat() { |
|||
return "QueryConentProducer-%d"; |
|||
} |
|||
@Autowired |
|||
private Queryclini queryService; |
|||
|
|||
@Override |
|||
protected void work(String json) { |
|||
queryService.query(); |
|||
|
|||
} |
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,41 @@ |
|||
package com.zyzs.otherdatasave.worker; |
|||
|
|||
|
|||
|
|||
import com.zyzs.otherdatasave.config.AppConfig; |
|||
import com.zyzs.otherdatasave.service.Queryclini; |
|||
import com.zyzs.otherdatasave.service.Queryorg; |
|||
import org.slf4j.Logger; |
|||
import org.slf4j.LoggerFactory; |
|||
import org.springframework.beans.factory.annotation.Autowired; |
|||
import org.springframework.stereotype.Component; |
|||
|
|||
@Component |
|||
public class QueryorgProducer extends AbstractWorker{ |
|||
private static final Logger LOGGER = LoggerFactory.getLogger(QueryorgProducer.class); |
|||
|
|||
@Autowired |
|||
private AppConfig config; |
|||
|
|||
|
|||
@Override |
|||
protected Integer getThreadCount() { |
|||
|
|||
return config.getQueryConetnteProducerThreadCount(); |
|||
} |
|||
|
|||
@Override |
|||
protected String getThreadNameFormat() { |
|||
return "QueryConentProducer-%d"; |
|||
} |
|||
@Autowired |
|||
private Queryorg queryorg; |
|||
|
|||
@Override |
|||
protected void work(String json) { |
|||
queryorg.query(); |
|||
|
|||
} |
|||
|
|||
|
|||
} |
|||
@ -0,0 +1,24 @@ |
|||
package com.zyzs.otherdatasave.worker; |
|||
|
|||
public interface Worker { |
|||
/**
 * Initialize.
 */
|||
void init(); |
|||
|
|||
/**
 * Start.
 */
|||
void start(); |
|||
|
|||
/**
 * Stop.
 */
|||
void stop(); |
|||
|
|||
/**
 * Whether the worker has stopped.
 * @return true once stopped
 */
|||
boolean isStopped(); |
|||
} |
|||
@ -0,0 +1,25 @@ |
|||
package com.zyzs.otherdatasave.worker; |
|||
|
|||
import com.zyzs.otherdatasave.util.ReadLine; |
|||
|
|||
import java.io.File; |
|||
import java.util.ArrayList; |
|||
import java.util.List; |
|||
|
|||
public class readtxt { |
|||
|
|||
public static void main(String[] args) { |
|||
List<String> list = ReadLine.readLine(new File("data/1.txt")); |
|||
List<String> listvalue =new ArrayList<>(); |
|||
for (String line : list) { |
|||
String key= line.trim().split("\t")[0]; |
|||
String value =line.trim().split("\t")[1]; |
|||
|
|||
|
|||
System.out.println(key); |
|||
System.out.println(value); |
|||
|
|||
} |
|||
|
|||
} |
|||
} |
|||
@ -0,0 +1,103 @@ |
|||
debug: false |
|||
#logging: |
|||
# config: ../etc/logback.xml |
|||
spring: |
|||
# datasource: |
|||
# driver-class-name: com.mysql.cj.jdbc.Driver |
|||
# username: crawl666 |
|||
# password: lx2a4jN1xFT96kj20LU= |
|||
# url: jdbc:mysql://172.18.1.134:3306/intelligent_crawl?useSSL=true&useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC |
|||
# # username: root |
|||
# password: bfd123 |
|||
# url: jdbc:mysql://172.26.11.113:3306/intelligent_crawl?useSSL=true&useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC |
|||
# username: crawl |
|||
# password: crawl123 |
|||
# url: jdbc:mysql://172.26.11.110:3306/intelligent_crawl?useSSL=true&useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC |
|||
# hikari: |
|||
# maximum-pool-size: 10 |
|||
# minimum-idle: 7 |
|||
# jpa: |
|||
# open-in-view: false |
|||
# database: mysql |
|||
redis: |
|||
host: 172.18.1.113 |
|||
port: 6379 |
|||
database: 0 |
|||
timeout: 30000 |
|||
password: |
|||
|
|||
|
|||
# Clinical trials: cliniTopic
# Projects:        projTopic
# Drugs:           drugTopic
# Patents:         patentTopic
# Papers:          paperTopic
|||
worker: |
|||
version: 2.2.3 |
|||
enable-test: false |
|||
test-thread-count: 10 |
|||
test-task-id: 180 |
|||
## Topic for reading clinical-trial data
readcliniTopic: cliniTopic
##
readcliniGroupid: othergroup
## Topic for reading project data
readprojTopic: projTopic
##
readprojgroupid: othergroup1
## Topic for reading drug data
readdrugTopic: drugTopic
## Consumer group id for comments read from Aliyun
readdrugGroupid: 039othergroup
## Topic for reading patent data
readpatentTopic: patentTopic
## Consumer group id for the fans topic read from Aliyun
readpatentGroupid: othergroup22
## Topic for reading paper (literature) data
readpaperTopic: paperTopic
## Consumer group id for the main-post topic read from Aliyun
readpaperGroupid: othergroup5
|||
|
|||
readeqTopic: equiTopic |
|||
readeqGroupid: 2othergroup |
|||
|
|||
|
|||
|
|||
|
|||
|
|||
## Prefix of the Aliyun gofast URL (source of the crawl-platform data)
aligofasturl: http://172.18.1.113:8080

## Output topic at the customer site
writeTopic: si
## Customer gofast upload URL
gofasturl: https://gofastdfs.baifendian.com/upload

## Whether to download avatars
DownloadAvatar: true
## Whether to download images in the post body
DownloadImage: true
## Whether to download attachments in the post body
DownloadFile: true
## Whether to download videos in the post body
DownloadVideo: true
## Whether to download videos of forwarded posts in the body
DownloadforwardVideo: true
## Whether to download images of forwarded posts in the body
DownloadforwardImage: true
|||
|
|||
## Thread counts used when starting the service
## Threads for downloading users
queryUserProducerThreadCount: 1
# Threads for downloading main posts
queryConetnteProducerThreadCount: 1
# Threads for downloading comments
queryConmmentProducerThreadCount: 1
# Threads for downloading fans
queryfansProducerThreadCount: 1
# Threads for downloading follows
queryfollowProducerThreadCount: 1
|||
|
|||
|
|||
server: |
|||
port: 8081 |
|||
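The worker.* keys above are consumed through AppConfig (e.g. getQueryConetnteProducerThreadCount() in the producers). A hypothetical sketch of such a binding, which may well differ from the actual AppConfig in this repo:

package com.zyzs.otherdatasave.config;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

// Hypothetical sketch only; the real AppConfig may bind these keys differently.
@Component
@ConfigurationProperties(prefix = "worker")
public class WorkerProperties {
    /** Mirrors the (misspelled) YAML key queryConetnteProducerThreadCount. */
    private Integer queryConetnteProducerThreadCount = 1;

    public Integer getQueryConetnteProducerThreadCount() {
        return queryConetnteProducerThreadCount;
    }

    public void setQueryConetnteProducerThreadCount(Integer queryConetnteProducerThreadCount) {
        this.queryConetnteProducerThreadCount = queryConetnteProducerThreadCount;
    }
}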
@ -0,0 +1,13 @@ |
|||
package com.zyzs.otherdatasave; |
|||
|
|||
import org.junit.jupiter.api.Test; |
|||
import org.springframework.boot.test.context.SpringBootTest; |
|||
|
|||
@SpringBootTest |
|||
class OtherDatasaveApplicationTests { |
|||
|
|||
@Test |
|||
void contextLoads() { |
|||
} |
|||
|
|||
} |
|||