189 changed files with 12733 additions and 7623 deletions
1     .idea/.name
2     .idea/compiler.xml
8     .idea/libraries/Maven__com_alibaba_fastjson_1_2_60.xml
8     .idea/libraries/Maven__com_fasterxml_classmate_1_4_0.xml
8     .idea/libraries/Maven__com_fasterxml_jackson_core_jackson_databind_2_9_6.xml
8     .idea/libraries/Maven__com_github_virtuald_curvesapi_1_06.xml
13    .idea/libraries/Maven__com_google_code_findbugs_jsr305_1_3_9.xml
13    .idea/libraries/Maven__com_google_errorprone_error_prone_annotations_2_1_3.xml
13    .idea/libraries/Maven__com_google_guava_guava_25_0_jre.xml
13    .idea/libraries/Maven__com_google_j2objc_j2objc_annotations_1_1.xml
13    .idea/libraries/Maven__com_ibm_icu_icu4j_4_6.xml
13    .idea/libraries/Maven__com_monitorjbl_xlsx_streamer_2_1_0.xml
13    .idea/libraries/Maven__com_rackspace_apache_xerces2_xsd11_2_11_1.xml
13    .idea/libraries/Maven__com_rackspace_eclipse_webtools_sourceediting_org_eclipse_wst_xml_xpath2_processor_2_1_100.xml
8     .idea/libraries/Maven__com_squareup_okhttp3_okhttp_3_6_0.xml
8     .idea/libraries/Maven__com_squareup_okio_okio_1_11_0.xml
13    .idea/libraries/Maven__com_sun_mail_javax_mail_1_6_2.xml
13    .idea/libraries/Maven__com_swagger_ui_swagger_bootstrap_ui_1_8_8.xml
8     .idea/libraries/Maven__commons_codec_commons_codec_1_12.xml
13    .idea/libraries/Maven__edu_princeton_cup_java_cup_10k.xml
8     .idea/libraries/Maven__io_springfox_springfox_core_2_9_2.xml
8     .idea/libraries/Maven__io_springfox_springfox_schema_2_9_2.xml
8     .idea/libraries/Maven__io_springfox_springfox_spi_2_9_2.xml
8     .idea/libraries/Maven__io_springfox_springfox_spring_web_2_9_2.xml
8     .idea/libraries/Maven__io_springfox_springfox_swagger2_2_9_2.xml
8     .idea/libraries/Maven__io_springfox_springfox_swagger_common_2_9_2.xml
13    .idea/libraries/Maven__io_springfox_springfox_swagger_ui_2_9_2.xml
8     .idea/libraries/Maven__io_swagger_swagger_annotations_1_5_20.xml
8     .idea/libraries/Maven__io_swagger_swagger_models_1_5_20.xml
13    .idea/libraries/Maven__it_sauronsoftware_jave_1_0_2.xml
13    .idea/libraries/Maven__javax_activation_activation_1_1.xml
13    .idea/libraries/Maven__javax_mail_javax_mail_api_1_6_2.xml
8     .idea/libraries/Maven__org_apache_commons_commons_collections4_4_3.xml
13    .idea/libraries/Maven__org_apache_commons_commons_compress_1_18.xml
13    .idea/libraries/Maven__org_apache_commons_commons_math3_3_6_1.xml
13    .idea/libraries/Maven__org_apache_poi_poi_3_15.xml
13    .idea/libraries/Maven__org_apache_poi_poi_4_1_0.xml
8     .idea/libraries/Maven__org_apache_poi_poi_ooxml_4_1_0.xml
8     .idea/libraries/Maven__org_apache_poi_poi_ooxml_schemas_4_1_0.xml
8     .idea/libraries/Maven__org_apache_xmlbeans_xmlbeans_3_1_0.xml
13    .idea/libraries/Maven__org_checkerframework_checker_compat_qual_2_0_0.xml
13    .idea/libraries/Maven__org_codehaus_mojo_animal_sniffer_annotations_1_14.xml
8     .idea/libraries/Maven__org_mapstruct_mapstruct_1_2_0_Final.xml
13    .idea/libraries/Maven__org_slf4j_slf4j_api_1_7_12.xml
13    .idea/libraries/Maven__stax_stax_api_1_0_1.xml
13    .idea/libraries/Maven__xml_apis_xml_apis_1_4_01.xml
13    .idea/libraries/Maven__xml_resolver_xml_resolver_1_2.xml
2     .idea/modules.xml
2     .idea/vcs.xml
51    cl_query_data_job/cl_query_data_job.iml
78    cl_query_data_job/pom.xml
20    cl_query_data_job/src/main/java/com/bfd/mf/job/Application.java
162   cl_query_data_job/src/main/java/com/bfd/mf/job/config/AllKeys.java
264   cl_query_data_job/src/main/java/com/bfd/mf/job/config/AppConfig.java
1116  cl_query_data_job/src/main/java/com/bfd/mf/job/config/BFDApiConfig.java
81    cl_query_data_job/src/main/java/com/bfd/mf/job/config/ESConstants.java
27    cl_query_data_job/src/main/java/com/bfd/mf/job/domain/entity/EmailGroup.java
101   cl_query_data_job/src/main/java/com/bfd/mf/job/domain/entity/ServiceLoad.java
20    cl_query_data_job/src/main/java/com/bfd/mf/job/domain/entity/Subject.java
9     cl_query_data_job/src/main/java/com/bfd/mf/job/domain/entity/SubjectCount.java
111   cl_query_data_job/src/main/java/com/bfd/mf/job/domain/entity/Task.java
36    cl_query_data_job/src/main/java/com/bfd/mf/job/domain/entity/TaskCount.java
90    cl_query_data_job/src/main/java/com/bfd/mf/job/domain/entity/UploadTask.java
14    cl_query_data_job/src/main/java/com/bfd/mf/job/domain/repository/EmailGroupRepository.java
1     cl_query_data_job/src/main/java/com/bfd/mf/job/domain/repository/ResultDetailRepository.java
17    cl_query_data_job/src/main/java/com/bfd/mf/job/domain/repository/ServiceLoadRepository.java
11    cl_query_data_job/src/main/java/com/bfd/mf/job/domain/repository/SubjectCountRepository.java
5     cl_query_data_job/src/main/java/com/bfd/mf/job/domain/repository/SubjectRepository.java
7     cl_query_data_job/src/main/java/com/bfd/mf/job/domain/repository/TaskCountRepository.java
61    cl_query_data_job/src/main/java/com/bfd/mf/job/domain/repository/TaskRepository.java
100   cl_query_data_job/src/main/java/com/bfd/mf/job/domain/repository/UploadTaskRepository.java
41    cl_query_data_job/src/main/java/com/bfd/mf/job/download/DownLoadFile.java
1     cl_query_data_job/src/main/java/com/bfd/mf/job/download/OkHttpUtils.java
1315  cl_query_data_job/src/main/java/com/bfd/mf/job/service/BacktraceService.java
207   cl_query_data_job/src/main/java/com/bfd/mf/job/service/EsQueryMiniService.java
7     cl_query_data_job/src/main/java/com/bfd/mf/job/service/WriterTXTService.java
241   cl_query_data_job/src/main/java/com/bfd/mf/job/service/alarm/AlarmService.java
230   cl_query_data_job/src/main/java/com/bfd/mf/job/service/backtrace/BacktraceService.java
203   cl_query_data_job/src/main/java/com/bfd/mf/job/service/es/EsQueryMiniService.java
2     cl_query_data_job/src/main/java/com/bfd/mf/job/service/es/EsQueryNormalService.java
354   cl_query_data_job/src/main/java/com/bfd/mf/job/service/query/QueryService.java
6     cl_query_data_job/src/main/java/com/bfd/mf/job/service/query/SaveService.java
90    cl_query_data_job/src/main/java/com/bfd/mf/job/service/statistics/StatisticsService.java
21    cl_query_data_job/src/main/java/com/bfd/mf/job/service/statistics/TotalCountService.java
311   cl_query_data_job/src/main/java/com/bfd/mf/job/service/taskCount/TaskCountService.java
545   cl_query_data_job/src/main/java/com/bfd/mf/job/service/upload/UpLoadExcelService.java
214   cl_query_data_job/src/main/java/com/bfd/mf/job/service/upload/UpLoadService.java
321   cl_query_data_job/src/main/java/com/bfd/mf/job/util/DataCheckUtil.java
365   cl_query_data_job/src/main/java/com/bfd/mf/job/util/DateUtil.java
286   cl_query_data_job/src/main/java/com/bfd/mf/job/util/EMailUtils.java
57    cl_query_data_job/src/main/java/com/bfd/mf/job/util/EsUtils.java
451   cl_query_data_job/src/main/java/com/bfd/mf/job/util/EsUtils2.java
239   cl_query_data_job/src/main/java/com/bfd/mf/job/util/ReadLine.java
119   cl_query_data_job/src/main/java/com/bfd/mf/job/util/ZipUtils.java
38    cl_query_data_job/src/main/java/com/bfd/mf/job/worker/AlarmProducer.java
3     cl_query_data_job/src/main/java/com/bfd/mf/job/worker/BacktraceProducer.java
14    cl_query_data_job/src/main/java/com/bfd/mf/job/worker/QueryProducer.java
1     cl_query_data_job/src/main/java/com/bfd/mf/job/worker/ReadWriterOlyDataProducer.java
40    cl_query_data_job/src/main/java/com/bfd/mf/job/worker/SQOutPutProducer.java
4     cl_query_data_job/src/main/java/com/bfd/mf/job/worker/StatisticsProducer.java
.idea/.name
@@ -1 +0,0 @@
-cl_stream_30
.idea/libraries/*.xml — IntelliJ-generated Maven library descriptors. Each file is the standard 13-line <component name="libraryTable"> / <library> entry whose CLASSES, JAVADOC and SOURCES roots point at the matching jar under $MAVEN_REPOSITORY$, so only the library coordinates differ between the files.

Updated descriptors (old -> new coordinates):
  com.alibaba:fastjson                              1.2.6        -> 1.2.60
  com.fasterxml:classmate                           1.3.1        -> 1.4.0
  com.fasterxml.jackson.core:jackson-databind       2.9.5        -> 2.9.6
  com.github.virtuald:curvesapi                     1.04         -> 1.06
  com.squareup.okhttp3:okhttp                       3.9.1        -> 3.6.0
  com.squareup.okio:okio                            1.13.0       -> 1.11.0
  commons-codec:commons-codec                       1.10         -> 1.12
  io.springfox:springfox-core                       2.6.1        -> 2.9.2
  io.springfox:springfox-schema                     2.6.1        -> 2.9.2
  io.springfox:springfox-spi                        2.6.1        -> 2.9.2
  io.springfox:springfox-spring-web                 2.6.1        -> 2.9.2
  io.springfox:springfox-swagger2                   2.6.1        -> 2.9.2
  io.springfox:springfox-swagger-common             2.6.1        -> 2.9.2
  io.swagger:swagger-annotations                    1.5.10       -> 1.5.20
  io.swagger:swagger-models                         1.5.10       -> 1.5.20
  org.apache.commons:commons-collections4           4.1          -> 4.3
  org.apache.poi:poi-ooxml                          3.15         -> 4.1.0
  org.apache.poi:poi-ooxml-schemas                  3.15         -> 4.1.0
  org.apache.xmlbeans:xmlbeans                      2.6.0        -> 3.1.0
  org.mapstruct:mapstruct                           1.0.0.Final  -> 1.2.0.Final

New descriptors:
  com.google.code.findbugs:jsr305:1.3.9
  com.google.errorprone:error_prone_annotations:2.1.3
  com.google.guava:guava:25.0-jre
  com.google.j2objc:j2objc-annotations:1.1
  com.ibm.icu:icu4j:4.6
  com.monitorjbl:xlsx-streamer:2.1.0
  com.rackspace.apache:xerces2-xsd11:2.11.1
  com.rackspace.eclipse.webtools.sourceediting:org.eclipse.wst.xml.xpath2.processor:2.1.100
  com.sun.mail:javax.mail:1.6.2
  edu.princeton.cup:java-cup:10k
  io.springfox:springfox-swagger-ui:2.9.2
  it.sauronsoftware:jave:1.0.2
  javax.activation:activation:1.1
  javax.mail:javax.mail-api:1.6.2
  org.apache.commons:commons-compress:1.18
  org.apache.commons:commons-math3:3.6.1
  org.apache.poi:poi:4.1.0
  org.checkerframework:checker-compat-qual:2.0.0
  org.codehaus.mojo:animal-sniffer-annotations:1.14
  org.slf4j:slf4j-api:1.7.12
  xml-apis:xml-apis:1.4.01
  xml-resolver:xml-resolver:1.2

Removed descriptors:
  com.swagger.ui:swagger-bootstrap-ui:1.8.8
  org.apache.poi:poi:3.15
  stax:stax-api:1.0.1
cl_query_data_job/src/main/java/com/bfd/mf/job/config/AllKeys.java
@@ -0,0 +1,162 @@
package com.bfd.mf.job.config;

import com.bfd.mf.job.util.DateUtil;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

public class AllKeys {
    private static Map<String,Object> map = new HashMap();

    public static Map<String, Object> getMap() {
        return map;
    }

    public static void setMap(Map<String, Object> map) {
        AllKeys.map = map;
    }

    static {
        map.put("_id_","");
        map.put("age","");
        map.put("area","");
        map.put("attitudesCount","");
        map.put("attr","");
        map.put("author","");
        map.put("authorId","");
        map.put("authorLevel","");
        map.put("authornickname","");
        map.put("availability",0);
        map.put("avatar","");
        map.put("brand","");
        map.put("brandId","");
        map.put("cate","");
        map.put("channel","");
        map.put("city","");
        map.put("collectCount",0);
        map.put("commentId","");
        map.put("commentScore",0);
        map.put("commentsCount",0);
        map.put("commentUrl","");
        map.put("content","");
        map.put("contentLength",0);
        map.put("contentSimHash","");
        map.put("contentTag","");
        map.put("country","");
        map.put("crawlDataFlag","");
        map.put("crawlDate",new Date ());
        map.put("crawlDay",0L);
        map.put("crawlTime",0L);
        map.put("crawlTimeStr","");
        map.put("createDate",new Date ());
        map.put("createDay",0L);
        map.put("createTime",0L);
        map.put("createTimeStr","");
        map.put("dataCount",0);
        map.put("dataId","");
        map.put("docId","");
        map.put("docType","");
        map.put("downCnt",0);
        map.put("egc",0);
        map.put("enSource","");
        map.put("expression",new ArrayList<>());
        map.put("extension","");
        map.put("fansCount","");
        map.put("favorCnt",0);
        map.put("filePath",new ArrayList<>());
        map.put("imagePath",new ArrayList<>());
        map.put("videoPath",new ArrayList<>());
        map.put("filePathSize",new ArrayList<>());
        map.put("imagePathSize",new ArrayList<>());
        map.put("videoPathSize",new ArrayList<>());
        map.put("finalPhrase","");
        map.put("firstListBrand","");
        map.put("fiveListBrand","");
        map.put("forumScore","");
        map.put("forwardAttitudesCount",0);
        map.put("forwardAuthor","");
        map.put("forwardAvatar","");
        map.put("forwardCommentsCount",0);
        map.put("forwardContent","");
        map.put("forwardImgs","");
        map.put("forwardPostSource","");
        map.put("forwardPubTime",0L);
        map.put("forwardQuoteCount",0);
        map.put("forwardUrl","");
        map.put("forwardUserId","");
        map.put("forwardUserType",0);
        map.put("forwardUserUrl","");
        map.put("fourListBrand","");
        map.put("friendsCount","");
        map.put("getSource","");
        map.put("hashTag",new ArrayList<>());
        map.put("hlKeywords",new ArrayList<>());
        map.put("impression","");
        map.put("isDownload",false);
        map.put("isVip",0);
        map.put("language","");
        map.put("lastModifiedTime",0L);
        map.put("listBrand","");
        map.put("location","");
        map.put("nomorprice",0);
        map.put("opinions",new ArrayList<>());
        map.put("originalPhrase","");
        map.put("otherSourceJson","");
        map.put("pageCommentCount",0);
        map.put("pageTranspondCount",0);
        map.put("pageType","");
        map.put("pgc",0);
        map.put("pictureList","");
        map.put("places",new ArrayList<>());
        map.put("postCount","");
        map.put("postId","");
        map.put("postSource","");
        map.put("price",0);
        map.put("primary",1);
        map.put("productParameter","");
        map.put("projectName","");
        map.put("promotionInfo","");
        map.put("province","");
        map.put("pubDate",new Date());
        map.put("pubDay", DateUtil.getcurr());
        map.put("pubTime",DateUtil.getcurr());
        map.put("pubTimeStr", DateUtil.getDateTime());
        map.put("quoteCount",0);
        map.put("readCount",0);
        map.put("resolution","");
        map.put("secondListBrand","");
        map.put("sex","");
        map.put("sign","");
        map.put("siteId","");
        map.put("skuProperties","");
        map.put("smallImgs","");
        map.put("source","");
        map.put("sysAbstract","");
        map.put("sysKeywords","");
        map.put("sysSentiment",0.0);
        map.put("threeListBrand","");
        map.put("thumbnails","");
        map.put("title","");
        map.put("titleLength",0);
        map.put("titleSimHash","");
        map.put("translateContent","");
        map.put("translateTitle","");
        map.put("ugc",0);
        map.put("url","");
        map.put("urlHash","");
        map.put("userType","");
        map.put("userUrl","");
        map.put("videoTime","");
        map.put("videoUrl","");
        map.put("avatarPath","");
        map.put("viewCnt",0);
        map.put("channelNum","");
        map.put("crawlDataFlagType","");
        map.put("primaryPost","");
        map.put("dns","");
        map.put("asrText","");
        map.put("ocrText",new ArrayList<>());
    }
}
1116  cl_query_data_job/src/main/java/com/bfd/mf/job/config/BFDApiConfig.java
File diff suppressed because it is too large
cl_query_data_job/src/main/java/com/bfd/mf/job/domain/entity/EmailGroup.java
@@ -0,0 +1,27 @@
package com.bfd.mf.job.domain.entity;

import javax.persistence.Entity;
import javax.persistence.Table;

@Entity
@Table(name = "cl_email_group")
public class EmailGroup extends AbstractEntity {
    private String email;
    private String groupName;

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getGroupName() {
        return groupName;
    }

    public void setGroupName(String groupName) {
        this.groupName = groupName;
    }
}
cl_query_data_job/src/main/java/com/bfd/mf/job/domain/entity/ServiceLoad.java
@@ -0,0 +1,101 @@
package com.bfd.mf.job.domain.entity;

import javax.persistence.Entity;
import javax.persistence.Table;
import java.util.Date;

@Entity
@Table(name = "cl_service_load")
public class ServiceLoad extends AbstractEntity {

    private String channelId;
    private String clientId;
    private Integer status;
    private String serviceUrl;
    private String serviceNames;
    private String token;
    private String serviceCluster;
    private Float serviceStatus;
    private Date updateTime;
    private String updateUser;

    public String getChannelId() {
        return channelId;
    }

    public void setChannelId(String channelId) {
        this.channelId = channelId;
    }

    public String getClientId() {
        return clientId;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public Integer getStatus() {
        return status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    public String getServiceUrl() {
        return serviceUrl;
    }

    public void setServiceUrl(String serviceUrl) {
        this.serviceUrl = serviceUrl;
    }

    public String getServiceNames() {
        return serviceNames;
    }

    public void setServiceNames(String serviceNames) {
        this.serviceNames = serviceNames;
    }

    public String getToken() {
        return token;
    }

    public void setToken(String token) {
        this.token = token;
    }

    public String getServiceCluster() {
        return serviceCluster;
    }

    public void setServiceCluster(String serviceCluster) {
        this.serviceCluster = serviceCluster;
    }

    public Float getServiceStatus() {
        return serviceStatus;
    }

    public void setServiceStatus(Float serviceStatus) {
        this.serviceStatus = serviceStatus;
    }

    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    public String getUpdateUser() {
        return updateUser;
    }

    public void setUpdateUser(String updateUser) {
        this.updateUser = updateUser;
    }
}
cl_query_data_job/src/main/java/com/bfd/mf/job/domain/entity/TaskCount.java
@@ -0,0 +1,36 @@
package com.bfd.mf.job.domain.entity;

import javax.persistence.Entity;
import javax.persistence.Table;

@Entity
@Table(name = "cl_task_count")
public class TaskCount extends AbstractEntity {
    private String countDate;
    private float avgCount;
    private float avgSpeed;

    public String getCountDate() {
        return countDate;
    }

    public void setCountDate(String countDate) {
        this.countDate = countDate;
    }

    public float getAvgCount() {
        return avgCount;
    }

    public void setAvgCount(float avgCount) {
        this.avgCount = avgCount;
    }

    public float getAvgSpeed() {
        return avgSpeed;
    }

    public void setAvgSpeed(float avgSpeed) {
        this.avgSpeed = avgSpeed;
    }
}
@@ -0,0 +1,90 @@
package com.bfd.mf.job.domain.entity;

import javax.persistence.*;
import java.math.BigInteger;

@Entity
@Table(name = "cl_task")
public class UploadTask extends AbstractEntity {

    // private long top;
    private BigInteger subjectId;
    private Integer taskType;    // u
    private Integer crawlStatus; // u
    private String fileName;
    private String crawlDataFlag;

    public String getCrawlDataFlag() { return crawlDataFlag; }
    public void setCrawlDataFlag(String crawlDataFlag) { this.crawlDataFlag = crawlDataFlag; }

    public BigInteger getSubjectId() { return subjectId; }
    public void setSubjectId(BigInteger subjectId) { this.subjectId = subjectId; }

    public int getTaskType() { return taskType; }
    public void setTaskType(int taskType) { this.taskType = taskType; }

    public int getCrawlStatus() { return crawlStatus; }
    public void setCrawlStatus(int crawlStatus) { this.crawlStatus = crawlStatus; }

    public String getFileName() { return fileName; }
    public void setFileName(String fileName) { this.fileName = fileName; }

    // private int del;
    // private String fileRemark;
    // private String appId;
    // private String externalId;
    // private long crawlId;
    // private int siteType;
    // private long siteId;
    // private String cid;
    // private String attachTag;
    // private String crawlKeyword;
    // private String crawlPageTypes;
    // private String crawlContentKey;
    // private long crawlMode;
    // private int crawlCyclicityTime; // crawl_cyclicity_time
    // private long crawlPeriodHour;
    // private long maxPageNum;
    // private long secondaryDataMaxPage;
    // private BigInteger crawlStartTime;
    // private BigInteger crawlEndTime;
    // private String crawlDataFlag;
    // private BigInteger dataTotal;
    // private BigInteger todayDataTotal;
    // private Integer cacheNum;
    // private java.sql.Timestamp createTime;
    // private String createUser;
    // private String createUserId;
    // private Timestamp updateTime;
    // private String updateUser;
    // private String updateUserId;

}
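Editor's note: taskType and crawlStatus are boxed Integer fields, but their getters return primitive int, so a row with a NULL column would throw a NullPointerException on unboxing. A minimal, hedged sketch of a null-safe variant (not part of this change set; the default value 0 is an assumption):

    // Sketch only: null-safe alternative to the primitive-returning getter.
    public int getTaskTypeOrDefault() {
        return taskType == null ? 0 : taskType; // avoids NPE when the DB column is NULL
    }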
@@ -0,0 +1,14 @@
package com.bfd.mf.job.domain.repository;

import com.bfd.mf.job.domain.entity.EmailGroup;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;

import java.util.List;

public interface EmailGroupRepository extends CrudRepository<EmailGroup, Long> {

    @Query(value = "SELECT * FROM cl_email_group WHERE id IN (SELECT email_group_id from cl_site_email_res WHERE site_id = (SELECT id FROM `cl_site` WHERE is_usable = 0 AND cid = ?1));", nativeQuery = true)
    List<EmailGroup> getEmailGroupsByCid(String cid);

}
@@ -0,0 +1,17 @@
package com.bfd.mf.job.domain.repository;

import com.bfd.mf.job.domain.entity.ServiceLoad;
import com.bfd.mf.job.domain.entity.TaskCount;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.transaction.annotation.Transactional;

public interface ServiceLoadRepository extends CrudRepository<ServiceLoad, Long> {

    @Modifying
    @Transactional(rollbackFor = Exception.class)
    @Query(value = "update cl_service_load set service_status =?2 ,update_time = now() where id =?1 ", nativeQuery = true)
    void updateTaskCount(Integer id, Float serviceStatus);

}
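Editor's note: the positional parameters bind in query order, so ?1 is the row id and ?2 the new service_status. A hedged usage sketch (the values are made up):

    // Sketch: set service_status of cl_service_load row 3 to 42.5 and refresh update_time.
    serviceLoadRepository.updateTaskCount(3, 42.5F);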
@@ -0,0 +1,7 @@
package com.bfd.mf.job.domain.repository;

import com.bfd.mf.job.domain.entity.TaskCount;
import org.springframework.data.repository.CrudRepository;

public interface TaskCountRepository extends CrudRepository<TaskCount, Long> {
}
@@ -0,0 +1,100 @@
package com.bfd.mf.job.domain.repository;

import com.bfd.mf.job.domain.entity.UploadTask;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.transaction.annotation.Transactional;

import java.util.List;

public interface UploadTaskRepository extends CrudRepository<UploadTask, Long> {

    @Query(value = "SELECT id,subject_id,task_type,crawl_status,file_name,crawl_data_flag,del " +
            "from cl_task " +
            "WHERE del = 0 AND task_type = 3 AND crawl_status=0 limit 1", nativeQuery = true)
    List<UploadTask> getTaskNeedUpLoad();

    // @Query(value = "SELECT id,subject_id,task_type,crawl_status,crawl_data_flag,del from cl_task WHERE del = 0 AND task_type = 3 AND crawl_status=1 ",nativeQuery = true)
    // List<UploadTask> getTaskNeedUpLoad();

    // @Query(value = " SELECT SUM(data_total) FROM cl_task WHERE del=0 AND subject_id=?1 AND task_type=?2",nativeQuery = true)
    // Long findDataTotalBySbujectIdAndTaskType(BigInteger subjectId,int taskType);
    //
    // @Query(value = " SELECT SUM(today_data_total) FROM cl_task WHERE del=0 AND subject_id=?1 AND task_type=?2",nativeQuery = true)
    // Long findTodayDataTotalBySbujectIdAndTaskType(BigInteger subjectId,int taskType);

    /**
     * Update the data-pull count (cache_num) of each task.
     */
    @Modifying
    @Transactional(rollbackFor = Exception.class)
    @Query(value = "update cl_task set cache_num=?1 where id=?2", nativeQuery = true)
    Integer updateStatus(int cache_num, long id);

    /**
     * Optimistic lock.
     */
    @Modifying
    @Transactional(rollbackFor = Exception.class)
    @Query(value = "update cl_task set cache_num=?1 where id=?2 and cache_num=?3", nativeQuery = true)
    Integer tryLock(Integer newStatus, long id, Integer oldStatus);

    /**
     * Update the statistics of each task.
     */
    @Modifying
    @Transactional(rollbackFor = Exception.class)
    @Query(value = "update cl_task set data_total =?2 , today_data_total =?3 where id =?1 ", nativeQuery = true)
    void updateTaskCount(Long id, Long totalCount, Long todayCount);

    @Modifying
    @Transactional(rollbackFor = Exception.class)
    @Query(value = "update cl_task set crawl_status =?2 ,data_total = ?3 ,crawl_start_time = ?4 , crawl_end_time = ?5 where id =?1 ", nativeQuery = true)
    void updateCrawlStatus(long taskId, int crawlStatus, int dataTotal, long crawlStartTime, long crawlEndTime);

    //
    // /**
    //  * Update progress.
    //  */
    // @Modifying
    // @Transactional(rollbackFor = Exception.class)
    // @Query(value = "update cl_label_backtrace_task set processed=processed+:processed,satisfied=satisfied+:satisfied,updated_time=:updatedTime where id=:id", nativeQuery = true)
    // Integer increaseStat(@Param("processed") long processed, @Param("satisfied") long satisfied, @Param("updatedTime") long updatedTime, @Param("id") long id);
    //
    // /**
    //  * Set progress.
    //  */
    // @Modifying
    // @Transactional(rollbackFor = Exception.class)
    // @Query(value = "update cl_label_backtrace_task set progress=:progress,updated_time=:updatedTime where id=:id and progress <= :progress", nativeQuery = true)
    // Integer setProgress(@Param("progress") double progress, @Param("updatedTime") long updatedTime, @Param("id") long id);
    //
    // /**
    //  * Increase progress.
    //  *
    //  * @Query(value = "update cl_label_backtrace_task set progress=progress+:progress,updated_time=:updatedTime where id=:id", nativeQuery = true)
    //  */
    // @Modifying
    // @Transactional(rollbackFor = Exception.class)
    // @Query(value = "update cl_label_backtrace_task set progress=progress+:progress,updated_time=:updatedTime where id=:id", nativeQuery = true)
    // Integer increaseProgress(@Param("progress") double progress, @Param("updatedTime") long updatedTime, @Param("id") long id);
    //
    //
    // /**
    //  * Reset status: if status=2, updated_time < the given lastUpdatedTime, and retry_times <= max_retry_times, reset status to 1.
    //  */
    // @Modifying
    // @Transactional(rollbackFor = Exception.class)
    // @Query(value = "update cl_label_backtrace_task set status=1 where updated_time<=?1 and status=2 and retry_times<=max_retry_times", nativeQuery = true)
    // Integer resetStatus(Long lastUpdatedTime);

}
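Editor's note: tryLock is the optimistic-lock primitive here; a task is claimed only when the conditional UPDATE on cache_num changes exactly one row. A hedged caller sketch (the 0 -> 1 transition and the getId() accessor from AbstractEntity are assumptions, not part of this diff):

    // Sketch: claim an upload task by swapping cache_num from 0 to 1.
    Integer rows = uploadTaskRepository.tryLock(1, task.getId(), 0);
    boolean acquired = rows != null && rows == 1; // exactly one row updated => this worker owns the task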
1315
cl_query_data_job/src/main/java/com/bfd/mf/job/service/BacktraceService.java
File diff suppressed because it is too large
@@ -1,207 +0,0 @@
package com.bfd.mf.job.service;

import com.bfd.mf.job.config.ESConstants;
import com.bfd.mf.job.domain.entity.Task;
import com.bfd.mf.job.util.EsUtils;
import org.apache.lucene.index.Term;
import org.elasticsearch.index.query.*;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;

@Service
public class EsQueryMiniService {
    private static Logger logger = LoggerFactory.getLogger(EsQueryMiniService.class);
    private static String clSubject = "cl_major_";
    private static String subjectPre = "major";

    /**
     * Count the total volume per channel under each subject.
     */
    public Map<String,Long> getSubjectChannelStatistics(String clusterName, String indexName) {
        Map<String,Long> resultMap = new HashMap<>();
        try {
            if (indexName.contains(subjectPre)) {
                boolean isExists = EsUtils.indexExists(clusterName, indexName);
                if (isExists) {
                    BoolQueryBuilder qb = QueryBuilders.boolQuery();
                    AggregationBuilder ab = EsUtils.getSubjectChannelAB(ESConstants.DOC_TYPE);
                    String indexNames[] = {indexName};
                    Terms result = EsUtils.queryTag(clusterName, indexNames, qb, ab, ESConstants.DOC_TYPE + "Tag");
                    resultMap = EsUtils.parseTerms(result);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return resultMap;
    }

    /**
     * Count the volume added today per channel under each subject.
     */
    public Map<String,Long> getSubjectChannelTodayStatistics(String clusterName, String indexName) {
        Map<String,Long> resultMap = new HashMap<>();
        try {
            if (indexName.contains(subjectPre)) {
                boolean isExists = EsUtils.indexExists(clusterName, indexName);
                if (isExists) {
                    BoolQueryBuilder qb = QueryBuilders.boolQuery();
                    long current = System.currentTimeMillis();
                    long zero = current/(1000*3600*24)*(1000*3600*24) - TimeZone.getDefault().getRawOffset();
                    Long startTime = new Timestamp(zero).getTime();
                    RangeQueryBuilder rangeQueryBuilder = QueryBuilders
                            .rangeQuery(ESConstants.CRAWLTIME)
                            .gte(startTime)
                            .lt(current);
                    qb.must(rangeQueryBuilder);
                    AggregationBuilder ab = EsUtils.getSubjectChannelAB(ESConstants.DOC_TYPE);
                    String indexNames[] = {indexName};
                    Terms result = EsUtils.queryTag(clusterName, indexNames, qb, ab, ESConstants.DOC_TYPE + "Tag");
                    resultMap = EsUtils.parseTerms(result);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return resultMap;
    }

    /**
     * Count today's totals for the three crawlDataFlag types under each subject.
     */
    public Map<String,Long> getSubjectCrawlDataFlagStatistics(String clusterName, String indexName) {
        Map<String,Long> resultMap = new HashMap<>();
        try {
            if (indexName.contains(subjectPre)) {
                boolean isExists = EsUtils.indexExists(clusterName, indexName);
                if (isExists) {
                    BoolQueryBuilder qb = QueryBuilders.boolQuery();
                    AggregationBuilder ab = EsUtils.getSubjectChannelAB(ESConstants.CRAWLDATAFLAG);
                    String indexNames[] = {indexName};
                    Terms result = EsUtils.queryTag(clusterName, indexNames, qb, ab, ESConstants.CRAWLDATAFLAG + "Tag");
                    Map<String,Long> termsMap = EsUtils.parseTerms(result);
                    resultMap = EsUtils.getResultMap(termsMap);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return resultMap;
    }

    /**
     * Count the increments for the three crawlDataFlag types under each subject.
     */
    public Map<String,Long> getSubjectCrawlDataFlagTodayStatistics(String clusterName, String indexName) {
        Map<String,Long> resultMap = new HashMap<>();
        try {
            if (indexName.contains(subjectPre)) {
                boolean isExists = EsUtils.indexExists(clusterName, indexName);
                if (isExists) {
                    BoolQueryBuilder qb = QueryBuilders.boolQuery();
                    long current = System.currentTimeMillis();
                    long zero = current/(1000*3600*24)*(1000*3600*24) - TimeZone.getDefault().getRawOffset();
                    Long startTime = new Timestamp(zero).getTime();
                    RangeQueryBuilder rangeQueryBuilder = QueryBuilders
                            .rangeQuery(ESConstants.CRAWLTIME)
                            .gte(startTime)
                            .lt(current);
                    qb.must(rangeQueryBuilder);
                    AggregationBuilder ab = EsUtils.getSubjectChannelAB(ESConstants.CRAWLDATAFLAG);
                    String indexNames[] = {indexName};
                    Terms result = EsUtils.queryTag(clusterName, indexNames, qb, ab, ESConstants.CRAWLDATAFLAG + "Tag");
                    Map<String,Long> termsMap = EsUtils.parseTerms(result);
                    resultMap = EsUtils.getResultMap(termsMap);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return resultMap;
    }

    public Map<String,Long> getTaskCount(String clusterName, Long taskId, Task task, String crawlDataFlag) {
        Map<String,Long> countMap = new HashMap<>();
        String indexName = clSubject + task.getSubjectId(); // subject_id
        String cid = task.getCid().toLowerCase();
        Long crawlStartTime = task.getCrawlStartTime().longValue();
        Long crawlEndTime = task.getCrawlEndTime().longValue();
        // String crawlDataFlag = task.getCrawlDataFlag();
        if (indexName.contains(subjectPre)) {
            boolean isExists = EsUtils.indexExists(clusterName, indexName);
            if (isExists) {
                BoolQueryBuilder qb = QueryBuilders.boolQuery();
                // filter by task id
                TermQueryBuilder cidTermQueryBuilder = QueryBuilders.termQuery(ESConstants.EN_SOURCE, cid);
                TermQueryBuilder taskIdTermQueryBuilder = QueryBuilders.termQuery(ESConstants.CRAWLDATAFLAG, crawlDataFlag);
                qb.must(taskIdTermQueryBuilder).must(cidTermQueryBuilder);
                // filter by time range
                BoolQueryBuilder shouldbq = QueryBuilders.boolQuery();
                RangeQueryBuilder rangeQueryBuilder = QueryBuilders
                        .rangeQuery(ESConstants.PUBTIME)
                        .gte(crawlStartTime)
                        .lt(crawlEndTime);
                // user data
                BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
                TermQueryBuilder primartTermQueryBuilder = QueryBuilders.termQuery(ESConstants.PRIMARY, 2);
                // TermQueryBuilder pubTimeTermQueryBuilder = QueryBuilders.termQuery(ESConstants.PUBTIME,0);
                boolQueryBuilder.must(primartTermQueryBuilder);
                shouldbq.should(boolQueryBuilder).should(rangeQueryBuilder);
                qb.must(shouldbq);

                logger.info("QB1 : indexName: {}. taskId : {}.{\"query\": {}}.", indexName, taskId, qb.toString().replace("\n","").replace("\r","").replace(" ",""));

                Long count = EsUtils.queryCount(clusterName, indexName, qb);
                countMap.put("totalCount", count);
                long current = System.currentTimeMillis();
                long zero = current/(1000*3600*24)*(1000*3600*24) - TimeZone.getDefault().getRawOffset();
                Long startTime = new Timestamp(zero).getTime();
                RangeQueryBuilder rangeQueryBuilder2 = QueryBuilders
                        .rangeQuery(ESConstants.CRAWLTIME)
                        .gte(startTime).lt(current);
                qb.must(rangeQueryBuilder2);
                logger.info("QB2 : indexName: {}. taskId : {}.{\"query\": {}}.", indexName, taskId, qb.toString().replace("\n","").replace("\r","").replace(" ",""));
                Long todayCount = EsUtils.queryCount(clusterName, indexName, qb);
                countMap.put("todayCount", todayCount);
            }
        }
        return countMap;
    }

    // public Long getTaskTodayCount(String clusterName,Integer id, Map<String, Object> task) {
    //     Long count = 0L;
    //     String indexName = clSubject + (String) task.get("subject_id");
    //     String cid = (String) task.get(ESConstants.CID);
    //     Long crawlStartTime = (Long) task.get("crawl_start_time");
    //     Long crawlEndTime = (Long) task.get("crawl_end_time");
    //     String crawlDataFlag = (String) task.get("crawl_data_flag");
    //
    //     if(indexName.contains(subjectPre)) {
    //         boolean isExists = EsUtils.indexExists(clusterName, indexName);
    //         if (isExists) {
    //             BoolQueryBuilder qb = QueryBuilders.boolQuery();
    //             long current=System.currentTimeMillis();
    //             long zero=current/(1000*3600*24)*(1000*3600*24)-TimeZone.getDefault().getRawOffset();
    //             Long startTime = new Timestamp(zero).getTime();
    //             RangeQueryBuilder rangeQueryBuilder = QueryBuilders
    //                     .rangeQuery(ESConstants.CRAWLTIME)
    //                     .gte(startTime)
    //                     .lt(current);
    //             qb.must(rangeQueryBuilder);
    //             // Terms result = EsUtils.queryTag(clusterName, indexName, qb, ab, ESConstant.DOC_TYPE + "Tag");
    //             // resultMap = parseTerms(result);
    //         }
    //     }
    //     return count;
    // }
}
@@ -0,0 +1,241 @@
package com.bfd.mf.job.service.alarm;

import com.alibaba.fastjson.JSONObject;
import com.bfd.mf.job.config.AppConfig;
import com.bfd.mf.job.config.ESConstants;
import com.bfd.mf.job.domain.entity.EmailGroup;
import com.bfd.mf.job.domain.entity.TaskCount;
import com.bfd.mf.job.domain.repository.EmailGroupRepository;
import com.bfd.mf.job.domain.repository.ServiceLoadRepository;
import com.bfd.mf.job.domain.repository.TaskCountRepository;
import com.bfd.mf.job.domain.repository.TaskRepository;
import com.bfd.mf.job.util.DateUtil;
import com.bfd.mf.job.util.EMailUtils;
import com.bfd.mf.job.util.EsUtils;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import java.math.BigInteger;
import java.util.*;

import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;

@Service
public class AlarmService {
    private static final Logger LOGGER = LoggerFactory.getLogger(AlarmService.class);
    @Autowired
    private AppConfig config;
    @Autowired
    private TaskRepository taskRepository;
    @Autowired
    private TaskCountRepository taskCountRepository;
    @Autowired
    private ServiceLoadRepository serviceLoadRepository;
    @Autowired
    private EmailGroupRepository emailGroupRepository;

    @PostConstruct
    public void init() {
        // register the clusters that queries read from
        // EsUtils.registerCluster(config.esNormalClusterName(), config.esNormalAddress()); // es-source in the config file
        // EsUtils.registerCluster(config.esMiniClusterName(), config.esMiniAddress());     // es-target in the config file
        EsUtils.registerCluster(config.esLogstashClusterName(), config.esLogstashAddress());
    }

    /*
     * Try to acquire a permit within the given time; if acquired, return immediately,
     * otherwise run the flow below.
     */
    // public void tryAcquire() {
    //     long start = System.currentTimeMillis();
    //     LOGGER.info("------------------------------------------------------------------ AlarmService ------------------------------------------------------");
    //     long end = System.currentTimeMillis();
    //     LOGGER.info("TaskCountService finish, took:{} ms.",(end - start));
    //
    // }

    public void produce() {
        long start = System.currentTimeMillis();
        LOGGER.info("------------------------------------------------------------------ AlarmService ------------------------------------------------------");
        // index name
        String date = DateUtil.parseDateByday2(new Date().getTime());
        String index = ESConstants.LOGSTASH + date;

        // System.out.println(index); // logstash-2021.05.20 logstash-2021.05.21
        String startTime = DateUtil.getDateTime(System.currentTimeMillis());
        String endTime = DateUtil.getDateTime(System.currentTimeMillis() - 60 * 30 * 1000);
        String type = "datasave";
        QueryBuilder queryBuilder = getQueryBuilder(startTime, endTime, type);

        String clusterName = config.esLogstashClusterName();
        String sourceIndices[] = {index};
        Map<String,Integer> errorCid = new HashMap<>();
        // scroll query
        EsUtils.scrollQuery(clusterName, sourceIndices, "doc",
                queryBuilder, ESConstants.SCROLL_PAGE_SIZE, ESConstants.SCROLL_MINUTES,
                dataList -> {
                    try {
                        if (dataList.size() == 0) {
                            System.out.println("No matching comment data was found");
                            return;
                        }
                        for (JSONObject data : dataList) {
                            Map<String,Object> dataMap = data;
                            String cid = (String) dataMap.get("cid");
                            if (null == cid) {
                                // System.out.println(data);
                                String message = (String) dataMap.get("message");
                                if (message.contains("cid")) {
                                    cid = message.split("cid=")[1];
                                    // System.out.println("--- " + cid);
                                    if (cid.contains("}")) {
                                        cid = cid.split("}")[0];
                                    }
                                }
                            }
                            if (null != cid && cid.contains("}")) {
                                cid = cid.split("}")[0];
                            }
                            // System.out.println(cid);
                            if (null == cid) {
                                System.out.println(data);
                            }
                            if (errorCid.containsKey(cid)) {
                                Integer errorNum = errorCid.get(cid);
                                errorCid.put(cid, errorNum + 1);
                            } else {
                                errorCid.put(cid, 1);
                            }
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                });

        System.out.println(JSONObject.toJSONString(errorCid));

        // walk the counting map and raise an alarm for the entries whose value is large enough
        for (Map.Entry<String, Integer> entry : errorCid.entrySet()) {
            String cid = entry.getKey();
            Integer errorNum = entry.getValue();
            if (errorNum > 50) {
                System.out.println("This site failed parsing more than 15 times within 30 minutes " + cid + " : " + errorNum);
                saveToAlarm(cid, errorNum);
            }
        }

        /**
         * 1. The time range is the last half hour.
         * 2.
         */

        // average the download load rate queried over the last half hour, then update the values in the load table
        List<Float> serviceStatusList = new ArrayList<>();
        serviceStatusList.add(32.6F);
        serviceStatusList.add(51F);
        serviceStatusList.add(0.0F);
        serviceStatusList.add(0.0F);
        serviceStatusList.add(18.3F);
        serviceStatusList.add(23.3F);
        serviceStatusList.add(64F);
        serviceStatusList.add(73F);
        serviceStatusList.add(44.6F);
        serviceStatusList.add(38F);

        for (int i = 0; i < 10; i++) {
            serviceLoadRepository.updateTaskCount(i + 1, serviceStatusList.get(i));
        }
        long end = System.currentTimeMillis();
        LOGGER.info("ServiceLoadService finish, took:{} ms.", (end - start));
    }

    private void saveToAlarm(String cid, Integer errorNum) {
        /**
         * `alarm_config` varchar(255) NOT NULL COMMENT 'alarm task configuration',
         */
        Integer alarm_tag = 3;
        Integer alarm_reason = 1;
        String alarm_message = "[chenrui.li] The parse failure count of this site is: " + errorNum;
        String alarm_task_url = "";     // the exact task cannot be determined
        String alarm_task_content = ""; // the exact task cannot be determined
        String alarm_cid = cid;
        String alarm_config = "";
        String alarm_trigger_time = DateUtil.getDateTime(new Date().getTime());
        Date alarm_assign_time = null;
        Date alarm_finish_time = null;
        String alarm_handler = "jing.du@percent.cn";
        List<String> emailList = new ArrayList<>();
        emailList.add(alarm_handler);
        // look up the site's handler by cid
        String email_addr = getEmailByCid(cid);
        String opinion = "";
        Integer status = 2;
        String create_time = DateUtil.getDateTime(new Date().getTime());
        Date update_time = null;
        int del = 0;

        String sql = "INSERT INTO cl_alarm (alarm_tag,alarm_reason,alarm_message,alarm_task_url,alarm_task_content,alarm_cid," +
                "alarm_config,alarm_trigger_time,alarm_handler,status,create_time,del) " +
                "values ("+alarm_tag+","+alarm_reason+",'"+alarm_message+"','"+alarm_task_url+"','"+alarm_task_content+"'," +
                "'"+alarm_cid+"','"+alarm_config+"','"+alarm_trigger_time+"','"+alarm_handler+"',"+status+",'"+create_time+"',"+del+") ";
        System.out.println(sql);

        Map<String, Object> siteMessage = new HashMap<String, Object>();
        siteMessage.put("cid", cid);
        // siteMessage.put("categoryName", pagetype);
        // siteMessage.put("sample", crawldataflag);

        EMailUtils.getInstance().sendEmail(6, siteMessage, emailList, "30");
    }

    private String getEmailByCid(String cid) {
        List<EmailGroup> emails = emailGroupRepository.getEmailGroupsByCid(cid);
        String alarmEmail = "";
        String emailGroup = emails.get(0).getEmail();
        if (emailGroup.contains(",")) {
            alarmEmail = emailGroup.split(",")[0];
        } else {
            alarmEmail = emailGroup;
        }
        return alarmEmail;
    }

    private QueryBuilder getQueryBuilder(String nowTime, String befor30min, String type) {
        BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
        try {
            // time filter
            boolean boo = true;
            QueryBuilder crawlTimeRange = buildRangeQueryBuilder(
                    "crawl_time.keyword", befor30min, nowTime, boo, boo);
            boolQueryBuilder.must(crawlTimeRange);
            // parse-type filter
            QueryBuilder termQueryBuilder = QueryBuilders.termsQuery("type", type);
            boolQueryBuilder.must(termQueryBuilder);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return boolQueryBuilder;
    }

    private QueryBuilder buildRangeQueryBuilder(String field, Object startVal, Object endVal, Boolean isIncludeLower, Boolean isIncludeUpper) {
        return rangeQuery(field)
                .from(startVal)
                .to(endVal)
                .includeLower(isIncludeLower)
                .includeUpper(isIncludeUpper);
    }
}
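Editor's note: the containsKey/put counting loop in produce() can be written more compactly with Map.merge; a hedged, behavior-equivalent sketch:

    // Sketch: accumulate parse-failure counts per cid (same effect as the if/else branch above).
    errorCid.merge(cid, 1, Integer::sum);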
@@ -0,0 +1,230 @@
package com.bfd.mf.job.service.backtrace;

import com.alibaba.fastjson.JSONObject;
import com.bfd.crawler.elasti.ElastiProducer;
import com.bfd.crawler.utils.JsonUtils;
import com.bfd.mf.job.config.AppConfig;
import com.bfd.mf.job.config.ESConstants;
import com.bfd.mf.job.domain.entity.Subject;
import com.bfd.mf.job.domain.repository.SubjectRepository;
import com.bfd.mf.job.util.EsUtils;
import com.bfd.mf.job.util.Kafka010Utils;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.RateLimiter;
import org.assertj.core.util.Lists;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.*;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

@Service
public class BacktraceService {
    private static final Logger LOGGER = LoggerFactory.getLogger(BacktraceService.class);
    private static final long PERIOD_MILLS = 1 * 3600 * 1000L;
    private static BlockingQueue<Map<Long, List<? extends Number>>> P_TASK_CACHE_RANGE = new LinkedBlockingQueue<>();
    // private static Map<Long, BacktraceTask> C_UNNORMAL_TASK_CACHE = new ConcurrentHashMap<>();
    private static Map<Long, Double> C_TASK_PROGRESS_CACHE = Maps.newHashMap();
    private static Lock C_TASK_PROGRESS_CACHE_LOCK = new ReentrantLock();
    private static Map<Long, Long> C_TASK_PROCESSED_CACHE = Maps.newHashMap();
    private static Map<Long, Long> C_TASK_SATISFIED_CACHE = Maps.newHashMap();
    private static Map<Long, Long> C_TASK_SEGMENT_CACHE = Maps.newHashMap();
    private static Lock C_TASK_STAT_CACHE_LOCK = new ReentrantLock();
    private static BlockingQueue<EsUtils.BulkItem> DATA_CACHE = new LinkedBlockingQueue<>(10240);
    private static BlockingQueue<EsUtils.BulkItem> NEW_DATA_CACHE = new LinkedBlockingQueue<>(10240);
    private RateLimiter dataRateLimiter;
    private RateLimiter pRateLimiter;
    private RateLimiter cRateLimiter;
    private static int subjectEsNum = 1;
    private static String indexType = "docs";
    private static int bussinessType = 1;

    @Autowired
    private AppConfig config;
    @Autowired
    private SubjectRepository subjectRepository;

    @PostConstruct
    public void init() {
        // register the clusters that queries read from
        EsUtils.registerCluster(config.esNormalClusterName(), config.esNormalAddress()); // es-source in the config file
        EsUtils.registerCluster(config.esMiniClusterName(), config.esMiniAddress());     // es-target in the config file
        pRateLimiter = RateLimiter.create(1.0D / config.getPeriodS());
        cRateLimiter = RateLimiter.create(1.0D / config.getPeriodS());
        dataRateLimiter = RateLimiter.create(config.esMiniBulkRate());
        // kafkaProducer = Kafka010Utils.getProducer(config.getBrokerList());
    }

    /*
     * Try to acquire a permit within the given time; if acquired, return immediately,
     * otherwise run the flow below.
     */
    public void tryAcquire() {
        if (!pRateLimiter.tryAcquire()) { // try to obtain one permit within the given time; returns false if it cannot
            return;
        }
        // query cl_subject for rows with status=0, del=0, cache_recalculate_status=1
        List<Map<String,Object>> subjectTaskList = subjectRepository.querySubjectTaskByCacheRecalculateStatus();
        for (Map<String,Object> subject : subjectTaskList) {
            System.out.println("Task that needs its data pulled: " + JSONObject.toJSONString(subject));
            Map<Long, List<? extends Number>> cache = Maps.newHashMap();
            Long subjectId = Long.valueOf(subject.get(ESConstants.ID).toString());
            subjectRepository.updateCacheRecalculateStatus(subjectId, 2);
            cache.put(subjectId, Lists.newArrayList(0L, 0L, 1.0, 1L, 1L));
            try {
                P_TASK_CACHE_RANGE.put(cache);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
        // // query cl_label_backtrace_task for tasks with status = 1 and retry_times <= max_retry_times
        // List<LabelBacktraceTask> backtraceTaskList = SubjectRepository.findAllByStatus(1);
        // for (LabelBacktraceTask task : taskList) {
        //     if (!isAvailable(task)) {
        //         continue;
        //     }
        //     LOGGER.info("Executing task:{}.", JSON.toJSONString(task));
        //     failureService.createTable(task.getFailureTableName());
        //     Long totalSegment = 1L; // (task.getDateEnd() - task.getDateStart()) / PERIOD_MILLS; // 3600000
        //     Long segment = 1L;
        //     Double progressFactor = 1.0 / totalSegment;
        // }
    }

    public void produce() {
        Map<Long, List<? extends Number>> range = P_TASK_CACHE_RANGE.poll(); // poll --> returns null if the queue is empty
        if (Objects.isNull(range)) {
            return;
        }
        Long subjectId = 0L;

        for (Map.Entry<Long, List<? extends Number>> entry : range.entrySet()) {
            subjectId = entry.getKey();
        }
        Subject subject = subjectRepository.findById(subjectId).get();

        String clusterName = config.esNormalClusterName();
        String subjectIndexName = config.getIndexNamePre() + subjectId;

        long fromMills = subject.getCacheStart().longValue();
        long toMills = subject.getCacheEnd().longValue();
        Long year = config.getQueryDataYearStarttime();
        String[] sourceIndices = EsUtils.getIndices(AppConfig.CL_INDEX, "_",
                fromMills, toMills, AppConfig.DATE_FORMAT, config.esNormalUpper(),
                config.esNormalStandby(), year);

        String esQuery = subject.getEsQuery();
        BoolQueryBuilder qb = EsUtils.getBoolQueryBuilderFromSqlStr(esQuery);

        LOGGER.info("Index range: " + sourceIndices[0] + " ~ " + sourceIndices[sourceIndices.length - 1] + " ; QB : \n{}.", qb);

        Long finalSubjectId = subjectId;
        // i / sourceIndices.length is the current progress percentage
        Double percent = 0.0;
        for (int i = 0; i < sourceIndices.length; i++) {
            String index[] = {sourceIndices[i]};
            percent = (i * 1.0) / sourceIndices.length;
            EsUtils.scrollQuery(clusterName, index, ESConstants.INDEX_TYPE,
                    qb, ESConstants.SCROLL_PAGE_SIZE, ESConstants.SCROLL_MINUTES,
                    dataList -> {
                        try {
                            for (JSONObject data : dataList) {
                                data.put(ESConstants.SUBJECT_ID, finalSubjectId);
                                String url = data.getString(ESConstants.URL);
                                // System.out.println(url);
                                String product_id = getProductId(data, url);
                                data.put(ESConstants.CRAWLDATAFLAGTYPE, 2);
                                data.put(ESConstants.CRAWLDATAFLAG, "url:" + product_id);
                                // System.out.println(subjectIndexName + " data --- " + data.get(ESConstants.DOC_ID));
                                writerToMajorES(subjectIndexName, data);
                            }
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    });
            percent = Double.valueOf(String.format("%.3f", percent));
            // once the data has been pulled, update the cache_recalculate_status flag of the subject in cl_subject,
            // and also update the status flag of each task in cl_task
            if (percent >= 0.996) {
                int status = 3;
                subjectRepository.updateCacheRecalculateStatus(subjectId, status);
                subjectRepository.updateTaskStatus(subjectId, status);
            }
        }
        LOGGER.info("******** ******** subjectIndexName : " + subjectIndexName + " all data for this subject has been pulled; its status can be updated now!");
    }

    private String getProductId(JSONObject data, String url) {
        String product_id = data.getString(ESConstants.PRODUCT_ID);
        try {
            if (data.get(ESConstants.EN_SOURCE).equals(ESConstants.TMALL)) {
                if (url.contains("&id=")) {
                    product_id = url.split("&id=")[1].split("&")[0].replace("/", "*");
                } else { // https://detail.tmall.com/item.htm?id=582242698961&rn=08db719e4a7ee5b6d4f5b58825d1f261&abbucket=20
                    product_id = url.split("/?id=")[1].split("&")[0].replace("/", "*");
                }
            }
            if (data.get(ESConstants.EN_SOURCE).equals(ESConstants.EJINGDONG)) {
                product_id = url.split(".html")[0].split("item.jd.")[1].replace("/", "*");
            }
            if (data.get(ESConstants.EN_SOURCE).equals(ESConstants.TAOBAO)) {
                if (url.contains("&id=")) {
                    product_id = url.split("&id=")[1].split("&")[0].replace("/", "*");
                } else {
                    product_id = url.split("/?id=")[1].split("&")[0].replace("/", "*");
                }
            }
            if (data.get(ESConstants.EN_SOURCE).equals(ESConstants.WEIPINHUI)) {
                product_id = url.split(".vip.com/")[1].split(".html")[0].replace("/", "*");
            }
            if (data.get(ESConstants.EN_SOURCE).equals(ESConstants.EJUMEI)) {
                product_id = url.split("com/")[1].split(".html")[0].replace("/", "*");
            }
            if (data.get(ESConstants.EN_SOURCE).equals(ESConstants.SUNING)) {
                product_id = url.split("suning.com/")[1].split(".html")[0].replace("/", "*");
            }
            if (data.get(ESConstants.EN_SOURCE).equals(ESConstants.SEPHORA)) {
                product_id = url.split(".cn")[1].split(".html")[0].replace("/", "*");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return product_id;
    }

    // public static void main(String[] args) {
    //     BacktraceService b = new BacktraceService();
    //     String url = "https://detail.tmall.com/item.htm?id=582242698961&rn=08db719e4a7ee5b6d4f5b58825d1f261&abbucket=20";
    //     JSONObject data = new JSONObject();
    //     data.put("enSource","tmall");
    //     String id = b.getProductId(data,url);
    //     System.out.println(id);
    // }

    private static void writerToMajorES(String indexName, Map<String, Object> responseMap) {
        System.out.println("========================== writing to the [subject] ES : ==========" + indexName + " - " + responseMap.get("docId"));
        ElastiProducer elastiProducer = ElastiProducer.getInstance(bussinessType, subjectEsNum, indexName, indexType);
        elastiProducer.sendMessageToEs(JsonUtils.toJSONString(responseMap));
    }

    // public static void main(String[] args) {
    //     String url = "https://detail.tmall.com/item.htm?id=598372446974&skuId=4336725650385&user_id=2549841410&cat_id=50031573&is_b=1&rn=66410a97e53d6338e3bff62cfd307a80";
    //     String product_id = "";
    //     if(url.contains("&id=")) {
    //         product_id = url.split("&id=")[1].split("&")[0].replace("/", "*");
    //     }else{
    //         product_id = url.split("/?id=")[1].split("&")[0].replace("/", "*");
    //     }
    //     System.out.println(product_id);
    // }

}
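Editor's note: as a quick sanity check of the URL-to-product-id logic, here is a hedged, standalone version of the tmall/taobao branch run against the sample URL from the commented-out main above; it should print 582242698961.

public class ProductIdCheck {
    public static void main(String[] args) {
        String url = "https://detail.tmall.com/item.htm?id=582242698961&rn=08db719e4a7ee5b6d4f5b58825d1f261&abbucket=20";
        String productId;
        if (url.contains("&id=")) {
            productId = url.split("&id=")[1].split("&")[0].replace("/", "*");
        } else {
            // String.split takes a regex, so "/?id=" means an optional '/' followed by "id="
            productId = url.split("/?id=")[1].split("&")[0].replace("/", "*");
        }
        System.out.println(productId); // expected: 582242698961
    }
}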
@@ -0,0 +1,203 @@
package com.bfd.mf.job.service.es;

import com.bfd.mf.job.config.ESConstants;
import com.bfd.mf.job.domain.entity.Task;
import com.bfd.mf.job.util.EsUtils;
import org.elasticsearch.index.query.*;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;

@Service
public class EsQueryMiniService {
    private static Logger logger = LoggerFactory.getLogger(EsQueryMiniService.class);

    /**
     * Count the total volume per channel under each subject.
     */
    public Map<String,Long> getSubjectChannelStatistics(String clusterName, String indexName) {
        Map<String,Long> resultMap = new HashMap<>();
        try {
            boolean isExists = EsUtils.indexExists(clusterName, indexName);
            if (isExists) {
                BoolQueryBuilder qb = QueryBuilders.boolQuery();
                AggregationBuilder ab = EsUtils.getSubjectChannelAB(ESConstants.DOC_TYPE);
                String indexNames[] = {indexName};
                Terms result = EsUtils.queryTag(clusterName, indexNames, qb, ab, ESConstants.DOC_TYPE + "Tag");
                resultMap = EsUtils.parseTerms(result);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return resultMap;
    }

    /**
     * Count the volume added today per channel under each subject.
     */
    public Map<String,Long> getSubjectChannelTodayStatistics(String clusterName, String indexName) {
        Map<String,Long> resultMap = new HashMap<>();
        try {
            boolean isExists = EsUtils.indexExists(clusterName, indexName);
            if (isExists) {
                BoolQueryBuilder qb = QueryBuilders.boolQuery();
                long current = System.currentTimeMillis();
                long zero = current/(1000*3600*24)*(1000*3600*24) - TimeZone.getDefault().getRawOffset();
                Long startTime = new Timestamp(zero).getTime();
                RangeQueryBuilder rangeQueryBuilder = QueryBuilders
                        .rangeQuery(ESConstants.CRAWLTIME)
                        .gte(startTime)
                        .lt(current);
                qb.must(rangeQueryBuilder);
                AggregationBuilder ab = EsUtils.getSubjectChannelAB(ESConstants.DOC_TYPE);
                String indexNames[] = {indexName};
                Terms result = EsUtils.queryTag(clusterName, indexNames, qb, ab, ESConstants.DOC_TYPE + "Tag");
                resultMap = EsUtils.parseTerms(result);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return resultMap;
    }

    /**
     * Count today's totals for the three crawlDataFlag types under each subject.
     */
    public Map<String,Long> getSubjectCrawlDataFlagStatistics(String clusterName, String indexName) {
        Map<String,Long> resultMap = new HashMap<>();
        try {
            boolean isExists = EsUtils.indexExists(clusterName, indexName);
            if (isExists) {
                BoolQueryBuilder qb = QueryBuilders.boolQuery();
                AggregationBuilder ab = EsUtils.getSubjectChannelAB(ESConstants.CRAWLDATAFLAG);
                String indexNames[] = {indexName};
                Terms result = EsUtils.queryTag(clusterName, indexNames, qb, ab, ESConstants.CRAWLDATAFLAG + "Tag");
                Map<String,Long> termsMap = EsUtils.parseTerms(result);
                resultMap = EsUtils.getResultMap(termsMap);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return resultMap;
    }

    /**
     * Count the increments for the three crawlDataFlag types under each subject.
     */
    public Map<String,Long> getSubjectCrawlDataFlagTodayStatistics(String clusterName, String indexName) {
        Map<String,Long> resultMap = new HashMap<>();
        try {
            boolean isExists = EsUtils.indexExists(clusterName, indexName);
            if (isExists) {
                BoolQueryBuilder qb = QueryBuilders.boolQuery();
                long current = System.currentTimeMillis();
                long zero = current/(1000*3600*24)*(1000*3600*24) - TimeZone.getDefault().getRawOffset();
                Long startTime = new Timestamp(zero).getTime();
                RangeQueryBuilder rangeQueryBuilder = QueryBuilders
                        .rangeQuery(ESConstants.CRAWLTIME)
                        .gte(startTime)
                        .lt(current);
                qb.must(rangeQueryBuilder);
                AggregationBuilder ab = EsUtils.getSubjectChannelAB(ESConstants.CRAWLDATAFLAG);
                String indexNames[] = {indexName};
                Terms result = EsUtils.queryTag(clusterName, indexNames, qb, ab, ESConstants.CRAWLDATAFLAG + "Tag");
                Map<String,Long> termsMap = EsUtils.parseTerms(result);
                resultMap = EsUtils.getResultMap(termsMap);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return resultMap;
    }

    public Map<String,Long> getTaskCount(String clusterName, Long taskId, Task task, String crawlDataFlag, String indexNamePre) {
        Map<String,Long> countMap = new HashMap<>();
        String indexName = indexNamePre + task.getSubjectId(); // subject_id
        if (null != task.getCid()) {
            String cid = task.getCid().toLowerCase();
            Long crawlStartTime = task.getCrawlStartTime().longValue();
            Long crawlEndTime = task.getCrawlEndTime().longValue();
            // String crawlDataFlag = task.getCrawlDataFlag();
            if (indexName.contains(indexNamePre)) {
                boolean isExists = EsUtils.indexExists(clusterName, indexName);
                if (isExists) {
                    BoolQueryBuilder qb = QueryBuilders.boolQuery();
                    // filter by task id
                    TermQueryBuilder cidTermQueryBuilder = QueryBuilders.termQuery(ESConstants.EN_SOURCE, cid);
                    TermQueryBuilder taskIdTermQueryBuilder = QueryBuilders.termQuery(ESConstants.CRAWLDATAFLAG, crawlDataFlag);
                    qb.must(taskIdTermQueryBuilder).must(cidTermQueryBuilder);
                    // filter by time range
                    BoolQueryBuilder shouldbq = QueryBuilders.boolQuery();
                    RangeQueryBuilder rangeQueryBuilder = QueryBuilders
                            .rangeQuery(ESConstants.PUBTIME)
                            .gte(crawlStartTime)
                            .lt(crawlEndTime);
                    // user data
                    BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
                    TermQueryBuilder primartTermQueryBuilder = QueryBuilders.termQuery(ESConstants.PRIMARY, 2);
                    // TermQueryBuilder pubTimeTermQueryBuilder = QueryBuilders.termQuery(ESConstants.PUBTIME,0);
                    boolQueryBuilder.must(primartTermQueryBuilder);
                    shouldbq.should(boolQueryBuilder).should(rangeQueryBuilder);
                    qb.must(shouldbq);
                    logger.info("QB1 : indexName: {}. taskId : {}.{\"query\": {}}.", indexName, taskId, qb.toString().replace("\n", "").replace("\r", "").replace(" ", ""));
                    Long count = EsUtils.queryCount(clusterName, indexName, qb);
                    countMap.put("totalCount", count);
                    // the query above returns the task's total volume (totalCount); the query below returns the task's volume for today (todayCount)
                    long current = System.currentTimeMillis();
                    long zero = current / (1000 * 3600 * 24) * (1000 * 3600 * 24) - TimeZone.getDefault().getRawOffset();
                    Long startTime = new Timestamp(zero).getTime();
                    RangeQueryBuilder rangeQueryBuilder2 = QueryBuilders
                            .rangeQuery(ESConstants.CRAWLTIME)
                            .gte(startTime).lt(current);
                    qb.must(rangeQueryBuilder2);
                    logger.info("QB2 : indexName: {}. taskId : {}.{\"query\": {}}.", indexName, taskId, qb.toString().replace("\n", "").replace("\r", "").replace(" ", ""));
                    Long todayCount = EsUtils.queryCount(clusterName, indexName, qb);
                    countMap.put("todayCount", todayCount);
                }
            }
        }
        return countMap;
    }

    // public Long getTaskTodayCount(String clusterName,Integer id, Map<String, Object> task) {
    //     Long count = 0L;
    //     String indexName = clSubject + (String) task.get("subject_id");
    //     String cid = (String) task.get(ESConstants.CID);
    //     Long crawlStartTime = (Long) task.get("crawl_start_time");
    //     Long crawlEndTime = (Long) task.get("crawl_end_time");
    //     String crawlDataFlag = (String) task.get("crawl_data_flag");
    //
    //     if(indexName.contains(subjectPre)) {
    //         boolean isExists = EsUtils.indexExists(clusterName, indexName);
    //         if (isExists) {
    //             BoolQueryBuilder qb = QueryBuilders.boolQuery();
    //             long current=System.currentTimeMillis();
    //             long zero=current/(1000*3600*24)*(1000*3600*24)-TimeZone.getDefault().getRawOffset();
    //             Long startTime = new Timestamp(zero).getTime();
    //             RangeQueryBuilder rangeQueryBuilder = QueryBuilders
    //                     .rangeQuery(ESConstants.CRAWLTIME)
    //                     .gte(startTime)
    //                     .lt(current);
    //             qb.must(rangeQueryBuilder);
    //             // Terms result = EsUtils.queryTag(clusterName, indexName, qb, ab, ESConstant.DOC_TYPE + "Tag");
    //             // resultMap = parseTerms(result);
    //         }
    //     }
    //     return count;
    // }
}
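Editor's note: all of the "today" statistics rely on the same start-of-day computation: integer division truncates the epoch millis to a UTC day boundary, and subtracting the zone's raw offset shifts it to local midnight. A hedged, self-contained illustration (note that getRawOffset() ignores daylight-saving shifts, so in DST zones this can be off by an hour):

import java.util.TimeZone;

public class StartOfDay {
    public static void main(String[] args) {
        long current = System.currentTimeMillis();
        long day = 1000L * 3600 * 24;
        // truncate to the UTC day boundary, then shift by the zone offset to reach local midnight
        long zero = current / day * day - TimeZone.getDefault().getRawOffset();
        System.out.println("local midnight (ms): " + zero + ", now (ms): " + current);
    }
}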
@@ -1,4 +1,4 @@
-package com.bfd.mf.job.service;
+package com.bfd.mf.job.service.es;

 import com.bfd.mf.job.config.ESConstants;
 import com.bfd.mf.job.util.EsUtils;
@ -0,0 +1,311 @@ |
|||||
|
package com.bfd.mf.job.service.taskCount; |
||||
|
|
||||
|
import com.alibaba.fastjson.JSONObject; |
||||
|
import com.bfd.mf.job.config.AppConfig; |
||||
|
import com.bfd.mf.job.domain.entity.Task; |
||||
|
import com.bfd.mf.job.domain.entity.TaskCount; |
||||
|
import com.bfd.mf.job.domain.repository.SubjectRepository; |
||||
|
import com.bfd.mf.job.domain.repository.TaskCountRepository; |
||||
|
import com.bfd.mf.job.domain.repository.TaskRepository; |
||||
|
import com.bfd.mf.job.service.es.EsQueryMiniService; |
||||
|
import com.bfd.mf.job.service.es.EsQueryNormalService; |
||||
|
import com.bfd.mf.job.service.statistics.TotalCountService; |
||||
|
import com.bfd.mf.job.util.DateUtil; |
||||
|
import com.bfd.mf.job.util.EsUtils; |
||||
|
import kafka.utils.Json; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.data.jpa.repository.Query; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import javax.annotation.PostConstruct; |
||||
|
import java.math.BigInteger; |
||||
|
import java.text.DecimalFormat; |
||||
|
import java.util.HashMap; |
||||
|
import java.util.List; |
||||
|
import java.util.Map; |
||||
|
import java.util.TimeZone; |
||||
|
|
||||
|
@Service |
||||
|
public class TaskCountService { |
||||
|
private static final Logger LOGGER = LoggerFactory.getLogger(TaskCountService.class); |
||||
|
@Autowired |
||||
|
private AppConfig config; |
||||
|
@Autowired |
||||
|
private TaskRepository taskRepository; |
||||
|
@Autowired |
||||
|
private TaskCountRepository taskCountRepository; |
||||
|
|
||||
|
@PostConstruct |
||||
|
public void init() { |
||||
|
// 注册数据查询来源 |
||||
|
EsUtils.registerCluster(config.esNormalClusterName(), config.esNormalAddress());// 配置文件中的 es-source |
||||
|
EsUtils.registerCluster(config.esMiniClusterName(), config.esMiniAddress()); // 配置文件中的 es-target |
||||
|
} |
||||
|
|
||||
|
/* |
||||
|
尝试在指定时间内获得许可,如果获得了,则直接返回,如果没有获得,则执行下面的流程 |
||||
|
*/ |
||||
|
public void tryAcquire() { |
||||
|
long start = System.currentTimeMillis(); |
||||
|
LOGGER.info("------------------------------------------------------------------ TaskCountService ------------------------------------------------------"); |
||||
|
// 获取当天 0 点~当天12点的时间 |
||||
|
long current = System.currentTimeMillis(); |
||||
|
long zero = current/(1000*3600*24)*(1000*3600*24) - TimeZone.getDefault().getRawOffset(); |
||||
|
long twelve=zero+24*60*60*1000-1;//今天23点59分59秒的毫秒数 |
||||
|
// 任务的统计,统计的是前一天的,因此: |
||||
|
long newZero = zero - 24*60*60*1000; |
||||
|
long newTweleve = twelve - 24*60*60*1000; |
||||
|
String taskStartTime = DateUtil.parseDateByTime(newZero); |
||||
|
String taskEndTime = DateUtil.parseDateByTime(newTweleve); |
||||
|
System.out.println(taskStartTime + " -- " + taskEndTime); |
||||
|
|
||||
|
// 直接查 cl_task 表中,开始时间和结束时间再当天范围内的任务的today_count; |
||||
|
/** |
||||
|
* 1.只统计单次任务 |
||||
|
2.只统计结束时间都在当天的任务(去掉开始时间,以任务完成时间为判断) |
||||
|
3.任务的数据量大于0(拿这一条过滤掉不合理的任务) |
||||
|
*/ |
||||
|
Long sumTodayTotalCount = taskRepository.findTodayDataTotal(taskStartTime,taskEndTime); |
||||
|
//Long count = taskRepository.findTodayDataTotalTaskNum(); |
||||
|
|
||||
|
// List<Map<String,String>> list = taskRepository.findByCrawlTime(taskStartTime,taskEndTime); |
||||
|
// for (Map<String,String> map :list) { |
||||
|
// System.out.println("任务:"+JSONObject.toJSONString(map)); |
||||
|
// } |
||||
|
|
||||
|
// 获取任务的时间差 |
||||
|
List<BigInteger> avgSpeedList = taskRepository.findTaskByCrawlTime(taskStartTime,taskEndTime); |
||||
|
Integer count = avgSpeedList.size(); |
||||
|
Long sumSpeedCount = 0L; |
||||
|
for (BigInteger avg: avgSpeedList) { |
||||
|
sumSpeedCount = sumSpeedCount + Long.valueOf(avg.toString()); |
||||
|
} |
||||
|
System.out.println("sumTodayTotalCount : "+sumTodayTotalCount); |
||||
|
String avgSpeed = "0"; |
||||
|
String avgCount = "0"; |
||||
|
if(sumSpeedCount != null && sumTodayTotalCount != null) { |
||||
|
avgSpeed = String.format("%.1f", sumSpeedCount * 1.0 / count); |
||||
|
avgCount = String.format("%.1f", sumTodayTotalCount * 1.0 / count); |
||||
|
System.out.println("任务数量:" + count + "; 当天的新增总量: " + sumTodayTotalCount + "; 平均任务量:" + avgCount + "; 平均速度:" + avgSpeed); |
||||
|
} |
||||
|
// 将结果插入到 cl_task_count 表中 |
||||
|
String day = taskStartTime.split(" ")[0]; |
||||
|
|
||||
|
TaskCount taskCount = new TaskCount(); |
||||
|
taskCount.setCountDate(day); |
||||
|
taskCount.setAvgCount(Float.valueOf(avgCount)); |
||||
|
taskCount.setAvgSpeed(Float.valueOf(avgSpeed)); |
||||
|
System.out.println(JSONObject.toJSONString(taskCount)); |
||||
|
taskCountRepository.save(taskCount); |
||||
|
long end = System.currentTimeMillis(); |
||||
|
LOGGER.info("TaskCountService finish, took:{} ms.",(end - start)); |
||||
|
|
||||
|
// //-------统计147上的 每个任务的总量------------------------------------------------------------------------- |
||||
|
// start = System.currentTimeMillis(); |
||||
|
// clusterName = config.esMiniClusterName(); // 获得 147 的 clusterName |
||||
|
// statisticsTask(clusterName); |
||||
|
// end = System.currentTimeMillis(); |
||||
|
// LOGGER.info("Statistics Task, took:{} ms.",(end - start)); |
||||
|
// |
||||
|
// //-------统计每个专题的量------------------------------------------------------------------------------------ |
||||
|
// start = System.currentTimeMillis(); |
||||
|
// // 如果是正常任务的,用这种方式统计 |
||||
|
// List<BigInteger> subjectIds = subjectRepository.findAllSubjectIds(); |
||||
|
// for (BigInteger subjectId: subjectIds) { |
||||
|
// statisticsSubjectBySumTask(subjectId); |
||||
|
// } |
||||
|
// end = System.currentTimeMillis(); |
||||
|
// LOGGER.info("Statistics Subject Normal, took:{} ms.",(end - start)); |
||||
|
// // 如果是【欧莱雅】任务的,得用这个方式统计呀 |
||||
|
// start = System.currentTimeMillis(); |
||||
|
// List<BigInteger> subjectIds1 = subjectRepository.findAllOlySubjectIds(); |
||||
|
// for (BigInteger subjectId: subjectIds1) { |
||||
|
// statisticsSubject(subjectId,clusterName); |
||||
|
// } |
||||
|
// end = System.currentTimeMillis(); |
||||
|
// LOGGER.info("Statistics Subject OLY, took:{} ms.",(end - start)); |
||||
|
|
||||
|
} |
||||
|
|
||||
|
// private boolean statisticsSubjectBySumTask(BigInteger subjectId) { |
||||
|
// boolean result = true; |
||||
|
// try{ |
||||
|
// Map<String,Long> subjectChannelMap = new HashMap<>(); |
||||
|
// Map<String,Long> subjectChannelTodayMap = new HashMap<>(); |
||||
|
// long count = 0L; |
||||
|
// long todayCount = 0L; |
||||
|
// // 按渠道统计每个渠道的数据量 |
||||
|
// for(int i = 0 ; i < 8 ; i ++){ |
||||
|
// if(null != taskRepository.findDataTotalBySbujectIdAndSiteType(subjectId,i)){ |
||||
|
// count = taskRepository.findDataTotalBySbujectIdAndSiteType(subjectId,i); |
||||
|
// }else{ |
||||
|
// count = 0; |
||||
|
// } |
||||
|
// if(null != taskRepository.findTodayDataTotalBySbujectIdAndSiteType(subjectId,i)) { |
||||
|
// todayCount = taskRepository.findTodayDataTotalBySbujectIdAndSiteType(subjectId, i); |
||||
|
// }else{ |
||||
|
// todayCount = 0; |
||||
|
// } |
||||
|
// switch (i){ |
||||
|
// case 0: |
||||
|
// subjectChannelMap.put("social",count); |
||||
|
// subjectChannelTodayMap.put("social",todayCount); |
||||
|
// break; |
||||
|
// case 1: |
||||
|
// subjectChannelMap.put("news",count); |
||||
|
// subjectChannelTodayMap.put("news",todayCount); |
||||
|
// break; |
||||
|
// case 2: |
||||
|
// subjectChannelMap.put("blog",count); |
||||
|
// subjectChannelTodayMap.put("blog",todayCount); |
||||
|
// break; |
||||
|
// case 3: |
||||
|
// subjectChannelMap.put("bbs",count); |
||||
|
// subjectChannelTodayMap.put("bbs",todayCount); |
||||
|
// break; |
||||
|
// case 4: |
||||
|
// subjectChannelMap.put("video",count); |
||||
|
// subjectChannelTodayMap.put("video",todayCount); |
||||
|
// break; |
||||
|
// case 5: |
||||
|
// subjectChannelMap.put("item",count); |
||||
|
// subjectChannelTodayMap.put("item",todayCount); |
||||
|
// break; |
||||
|
// case 6: |
||||
|
// subjectChannelMap.put("search",count); |
||||
|
// subjectChannelTodayMap.put("search",todayCount); |
||||
|
// break; |
||||
|
// case 7: |
||||
|
// subjectChannelMap.put("lief",count); |
||||
|
// subjectChannelTodayMap.put("lief",todayCount); |
||||
|
// break; |
||||
|
// } |
||||
|
// } |
||||
|
// // Count the data volume per crawl type
||||
|
// Map<String,Long> subjectCrawlDatFlagMap = new HashMap<>(); |
||||
|
// Map<String,Long> subjectCrawlDataFlagTodayMap = new HashMap<>(); |
||||
|
// long siteCount = 0L; |
||||
|
// long siteTodayCount = 0L; |
||||
|
// for(int i = 0 ; i < 4 ; i ++) { |
||||
|
// if(null != taskRepository.findDataTotalBySbujectIdAndTaskType(subjectId, i)) { |
||||
|
// siteCount = taskRepository.findDataTotalBySbujectIdAndTaskType(subjectId, i); |
||||
|
// }else{ |
||||
|
// siteCount = 0; |
||||
|
// } |
||||
|
// if(null != taskRepository.findTodayDataTotalBySbujectIdAndTaskType(subjectId, i)) { |
||||
|
// siteTodayCount = taskRepository.findTodayDataTotalBySbujectIdAndTaskType(subjectId, i); |
||||
|
// }else{ |
||||
|
// siteTodayCount = 0; |
||||
|
// } |
||||
|
// switch (i) { |
||||
|
// case 0: |
||||
|
// subjectCrawlDatFlagMap.put("keyword", siteCount); |
||||
|
// subjectCrawlDataFlagTodayMap.put("keyword", siteTodayCount); |
||||
|
// break; |
||||
|
// case 1: |
||||
|
// subjectCrawlDatFlagMap.put("account", siteCount); |
||||
|
// subjectCrawlDataFlagTodayMap.put("account", siteTodayCount); |
||||
|
// break; |
||||
|
// case 2: |
||||
|
// subjectCrawlDatFlagMap.put("url", siteCount); |
||||
|
// subjectCrawlDataFlagTodayMap.put("url", siteTodayCount); |
||||
|
// break; |
||||
|
// case 3: |
||||
|
// subjectCrawlDatFlagMap.put("upload", siteCount); |
||||
|
// subjectCrawlDataFlagTodayMap.put("upload", siteTodayCount); |
||||
|
// break; |
||||
|
// } |
||||
|
// } |
||||
|
// totalCountService.updateSubjectCount(subjectId,subjectChannelMap,subjectChannelTodayMap,subjectCrawlDatFlagMap,subjectCrawlDataFlagTodayMap); |
||||
|
// }catch (Exception e){ |
||||
|
// result = false; |
||||
|
// LOGGER.error("[StatisticsService] statisticsSubject ERROR... subjectId : " + subjectId + "error : " ); |
||||
|
// e.printStackTrace(); |
||||
|
// } |
||||
|
// return result; |
||||
|
// } |
||||
|
// |
||||
|
// public boolean statisticsSubject(BigInteger subjectId, String miniName){ |
||||
|
// boolean result = true; |
||||
|
// try{ |
||||
|
// // LOGGER.info("[StatisticsService] statisticsSubject start... subjectId : " + subjectId); |
||||
|
// String indexName = config.getIndexNamePre() + subjectId; |
||||
|
// // Total volume per channel under this subject
// Map<String,Long> subjectChannelMap = esQueryMiniService.getSubjectChannelStatistics(miniName,indexName);
// // Today's increment per channel under this subject
// Map<String,Long> subjectChannelTodayMap = esQueryMiniService.getSubjectChannelTodayStatistics(miniName,indexName);
// // Total volume per crawl type under this subject
// Map<String,Long> subjectCrawlDatFlagMap = esQueryMiniService.getSubjectCrawlDataFlagStatistics(miniName,indexName);
// // Today's increment per crawl type under this subject
// Map<String,Long> subjectCrawlDataFlagTodayMap = esQueryMiniService.getSubjectCrawlDataFlagTodayStatistics(miniName,indexName);
// // Insert into or update the table
// totalCountService.updateSubjectCount(subjectId,subjectChannelMap,subjectChannelTodayMap,subjectCrawlDatFlagMap,subjectCrawlDataFlagTodayMap);
||||
|
// totalCountService.updateResultDetil(subjectId,subjectChannelMap); |
||||
|
// }catch (Exception e){ |
||||
|
// result = false; |
||||
|
// LOGGER.error("[StatisticsService] statisticsSubject ERROR... subjectId : " + subjectId + "error : " ); |
||||
|
// e.printStackTrace(); |
||||
|
// } |
||||
|
// return result; |
||||
|
// } |
||||
|
// |
||||
|
// public boolean statisticsTotal(String normalName){ |
||||
|
// boolean result = true; |
||||
|
// try{ |
||||
|
// LOGGER.info("[StatisticsService] statisticsTotal start... "); |
||||
|
// EsQueryNormalService esQueryNormalService = new EsQueryNormalService(); |
||||
|
// // Query the global statistics for every type
||||
|
// Map<String,Long> channelMap = esQueryNormalService.getChannelStatistics(normalName); |
||||
|
// Map<String,Long> channelTodayMap = esQueryNormalService.getChannelTodayStatistics(normalName); |
||||
|
// Map<String,Long> crawlDataFlagMap = esQueryNormalService.getCrawlDataFlagStatistics(normalName); |
||||
|
// Map<String,Long> crawlDataFlagTodayMap = esQueryNormalService.getCrawlDataFlagTodayStatistics(normalName); |
||||
|
// BigInteger subjectId = new BigInteger("0"); |
||||
|
// totalCountService.updateSubjectCount(subjectId,channelMap,channelTodayMap,crawlDataFlagMap,crawlDataFlagTodayMap); |
||||
|
// }catch (Exception e){ |
||||
|
// result = false; |
||||
|
// LOGGER.error("[StatisticsService] statisticsTotal ERROR... "); |
||||
|
// e.printStackTrace(); |
||||
|
// } |
||||
|
// return result; |
||||
|
// } |
||||
|
// |
||||
|
// public boolean statisticsTask(String miniName){ |
||||
|
// boolean result = true; |
||||
|
// try{ |
||||
|
// // Step 1: reset today_data_total to 0 (and update_time) for tasks finished before today
||||
|
// |
||||
|
// |
||||
|
// EsQueryMiniService esQueryMiniService = new EsQueryMiniService(); |
||||
|
// // Load all task ids from the task table together with their cid, crawlDataFlag and subjectId
||||
|
// List<Task> taskList = taskRepository.findAllBydel0(); |
||||
|
// // Iterate over the task list, build the ES query for each task, query the matching index and write the counts back to the task table
||||
|
// for (Task task: taskList) { |
||||
|
// Long taskId = task.getId().longValue(); |
||||
|
// String crawlDataFlag = task.getCrawlDataFlag(); |
||||
|
// String indexNamePre = config.getIndexNamePre(); |
||||
|
// Map<String, Long> countMap = new HashMap<>(); |
||||
|
// if(null != task.getCid() && !task.getCid().equals("test")) { |
||||
|
// countMap = esQueryMiniService.getTaskCount(miniName, taskId, task, crawlDataFlag, indexNamePre); |
||||
|
// // Directly update data_total and today_data_total in the cl_task table
||||
|
// long totalCount = 0L; |
||||
|
// long todayCount = 0L; |
||||
|
// if(countMap.containsKey("totalCount") && countMap.containsKey("todayCount")) { |
||||
|
// totalCount = countMap.get("totalCount"); |
||||
|
// todayCount = countMap.get("todayCount"); |
||||
|
// } |
||||
|
// taskRepository.updateTaskCount(taskId,totalCount,todayCount); |
||||
|
// } |
||||
|
// |
||||
|
// } |
||||
|
// LOGGER.info("[StatisticsService] statisticsTask finish ..."); |
||||
|
// }catch (Exception e){ |
||||
|
// e.printStackTrace(); |
||||
|
// result = false; |
||||
|
// LOGGER.error("[StatisticsService] statisticsTask ERROR..."); |
||||
|
// } |
||||
|
// return result; |
||||
|
// } |
||||
|
|
||||
|
} |
@@ -0,0 +1,545 @@
|
package com.bfd.mf.job.service.upload; |
||||
|
|
||||
|
import com.alibaba.fastjson.JSONObject; |
||||
|
import com.bfd.mf.job.config.AllKeys; |
||||
|
import com.bfd.mf.job.config.AppConfig; |
||||
|
import com.bfd.mf.job.config.ESConstants; |
||||
|
import com.bfd.mf.job.download.DownLoadFile; |
||||
|
import com.bfd.mf.job.util.DataCheckUtil; |
||||
|
import com.bfd.mf.job.util.EsUtils2; |
||||
|
import com.bfd.mf.job.util.ReadLine; |
||||
|
import com.monitorjbl.xlsx.StreamingReader; |
||||
|
import org.apache.poi.ss.usermodel.Cell; |
||||
|
import org.apache.poi.ss.usermodel.Row; |
||||
|
import org.apache.poi.ss.usermodel.Sheet; |
||||
|
import org.apache.poi.ss.usermodel.Workbook; |
||||
|
import org.apache.poi.xssf.usermodel.XSSFCell; |
||||
|
import org.apache.poi.xssf.usermodel.XSSFSheet; |
||||
|
import org.apache.poi.xssf.usermodel.XSSFWorkbook; |
||||
|
import org.assertj.core.util.Lists; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import javax.annotation.PostConstruct; |
||||
|
import java.io.File; |
||||
|
import java.io.FileInputStream; |
||||
|
import java.io.FileNotFoundException; |
||||
|
import java.util.*; |
||||
|
import java.util.concurrent.BlockingQueue; |
||||
|
import java.util.concurrent.LinkedBlockingQueue; |
||||
|
|
||||
|
import static com.bfd.mf.job.config.ESConstants.INDEX_TYPE; |
||||
|
|
||||
|
@Service |
||||
|
public class UpLoadExcelService { |
||||
|
private static final Logger LOGGER = LoggerFactory.getLogger(UpLoadExcelService.class); |
||||
|
private static BlockingQueue<EsUtils2.BulkItem> DATA_CACHE = new LinkedBlockingQueue<>(10240); |
||||
|
|
||||
|
|
||||
|
@PostConstruct |
||||
|
public void init() { |
||||
|
// Register the data source cluster
String address[] = { config.getEsMini().get("address").toString() };
EsUtils2.registerCluster(config.getEsMini().get("name").toString(), address); // es-source in the config file
||||
|
} |
||||
|
|
||||
|
@Autowired |
||||
|
private AppConfig config; |
||||
|
|
||||
|
/**
 * Parse the rows of an Excel file and write the processed records into the ES index of the given subjectId.
 * Records are currently written one by one; this still needs to be optimized.
 */
||||
|
// public int parseExcel(String subjectId ,String path ,String excelName, Map<String, List<String>> fileNameMap,String crawlDataFlag) { |
||||
|
// LOGGER.info("UpLoadExcelService [parseExcel] parseExcel start ..."); |
||||
|
// int dataCount = 0; |
||||
|
// try{ |
||||
|
// XSSFWorkbook xssfWorkbook = new XSSFWorkbook(new FileInputStream(path+excelName)); |
||||
|
// //获取每一个工作簿的数据 |
||||
|
// long start = System.currentTimeMillis(); |
||||
|
// for (int i = 0; i < xssfWorkbook.getNumberOfSheets(); i++) { |
||||
|
// XSSFSheet sheet = xssfWorkbook.getSheetAt(i); |
||||
|
// int rowNum = sheet.getLastRowNum(); |
||||
|
// int cellNum = sheet.getRow(0).getLastCellNum(); |
||||
|
// if(cellNum < 22){ |
||||
|
// return dataCount; |
||||
|
// } |
||||
|
// dataCount = rowNum ; |
||||
|
// List<JSONObject> resultList = new ArrayList<>(); |
||||
|
// for (int row = 1; row <= rowNum; row++) { |
||||
|
// JSONObject resultJson = new JSONObject(); |
||||
|
// Map<String, Object> resultMap = AllKeys.getMap(); |
||||
|
// String dataId = String.valueOf(sheet.getRow(row).getCell(0)); |
||||
|
// resultMap.put(ESConstants.DATA_ID,dataId); |
||||
|
// resultMap.put(ESConstants._ID,dataId); |
||||
|
// resultMap.put(ESConstants.DOC_ID,String.valueOf(sheet.getRow(row).getCell(1))); |
||||
|
// resultMap.put(ESConstants.CHANNEL,String.valueOf(sheet.getRow(row).getCell(2))); |
||||
|
// resultMap.put(ESConstants.SOURCE,String.valueOf(sheet.getRow(row).getCell(3))); |
||||
|
// resultMap.put(ESConstants.EN_SOURCE,String.valueOf(sheet.getRow(row).getCell(4))); |
||||
|
// resultMap.put(ESConstants.URL,String.valueOf(sheet.getRow(row).getCell(5))); |
||||
|
// resultMap.put(ESConstants.TITLE,String.valueOf(sheet.getRow(row).getCell(6))); |
||||
|
// resultMap.put(ESConstants.TRANSLATETITLE,String.valueOf(sheet.getRow(row).getCell(7))); |
||||
|
// // 发表时间的 4 个字段 |
||||
|
// String pubTimeStr = String.valueOf(sheet.getRow(row).getCell(8)); |
||||
|
// long pubTime = DataCheckUtil.convertDateTotime(pubTimeStr)*1000; |
||||
|
// long pubDay = DataCheckUtil.getDay(pubTime); |
||||
|
// String pubDate = DataCheckUtil.getDate(pubTime); |
||||
|
// resultMap.put(ESConstants.PUBTIME, pubTime); |
||||
|
// resultMap.put(ESConstants.PUBTIMESTR,pubTimeStr); |
||||
|
// resultMap.put(ESConstants.PUBDAY,pubDay); |
||||
|
// resultMap.put(ESConstants.PUBDATE,pubDate); |
||||
|
// |
||||
|
// resultMap.put(ESConstants.AUTHOR,String.valueOf(sheet.getRow(row).getCell(9))); |
||||
|
// resultMap.put(ESConstants.AUTHORID,String.valueOf(sheet.getRow(row).getCell(10))); |
||||
|
// resultMap.put(ESConstants.CONTENT,String.valueOf(sheet.getRow(row).getCell(11))); |
||||
|
// resultMap.put(ESConstants.TRANSLATECONTENT,String.valueOf(sheet.getRow(row).getCell(12))); |
||||
|
// resultMap.put(ESConstants.PRICE,String.valueOf(sheet.getRow(row).getCell(13))); |
||||
|
// resultMap.put(ESConstants.PRODUCTPARAMETER,String.valueOf(sheet.getRow(row).getCell(14))); |
||||
|
// // 抓取时间的 4 个字段 |
||||
|
// String crawlTimeStr = String.valueOf(sheet.getRow(row).getCell(15)); |
||||
|
// long crawlTime = System.currentTimeMillis() ; |
||||
|
// if(!crawlTimeStr.contains("1970")){ |
||||
|
// crawlTime = DataCheckUtil.convertDateTotime(crawlTimeStr)*1000; |
||||
|
// }else{ |
||||
|
// crawlTimeStr = DataCheckUtil.getCurrentTime(crawlTime); |
||||
|
// } |
||||
|
// long crawlDay = DataCheckUtil.getDay(crawlTime); |
||||
|
// String crawlDate = DataCheckUtil.getDate(crawlTime); |
||||
|
// |
||||
|
// resultMap.put(ESConstants.CRAWLTIME,crawlTime); |
||||
|
// resultMap.put(ESConstants.CRAWLTIMESTR,crawlTimeStr); |
||||
|
// resultMap.put(ESConstants.CRAWLDAY,crawlDay); |
||||
|
// resultMap.put(ESConstants.CRAWLDATE,crawlDate); |
||||
|
// // crawlDataFlag 这个字段值不用数据中原有的,而是要用页面传过来的,不然任务查询的时候查不到数据 |
||||
|
// resultMap.put(ESConstants.CRAWLDATAFLAG,crawlDataFlag); |
||||
|
// resultMap.put(ESConstants.SYS_SENTIMENT,String.valueOf(sheet.getRow(row).getCell(17))); |
||||
|
// // 提取的关键字字段的值 |
||||
|
// XSSFCell hlKeywords = sheet.getRow(row).getCell(18); |
||||
|
// List<String> hl = new ArrayList<>(); |
||||
|
// if (null != hlKeywords) { |
||||
|
// if (hlKeywords.toString().equals("[]")) { |
||||
|
// resultMap.put(ESConstants.HL_KEYWORDS, hl); |
||||
|
// } else { |
||||
|
// if (hlKeywords.toString().contains(",")) { |
||||
|
// String hlk[] = hlKeywords.toString().replace("[", "").replace("]", "").replace("\"", "").split(","); |
||||
|
// hl = Arrays.asList(hlk); |
||||
|
// } else { |
||||
|
// String hlk = hlKeywords.toString().replace("[", "").replace("]", ""); |
||||
|
// hl.add(hlk); |
||||
|
// } |
||||
|
// } |
||||
|
// }else { |
||||
|
// resultMap.put(ESConstants.HL_KEYWORDS, hl); |
||||
|
// } |
||||
|
// // 转发、评论、点赞 |
||||
|
// String quoteCount = sheet.getRow(row).getCell(19).toString(); |
||||
|
// if(quoteCount.equals("")){ |
||||
|
// quoteCount = "0"; |
||||
|
// } |
||||
|
// resultMap.put("quoteCount",Integer.valueOf(quoteCount)); |
||||
|
// String commentsCount = sheet.getRow(row).getCell(20).toString(); |
||||
|
// if(commentsCount.equals("")){ |
||||
|
// commentsCount = "0"; |
||||
|
// } |
||||
|
// resultMap.put("commentsCount",Integer.valueOf(commentsCount)); |
||||
|
// String attitudesCount = sheet.getRow(row).getCell(21).toString(); |
||||
|
// if(attitudesCount.equals("")){ |
||||
|
// attitudesCount = "0"; |
||||
|
// } |
||||
|
// resultMap.put("attitudesCount",Integer.valueOf(attitudesCount)); |
||||
|
// // 插入时间的 4个字段 |
||||
|
// long createTime = System.currentTimeMillis() ; |
||||
|
// resultMap.put(ESConstants.CREATETIME, createTime); |
||||
|
// resultMap.put(ESConstants.CREATETIMESTR, DataCheckUtil.getCurrentTime(createTime)); |
||||
|
// resultMap.put(ESConstants.CREATEDAY, DataCheckUtil.getDay(createTime)); |
||||
|
// resultMap.put(ESConstants.CREATEDATE, DataCheckUtil.getDate(createTime)); |
||||
|
// |
||||
|
// // 根据路径和数据ID,读取附件,组装附件的字段值 |
||||
|
// resultMap = getPathSize(path,dataId,resultMap,fileNameMap); |
||||
|
// |
||||
|
// LOGGER.info("The Result: " + JSONObject.toJSONString(resultMap)); |
||||
|
// resultJson.putAll(resultMap); |
||||
|
// resultList.add(resultJson); |
||||
|
// // 一条一条的数据插入 |
||||
|
// // uploadData(subjectId,resultJson); |
||||
|
// } |
||||
|
// LOGGER.info("Writer Data To ES totalCount = " + resultList.size()); |
||||
|
// long end = System.currentTimeMillis(); |
||||
|
// System.out.println(end-start + " === "+resultList.size()); |
||||
|
// // 批量的数据插入 |
||||
|
//// if(resultList.size() >= 1000) { |
||||
|
//// uploadData(subjectId, resultList); |
||||
|
//// resultList.clear(); |
||||
|
//// } |
||||
|
// } |
||||
|
// }catch (Exception e){ |
||||
|
// e.printStackTrace(); |
||||
|
// dataCount = 0; |
||||
|
// } |
||||
|
// return dataCount; |
||||
|
// } |
||||
|
|
||||
|
|
||||
|
|
||||
|
public Map<String,Object> parseExcel2(String subjectId ,String path , |
||||
|
String excelName, |
||||
|
Map<String, List<String>> fileNameMap, |
||||
|
String crawlDataFlag) |
||||
|
throws FileNotFoundException { |
||||
|
LOGGER.info("UpLoadExcelService [parseExcel] 222 parseExcel2 start ..."); |
||||
|
Map<String,Object> returnMap = new HashMap<>(); |
||||
|
int dataCount = 0; |
||||
|
try{ |
||||
|
FileInputStream in = new FileInputStream(path+excelName); |
||||
|
Workbook wk = StreamingReader.builder() |
||||
|
.rowCacheSize(100)    // number of rows cached in memory; default is 10
||||
|
.bufferSize(4096)     // byte buffer size used when reading the file; default is 1024
||||
|
.open(in);            // open the resource (InputStream or File); note: only XLSX files can be opened
||||
|
Sheet sheet = wk.getSheetAt(0); |
||||
|
// Iterate over all rows
||||
|
List<JSONObject> resultList = new ArrayList<>(); |
||||
|
Map<String,Long> pubTimeMap = new HashMap<>(); |
||||
|
pubTimeMap.put("min", System.currentTimeMillis()); |
||||
|
pubTimeMap.put("max", 0L); |
||||
|
for (Row row : sheet) { |
||||
|
if(row.getRowNum() != 0){ |
||||
|
JSONObject resultJson = new JSONObject(); |
||||
|
Map<String, Object> resultMap = AllKeys.getMap(); |
||||
|
String dataId = String.valueOf(row.getCell(0).getStringCellValue()); |
||||
|
resultMap.put(ESConstants.DATA_ID, dataId); |
||||
|
resultMap.put(ESConstants._ID, dataId); |
||||
|
resultMap.put(ESConstants.DOC_ID, String.valueOf(row.getCell(1).getStringCellValue())); |
||||
|
String channel = String.valueOf(row.getCell(2).getStringCellValue()); |
||||
|
resultMap.put(ESConstants.CHANNEL,channel ); |
||||
|
resultMap.put(ESConstants.DOC_TYPE,getDocType(channel)); |
||||
|
resultMap.put(ESConstants.SOURCE, String.valueOf(row.getCell(3).getStringCellValue())); |
||||
|
resultMap.put(ESConstants.EN_SOURCE, String.valueOf(row.getCell(4).getStringCellValue())); |
||||
|
resultMap.put(ESConstants.URL, String.valueOf(row.getCell(5).getStringCellValue())); |
||||
|
resultMap.put(ESConstants.TITLE, String.valueOf(row.getCell(6).getStringCellValue())); |
||||
|
resultMap.put(ESConstants.TRANSLATETITLE, String.valueOf(row.getCell(7).getStringCellValue())); |
||||
|
// The four publish-time fields
||||
|
String pubTimeStr = String.valueOf(row.getCell(8).getStringCellValue()); |
||||
|
long pubTime = DataCheckUtil.convertDateTotime(pubTimeStr) * 1000; |
||||
|
long pubDay = DataCheckUtil.getDay(pubTime); |
||||
|
String pubDate = DataCheckUtil.getDate(pubTime); |
||||
|
resultMap.put(ESConstants.PUBTIME, pubTime); |
||||
|
if(pubTime < pubTimeMap.get("min")){ |
||||
|
pubTimeMap.put("min",pubTime); |
||||
|
} |
||||
|
if(pubTime > pubTimeMap.get("max")){ |
||||
|
pubTimeMap.put("max",pubTime); |
||||
|
} |
||||
|
resultMap.put(ESConstants.PUBTIMESTR, pubTimeStr); |
||||
|
resultMap.put(ESConstants.PUBDAY, pubDay); |
||||
|
resultMap.put(ESConstants.PUBDATE, pubDate); |
||||
|
|
||||
|
resultMap.put(ESConstants.AUTHOR, String.valueOf(row.getCell(9).getStringCellValue())); |
||||
|
resultMap.put(ESConstants.AUTHORID, String.valueOf(row.getCell(10).getStringCellValue())); |
||||
|
resultMap.put(ESConstants.CONTENT, String.valueOf(row.getCell(11).getStringCellValue())); |
||||
|
resultMap.put(ESConstants.TRANSLATECONTENT, String.valueOf(row.getCell(12).getStringCellValue())); |
||||
|
resultMap.put(ESConstants.PRICE, String.valueOf(row.getCell(13).getStringCellValue())); |
||||
|
resultMap.put(ESConstants.PRODUCTPARAMETER, String.valueOf(row.getCell(14).getStringCellValue())); |
||||
|
// The four crawl-time fields
||||
|
String crawlTimeStr = String.valueOf(row.getCell(15).getStringCellValue()); |
||||
|
long crawlTime = System.currentTimeMillis(); |
||||
|
if (!crawlTimeStr.contains("1970")) { |
||||
|
crawlTime = DataCheckUtil.convertDateTotime(crawlTimeStr) * 1000; |
||||
|
} else { |
||||
|
crawlTimeStr = DataCheckUtil.getCurrentTime(crawlTime); |
||||
|
} |
||||
|
long crawlDay = DataCheckUtil.getDay(crawlTime); |
||||
|
String crawlDate = DataCheckUtil.getDate(crawlTime); |
||||
|
|
||||
|
resultMap.put(ESConstants.CRAWLTIME, crawlTime); |
||||
|
resultMap.put(ESConstants.CRAWLTIMESTR, crawlTimeStr); |
||||
|
resultMap.put(ESConstants.CRAWLDAY, crawlDay); |
||||
|
resultMap.put(ESConstants.CRAWLDATE, crawlDate); |
||||
|
// crawlDataFlag must use the value passed in from the page, not the one in the original data, otherwise task queries will not find the data
||||
|
resultMap.put(ESConstants.CRAWLDATAFLAG, crawlDataFlag); |
||||
|
resultMap.put(ESConstants.SYS_SENTIMENT, String.valueOf(row.getCell(17).getStringCellValue())); |
||||
|
// Extracted keyword field
||||
|
String hlKeywords = row.getCell(18).getStringCellValue(); |
||||
|
List<String> hl = getHlKeywords(hlKeywords); |
||||
|
resultMap.put(ESConstants.HL_KEYWORDS, hl); |
||||
|
// Reposts, comments and likes
||||
|
String quoteCount = row.getCell(19).getStringCellValue(); |
||||
|
if (quoteCount.equals("")) { |
||||
|
quoteCount = "0"; |
||||
|
} |
||||
|
resultMap.put("quoteCount", Integer.valueOf(quoteCount)); |
||||
|
|
||||
|
String commentsCount = row.getCell(20).getStringCellValue(); |
||||
|
if (commentsCount.equals("")) { |
||||
|
commentsCount = "0"; |
||||
|
} |
||||
|
resultMap.put("commentsCount", Integer.valueOf(commentsCount)); |
||||
|
|
||||
|
String attitudesCount = row.getCell(21).getStringCellValue(); |
||||
|
if (attitudesCount.equals("")) { |
||||
|
attitudesCount = "0"; |
||||
|
} |
||||
|
resultMap.put("attitudesCount", Integer.valueOf(attitudesCount)); |
||||
|
// OCR (image recognition) result
||||
|
String ocrText = row.getCell(22).getStringCellValue(); |
||||
|
List<String> ocrList = getHlKeywords(ocrText); |
||||
|
resultMap.put(ESConstants.OCRTEXT,ocrList); |
||||
|
// ASR (speech recognition) result
||||
|
String asrText = row.getCell(23).getStringCellValue(); |
||||
|
resultMap.put(ESConstants.ASRTEXT,asrText); |
||||
|
// The four insert-time fields
||||
|
long createTime = System.currentTimeMillis(); |
||||
|
resultMap.put(ESConstants.CREATETIME, createTime); |
||||
|
resultMap.put(ESConstants.CREATETIMESTR, DataCheckUtil.getCurrentTime(createTime)); |
||||
|
resultMap.put(ESConstants.CREATEDAY, DataCheckUtil.getDay(createTime)); |
||||
|
resultMap.put(ESConstants.CREATEDATE, DataCheckUtil.getDate(createTime)); |
||||
|
|
||||
|
|
||||
|
// Read the attachments by path and data ID and build the attachment fields
||||
|
resultMap = getPathSize(path, dataId, resultMap, fileNameMap); |
||||
|
LOGGER.info("The Result: " + JSONObject.toJSONString(resultMap)); |
||||
|
resultJson.putAll(resultMap); |
||||
|
resultList.add(resultJson); |
||||
|
// Insert one record at a time
||||
|
//uploadData(subjectId,resultJson); |
||||
|
} |
||||
|
if(resultList.size() >=100){ |
||||
|
dataCount = dataCount+resultList.size(); |
||||
|
uploadData(subjectId, resultList); |
||||
|
resultList.clear(); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// Flush the remaining records (fewer than 100) at the end
||||
|
dataCount = dataCount + resultList.size(); |
||||
|
uploadData(subjectId, resultList); |
||||
|
returnMap.put("pubTimeMap",pubTimeMap); |
||||
|
returnMap.put("dataCount",dataCount); |
||||
|
LOGGER.info("Writer Data To ES totalCount = " + dataCount); |
||||
|
}catch (Exception e){ |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
return returnMap; |
||||
|
} |
||||
|
|
||||
|
private List<String> getHlKeywords(String hlKeywords) { |
||||
|
List<String> hl = new ArrayList<>(); |
||||
|
if (null != hlKeywords ) { |
||||
|
if (hlKeywords.toString().equals("[]")) { |
||||
|
return hl; |
||||
|
} else { |
||||
|
if (hlKeywords.toString().contains(",")) { |
||||
|
String hlk[] = hlKeywords.toString().replace("[", "").replace("]", "").replace("\"", "").split(","); |
||||
|
hl = Arrays.asList(hlk); |
||||
|
} else { |
||||
|
String hlk = hlKeywords.toString().replace("[", "").replace("]", ""); |
||||
|
hl.add(hlk); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
return hl; |
||||
|
} |
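
// Illustrative only (not part of the original source): a few example inputs/outputs of getHlKeywords,
// assuming the keyword cell holds a JSON-style bracketed list as handled above.
//   getHlKeywords("[\"brandA\",\"brandB\"]")  -> ["brandA", "brandB"]
//   getHlKeywords("[]")                       -> []   (empty list)
//   getHlKeywords(null)                       -> []   (empty list)
// Note that a single-element value such as "[\"brandA\"]" keeps its surrounding quotes,
// because the single-keyword branch only strips the brackets.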
||||
|
|
||||
|
/** |
||||
|
* 有附件的,需要上传附件,然后替换字段中的附件路径值, |
||||
|
*/ |
||||
|
private Map<String,Object> getPathSize(String path, String dataId, |
||||
|
Map<String, Object> resultMap, |
||||
|
Map<String, List<String>> fileNameMap) { |
||||
|
// LOGGER.info("UpLoadExcelService [getPathSize] need Download !"); |
||||
|
// 判断文件夹是否尊在,若不存在,则 isDownload = false ,pgc ugc egc 都为0; |
||||
|
File file=new File(path+dataId); |
||||
|
resultMap.put("pgc",0); |
||||
|
resultMap.put("ugc",0); |
||||
|
resultMap.put("egc",0); |
||||
|
List<Map<String,String>> filePathSize = new ArrayList<>(); |
||||
|
List<Map<String,String>> imagePathSize = new ArrayList<>(); |
||||
|
List<Map<String,String>> videoPathSize = new ArrayList<>(); |
||||
|
List<String> filePath = new ArrayList<>(); |
||||
|
List<String> imagePath = new ArrayList<>(); |
||||
|
List<String> videoPath = new ArrayList<>(); |
||||
|
if (!file.exists()) { // the attachment folder does not exist
resultMap.put("isDownload", false);
}else{
resultMap.put("isDownload", true);
List<String> fileNames = fileNameMap.get(dataId);
for (String fileName : fileNames) { // videoPath == egc, filePath == ugc, imagePath == pgc
// Read each file from disk, upload it to go-fast, and build the matching path and pathSize fields from the file-name prefix
||||
|
String goFastUrl = config.getGoFastPostUrl(); |
||||
|
// String zipPath = bfdApiConfig.getUploadZipPath(); |
||||
|
// String url = DownLoadFile.upload(goFastUrl,dataId+fileName,content); |
||||
|
String file1 = path + dataId + "/" + fileName; |
||||
|
Map urlMap = DownLoadFile.upload(goFastUrl,dataId+fileName,new File(file1)); |
||||
|
String url = urlMap.get("path").toString(); |
||||
|
|
||||
|
Map<String,String> pathMap = new HashMap<>(); |
||||
|
pathMap.put("url",url); |
||||
|
// Get the file size (bytes -> KB, kept to two decimal places)
long size = Long.valueOf(urlMap.get("size").toString());
Double newSize = Math.round(size / 1024.0 * 100) / 100.0;
pathMap.put("size", newSize + "KB");
||||
|
// Get the image/video resolution
||||
|
String resolution = ""; |
||||
|
if(fileName.startsWith("image")) { |
||||
|
if(url.endsWith(".svg")){ |
||||
|
resolution = ""; |
||||
|
}else { |
||||
|
resolution = ReadLine.getImageDim(file1); |
||||
|
} |
||||
|
} |
||||
|
if(fileName.startsWith("video")){ |
||||
|
if(url.endsWith(".mp3")){ |
||||
|
resolution = "400*240"; |
||||
|
}else if(url.endsWith(".flv")) { |
||||
|
resolution = ""; |
||||
|
}else{ |
||||
|
resolution = ReadLine.videosize(file1); |
||||
|
} |
||||
|
} |
||||
|
// System.out.println(resolution); |
||||
|
pathMap.put("resolution",resolution); |
||||
|
// Video duration
||||
|
String videoTime = ""; |
||||
|
pathMap.put("videoTime",videoTime); |
||||
|
|
||||
|
if(fileName.startsWith("file")){ |
||||
|
resultMap.put("ugc",1); |
||||
|
filePathSize.add(pathMap); |
||||
|
filePath.add(url); |
||||
|
} |
||||
|
if(fileName.startsWith("image")){ |
||||
|
resultMap.put("pgc",1); |
||||
|
imagePathSize.add(pathMap); |
||||
|
imagePath.add(url); |
||||
|
} |
||||
|
if(fileName.startsWith("video")){ |
||||
|
resultMap.put("egc",1); |
||||
|
videoPathSize.add(pathMap); |
||||
|
videoPath.add(url); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
resultMap.put("filePathSize",JSONObject.toJSONString(filePathSize)); |
||||
|
resultMap.put("imagePathSize",JSONObject.toJSONString(imagePathSize)); |
||||
|
resultMap.put("videoPathSize",JSONObject.toJSONString(videoPathSize)); |
||||
|
resultMap.put("filePath",filePath); |
||||
|
resultMap.put("imagePath",imagePath); |
||||
|
resultMap.put("videoPath",videoPath); |
||||
|
return resultMap; |
||||
|
} |
||||
|
|
||||
|
/**
 * Read the lines of a text file and import them into ES.
 * @param subjectId
 */
public void uploadData(String subjectId, List<String> list, String crawlDataFlag) {
    String indexNamePre = config.getIndexNamePre();
    String indexName = indexNamePre + subjectId;
    for (String l : list) {
        JSONObject data = new JSONObject();
        try {
            data = JSONObject.parseObject(l);
            data.put("crawlDataFlag", crawlDataFlag);
        } catch (Exception e) {
            // this line could not be parsed as JSON; skip it and keep processing the rest
            continue;
        }
        if (data.size() > 0) {
            try {
                DATA_CACHE.put(EsUtils2.buildBulkItem(indexName, INDEX_TYPE, data));
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
    flushData();
}
||||
|
|
||||
|
/** |
||||
|
* 将传入的一条数据导入到ES中 |
||||
|
* @param subjectId |
||||
|
* @param result |
||||
|
*/ |
||||
|
public void uploadData(String subjectId, JSONObject result) { |
||||
|
String indexNamePre = config.getIndexNamePre(); |
||||
|
String indexName = indexNamePre + subjectId; |
||||
|
try { |
||||
|
if(result.size() >0) { |
||||
|
try { |
||||
|
DATA_CACHE.put(EsUtils2.buildBulkItem(indexName, INDEX_TYPE, result)); |
||||
|
} catch (InterruptedException e) { |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
} |
||||
|
}catch (Exception e){ |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
flushData(); |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* 将一个List 的数据导入到ES中 |
||||
|
*/ |
||||
|
public void uploadData(String subjectId, List<JSONObject> resultList) { |
||||
|
String indexNamePre = config.getIndexNamePre(); |
||||
|
String indexName = indexNamePre + subjectId; |
||||
|
for (JSONObject data:resultList) { |
||||
|
if(data.size() >0) { |
||||
|
try { |
||||
|
DATA_CACHE.put(EsUtils2.buildBulkItem(indexName, INDEX_TYPE, data)); |
||||
|
} catch (InterruptedException e) { |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
flushData(); |
||||
|
} |
||||
|
|
||||
|
public void flushData() { |
||||
|
LOGGER.info("ES flushData"); |
||||
|
List<EsUtils2.BulkItem> dataList = Lists.newArrayList(); |
||||
|
EsUtils2.BulkItem item = DATA_CACHE.poll(); |
||||
|
while (Objects.nonNull(item)) { |
||||
|
if (dataList.size() >= 100) { |
||||
|
EsUtils2.bulkIndex(config.esMiniClusterName(), dataList, "_id_");//"SQ_Mini" config.esMiniClusterName() |
||||
|
LOGGER.debug("Flush data, size:{}.", dataList.size()); |
||||
|
dataList.clear(); |
||||
|
} |
||||
|
dataList.add(item); |
||||
|
item = DATA_CACHE.poll(); |
||||
|
} |
||||
|
if (dataList.size() > 0) { |
||||
|
EsUtils2.bulkIndex(config.esMiniClusterName(), dataList, "_id_"); |
||||
|
LOGGER.debug("Flush data, size:{}.", dataList.size()); |
||||
|
} |
||||
|
} |
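
// Illustrative note (not part of the original source): the uploadData(...) overloads only queue items
// into DATA_CACHE, and flushData() drains the queue and bulk-indexes them in batches of up to 100, e.g.:
//   uploadData("123", resultList);   // queues N items and calls flushData() internally
//   // flushData() then issues ceil(N / 100) bulk requests against config.esMiniClusterName()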
||||
|
|
||||
|
public int uploadTxt(String subjectId, String filPath, String crawlDataFlag) { |
||||
|
int dataCount = 0; |
||||
|
try { |
||||
|
List<String> list = ReadLine.readLine(new File(filPath)); |
||||
|
dataCount = list.size(); |
||||
|
uploadData(subjectId, list, crawlDataFlag); |
||||
|
}catch (Exception e){ |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
return dataCount; |
||||
|
} |
||||
|
|
||||
|
private String getDocType (String channel){ |
||||
|
Map<String,String> channelMap = new HashMap<>(); |
||||
|
channelMap.put("社交媒体","social"); |
||||
|
channelMap.put("网络视频","video"); |
||||
|
channelMap.put("新闻资讯","news"); |
||||
|
channelMap.put("博客智库","blog"); |
||||
|
channelMap.put("论坛贴吧","bbs"); |
||||
|
channelMap.put("搜索引擎","search"); |
||||
|
channelMap.put("电商网站","item"); |
||||
|
channelMap.put("生活方式","life"); |
||||
|
return channelMap.get(channel); |
||||
|
} |
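
// Illustrative only (not part of the original source): the Chinese channel labels from the Excel file
// are mapped to doc types, e.g.
//   getDocType("社交媒体") -> "social"
//   getDocType("电商网站") -> "item"
// An unknown label returns null, so DOC_TYPE would be stored as null for that row.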
||||
|
|
||||
|
} |
@@ -0,0 +1,214 @@
|
package com.bfd.mf.job.service.upload; |
||||
|
|
||||
|
import com.alibaba.fastjson.JSONObject; |
||||
|
import com.bfd.mf.job.config.AppConfig; |
||||
|
|
||||
|
import com.bfd.mf.job.domain.entity.UploadTask; |
||||
|
import com.bfd.mf.job.domain.repository.UploadTaskRepository; |
||||
|
import com.bfd.mf.job.util.EsUtils; |
||||
|
import com.bfd.mf.job.util.ZipUtils; |
||||
|
import com.google.common.collect.Maps; |
||||
|
import org.assertj.core.util.Lists; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import javax.annotation.PostConstruct; |
||||
|
import java.io.*; |
||||
|
import java.util.HashMap; |
||||
|
import java.util.List; |
||||
|
import java.util.Map; |
||||
|
import java.util.Objects; |
||||
|
import java.util.concurrent.BlockingQueue; |
||||
|
import java.util.concurrent.LinkedBlockingQueue; |
||||
|
|
||||
|
|
||||
|
@Service |
||||
|
public class UpLoadService { |
||||
|
private static final Logger LOGGER = LoggerFactory.getLogger(UpLoadService.class); |
||||
|
private static BlockingQueue<Map<Long, List<? extends Number>>> P_TASK_CACHE_RANGE = new LinkedBlockingQueue<>(); |
||||
|
|
||||
|
@Autowired |
||||
|
private AppConfig config; |
||||
|
@Autowired |
||||
|
private UploadTaskRepository uploadTaskRepository; |
||||
|
@Autowired |
||||
|
private UpLoadExcelService upLoadExcelService; |
||||
|
|
||||
|
@PostConstruct |
||||
|
public void init() { |
||||
|
// Register the data source cluster
EsUtils.registerCluster(config.esMiniClusterName(), config.esMiniAddress()); // es-target in the config file
||||
|
} |
||||
|
|
||||
|
public void tryAcquire() {
    // Fetch tasks with task_type 3 and crawl_status = 0 for upload; once picked up, set crawl_status to 1 (uploading)
    List<UploadTask> taskList2 = uploadTaskRepository.getTaskNeedUpLoad();
    for (UploadTask task : taskList2) {
        Map<Long, List<? extends Number>> cache = Maps.newHashMap();
        long taskId = task.getId().longValue();
        cache.put(taskId, Lists.newArrayList(0L, 0L, 0, 1, 1));
        try { // mark the task as "uploading" in the database
            uploadTaskRepository.updateCrawlStatus(taskId, 1, 0, 0, 0);
            P_TASK_CACHE_RANGE.put(cache);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}
||||
|
|
||||
|
public void produce() {
    Map<Long, List<? extends Number>> range = P_TASK_CACHE_RANGE.poll(); // poll --> returns null if the queue is empty
    if (Objects.isNull(range)) {
        return;
    }
    long taskId = 0L;
    for (Map.Entry<Long, List<? extends Number>> entry : range.entrySet()) {
        entry.getValue();
        taskId = entry.getKey();
    }
    // This part may need to change, because site_type can only be obtained by joining another table
    UploadTask task = uploadTaskRepository.findById(taskId).get();
    LOGGER.info("Task to upload: " + JSONObject.toJSONString(task));
    String subjectId = task.getSubjectId().toString();
    String crawlDataFlag = task.getCrawlDataFlag();
    String zipPath = config.getUploadZipPath();
    String zipName = task.getFileName();
    String zipFileName = ZipUtils.getZipFileName(zipName, zipPath); // the zip is extracted into a folder with the same name as the zip
    // fileName in the database is the name of the uploaded file; the storage path comes from the config file, and together they locate the archive to extract
    // Unzip the archive, validate the data and upload it
    Map<String, List<String>> fileNameMap = ZipUtils.unZip(new File(zipPath + zipName), zipPath + zipFileName);
    // fileNameMap holds the names of all extracted files; if it contains an Excel file
    if (fileNameMap.size() == 0) {
        // The extracted folder is empty or the zip file was not found; set crawl_status to 5
        uploadTaskRepository.updateCrawlStatus(taskId, 5, 0, 0, 0);
        LOGGER.error("[upload failed] task " + taskId + " finished uploading, but parsing the file failed, so crawl_status is set to 5!");
    } else {
||||
|
String fileName = fileNameMap.get("excelName").get(0); |
||||
|
fileNameMap.remove("excelName"); |
||||
|
int dataTotal = 0; |
||||
|
Map<String,Long> pubTimeMap = new HashMap<>(); |
||||
|
if (fileName.contains("xlsx")) { // 传的是Excel |
||||
|
LOGGER.info("上传的是 Excel 类型的数据"); |
||||
|
try { |
||||
|
Map<String,Object> returnMap = upLoadExcelService.parseExcel2(subjectId, zipPath + zipFileName + "/", fileName, fileNameMap, crawlDataFlag); |
||||
|
dataTotal = Integer.valueOf(returnMap.get("dataCount").toString()); |
||||
|
pubTimeMap = (Map<String, Long>) returnMap.get("pubTimeMap"); |
||||
|
}catch (Exception e){ |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
} else { // a plain-text file was uploaded
    LOGGER.info("The uploaded data is not an Excel file");
||||
|
String theFinalFilePath = zipPath + zipFileName + "/" + fileName; |
||||
|
dataTotal = upLoadExcelService.uploadTxt(subjectId, theFinalFilePath, crawlDataFlag); |
||||
|
} |
||||
|
// When finished, set crawl_status in the database to 3 (done)
if (dataTotal == 0) {
    LOGGER.error("[upload failed] task " + taskId + " finished uploading, but 0 records were written, so crawl_status is set to 5!");
    uploadTaskRepository.updateCrawlStatus(taskId, 5, dataTotal, 0, 0);
} else {
    LOGGER.info("[upload succeeded] task " + taskId + " finished uploading, crawl_status can now be set to 3!");
    // Sleep a little before changing the status, because writing to ES has a certain delay
    try {
        Thread.sleep(30000);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
    // plain-text uploads never fill pubTimeMap, so fall back to 0 instead of unboxing a null
    long pubMin = pubTimeMap.getOrDefault("min", 0L);
    long pubMax = pubTimeMap.getOrDefault("max", 0L);
    uploadTaskRepository.updateCrawlStatus(taskId, 3, dataTotal, pubMin, pubMax);
||||
|
} |
||||
|
} |
||||
|
|
||||
|
} |
||||
|
|
||||
|
|
||||
|
|
||||
|
// public static void unZipGetFileType(File srcFile) throws RuntimeException { |
||||
|
// // 判断源文件是否存在 |
||||
|
// if (!srcFile.exists()) { |
||||
|
// throw new RuntimeException(srcFile.getPath() + "所指文件不存在"); |
||||
|
// } |
||||
|
// |
||||
|
// // 开始解压 |
||||
|
// ZipFile zipFile = null; |
||||
|
// try { |
||||
|
// zipFile = new ZipFile(srcFile); |
||||
|
// Enumeration<?> entries = zipFile.entries(); |
||||
|
// ZipEntry entry = (ZipEntry) entries.nextElement(); |
||||
|
// String fileName = entry.getName(); |
||||
|
// System.out.println(fileName); |
||||
|
// String substring = fileName.substring(fileName.lastIndexOf(".")+1, fileName.length()); |
||||
|
// System.out.println(substring); |
||||
|
// |
||||
|
// } catch (Exception e) { |
||||
|
// throw new RuntimeException("unzip error from ZipUtils", e); |
||||
|
// } finally { |
||||
|
// if (zipFile != null) { |
||||
|
// try { |
||||
|
// zipFile.close(); |
||||
|
// } catch (IOException e) { |
||||
|
// e.printStackTrace(); |
||||
|
// } |
||||
|
// } |
||||
|
// } |
||||
|
// } |
||||
|
// |
||||
|
// public static void unZip(File srcFile, String destDirPath) throws RuntimeException { |
||||
|
// long start = System.currentTimeMillis(); |
||||
|
// // 判断源文件是否存在 |
||||
|
// if (!srcFile.exists()) { |
||||
|
// throw new RuntimeException(srcFile.getPath() + "所指文件不存在"); |
||||
|
// } |
||||
|
// |
||||
|
// // 开始解压 |
||||
|
// ZipFile zipFile = null; |
||||
|
// try { |
||||
|
// zipFile = new ZipFile(srcFile); |
||||
|
// Enumeration<?> entries = zipFile.entries(); |
||||
|
// while (entries.hasMoreElements()) { |
||||
|
// ZipEntry entry = (ZipEntry) entries.nextElement(); |
||||
|
// System.out.println("解压" + entry.getName()); |
||||
|
// // 如果是文件夹,就创建个文件夹 |
||||
|
// if (entry.isDirectory()) { |
||||
|
// String dirPath = destDirPath + "/" + entry.getName(); |
||||
|
// File dir = new File(dirPath); |
||||
|
// dir.mkdirs(); |
||||
|
// } else { |
||||
|
// // 如果是文件,就先创建一个文件,然后用io流把内容copy过去 |
||||
|
// File targetFile = new File(destDirPath + "/" + entry.getName()); |
||||
|
// // 保证这个文件的父文件夹必须要存在 |
||||
|
// if (!targetFile.getParentFile().exists()) { |
||||
|
// targetFile.getParentFile().mkdirs(); |
||||
|
// } |
||||
|
// targetFile.createNewFile(); |
||||
|
// // 将压缩文件内容写入到这个文件中 |
||||
|
// InputStream is = zipFile.getInputStream(entry); |
||||
|
// FileOutputStream fos = new FileOutputStream(targetFile); |
||||
|
// int len; |
||||
|
// byte[] buf = new byte[1024]; |
||||
|
// while ((len = is.read(buf)) != -1) { |
||||
|
// fos.write(buf, 0, len); |
||||
|
// } |
||||
|
// // 关流顺序,先打开的后关闭 |
||||
|
// fos.close(); |
||||
|
// is.close(); |
||||
|
// } |
||||
|
// } |
||||
|
// long end = System.currentTimeMillis(); |
||||
|
// System.out.println("解压完成,耗时:" + (end - start) + " ms"); |
||||
|
// } catch (Exception e) { |
||||
|
// throw new RuntimeException("unzip error from ZipUtils", e); |
||||
|
// } finally { |
||||
|
// if (zipFile != null) { |
||||
|
// try { |
||||
|
// zipFile.close(); |
||||
|
// } catch (IOException e) { |
||||
|
// e.printStackTrace(); |
||||
|
// } |
||||
|
// } |
||||
|
// } |
||||
|
// } |
||||
|
|
||||
|
} |
@@ -0,0 +1,321 @@
|
package com.bfd.mf.job.util; |
||||
|
|
||||
|
import org.apache.commons.lang3.StringUtils; |
||||
|
import org.apache.log4j.Logger; |
||||
|
|
||||
|
import java.text.ParseException; |
||||
|
import java.text.SimpleDateFormat; |
||||
|
import java.util.Date; |
||||
|
import java.util.regex.Matcher; |
||||
|
import java.util.regex.Pattern; |
||||
|
|
||||
|
|
||||
|
public class DataCheckUtil { |
||||
|
|
||||
|
public static Pattern datePattrn = Pattern.compile("^\\d{4}\\-\\d{2}\\-\\d{2}\\s\\d{2}\\:\\d{2}:\\d{2}$"); |
||||
|
|
||||
|
public static Pattern dayPattrn = Pattern.compile("^\\d{2,4}\\-\\d{1,2}\\-\\d{1,2}$"); |
||||
|
|
||||
|
private static SimpleDateFormat ddf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
|
||||
|
public static Pattern p = Pattern.compile("\\s+"); |
||||
|
|
||||
|
private static final Logger LOG = Logger.getLogger(DataCheckUtil.class); |
||||
|
|
||||
|
public static String chechData2(String dataStr){ |
||||
|
dataStr = dataStr.replace("Z",""); |
||||
|
dataStr = checkData(dataStr); |
||||
|
Matcher matcher = datePattrn.matcher(dataStr); |
||||
|
if(!matcher.find()){ |
||||
|
System.out.println("格式错误,使用当前时间 : " + dataStr); |
||||
|
dataStr = DateUtil.getDateTime(); |
||||
|
}else{ |
||||
|
dataStr = matcher.group(0); |
||||
|
} |
||||
|
return dataStr; |
||||
|
} |
||||
|
|
||||
|
public static String checkData(String dataStr){ |
||||
|
SimpleDateFormat ddf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
if(StringUtils.isBlank(dataStr)){ |
||||
|
return ddf.format(new Date()); |
||||
|
} |
||||
|
if(dataStr.contains("-:")){ |
||||
|
dataStr = dataStr.replace("-:",":"); |
||||
|
} |
||||
|
if(dataStr.contains(":-")){ |
||||
|
dataStr = dataStr.replace(":-",":"); |
||||
|
} |
||||
|
|
||||
|
Matcher matcher = datePattrn.matcher(dataStr); |
||||
|
|
||||
|
if(!matcher.find()){ |
||||
|
dataStr = dataStr.trim(); |
||||
|
if(!p.matcher(dataStr).find()){ |
||||
|
if(!dayPattrn.matcher(dataStr).find()){ |
||||
|
return ddf.format(new Date()); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
String[] dates = dataStr.split("\\s+"); |
||||
|
String years = ""; |
||||
|
String times = ""; |
||||
|
if(dates.length == 2){ |
||||
|
years = dates[0]; |
||||
|
times = dates[1]; |
||||
|
}else{ |
||||
|
years = dates[0]; |
||||
|
} |
||||
|
|
||||
|
if(years.contains("/")){ |
||||
|
years = years.replace("/", "-"); |
||||
|
} |
||||
|
String[] yearStr = years.split("-"); |
||||
|
String yms = "" ; |
||||
|
if(yearStr.length == 3){ |
||||
|
String year = yearStr[0]; |
||||
|
String month = yearStr[1]; |
||||
|
String day = yearStr[2]; |
||||
|
if(year.length() == 2){ |
||||
|
year = "20"+year; |
||||
|
} |
||||
|
if(month.length() == 1){ |
||||
|
month = "0"+month; |
||||
|
} |
||||
|
if(day.length() == 1){ |
||||
|
day = "0"+day; |
||||
|
} |
||||
|
yms = year+"-"+month+"-"+day; |
||||
|
} |
||||
|
|
||||
|
String hms = ""; |
||||
|
if(StringUtils.isBlank(times)){ |
||||
|
hms = "00:00:00"; |
||||
|
}else{ |
||||
|
times = times.replace("/", ":"); |
||||
|
if(times.contains(":")){ |
||||
|
String[] timeStr = times.split(":"); |
||||
|
if( timeStr.length >= 3 ){ |
||||
|
String hours = timeStr[0]; |
||||
|
String mins = timeStr[1]; |
||||
|
String s = timeStr[2]; |
||||
|
|
||||
|
if(hours.length() == 1){ |
||||
|
hours = "0"+hours; |
||||
|
} |
||||
|
if(mins.length() == 1){ |
||||
|
mins = "0"+mins; |
||||
|
} |
||||
|
if(s.length() == 1){ |
||||
|
s = "0"+s; |
||||
|
} |
||||
|
hms = hours+":"+mins+":"+s; |
||||
|
}else if(timeStr.length == 2){ |
||||
|
String hours = timeStr[0]; |
||||
|
String mins = timeStr[1]; |
||||
|
String s = "00"; |
||||
|
if(hours.length() == 1){ |
||||
|
hours = "0"+hours; |
||||
|
} |
||||
|
if(mins.length() == 1){ |
||||
|
mins = "0"+mins; |
||||
|
} |
||||
|
hms = hours+":"+mins+":"+s; |
||||
|
} else { |
||||
|
String hours = timeStr[0]; |
||||
|
String mins = "00" ; |
||||
|
String s = "00"; |
||||
|
if(hours.length() == 1){ |
||||
|
hours = "0"+hours; |
||||
|
} |
||||
|
hms = hours+":"+mins+":"+s; |
||||
|
} |
||||
|
}else{ |
||||
|
if(isNum(times) && times.length()==2){ |
||||
|
hms = times+":00:00"; |
||||
|
}else if(isNum(times) && times.length()==1){ |
||||
|
hms = "0"+times+":00:00"; |
||||
|
}else{ |
||||
|
hms = "00:00:00" ; |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
if(StringUtils.isBlank(yms)){ |
||||
|
return ddf.format(new Date()); |
||||
|
} |
||||
|
if(yms != "" || hms != ""){ |
||||
|
return yms+" "+hms; |
||||
|
} |
||||
|
} |
||||
|
return dataStr ; |
||||
|
} |
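
// Illustrative only (not part of the original source): how checkData normalizes a few messy inputs
// into the yyyy-MM-dd HH:mm:ss format expected by the rest of the pipeline.
//   checkData("2019/3/1 9:5")  -> "2019-03-01 09:05:00"
//   checkData("19-3-1")        -> "2019-03-01 00:00:00"
//   checkData("")              -> the current time, formatted as yyyy-MM-dd HH:mm:ss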
||||
|
|
||||
|
private static boolean isNum(String time){ |
||||
|
Pattern p = Pattern.compile("\\d+"); |
||||
|
if(p.matcher(time).find()){ |
||||
|
return true ; |
||||
|
} |
||||
|
return false ; |
||||
|
} |
||||
|
|
||||
|
public static String convertStringTotime(String datetime){ |
||||
|
if(StringUtils.isBlank(datetime)){ |
||||
|
return DateUtil.getDateTime(System.currentTimeMillis()); |
||||
|
} |
||||
|
String creationTime = ""; |
||||
|
if(datetime.length() == 13){ |
||||
|
creationTime = DateUtil.getDateTime(Long.valueOf(datetime)); |
||||
|
}else{ |
||||
|
creationTime = DateUtil.getDateTime(Long.valueOf(datetime) *1000); |
||||
|
} |
||||
|
return creationTime ; |
||||
|
|
||||
|
} |
||||
|
|
||||
|
public static long convertStringToLong(String datetime){ |
||||
|
if(StringUtils.isBlank(datetime)){ |
||||
|
return System.currentTimeMillis(); |
||||
|
} |
||||
|
long creationTime ; |
||||
|
if(datetime.length() == 13){ |
||||
|
creationTime = Long.valueOf(datetime); |
||||
|
}else{ |
||||
|
creationTime = Long.valueOf(datetime) *1000; |
||||
|
} |
||||
|
return creationTime ; |
||||
|
} |
||||
|
|
||||
|
public static long convertTimeTotime(String datetime){ |
||||
|
if(StringUtils.isBlank(datetime)){ |
||||
|
return System.currentTimeMillis() / 1000; |
||||
|
} |
||||
|
long creationTime ; |
||||
|
if(datetime.length() == 13){ |
||||
|
creationTime = Long.valueOf(datetime) / 1000; |
||||
|
}else{ |
||||
|
creationTime = Long.valueOf(datetime) ; |
||||
|
} |
||||
|
return creationTime ; |
||||
|
|
||||
|
} |
||||
|
|
||||
|
/**
 * String ("yyyy-MM-dd HH:mm:ss") to epoch seconds
 */
||||
|
public static long convertDateTotime(String datetime){ |
||||
|
if(StringUtils.isBlank(datetime)){ |
||||
|
return System.currentTimeMillis() / 1000; |
||||
|
} |
||||
|
long creationTime = 0; |
||||
|
try { |
||||
|
if(null != datetime && !("null").equals(datetime)) { |
||||
|
SimpleDateFormat ddf1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
creationTime = Long.valueOf(ddf1.parse(datetime).getTime()) / 1000; |
||||
|
}else{ |
||||
|
creationTime = new Date().getTime()/1000; |
||||
|
} |
||||
|
} catch (Exception e) { |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
return creationTime ; |
||||
|
|
||||
|
} |
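
// Illustrative only (not part of the original source): convertDateTotime parses a
// "yyyy-MM-dd HH:mm:ss" string with the JVM default time zone and returns epoch seconds, e.g.
//   convertDateTotime("2019-03-01 01:01:01") -> seconds since 1970-01-01 (value depends on the default zone)
//   convertDateTotime("")                    -> System.currentTimeMillis() / 1000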
||||
|
|
||||
|
/**
 * Get the current time as a "yyyy-MM-dd HH:mm:ss" string
 */
||||
|
public static String getCurrentTime(){ |
||||
|
SimpleDateFormat ddf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
return ddf.format(new Date()); |
||||
|
} |
||||
|
|
||||
|
/**
 * long to String; used for pubTimeStr, crawlTimeStr, createTimeStr
 */
||||
|
public static String getCurrentTime(long dateTime){ |
||||
|
SimpleDateFormat ddf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
return ddf.format(new Date(dateTime)); |
||||
|
} |
||||
|
/**
 * long to a "yyyy-MM-dd'T'HH:mm:ss.SSSXXX" formatted date string; used for pubDate, crawlDate, createDate
 */
||||
|
public static String getDate(long dateTime){ |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX"); |
||||
|
return sdf.format(new Date(dateTime)); |
||||
|
} |
||||
|
|
||||
|
/**
 * String ("yyyy-MM-dd HH:mm:ss") to a "yyyy-MM-dd'T'HH:mm:ss.SSSXXX" formatted date string; used for pubDate, crawlDate, createDate
 */
||||
|
public static String getDate(String dateTime){ |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX"); |
||||
|
SimpleDateFormat ddf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
try { |
||||
|
Date date = ddf.parse(dateTime) ; |
||||
|
return sdf.format(date); |
||||
|
} catch (ParseException e) { |
||||
|
e.printStackTrace(); |
||||
|
LOG.error("DataCheckUtil getDate() err data:"+dateTime); |
||||
|
} |
||||
|
return sdf.format(new Date()); |
||||
|
} |
||||
|
|
||||
|
/**
 * long (millis) to the millis of 00:00:00 of that day
 */
||||
|
public static long getDay(long dateTime){ |
||||
|
try{ |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); |
||||
|
String dayStr = sdf.format(new Date(dateTime)); |
||||
|
Date date = sdf.parse(dayStr); |
||||
|
return date.getTime(); |
||||
|
}catch(Exception e){ |
||||
|
e.printStackTrace(); |
||||
|
LOG.error("DataCheckUtil getDay() err data:"+dateTime); |
||||
|
} |
||||
|
return 0; |
||||
|
} |
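
// Illustrative only (not part of the original source): getDay truncates a millisecond timestamp
// to midnight of that day in the JVM default time zone, e.g.
//   getDay(1551409261000L)  -> the millis of 00:00:00 on that calendar day (zone-dependent)
//   getDay("2019-03-01")    -> the millis of 2019-03-01 00:00:00 (String overload below)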
||||
|
|
||||
|
/**
 * String ("yyyy-MM-dd") to the millis of 00:00:00 of that day
 */
||||
|
public static long getDay(String dateTime){ |
||||
|
try{ |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); |
||||
|
Date date = sdf.parse(dateTime); |
||||
|
return date.getTime(); |
||||
|
}catch(Exception e){ |
||||
|
e.printStackTrace(); |
||||
|
LOG.error("DataCheckUtil getDay2() err data:"+dateTime); |
||||
|
} |
||||
|
return 0; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
|
||||
|
// public static void main(String[] args) { |
||||
|
// //System.out.println(checkData("")); |
||||
|
// /*System.out.println(System.currentTimeMillis()); |
||||
|
// System.out.println(Calendar.getInstance().getTimeInMillis() / 1000); |
||||
|
// System.out.println(new Date().getTime() / 1000); |
||||
|
// System.out.println(DateUtil.getDateTime((System.currentTimeMillis() / 1000) * 1000)); |
||||
|
// System.out.println(convertStringTotime("1558077405")); |
||||
|
// System.out.println(convertTimeTotime(null));*/ |
||||
|
// //System.out.println(DateUtil.getTimeMillis("2019-03-01 01:01:01")); |
||||
|
// |
||||
|
// /*String aa = DataCheckUtil.convertStringTotime("1563245342"); |
||||
|
// System.out.println(aa);*/ |
||||
|
// /*SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); |
||||
|
// try { |
||||
|
// Date date = sdf.parse("2019-03-01"); |
||||
|
// System.out.println(date.getTime()); |
||||
|
// } catch (ParseException e) { |
||||
|
// // TODO Auto-generated catch block |
||||
|
// e.printStackTrace(); |
||||
|
// }*/ |
||||
|
// System.out.println(getDate("2019-03-01 01:01:01")); |
||||
|
// } |
||||
|
|
||||
|
} |
@@ -0,0 +1,365 @@
|
/* |
||||
|
* Copyright (C) 2016 Baifendian Corporation |
||||
|
* <p> |
||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); |
||||
|
* you may not use this file except in compliance with the License. |
||||
|
* You may obtain a copy of the License at |
||||
|
* <p> |
||||
|
* http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
* <p> |
||||
|
* Unless required by applicable law or agreed to in writing, software |
||||
|
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
|
* See the License for the specific language governing permissions and |
||||
|
* limitations under the License. |
||||
|
*/ |
||||
|
|
||||
|
package com.bfd.mf.job.util; |
||||
|
|
||||
|
import com.bfd.nlp.common.util.string.TStringUtils; |
||||
|
|
||||
|
import java.io.UnsupportedEncodingException; |
||||
|
import java.text.ParseException; |
||||
|
import java.text.SimpleDateFormat; |
||||
|
import java.util.Calendar; |
||||
|
import java.util.Date; |
||||
|
import java.util.GregorianCalendar; |
||||
|
|
||||
|
public class DateUtil { |
||||
|
|
||||
|
private static final String DATE_UNIT_DAY = "D"; |
||||
|
public static final String DATE_UNIT_HOUR = "H"; |
||||
|
|
||||
|
public static String TIME_FORMAT = "yyyy-MM-dd HH:mm:ss"; |
||||
|
public static String DATE_FORMAT = "yyyy-MM-dd"; |
||||
|
public static String DATE_FORMAT2 = "yyyy.MM.dd"; |
||||
|
|
||||
|
/**
 * @param startTime start time in millis
 * @param endTime   end time in millis
 * @param unit      D (days) or H (hours)
 * @return the interval rounded up to whole units, or -1 for invalid input
 */
public static double getTimeIntervalByUnit(long startTime, long endTime, String unit) {
    int interval = 0;
    long dateDistance = endTime - startTime;
    if (null == unit || dateDistance <= 0)
        return -1;
    if (DATE_UNIT_DAY.equals(unit))
        interval = 24 * 3600 * 1000;
    if (DATE_UNIT_HOUR.equals(unit))
        interval = 3600 * 1000;
    if (interval == 0)
        return -1;
    // cast before dividing so that Math.ceil really rounds up instead of truncating
    return Math.ceil((double) dateDistance / interval);
}
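
// Illustrative only (not part of the original source): with the cast above,
// a 36-hour span is rounded up to whole units, e.g.
//   getTimeIntervalByUnit(0L, 36L * 3600 * 1000, "D") -> 2.0
//   getTimeIntervalByUnit(0L, 36L * 3600 * 1000, "H") -> 36.0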
||||
|
|
||||
|
/* |
||||
|
* 20160807190815678:yyyyMMddhhmmssSSS |
||||
|
*/ |
||||
|
public static String getTimeStrForNow() { |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmssSSS"); |
||||
|
return sdf.format(new Date()); |
||||
|
} |
||||
|
|
||||
|
private static String getTimeStrDefault() {
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmssSSS");
    Date date = new Date();
    date.setYear(1970 - 1900); // Date#setYear is offset from 1900, so this yields the year 1970
    return sdf.format(date);
}
||||
|
|
||||
|
public static byte[] timeStr2Chars(String timeStr) { |
||||
|
try { |
||||
|
return timeStr.getBytes("UTF-8"); |
||||
|
} catch (UnsupportedEncodingException e) { |
||||
|
// TODO Auto-generated catch block |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
return new byte[0]; |
||||
|
} |
||||
|
|
||||
|
public static long timeStr2Long(String timeStr) { |
||||
|
if (TStringUtils.isEmpty(timeStr)) { |
||||
|
String defTm = getTimeStrDefault(); |
||||
|
return Long.parseLong(defTm); |
||||
|
} |
||||
|
return Long.parseLong(timeStr); |
||||
|
} |
||||
|
|
||||
|
private static Date parseDate(long time) { |
||||
|
return new Date(time); |
||||
|
} |
||||
|
|
||||
|
/*** |
||||
|
* timestamp to yyyy-MM-dd |
||||
|
* |
||||
|
* @param timestamp |
||||
|
* @return |
||||
|
*/ |
||||
|
public static String parseDateByday(long timestamp) { |
||||
|
Date date = parseDate(timestamp); |
||||
|
SimpleDateFormat format = new SimpleDateFormat(DATE_FORMAT); |
||||
|
return format.format(date); |
||||
|
} |
||||
|
|
||||
|
public static String parseDateByday2(long timestamp) { |
||||
|
Date date = parseDate(timestamp); |
||||
|
SimpleDateFormat format = new SimpleDateFormat(DATE_FORMAT2); |
||||
|
return format.format(date); |
||||
|
} |
||||
|
|
||||
|
/*** |
||||
|
* timestamp to yyyy-MM-dd HH:mm:ss |
||||
|
* |
||||
|
* @param timestamp |
||||
|
* @return |
||||
|
*/ |
||||
|
public static String parseDateByTime(long timestamp) { |
||||
|
Date date = parseDate(timestamp); |
||||
|
SimpleDateFormat format = new SimpleDateFormat(TIME_FORMAT); |
||||
|
return format.format(date); |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* timestamp with special format |
||||
|
* |
||||
|
* @param timestamp |
||||
|
* @param format |
||||
|
* @return |
||||
|
*/ |
||||
|
public static String parseDateByFormat(long timestamp, String format) { |
||||
|
Date date = parseDate(timestamp); |
||||
|
SimpleDateFormat dateFormat = new SimpleDateFormat(format); |
||||
|
return dateFormat.format(date); |
||||
|
} |
||||
|
|
||||
|
/**
 * Get the day of the week for today
 *
 * @return a number representing the day of the week (1 = Monday ... 7 = Sunday)
 */
||||
|
public static int getDay() { |
||||
|
Calendar cal = Calendar.getInstance(); |
||||
|
int day = cal.get(Calendar.DAY_OF_WEEK) - 1; |
||||
|
day = day == 0 ? 7 : day; |
||||
|
return day; |
||||
|
} |
||||
|
|
||||
|
/**
 * Get how many seconds of today have already passed
 *
 * @return the number of seconds elapsed since midnight
 */
||||
|
public static int getSecondsNow() { |
||||
|
Calendar curDate = Calendar.getInstance(); |
||||
|
Calendar tommorowDate = new GregorianCalendar(curDate |
||||
|
.get(Calendar.YEAR), curDate.get(Calendar.MONTH), curDate |
||||
|
.get(Calendar.DATE) + 1, 0, 0, 0); |
||||
|
return 24 * 3600 - ((int) (tommorowDate.getTimeInMillis() - curDate.getTimeInMillis()) / 1000); |
||||
|
} |
||||
|
|
||||
|
public static class CronDate extends Date { |
||||
|
private int hour; |
||||
|
private int minute; |
||||
|
|
||||
|
public CronDate(int h, int m) { |
||||
|
this.hour = h; |
||||
|
this.minute = m; |
||||
|
} |
||||
|
|
||||
|
CronDate() { |
||||
|
this.hour = 0; |
||||
|
this.minute = 0; |
||||
|
} |
||||
|
|
||||
|
int getHour() { |
||||
|
return hour; |
||||
|
} |
||||
|
|
||||
|
void setHour(int hour) { |
||||
|
this.hour = hour; |
||||
|
} |
||||
|
|
||||
|
public int getMinute() { |
||||
|
return minute; |
||||
|
} |
||||
|
|
||||
|
public void setMinute(int minute) { |
||||
|
this.minute = minute; |
||||
|
} |
||||
|
|
||||
|
public boolean before(CronDate date) { |
||||
|
if (null == date) { |
||||
|
return false; |
||||
|
} |
||||
|
if (date.getHour() != this.getHour()) { |
||||
|
return (this.getHour() - date.getHour() < 0); |
||||
|
} |
||||
|
// compare minute |
||||
|
return (this.getMinute() - date.getMinute() < 0); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public String toString() { |
||||
|
StringBuilder sb = new StringBuilder(); |
||||
|
if (hour >= 10) { |
||||
|
sb.append(hour); |
||||
|
} else { |
||||
|
sb.append("0").append(hour); |
||||
|
} |
||||
|
sb.append(":"); |
||||
|
if (minute >= 10) { |
||||
|
sb.append(minute); |
||||
|
} else { |
||||
|
sb.append("0").append(minute); |
||||
|
} |
||||
|
return sb.toString(); |
||||
|
} |
||||
|
// @Override |
||||
|
// public String toString() { |
||||
|
// Date date = new Date(); |
||||
|
// date.setHours(hour); |
||||
|
// date.setMinutes(minute); |
||||
|
// String str = cronDdateFormate.format(date); |
||||
|
// return str; |
||||
|
// } |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* @param dateStr |
||||
|
* @return |
||||
|
*/ |
||||
|
public static CronDate parseDateFromStr(String dateStr) { |
||||
|
if (TStringUtils.isEmpty(dateStr)) { |
||||
|
return null; |
||||
|
} |
||||
|
String[] ts = dateStr.split(":"); |
||||
|
if (null == ts || ts.length == 0) { |
||||
|
return null; |
||||
|
} |
||||
|
CronDate date = new CronDate(); |
||||
|
for (int i = 0; i < ts.length; i++) { |
||||
|
String s = ts[i]; |
||||
|
int num = parseDoubleStr(s); |
||||
|
if (i == 0) { |
||||
|
date.setHour(num); |
||||
|
} else if (i == 1) { |
||||
|
date.setMinute(num); |
||||
|
} |
||||
|
} |
||||
|
return date; |
||||
|
} |
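    // Hedged usage sketch (not part of the original class; the values below are illustrative only):
    // parseDateFromStr expects an "HH:mm" style string and returns a CronDate whose hour/minute
    // can be compared with before().
    //
    //     CronDate start = parseDateFromStr("08:30");
    //     CronDate now   = parseDateFromStr("09:05");
    //     if (start != null && start.before(now)) {
    //         // 08:30 is earlier than 09:05, so this branch runs
    //     }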
||||
|
|
||||
|
/** |
||||
|
* @param st |
||||
|
* @return |
||||
|
*/ |
||||
|
private static Integer parseDoubleStr(String st) { |
||||
|
if (TStringUtils.isEmpty(st)) { |
||||
|
return null; |
||||
|
} |
||||
|
while (st.startsWith("0") && st.length() > 1) { |
||||
|
st = st.substring(1); |
||||
|
} |
||||
|
if (TStringUtils.isEmpty(st)) { |
||||
|
return 0; |
||||
|
} |
||||
|
return Integer.parseInt(st); |
||||
|
} |
||||
|
|
||||
|
    /**
     * Get the hour and minute of the current time.
     *
     * @return an int[2] holding {hour, minute}
     */
||||
|
public static int[] getCurrentHourAndMinute() { |
||||
|
int[] dat = new int[2]; |
||||
|
Date date = new Date(); |
||||
|
dat[0] = date.getHours(); |
||||
|
dat[1] = date.getMinutes(); |
||||
|
return dat; |
||||
|
} |
||||
|
|
||||
|
// public static String extractDataScope(long from, long to, boolean fileName) { |
||||
|
// return fileName ? |
||||
|
// (MfTimeUtil.getCSTDateStr(from, "yyyyMMdd") + "_" |
||||
|
// + MfTimeUtil.getCSTDateStr(to, "yyyyMMdd")) |
||||
|
// : ("[" + MfTimeUtil.getCSTDateStr(from, "yyyy-MM-dd") + " ~ " |
||||
|
// + MfTimeUtil.getCSTDateStr(to, "yyyy-MM-dd") + "]"); |
||||
|
// |
||||
|
// } |
||||
|
|
||||
|
public static Date stringToDate(String dateStr) { |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
try { |
||||
|
return sdf.parse(dateStr); |
||||
|
} catch (ParseException e) { |
||||
|
return new Date(); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
    /**
     * Get the server's current date and time, formatted as yyyy-MM-dd HH:mm:ss.
     */
||||
|
public static String getDateTime(){ |
||||
|
try{ |
||||
|
SimpleDateFormat datetime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
return datetime.format(Calendar.getInstance().getTime()); |
||||
|
} catch(Exception e){ |
||||
|
//log.debug("DateUtil.getDateTime():" + e.getMessage()); |
||||
|
return ""; |
||||
|
} |
||||
|
} |
||||
|
    /**
     * Format the given millisecond timestamp as yyyy-MM-dd HH:mm:ss.
     */
||||
|
public static String getDateTime(long date){ |
||||
|
try{ |
||||
|
SimpleDateFormat datetime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
return datetime.format(new Date(date)); |
||||
|
} catch(Exception e){ |
||||
|
// log.debug("DateUtil.getDateTime():" + e.getMessage()); |
||||
|
return ""; |
||||
|
} |
||||
|
} |
||||
|
public static long getcurr(){ |
||||
|
Date date = new Date(); |
||||
|
Long l_date = date.getTime(); |
||||
|
return l_date; |
||||
|
} |
||||
|
|
||||
|
// public static long getDayStart(long time){ |
||||
|
// long zero = time/(1000*3600*24)*(1000*3600*24)- TimeZone.getDefault().getRawOffset();//今天零点零分零秒的毫秒数 |
||||
|
// long zero2 = time/(1000*3600*24)*(1000*3600*24) - TimeZone.getDefault().getRawOffset(); |
||||
|
// return zero; |
||||
|
// } |
||||
|
// public static long getDayEnd(long time){ |
||||
|
// //long zero=time/(1000*3600*24)*(1000*3600*24)- TimeZone.getDefault().getRawOffset();//今天零点零分零秒的毫秒数 |
||||
|
// long twelve=time+24*60*60*1000-1;//今天23点59分59秒的毫秒数 |
||||
|
// return twelve; |
||||
|
// } |
||||
|
|
||||
|
// public static void main(String[] args) { |
||||
|
// long time = 1611591055000L ; |
||||
|
// long start = getDayStart(time); |
||||
|
// long end = getDayEnd(start); |
||||
|
// |
||||
|
// |
||||
|
// System.out.println(time); |
||||
|
// System.out.println(start); |
||||
|
// System.out.println(end); |
||||
|
// |
||||
|
// System.out.println(parseDateByday(time)); |
||||
|
// System.out.println(parseDateByday(start)); |
||||
|
// System.out.println(parseDateByday(end)); |
||||
|
// |
||||
|
// |
||||
|
// long zero=time/(1000*3600*24)*(1000*3600*24)-TimeZone.getDefault().getRawOffset();//今天零点零分零秒的毫秒数 |
||||
|
// long twelve=zero+24*60*60*1000-1;//今天23点59分59秒的毫秒数 |
||||
|
// long yesterday=System.currentTimeMillis()-24*60*60*1000;//昨天的这一时间的毫秒数 |
||||
|
// System.out.println(new Timestamp(time));//当前时间 |
||||
|
// System.out.println(new Timestamp(yesterday));//昨天这一时间点 |
||||
|
// System.out.println(new Timestamp(zero));//今天零点零分零秒 |
||||
|
// System.out.println(new Timestamp(twelve));//今天23点59分59秒 |
||||
|
// |
||||
|
// } |
||||
|
} |
@ -0,0 +1,286 @@ |
|||||
|
package com.bfd.mf.job.util; |
||||
|
|
||||
|
import java.text.SimpleDateFormat; |
||||
|
import java.util.ArrayList; |
||||
|
import java.util.Date; |
||||
|
import java.util.HashMap; |
||||
|
import java.util.List; |
||||
|
import java.util.Map; |
||||
|
import java.util.Properties; |
||||
|
|
||||
|
import javax.mail.Session; |
||||
|
import javax.mail.Transport; |
||||
|
import javax.mail.internet.InternetAddress; |
||||
|
import javax.mail.internet.MimeMessage; |
||||
|
|
||||
|
import com.bfd.crawler.utils.JsonUtils; |
||||
|
import org.apache.http.HttpEntity; |
||||
|
import org.apache.http.HttpResponse; |
||||
|
import org.apache.http.client.HttpClient; |
||||
|
import org.apache.http.client.methods.HttpPost; |
||||
|
import org.apache.http.entity.StringEntity; |
||||
|
import org.apache.http.impl.client.HttpClientBuilder; |
||||
|
import org.apache.http.util.EntityUtils; |
||||
|
import org.apache.log4j.Logger; |
||||
|
|
||||
|
|
||||
|
|
||||
|
/**
 * Mail alert sending helper.
 * @author yabo.li
 */
public class EMailUtils {
    private static final Logger LOG = Logger.getLogger(EMailUtils.class);
    // Sender mailbox and password (replace with your own account).
    // Note: some mail servers protect the real mailbox password by issuing a separate SMTP
    // client password (often called an "authorization code"). For mailboxes with such an
    // independent password enabled, the value below must be that authorization code.
    public static String myEmailAccount = "bfd_crawler_alarm@baifendian.com";
    public static String myEmailPassword = "bfd_crawler_alarm";

    // SMTP server address of the sender's mailbox. It must be exact and differs per provider;
    // it is usually (but not always) of the form smtp.xxx.com, e.g. smtp.163.com for NetEase 163 mail.
    private static String myEmailSMTPHost = "smtp.baifendian.com";

    // Recipient mailbox (replace with a valid address you control).
    public static String receiveMailAccount = "chaofan.tan@baifendian.com";
||||
|
private String confPath = "../etc/config.properties"; |
||||
|
|
||||
|
private static EMailUtils instance = null; |
||||
|
private String protocol = "smtp"; |
||||
|
private String smtpAuth = "true"; |
||||
|
private static String emailEncode = "UTF-8"; |
||||
|
private static String emailTitle = "[{cid}]数据采集异常报警 — 智能数据采集平台"; |
||||
|
private static String emailContent = "你好:\r\n\r\n报警对象:{cid}-{categoryName} \r\n报警原因:{type},请及时检查!。 \r\n\r\n报警时间:{time}"; |
||||
|
|
||||
|
private EMailUtils() { |
||||
|
LOG.info("EMailUtils:init"); |
||||
|
// Properties pro = LoadConfig.getInstance().getPro(confPath); |
||||
|
Properties pro = new Properties(); |
||||
|
/** |
||||
|
* 注释了读配置文件,直接写死了配置 |
||||
|
* crawl.alert.mail.transport.protocol=smtp |
||||
|
crawl.alert.mail.smtp.host=intmail.baifendian.com |
||||
|
crawl.alert.mail.smtp.auth=true |
||||
|
crawl.alert.email.userName=bfd_crawler_alarm@baifendian.com |
||||
|
crawl.alert.email.userPasswd=z26Iyf3vMRb5ejrI |
||||
|
crawl.alert.email.emailEncode=UTF-8 |
||||
|
*/ |
||||
|
protocol =pro.getProperty("crawl.alert.mail.transport.protocol", "smtp"); |
||||
|
myEmailSMTPHost =pro.getProperty("crawl.alert.mail.smtp.host", "intmail.baifendian.com"); |
||||
|
smtpAuth = pro.getProperty("crawl.alert.mail.smtp.auth", "true"); |
||||
|
myEmailAccount = pro.getProperty("crawl.alert.email.userName", "bfd_crawler_alarm@baifendian.com"); |
||||
|
myEmailPassword = pro.getProperty("crawl.alert.email.userPasswd", "z26Iyf3vMRb5ejrI"); |
||||
|
emailEncode = pro.getProperty("crawl.alert.email.emailEncode", "UTF-8"); |
||||
|
emailTitle = pro.getProperty("crawl.alert.email.emailTitle", "[{cid}]数据采集异常报警 — 智能数据采集平台"); |
||||
|
emailContent = pro.getProperty("crawl.alert.email.emailContent1", "你好:\r\n\r\n报警对象:{cid}-{categoryName} \r\n报警原因:{type},请及时检查!。 \r\n\r\n报警时间:{time}\r\n\r\n排查线索:{sample}"); |
||||
|
|
||||
|
|
||||
|
LOG.info("EMailUtils protocol:" + protocol + " myEmailSMTPHost:" + myEmailSMTPHost |
||||
|
+ " smtpAuth: " + smtpAuth + " myEmailAccount: " + myEmailAccount |
||||
|
+ " emailEncode: " + emailEncode + " config path: " + confPath); |
||||
|
} |
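    // Illustrative sketch (not part of the original source) of the ../etc/config.properties file
    // this constructor was written to read before the values were hard-coded. The keys come from
    // the comment above; the values here are example placeholders, not real production settings:
    //
    //     crawl.alert.mail.transport.protocol=smtp
    //     crawl.alert.mail.smtp.host=intmail.baifendian.com
    //     crawl.alert.mail.smtp.auth=true
    //     crawl.alert.email.userName=bfd_crawler_alarm@baifendian.com
    //     crawl.alert.email.userPasswd=<authorization code>
    //     crawl.alert.email.emailEncode=UTF-8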
||||
|
|
||||
|
public static EMailUtils getInstance() { |
||||
|
if (instance == null) { |
||||
|
synchronized (EMailUtils.class) { |
||||
|
if (instance == null) { |
||||
|
instance = new EMailUtils(); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
return instance; |
||||
|
} |
||||
|
|
||||
|
public void setConfigPath (String confPath) { |
||||
|
this.confPath = confPath; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
public void sendWechat(List<String> emailList, String message) { |
||||
|
HttpClientBuilder httpBuilder = HttpClientBuilder.create(); |
||||
|
HttpClient client = httpBuilder.build(); |
||||
|
HttpPost httppost = new HttpPost("http://172.18.1.181:8412/sendwechatalarm/"); //Constants.getWechatURL() |
||||
|
try { |
||||
|
Map<String, Object> requestMap = new HashMap<String, Object>(); |
||||
|
requestMap.put("emails", emailList); |
||||
|
requestMap.put("message", message); |
||||
|
StringEntity entity = new StringEntity(JsonUtils.toJSONString(requestMap),"UTF-8"); |
||||
|
entity.setContentType("application/json"); |
||||
|
httppost.setEntity(entity); |
||||
|
HttpResponse response = client.execute(httppost); |
||||
|
HttpEntity en = response.getEntity(); |
||||
|
String content = EntityUtils.toString(en,"utf8"); |
||||
|
LOG.info("SENT WECHAT ALARM:" + JsonUtils.toJSONString(emailList) + " " + JsonUtils.toJSONString(requestMap)); |
||||
|
} catch (Exception e) { |
||||
|
e.printStackTrace(); |
||||
|
} finally { |
||||
|
client = null; |
||||
|
httpBuilder = null; |
||||
|
httppost = null; |
||||
|
} |
||||
|
|
||||
|
} |
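    // Hedged usage sketch (not part of the original class; the receiver name and message text
    // are illustrative only). Note that sendEmail below strips "@percent.cn" from addresses
    // before calling this method:
    //
    //     List<String> receivers = new ArrayList<>();
    //     receivers.add("some.user");
    //     EMailUtils.getInstance().sendWechat(receivers, "crawler alarm: parse failures detected");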
||||
|
public void sendEmail(int type, Map<String, Object> siteMessage, List<String> emailList, String time1) { |
||||
|
LOG.info("有报警任务,开始发送邮件"); |
||||
|
try { |
||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
|
String cid = (String) siteMessage.get("cid"); |
||||
|
String categoryName = null; |
||||
|
String title = emailTitle.replace("{cid}", cid); |
||||
|
String content = null; |
||||
|
|
||||
|
content = emailContent.replace("{cid}", cid); |
||||
|
|
||||
|
if (siteMessage.containsKey("categoryName")) { |
||||
|
categoryName = (String) siteMessage.get("categoryName"); |
||||
|
content = content.replace("{categoryName}", categoryName); |
||||
|
} else { |
||||
|
content = content.replace("-{categoryName}",""); |
||||
|
} |
||||
|
if (siteMessage.containsKey("sample")) { |
||||
|
categoryName = (String) siteMessage.get("sample"); |
||||
|
content = content.replace("{sample}", categoryName); |
||||
|
} else { |
||||
|
content = content.replace("{sample}",""); |
||||
|
} |
||||
|
content = content.replace("{time}", sdf.format(new Date())); |
||||
|
//需要分类处理 |
||||
|
|
||||
|
switch(type) { |
||||
|
case 1: |
||||
|
content = content.replace("{type}", "数据的时间格式有误"); |
||||
|
break; |
||||
|
case 2: |
||||
|
content = content.replace("{type}", "任务下发后" + time1 + "分钟数据未及时返回"); |
||||
|
break; |
||||
|
case 3: |
||||
|
content = content.replace("{type}", "任务下发后" + time1 + "分钟数据未及时返回"); |
||||
|
break; |
||||
|
case 4: |
||||
|
content = content.replace("{type}", "数据字段丢失,字段丢失为"+time1); |
||||
|
break; |
||||
|
case 5: |
||||
|
content = content.replace("{type}", "数据关键字段值为空"); |
||||
|
break; |
||||
|
case 6: |
||||
|
                    content = content.replace("{type}", "解析失败次数超过100次"); // fixed: previously replaced on confPath by mistake
||||
|
break; |
||||
|
default: |
||||
|
return ; |
||||
|
} |
||||
|
|
||||
|
LOG.info("EMailUtils:sendEmail get: siteMessage:" + siteMessage + " emailList:" + emailList + " content:" + content); |
||||
|
Properties props = new Properties(); // 参数配置 |
||||
|
props.setProperty("mail.transport.protocol", protocol); // 使用的协议(JavaMail规范要求) |
||||
|
props.setProperty("mail.smtp.host", myEmailSMTPHost); // 发件人的邮箱的 SMTP 服务器地址 |
||||
|
props.setProperty("mail.smtp.auth", smtpAuth); // 需要请求认证 |
||||
|
Session session = Session.getInstance(props); |
||||
|
session.setDebug(true); |
||||
|
MimeMessage message = createMimeMessage(session, myEmailAccount, emailList, title, content); |
||||
|
Transport transport = session.getTransport(); |
||||
|
transport.connect(myEmailAccount, myEmailPassword); |
||||
|
transport.sendMessage(message, message.getAllRecipients()); |
||||
|
List<String> emails = new ArrayList(); |
||||
|
for (String email:emailList) { |
||||
|
emails.add(email.replace("@percent.cn", "")); |
||||
|
} |
||||
|
sendWechat(emails,content); |
||||
|
transport.close(); |
||||
|
} catch (Throwable e) { |
||||
|
e.printStackTrace(); |
||||
|
LOG.error("EMailUtils:sendEmail error. title:" + siteMessage + " emailList:" + emailList); |
||||
|
} |
||||
|
} |
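    // Hedged usage sketch (illustrative values, not from the original source). The map keys
    // "cid", "categoryName" and "sample" are the ones sendEmail reads above:
    //
    //     Map<String, Object> siteMessage = new HashMap<>();
    //     siteMessage.put("cid", "weibo");
    //     siteMessage.put("categoryName", "comments");
    //     List<String> receivers = new ArrayList<>();
    //     receivers.add("chaofan.tan@baifendian.com");
    //     // type 2: "data not returned within {time1} minutes after the task was dispatched"
    //     EMailUtils.getInstance().sendEmail(2, siteMessage, receivers, "30");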
||||
|
|
||||
|
|
||||
|
// public static void main(String[] args) throws Exception { |
||||
|
// // 1. 创建参数配置, 用于连接邮件服务器的参数配置 |
||||
|
// Properties props = new Properties(); // 参数配置 |
||||
|
// props.setProperty("mail.transport.protocol", "smtp"); // 使用的协议(JavaMail规范要求) |
||||
|
// props.setProperty("mail.smtp.host", myEmailSMTPHost); // 发件人的邮箱的 SMTP 服务器地址 |
||||
|
// props.setProperty("mail.smtp.auth", "true"); // 需要请求认证 |
||||
|
// |
||||
|
// // PS: 某些邮箱服务器要求 SMTP 连接需要使用 SSL 安全认证 (为了提高安全性, 邮箱支持SSL连接, 也可以自己开启), |
||||
|
// // 如果无法连接邮件服务器, 仔细查看控制台打印的 log, 如果有有类似 “连接失败, 要求 SSL 安全连接” 等错误, |
||||
|
// // 打开下面 /* ... */ 之间的注释代码, 开启 SSL 安全连接。 |
||||
|
// /* |
||||
|
// // SMTP 服务器的端口 (非 SSL 连接的端口一般默认为 25, 可以不添加, 如果开启了 SSL 连接, |
||||
|
// // 需要改为对应邮箱的 SMTP 服务器的端口, 具体可查看对应邮箱服务的帮助, |
||||
|
// // QQ邮箱的SMTP(SLL)端口为465或587, 其他邮箱自行去查看) |
||||
|
// final String smtpPort = "465"; |
||||
|
// props.setProperty("mail.smtp.port", smtpPort); |
||||
|
// props.setProperty("mail.smtp.socketFactory.class", "javax.net.ssl.SSLSocketFactory"); |
||||
|
// props.setProperty("mail.smtp.socketFactory.fallback", "false"); |
||||
|
// props.setProperty("mail.smtp.socketFactory.port", smtpPort); |
||||
|
// */ |
||||
|
// |
||||
|
// // 2. 根据配置创建会话对象, 用于和邮件服务器交互 |
||||
|
// Session session = Session.getInstance(props); |
||||
|
// session.setDebug(true); // 设置为debug模式, 可以查看详细的发送 log |
||||
|
// List<String> emails = new ArrayList<String>(); |
||||
|
// // 3. 创建一封邮件 |
||||
|
// MimeMessage message = createMimeMessage(session, myEmailAccount, emails, "小司机", "小司机去开车"); |
||||
|
// |
||||
|
// // 4. 根据 Session 获取邮件传输对象 |
||||
|
// Transport transport = session.getTransport(); |
||||
|
// |
||||
|
// // 5. 使用 邮箱账号 和 密码 连接邮件服务器, 这里认证的邮箱必须与 message 中的发件人邮箱一致, 否则报错 |
||||
|
// // |
||||
|
// // PS_01: 成败的判断关键在此一句, 如果连接服务器失败, 都会在控制台输出相应失败原因的 log, |
||||
|
// // 仔细查看失败原因, 有些邮箱服务器会返回错误码或查看错误类型的链接, 根据给出的错误 |
||||
|
// // 类型到对应邮件服务器的帮助网站上查看具体失败原因。 |
||||
|
// // |
||||
|
// // PS_02: 连接失败的原因通常为以下几点, 仔细检查代码: |
||||
|
// // (1) 邮箱没有开启 SMTP 服务; |
||||
|
// // (2) 邮箱密码错误, 例如某些邮箱开启了独立密码; |
||||
|
// // (3) 邮箱服务器要求必须要使用 SSL 安全连接; |
||||
|
// // (4) 请求过于频繁或其他原因, 被邮件服务器拒绝服务; |
||||
|
// // (5) 如果以上几点都确定无误, 到邮件服务器网站查找帮助。 |
||||
|
// // |
||||
|
// // PS_03: 仔细看log, 认真看log, 看懂log, 错误原因都在log已说明。 |
||||
|
// transport.connect(myEmailAccount, myEmailPassword); |
||||
|
// |
||||
|
// // 6. 发送邮件, 发到所有的收件地址, message.getAllRecipients() 获取到的是在创建邮件对象时添加的所有收件人, 抄送人, 密送人 |
||||
|
// transport.sendMessage(message, message.getAllRecipients()); |
||||
|
// |
||||
|
// // 7. 关闭连接 |
||||
|
// transport.close(); |
||||
|
// } |
||||
|
|
||||
|
/** |
||||
|
* 创建一封只包含文本的简单邮件 |
||||
|
* |
||||
|
* @param session 和服务器交互的会话 |
||||
|
* @param sendMail 发件人邮箱 |
||||
|
* @param receiveMail 收件人邮箱 |
||||
|
* @return |
||||
|
* @throws Exception |
||||
|
*/ |
||||
|
public static MimeMessage createMimeMessage(Session session, String sendMail, List<String> receiveMail ,String title, String content) throws Exception { |
||||
|
// 1. 创建一封邮件 |
||||
|
MimeMessage message = new MimeMessage(session); |
||||
|
|
||||
|
// 2. From: 发件人(昵称有广告嫌疑,避免被邮件服务器误认为是滥发广告以至返回失败,请修改昵称) |
||||
|
message.setFrom(new InternetAddress(sendMail, sendMail.split("@")[0], "UTF-8")); |
||||
|
|
||||
|
// 3. To: 收件人(可以增加多个收件人、抄送、密送) |
||||
|
for (String email : receiveMail) { |
||||
|
message.addRecipient(MimeMessage.RecipientType.TO, new InternetAddress(email, email.split("@")[0], "UTF-8")); |
||||
|
} |
||||
|
// 4. Subject: 邮件主题(标题有广告嫌疑,避免被邮件服务器误认为是滥发广告以至返回失败,请修改标题) |
||||
|
message.setSubject(title, emailEncode); |
||||
|
|
||||
|
// 5. Content: 邮件正文(可以使用html标签)(内容有广告嫌疑,避免被邮件服务器误认为是滥发广告以至返回失败,请修改发送内容) |
||||
|
message.setText(content);//setContent(content, "text/html;charset=UTF-8"); |
||||
|
|
||||
|
// 6. 设置发件时间 |
||||
|
message.setSentDate(new Date()); |
||||
|
|
||||
|
// 7. 保存设置 |
||||
|
message.saveChanges(); |
||||
|
|
||||
|
return message; |
||||
|
} |
||||
|
|
||||
|
} |
@ -0,0 +1,451 @@ |
|||||
|
package com.bfd.mf.job.util; |
||||
|
|
||||
|
import com.alibaba.fastjson.JSON; |
||||
|
import com.alibaba.fastjson.JSONObject; |
||||
|
import com.google.common.collect.Lists; |
||||
|
import com.google.common.collect.Maps; |
||||
|
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; |
||||
|
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; |
||||
|
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; |
||||
|
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; |
||||
|
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; |
||||
|
import org.elasticsearch.action.bulk.BulkRequestBuilder; |
||||
|
import org.elasticsearch.action.bulk.BulkResponse; |
||||
|
import org.elasticsearch.action.index.IndexResponse; |
||||
|
import org.elasticsearch.action.search.ClearScrollRequestBuilder; |
||||
|
import org.elasticsearch.action.search.SearchRequestBuilder; |
||||
|
import org.elasticsearch.action.search.SearchResponse; |
||||
|
import org.elasticsearch.action.support.IndicesOptions; |
||||
|
import org.elasticsearch.action.support.master.AcknowledgedResponse; |
||||
|
import org.elasticsearch.client.transport.TransportClient; |
||||
|
import org.elasticsearch.common.settings.Settings; |
||||
|
import org.elasticsearch.common.transport.TransportAddress; |
||||
|
import org.elasticsearch.common.unit.TimeValue; |
||||
|
import org.elasticsearch.common.xcontent.XContentType; |
||||
|
import org.elasticsearch.index.query.QueryBuilder; |
||||
|
import org.elasticsearch.search.SearchHit; |
||||
|
import org.elasticsearch.transport.client.PreBuiltTransportClient; |
||||
|
import org.joda.time.LocalDateTime; |
||||
|
import org.joda.time.format.DateTimeFormat; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.util.Assert; |
||||
|
import org.springframework.util.CollectionUtils; |
||||
|
|
||||
|
import java.net.InetAddress; |
||||
|
import java.util.List; |
||||
|
import java.util.Map; |
||||
|
import java.util.concurrent.TimeUnit; |
||||
|
import java.util.function.Consumer; |
||||
|
|
||||
|
public abstract class EsUtils2 { |
||||
|
    private static final Logger LOGGER = LoggerFactory.getLogger(EsUtils2.class); // fixed: logged under EsUtils by copy/paste
||||
|
private static final Map<String, TransportClient> CLIENT_MAP = Maps.newHashMap(); |
||||
|
|
||||
|
public static void registerCluster(String clusterName, String[] addresses) { |
||||
|
System.setProperty("es.set.netty.runtime.available.processors", "false"); |
||||
|
Assert.hasLength(clusterName, "Param clusterName must not be empty."); |
||||
|
Assert.notEmpty(addresses, "Param addresses must not be empty."); |
||||
|
Settings settings = Settings.builder() |
||||
|
.put("cluster.name", clusterName).build(); |
||||
|
TransportClient client = new PreBuiltTransportClient(settings); |
||||
|
try { |
||||
|
for (int i = 0; i < addresses.length; i++) { |
||||
|
String[] ipAndPort = addresses[i].split(":"); |
||||
|
String ip = ipAndPort[0]; |
||||
|
int port = Integer.parseInt(ipAndPort[1]); |
||||
|
client.addTransportAddress(new TransportAddress(InetAddress.getByName(ip), port)); |
||||
|
} |
||||
|
CLIENT_MAP.put(clusterName, client); |
||||
|
} catch (Exception e) { |
||||
|
throw new RuntimeException(e); |
||||
|
} |
||||
|
} |
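    // Hedged usage sketch (cluster name and address are illustrative assumptions):
    //
    //     EsUtils2.registerCluster("my-es-cluster", new String[]{"127.0.0.1:9300"});
    //     // after registration, the other helpers look the transport client up by cluster name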
||||
|
|
||||
|
    /**
     * Scroll query; the producer calls this when fetching main posts.
     *
     * @param clusterName
     * @param indices
     * @param type
     * @param queryBuilder
     * @param size         batch size per scroll page
     * @param minutes      scroll keep-alive in minutes
     * @param consumer     callback invoked with each batch of hits
     */
||||
|
public static void scrollQuery(String clusterName, String indices, String type, |
||||
|
QueryBuilder queryBuilder, Integer size, int minutes, |
||||
|
Consumer<List<JSONObject>> consumer) { |
||||
|
TransportClient client = getClient(clusterName); |
||||
|
|
||||
|
SearchRequestBuilder searchRequestBuilder = client.prepareSearch() |
||||
|
.setIndices(indices) |
||||
|
.setIndicesOptions(IndicesOptions.fromOptions(true, true, |
||||
|
true, false)) |
||||
|
.setTypes(type) |
||||
|
.setQuery(queryBuilder) |
||||
|
.setScroll(TimeValue.timeValueMinutes(minutes)) |
||||
|
.setSize(size); |
||||
|
|
||||
|
long s = System.currentTimeMillis(); |
||||
|
SearchResponse response = searchRequestBuilder.execute().actionGet(); |
||||
|
long e = System.currentTimeMillis(); |
||||
|
LOGGER.debug("First query es, size:{}, took:{} ms.", |
||||
|
response.getHits().getHits().length, (e - s)); |
||||
|
List<String> scrollIds = Lists.newArrayList(); |
||||
|
while (response.getHits().getHits().length > 0) { |
||||
|
List<JSONObject> dataList = Lists.newLinkedList(); |
||||
|
for (SearchHit hit : response.getHits().getHits()) { |
||||
|
dataList.add(JSON.parseObject(hit.getSourceAsString())); |
||||
|
} |
||||
|
consumer.accept(dataList); |
||||
|
if (dataList.size() < size) { |
||||
|
break; |
||||
|
} |
||||
|
String scrollId = response.getScrollId(); |
||||
|
scrollIds.add(scrollId); |
||||
|
long s1 = System.currentTimeMillis(); |
||||
|
response = client.prepareSearchScroll(scrollId) |
||||
|
.setScroll(TimeValue.timeValueMinutes(minutes)) |
||||
|
.execute() |
||||
|
.actionGet(); |
||||
|
long e1 = System.currentTimeMillis(); |
||||
|
LOGGER.debug("Query es, size:{}, took:{} ms", |
||||
|
response.getHits().getHits().length, (e1 - s1)); |
||||
|
} |
||||
|
if (!CollectionUtils.isEmpty(scrollIds)) { |
||||
|
ClearScrollRequestBuilder clearScroll = client.prepareClearScroll() |
||||
|
.setScrollIds(scrollIds); |
||||
|
client.clearScroll(clearScroll.request()); |
||||
|
} |
||||
|
} |
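    // Hedged usage sketch (cluster, index, type and field names are illustrative assumptions):
    //
    //     QueryBuilder qb = QueryBuilders.matchAllQuery();
    //     EsUtils2.scrollQuery("my-es-cluster", "cl_index_2021-01-25", "doc", qb, 1000, 5,
    //             batch -> batch.forEach(doc -> LOGGER.info("got doc {}", doc)));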
||||
|
|
||||
|
    /**
     * Query the first [size] documents that match the condition.
     *
     * @param clusterName
     * @param index
     * @param queryBuilder
     * @param size
     * @return null when nothing matches, otherwise the list of documents
     */
||||
|
public static List<JSONObject> query(String clusterName, String index, final QueryBuilder queryBuilder, int size) { |
||||
|
TransportClient client = getClient(clusterName); |
||||
|
|
||||
|
SearchResponse response = client.prepareSearch() |
||||
|
.setIndices(index) |
||||
|
.setIndicesOptions(IndicesOptions.fromOptions(true, true, |
||||
|
true, false)) |
||||
|
.setSize(size) |
||||
|
.setFrom(0) |
||||
|
.setQuery(queryBuilder) |
||||
|
.execute().actionGet(); |
||||
|
if (response.getHits().totalHits > 0) { |
||||
|
List<JSONObject> dataList = Lists.newLinkedList(); |
||||
|
SearchHit[] hits = response.getHits().getHits(); |
||||
|
for (int i = 0; i < hits.length; i++) { |
||||
|
JSONObject data = new JSONObject(); |
||||
|
data.putAll(hits[i].getSourceAsMap()); |
||||
|
dataList.add(data); |
||||
|
} |
||||
|
return dataList; |
||||
|
} |
||||
|
|
||||
|
return null; |
||||
|
} |
||||
|
|
||||
|
    /**
     * Build the set of index names covered by a time range.
     *
     * @param startMills start time (ms)
     * @param endMils    end time (ms)
     * @return
     */
||||
|
public static String[] getIndices(String prefix, String separator, |
||||
|
long startMills, long endMils, String pattern) { |
||||
|
List<String> indexList = Lists.newArrayList(); |
||||
|
LocalDateTime start = new LocalDateTime(startMills); |
||||
|
LocalDateTime end = new LocalDateTime(endMils); |
||||
|
for (LocalDateTime dt = start; dt.isBefore(end); dt = dt.plusDays(1)) { |
||||
|
String dtStr = dt.toString(DateTimeFormat.forPattern(pattern)); |
||||
|
String index = new StringBuilder() |
||||
|
.append(prefix) |
||||
|
.append(separator) |
||||
|
.append(dtStr) |
||||
|
.toString(); |
||||
|
indexList.add(index); |
||||
|
} |
||||
|
|
||||
|
String[] indices = new String[indexList.size()]; |
||||
|
indices = indexList.toArray(indices); |
||||
|
return indices; |
||||
|
} |
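    // Hedged example (prefix/pattern values are illustrative): with prefix "cl_index",
    // separator "_", pattern "yyyy-MM-dd" and a two-day range, this returns something like
    // {"cl_index_2021-01-25", "cl_index_2021-01-26"} - one index name per day in [start, end).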
||||
|
|
||||
|
    /**
     * Build the set of index names covered by a time range, falling back to a standby index
     * for the part of the range before upperMills.
     *
     * @param startMills start time (ms)
     * @param endMils    end time (ms)
     * @return
     */
||||
|
public static String[] getIndices(String prefix, String separator, |
||||
|
long startMills, long endMils, String pattern, Long upperMills, String standbyIndex) { |
||||
|
List<String> indexList = Lists.newArrayList(); |
||||
|
LocalDateTime start = new LocalDateTime(startMills); |
||||
|
LocalDateTime end = new LocalDateTime(endMils); |
||||
|
LocalDateTime upper = new LocalDateTime(upperMills); |
||||
|
if (start.isBefore(upper)) { |
||||
|
indexList.add(standbyIndex); |
||||
|
start = upper; |
||||
|
} |
||||
|
for (LocalDateTime dt = start; dt.isEqual(end) || dt.isBefore(end); dt = dt.plusDays(1)) { |
||||
|
String dtStr = dt.toString(DateTimeFormat.forPattern(pattern)); |
||||
|
String index = new StringBuilder() |
||||
|
.append(prefix) |
||||
|
.append(separator) |
||||
|
.append(dtStr) |
||||
|
.toString(); |
||||
|
indexList.add(index); |
||||
|
} |
||||
|
|
||||
|
String[] indices = new String[indexList.size()]; |
||||
|
indices = indexList.toArray(indices); |
||||
|
return indices; |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* 根据indexName获取一定存在的index |
||||
|
* 如果indexName存在则返回,不存在则创建 |
||||
|
* |
||||
|
* @param clusterName |
||||
|
* @param indexName |
||||
|
* @param type |
||||
|
* @param mappingFile |
||||
|
* @return |
||||
|
*/ |
||||
|
// public static String getOrCreateIndex(String clusterName, String indexName, String type, |
||||
|
// int shard, int replica, String mappingFile) { |
||||
|
// try { |
||||
|
// if (!EsUtils.exists(clusterName, indexName)) { |
||||
|
// byte[] bytes = Files.readAllBytes(Paths.get(mappingFile)); |
||||
|
// String mappingDef = new String(bytes); |
||||
|
// boolean flag = EsUtils.createIndex(clusterName, indexName, type, |
||||
|
// shard, replica, mappingDef); |
||||
|
// if (!flag) { |
||||
|
// throw new RuntimeException("Create index " + indexName + " error."); |
||||
|
// } |
||||
|
// } |
||||
|
// } catch (Exception e) { |
||||
|
// throw new RuntimeException(e); |
||||
|
// } |
||||
|
// |
||||
|
// return indexName; |
||||
|
// } |
||||
|
|
||||
|
/** |
||||
|
* index一个文档 |
||||
|
* |
||||
|
* @param clusterName |
||||
|
* @param indexName |
||||
|
* @param data |
||||
|
* @return |
||||
|
*/ |
||||
|
public static String index(String clusterName, String indexName, String type, final JSONObject data, String idField) { |
||||
|
TransportClient client = getClient(clusterName); |
||||
|
IndexResponse response = client.prepareIndex(indexName, type) |
||||
|
.setSource(data, XContentType.JSON) |
||||
|
.setId(data.getString(idField)) |
||||
|
.get(); |
||||
|
return response.getId(); |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* index一个文档 |
||||
|
* |
||||
|
* @param clusterName |
||||
|
* @param indexName |
||||
|
* @param dataList |
||||
|
* @return |
||||
|
*/ |
||||
|
public static void index(String clusterName, String indexName, String type, final List<JSONObject> dataList, String idField) { |
||||
|
if (CollectionUtils.isEmpty(dataList)) { |
||||
|
return; |
||||
|
} |
||||
|
TransportClient client = getClient(clusterName); |
||||
|
for (int i = 0; i < dataList.size(); i++) { |
||||
|
JSONObject data = dataList.get(i); |
||||
|
client.prepareIndex(indexName, type) |
||||
|
.setSource(data, XContentType.JSON) |
||||
|
.setId(data.getString(idField)) |
||||
|
.get(); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* 批量index文档 |
||||
|
* @param clusterName |
||||
|
* @param bulkItemList |
||||
|
* @return |
||||
|
*/ |
||||
|
public static boolean bulkIndex(String clusterName, final List<BulkItem> bulkItemList, String idField) { |
||||
|
if (CollectionUtils.isEmpty(bulkItemList)) { |
||||
|
return true; |
||||
|
} |
||||
|
TransportClient client = getClient(clusterName); |
||||
|
BulkRequestBuilder rb = client.prepareBulk(); |
||||
|
for (BulkItem item : bulkItemList) { |
||||
|
rb.add(client.prepareIndex(item.getIndexName(), item.getType(), item.getData().getString(idField)) |
||||
|
.setSource(item.getData(), XContentType.JSON)); |
||||
|
} |
||||
|
BulkResponse response = rb.get(); |
||||
|
LOGGER.info("Bulk index, size:{}.", bulkItemList.size()); |
||||
|
        return !response.hasFailures(); // fixed: report success (true) only when no bulk item failed, matching the empty-list early return above
||||
|
} |
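    // Hedged usage sketch (cluster, index, type and id-field names are illustrative assumptions;
    // someJsonObject stands in for a document built elsewhere):
    //
    //     List<BulkItem> items = Lists.newArrayList();
    //     items.add(EsUtils2.buildBulkItem("cl_index_2021-01-25", "doc", someJsonObject));
    //     boolean ok = EsUtils2.bulkIndex("my-es-cluster", items, "id");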
||||
|
|
||||
|
/** |
||||
|
* 判断索引是否存在 |
||||
|
* |
||||
|
* @param clusterName |
||||
|
* @param indexName |
||||
|
* @return |
||||
|
*/ |
||||
|
public static Boolean exists(String clusterName, String indexName) { |
||||
|
TransportClient client = getClient(clusterName); |
||||
|
IndicesExistsRequest request = new IndicesExistsRequest() |
||||
|
.indices(indexName); |
||||
|
IndicesExistsResponse response = client.admin().indices().exists(request).actionGet(); |
||||
|
return response.isExists(); |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* 创建一个index |
||||
|
* |
||||
|
* @param clusterName |
||||
|
* @param indexName |
||||
|
* @param type |
||||
|
* @param shardCount |
||||
|
* @param replicaCount |
||||
|
* @param mappingDef |
||||
|
* @return |
||||
|
*/ |
||||
|
public static Boolean createIndex(String clusterName, String indexName, String type, |
||||
|
Integer shardCount, Integer replicaCount, String mappingDef) { |
||||
|
TransportClient client = getClient(clusterName); |
||||
|
CreateIndexRequest request = new CreateIndexRequest(indexName); |
||||
|
request.settings(Settings.builder() |
||||
|
.put("index.number_of_shards", shardCount) |
||||
|
.put("index.number_of_replicas", replicaCount) |
||||
|
.put("index.refresh_interval", 2, TimeUnit.SECONDS) |
||||
|
.put("index.analysis.filter.shingle_filter.type", "shingle") |
||||
|
.put("index.analysis.filter.shingle_filter.min_shingle_size", 2) |
||||
|
.put("index.analysis.filter.shingle_filter.max_shingle_size", 2) |
||||
|
.put("index.analysis.filter.shingle_filter.output_unigrams", false) |
||||
|
.put("index.analysis.analyzer.shingle_analyzer.type", "custom") |
||||
|
.put("index.analysis.analyzer.shingle_analyzer.tokenizer", "ik_smart") |
||||
|
.putArray("index.analysis.analyzer.shingle_analyzer.filter", "lowercase", "shingle_filter") |
||||
|
); |
||||
|
|
||||
|
request.mapping(type, mappingDef, XContentType.JSON); |
||||
|
CreateIndexResponse createIndexResponse = client.admin().indices().create(request).actionGet(); |
||||
|
boolean acknowledged = createIndexResponse.isAcknowledged(); |
||||
|
boolean shardsAcknowledged = createIndexResponse.isShardsAcked(); |
||||
|
if (acknowledged && shardsAcknowledged) { |
||||
|
return true; |
||||
|
} |
||||
|
|
||||
|
return false; |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* 删除index |
||||
|
* |
||||
|
* @param clusterName |
||||
|
* @param indexName |
||||
|
* @return |
||||
|
*/ |
||||
|
public static Boolean deleteIndex(String clusterName, String indexName) { |
||||
|
TransportClient client = getClient(clusterName); |
||||
|
DeleteIndexRequest request = new DeleteIndexRequest() |
||||
|
.indices(indexName); |
||||
|
AcknowledgedResponse response = client.admin().indices().delete(request).actionGet(); |
||||
|
return response.isAcknowledged(); |
||||
|
} |
||||
|
|
||||
|
private static TransportClient getClient(String clusterName) { |
||||
|
return CLIENT_MAP.get(clusterName); |
||||
|
} |
||||
|
|
||||
|
public static BulkItem buildBulkItem(String indexName, String type, final JSONObject data) { |
||||
|
return new BulkItem() |
||||
|
.setIndexName(indexName) |
||||
|
.setType(type) |
||||
|
.setData(data); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
/** |
||||
|
* 查询某个Index 的总量 |
||||
|
*/ |
||||
|
|
||||
|
public static Long scrollQuery(String clusterName, String indices, String type, |
||||
|
QueryBuilder queryBuilder){ |
||||
|
Long totalHits = 0L; |
||||
|
try{ |
||||
|
TransportClient client = getClient(clusterName); |
||||
|
SearchRequestBuilder searchRequestBuilder = client.prepareSearch() |
||||
|
.setIndices(indices) |
||||
|
.setIndicesOptions(IndicesOptions.fromOptions(true, true, |
||||
|
true, false)) |
||||
|
.setTypes(type) |
||||
|
.setQuery(queryBuilder); |
||||
|
SearchResponse response = searchRequestBuilder.execute().actionGet(); |
||||
|
totalHits = response.getHits().totalHits; |
||||
|
}catch (Exception e){ |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
return totalHits; |
||||
|
} |
||||
|
|
||||
|
public static class BulkItem { |
||||
|
String indexName; |
||||
|
String type; |
||||
|
JSONObject data; |
||||
|
|
||||
|
public String getIndexName() { |
||||
|
return indexName; |
||||
|
} |
||||
|
|
||||
|
public BulkItem setIndexName(String indexName) { |
||||
|
this.indexName = indexName; |
||||
|
return this; |
||||
|
} |
||||
|
|
||||
|
public String getType() { |
||||
|
return type; |
||||
|
} |
||||
|
|
||||
|
public BulkItem setType(String type) { |
||||
|
this.type = type; |
||||
|
return this; |
||||
|
} |
||||
|
|
||||
|
public JSONObject getData() { |
||||
|
return data; |
||||
|
} |
||||
|
|
||||
|
public BulkItem setData(JSONObject data) { |
||||
|
this.data = data; |
||||
|
return this; |
||||
|
} |
||||
|
|
||||
|
public BulkItem setStringData(String data) { |
||||
|
            this.data = JSON.parseObject(data); // fixed: previously this setter overwrote "type" instead of parsing the string into "data"
||||
|
return this; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// public static void etl(String srcClusterName, String srcIndex, String srcType, QueryBuilder qb, |
||||
|
// Integer size, int minutes, |
||||
|
// String tarClusterName, String tarIndex, String tarType, String idField) { |
||||
|
// scrollQuery(srcClusterName, new String[]{srcClusterName}, srcType, qb, size, minutes, dataList -> { |
||||
|
// EsUtils.index(tarClusterName, tarIndex, tarType, dataList, idField); |
||||
|
// }); |
||||
|
// } |
||||
|
} |
@ -0,0 +1,239 @@ |
|||||
|
package com.bfd.mf.job.util; |
||||
|
|
||||
|
import it.sauronsoftware.jave.Encoder; |
||||
|
|
||||
|
import javax.imageio.ImageIO; |
||||
|
import javax.imageio.ImageReader; |
||||
|
import javax.imageio.stream.FileImageInputStream; |
||||
|
import javax.imageio.stream.ImageInputStream; |
||||
|
import java.awt.image.BufferedImage; |
||||
|
import java.io.*; |
||||
|
import java.util.ArrayList; |
||||
|
import java.util.Iterator; |
||||
|
import java.util.List; |
||||
|
|
||||
|
|
||||
|
/** |
||||
|
* Created by BFD-229 on 2017/7/6. |
||||
|
*/ |
||||
|
public class ReadLine { |
||||
|
|
||||
|
public static List<String> readLine( File fileName){ |
||||
|
List<String> list = new ArrayList<String> (); |
||||
|
String line; |
||||
|
try { |
||||
|
InputStreamReader read = new InputStreamReader(new FileInputStream(fileName), "utf-8"); |
||||
|
BufferedReader reader = new BufferedReader(read); |
||||
|
while ((line = reader.readLine()) != null) { |
||||
|
try { |
||||
|
if (line.length() > 0) { |
||||
|
list.add(line); |
||||
|
} |
||||
|
} catch (Exception e) { |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
} |
||||
|
return list; |
||||
|
}catch (UnsupportedEncodingException e) { |
||||
|
e.printStackTrace(); |
||||
|
return null; |
||||
|
} catch (FileNotFoundException e) { |
||||
|
e.printStackTrace(); |
||||
|
return null; |
||||
|
} catch (IOException e) { |
||||
|
e.printStackTrace(); |
||||
|
return null; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
|
||||
|
// public static List<JSONObject> readLine(File fileName){ |
||||
|
// List<JSONObject> list = new ArrayList<JSONObject> (); |
||||
|
// String line; |
||||
|
// try { |
||||
|
// InputStreamReader read = new InputStreamReader(new FileInputStream(fileName), "utf-8"); |
||||
|
// BufferedReader reader = new BufferedReader(read); |
||||
|
// while ((line = reader.readLine()) != null) { |
||||
|
// try { |
||||
|
// if (line.length() > 0) { |
||||
|
// list.add(line); |
||||
|
// } |
||||
|
// } catch (Exception e) { |
||||
|
// e.printStackTrace(); |
||||
|
// } |
||||
|
// } |
||||
|
// return list; |
||||
|
// }catch (UnsupportedEncodingException e) { |
||||
|
// e.printStackTrace(); |
||||
|
// return null; |
||||
|
// } catch (FileNotFoundException e) { |
||||
|
// e.printStackTrace(); |
||||
|
// return null; |
||||
|
// } catch (IOException e) { |
||||
|
// e.printStackTrace(); |
||||
|
// return null; |
||||
|
// } |
||||
|
// } |
||||
|
|
||||
|
// 读取文件内容 |
||||
|
public static String readFile(String path){ |
||||
|
File file = new File(path); |
||||
|
StringBuilder result = new StringBuilder(); |
||||
|
try{ |
||||
|
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF-8"));//构造一个BufferedReader类来读取文件 |
||||
|
String s = null; |
||||
|
while((s = br.readLine())!=null){//使用readLine方法,一次读一行 |
||||
|
result.append( System.lineSeparator() + s); |
||||
|
} |
||||
|
br.close(); |
||||
|
}catch(Exception e){ |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
return result.toString(); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
public static void readFiles(File file){ |
||||
|
if (file.exists()) { |
||||
|
System.err.println("exist"); |
||||
|
try { |
||||
|
FileInputStream fis = new FileInputStream(file); |
||||
|
InputStreamReader isr = new InputStreamReader(fis, "UTF-8"); |
||||
|
BufferedReader br = new BufferedReader(isr); |
||||
|
String line; |
||||
|
while((line = br.readLine()) != null){ |
||||
|
System.out.println(line); |
||||
|
} |
||||
|
br.close(); |
||||
|
isr.close(); |
||||
|
fis.close(); |
||||
|
} catch (FileNotFoundException e) { |
||||
|
e.printStackTrace(); |
||||
|
} catch (UnsupportedEncodingException e) { |
||||
|
e.printStackTrace(); |
||||
|
} catch (IOException e) { |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
|
||||
|
public static String getResolution1(File file) throws IOException { |
||||
|
BufferedImage image = ImageIO.read(file); |
||||
|
return image.getWidth() + "x" + image.getHeight(); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
// public static String getResolution(File file){ |
||||
|
// Encoder encoder = new Encoder(); |
||||
|
// try { |
||||
|
// MultimediaInfo m = encoder.getInfo(file); |
||||
|
// int height = m.getVideo().getSize().getHeight(); |
||||
|
// int width = m.getVideo().getSize().getWidth(); |
||||
|
// System.out.println("width:"+width); |
||||
|
// System.out.println("height:" + height); |
||||
|
// FileInputStream fis = new FileInputStream(source); |
||||
|
// FileChannel fc = fis.getChannel(); |
||||
|
// BigDecimal fileSize = new BigDecimal(fc.size()); |
||||
|
// String size = fileSize.divide(new BigDecimal(1048576), 2, RoundingMode.HALF_UP) + "MB"; |
||||
|
// System.out.println("size:" + size); |
||||
|
// long duration = m.getDuration()/1000; |
||||
|
// System.out.println("duration:" + duration + "s"); |
||||
|
// } catch (Exception e) { |
||||
|
// e.printStackTrace(); |
||||
|
// } |
||||
|
// } |
||||
|
|
||||
|
public static String getImageDim(String path) { |
||||
|
String result = null; |
||||
|
String suffix = getFileSuffix(path); |
||||
|
//解码具有给定后缀的文件 |
||||
|
Iterator<ImageReader> iter = ImageIO.getImageReadersBySuffix(suffix); |
||||
|
// System.out.println(ImageIO.getImageReadersBySuffix(suffix)); |
||||
|
if (iter.hasNext()) { |
||||
|
ImageReader reader = iter.next(); |
||||
|
try { |
||||
|
ImageInputStream stream = new FileImageInputStream(new File(path)); |
||||
|
reader.setInput(stream); |
||||
|
int width = reader.getWidth(reader.getMinIndex()); |
||||
|
int height = reader.getHeight(reader.getMinIndex()); |
||||
|
result = width + "×" + height; |
||||
|
} catch (IOException e) { |
||||
|
e.printStackTrace(); |
||||
|
} finally { |
||||
|
reader.dispose(); |
||||
|
} |
||||
|
} |
||||
|
// System.out.println("getImageDim:" + result); |
||||
|
return result; |
||||
|
} |
||||
|
|
||||
|
private static String getFileSuffix(final String path) { |
||||
|
String result = null; |
||||
|
if (path != null) { |
||||
|
result = ""; |
||||
|
if (path.lastIndexOf('.') != -1) { |
||||
|
result = path.substring(path.lastIndexOf('.')); |
||||
|
if (result.startsWith(".")) { |
||||
|
result = result.substring(1); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
// System.out.println("getFileSuffix:" + result); |
||||
|
return result; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
public static String videosize(String video) { |
||||
|
File source = new File(video); |
||||
|
Encoder encoder = new Encoder(); |
||||
|
try { |
||||
|
it.sauronsoftware.jave.MultimediaInfo m = encoder.getInfo(source); |
||||
|
return m.getVideo().getSize().getHeight() + "×" + m.getVideo().getSize().getWidth(); |
||||
|
} catch (Exception e) { |
||||
|
e.printStackTrace(); |
||||
|
return null; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
|
||||
|
|
||||
|
// public static String getVideoTime (String path){ |
||||
|
// File source = new File(path); |
||||
|
// Encoder encoder = new Encoder(); |
||||
|
// File[] file = source.listFiles(); |
||||
|
// long sum =0; |
||||
|
// for (File file2 : file) { |
||||
|
// try { |
||||
|
// MultimediaInfo m = encoder.getInfo(file2); |
||||
|
// long ls = m.getDuration()/1000; //ls是获取到的秒数 |
||||
|
// sum += ls; |
||||
|
// } catch (Exception e) { |
||||
|
// e.printStackTrace(); |
||||
|
// } |
||||
|
// } |
||||
|
// double sum1 = (double)sum; |
||||
|
// double sum2 =sum1/3600;// 转换成为了小时 |
||||
|
// System.out.println(sum2); |
||||
|
// return sum2+""; |
||||
|
// } |
||||
|
// |
||||
|
|
||||
|
|
||||
|
// public static byte[] readFile(String path){ |
||||
|
// try { |
||||
|
// FileInputStream fileInputStream = new FileInputStream(path); |
||||
|
// BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(fileInputStream)); |
||||
|
// String line = null; |
||||
|
// while ((line = bufferedReader.readLine()) != null) { |
||||
|
// System.out.println(line); |
||||
|
// } |
||||
|
// fileInputStream.close(); |
||||
|
// }catch (Exception e){ |
||||
|
// e.printStackTrace(); |
||||
|
// } |
||||
|
// } |
||||
|
|
||||
|
|
||||
|
|
||||
|
} |
@ -0,0 +1,119 @@ |
|||||
|
package com.bfd.mf.job.util; |
||||
|
|
||||
|
import com.bfd.mf.job.worker.UpLoadProducer; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
|
||||
|
import java.io.File; |
||||
|
import java.io.FileOutputStream; |
||||
|
import java.io.IOException; |
||||
|
import java.io.InputStream; |
||||
|
import java.util.*; |
||||
|
import java.util.zip.ZipEntry; |
||||
|
import java.util.zip.ZipFile; |
||||
|
|
||||
|
import static org.apache.lucene.store.BufferedIndexInput.BUFFER_SIZE; |
||||
|
|
||||
|
public class ZipUtils { |
||||
|
private static final Logger LOGGER = LoggerFactory.getLogger(ZipUtils.class); |
||||
|
    /**
     * Unzip an archive.
     * @param srcFile     source zip file
     * @param destDirPath target directory for the extracted files
     * @throws RuntimeException if extraction fails
     */
||||
|
|
||||
|
public static Map<String,List<String>> unZip(File srcFile, String destDirPath) throws RuntimeException { |
||||
|
Map<String,List<String>> fileNameMap = new HashMap<>(); |
||||
|
long start = System.currentTimeMillis(); |
||||
|
// 判断源文件是否存在 |
||||
|
if (!srcFile.exists()) { |
||||
|
return fileNameMap; |
||||
|
// throw new RuntimeException(srcFile.getPath() + "所指文件不存在"); |
||||
|
} |
||||
|
// 开始解压 |
||||
|
ZipFile zipFile = null; |
||||
|
try { |
||||
|
zipFile = new ZipFile(srcFile); |
||||
|
Enumeration<?> entries = zipFile.entries(); |
||||
|
while (entries.hasMoreElements()) { |
||||
|
ZipEntry entry = (ZipEntry) entries.nextElement(); |
||||
|
// System.out.println("解压后文件名称 :" + entry.getName()); |
||||
|
List fileNameList = new ArrayList<>(); |
||||
|
if(entry.getName().contains(".xlsx")){ |
||||
|
fileNameList.add(entry.getName()); |
||||
|
fileNameMap.put("excelName",fileNameList); |
||||
|
}else if(entry.getName().contains("txt")){ |
||||
|
fileNameList.add(entry.getName()); |
||||
|
fileNameMap.put("excelName",fileNameList); |
||||
|
}else{ |
||||
|
if(entry.getName().contains("/")) { |
||||
|
String files[] = entry.getName().split("/"); |
||||
|
String key = entry.getName().split("/")[0]; |
||||
|
if (files.length >1) { |
||||
|
String value = entry.getName().split("/")[1]; |
||||
|
if (fileNameMap.containsKey(key)) { |
||||
|
fileNameList = fileNameMap.get(key); |
||||
|
fileNameList.add(value); |
||||
|
fileNameMap.put(key, fileNameList); |
||||
|
} else { |
||||
|
fileNameList.add(value); |
||||
|
fileNameMap.put(key, fileNameList); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
// 如果是文件夹,就创建个文件夹 |
||||
|
if (entry.isDirectory()) { |
||||
|
String dirPath = destDirPath + "/" + entry.getName(); |
||||
|
File dir = new File(dirPath); |
||||
|
dir.mkdirs(); |
||||
|
} else { |
||||
|
// 如果是文件,就先创建一个文件,然后用io流把内容copy过去 |
||||
|
File targetFile = new File(destDirPath + "/" + entry.getName()); |
||||
|
// 保证这个文件的父文件夹必须要存在 |
||||
|
if(!targetFile.getParentFile().exists()){ |
||||
|
targetFile.getParentFile().mkdirs(); |
||||
|
} |
||||
|
targetFile.createNewFile(); |
||||
|
// 将压缩文件内容写入到这个文件中 |
||||
|
InputStream is = zipFile.getInputStream(entry); |
||||
|
FileOutputStream fos = new FileOutputStream(targetFile); |
||||
|
int len; |
||||
|
byte[] buf = new byte[BUFFER_SIZE]; |
||||
|
while ((len = is.read(buf)) != -1) { |
||||
|
fos.write(buf, 0, len); |
||||
|
} |
||||
|
// 关流顺序,先打开的后关闭 |
||||
|
fos.close(); |
||||
|
is.close(); |
||||
|
} |
||||
|
} |
||||
|
long end = System.currentTimeMillis(); |
||||
|
LOGGER.info("解压完成,耗时:" + (end - start) +" ms"); |
||||
|
} catch (Exception e) { |
||||
|
e.printStackTrace(); |
||||
|
throw new RuntimeException("unzip error from ZipUtils", e); |
||||
|
} finally { |
||||
|
if(zipFile != null){ |
||||
|
try { |
||||
|
zipFile.close(); |
||||
|
} catch (IOException e) { |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
return fileNameMap; |
||||
|
} |
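    // Hedged usage sketch (paths are illustrative assumptions):
    //
    //     File zip = new File("/data/upload/task_123.zip");
    //     Map<String, List<String>> names = ZipUtils.unZip(zip, "/data/upload/task_123");
    //     List<String> dataFiles = names.get("excelName"); // .xlsx / .txt entries are recorded under this key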
||||
|
|
||||
|
|
||||
|
public static String getZipFileName(String zipName, String zipPath) { |
||||
|
String zipFileName = zipName.replace(".zip",""); |
||||
|
// 判断zip这个文件夹是否存在,不存在则创建 |
||||
|
File zipFile=new File(zipPath+zipFileName); |
||||
|
if(!zipFile.exists()){//如果文件夹不存在 |
||||
|
zipFile.mkdir();//创建文件夹 |
||||
|
} |
||||
|
return zipFileName; |
||||
|
} |
||||
|
} |
@ -0,0 +1,38 @@ |
|||||
|
package com.bfd.mf.job.worker; |
||||
|
|
||||
|
import com.bfd.mf.job.config.AppConfig; |
||||
|
import com.bfd.mf.job.service.alarm.AlarmService; |
||||
|
import com.bfd.mf.job.service.taskCount.TaskCountService; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
@Component |
||||
|
public class AlarmProducer extends AbstractWorker { |
||||
|
private static final Logger LOGGER = LoggerFactory.getLogger(AlarmProducer.class); |
||||
|
|
||||
|
@Autowired |
||||
|
private AppConfig config; |
||||
|
@Autowired |
||||
|
private AlarmService alarmService; |
||||
|
@Override |
||||
|
protected Integer getThreadCount() { |
||||
|
return config.getAlarmProducerThreadCount(); |
||||
|
} |
||||
|
@Override |
||||
|
protected String getThreadNameFormat() { |
||||
|
return "alarm-producer-%d"; |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
protected void work(String json) { |
||||
|
LOGGER.info("[AlarmProducer] work start ... "); |
||||
|
alarmService.produce(); |
||||
|
try { |
||||
|
Thread.sleep(config.getIntervalTime()); |
||||
|
} catch (InterruptedException e) { |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
} |
||||
|
} |
@ -0,0 +1,40 @@ |
|||||
|
package com.bfd.mf.job.worker; |
||||
|
|
||||
|
import com.bfd.mf.job.config.AppConfig; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
@Component |
||||
|
public class SQOutPutProducer extends AbstractWorker { |
||||
|
private static final Logger LOGGER = LoggerFactory.getLogger(SQOutPutProducer.class); |
||||
|
|
||||
|
@Autowired |
||||
|
private AppConfig config; |
||||
|
|
||||
|
@Override |
||||
|
protected Integer getThreadCount() { |
||||
|
return config.getQueryProducerThreadCount(); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
protected String getThreadNameFormat() { |
||||
|
return "backtrace-producer-%d"; |
||||
|
} |
||||
|
|
||||
|
    /**
     * This worker pulls data, specifically for special-topic (专题) data.
     */
||||
|
@Override |
||||
|
protected void work(String json) { |
||||
|
LOGGER.info("[SQ - OutPutProducer] work start ... "); |
||||
|
// outputService.tryAcquire(); |
||||
|
// outputService.produce(); |
||||
|
try { |
||||
|
Thread.sleep(config.getIntervalTime()); |
||||
|
} catch (InterruptedException e) { |
||||
|
e.printStackTrace(); |
||||
|
} |
||||
|
} |
||||
|
} |