diff --git a/cl_query_data_job/src/main/java/com/bfd/mf/job/service/backtrace/BacktraceService.java b/cl_query_data_job/src/main/java/com/bfd/mf/job/service/backtrace/BacktraceService.java
index e295c81..1361911 100644
--- a/cl_query_data_job/src/main/java/com/bfd/mf/job/service/backtrace/BacktraceService.java
+++ b/cl_query_data_job/src/main/java/com/bfd/mf/job/service/backtrace/BacktraceService.java
@@ -53,12 +53,12 @@ public class BacktraceService {
     @PostConstruct
     public void init() {
         // Register the data query sources
-        EsUtils.registerCluster(config.esNormalClusterName(), config.esNormalAddress()); // es-source from the config file
+//        EsUtils.registerCluster(config.esNormalClusterName(), config.esNormalAddress()); // es-source from the config file
         EsUtils.registerCluster(config.esMiniClusterName(), config.esMiniAddress()); // es-target from the config file
-        pRateLimiter = RateLimiter.create(1.0D / config.getPeriodS());
-        cRateLimiter = RateLimiter.create(1.0D / config.getPeriodS());
-        dataRateLimiter = RateLimiter.create(config.esMiniBulkRate());
-        // kafkaProducer = Kafka010Utils.getProducer(config.getBrokerList());
+//        pRateLimiter = RateLimiter.create(1.0D / config.getPeriodS());
+//        cRateLimiter = RateLimiter.create(1.0D / config.getPeriodS());
+//        dataRateLimiter = RateLimiter.create(config.esMiniBulkRate());
+//        kafkaProducer = Kafka010Utils.getProducer(config.getBrokerList());
     }

     /*
diff --git a/cl_query_data_job/src/main/java/com/bfd/mf/job/service/query/QueryService.java b/cl_query_data_job/src/main/java/com/bfd/mf/job/service/query/QueryService.java
index d469c6b..78921f0 100644
--- a/cl_query_data_job/src/main/java/com/bfd/mf/job/service/query/QueryService.java
+++ b/cl_query_data_job/src/main/java/com/bfd/mf/job/service/query/QueryService.java
@@ -61,9 +61,9 @@ public class QueryService {
     @PostConstruct
     public void init() {
         // Register the data query sources
-        EsUtils.registerCluster(config.esNormalClusterName(), config.esNormalAddress()); // es-source from the config file
+//        EsUtils.registerCluster(config.esNormalClusterName(), config.esNormalAddress()); // es-source from the config file
         EsUtils.registerCluster(config.esMiniClusterName(), config.esMiniAddress()); // es-target from the config file
-        pRateLimiter = RateLimiter.create(1.0D / config.getPeriodS());
+//        pRateLimiter = RateLimiter.create(1.0D / config.getPeriodS());
         kafkaProducer = Kafka010Utils.getProducer(config.getBrokerList());

 //        cRateLimiter = RateLimiter.create(1.0D / config.getPeriodS());
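The two init() hunks above disable the es-normal registration and the Guava rate limiters. For reference, the following is a minimal, runnable sketch of the throttling pattern those RateLimiter.create(...) calls implement; the period value, the second rate, and the loop body are placeholders standing in for config.getPeriodS() / config.esMiniBulkRate(), not code from this repository.

// Illustrative sketch (not from this repo): how the commented-out Guava
// RateLimiter calls throttle a producer loop and the ES bulk writes.
import com.google.common.util.concurrent.RateLimiter;

public class RateLimiterSketch {
    public static void main(String[] args) {
        double periodS = 5.0;  // assumed polling period in seconds
        RateLimiter pRateLimiter = RateLimiter.create(1.0D / periodS); // at most one permit every periodS seconds
        RateLimiter dataRateLimiter = RateLimiter.create(3.0D);        // assumed bulk rate: 3 permits per second

        for (int i = 0; i < 3; i++) {
            pRateLimiter.acquire();      // blocks until the next permit is available
            System.out.println("produce batch " + i);
            dataRateLimiter.acquire();   // separately throttles the bulk write step
            System.out.println("bulk write " + i);
        }
    }
}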
diff --git a/cl_query_data_job/src/main/java/com/bfd/mf/job/service/statistics/StatisticsService.java b/cl_query_data_job/src/main/java/com/bfd/mf/job/service/statistics/StatisticsService.java
index 25fa197..62b9d65 100644
--- a/cl_query_data_job/src/main/java/com/bfd/mf/job/service/statistics/StatisticsService.java
+++ b/cl_query_data_job/src/main/java/com/bfd/mf/job/service/statistics/StatisticsService.java
@@ -38,7 +38,7 @@ public class StatisticsService {
     @PostConstruct
     public void init() {
         // Register the data query sources
-        EsUtils.registerCluster(config.esNormalClusterName(), config.esNormalAddress()); // es-source from the config file
+//        EsUtils.registerCluster(config.esNormalClusterName(), config.esNormalAddress()); // es-source from the config file
         EsUtils.registerCluster(config.esMiniClusterName(), config.esMiniAddress()); // es-target from the config file
     }

@@ -49,14 +49,14 @@ public class StatisticsService {
         LOGGER.info("------------------------------------------------------------------ StatisticsService ------------------------------------------------------");
         long start = System.currentTimeMillis();
         //------- Count the overall totals on 134 ------------------------------------------------------------------
-        String clusterName = config.esNormalClusterName(); // get the clusterName of 134
-        statisticsTotal(clusterName);
+        // String clusterName = config.esNormalClusterName(); // get the clusterName of 134
+        //statisticsTotal(clusterName);
         long end = System.currentTimeMillis();
         LOGGER.info("Statistics Total, took:{} ms.",(end - start));
         //------- Count the per-task totals on 147 -------------------------------------------------------------
         start = System.currentTimeMillis();
-        clusterName = config.esMiniClusterName(); // get the clusterName of 147
+        String clusterName = config.esMiniClusterName(); // get the clusterName of 147
         statisticsTask(clusterName);
         end = System.currentTimeMillis();
         LOGGER.info("Statistics Task, took:{} ms.",(end - start));
@@ -71,13 +71,13 @@ public class StatisticsService {
         end = System.currentTimeMillis();
         LOGGER.info("Statistics Subject Normal, took:{} ms.",(end - start));
         // For [L'Oréal] tasks, the counting has to be done this way
-        start = System.currentTimeMillis();
-        List subjectIds1 = subjectRepository.findAllOlySubjectIds();
-        for (BigInteger subjectId: subjectIds1) {
-            statisticsSubject(subjectId,clusterName);
-        }
-        end = System.currentTimeMillis();
-        LOGGER.info("Statistics Subject OLY, took:{} ms.",(end - start));
+//        start = System.currentTimeMillis();
+//        List subjectIds1 = subjectRepository.findAllOlySubjectIds();
+//        for (BigInteger subjectId: subjectIds1) {
+//            statisticsSubject(subjectId,clusterName);
+//        }
+//        end = System.currentTimeMillis();
+//        LOGGER.info("Statistics Subject OLY, took:{} ms.",(end - start));
     }

@@ -151,7 +151,7 @@ public class StatisticsService {
         }else{
             siteTodayCount = 0;
         }
-        switch (i) {
+        switch (i) { //
             case 0:
                 subjectCrawlDatFlagMap.put("keyword", siteCount);
                 subjectCrawlDataFlagTodayMap.put("keyword", siteTodayCount);
@@ -238,6 +238,7 @@ public class StatisticsService {
         List taskList = taskRepository.findAllBydel0();
         // Iterate over the task list, build ES queries from its conditions, query the matching indices, then write the results back to the task table
         for (Task task: taskList) {
+            System.out.println(" Task ID ===== " + task);
             Long taskId = task.getId().longValue();
             String crawlDataFlag = task.getCrawlDataFlag();
             String indexNamePre = config.getIndexNamePre();
diff --git a/cl_query_data_job/src/main/resources/application.yml b/cl_query_data_job/src/main/resources/application.yml
index 5f36ade..48f15d5 100644
--- a/cl_query_data_job/src/main/resources/application.yml
+++ b/cl_query_data_job/src/main/resources/application.yml
@@ -6,9 +6,9 @@ logging:
 spring:
   datasource:
     driver-class-name: com.mysql.jdbc.Driver
-    username: root
-    password: Bfd123!@#
-    url: jdbc:mysql://172.18.1.134:3306/intelligent_crawl?useOldAliasMetadataBehavior=true&characterEncoding=UTF-8&zeroDateTimeBehavior=round
+    username: crawl
+    password: crawl
+    url: jdbc:mysql://172.18.1.181:3306/intelligent_crawl?useOldAliasMetadataBehavior=true&characterEncoding=UTF-8&zeroDateTimeBehavior=round
     hikari:
       maximum-pool-size: 10
       minimum-idle: 1
@@ -29,14 +29,14 @@ worker:
   ## Service switches: true means the service is enabled
   enable-analysis-producer: false
   enable-analysis-consumer: false
-  enable-statistics-producer: false
+  enable-statistics-producer: true
   enable-query-producer: false
   enable-backtrace-producer: false
   enable-rw-oly-producer: false
   enable-up-load-producer: false
   enable-output-producer: false
   enable-taskcount-producer: false
-  enable-alarm-producer: true
+  enable-alarm-producer: false
   ## Thread counts for the enabled services
   statistics-producer-thread-count: 1
   query-producer-thread-count: 10
@@ -64,6 +64,29 @@ worker:
   uploadZipPath : /opt/nfsdata/uploadFiles/
   indexNamePre : cl_major_
+
+#  es-normal:
+#    name: SQ_Normal
+#    address: 172.16.10.61:9301
+#    upper: 2000-01-01
+#    standby: cl_major_*
+#  es-reply-source:
+#    name: SQ_Normal
+#    address: 172.16.10.61:9301
+#    upper: 2000-01-01
+#    standby: cl_major_*
+#  es-mini:
+#    name: SQ_Normal
+#    address: 172.16.10.61:9301
+#    bulk-thread-count: 5
+#    bulk-rate: 3
+#    bulk-size: 100
+#  es-logstash:
+#    name: SQ_Normal
+#    address: 172.16.10.61:9301
+#    upper: 2021-01-01
+#    standby: logstash-2021.05.13
   es-normal:
     name: SQ_Normal_new
     address: 172.18.1.134:9301
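The es-* blocks in the application.yml hunk above are what the job code reads back through accessors such as config.esMiniClusterName(), config.esMiniAddress() and config.esMiniBulkRate(). Below is a minimal sketch of how such a block could be bound with Spring Boot's @ConfigurationProperties; the prefix, class name and fields are illustrative assumptions, not this project's actual config class.

// Illustrative sketch (not this repo's AppConfig): binding an es-mini-style block
// from application.yml. The "worker.es-mini" prefix and field names are assumptions.
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

@Component
@ConfigurationProperties(prefix = "worker.es-mini")
public class EsMiniProperties {
    private String name;      // cluster name
    private String address;   // transport address (host:port)
    private double bulkRate;  // maps bulk-rate; could feed RateLimiter.create(bulkRate)

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
    public String getAddress() { return address; }
    public void setAddress(String address) { this.address = address; }
    public double getBulkRate() { return bulkRate; }
    public void setBulkRate(double bulkRate) { this.bulkRate = bulkRate; }
}

With Spring Boot's relaxed binding, the kebab-case key bulk-rate maps onto the bulkRate field.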
diff --git a/cl_search_api/src/main/java/com/bfd/mf/common/util/es/EsUtils.java b/cl_search_api/src/main/java/com/bfd/mf/common/util/es/EsUtils.java
index 1f1d07b..fcfb301 100644
--- a/cl_search_api/src/main/java/com/bfd/mf/common/util/es/EsUtils.java
+++ b/cl_search_api/src/main/java/com/bfd/mf/common/util/es/EsUtils.java
@@ -183,7 +183,7 @@ public abstract class EsUtils {
                     .setQuery(boolQueryBuilder)
                     .setSearchType(SearchType.DEFAULT)
                     .setSize(limit)
-                    .setScroll(new TimeValue(20000))
+                    .setScroll(new TimeValue(300000))
                     .execute()
                     .actionGet(); // Note: the initial search does not contain the data
             }catch (Exception e){
@@ -195,7 +195,7 @@ public abstract class EsUtils {
             try {
                 searchResponse = client
                         .prepareSearchScroll(scrollId)
-                        .setScroll(new TimeValue(20000))
+                        .setScroll(new TimeValue(300000))
                         .execute()
                         .actionGet();
             } catch (Exception e) {
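The EsUtils change above raises the scroll keep-alive from 20 s to 300 s (the TimeValue constructor takes milliseconds). For reference, here is a minimal sketch of the transport-client scroll loop that this timeout governs; the client, index pattern and query are illustrative assumptions, not code lifted from EsUtils.

// Illustrative sketch (not EsUtils itself): a scroll loop where the keep-alive
// passed to setScroll must outlive the time spent processing each page.
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilders;

public class ScrollSketch {
    // "client" is assumed to be an already-registered transport client; "cl_major_*" is an assumed index pattern.
    static void scrollAll(Client client) {
        TimeValue keepAlive = new TimeValue(300000); // 300 s; each page must be consumed before this expires
        SearchResponse response = client.prepareSearch("cl_major_*")
                .setQuery(QueryBuilders.matchAllQuery())
                .setSize(1000)
                .setScroll(keepAlive)
                .get();
        while (response.getHits().getHits().length > 0) {
            // process the current page here ...
            response = client.prepareSearchScroll(response.getScrollId())
                    .setScroll(keepAlive)   // renew the scroll context for another 300 s
                    .get();
        }
    }
}

The larger keep-alive mainly matters when a single page takes longer than 20 s to process, which would otherwise expire the scroll context mid-export.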
diff --git a/cl_search_api/src/main/java/com/bfd/mf/controller/SearchDataController.java b/cl_search_api/src/main/java/com/bfd/mf/controller/SearchDataController.java
index 25465be..dc5af7a 100644
--- a/cl_search_api/src/main/java/com/bfd/mf/controller/SearchDataController.java
+++ b/cl_search_api/src/main/java/com/bfd/mf/controller/SearchDataController.java
@@ -34,8 +34,6 @@ public class SearchDataController {
     @Autowired
     private SearchDataService searchDataService;
     @Autowired
-    private SiteRepository siteRepository;
-    @Autowired
     private UpdateService updateService;

@@ -58,13 +56,9 @@ public class SearchDataController {
             }else if(subjectId.equals("")){ // no subject selected
                 result.put(ESConstant.ALLDOCNUMBER,0L);
                 result.put(ESConstant.MONITORLISTS,new ArrayList<>());
-            }else{ // export subject data
-                //Integer searchType = queryRequest.getSearchType();
-//                if(searchType == 0) {
-//                    result = searchDataService.exportDataInSubjectIndexTestGroupBy(queryRequest);
-//                }else{
+            }else{
+                // export subject data
                 result = searchDataService.exportDataInSubjectIndex(queryRequest);
-                // }
             }
             return ResponseWrapper.buildResponse(RTCodeEnum.C_OK, result);
         }else {
diff --git a/cl_search_api/src/main/resources/application.yml b/cl_search_api/src/main/resources/application.yml
index b4a443b..8fc57eb 100644
--- a/cl_search_api/src/main/resources/application.yml
+++ b/cl_search_api/src/main/resources/application.yml
@@ -11,8 +11,8 @@ spring:
   datasource:
     driver-class-name: com.mysql.jdbc.Driver
     username: root
-    password: bfd123
-    url: jdbc:mysql://172.26.11.113:3306/intelligent_crawl_item?useOldAliasMetadataBehavior=true&characterEncoding=UTF-8&zeroDateTimeBehavior=round
+    password: Bfd123!@#
+    url: jdbc:mysql://172.18.1.134:3306/intelligent_crawl?useOldAliasMetadataBehavior=true&characterEncoding=UTF-8&zeroDateTimeBehavior=round
     hikari:
       maximum-pool-size: 10
diff --git a/logs/sdkclient_error.log b/logs/sdkclient_error.log
deleted file mode 100644
index adb7215..0000000
--- a/logs/sdkclient_error.log
+++ /dev/null
@@ -1,4 +0,0 @@
-2021-05-22 17:21:45,031 ERROR [com.bfd.mf.job.util.EMailUtils] EMailUtils:sendEmail error. title:{cid=Nreuters} emailList:[jing.du@percent.cn]
-2021-05-22 17:21:45,039 ERROR [com.bfd.mf.job.util.EMailUtils] EMailUtils:sendEmail error. title:{cid=Nrusi} emailList:[jing.du@percent.cn]
-2021-05-22 17:21:45,045 ERROR [com.bfd.mf.job.util.EMailUtils] EMailUtils:sendEmail error. title:{cid=Nusadefense} emailList:[jing.du@percent.cn]
-2021-05-22 17:21:45,053 ERROR [com.bfd.mf.job.util.EMailUtils] EMailUtils:sendEmail error. title:{cid=Nenotrans} emailList:[jing.du@percent.cn]
diff --git a/logs/sdkclient_error.log.2021-05-12-14 b/logs/sdkclient_error.log.2021-05-12-14
deleted file mode 100644
index e69de29..0000000