# coding:utf8
"""Django endpoint + background worker for ingesting text into a Chroma store.

``createChroma`` accepts POST'ed JSON tasks and enqueues them; ``upload`` is a
long-running worker loop that splits each task's content into chunks, writes
them into a per-``fieldName`` Chroma collection, and reports the outcome to
Kafka (both on success and on failure).
"""
import io
import json
import os
import queue
import sys
import time
import traceback
import uuid

# Make stdout UTF-8 capable regardless of the parent environment's locale.
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8')

# Put this file's directory and its parent on sys.path so the project-local
# packages below resolve when the script is launched directly.
cur_dir = os.path.dirname(os.path.abspath(__file__)) or os.getcwd()
par_dir = os.path.abspath(os.path.join(cur_dir, os.path.pardir))
sys.path.append(cur_dir)
sys.path.append(par_dir)

from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt

from log_util.set_logger import set_logger
from text_analysis.chroma1 import LangChainChroma
from text_analysis.tools import to_kafka
from text_analysis.tools.tool import parse_data

logging = set_logger('logs/results.log')

# Task queue shared between the HTTP view (producer) and upload() (consumer).
task_queue = queue.Queue()


@csrf_exempt
def createChroma(request):
    """Accept a JSON task via POST and enqueue it for the ``upload`` worker.

    Always returns HTTP 200 with a ``{"code", "msg"}`` JSON envelope:
    code 1 on success, code 0 for a malformed body or a non-POST request.
    """
    if request.method != 'POST':
        return HttpResponse(json.dumps({"code": 0, "msg": "请求方式错误,改为post请求"}, ensure_ascii=False))
    try:
        raw_data = json.loads(request.body)
        task_queue.put(raw_data)
        return HttpResponse(json.dumps({"code": 1, "msg": "请求正常!"}, ensure_ascii=False))
    except Exception:
        logging.error(traceback.format_exc())
        return HttpResponse(json.dumps({"code": 0, "msg": "请求json格式不正确!"}, ensure_ascii=False))


def upload():
    """Worker loop: drain ``task_queue`` and index each task into Chroma.

    For every dequeued task the result envelope (``raw_data["result"]``) is
    populated and the task is forwarded to Kafka, on success and on failure.
    Sleeps when the queue is empty; never returns.
    """
    while True:
        # Re-initialised each iteration so the except-branch can tell whether
        # a task was actually dequeued before the failure occurred.
        raw_data = None
        res_tmp = {}
        try:
            if task_queue.qsize() > 0:
                logging.info("取任务队列长度{}".format(task_queue.qsize()))
                raw_data = task_queue.get()
                # Mirror the task's declared output fields; fresh id per run.
                output = raw_data["output"]
                res_tmp = {key: "" for key in output}
                if "id" in res_tmp:
                    res_tmp["id"] = str(uuid.uuid4())
                res_tmp["isLast"] = 1
                logging.info("任务数据为:{}".format(raw_data))
                content = parse_data(raw_data, raw_data["input"]["content"])
                fieldName = parse_data(raw_data, raw_data["input"]["fieldName"])
                dataId = parse_data(raw_data, raw_data["input"]["dataId"])
                # BUG FIX: the result envelope used to be created only on the
                # success path, so the validation branch below raised KeyError
                # when assigning into raw_data["result"]. Initialise it first.
                raw_data["result"] = {"successCode": "", "errorLog": "", "results": ""}
                if content and fieldName and dataId:
                    vector_db = LangChainChroma(fieldName)
                    try:
                        docs = vector_db.text_splitter.split_text(content)
                        res, db_count = vector_db.add_documents(docs, dataId)
                    finally:
                        # BUG FIX: close the DB handle even when splitting or
                        # inserting raises (previously leaked on error).
                        vector_db.db_close()
                    logging.info('当前数据划分{}个块。数据库{}共有{}个块'.format(len(res), fieldName, db_count))
                    res_tmp['resultsID'] = res
                    if res:
                        res_tmp["status"] = 1
                        raw_data["result"]["successCode"] = "1"
                        raw_data["result"]["status"] = 1
                        raw_data["result"]["message"] = "成功"
                    else:
                        res_tmp["status"] = 3
                        raw_data["result"]["successCode"] = "0"
                        raw_data["result"]["status"] = 2
                        raw_data["result"]["message"] = "异常"
                else:
                    res_tmp["status"] = 3
                    raw_data["result"]["successCode"] = "0"
                    raw_data["result"]["errorLog"] = "请检查content/fieldName/dataId,要求非空"
                    raw_data["result"]["status"] = 2
                    raw_data["result"]["message"] = "请检查content/fieldName/dataId,要求非空"
                raw_data["result"]["results"] = json.dumps(res_tmp, ensure_ascii=False)
                logging.info("结果数据为:{}".format(raw_data))
                to_kafka.send_kafka(raw_data, logging)
            else:
                # No pending task; sleep to avoid busy-waiting.
                time.sleep(10)
        except Exception:
            logging.error(traceback.format_exc())
            # BUG FIX: the old handler referenced raw_data/res_tmp even when
            # the exception fired before they were bound (NameError inside the
            # handler). Only build and send an error report when a task was
            # actually dequeued.
            if raw_data is not None:
                res_tmp["status"] = 3
                raw_data["result"] = {
                    "successCode": "0",
                    "errorLog": traceback.format_exc(),
                    "status": 2,
                    "message": "异常",
                    "results": json.dumps(res_tmp, ensure_ascii=False),
                }
                to_kafka.send_kafka(raw_data, logging)