# coding:utf8
"""HTTP entry point and worker loop for calling the ChatGPT API.

Tasks posted to chatGptNew() are queued; chatgpt() drains the queue, calls the
GPT endpoint, and pushes results to Kafka. zk_monitoring() watches a ZooKeeper
node so paused or superseded task versions can be filtered out.
"""
import os, sys
import io
import time
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8')
cur_dir = os.path.dirname(os.path.abspath(__file__)) or os.getcwd()
par_dir = os.path.abspath(os.path.join(cur_dir, os.path.pardir))
sys.path.append(cur_dir)
sys.path.append(par_dir)
import json
import uuid
import queue
import traceback
import requests
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from kazoo.client import KazooClient
from kazoo.protocol.states import EventType
from text_analysis.tools import to_kafka
from text_analysis.tools.tool import get_content, parse_gptResult
from text_analysis.read_config import load_config
from log_util.set_logger import set_logger

logging = set_logger('logs/results.log')

# Priority queue of pending tasks: (priority, enqueue_time, task_dict).
task_queue = queue.PriorityQueue()
# scenes_id -> latest {"version", "operation"} pushed via ZooKeeper; used to skip stale tasks.
stop_dict = {}

config = load_config()
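# For reference: the only config keys read in this module are config["gptmodel"]["url"]
# and config["zookeeper"]["zkhost"]. A minimal sketch of what load_config() is assumed
# to return (the real format lives in text_analysis.read_config; the URL below is
# illustrative only):
#
#     {
#         "gptmodel": {"url": "https://api.openai.com/v1/chat/completions"},
#         "zookeeper": {"zkhost": "zk1:2181,zk2:2181,zk3:2181"}
#     }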
@csrf_exempt
def chatGptNew(request):
    """Accept a task via POST, enqueue it, and return immediately."""
    if request.method == 'POST':
        try:
            raw_data = json.loads(request.body)
            # Traced tasks get a negative priority so the worker serves them first.
            if "trace" in raw_data and raw_data["trace"] == True:
                task_queue.put((-1, time.time(), raw_data))
            else:
                task_queue.put((1, time.time(), raw_data))
            return HttpResponse(json.dumps({"code": 1, "msg": "请求正常!"}, ensure_ascii=False))
        except Exception:
            logging.error(traceback.format_exc())
            return HttpResponse(json.dumps({"code": 0, "msg": "请求json格式不正确!"}, ensure_ascii=False))
    else:
        return HttpResponse(json.dumps({"code": 0, "msg": "请求方式错误,改为post请求"}, ensure_ascii=False))
def chatgpt():
    """Worker loop: pull tasks off task_queue, call the GPT API, push results to Kafka."""
    while True:
        raw_data, res_tmp = None, {}
        try:
            if task_queue.qsize() > 0:
                p, t, raw_data = task_queue.get(timeout=1)
                logging.info("当前任务队列长度{}".format(task_queue.qsize() + 1))
                output = raw_data["output"]
                res_tmp = {key: "" for key in output}
                if "id" in res_tmp:
                    res_tmp["id"] = str(uuid.uuid4())
                res_tmp["isLast"] = 1
                task_id = raw_data["scenes_id"]
                task_version = raw_data["version"]
                logging.info("当前version信息为:{}".format(stop_dict))
                # Skip tasks whose version no longer matches the latest version pushed via ZooKeeper.
                if task_id in stop_dict and task_version != stop_dict[task_id]["version"]:
                    logging.info("已暂停任务,过滤掉。{}".format(raw_data))
                    continue
                data = get_content(raw_data, logging)
                url = config["gptmodel"]["url"]
                headers = {
                    "Content-Type": "application/json;charset=UTF-8",
                    "Authorization": "Bearer " + data["authorization"]
                }
                payload = json.dumps({
                    "model": data["model"],
                    "messages": [{"role": "user", "content": data["prompt"]}],
                    "temperature": float(data["temperature"]),
                    "top_p": float(data["top_p"]),
                    "n": int(data["n"])
                })
                proxies = {
                    'http': 'http://jian.mao:maojian123@@oversea_vpn.baifendian.com:3128',
                    'https': 'http://jian.mao:maojian123@@oversea_vpn.baifendian.com:3128'
                }
                response = requests.request("POST", url, headers=headers, data=payload, timeout=180, proxies=proxies)
                logging.info("GPT返回值:{}-{}".format(response, response.text))
                d = json.loads(response.text)
                result = d['choices'][0]['message']['content']
                # fieldType: 0 means the result is plain text, 1 means it should be parsed as JSON.
                fieldType = raw_data["input"]['fieldType']
                if fieldType == 0:
                    res_tmp["content"] = result
                    res_tmp_json = json.dumps(res_tmp, ensure_ascii=False)
                    raw_data["result"] = {"successCode": "1", "errorLog": "", "results": res_tmp_json, "status": 1, "message": "成功"}
                else:
                    res = parse_gptResult(res_tmp, result)
                    if res:
                        res["isLast"] = 1
                        res_tmp_json = json.dumps(res, ensure_ascii=False)
                        raw_data["result"] = {"successCode": "1", "errorLog": "", "results": res_tmp_json, "status": 1, "message": "成功"}
                    else:
                        res_tmp_json = json.dumps(res_tmp, ensure_ascii=False)
                        raw_data["result"] = {"successCode": "0", "errorLog": "GPT返回值不是json格式,无法解析!", "results": res_tmp_json, "status": 2, "message": "GPT返回结果非json格式"}
                logging.info(raw_data)
                to_kafka.send_kafka(raw_data, logging)
            else:
                time.sleep(10)
        except queue.Empty:
            # get() timed out on an empty queue; just wait for new tasks.
            logging.info("该线程任务队列为空,等待新任务")
        except Exception:
            logging.error("调用gpt失败{}-{}".format(raw_data, traceback.format_exc()))
            # Report the failure downstream only if a task was actually dequeued.
            if raw_data is not None:
                res_tmp_json = json.dumps(res_tmp, ensure_ascii=False)
                raw_data["result"] = {"successCode": "0", "errorLog": traceback.format_exc(), "results": res_tmp_json, "status": 2, "message": "异常"}
                to_kafka.send_kafka(raw_data, logging)
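# Shape of the message handed to to_kafka.send_kafka() by the worker above: the original
# task dict with a "result" field attached. Values here are illustrative; "results" is a
# JSON string built from the requested "output" keys:
#
#     {
#         ...original request fields...,
#         "result": {
#             "successCode": "1",
#             "errorLog": "",
#             "results": "{\"id\": \"<uuid>\", \"content\": \"...\", \"isLast\": 1}",
#             "status": 1,
#             "message": "成功"
#         }
#     }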
def zk_monitoring():
    """Watch the /analyze ZooKeeper node and record stop/pause versions per scenes_id."""
    try:
        # Production environment
        zk = KazooClient(hosts=config['zookeeper']['zkhost'])
        # Test environment
        # zk = KazooClient(hosts='172.16.12.55:2181,172.16.12.56:2181,172.16.12.57:2181')
        zk.start()

        # Register the data watcher on /analyze.
        @zk.DataWatch("/analyze")
        def watch_node(data, stat, event):
            if event is not None and event.type == EventType.CHANGED:
                data, stat = zk.get("/analyze")
                logging.info("执行删除操作:{}".format(data))
                d = json.loads(data)
                scenes_id = d["scenes_id"]
                stop_dict[scenes_id] = {}
                stop_dict[scenes_id]["version"] = d["version"]
                stop_dict[scenes_id]["operation"] = d["operation"]

        # Keep the thread alive so the watcher keeps receiving node changes.
        try:
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            logging.info("Stopping...")
        # Close the connection.
        zk.stop()
        zk.close()
    except Exception:
        logging.error(traceback.format_exc())
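# The /analyze node is expected to hold a JSON payload like the following (inferred from
# watch_node() above; the "operation" value is an assumption):
#
#     {"scenes_id": 123, "version": 2, "operation": "stop"}

if __name__ == "__main__":
    # Standalone sketch (assumption): the deployed service presumably starts these loops
    # alongside Django, and that wiring is not part of this file. Here the worker runs in
    # a daemon thread while the ZooKeeper watcher blocks the main thread.
    import threading
    threading.Thread(target=chatgpt, daemon=True).start()
    zk_monitoring()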