# coding:utf8
import os
import sys
import io
import json
import time
import queue
import traceback
from datetime import datetime

import requests
from jsonpath_ng import parse
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt

sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8')

cur_dir = os.path.dirname(os.path.abspath(__file__)) or os.getcwd()
par_dir = os.path.abspath(os.path.join(cur_dir, os.path.pardir))
sys.path.append(cur_dir)
sys.path.append(par_dir)

from text_analysis.tools import to_kafka
from text_analysis.tools.tool import get_data
from log_util.set_logger import set_logger

logging = set_logger('logs/results.log')
# Task queue: raw requests accepted by the ASR view.
task_queue = queue.Queue()
# Data queue: uploaded tasks waiting for their transcription result.
data_queue = queue.Queue()

@csrf_exempt
def ASR(request):
    if request.method == 'POST':
        try:
            raw_data = json.loads(request.body)
            task_queue.put(raw_data)
            return HttpResponse(json.dumps({"code": 1, "msg": "Request OK!"}, ensure_ascii=False))
        except Exception:
            logging.error(traceback.format_exc())
            return HttpResponse(json.dumps({"code": 0, "msg": "Request body is not valid JSON!"}, ensure_ascii=False))
    else:
        return HttpResponse(json.dumps({"code": 0, "msg": "Wrong request method, please use POST."}, ensure_ascii=False))

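# Example request payload (illustrative only: the values and the "videoSource"
# datasource name are hypothetical, but the keys are the ones read by
# upload()/getResult() below):
#
# {
#     "metadata": {
#         "admin": {
#             "fileUrl": "videoSource:$.videoUrl",   # or a plain/relative URL
#             "fromLanguage": "en"
#         }
#     },
#     "data": {
#         "videoSource": "{\"videoUrl\": \"/group1/M00/00/01/example.mp4\"}"
#     }
# }
#
# When fileUrl contains "$.", it is split on ':' into a datasource key (looked
# up in raw_data["data"]) and a JsonPath expression evaluated against that
# datasource's JSON string.
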
def upload():
    while True:
        try:
            if task_queue.qsize() > 0:
                logging.info("Task queue length: {}".format(task_queue.qsize()))
                raw_data = task_queue.get()
                # index = raw_data["metadata"]["index"]
                # datasource = raw_data["metadata"]["admin"]["datasource"]
                # if datasource not in raw_data["data"].keys():
                #     logging.info("Datasource not found! - {}".format(raw_data))
                #     continue
                # allFile = raw_data["data"][datasource]
                # currentFile = eval(allFile)[index]
                url = raw_data["metadata"]["admin"]["fileUrl"]
                if '$.' in url:
                    # Resolve the value dynamically with a JsonPath expression.
                    datasources = str(url).split(':')
                    # Index 0 is the datasource key, index 1 is the JsonPath expression.
                    datasourcestr = raw_data["data"][datasources[0]]
                    datasource = json.loads(datasourcestr)
                    # Build the JsonPath expression object.
                    expr = parse(datasources[1])
                    # Use the expression to select the JSON element.
                    match = [match.value for match in expr.find(datasource)]
                    video_url = match[0]
                else:
                    # fileUrl already holds the address itself.
                    video_url = url
                fileName = video_url.rsplit('/')[-1]
                if "http" not in video_url:
                    file = "https://caiji.percent.cn/" + video_url.lstrip("/")
                else:
                    file = video_url
                # name = raw_data["metadata"]["admin"]["fileName"]
                # if '$.' in name:
                #     # Resolve the value dynamically with a JsonPath expression.
                #     datasources = str(name).split(':')
                #     # Index 0 is the datasource key, index 1 is the JsonPath expression.
                #     datasourcestr = raw_data["data"][datasources[0]]
                #     datasource = json.loads(datasourcestr)
                #     # Build the JsonPath expression object.
                #     expr = parse(datasources[1])
                #     # Use the expression to select the JSON element.
                #     match = [match.value for match in expr.find(datasource)]
                #     fileName = match[0]

                currentFile = {"content": "", "fileName": fileName, "fileUrl": file}
                language = raw_data["metadata"]["admin"]["fromLanguage"]

                # Fetch the video from gofast.
                myfile = requests.get(file)
                starttime = datetime.now().strftime('%Y-%m-%d')
                path = 'inputdata/' + starttime
                if not os.path.exists(path):
                    os.makedirs(path)
                with open(path + '/' + fileName, 'wb') as f:
                    f.write(myfile.content)
                logging.info("Video downloaded from gofast, starting upload - {}".format(fileName))
                # Call the video upload endpoint.
                url = "https://realtime.pdeepmatrix.com/apis/media/analysis/upload"
                data = {'fromLanguage': language}
                with open(path + '/' + fileName, 'rb') as f:
                    files = {'file': f}
                    response = requests.post(url, data=data, files=files)
                d = json.loads(response.text)
                if "code" in d.keys() and d["code"] == 200:
                    # The response's "data" field carries the key used to fetch the transcription result.
                    result = d["data"]
                    raw_data["result"] = {"successCode": "1", "errorLog": "", "results": currentFile, "dataKey": result}
                    data_queue.put(raw_data)
                    logging.info("Video uploaded successfully {}".format(raw_data))
                    # to_kafka.send_kafka(raw_data, logging)
                else:
                    logging.info("Video upload failed {}-{}".format(raw_data, d))
                # TODO: delete the downloaded video file.
            else:
                # No pending tasks; sleep for a while.
                time.sleep(10)
        except Exception:
            logging.error(traceback.format_exc())

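# Minimal sketch of the cleanup step the TODO in upload() refers to: remove the
# local copy once the upload has been handled. The helper name is hypothetical
# and it is not wired into upload() here.
def cleanup_local_file(file_path):
    """Delete a downloaded video file, ignoring the case where it is already gone."""
    try:
        os.remove(file_path)
    except OSError:
        logging.error(traceback.format_exc())
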
def getResult():
    while True:
        # Poll for results every 3 seconds.
        time.sleep(3)
        try:
            if data_queue.qsize() > 0:
                logging.info("Data queue length: {}".format(data_queue.qsize()))
                raw_data = data_queue.get()
                # Query the result endpoint with the video's dataKey.
                dataKey = raw_data["result"]["dataKey"]
                url = "https://realtime.pdeepmatrix.com/apis/media/analysis/getResult"
                params = {'taskId': dataKey}
                response = requests.get(url, params=params)
                d = json.loads(response.text)
                if "code" in d.keys() and d["code"] == 200:
                    results = ""
                    if d["data"]["code"] == "1":
                        for sentence in d["data"]["sentences"]:
                            results += sentence["text"]
                        raw_data["result"]["results"]["content"] = results
                        raw_data["result"]["results"] = json.dumps(raw_data["result"]["results"], ensure_ascii=False)
                        logging.info("Transcription result retrieved successfully {}".format(raw_data))
                        to_kafka.send_kafka(raw_data, logging)
                    elif d["data"]["code"] == "0":
                        # Still being transcribed; put the task back into the data queue.
                        data_queue.put(raw_data)
                        logging.info("Video not transcribed yet, requeued to wait {}-{}".format(raw_data, d))
                    else:
                        # Transcription failed.
                        raw_data["result"]["successCode"] = "0"
                        raw_data["result"]["errorLog"] = response.text
                        raw_data["result"]["results"] = json.dumps(raw_data["result"]["results"], ensure_ascii=False)
                        logging.info("Failed to retrieve transcription result, data {}, API response {}".format(raw_data, d))
                        to_kafka.send_kafka(raw_data, logging)
                else:
                    raw_data["result"]["successCode"] = "0"
                    raw_data["result"]["errorLog"] = response.text
                    raw_data["result"]["results"] = json.dumps(raw_data["result"]["results"], ensure_ascii=False)
                    logging.info("Failed to retrieve transcription result, data {}, API response {}".format(raw_data, d))
                    to_kafka.send_kafka(raw_data, logging)
            else:
                # No pending data; sleep for a while.
                time.sleep(10)
        except Exception:
            raw_data["result"]["successCode"] = "0"
            raw_data["result"]["errorLog"] = traceback.format_exc()
            raw_data["result"]["results"] = ""
            logging.error(traceback.format_exc())
            to_kafka.send_kafka(raw_data, logging)

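# Shape of the record pushed to Kafka (values illustrative; the keys are the
# ones set in upload()/getResult() above): raw_data is the original request
# plus a "result" field such as
#
# "result": {
#     "successCode": "1",   # "0" on failure
#     "errorLog": "",       # raw response text or traceback on failure
#     "results": "{\"content\": \"...transcript...\", \"fileName\": \"example.mp4\", \"fileUrl\": \"...\"}",
#     "dataKey": "<taskId returned by the upload endpoint>"
# }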
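
# How the two workers get started is not shown in this file. A minimal sketch,
# assuming they are meant to run as background daemon threads alongside the
# Django view (illustrative only, not the project's actual startup path):
if __name__ == "__main__":
    import threading

    threading.Thread(target=upload, daemon=True).start()
    threading.Thread(target=getResult, daemon=True).start()
    while True:
        time.sleep(60)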