假新闻识别应用 (Fake News Detection Application)
 

# coding:utf8
import traceback

import pandas as pd
import pymysql

# Connection to the MySQL instance that stores the crawled account data.
content_db = pymysql.connect(host='172.26.28.30', user='crawl', passwd='crawl123',
                             db='test', port=3306, charset='utf8mb4',
                             cursorclass=pymysql.cursors.DictCursor)


def to_mysql(sql, values):
    """Execute a single parameterized statement, reconnecting if needed."""
    content_db.ping(reconnect=True)
    cursor = content_db.cursor()
    cursor.execute(sql, values)
    content_db.commit()
    cursor.close()


def write_data_mysql():
    """Read the fake-news spreadsheet and insert each row into TwitterAccount."""
    data = pd.read_excel('假新闻数据输入/test.xlsx', keep_default_na=False)
    try:
        for i in data.index:
            line_value = data.loc[i].values
            # Prepend 0 as a placeholder for the table's first (id) column,
            # then bind every spreadsheet field as a separate parameter.
            line_str = [0] + list(line_value)
            sql = ('insert into TwitterAccount values (' +
                   ','.join(['%s'] * len(line_str)) + ')')
            values = tuple(line_str)
            cursor = content_db.cursor()
            cursor.execute(sql, values)
            content_db.commit()
            cursor.close()
            print('%s rows written to MySQL' % (i + 1))
    except Exception:
        print(traceback.format_exc())
        content_db.rollback()


write_data_mysql()
content_db.close()
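
The insert above only succeeds if the TwitterAccount table already exists with a leading column for the hard-coded 0 plus one column per spreadsheet field. The following is a minimal sketch, not part of the original script, of how such a table could be derived from the spreadsheet's own header; the helper name create_table_from_excel and the blanket TEXT column type are assumptions, not taken from the real schema.

# Sketch only: build a TwitterAccount table compatible with write_data_mysql(),
# assuming every spreadsheet field can be stored as TEXT and the leading 0
# maps to an integer id column.
import pandas as pd
import pymysql


def create_table_from_excel(db, path='假新闻数据输入/test.xlsx'):
    data = pd.read_excel(path, keep_default_na=False)
    # One TEXT column per spreadsheet field, plus the integer id column
    # that the insert fills with 0.
    columns = ', '.join('`%s` TEXT' % name for name in data.columns)
    sql = 'CREATE TABLE IF NOT EXISTS TwitterAccount (id INT, %s)' % columns
    cursor = db.cursor()
    cursor.execute(sql)
    db.commit()
    cursor.close()

As a design note, the per-row commit in write_data_mysql() could also be replaced by collecting all row tuples and issuing a single cursor.executemany() call followed by one commit, which is usually faster for bulk loads.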