# -*- coding:UTF-8 -*-
# @Time  : 2021/1/15 11:57
# @File  : crawler_data_id_from_mysql.py
# @email : litao@igengmei.com
# @author : litao


import pymysql

def con_sql(sql):
    """Execute *sql* against the mimas_prod MySQL database and return all rows.

    :param sql: SQL query string to execute (trusted, built in this script —
        do NOT pass user-supplied strings here without parameterization).
    :return: tuple of result rows as returned by ``cursor.fetchall()``.
    :raises pymysql.err.MySQLError: on connection or query failure; the
        connection is closed in all cases.
    """
    # NOTE(review): credentials are hard-coded in source; move them to a
    # config file or environment variables.
    db = pymysql.connect(host='172.16.30.138', port=3306, user='mimas', passwd='GJL3UJe1Ck9ggL6aKnZCq4cRvM',
                         db='mimas_prod')
    try:
        # Cursor context manager closes the cursor even if execute() raises;
        # the finally ensures the connection itself is always released too
        # (the original leaked both on any exception).
        with db.cursor() as cursor:
            cursor.execute(sql)
            result = cursor.fetchall()
    finally:
        db.close()
    return result

import pandas

from crawler.maintenance.func_send_email_with_file import send_file_email

# Column labels for the export. The original code appended this header as a
# literal first DATA row, so the DataFrame got integer column names (0, 1, 2)
# and the real header ended up inside the data area of the sheet.
COLUMNS = ["id", "question_id", "platform"]

sql = "select id,question_id,platform from api_answer where platform in (2,0,9)"
result = con_sql(sql)

# Build the frame directly from the fetched rows with proper column labels;
# index=False keeps the synthetic row index out of the exported file.
data = pandas.DataFrame(list(result), columns=COLUMNS)
data.to_excel("./data_file.xlsx", index=False)

# NOTE(review): sender/recipients are hard-coded test addresses ("test"
# subject/body) — confirm before running in production.
send_file_email("", '', sender="litao@igengmei.com", email_group=["litao@igengmei.com"], email_msg_body_str="test",
                title_str="test", cc_group=["litao@igengmei.com"], file="./data_file.xlsx")