import requests
import pymysql
import random
from log_settings import *
import traceback
import logging
from auto_request import get_offline_comment
from auto_request import host, user, db, passwd

auto_reply_url = "http://47.93.162.11:8085/api/v1/reply/create_for_inner"


# For every topic produced by a sock-puppet account (including historical topics),
#
# insert {0,4} comments offline in a single pass.
#
# Comments are taken from the comment library and randomly attributed to sock puppets.
#
# Note: the topic's own author must be prevented from commenting on it.

def get_user_id():
    """Load the sock-puppet account ids from user_id.txt.

    The file is expected to contain one integer id per line.

    Returns:
        list[int]: every id found in the file, in file order.
    """
    # `with` guarantees the file handle is closed (the original leaked it).
    with open("user_id.txt", "r") as user_data:
        return [int(line.strip()) for line in user_data]


def reply(id, content, user_id):
    """Post one comment to a topic via the internal reply API.

    Args:
        id: topic id to attach the comment to.
        content: comment text (taken from the offline comment library).
        user_id: id of the sock-puppet account posting the comment.

    Best-effort: any failure is logged and swallowed so one bad reply
    does not abort the whole batch.
    """
    try:
        post_dict = {
            'user_id': user_id,
            'topic_id': id,
            'content': content
        }
        response = requests.post(url=auto_reply_url,
                                 data=post_dict)

        logging.info("response.text:%s" % response.text)

    # `except Exception` (not bare `except:`) so KeyboardInterrupt /
    # SystemExit still propagate and the job can be stopped cleanly.
    except Exception:

        logging.error("catch exception,logins:%s" % traceback.format_exc())


def get_data(num1, num2):
    """Fetch (topic_id, author_id) rows for sock-puppet topics in an id range.

    Selects online topics with content_level 4 or 5 whose id is in
    (num1, num2] and whose author appears in user_id.txt.

    Args:
        num1: exclusive lower bound on topic id.
        num2: inclusive upper bound on topic id.

    Returns:
        list of (id, user_id) tuples, or None if a database error occurred.
    """
    try:
        pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
        try:
            cursor = pc.cursor()
            # Parameterized query instead of string concatenation:
            # avoids SQL-injection / quoting bugs.
            cursor.execute(
                "SELECT id,user_id FROM topic "
                "WHERE is_online = 1 AND id > %s AND id <= %s "
                "AND content_level IN (4, 5)",
                (num1, num2))
            data = cursor.fetchall()
        finally:
            # Close even when the query raises (the original leaked the
            # connection on any exception before pc.close()).
            pc.close()
        # Set membership is O(1) per row vs O(n) on the original list.
        user_set = set(get_user_id())
        topic_id_list = [row for row in data if int(row[1]) in user_set]
        logging.info("get topic_id_list:%s" % topic_id_list)
        return topic_id_list
    except Exception:
        logging.error("catch exception,get_data:%s" % traceback.format_exc())
        return None


if __name__ == "__main__":

    try:

        time_list = [[0, 100000], [100000, 200000], [200000, 300000], [300000, 400000], [400000, 500000],[500000, 600000],[600000, 700000]]

        user_data = open("user_id.txt", "r")

        user_list = []

        for i in user_data.readlines():
            user_list.append(i)

        for i in time_list:
            topic_id = get_data(i[0], i[1])

            dicts = {}

            if topic_id:

                for id in topic_id:

                    rand_num = random.randint(0, 4)

                    for i in range(rand_num):

                        num = random.randint(0, len(user_list))

                        user_id = user_list[num]

                        if user_id != id[1]:
                            comment_list = get_offline_comment()

                            comment = comment_list[i]

                            reply(id[0], comment, user_id)


    except:

        logging.error("catch exception,main :%s" % traceback.format_exc())