diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..3a8b5b6044330307917c4562b4da3be37cc85ec2
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,4 @@
+crontab:
+	cp crontab.py  /data/log/physical/app/crontab.py && python /data/log/physical/app/crontab.py && python  /data/log/physical/app/crontabs.py
+celery:
+	celery -A physical worker -c 1 -Q vest -l debug
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..42584896473d67b4b5f7ca66569cd0e36ffe01ad
--- /dev/null
+++ b/README.md
@@ -0,0 +1,60 @@
+Strategy-side service (the strategy part, e.g. the ES query logic in ESPerform).
+
+Container run command:
+```bash
+docker run -it -p 9999:9999 -d \
+--volume=/etc/gm-config:/etc/gm-config \
+--volume=/etc/resolv.conf:/etc/resolv.conf \
+--volume=/srv/apps/physical/physical/settings_local.py:/srv/apps/physical/physical/settings_local.py \
+--entrypoint="/bin/bash" physical-test:1.0 "-c" "source ~/.bashrc && celery -A physical worker -Q tapir-alpha --loglevel=DEBUG --maxtasksperchild 500 -c 62"
+```
+
+## CICD
+Add a .drone.yml and configure the pipeline.
+Push to a branch matching like-pre/* to trigger it.
+
+## How to start the services
+
+### Option 1: [program:physical-linucb] consume Kafka data
+A while-True service that consumes Kafka data and maintains each device's tag_map in redis.
+```bash
+source /srv/envs/physical/bin/activate && python manage.py trans2es_data2es_parallel -S linucb
+```
+
+### Option 2: [program:physical] RPC service
+Starts the RPC service called by the backend; the interfaces are all defined by the bind functions under the search folder (a sketch of such an interface follows the command below).
+```bash
+gunicorn gm_rpcd.wsgi:application --workers=1 --worker-class=gevent \
+--worker-connections=1024 \
+--bind=0.0.0.0:9999 \
+--user=gmuser \
+--chdir /srv/apps/physical/ \
+--timeout 600 \
+--log-level=debug \
+--error-logfile=/data/log/physical/app/gunicorn_error.log \
+--access-logfile=/data/log/physical/app/gunicorn_access.log
+```
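+
+For reference, each RPC interface is registered with the `bind` decorator (see search/views/group.py later in this diff). A minimal sketch, with an assumed import path and a hypothetical endpoint name:
+```python
+from gm_rpcd.all import bind  # assumed import path; the real import is not shown in this diff
+
+
+@bind("physical/search/example_endpoint")  # hypothetical endpoint, for illustration only
+def example_endpoint(query="", offset=0, size=10):
+    # Parse the arguments, query ES (e.g. via ESPerform) and return a JSON-serializable dict.
+    return {"hits": [], "total_count": 0}
+```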
+
+### Option 3: [program:physical-celery] celery message-queue service
+Syncs MySQL data to ES; entry point: injection.data_sync.tasks.write_to_es.
+Most of the data-sync work lives in the data pipeline and only a small part is here, so this service can be ignored for now; the current requirement does not need it.
+```bash
+celery -A physical worker -Q tapir-alpha --loglevel=DEBUG --maxtasksperchild 500 -c 62
+```
+
+### Option 4: [program:physical-beat-celery] celery beat scheduler (purpose unknown)
+```bash
+celery -A physical beat
+```
+
+# Automatically-run scripts
+### venus server-side scripts
+Run via celery beat (a hypothetical schedule entry is sketched after this list).
+Configuration file: venus/setting/base.py
+The scripts themselves: venus/community/task/xxx.py
+Currently there are 5 scripts:
+1: save users' face-scan images daily: community.tasks.save_image_task.save_user_face_image
+2: auto-upvote comments daily at 03:00: community.tasks.crontab_reply_vote_task.crontab_reply_vote
+3: push interest tags to users: community.tasks.push_task.push_interest_tag_pictorial_to_user
+4: push experience tags to users: community.tasks.push_task.push_topic_draft_task
+5: push topic drafts: community.tasks.push_task.push_topic_draft_task
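+
+A hypothetical beat schedule entry for the 03:00 comment-upvote task above, assuming venus/setting/base.py uses a standard Celery CELERYBEAT_SCHEDULE dict (the actual config is not part of this diff):
+```python
+from celery.schedules import crontab
+
+CELERYBEAT_SCHEDULE = {
+    "crontab_reply_vote": {
+        # auto-upvote comments every day at 03:00
+        "task": "community.tasks.crontab_reply_vote_task.crontab_reply_vote",
+        "schedule": crontab(hour=3, minute=0),
+    },
+}
+```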
+
+### physical strategy-side scripts
+They run on the Alp-Test-Cos-test001 server; list them with crontab -l.
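+
+The entries are generated by crontab.py (added in this diff), which writes them in crontab format to /data/log/physical/app/conf.txt. Presumably that file is then installed into cron on the server with something like `crontab /data/log/physical/app/conf.txt` (an assumption; the install step is not shown in this diff).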
diff --git a/crontab.py b/crontab.py
new file mode 100644
index 0000000000000000000000000000000000000000..f55e73c1a45ab3bf8bfb50c4f133b65d28bb5671
--- /dev/null
+++ b/crontab.py
@@ -0,0 +1,52 @@
+import random
+
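+# Each entry below is a standard crontab line (schedule followed by the command to run).
+# The script writes them to conf.txt (see the bottom of this file); presumably that file is
+# used to (re)generate the cron table on the strategy server. Commented-out entries are kept
+# for reference.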
+ontime_list = [
+    "0 9 * * *  source /srv/envs/physical/bin/activate  && python /data/log/physical/app/crontab.py",
+    "10 9 * * *  source /srv/envs/physical/bin/activate  && python  /data/log/physical/app/crontabs.py",
+    "0 9 * * * sh /data/log/cybertron/app/statistics_query.sh > /data/log/cybertron/app/statistics_query.log",
+    "54 */2 * * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_click_per_2h_by_post",
+    # "*/5 * * * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m true_click_one",
+    # "02,12,22,32,42,52  * * * *   source /srv/envs/physical/bin/activate  && cd /srv/apps/physical &&  python manage.py  trans2es_mapping2es-m true_click_two",
+    # "00,10,20,30,40,50  * * * *  source /srv/envs/physical/bin/activate  && cd /srv/apps/physical &&  python manage.py  trans2es_mapping2es -m true_click_three",
+    # "02,12,22,32,42,52  * * * *   source /srv/envs/physical/bin/activate  && cd /srv/apps/physical &&  python manage.py  trans2es_mapping2es-m true_click_four",
+    # "06,16,26,36,46,56  * * * *  source /srv/envs/physical/bin/activate  && cd /srv/apps/physical &&  python manage.py  trans2es_mapping2es -m true_click_five",
+    "0 14 * * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_star_urge",
+    "0 10 * * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_urge1",
+    "30 10 * * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_urge2",
+    "0 10 * * 3 source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_lunch_app",
+    "30  10 * * 3 source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_lunch_app2",
+    # "*/5 * * * 1 source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_follow_per_5m_by_followed",
+    "1 */2 * * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_follow_per_2h_by_post_and_regist",
+    "0 9  * * *  source /srv/envs/physical/bin/activate  && cd /srv/apps/physical &&   python manage.py  trans2es_mapping2es -m  get_login_session",
+    "0 0  * * 3  source /srv/envs/physical/bin/activate  && cd /srv/apps/physical &&  python manage.py  trans2es_mapping2es -m  get_user_id",
+    # "0  14,18,22 *  *  *  source /srv/envs/physical/bin/activate  && cd /srv/apps/physical &&  python manage.py  trans2es_mapping2es -m  principal_online_comment1",
+    "25 */2 * * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical &&  python manage.py  trans2es_mapping2es -m auto_reply_per_2h_to_topic",
+    "0 9 * * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_click_per_1d_by_post",
+    "1 9 * * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_follow_per_1d_by_regist",
+    "2 9 * * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_follow_per_1d_by_post",
+    "3 9 * * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_reply_per_1d_to_pictorial",
+    "4 9 * * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m auto_reply_per_1d_to_topic"
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m answer_reply1",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m answer_reply2",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m answer_reply3",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m answer_reply5",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m answer_reply7",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m yesterday_topic_reply",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m before_yesterday_topic_reply",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m three_days_ago_topic_reply",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m five_days_ago_topic_reply",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m seven_days_ago_reply",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m reply_comment1",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m reply_comment3",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m reply_comment2",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m reply_comment5",
+    # "* * * source /srv/envs/physical/bin/activate  && cd /srv/apps/physical && python manage.py  trans2es_mapping2es -m reply_comment7"
+]
+
+
+# For local debugging, write to a local file such as "abc.txt" instead.
+with open("/data/log/physical/app/conf.txt", "w") as data:
+    for var in ontime_list:
+        data.write(var)
+        data.write("\n")
diff --git a/libs/es.py b/libs/es.py
index 094a0e1c67a55324e40741862d52363009c55233..95bcc154cc65a6925b49cad8a0de952ef724aded 100644
--- a/libs/es.py
+++ b/libs/es.py
@@ -188,19 +188,22 @@ class ESPerform(object):
     @classmethod
     def get_search_results(cls, es_cli, sub_index_name, query_body, offset=0, size=10,
                            auto_create_index=False, doc_type="_doc", aggregations_query=False, is_suggest_request=False,
-                           batch_search=False, routing=None):
+                           batch_search=False, routing=None, if_official_index_name=False):
         try:
             assert (es_cli is not None)
 
-            official_index_name = cls.get_official_index_name(sub_index_name, "read")
-            index_exists = es_cli.indices.exists(official_index_name)
-            if not index_exists:
-                if not auto_create_index:
-                    logging.error("index:%s is not existing,get_search_results error!" % official_index_name)
-                    return None
-                else:
-                    cls.create_index(es_cli, sub_index_name)
-                    cls.put_index_mapping(es_cli, sub_index_name)
+            if if_official_index_name:
+                official_index_name = sub_index_name
+            else:
+                official_index_name = cls.get_official_index_name(sub_index_name, "read")
+                index_exists = es_cli.indices.exists(official_index_name)
+                if not index_exists:
+                    if not auto_create_index:
+                        logging.error("index:%s is not existing,get_search_results error!" % official_index_name)
+                        return None
+                    else:
+                        cls.create_index(es_cli, sub_index_name)
+                        cls.put_index_mapping(es_cli, sub_index_name)
 
             logging.info("duan add,query_body:%s" % str(query_body).encode("utf-8"))
 
@@ -401,6 +404,7 @@ class ESPerform(object):
                                     {"term": {"content_level": 6}},
                                     {"term": {"is_online": True}},
                                     {"term": {"is_deleted": False}},
+                                    {"term": {"is_new_topic": False}},
                                     {"terms": {"tag_list": tag_id}}
                                 ]
                             }
@@ -411,9 +415,18 @@ class ESPerform(object):
                     }
                 },
                 "_source": {
-                    "include": ["id", "user_id", "latest_reply_time"]
+                    "include": ["id", "user_id", "latest_reply_time", "topic_ctr_30", "topic_ctr_all", "like_rate_30", "like_rate_all"]
                 },
                 "sort": [
+                    {
+                        "_script": {
+                            "order": "desc",
+                            "script": {
+                                "inline": "10*doc['topic_ctr_30'].value+doc['like_rate_30'].value+2*doc['topic_ctr_all'].value+doc['like_rate_all'].value"
+                            },
+                            "type": "number"
+                        }
+                    },
                     {"latest_reply_time": {"order": "desc"}},
                     {"create_time_val": {"order": "desc"}},
                     {"language_type": {"order": "asc"}},
@@ -439,17 +452,67 @@ class ESPerform(object):
             topic_id_dict = dict()
             for item in result_dict["hits"]:
                 topic_id_dict[str(item["_source"]["id"])] = item["_source"]["user_id"]
+            logging.info("get_tag_topic_list_dict:gyz" + str(q) + str(result_dict))
+            return topic_id_list, topic_id_dict
+        except:
+            logging.error("catch exception,err_msg:%s" % traceback.format_exc())
+            return list(), dict()
 
-            topic_id_dict_latest_reply_time = list()
-            for item in result_dict["hits"]:
-                topic_id_dict_latest_reply_time.append([item["_source"]["id"], item["_source"]["latest_reply_time"]])
-
-            logging.info("topic_id_list:%s" % str(topic_id_dict))
+    @classmethod
+    def get_tag_new_topic_list(cls, tag_id, have_read_topic_id_list, size=10):
+        try:
+            functions_list = list()
+            for id in tag_id:
+                functions_list.append(
+                    {
+                        "filter": {"term": {"tag_list": id}},
+                        "weight": 1
+                    }
+                )
+            q = {
+                "query": {
+                    "function_score": {
+                        "query": {
+                            "bool": {
+                                "must": [
+                                    {"term": {"content_level": 6}},
+                                    {"term": {"is_online": True}},
+                                    {"term": {"is_deleted": False}},
+                                    {"term": {"is_new_topic": True}},
+                                    {"terms": {"tag_list": tag_id}}
+                                ]
+                            }
+                        },
+                        "boost_mode": "sum",
+                        "score_mode": "sum",
+                        "functions": functions_list
+                    }
+                },
+                "_source": {
+                    "include": ["id", "user_id"]
+                },
+                "sort": [
+                    {"latest_reply_time": {"order": "desc"}},
+                    {"create_time_val": {"order": "desc"}},
+                    {"language_type": {"order": "asc"}},
+                ],
+                "collapse": {
+                    "field": "user_id"
+                }
+            }
 
-            logging.info("linucb_tag_id_list_2_same_tagset_ids:" + str(tag_id))
-            logging.info("linucb_tag_id_list_2_same_tagset_ids_2_topics_detail:" + str(topic_id_dict_latest_reply_time))
+            if len(have_read_topic_id_list) > 0:
+                q["query"]["function_score"]["query"]["bool"]["must_not"] = {
+                    "terms": {
+                        "id": have_read_topic_id_list
+                    }
+                }
+            result_dict = ESPerform.get_search_results(ESPerform.get_cli(), sub_index_name="topic-high-star",
+                                                       query_body=q,
+                                                       offset=0, size=size, routing="6")
 
-            return topic_id_list, topic_id_dict
+            topic_id_list = [item["_source"]["id"] for item in result_dict["hits"]]
+            return topic_id_list
         except:
             logging.error("catch exception,err_msg:%s" % traceback.format_exc())
             return list()
@@ -476,9 +539,18 @@ class ESPerform(object):
                     }
                 },
                 "_source": {
-                    "include": ["id"]
+                    "include": ["id", "real_user_activate_time", "create_time", "pictorial_ctr_30", "pictorial_ctr_all", "like_rate_30", "like_rate_all"]
                 },
                 "sort": [
+                    {
+                        "_script": {
+                            "order": "desc",
+                            "script": {
+                                "inline": "10*doc['pictorial_ctr_30'].value+10*doc['like_rate_30'].value+3*doc['pictorial_ctr_all'].value+2*doc['like_rate_all'].value"
+                            },
+                            "type": "number"
+                        }
+                    },
                     {"real_user_activate_time": {"order": "desc"}},
                     {"create_time": {"order": "desc"}},
                 ],
@@ -495,8 +567,7 @@ class ESPerform(object):
                                                        offset=0, size=size)
 
             pictorial_id_list = [item["_source"]["id"] for item in result_dict["hits"]]
-            # logging.info("topic_id_list:%s" % str(topic_id_list))
-            # topic_id_dict = [{str(item["_source"]["id"]):item["_source"]["user_id"]} for item in result_dict["hits"]]
+            logging.info("get_tag_pictorial_id_list:gyz" + str(q) + str(result_dict))
             return pictorial_id_list
         except:
             logging.error("catch exception,err_msg:%s" % traceback.format_exc())
diff --git a/libs/timelib.py b/libs/timelib.py
new file mode 100644
index 0000000000000000000000000000000000000000..5b15d78bb87b674be951236778e8f4572e64a968
--- /dev/null
+++ b/libs/timelib.py
@@ -0,0 +1,35 @@
+import pytz
+import random
+from datetime import datetime, timedelta
+
+NOW = datetime.now()
+
+
+def tzlc(dt, truncate_to_sec=True):
+    if dt is None:
+        return None
+    if truncate_to_sec:
+        dt = dt.replace(microsecond=0)
+    return pytz.timezone('Asia/Shanghai').localize(dt)
+
+
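+# Convert an eta string ("%Y-%m-%d %H:%M:%S", interpreted as Asia/Shanghai local time) into a
+# unix timestamp (seconds since the UTC epoch); returns None when eta is empty.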
+def eta_2_push_time(eta):
+    if eta:
+        eta = datetime.strptime(eta, '%Y-%m-%d %H:%M:%S')
+        eta = tzlc(eta)
+        return int((eta - datetime.fromtimestamp(0, pytz.timezone("UTC"))).total_seconds())
+    else:
+        push_time = None
+    return push_time
+
+
+def get_rand_time(hourlow=0, hourup=13, minutelow=0, minuteup=60):
+    hours = random.randint(hourlow, hourup)
+    minutes = random.randint(minutelow, minuteup)
+    # TODO: redis automatically adds 8 hours, so as a workaround subtract 8 hours here for now
+    now_time = NOW + timedelta(hours=hours, minutes=minutes) - timedelta(hours=8)
+    time = eta_2_push_time(now_time.strftime("%Y-%m-%d %H:%M:%S"))
+    print(datetime.fromtimestamp(time))
+    return datetime.fromtimestamp(time)
+
+
diff --git a/libs/tools.py b/libs/tools.py
index 337cf76ac90225e83241f27befbe4cebab2d1b53..96ce48edabc51da2407645461901aeb95ed91d6c 100644
--- a/libs/tools.py
+++ b/libs/tools.py
@@ -8,6 +8,7 @@ import traceback
 from libs.cache import redis_client
 import json
 import logging
+from django.db import connection
 
 
 def tzlc(dt, truncate_to_sec=True):
@@ -58,3 +59,12 @@ def get_have_read_lin_pictorial_id_list(device_id,user_id,query_type):
         logging.error("catch exception,err_msg:%s" % traceback.format_exc())
         return list()
 
+
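+# Used in search/views/tag.py: when this returns False the caller calls connection.close(),
+# so Django re-establishes the MySQL connection on the next query.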
+def is_connection_usable():
+    """Check whether the current MySQL connection is still usable; if not, the caller closes it."""
+    try:
+        connection.connection.ping()
+        return True
+    except:
+        # logging.error("catch exception,err_msg:%s" % traceback.format_exc())
+        return False
diff --git a/linucb/views/collect_data.py b/linucb/views/collect_data.py
index 75f29933c4d4378918bacdfb3a93daa1ebcceb28..32f697b84aa51199ab75d81e7d4fdf2c5dcb3c12 100644
--- a/linucb/views/collect_data.py
+++ b/linucb/views/collect_data.py
@@ -15,6 +15,7 @@ from libs.es import ESPerform
 from search.utils.common import *
 import libs.tools as Tools
 from trans2es.models.pictorial import CommunityPictorialHomeFeed
+from trans2es.models.portrait_stat import LikeDeviceTagStat
 from libs.error import logging_exception
 import os
 from search.views.tag import get_same_tagset_ids
@@ -49,22 +50,28 @@ class KafkaManager(object):
 class CollectData(object):
 
     def __init__(self):
+        # LinUCB tag parameters
         self.linucb_matrix_redis_prefix = "physical:linucb:device_id:"
-        #废弃
+        self.ctr_linucb_matrix_redis_prefix = "ctr_physical:linucb:device_id:"
+        # LinUCB recommended tags
         self.linucb_recommend_redis_prefix = "physical:linucb:tag_recommend:device_id:"
-        #推荐帖子
+        self.ctr_linucb_recommend_redis_prefix = "ctr_physical:linucb:tag_recommend:device_id:"
+        # recommended topics
         self.linucb_recommend_topic_id_prefix = "physical:linucb:topic_recommend:device_id:"
+        self.ctr_linucb_recommend_topic_id_prefix = "ctr_physical:linucb:topic_recommend:device_id:"
         # recommended pictorials
         self.linucb_recommend_pictorial_id_prefix = "physical:linucb:pictorial_recommend:device_id:"
+        self.ctr_linucb_recommend_pictorial_id_prefix = "ctr_physical:linucb:pictorial_recommend:device_id:"
+
         self.tag_topic_id_redis_prefix = "physical:tag_id:topic_id_list:"
         self.click_recommend_redis_key_prefix = "physical:click_recommend:device_id:"
         # default user feature
-        self.user_feature = [0,1]
+        self.user_feature = [0, 1]
 
 
-    def _get_user_linucb_info(self, device_id):
+    def _get_user_linucb_info(self, device_id, linucb_matrix_prefix):
         try:
-            redis_key = self.linucb_matrix_redis_prefix + str(device_id)
+            redis_key = linucb_matrix_prefix + str(device_id)
 
             # dict: key is the tag ID, value is the 4 LinUCB matrices
             redis_linucb_tag_data_dict = redis_client.hgetall(redis_key)
@@ -75,21 +82,26 @@ class CollectData(object):
             logging.error("catch exception,err_msg:%s" % traceback.format_exc())
             return dict()
 
-    def update_recommend_tag_list(self, device_id,user_feature=None,user_id=None,click_topic_tag_list=None,new_user_click_tag_list = []):
+    def update_recommend_tag_list(self, device_id, user_feature=None, user_id=None, click_topic_tag_list=None,
+                                  new_user_click_tag_list=[], linucb_matrix_prefix=None, linucb_recommend_tag_prefix=None,
+                                  linucb_topic_ids_prefix=None, linucb_pictorial_ids_prefix=None):
         try:
-            redis_linucb_tag_data_dict = self._get_user_linucb_info(device_id)
+            redis_linucb_tag_data_dict = self._get_user_linucb_info(device_id, linucb_matrix_prefix)
             if len(redis_linucb_tag_data_dict) == 0:
                 recommend_tag_list = list(LinUCB.get_default_tag_list(user_id))
-                LinUCB.init_device_id_linucb_info(redis_client, self.linucb_matrix_redis_prefix,device_id,recommend_tag_list)
+                LinUCB.init_device_id_linucb_info(redis_client, linucb_matrix_prefix, device_id, recommend_tag_list)
             else:
                 user_feature = user_feature if user_feature else self.user_feature
-                (recommend_tag_dict,recommend_tag_set) = LinUCB.linucb_recommend_tag(device_id,redis_linucb_tag_data_dict,user_feature,list(redis_linucb_tag_data_dict.keys()))
+                linucb_tag_list = list(redis_linucb_tag_data_dict.keys())
+                (recommend_tag_dict, recommend_tag_set) = LinUCB.linucb_recommend_tag(device_id,
+                                                                                      redis_linucb_tag_data_dict,
+                                                                                      user_feature, linucb_tag_list)
                 recommend_tag_list = list(recommend_tag_dict.keys())
 
             if len(recommend_tag_list) > 0:
-                tag_recommend_redis_key = self.linucb_recommend_redis_prefix + str(device_id)
+                tag_recommend_redis_key = linucb_recommend_tag_prefix + str(device_id)
                 redis_client.set(tag_recommend_redis_key, json.dumps(recommend_tag_list))
-                # redis_client.expire(tag_recommend_redis_key, 7*24*60*60)
+                redis_client.expire(tag_recommend_redis_key, 30*24*60*60)
 
                 have_read_topic_id_list = Tools.get_have_read_topic_id_list(device_id,user_id,TopicPageType.HOME_RECOMMEND)
                 have_read_lin_pictorial_id_list = Tools.get_have_read_lin_pictorial_id_list(device_id, user_id,
@@ -123,8 +135,8 @@ class CollectData(object):
                         # redis_client.hmset(click_recommend_redis_key, click_redis_data_dict)
 
                 tag_id_list = recommend_tag_list[0:20]
-                pictorial_recommend_redis_key = self.linucb_recommend_pictorial_id_prefix + str(device_id)
-                topic_recommend_redis_key = self.linucb_recommend_topic_id_prefix + str(device_id)
+                pictorial_recommend_redis_key = linucb_pictorial_ids_prefix + str(device_id)
+                topic_recommend_redis_key = linucb_topic_ids_prefix + str(device_id)
                 # redis_topic_data_dict = redis_client.hgetall(topic_recommend_redis_key)
                 # redis_topic_list = list()
                 # cursor = -1
@@ -169,19 +181,74 @@ class CollectData(object):
             logging.error("catch exception,err_msg:%s" % traceback.format_exc())
             return False
 
-    def update_user_linucb_tag_info(self, reward, device_id, tag_id, user_feature=None):
+    def update_user_linucb_tag_info(self, reward, device_id, tag_id, user_feature, linucb_matrix_redis_prefix):
         try:
             user_feature = user_feature if user_feature else self.user_feature
-            return LinUCB.update_linucb_info(user_feature, reward, tag_id, device_id,self.linucb_matrix_redis_prefix,redis_client)
+            return LinUCB.update_linucb_info(user_feature, reward, tag_id, device_id, linucb_matrix_redis_prefix, redis_client)
         except:
             logging_exception()
             logging.error("update_user_linucb_tag_info error!")
             return False
 
 
+    def transfer_old_info2ctr_feature_key(self, device_id):
+        try:
+            # migrate an existing user's LinUCB tag parameter info to the ctr-feature strategy
+            ctr_linucb_matrix_redis_prefix_key = self.ctr_linucb_matrix_redis_prefix + str(device_id)
+            linucb_matrix_redis_prefix_key = self.linucb_matrix_redis_prefix + str(device_id)
+            if redis_client.exists(ctr_linucb_matrix_redis_prefix_key):  # if the new strategy already has LinUCB info, no migration is needed
+                return True
+            else:
+                if redis_client.exists(linucb_matrix_redis_prefix_key):
+                    older_device_info = redis_client.hgetall(linucb_matrix_redis_prefix_key)
+                    redis_client.hmset(ctr_linucb_matrix_redis_prefix_key, older_device_info)
+            # migrate the existing user's LinUCB recommended-tag list to the ctr-feature strategy
+            ctr_linucb_recommend_redis_prefix = self.ctr_linucb_recommend_redis_prefix + str(device_id)
+            linucb_recommend_redis_prefix = self.linucb_recommend_redis_prefix + str(device_id)
+            if not redis_client.exists(ctr_linucb_recommend_redis_prefix):
+                if redis_client.exists(linucb_recommend_redis_prefix):
+                    older_device_info = redis_client.get(linucb_recommend_redis_prefix)
+                    redis_client.set(ctr_linucb_recommend_redis_prefix, older_device_info)
+            # migrate the existing user's LinUCB topic recommendation queue to the ctr-feature strategy
+            linucb_recommend_topic_id_prefix = self.linucb_recommend_topic_id_prefix + str(device_id)
+            ctr_linucb_recommend_topic_id_prefix = self.ctr_linucb_recommend_topic_id_prefix + str(device_id)
+            if not redis_client.exists(ctr_linucb_recommend_topic_id_prefix):
+                if redis_client.exists(linucb_recommend_topic_id_prefix):
+                    older_device_info = redis_client.hgetall(linucb_recommend_topic_id_prefix)
+                    redis_client.hmset(ctr_linucb_recommend_topic_id_prefix, older_device_info)
+            # migrate the existing user's LinUCB pictorial recommendation queue to the ctr-feature strategy
+            linucb_recommend_pictorial_id_prefix = self.linucb_recommend_pictorial_id_prefix + str(device_id)
+            ctr_linucb_recommend_pictorial_id_prefix = self.ctr_linucb_recommend_pictorial_id_prefix + str(device_id)
+            if not redis_client.exists(ctr_linucb_recommend_pictorial_id_prefix):
+                if redis_client.exists(linucb_recommend_pictorial_id_prefix):
+                    older_device_info = redis_client.hgetall(linucb_recommend_pictorial_id_prefix)
+                    redis_client.hmset(ctr_linucb_recommend_pictorial_id_prefix, older_device_info)
+            logging.info("transfer_old_info2ctr_feature_key success:" + str(device_id))
+            return True
+        except:
+            logging_exception()
+            logging.error("transfer_old_info2ctr_feature_key error!")
+            return False
+
+    def get_device_tag_ctr(self, device_id, tag_id):
+        # fetch the user's CTR stats for this tag
+        try:
+            device_tag_ctr = LikeDeviceTagStat.objects.using(settings.SLAVE1_DB_NAME).filter(
+                device_id=device_id, tag_id=tag_id).values("tag_ctr_30")
+            if device_tag_ctr:
+                device_tag_ctr_value = device_tag_ctr[0].get("tag_ctr_30", 0.0)
+            else:
+                device_tag_ctr_value = 0.0
+            logging.info("get_device_tag_ctr" + str(device_id) + str(tag_id))
+            return device_tag_ctr_value
+        except:
+            logging_exception()
+            logging.error("get_device_tag_ctr error!")
+            return 0.0
+
     def consume_data_from_kafka(self,topic_name=None):
         try:
-            user_feature = [1,1]
+            user_feature = [1, 1]
 
             kafka_consumer_obj = KafkaManager.get_kafka_consumer_ins(topic_name)
             while True:
@@ -196,6 +263,7 @@ class CollectData(object):
                             if "type" in raw_val_dict and \
                                     (raw_val_dict["type"] in ("on_click_feed_topic_card","on_click_button")):
                                 click_topic_tag_list = list()
+                                device_id = ""
                                 if "on_click_feed_topic_card" == raw_val_dict["type"]:
                                     topic_id =  raw_val_dict["params"]["topic_id"]
                                     device_id = raw_val_dict["device"]["device_id"]
@@ -243,12 +311,34 @@ class CollectData(object):
 
                                 reward = 1 if is_click or is_vote else 0
 
+                                # migrate an existing user's LinUCB info to the ctr-feature strategy
+                                self.transfer_old_info2ctr_feature_key(device_id)
+
+                                # update the LinUCB tag parameters under each strategy
                                 for tag_id in click_topic_tag_list:
-                                    self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature)
+                                    self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature,
+                                                                     self.linucb_matrix_redis_prefix)
+                                    # fetch the CTR info for this tag
+                                    device_tag_ctr = self.get_device_tag_ctr(device_id, tag_id)
+                                    user_feature_ctr = [device_tag_ctr, device_tag_ctr]
+                                    self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature_ctr,
+                                                                     self.ctr_linucb_matrix_redis_prefix)
+
 
                                 # refresh this user's recommended tags; must run after the tag behaviour info has been updated
                                 if len(click_topic_tag_list)>0:
-                                    self.update_recommend_tag_list(device_id, user_feature, user_id,click_topic_tag_list=click_topic_tag_list)
+                                    self.update_recommend_tag_list(device_id, user_feature, user_id,
+                                                                   click_topic_tag_list=click_topic_tag_list,
+                                                                   linucb_matrix_prefix=self.linucb_matrix_redis_prefix,
+                                                                   linucb_recommend_tag_prefix=self.linucb_recommend_redis_prefix,
+                                                                   linucb_topic_ids_prefix=self.linucb_recommend_topic_id_prefix,
+                                                                   linucb_pictorial_ids_prefix=self.linucb_recommend_pictorial_id_prefix)
+                                    self.update_recommend_tag_list(device_id, user_feature, user_id,
+                                                                   click_topic_tag_list=click_topic_tag_list,
+                                                                   linucb_matrix_prefix=self.ctr_linucb_matrix_redis_prefix,
+                                                                   linucb_recommend_tag_prefix=self.ctr_linucb_recommend_redis_prefix,
+                                                                   linucb_topic_ids_prefix=self.ctr_linucb_recommend_topic_id_prefix,
+                                                                   linucb_pictorial_ids_prefix=self.ctr_linucb_recommend_pictorial_id_prefix)
                             # elif "type" in raw_val_dict and "page_precise_exposure" == raw_val_dict["type"]:
                             #     if isinstance(raw_val_dict["params"]["exposure_cards"],str):
                             #         exposure_cards_list = json.loads(raw_val_dict["params"]["exposure_cards"])
@@ -308,11 +398,12 @@ class CollectData(object):
                                     tagid_list = raw_val_dict["params"]["tagid_list"]
                                 else:
                                     tagid_list = list()
-                                logging.warning("unknown type msg:%s" % raw_val_dict.get("type", "missing type"))
-
                                 device_id = raw_val_dict["device"]["device_id"]
                                 user_id = raw_val_dict["user_id"] if "user_id" in raw_val_dict else None
 
+                                logging.info("interest_choice_click_next type:%s, device_id:%s, tag_ids:%s" % (
+                                raw_val_dict.get("type", "missing type"), str(device_id), str(tagid_list)))
+
                                 # if len(exposure_sql_query_results)>0:
                                 if len(tagid_list) > 0:
                                    tag_query_results = list(Tag.objects.using(settings.SLAVE1_DB_NAME).filter(
@@ -322,14 +413,32 @@ class CollectData(object):
                                    is_vote = 0
 
                                    reward = 1 if is_click or is_vote else 0
+
+                                   # migrate an existing user's LinUCB info to the ctr-feature strategy
+                                   self.transfer_old_info2ctr_feature_key(device_id)
+
                                    for tag_id in tag_query_results:
-                                       self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature)
+                                       self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature,
+                                                                        self.linucb_matrix_redis_prefix)
+                                       # fetch the CTR info for this tag
+                                       device_tag_ctr = self.get_device_tag_ctr(device_id, tag_id)
+                                       user_feature_ctr = [device_tag_ctr, device_tag_ctr]
+                                       self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature_ctr,
+                                                                        self.ctr_linucb_matrix_redis_prefix)
 
                                    # refresh this user's recommended tags; must run after the tag behaviour info has been updated
                                    self.update_recommend_tag_list(device_id, user_feature, user_id,
-                                                                  new_user_click_tag_list=tag_query_results)
-                                else:
-                                    logging.warning("unknown type msg:%s" % raw_val_dict.get("type", "missing type"))
+                                                                  new_user_click_tag_list=tag_query_results,
+                                                                  linucb_matrix_prefix=self.linucb_matrix_redis_prefix,
+                                                                  linucb_recommend_tag_prefix=self.linucb_recommend_redis_prefix,
+                                                                  linucb_topic_ids_prefix=self.linucb_recommend_topic_id_prefix,
+                                                                  linucb_pictorial_ids_prefix=self.linucb_recommend_pictorial_id_prefix)
+                                   self.update_recommend_tag_list(device_id, user_feature, user_id,
+                                                                  new_user_click_tag_list=tag_query_results,
+                                                                  linucb_matrix_prefix=self.ctr_linucb_matrix_redis_prefix,
+                                                                  linucb_recommend_tag_prefix=self.ctr_linucb_recommend_redis_prefix,
+                                                                  linucb_topic_ids_prefix=self.ctr_linucb_recommend_topic_id_prefix,
+                                                                  linucb_pictorial_ids_prefix=self.ctr_linucb_recommend_pictorial_id_prefix)
                             # clicks on personalized push are fed into LinUCB
                             elif "type" in raw_val_dict and raw_val_dict["type"] == "on_click_push":
                                 # already filtered by the backend; these tag_ids are the topic/pictorial edit tags
@@ -354,10 +463,28 @@ class CollectData(object):
                                     is_click = 1
                                     is_vote = 0
                                     reward = 1 if is_click or is_vote else 0
+
+                                    # migrate an existing user's LinUCB info to the ctr-feature strategy
+                                    self.transfer_old_info2ctr_feature_key(device_id)
+
                                     for tag_id in tag_query_results:
-                                        self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature)
+                                        self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature,
+                                                                         self.linucb_matrix_redis_prefix)
+                                        # fetch the CTR info for this tag
+                                        device_tag_ctr = self.get_device_tag_ctr(device_id, tag_id)
+                                        user_feature_ctr = [device_tag_ctr, device_tag_ctr]
+                                        self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature_ctr,
+                                                                         self.ctr_linucb_matrix_redis_prefix)
                                     self.update_recommend_tag_list(device_id, user_feature, user_id,
-                                                                   new_user_click_tag_list=tag_query_results)
+                                                                   linucb_matrix_prefix=self.linucb_matrix_redis_prefix,
+                                                                   linucb_recommend_tag_prefix=self.linucb_recommend_redis_prefix,
+                                                                   linucb_topic_ids_prefix=self.linucb_recommend_topic_id_prefix,
+                                                                   linucb_pictorial_ids_prefix=self.linucb_recommend_pictorial_id_prefix)
+                                    self.update_recommend_tag_list(device_id, user_feature, user_id,
+                                                                   linucb_matrix_prefix=self.ctr_linucb_matrix_redis_prefix,
+                                                                   linucb_recommend_tag_prefix=self.ctr_linucb_recommend_redis_prefix,
+                                                                   linucb_topic_ids_prefix=self.ctr_linucb_recommend_topic_id_prefix,
+                                                                   linucb_pictorial_ids_prefix=self.ctr_linucb_recommend_pictorial_id_prefix)
                                     logging.info("on_click_push topic type:%s, device_id:%s, tag_ids:%s" %
                                                  (raw_val_dict.get("type", "missing type"), str(device_id),
                                                   str(tagid_list)))
@@ -379,15 +506,77 @@ class CollectData(object):
                                     is_vote = 0
 
                                     reward = 1 if is_click or is_vote else 0
+                                    # migrate an existing user's LinUCB info to the ctr-feature strategy
+                                    self.transfer_old_info2ctr_feature_key(device_id)
+
                                     for i in range(5):
                                         for tag_id in tag_query_results_multi:
-                                            self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature)
+                                            self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature,
+                                                                             self.linucb_matrix_redis_prefix)
+                                            # fetch the CTR info for this tag
+                                            device_tag_ctr = self.get_device_tag_ctr(device_id, tag_id)
+                                            user_feature_ctr = [device_tag_ctr, device_tag_ctr]
+                                            self.update_user_linucb_tag_info(reward, device_id, tag_id,
+                                                                             user_feature_ctr,
+                                                                             self.ctr_linucb_matrix_redis_prefix)
 
                                     # refresh this user's recommended tags; must run after the tag behaviour info has been updated
                                     self.update_recommend_tag_list(device_id, user_feature, user_id,
-                                                                   new_user_click_tag_list=tag_query_results)
+                                                                   linucb_matrix_prefix=self.linucb_matrix_redis_prefix,
+                                                                   linucb_recommend_tag_prefix=self.linucb_recommend_redis_prefix,
+                                                                   linucb_topic_ids_prefix=self.linucb_recommend_topic_id_prefix,
+                                                                   linucb_pictorial_ids_prefix=self.linucb_recommend_pictorial_id_prefix)
+                                    self.update_recommend_tag_list(device_id, user_feature, user_id,
+                                                                   linucb_matrix_prefix=self.ctr_linucb_matrix_redis_prefix,
+                                                                   linucb_recommend_tag_prefix=self.ctr_linucb_recommend_redis_prefix,
+                                                                   linucb_topic_ids_prefix=self.ctr_linucb_recommend_topic_id_prefix,
+                                                                   linucb_pictorial_ids_prefix=self.ctr_linucb_recommend_pictorial_id_prefix)
                                     logging.info("skin_check topic type:%s, device_id:%s, tag_query_results:%s" %
-                                                 (str(data['SYS']['action']), str(device_id), str(tag_query_results)))
+                                                 (str(data['SYS']['action']), str(device_id), str(tag_query_results_multi)))
+                            # brand survey answers are fed into LinUCB
+                            elif 'SYS' in data and 'APP' in data and 'action' in data['SYS'] and data['SYS']['action'] == "venus/community/survey_question/submit":
+                                device_id = data['SYS']['cl_id']
+                                tagid_list = list(data['APP'].get('answer_tag', []))
+                                user_id = data['SYS'].get('user_id', None)
+                                logging.info("survey_question type:%s, device_id:%s, answer_tag:%s" %
+                                             (str(data['SYS']['action']), str(device_id), str(tagid_list)))
+                                if len(tagid_list) > 0:
+                                    tag_query_results = list(Tag.objects.using(settings.SLAVE1_DB_NAME).filter(
+                                        id__in=tagid_list, is_online=True, is_deleted=False,
+                                        is_category=False).values_list("id", flat=True))
+                                    tag_query_results_multi = [i for i in tagid_list if i in tag_query_results]
+                                    is_click = 1
+                                    is_vote = 0
+
+                                    reward = 1 if is_click or is_vote else 0
+
+                                    # migrate an existing user's LinUCB info to the ctr-feature strategy
+                                    self.transfer_old_info2ctr_feature_key(device_id)
+
+                                    for i in range(5):
+                                        for tag_id in tag_query_results_multi:
+                                            self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature,
+                                                                             self.linucb_matrix_redis_prefix)
+                                            # fetch the CTR info for this tag
+                                            device_tag_ctr = self.get_device_tag_ctr(device_id, tag_id)
+                                            user_feature_ctr = [device_tag_ctr, device_tag_ctr]
+                                            self.update_user_linucb_tag_info(reward, device_id, tag_id,
+                                                                             user_feature_ctr,
+                                                                             self.ctr_linucb_matrix_redis_prefix)
+
+                                    # refresh this user's recommended tags; must run after the tag behaviour info has been updated
+                                    self.update_recommend_tag_list(device_id, user_feature, user_id,
+                                                                   linucb_matrix_prefix=self.linucb_matrix_redis_prefix,
+                                                                   linucb_recommend_tag_prefix=self.linucb_recommend_redis_prefix,
+                                                                   linucb_topic_ids_prefix=self.linucb_recommend_topic_id_prefix,
+                                                                   linucb_pictorial_ids_prefix=self.linucb_recommend_pictorial_id_prefix)
+                                    self.update_recommend_tag_list(device_id, user_feature, user_id,
+                                                                   linucb_matrix_prefix=self.ctr_linucb_matrix_redis_prefix,
+                                                                   linucb_recommend_tag_prefix=self.ctr_linucb_recommend_redis_prefix,
+                                                                   linucb_topic_ids_prefix=self.ctr_linucb_recommend_topic_id_prefix,
+                                                                   linucb_pictorial_ids_prefix=self.ctr_linucb_recommend_pictorial_id_prefix)
+                                    logging.info("survey_question type:%s, device_id:%s, tagid_list:%s" %
+                                                 (str(data['SYS']['action']), str(device_id), str(tag_query_results_multi)))
                             else:
                                 if msg:
                                   logging.warning("unknown type msg:%s" % raw_val_dict.get("type", "missing type"))
@@ -396,7 +585,6 @@ class CollectData(object):
                             logging.error("catch exception,err_msg:%s" % traceback.format_exc())
                             # e.g. if the database connection is broken, force-exit so supervisor restarts the linucb process
                             os._exit(0)
-
             return True
         except:
             logging_exception()
diff --git a/physical/__init__.py b/physical/__init__.py
index da18328164627724871ec05dbdc0a3ab2037f012..8bc29e6f6b14816024969b2402cbf4c32d6136e5 100644
--- a/physical/__init__.py
+++ b/physical/__init__.py
@@ -2,6 +2,6 @@ from __future__ import unicode_literals, absolute_import, print_function
 
 import pymysql
 from _celery import app as celery_app
-
+from vest import *
 pymysql.install_as_MySQLdb()
 #__all__ = ('celery_app',)
diff --git a/physical/celery_task_router.py b/physical/celery_task_router.py
index 3bc1d6c011e2eec5191da6c6f3d1e15824546553..2ebd74a87f6ffd4fc05927716606febcee3f9213 100644
--- a/physical/celery_task_router.py
+++ b/physical/celery_task_router.py
@@ -9,6 +9,13 @@ class CeleryTaskRouter(object):
     queue_task_map = {
         "tapir-alpha": [
             'injection.data_sync.tasks.write_to_es',
+        ],
+        "vest": [
+            'vest.request.auto_request.click',
+            'vest.request.auto_request.reply',
+            'vest.request.auto_request.follow',
+            'vest.request.auto_request.reply2',
+            'vest.request.auto_request.pictorial_reply'
         ]
     }
 
diff --git a/search/utils/topic.py b/search/utils/topic.py
index e0f3f8f88be6b90e7a596ae2242931d96f190bef..3b9cc739f6d747c5170ba409f61dc97a848d9225 100644
--- a/search/utils/topic.py
+++ b/search/utils/topic.py
@@ -12,6 +12,7 @@ from search.utils.common import *
 from trans2es.models.pictorial import PictorialTopics
 from libs.cache import redis_client
 
+
 class TopicUtils(object):
 
     @classmethod
@@ -320,7 +321,7 @@ class TopicUtils(object):
                     'type': 'best_fields',
                     'operator': 'or',
                     'fields': ["content", "tag_name_list"],
-                    "analyzer":"gm_default_index"
+                    "analyzer": "gm_default_index"
                 }
                 query_function_score["boost_mode"] = "replace"
 
@@ -335,13 +336,14 @@ class TopicUtils(object):
                     {"range": {"content_level": {"gte": 3, "lte": 6}}}
                 )
 
-                collection_redis_key_name="physical:official_tag_name_set"
-                collect_tag_name_set=set()
+                collection_redis_key_name = "physical:official_tag_name_set"
+                collect_tag_name_set = set()
                 body = {
                     'text': query,
                     'analyzer': "gm_default_search"
                 }
-                analyze_res = ESPerform.get_analyze_results(es_cli=ESPerform.get_cli(), sub_index_name="topic",query_body=body)
+                analyze_res = ESPerform.get_analyze_results(es_cli=ESPerform.get_cli(), sub_index_name="topic",
+                                                            query_body=body)
                 for item in analyze_res["tokens"]:
                     token_word = item["token"]
                     # is_member = redis_client.sismember(collection_redis_key_name, token_word)
@@ -357,10 +359,10 @@ class TopicUtils(object):
                 }
                 functions_list += [
                     {
-                        "weight":10,
-                        "filter":{
-                            "term":{
-                                "language_type":1
+                        "weight": 10,
+                        "filter": {
+                            "term": {
+                                "language_type": 1
                             }
                         }
                     },
@@ -434,9 +436,9 @@ class TopicUtils(object):
                     topic_id_list.append(item["_source"]["id"])
 
             if has_score:
-                return topic_id_list,ret_data_list,topic_score_list
+                return topic_id_list, ret_data_list, topic_score_list
             else:
-                return topic_id_list,ret_data_list
+                return topic_id_list, ret_data_list
         except:
             logging.error("catch exception,err_msg:%s" % traceback.format_exc())
             if has_score:
@@ -444,7 +446,6 @@ class TopicUtils(object):
             else:
                 return list(), list()
 
-
     @classmethod
     def userful_tag_topic_list(cls, user_id, have_read_topic_list, size,
                                index_type="topic-high-star", routing=None, useful_tag_list=[]):
@@ -528,10 +529,9 @@ class TopicUtils(object):
             logging.error("catch exception,err_msg:%s" % traceback.format_exc())
             return []
 
-
     @classmethod
     def get_linucb_topic_info_for_debug(cls, size,
-                               index_type="topic-high-star", routing=None, linucb_topic_list=[]):
+                                        index_type="topic-high-star", routing=None, linucb_topic_list=[]):
         try:
             es_cli_obj = ESPerform.get_cli()
             if len(linucb_topic_list) == 0:
@@ -546,22 +546,22 @@ class TopicUtils(object):
                     }
                 }
                 q["_source"] = {
-                    "includes": ["id","content_level","edit_tag_list"]
+                    "includes": ["id", "content_level", "edit_tag_list"]
                 }
                 result_dict = ESPerform.get_search_results(es_cli_obj, sub_index_name=index_type, query_body=q,
                                                            size=size,
                                                            routing="6")
                 topic_id_dict = dict()
                 for item in result_dict["hits"]:
-                    topic_id_dict.update({item["_source"]["id"]:{"content_level":item["_source"]["content_level"],"edit_tag_list":item["_source"]["edit_tag_list"]}})
+                    topic_id_dict.update({item["_source"]["id"]: {"content_level": item["_source"]["content_level"],
+                                                                  "edit_tag_list": item["_source"]["edit_tag_list"]}})
                 return topic_id_dict
         except:
             logging.error("catch exception,err_msg:%s" % traceback.format_exc())
             return []
 
-
     @classmethod
-    def get_linucb_pictorial_info_for_debug(cls,size,linucb_pictorial_list = []):
+    def get_linucb_pictorial_info_for_debug(cls, size, linucb_pictorial_list=[]):
         try:
             q = {
                 "query": {
@@ -593,8 +593,6 @@ class TopicUtils(object):
             logging.error("catch exception,err_msg:%s" % traceback.format_exc())
             return list()
 
-
-
     @classmethod
     def get_topic_detail_recommend_list(cls, user_id, topic_id, topic_tag_list, topic_pictorial_id, topic_user_id,
                                         filter_topic_user_id, have_read_topic_list, offset, size, es_cli_obj=None,
@@ -794,9 +792,9 @@ class TopicUtils(object):
             # "includes": ["id", "pictorial_id", "user_id", "_score", "create_time", "content_level"]
 
             q['sort'] = [
-                     {"latest_reply_time": {"order": "desc"}},
-            #         {"create_time": {"order": "desc"}}
-                 ]
+                {"latest_reply_time": {"order": "desc"}},
+                #         {"create_time": {"order": "desc"}}
+            ]
 
             result_dict = ESPerform.get_search_results(es_cli_obj, sub_index_name=index_type, query_body=q, size=size,
                                                        routing=routing)
@@ -1294,7 +1292,20 @@ class TopicUtils(object):
                         "query": v
                     }
                 })
+            elif k == "is_shadow":
 
+                if v == 0:
+                    f.append({
+                        "term": {
+                            "is_shadow": False
+                        }
+                    })
+                else:
+                    f.append({
+                        "term": {
+                            "is_shadow": True
+                        }
+                    })
             elif k == "virtual_content_level":
                 f.append({
                     "match": {k: v}
@@ -1328,7 +1339,19 @@ class TopicUtils(object):
                             }
                         }
                     })
+            elif k == "is_kol":
+                f.append({
+                    "term": {
+                        "user_is_kol": True
+                    }
+                })
 
+            elif k == "is_edit":
+                f.append({
+                    "term": {
+                        "user_is_edit": True
+                    }
+                })
             # elif k == "pictorial_id":
             #     f.append({
             #         "nested": {
diff --git a/search/views/group.py b/search/views/group.py
index fb979ce0f8c56edd7f40e67f84b788d25c9b9fe0..88cc973df45122e91af1f6f5300adbbdfa959d62 100644
--- a/search/views/group.py
+++ b/search/views/group.py
@@ -303,7 +303,7 @@ def pictorial_topic(topic_id=-1, offset=0, size=10):
 
 
 @bind("physical/search/pictorial_topic_sort")
-def pictorial_topic_sort(pictorial_id=-1, offset=0, size=10, sort_type=PICTORIAL_TOPIC_SORT.HOT):
+def pictorial_topic_sort(pictorial_id=-1, offset=0, size=10, sort_type=PICTORIAL_TOPIC_SORT.HOT, user_id=-1):
     """
     :remark Pictorial topic sort, popularity section
     Popularity is sorted by vote count in descending order; ties are ordered by picture-vote update time, oldest first
@@ -420,7 +420,8 @@ def pictorial_topic_sort(pictorial_id=-1, offset=0, size=10, sort_type=PICTORIAL
         pict_pictorial_ids_list = []
         # 获取es链接对象
         es_cli_obj = ESPerform.get_cli()
-        result_dict = ESPerform.get_search_results(es_cli_obj, "topic", q, offset, size)
+        result_dict = ESPerform.get_search_results(es_cli_obj, "topic", query_body=q, offset=offset, size=size)
+        # result_dict = ESPerform.get_search_results(es_cli=es_cli_obj, sub_index_name="mv-alpha-topic-prod-190905001", query_body=q, offset=offset, size=size,if_official_index_name=True)
 
         # logging.info("get pictorial_topic_sort res:%s" % result_dict)
 
diff --git a/search/views/tag.py b/search/views/tag.py
index 6782e93c79bfb2a8346525b48b6c36cb11d9a594..342be413fea2f1f98c8c1d85b6d38ce930bf8daa 100644
--- a/search/views/tag.py
+++ b/search/views/tag.py
@@ -11,9 +11,11 @@ from libs.cache import redis_client
 from search.utils.common import *
 from trans2es.models.tag import TopicTag, AccountUserTag, CommunityTagFollow, Tag
 import time
+from libs.tools import is_connection_usable
 from trans2es.models.tag import CommunityTagSetRelation
 from django.conf import settings
 from libs.error import logging_exception
+from django.db import connection
 
 
 def get_highlight(fields=[]):
@@ -235,27 +237,26 @@ def get_same_tagset_ids(tag_list):
     :param tag_list:
     :return:
     """
+    all_tag = list()
+    if isinstance(tag_list, int):
+        all_tag.append(tag_list)
+    else:
+        all_tag = tag_list
     try:
-        all_tag = list()
-        if isinstance(tag_list, int):
-            all_tag.append(tag_list)
-
-        else:
-            all_tag = tag_list
-
+        if not is_connection_usable():
+            connection.close()
+            logging.error("unable mysql connection and close")
         tag_set_list_id = list(
-            CommunityTagSetRelation.objects.filter(tag_id__in=all_tag, is_deleted=False).values_list("tag_set_id",
+            CommunityTagSetRelation.objects.using(settings.SLAVE1_DB_NAME).filter(tag_id__in=all_tag, is_deleted=False).values_list("tag_set_id",
                                                                                                      flat=True))
         tag_ids = list(
-            CommunityTagSetRelation.objects.filter(tag_set_id__in=tag_set_list_id, is_deleted=False).values_list(
+            CommunityTagSetRelation.objects.using(settings.SLAVE1_DB_NAME).filter(tag_set_id__in=tag_set_list_id, is_deleted=False).values_list(
                 "tag_id",
                 flat=True))
-
         all_tag.extend(tag_ids)
         logging.info("get_same_tagset_ids:%s" % str(all_tag))
-
         return list(set(all_tag))
     except:
         logging_exception()
         logging.error("catch exception,err_msg:%s" % traceback.format_exc())
-        return []
+        return all_tag
diff --git a/search/views/topic.py b/search/views/topic.py
index 574644c783cf67124b21d42242539a31a778b110..85e9270bea093c93e91cc3da831b00b434c687d3 100644
--- a/search/views/topic.py
+++ b/search/views/topic.py
@@ -8,7 +8,6 @@ import traceback
 import json
 from search.utils.topic import TopicUtils
 from trans2es.models.topic import TopicHomeRecommend
-from libs.es import ESPerform
 from libs.cache import redis_client
 from search.utils.common import *
 from libs.es import ESPerform
@@ -17,8 +16,8 @@ from libs.tools import get_have_read_lin_pictorial_id_list
 import datetime
 from trans2es.models.tag import Tag
 from search.views.tag import get_same_tagset_ids
-from linucb.views.collect_data import CollectData
 from linucb.views.linucb import LinUCB
+from alpha_types.physical.enum import STRATEGY_TYPE
 
 
 def get_discover_page_topic_ids(user_id, device_id, size, query_type=TopicPageType.FIND_PAGE):
@@ -54,9 +53,9 @@ def get_discover_page_topic_ids(user_id, device_id, size, query_type=TopicPageTy
         return []
 
 
-def get_home_recommend_topic_ids(user_id, device_id, tag_id, offset, size, query=None,
+def get_home_recommend_topic_ids(user_id, device_id, tag_id, offset, underexposure_lin_topic_count=0, size=0, query=None,
                                  query_type=TopicPageType.HOME_RECOMMEND, promote_topic_list=[], disable_collpase=False,
-                                 usefulrecall=-1, useful_tag_list=[], has_score= False,is_gray=-1):
+                                 usefulrecall=-1, useful_tag_list=[], has_score=False, gray_list=[1]):
     try:
         topic_star_routing = "6"
         index_type = "topic-high-star"
@@ -109,6 +108,7 @@ def get_home_recommend_topic_ids(user_id, device_id, tag_id, offset, size, query
         recommend_topic_user_list = list()
         attention_tag_list = list()
         recommend_topic_list = list()
+        underexposure_lin_topic_ids = list()
         if query is None:
             if user_id != -1:
                 # recall based on useful tags
@@ -124,8 +124,30 @@ def get_home_recommend_topic_ids(user_id, device_id, tag_id, offset, size, query
                     have_read_topic_id_list.extend(useful_topic_id_list)
 
                 # topics recommended by linucb
-                topic_recommend_redis_key = "physical:linucb:topic_recommend:device_id:" + str(device_id)
-
+                linucb_recommend_tags = list()
+                if STRATEGY_TYPE.CTR_GRAY in gray_list:
+                    topic_recommend_redis_key = "ctr_physical:linucb:topic_recommend:device_id:" + str(device_id)
+                    linucb_recommend_tags_key = "ctr_physical:linucb:tag_recommend:device_id:" + str(device_id)
+                    linucb_recommend_tags = redis_client.get(linucb_recommend_tags_key)
+                    if linucb_recommend_tags:
+                        linucb_recommend_tags = json.loads(linucb_recommend_tags)
+                else:
+                    topic_recommend_redis_key = "physical:linucb:topic_recommend:device_id:" + str(device_id)
+                    linucb_recommend_tags_key = "physical:linucb:tag_recommend:device_id:" + str(device_id)
+                    linucb_recommend_tags = redis_client.get(linucb_recommend_tags_key)
+                    if linucb_recommend_tags:
+                        linucb_recommend_tags = json.loads(linucb_recommend_tags)
+
+                # new (under-exposed) topics recommended by linucb
+                if linucb_recommend_tags:
+                    linucb_recommend_tags_set_tags = get_same_tagset_ids(linucb_recommend_tags)
+                    if underexposure_lin_topic_count:
+                        underexposure_lin_topic_ids = ESPerform.get_tag_new_topic_list(linucb_recommend_tags_set_tags, have_read_topic_id_list, underexposure_lin_topic_count)
+                        size = size - len(underexposure_lin_topic_ids)
+                        have_read_topic_id_list.extend(underexposure_lin_topic_ids)
+                        redis_client.publish("new_topic_impression", json.dumps(underexposure_lin_topic_ids))
+
+                # previously exposed topics recommended by linucb
                 recommend_topic_dict = redis_client.hgetall(topic_recommend_redis_key)
                 linucb_recommend_topic_id_list = list()
                 recommend_topic_list = list()
@@ -220,43 +242,46 @@ def get_home_recommend_topic_ids(user_id, device_id, tag_id, offset, size, query
 
         # expiry time: 3 a.m. of the next day
         # if redis_client.ttl(redis_key)<0:
-        today = datetime.datetime.strptime(str(datetime.date.today()), "%Y-%m-%d")
-        if is_gray==0:
-            end_day = today + datetime.timedelta(days=10)
-        else:
-            end_day = today + datetime.timedelta(days=1)
-        nowTime = datetime.datetime.now()
-        expire_time = (end_day - nowTime).seconds + 3 * 60 * 60
-        redis_client.expire(redis_key, expire_time)
+        # today = datetime.datetime.strptime(str(datetime.date.today()), "%Y-%m-%d")
+        # if STRATEGY_TYPE.READ_GRAY in gray_list:
+        #     end_day = today + datetime.timedelta(days=10)
+        # else:
+        #     end_day = today + datetime.timedelta(days=1)
+        # nowTime = datetime.datetime.now()
+        # expire_time = (end_day - nowTime).seconds + 3 * 60 * 60
+        # redis_client.expire(redis_key, expire_time)
 
         ret_list = rank_topic_id_list if query is None else ret_data_list
         if usefulrecall != -1:
             if has_score:
-                return recommend_topic_list, ret_list, useful_topic_id_list, rank_topic_score
+                return underexposure_lin_topic_ids, recommend_topic_list, ret_list, useful_topic_id_list, rank_topic_score
             else:
-                return recommend_topic_list, ret_list, useful_topic_id_list
+                return underexposure_lin_topic_ids, recommend_topic_list, ret_list, useful_topic_id_list
         else:
             if has_score:
-                return recommend_topic_list, ret_list, rank_topic_score
+                return underexposure_lin_topic_ids, recommend_topic_list, ret_list, rank_topic_score
             else:
-                return recommend_topic_list, ret_list
+                return underexposure_lin_topic_ids, recommend_topic_list, ret_list
     except:
         logging.error("catch exception,err_msg:%s" % traceback.format_exc())
         if usefulrecall != -1:
             if has_score:
-                return [], [], [], []
+                return [], [], [], [], []
             else:
-                return [], [], []
+                return [], [], [], []
         else:
             if has_score:
-                return [], [], []
+                return [], [], [], []
             else:
-                return [], []
+                return [], [], []
 
 
-def get_home_recommend_pictorial_ids(user_id=-1, device_id="", size=4):
+def get_home_recommend_pictorial_ids(user_id=-1, device_id="", size=4, gray_list=[1]):
     try:
-        pictorial_recommend_redis_key = "physical:linucb:pictorial_recommend:device_id:" + str(device_id)
+        if STRATEGY_TYPE.CTR_GRAY in gray_list:
+            pictorial_recommend_redis_key = "ctr_physical:linucb:pictorial_recommend:device_id:" + str(device_id)
+        else:
+            pictorial_recommend_redis_key = "physical:linucb:pictorial_recommend:device_id:" + str(device_id)
         have_read_lin_pictorial_id_list = get_have_read_lin_pictorial_id_list(device_id, user_id,
                                                                               TopicPageType.HOME_RECOMMEND)
         pictorial_recommend_dict = redis_client.hgetall(pictorial_recommend_redis_key)
@@ -278,9 +303,10 @@ def get_home_recommend_pictorial_ids(user_id=-1, device_id="", size=4):
             redis_field_list = 'have_read_pictorial_list'
             redis_client.hset(redis_key, redis_field_list, have_read_lin_pictorial_id_list)
             today = datetime.datetime.strptime(str(datetime.date.today()), "%Y-%m-%d")
-            tomorrow = today + datetime.timedelta(days=1)
+            # read history for pictorials is now kept for 10 days
+            end_day = today + datetime.timedelta(days=10)
             nowTime = datetime.datetime.now()
-            expire_time = (tomorrow - nowTime).seconds + 3 * 60 * 60
+            expire_time = (end_day - nowTime).seconds + 3 * 60 * 60
             redis_client.expire(redis_key, expire_time)
             return recommend_pictorial_ids
         else:
@@ -290,18 +316,22 @@ def get_home_recommend_pictorial_ids(user_id=-1, device_id="", size=4):
         return []
 
 
-def get_topic_and_pictorial_detail_for_debug(device_id = "",linucb_topic_id_list = [],rank_topic_id_list = [],linucb_pictorial_id_list = [],rank_topic_score = []):
+def get_topic_and_pictorial_detail_for_debug(device_id="", underexposure_lin_topic_ids=[], linucb_topic_id_list=[],
+                                                 rank_topic_id_list=[], linucb_pictorial_id_list=[],
+                                                 rank_topic_score=[], gray_list=[1]):
     try:
-        linucb_tag_dict = get_device_linucb_tag(device_id,size=20,has_score=True)
+        linucb_tag_dict = get_device_linucb_tag(device_id, size=20, has_score=True, gray_list=gray_list)
         linucb_topic_results = dict()
         linucb_pictorial_results = dict()
         rank_topic_results = dict()
-        if len(linucb_topic_id_list) >0:
-            linucb_topic_dict = TopicUtils.get_linucb_topic_info_for_debug(len(linucb_topic_id_list),linucb_topic_list=linucb_topic_id_list)
-            linucb_topic_results = dict()
+        linucb_topic_id_list_all = linucb_topic_id_list + underexposure_lin_topic_ids
+        underexposure_lin_topic_results = dict()
+        exposure_lin_topic_results = dict()
+        if len(linucb_topic_id_list_all) > 0:
+            linucb_topic_dict = TopicUtils.get_linucb_topic_info_for_debug(len(linucb_topic_id_list_all),
+                                                                           linucb_topic_list=linucb_topic_id_list_all)
             if len(linucb_topic_dict) > 0:
-                # num = 1
-                for topic_id in linucb_topic_id_list:
+                for topic_id in linucb_topic_id_list_all:
                     topic_id = topic_id
                     edit_tag_List = linucb_topic_dict[topic_id]['edit_tag_list']
                     content_level = linucb_topic_dict[topic_id]['content_level']
@@ -313,29 +343,29 @@ def get_topic_and_pictorial_detail_for_debug(device_id = "",linucb_topic_id_list
                                 {"id": edit_tag, "score": linucb_tag_dict[edit_tag]["score"],
                                  "name": linucb_tag_dict[edit_tag]["name"]})
                         else:
-                            edit_tag_name_score_list.append({"id": edit_tag, "score": 0, "name": edit_tag_List_dict[edit_tag]})
-                    linucb_topic_results.update({str(topic_id):{"id":topic_id,"content_level":content_level,"edit_tags":edit_tag_name_score_list}})
-                logging.info("linucb_topic:{}".format(linucb_topic_results))
-                # num += 1
-        if len(rank_topic_id_list) >0:
+                            edit_tag_name_score_list.append(
+                                {"id": edit_tag, "score": 0, "name": edit_tag_List_dict[edit_tag]})
+                    linucb_topic_results.update({str(topic_id): {"id": topic_id, "content_level": content_level,
+                                                                 "edit_tags": edit_tag_name_score_list}})
+                # logging.info("linucb_topic:{}".format(linucb_topic_results))
+                underexposure_lin_topic_results = {k: v for k, v in linucb_topic_results.items() if
+                                                   int(k) in underexposure_lin_topic_ids}
+                exposure_lin_topic_results = {k: v for k, v in linucb_topic_results.items() if
+                                              int(k) in linucb_topic_id_list}
+        if len(rank_topic_id_list) > 0:
             rank_topic_dict = TopicUtils.get_linucb_topic_info_for_debug(len(rank_topic_id_list),
-                                                                           linucb_topic_list=rank_topic_id_list)
-            rank_topic_results = dict()
+                                                                         linucb_topic_list=rank_topic_id_list)
             if len(rank_topic_dict) > 0:
-                # num = 1
                 for i in range(len(rank_topic_id_list)):
                     topic_id = rank_topic_id_list[i]
                     score = rank_topic_score[i]
-                    rank_topic_results.update({str(topic_id): {"id": topic_id, "rank":score}})
-                # num += 1
-                logging.info("rank_topic:{}".format(rank_topic_results))
-        if len(linucb_pictorial_id_list) >0:
+                    rank_topic_results.update({str(topic_id): {"id": topic_id, "rank": score}})
+                # logging.info("rank_topic:{}".format(rank_topic_results))
+        if len(linucb_pictorial_id_list) > 0:
             if len(linucb_pictorial_id_list) > 0:
                 linucb_pictorial_dict = TopicUtils.get_linucb_pictorial_info_for_debug(len(linucb_pictorial_id_list),
-                                                                               linucb_pictorial_list=linucb_pictorial_id_list)
-                linucb_pictorial_results = dict()
+                                                                                       linucb_pictorial_list=linucb_pictorial_id_list)
                 if len(linucb_pictorial_dict) > 0:
-                    # num = 1
                     for pictorial_id in linucb_pictorial_id_list:
                         pictorial_id = pictorial_id
                         edit_tag_List = linucb_pictorial_dict[pictorial_id]['edit_tag_list']
@@ -347,17 +377,19 @@ def get_topic_and_pictorial_detail_for_debug(device_id = "",linucb_topic_id_list
                                     {"id": edit_tag, "score": linucb_tag_dict[edit_tag]["score"],
                                      "name": linucb_tag_dict[edit_tag]["name"]})
                             else:
-                                edit_tag_name_score_list.append({"id": edit_tag, "score": 0, "name": edit_tag_List_dict[edit_tag]})
+                                edit_tag_name_score_list.append(
+                                    {"id": edit_tag, "score": 0, "name": edit_tag_List_dict[edit_tag]})
                         linucb_pictorial_results.update({str(pictorial_id): {"id": pictorial_id,
-                                                                "edit_tags": edit_tag_name_score_list}})
-                    logging.info("linucb_pictorial:{}".format(linucb_pictorial_results))
-                    # num += 1
-        result = { "linucb_topic":linucb_topic_results,"rank_topic":rank_topic_results,"linucb_pictorial":linucb_pictorial_results}
+                                                                             "edit_tags": edit_tag_name_score_list}})
+                    # logging.info("linucb_pictorial:{}".format(linucb_pictorial_results))
+        result = {"underexposure_lin_topic": underexposure_lin_topic_results,
+                  "exposure_lin_topic": exposure_lin_topic_results, "rank_topic": rank_topic_results,
+                  "linucb_pictorial": linucb_pictorial_results}
         logging.info("get_topic_and_pictorial_detail_for_debug:result:{}:device_id:{}".format(result, device_id))
         return result
     except:
         logging.error("catch exception,err_msg:%s" % traceback.format_exc())
-        return { "linucb_topic":{},"rank_topic":{},"linucb_pictorial":{}}
+        return {"underexposure_lin_topic": {}, "exposure_lin_topic": {}, "rank_topic": {}, "linucb_pictorial": {}}
 
 
 @bind("physical/search/query_tag_id_by_topic")
@@ -370,8 +402,8 @@ def query_tag_id_by_topic(offset=0, size=10, topic_id_list=[], user_id=-1):
 
 
 @bind("physical/search/home_recommend")
-def home_recommend(device_id="", user_id=-1, offset=0, lin_pictorial_count=4, size=10, query_type=TopicPageType.HOME_RECOMMEND,
-                   promote_topic_list=[], usefulrecall=-1, useful_tag_list=[], is_debug=False,is_gray=-1):
+def home_recommend(device_id="", user_id=-1, offset=0, underexposure_lin_topic_count=4, lin_pictorial_count=4, size=10, query_type=TopicPageType.HOME_RECOMMEND,
+                   promote_topic_list=[], usefulrecall=-1, useful_tag_list=[], is_debug=False, gray_list=[1]):
     """
     :remark: home page recommendation; currently only diary posts are recommended
     :param session_id:
@@ -412,77 +444,88 @@ def home_recommend(device_id="", user_id=-1, offset=0, lin_pictorial_count=4, si
 
         else:
             if usefulrecall != -1 and len(useful_tag_list) > 0:
-                recommend_pictorial_ids = get_home_recommend_pictorial_ids(user_id, device_id, lin_pictorial_count)
+                recommend_pictorial_ids = get_home_recommend_pictorial_ids(user_id, device_id, lin_pictorial_count,
+                                                                           gray_list=gray_list)
                 size = size - len(recommend_pictorial_ids)
                 if is_debug:
                     has_score = True
-                    recommend_topic_ids, rank_topic_ids, useful_topic_ids, rank_topic_score = get_home_recommend_topic_ids(
+                    underexposure_lin_topic_ids, recommend_topic_ids, rank_topic_ids, useful_topic_ids, rank_topic_score = get_home_recommend_topic_ids(
                         user_id, device_id,
                         tag_id=0, offset=0,
+                        underexposure_lin_topic_count=underexposure_lin_topic_count,
                         size=size,
                         query_type=query_type,
                         promote_topic_list=promote_topic_list,
                         usefulrecall=usefulrecall,
-                        useful_tag_list=useful_tag_list, has_score=has_score,is_gray=is_gray)
+                        useful_tag_list=useful_tag_list, has_score=has_score, gray_list=gray_list)
 
                 else:
                     has_score = False
-                    recommend_topic_ids, rank_topic_ids, useful_topic_ids = get_home_recommend_topic_ids(user_id,
+                    underexposure_lin_topic_ids, recommend_topic_ids, rank_topic_ids, useful_topic_ids = get_home_recommend_topic_ids(user_id,
                                                                                                          device_id,
                                                                                                          tag_id=0,
                                                                                                          offset=0,
+                                                                                                         underexposure_lin_topic_count=underexposure_lin_topic_count,
                                                                                                          size=size,
                                                                                                          query_type=query_type,
                                                                                                          promote_topic_list=promote_topic_list,
                                                                                                          usefulrecall=usefulrecall,
                                                                                                          useful_tag_list=useful_tag_list,
-                                                                                                         has_score=has_score,is_gray=is_gray)
+                                                                                                         has_score=has_score,
+                                                                                                         gray_list=gray_list)
                 if not is_debug:
-                    return {"linucb_topic_ids": recommend_topic_ids, "rank_topic_ids": rank_topic_ids,
+                    return {"underexposure_lin_topic_ids": underexposure_lin_topic_ids, "linucb_topic_ids": recommend_topic_ids, "rank_topic_ids": rank_topic_ids,
                             "useful_topic_ids": useful_topic_ids, "linucb_pictorial_ids": recommend_pictorial_ids}
                 else:
-                    results = get_topic_and_pictorial_detail_for_debug(device_id, recommend_topic_ids, rank_topic_ids,
-                                                                       recommend_pictorial_ids, rank_topic_score)
-                    return {"linucb_topic_ids": recommend_topic_ids, "rank_topic_ids": rank_topic_ids,
+                    results = get_topic_and_pictorial_detail_for_debug(device_id, underexposure_lin_topic_ids, recommend_topic_ids, rank_topic_ids,
+                                                                       recommend_pictorial_ids, rank_topic_score, gray_list)
+                    return {"underexposure_lin_topic_ids": underexposure_lin_topic_ids, "linucb_topic_ids": recommend_topic_ids, "rank_topic_ids": rank_topic_ids,
                             "useful_topic_ids": useful_topic_ids, "linucb_pictorial_ids": recommend_pictorial_ids,
                             "debug_model_data": results}
 
             else:
-                recommend_pictorial_ids = get_home_recommend_pictorial_ids(user_id, device_id, lin_pictorial_count)
+                recommend_pictorial_ids = get_home_recommend_pictorial_ids(user_id, device_id, lin_pictorial_count,
+                                                                           gray_list=gray_list)
                 size = size - len(recommend_pictorial_ids)
                 if is_debug:
                     has_score = True
-                    recommend_topic_ids, rank_topic_ids, rank_topic_score = get_home_recommend_topic_ids(user_id,
+                    underexposure_lin_topic_ids, recommend_topic_ids, rank_topic_ids, rank_topic_score = get_home_recommend_topic_ids(user_id,
                                                                                                          device_id,
                                                                                                          tag_id=0,
                                                                                                          offset=0,
+                                                                                                         underexposure_lin_topic_count=underexposure_lin_topic_count,
                                                                                                          size=size,
                                                                                                          query_type=query_type,
                                                                                                          promote_topic_list=promote_topic_list,
-                                                                                                         has_score=has_score,is_gray=is_gray)
+                                                                                                         has_score=has_score,
+                                                                                                         gray_list=gray_list)
 
                 else:
                     has_score = False
-                    recommend_topic_ids, rank_topic_ids = get_home_recommend_topic_ids(user_id, device_id, tag_id=0,
-                                                                                       offset=0, size=size,
+                    underexposure_lin_topic_ids, recommend_topic_ids, rank_topic_ids = get_home_recommend_topic_ids(user_id, device_id, tag_id=0,
+                                                                                       offset=0,
+                                                                                       underexposure_lin_topic_count=underexposure_lin_topic_count,
+                                                                                       size=size,
                                                                                        query_type=query_type,
                                                                                        promote_topic_list=promote_topic_list,
-                                                                                       has_score=has_score,is_gray=is_gray)
+                                                                                       has_score=has_score,
+                                                                                       gray_list=gray_list)
 
                 if not is_debug:
-                    return {"linucb_topic_ids": recommend_topic_ids, "rank_topic_ids": rank_topic_ids,
+                    return {"underexposure_lin_topic_ids": underexposure_lin_topic_ids, "linucb_topic_ids": recommend_topic_ids, "rank_topic_ids": rank_topic_ids,
                             "linucb_pictorial_ids": recommend_pictorial_ids}
                 else:
-                    results = get_topic_and_pictorial_detail_for_debug(device_id, recommend_topic_ids, rank_topic_ids,
-                                                                       recommend_pictorial_ids, rank_topic_score)
-                    return {"linucb_topic_ids": recommend_topic_ids, "rank_topic_ids": rank_topic_ids,
+                    results = get_topic_and_pictorial_detail_for_debug(device_id, underexposure_lin_topic_ids, recommend_topic_ids, rank_topic_ids,
+                                                                       recommend_pictorial_ids, rank_topic_score, gray_list)
+                    return {"underexposure_lin_topic_ids": underexposure_lin_topic_ids, "linucb_topic_ids": recommend_topic_ids, "rank_topic_ids": rank_topic_ids,
                             "linucb_pictorial_ids": recommend_pictorial_ids, "debug_model_data": results}
     except:
         logging.error("catch exception,err_msg:%s" % traceback.format_exc())
         if usefulrecall != -1:
-            return {"linucb_topic_ids": [], "rank_topic_ids": [], "useful_topic_ids": [],"linucb_pictorial_ids": [], "debug_model_data": {}}
+            return {"underexposure_lin_topic_ids": [], "linucb_topic_ids": [], "rank_topic_ids": [], "useful_topic_ids": [],"linucb_pictorial_ids": [], "debug_model_data": {}}
         else:
-            return {"linucb_topic_ids": [], "rank_topic_ids": [], "linucb_pictorial_ids": [],"debug_model_data": {}}
+            return {"underexposure_lin_topic_ids": [], "linucb_topic_ids": [], "rank_topic_ids": [], "linucb_pictorial_ids": [],"debug_model_data": {}}
+
 
 
 @bind("physical/search/discover_page")
@@ -528,10 +571,10 @@ def home_query(device_id="", tag_id=-1, user_id=-1, query="", offset=0, size=10,
         if not isinstance(device_id, str):
             device_id = ""
 
-        recommend_topic_list, rank_topic_id_list = get_home_recommend_topic_ids(user_id, device_id, tag_id,
+        underexposure_lin_topic, recommend_topic_list, rank_topic_id_list = get_home_recommend_topic_ids(user_id, device_id, tag_id,
                                                                                 offset=offset, size=size, query=query)
         if len(rank_topic_id_list) > 0 and len(rank_topic_id_list) < size:
-            recommend_topic_list, rank_topic_id_list = get_home_recommend_topic_ids(user_id, device_id, tag_id,
+            underexposure_lin_topic, recommend_topic_list, rank_topic_id_list = get_home_recommend_topic_ids(user_id, device_id, tag_id,
                                                                                     offset=offset, size=size,
                                                                                     query=query, disable_collpase=True)
 
@@ -716,7 +759,7 @@ def query_topic_by_tag_aggregation(user_id, tag_id, offset, size):
 
 
 @bind("physical/search/topic")
-def topic_search(filters, nfilters=None, sorts_by=None, offset=0, size=10):
+def topic_search(filters, nfilters=None, sorts_by=None, offset=0, size=10, user_id=-1):
     """帖子搜索。"""
 
     try:
@@ -758,23 +801,24 @@ def query_topic_by_user_similarity(topic_similarity_score_dict, offset=0, size=1
 
 
 @bind("physical/search/clear_history_for_debug")
-def clear_history(device_id=None,user_id=-1):
-   try:
+def clear_history(device_id=None, user_id=-1):
+    try:
         msg = list()
         redis_key_list = list()
         if device_id:
-            redis_key_list.extend(('physical:linucb:device_id:%s','physical:home_recommend:device_id:%s:query_type:1',
-                                   'physical:linucb:tag_recommend:device_id:%s',
-                                   'physical:linucb:topic_recommend:device_id:%s',
-                                   'physical:linucb:pictorial_recommend:device_id:%s',
-                                   'physical:home_pictorial_recommend:device_id:%s:query_type:1'))
+            redis_key_list.extend(('physical:linucb:device_id:%s', 'ctr_physical:linucb:device_id:%s',
+                                   'physical:linucb:tag_recommend:device_id:%s', 'ctr_physical:linucb:tag_recommend:device_id:%s',
+                                   'physical:linucb:topic_recommend:device_id:%s', 'ctr_physical:linucb:topic_recommend:device_id:%s',
+                                   'physical:linucb:pictorial_recommend:device_id:%s', 'ctr_physical:linucb:pictorial_recommend:device_id:%s',
+                                   'physical:home_recommend:device_id:%s:query_type:1', 'physical:home_pictorial_recommend:device_id:%s:query_type:1'))
             for redis_key in redis_key_list:
                 key = redis_key % device_id
                 if redis_client.type(key) != "b'none'":
                     redis_client.delete(key)
             msg.append('device_id_clear')
         if user_id != -1:
-            redis_user_key_list = ["physical:home_recommend:user_id:%s:query_type:1",'physical:home_pictorial_recommend:user_id:%s:query_type:1']
+            redis_user_key_list = ["physical:home_recommend:user_id:%s:query_type:1",
+                                   'physical:home_pictorial_recommend:user_id:%s:query_type:1']
             for redis_key in redis_user_key_list:
                 key = redis_key % user_id
                 if redis_client.type(key) != "b'none'":
@@ -782,51 +826,32 @@ def clear_history(device_id=None,user_id=-1):
             msg.append('user_id_clear')
         logging.info("physical/search/clear_history_for_debug:done:device_id:{}".format(device_id))
         return msg
-   except:
-        return ['fail']
-
-
-@bind("physical/search/home_recommend_debug")
-def home_recommend_debug(device_id="", user_id=-1, recommend_topic_ids = [],rank_topic_ids = [],recommend_pictorial_ids =[]):
-    """
-    :remark:首页推荐,目前只推荐日记
-    :param session_id:
-    :param user_id:
-    :param offset:
-    :param size:
-    :return:
-    """
-    try:
-         if device_id:
-            results = get_topic_and_pictorial_detail_for_debug(device_id,recommend_topic_ids,rank_topic_ids,recommend_pictorial_ids)
-            return results
-         else:
-            return {}
     except:
-        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
-        return {}
+        return ['fail']
 
 
-def get_device_linucb_tag(device_id = None,size = 20,has_score = False):
-    # {
-    #     "lincub": [{"id": 1, "name": 名字}, {"id": 2, "name": 名字}, {"id": 3, "name": 名字}, {"id": 5, "name": 名字},
-    #                {"id": 6, "name": 名字}, ]
-    # }
+def get_device_linucb_tag(device_id=None, size=20, has_score=False, gray_list=[1]):
     try:
-        user_feature = [1,1]
+        user_feature = [1, 1]
         if device_id:
-            linucb_matrix_redis_prefix = "physical:linucb:device_id:"
+            if STRATEGY_TYPE.CTR_GRAY in gray_list:
+                linucb_matrix_redis_prefix = "ctr_physical:linucb:device_id:"
+            else:
+                linucb_matrix_redis_prefix = "physical:linucb:device_id:"
             redis_key = linucb_matrix_redis_prefix + str(device_id)
             redis_linucb_tag_data_dict = redis_client.hgetall(redis_key)
             if len(redis_linucb_tag_data_dict) == 0:
                 return {"linucb": []}
             else:
-                (recommend_tag_dict,recommend_tag_set) = LinUCB.linucb_recommend_tag(device_id,redis_linucb_tag_data_dict,user_feature,list(redis_linucb_tag_data_dict.keys()))
+                (recommend_tag_dict, recommend_tag_set) = LinUCB.linucb_recommend_tag(
+                    device_id, redis_linucb_tag_data_dict, user_feature,
+                    list(redis_linucb_tag_data_dict.keys()))
                 recommend_tag_list = list(recommend_tag_dict.keys())
                 linucb_tag_dict_list = list()
                 linucb_tag_dict = dict()
                 part_recommend_tag_list = recommend_tag_list[0:size]
-                tag_query_results = Tag.objects.using(settings.SLAVE_DB_NAME).filter(
+                tag_query_results = Tag.objects.using(settings.SLAVE1_DB_NAME).filter(
                     id__in=part_recommend_tag_list, is_online=True, is_deleted=False).values_list("id", "name")
                 if has_score:
                     for id, name in tag_query_results:
@@ -834,18 +859,17 @@ def get_device_linucb_tag(device_id = None,size = 20,has_score = False):
                     return linucb_tag_dict
                 else:
                     for id, name in tag_query_results:
-                       linucb_tag_dict_list.append({"id": id, "name": name})
+                        linucb_tag_dict_list.append({"id": id, "name": name})
                     return linucb_tag_dict_list
-
         else:
-            return {"linucb":[]}
+            return {"linucb": []}
     except:
         return {"linucb": []}
 
 
 def get_edit_tag_name(tag_lst):
     try:
-        tag_query_results = Tag.objects.using(settings.SLAVE_DB_NAME).filter(
+        tag_query_results = Tag.objects.using(settings.SLAVE1_DB_NAME).filter(
             id__in=tag_lst, is_online=True, is_deleted=False).values_list("id", "name")
         tag_dict = dict()
         for id, name in tag_query_results:
@@ -856,40 +880,42 @@ def get_edit_tag_name(tag_lst):
 
 
 @bind("physical/search/device_linucb_tag_debug")
-def get_device_linucb_tag2(device_id = None,size = 20,has_score = False):
-    # {
-    #     "lincub": [{"id": 1, "name": 名字}, {"id": 2, "name": 名字}, {"id": 3, "name": 名字}, {"id": 5, "name": 名字},
-    #                {"id": 6, "name": 名字}, ]
-    # }
+def get_device_linucb_tag2(device_id=None, size=20, has_score=False, gray_list=[1]):
     try:
-        user_feature = [1,1]
+        user_feature = [1, 1]
         if device_id:
-            linucb_matrix_redis_prefix = "physical:linucb:device_id:"
+            if STRATEGY_TYPE.CTR_GRAY in gray_list:
+                linucb_matrix_redis_prefix = "ctr_physical:linucb:device_id:"
+            else:
+                linucb_matrix_redis_prefix = "physical:linucb:device_id:"
             redis_key = linucb_matrix_redis_prefix + str(device_id)
             redis_linucb_tag_data_dict = redis_client.hgetall(redis_key)
             if len(redis_linucb_tag_data_dict) == 0:
                 return {"linucb": []}
             else:
-                (recommend_tag_dict,recommend_tag_set) = LinUCB.linucb_recommend_tag(device_id,redis_linucb_tag_data_dict,user_feature,list(redis_linucb_tag_data_dict.keys()))
+                (recommend_tag_dict, recommend_tag_set) = LinUCB.linucb_recommend_tag(
+                    device_id, redis_linucb_tag_data_dict, user_feature,
+                    list(redis_linucb_tag_data_dict.keys()))
                 recommend_tag_list = list(recommend_tag_dict.keys())
                 linucb_tag_dict_list = list()
                 linucb_tag_dict = dict()
                 part_recommend_tag_list = recommend_tag_list[0:size]
-                tag_query_results = Tag.objects.using(settings.SLAVE_DB_NAME).filter(
+                tag_query_results = Tag.objects.using(settings.SLAVE1_DB_NAME).filter(
                     id__in=part_recommend_tag_list, is_online=True, is_deleted=False).values_list("id", "name")
                 if has_score:
                     result_lst = []
                     for id, name in tag_query_results:
-                       result_lst.append({"name": name, "score": recommend_tag_dict[str(id)]})
-                    linucb_tag_dict = {"linucb":result_lst}
+                        result_lst.append({"name": name, "score": recommend_tag_dict[str(id)]})
+                    linucb_tag_dict = {"linucb": result_lst}
                     logging.info("physical/search/device_linucb_tag_debug:%s" % str(linucb_tag_dict))
                     return linucb_tag_dict
                 else:
                     for id, name in tag_query_results:
-                       linucb_tag_dict_list.append({"id": id, "name": name})
+                        linucb_tag_dict_list.append({"id": id, "name": name})
                     return linucb_tag_dict_list
 
         else:
-            return {"linucb":[]}
+            return {"linucb": []}
     except:
         return {"linucb": []}
diff --git a/trans2es/management/commands/redis_new_topic_sub.py b/trans2es/management/commands/redis_new_topic_sub.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ed4dbbfc12c6b887c386b244a023015503507a3
--- /dev/null
+++ b/trans2es/management/commands/redis_new_topic_sub.py
@@ -0,0 +1,41 @@
+import json
+import logging
+import datetime
+from libs.cache import redis_client
+from libs.error import logging_exception
+from django.conf import settings
+from trans2es.models.portrait_stat import LikeTopicStat
+
+try:
+    ps = redis_client.pubsub()
+    ps.subscribe("new_topic_impression")
+    all_new_topic_impression_count_key = "all_new_topic_impression_count_key"
+    for item in ps.listen():
+        if item['type'] == 'message':
+            new_topic_ids = json.loads(item["data"])
+            cached_counts = redis_client.get(all_new_topic_impression_count_key)
+            # the key may not exist yet on first run
+            all_new_topic_impression_count = json.loads(cached_counts) if cached_counts else {}
+            insert_topic_ids = []
+            for topic in new_topic_ids:
+                topic = str(topic)
+                if topic in all_new_topic_impression_count:
+                    all_new_topic_impression_count[topic] = all_new_topic_impression_count[topic] + 1
+                    if all_new_topic_impression_count[topic] > 100:
+                        insert_topic_ids.append(int(topic))
+                        all_new_topic_impression_count.pop(topic)
+                else:
+                    all_new_topic_impression_count[topic] = 1
+            if insert_topic_ids:
+                insert_list = []
+                for topic in insert_topic_ids:
+                    insert_list.append(
+                        LikeTopicStat(create_time=datetime.datetime.today(), update_time=datetime.datetime.today(),
+                                          topic_id=topic, is_new_topic=0, topic_ctr_30=0.0, like_rate_30=0.0))
+                LikeTopicStat.objects.using(settings.MASTER_DB_NAME).bulk_create(insert_list)
+                logging.info("impressions count gt 100 topic ids" + str(insert_topic_ids))
+            json_all_new_topic_impression_count = json.dumps(all_new_topic_impression_count)
+            logging.info("all_new_topic_impression_count" + str(all_new_topic_impression_count))
+            redis_client.set(all_new_topic_impression_count_key, json_all_new_topic_impression_count)
+except:
+    logging_exception()
+    logging.error("redis new topic sub error!")
+
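
To exercise this subscriber locally, a minimal sketch of the producer side; it mirrors the `redis_client.publish("new_topic_impression", ...)` call added in `get_home_recommend_topic_ids`, and the helper name here is hypothetical:

```python
import json
from libs.cache import redis_client  # same client the subscriber uses

# Hypothetical test helper: push a batch of topic ids onto the channel that
# redis_new_topic_sub.py listens on, the same way the recommend path does.
def publish_test_impressions(topic_ids):
    redis_client.publish("new_topic_impression", json.dumps(list(topic_ids)))

# publish_test_impressions([101, 102, 103])
```
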
diff --git a/trans2es/management/commands/trans2es_data2es_parallel.py b/trans2es/management/commands/trans2es_data2es_parallel.py
index 66a88fdfccd588e21cbf503d072d920dede8eb2a..1ac3ecd449deb3c1229865aba32e0e67fd010696 100644
--- a/trans2es/management/commands/trans2es_data2es_parallel.py
+++ b/trans2es/management/commands/trans2es_data2es_parallel.py
@@ -28,10 +28,13 @@ from trans2es.models.topic import Topic,ActionSumAboutTopic
 from search.utils.common import *
 from linucb.views.collect_data import CollectData
 from injection.data_sync.tasks import sync_user_similar_score
+import datetime
 
 from trans2es.models.tag import Tag
 from libs.cache import redis_client
 from trans2es.models.tag import TopicTag
+from libs.error import logging_exception
+from trans2es.models.portrait_stat import LikeTopicStat
 
 
 
@@ -215,6 +218,41 @@ class Command(BaseCommand):
             logging.error("catch exception,err_msg:%s" % traceback.format_exc())
 
 
+    def sub_redis_new_topic_ids(self):
+        try:
+            ps = redis_client.pubsub()
+            ps.subscribe("new_topic_impression")
+            all_new_topic_impression_count_key = "all_new_topic_impression_count_key"
+            for item in ps.listen():
+                if item['type'] == 'message':
+                    new_topic_ids = json.loads(item["data"])
+                    cached_counts = redis_client.get(all_new_topic_impression_count_key)
+                    # the key may not exist yet on first run
+                    all_new_topic_impression_count = json.loads(cached_counts) if cached_counts else {}
+                    insert_topic_ids = []
+                    for topic in new_topic_ids:
+                        topic = str(topic)
+                        if topic in all_new_topic_impression_count:
+                            all_new_topic_impression_count[topic] = all_new_topic_impression_count[topic] + 1
+                            if all_new_topic_impression_count[topic] > 100:
+                                insert_topic_ids.append(int(topic))
+                                all_new_topic_impression_count.pop(topic)
+                        else:
+                            all_new_topic_impression_count[topic] = 1
+                    if insert_topic_ids:
+                        insert_list = []
+                        for topic in insert_topic_ids:
+                            insert_list.append(
+                                LikeTopicStat(create_time=datetime.datetime.today(),
+                                                  update_time=datetime.datetime.today(),
+                                                  topic_id=topic, is_new_topic=0, topic_ctr_30=0.0, like_rate_30=0.0))
+                        LikeTopicStat.objects.using(settings.MASTER_DB_NAME).bulk_create(insert_list)
+                        logging.info("impressions count gt 100 topic ids" + str(insert_topic_ids))
+                    json_all_new_topic_impression_count = json.dumps(all_new_topic_impression_count)
+                    logging.info("all_new_topic_impression_count" + str(all_new_topic_impression_count))
+                    redis_client.set(all_new_topic_impression_count_key, json_all_new_topic_impression_count)
+        except:
+            logging.error("catch exception,err_msg:%s" % traceback.format_exc())
+
+
     def handle(self, *args, **options):
         try:
             type_name_list = get_type_info_map().keys()
@@ -241,5 +279,8 @@ class Command(BaseCommand):
             if len(options["sync_type"]) and options["sync_type"]=="tagname":
                 self.sync_tag_collecction_name_set()
 
+            if len(options["sync_type"]) and options["sync_type"] == "new_topic_sub":
+                self.sub_redis_new_topic_ids()
+
         except:
             logging.error("catch exception,err_msg:%s" % traceback.format_exc())
diff --git a/trans2es/management/commands/trans2es_mapping2es.py b/trans2es/management/commands/trans2es_mapping2es.py
index 68cb4913be44c71807264e00d0e0e62eee714f52..3ce5976102d557e326b47dd19304078a53606edc 100644
--- a/trans2es/management/commands/trans2es_mapping2es.py
+++ b/trans2es/management/commands/trans2es_mapping2es.py
@@ -7,10 +7,12 @@ import traceback
 import logging
 from libs.es import ESPerform
 from trans2es.type_info import get_type_info_map, TypeInfo
-from vest.reply import true_comment_one, true_comment_two, true_comment_three, one_seven_topic_comment
+from vest.reply import true_comment_one, true_comment_two, true_comment_three, one_seven_topic_comment,\
+    auto_reply_per_1d_to_pictorial, auto_reply_per_1d_to_topic, auto_reply_per_2h_to_topic
 from vest.click import true_click_five, true_click_two, true_click_four, true_click_one, true_click_three, \
-    one_seven_star_topic
-from vest.follow import auto_follow, auto_follow_new
+    one_seven_star_topic, auto_click_per_1d_by_post, auto_click_per_2h_by_post
+from vest.follow import auto_follow, auto_follow_new, auto_follow_per_5m_by_followed, \
+    auto_follow_per_1d_by_regist, auto_follow_per_1d_by_post, auto_follow_per_2h_by_post_and_regist
 from vest.urge import auto_star_urge, auto_lunch_app, auto_lunch_app2, auto_urge1, auto_urge2
 from vest.fix import fix_no_comment_click
 from vest.reply_answer import reply_comment2, reply_comment3, answer_reply2, answer_reply3, answer_reply1, \
@@ -86,6 +88,10 @@ class Command(BaseCommand):
                 true_click_five.true_click_five()
             if options["mvest"] == "one_seven_star_topic":
                 one_seven_star_topic.one_seven_star_topic()
+            if options["mvest"] == "auto_click_per_1d_by_post":
+                auto_click_per_1d_by_post.auto_click_per_1d_by_post()
+            if options["mvest"] == "auto_click_per_2h_by_post":
+                auto_click_per_2h_by_post.auto_click_per_2h_by_post()
 
             # 评论
             if options["mvest"] == "true_comment_one":
@@ -114,6 +120,14 @@ class Command(BaseCommand):
                 auto_follow.auto_follow()
             if options["mvest"] == "auto_follow_new":
                 auto_follow_new.auto_follow_new()
+            if options['mvest'] == "auto_follow_per_5m_by_followed":
+                auto_follow_per_5m_by_followed.auto_follow_per_5m_by_followed()
+            if options['mvest'] == "auto_follow_per_1d_by_regist":
+                auto_follow_per_1d_by_regist.auto_follow_per_1d_by_regist()
+            if options['mvest'] == "auto_follow_per_1d_by_post":
+                auto_follow_per_1d_by_post.auto_follow_per_1d_by_post()
+            if options['mvest'] == "auto_follow_per_2h_by_post_and_regist":
+                auto_follow_per_2h_by_post_and_regist.auto_follow_per_2h_by_post_and_regist()
 
             # 补足
             if options["mvest"] == "fix_no_comment_click":
@@ -129,6 +143,15 @@ class Command(BaseCommand):
             if options["mvest"] == "vest_click_reply":
                 vest_click_reply.vest_click_reply()
 
+
+            # auto replies
+            if options["mvest"] == "auto_reply_per_1d_to_pictorial":
+                auto_reply_per_1d_to_pictorial.auto_reply_per_1d_to_pictorial()
+            if options["mvest"] == "auto_reply_per_2h_to_topic":
+                auto_reply_per_2h_to_topic.auto_reply_per_2h_to_topic()
+            if options["mvest"] == "auto_reply_per_1d_to_topic":
+                auto_reply_per_1d_to_topic.auto_reply_per_1d_to_topic()
+
             # 榜单评论
             if options["mvest"] == "principal_offline_comment1":
                 principal_offline_comment1.principal_offline_comment1()
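
A hedged sketch of driving the new `mvest` branches programmatically; it assumes the command registers the vest option under the dest name `mvest` (as suggested by `options["mvest"]` above):

```python
from django.core.management import call_command

# Assumption: the command's add_arguments registers the option with dest="mvest".
call_command("trans2es_mapping2es", mvest="auto_click_per_2h_by_post")
call_command("trans2es_mapping2es", mvest="auto_reply_per_1d_to_topic")
```
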
diff --git a/trans2es/mapping/pictorial.json b/trans2es/mapping/pictorial.json
index a85c29a604f689fb42e1fe310bfd60bc02e5a72f..8a197fcc4d8f80421232147bf2a9ac4d3bc8d58b 100644
--- a/trans2es/mapping/pictorial.json
+++ b/trans2es/mapping/pictorial.json
@@ -4,6 +4,11 @@
     "id":{"type":"long"},
     "is_online":{"type":"boolean"},//上线
     "is_deleted":{"type":"boolean"},
+    "is_new_pictorial":{"type":"boolean"},
+    "pictorial_ctr_30":{"type": "double"},
+    "like_rate_30":{"type": "double"},
+    "pictorial_ctr_all":{"type": "double"},
+    "like_rate_all":{"type": "double"},
     "is_recommend":{"type":"boolean"},
     "name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_search"},
     "description":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_search"},
diff --git a/trans2es/models/portrait_stat.py b/trans2es/models/portrait_stat.py
new file mode 100644
index 0000000000000000000000000000000000000000..72d50b4336b1f9ae12e21c7ce01a577f057b80a6
--- /dev/null
+++ b/trans2es/models/portrait_stat.py
@@ -0,0 +1,108 @@
+import logging
+import traceback
+from django.db import models
+from django.conf import settings
+
+
+class LikeDeviceTagStat(models.Model):
+    class Meta:
+        verbose_name = u"30天内设备的tag的stat"
+        db_table = "like_device_tag_stat"
+        unique_together = ("device_id", "tag_id")
+
+    id = models.IntegerField(primary_key=True, verbose_name=u"主键ID")
+    create_time = models.DateTimeField(verbose_name=u'统计创建时间')
+    update_time = models.DateTimeField(verbose_name=u'统计更新时间')
+    device_id = models.CharField(verbose_name=u'设备id', max_length=100)
+    tag_id = models.IntegerField(verbose_name=u'标签id')
+    tag_click_30 = models.IntegerField(verbose_name=u'30天内的点击数')
+    tag_impress_30 = models.IntegerField(verbose_name=u"30天内的曝光数")
+    tag_ctr_30 = models.FloatField(verbose_name=u"30天内的ctr")
+
+
+class LikeTopicStat(models.Model):
+    class Meta:
+        verbose_name = u"30天内回答的stat"
+        db_table = "like_topic_stat"
+
+    id = models.IntegerField(primary_key=True, verbose_name=u"主键ID")
+    create_time = models.DateTimeField(verbose_name=u'统计创建时间')
+    update_time = models.DateTimeField(verbose_name=u'统计更新时间')
+    topic_id = models.IntegerField(verbose_name=u'回答id', unique=True)
+    is_new_topic = models.IntegerField(verbose_name=u"是否是新帖")
+    topic_ctr_30 = models.FloatField(verbose_name=u"30天内回答的ctr")
+    like_rate_30 = models.FloatField(verbose_name=u"30天内回答的点赞率")
+
+
+class LikePictorialStat(models.Model):
+    class Meta:
+        verbose_name = u"30天内问题的stat"
+        db_table = "like_pictorial_stat"
+
+    id = models.IntegerField(primary_key=True, verbose_name=u"主键ID")
+    create_time = models.DateTimeField(verbose_name=u'统计创建时间')
+    update_time = models.DateTimeField(verbose_name=u'统计更新时间')
+    pictorial_id = models.IntegerField(verbose_name=u'问题id', unique=True)
+    is_new_pictorial = models.IntegerField(verbose_name=u"是否是新问题")
+    pictorial_ctr_30 = models.FloatField(verbose_name=u"30天内问题的ctr")
+    like_rate_30 = models.FloatField(verbose_name=u"30天内问题的点赞率")
+    pictorial_ctr_all = models.FloatField(verbose_name=u"历史该问题的ctr")
+    like_rate_all = models.FloatField(verbose_name=u"历史该问题的点赞率")
+
+
+    @classmethod
+    def get_pictorial_is_new(cls, pictorial_id):
+        try:
+            is_new_pictorial = LikePictorialStat.objects.using(settings.SLAVE1_DB_NAME).filter(pictorial_id=pictorial_id).values_list(
+                "is_new_pictorial", flat=True).first()
+            if is_new_pictorial == 0:
+                return False
+            else:
+                return True
+        except:
+            logging.error("catch exception,err_msg:%s" % traceback.format_exc())
+            return True
+
+    @classmethod
+    def get_pictorial_ctr_30(cls, pictorial_id):
+        try:
+            pictorial_ctr_30 = LikePictorialStat.objects.using(settings.SLAVE1_DB_NAME).filter(pictorial_id=pictorial_id).values_list(
+                "pictorial_ctr_30", flat=True).first()
+
+            return pictorial_ctr_30
+        except:
+            logging.error("catch exception,err_msg:%s" % traceback.format_exc())
+            return 0.0
+
+    @classmethod
+    def get_pictorial_like_rate_30(cls, pictorial_id):
+        try:
+            like_rate_30 = LikePictorialStat.objects.using(settings.SLAVE1_DB_NAME).filter(pictorial_id=pictorial_id).values_list(
+                "like_rate_30", flat=True).first()
+
+            return like_rate_30
+        except:
+            logging.error("catch exception,err_msg:%s" % traceback.format_exc())
+            return 0.0
+
+    @classmethod
+    def get_pictorial_ctr_all(cls, pictorial_id):
+        try:
+            pictorial_ctr_all = LikePictorialStat.objects.using(settings.SLAVE1_DB_NAME).filter(pictorial_id=pictorial_id).values_list(
+                "pictorial_ctr_all", flat=True).first()
+
+            return pictorial_ctr_all
+        except:
+            logging.error("catch exception,err_msg:%s" % traceback.format_exc())
+            return 0.0
+
+    @classmethod
+    def get_pictorial_like_rate_all(cls, pictorial_id):
+        try:
+            like_rate_all = LikePictorialStat.objects.using(settings.SLAVE1_DB_NAME).filter(pictorial_id=pictorial_id).values_list(
+                "like_rate_all", flat=True).first()
+
+            return like_rate_all
+        except:
+            logging.error("catch exception,err_msg:%s" % traceback.format_exc())
+            return 0.0
diff --git a/trans2es/utils/pictorial_transfer.py b/trans2es/utils/pictorial_transfer.py
index f65224c1cb0453d3341dad24ebc6549292e694cc..b72e1b8213b6e62f7abfdf4cfecbcfe57e48b480 100644
--- a/trans2es/utils/pictorial_transfer.py
+++ b/trans2es/utils/pictorial_transfer.py
@@ -6,6 +6,7 @@ import logging
 import traceback
 from libs.tools import tzlc
 from trans2es.models.topic import Topic
+from trans2es.models.portrait_stat import LikePictorialStat
 
 
 class PictorialTransfer(object):
@@ -75,6 +76,11 @@ class PictorialTransfer(object):
             res["real_user_activate_time"] = instance.get_real_user_activate_time()
             res["edit_tag_id"] = instance.get_edit_tag_id()
             res["edit_tag_name"] = instance.get_edit_tag_name(res["edit_tag_id"])
+            res["is_new_pictorial"] = LikePictorialStat.get_pictorial_is_new(instance.id)
+            res["pictorial_ctr_30"] = LikePictorialStat.get_pictorial_ctr_30(instance.id)
+            res["like_rate_30"] = LikePictorialStat.get_pictorial_like_rate_30(instance.id)
+            res["pictorial_ctr_all"] = LikePictorialStat.get_pictorial_ctr_all(instance.id)
+            res["like_rate_all"] = LikePictorialStat.get_pictorial_like_rate_all(instance.id)
             logging.info("get data:%s" % res)
             return res
         except:
diff --git a/vest/__init__.py b/vest/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..329ca6b89e43d8b406abf7408b81369d70ea0f58 100644
--- a/vest/__init__.py
+++ b/vest/__init__.py
@@ -0,0 +1 @@
+from .request import *
\ No newline at end of file
diff --git a/vest/change_reply/yesterday_topic_reply.py b/vest/change_reply/yesterday_topic_reply.py
index 341137103b85b4f0fb768ad4668950501b7c9314..94719b9ab19415149792679b320d3374b2446910 100644
--- a/vest/change_reply/yesterday_topic_reply.py
+++ b/vest/change_reply/yesterday_topic_reply.py
@@ -28,6 +28,7 @@ def get_data(numtime, numtime2):
 
 
 def yesterday_comment_one():
+    # Automatically add comments to yesterday's topics
     try:
         logging.info("comment one")
         numtime, numtime2 = time_convs(1, 1)
diff --git a/vest/click/auto_click_per_1d_by_post.py b/vest/click/auto_click_per_1d_by_post.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba5f9809f2c22159757bf33baf3c5a0466039513
--- /dev/null
+++ b/vest/click/auto_click_per_1d_by_post.py
@@ -0,0 +1,117 @@
+import pymysql
+import random
+import traceback
+import logging
+from threading import Thread
+from vest.request.auto_request import login, time_convs, click
+from vest.request.auto_request import host, user, db, passwd
+from libs.error import logging_exception
+from libs.timelib import get_rand_time
+
+
+def get_commnet_id(numtime, numtime2, content_level_low=0, content_level_top=6):
+    pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
+    cursor = pc.cursor()
+
+    topic_ids = []
+
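+    # Online topics by non-shadow (real) users created in the window and within the content-level range; each is later paired with its first pictorial id (0 if none).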
+    cursor.execute(
+
+        "select t.id from topic t left join user_extra u on t.user_id = u.user_id "
+        "where u.is_shadow=0 and t.create_time > '%s' and t.create_time < '%s' and t.is_online=1 "
+        "and t.content_level >= %s and t.content_level <=  %s " % (numtime, numtime2, str(content_level_low), str(content_level_top)))
+
+    res = cursor.fetchall()
+
+    for i, in res:
+        cursor.execute(
+
+            "SELECT pictorial_id FROM community_pictorial_topic where topic_id=%s limit 1" % i)
+
+        pictorial_id = cursor.fetchall()
+        if pictorial_id:
+            topic_ids.append((i, pictorial_id[0]))
+        else:
+            topic_ids.append((i, 0))
+
+    return topic_ids
+
+
+def batch_handle(auto_click_list):
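+    # For each queued topic id, obtain a login session and schedule the click task at a random later time; errors are swallowed so one failure does not stop the batch.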
+    for topic_id in auto_click_list:
+        try:
+            cookies = login()
+            if cookies is not None:
+                click.apply_async(args=(cookies, topic_id), eta=get_rand_time())
+                # click(cookies, topic_id)
+        except:
+            pass
+
+
+def auto_click_per_1d_by_post():
+    # Posting triggers automatic likes (daily job)
+    auto_click_list = []
+    try:
+        # 1-3 star and unstarred posts
+        # Posted 1 day ago: [2-6] likes
+        numtime1, numtime2 = time_convs(1, 1)
+        topic_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=3)
+        for topic_id in topic_ids:
+            click_num = random.randint(2, 6)
+            for i in range(click_num):
+                auto_click_list.append(topic_id)
+
+        # Posted 2-15 days ago: [0-2] likes
+        numtime1, numtime2 = time_convs(2, 15)
+        topic_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=3)
+        for topic_id in topic_ids:
+            click_num = random.randint(0, 2)
+            for i in range(click_num):
+                auto_click_list.append(topic_id)
+
+        # Posted 15+ days ago: [0-1] like roughly every 6 days
+        numtime1, numtime2 = time_convs(2, 15)
+        topic_ids = get_commnet_id('0', numtime2, content_level_low=0, content_level_top=3)
+        for topic_id in topic_ids:
+            click_num = random.randint(1, 6)
+            if click_num == 6:
+                auto_click_list.append(topic_id)
+
+        # 4-6 star posts
+        # Posted 1 day ago: [4-12] likes
+        numtime1, numtime2 = time_convs(1, 1)
+        topic_ids = get_commnet_id(numtime2, numtime1, content_level_low=4, content_level_top=6)
+        for topic_id in topic_ids:
+            click_num = random.randint(4, 12)
+            for i in range(click_num):
+                auto_click_list.append(topic_id)
+
+        # Posted 2-15 days ago: [0-6] likes
+        numtime1, numtime2 = time_convs(2, 15)
+        topic_ids = get_commnet_id(numtime2, numtime1, content_level_low=4, content_level_top=6)
+        for topic_id in topic_ids:
+            click_num = random.randint(0, 6)
+            for i in range(click_num):
+                auto_click_list.append(topic_id)
+
+        # Posted 15+ days ago: [0-3] likes roughly every 5 days
+        numtime1, numtime2 = time_convs(2, 15)
+        topic_ids = get_commnet_id('0', numtime2, content_level_low=4, content_level_top=6)
+        for topic_id in topic_ids:
+            click_num = random.randint(1, 3)
+            if click_num == 1:
+                auto_click_list.append(topic_id)
+
+    except:
+        logging_exception()
+        logging.error("catch exception,main:%s" % traceback.format_exc())
+
+    logging.info('auto_click_per_1d_by_post: len %s' % len(auto_click_list))
+    print('auto_click_per_1d_by_post: len %s' % len(auto_click_list))
+
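+    # Split the queue into roughly ten slices and hand each slice to its own worker thread.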
+    total = len(auto_click_list)
+    limit = (total + 10) // 10
+    for start in range(0, total, limit):
+        batch = auto_click_list[start:start + limit]
+        t = Thread(target=batch_handle, args=[batch])
+        t.start()
diff --git a/vest/click/auto_click_per_2h_by_post.py b/vest/click/auto_click_per_2h_by_post.py
new file mode 100644
index 0000000000000000000000000000000000000000..11dc2efe935eaf74335dfbcafef62f260bf677ce
--- /dev/null
+++ b/vest/click/auto_click_per_2h_by_post.py
@@ -0,0 +1,76 @@
+import pymysql
+import random
+import traceback
+import logging
+from threading import Thread
+from vest.request.auto_request import login, time_conv_hour, click
+from vest.request.auto_request import host, user, db, passwd
+from libs.error import logging_exception
+from libs.timelib import get_rand_time
+
+
+def get_commnet_id(numtime, numtime2, content_level_low=0, content_level_top=6):
+    pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
+    cursor = pc.cursor()
+
+    topic_ids = []
+
+    # After posting
+    cursor.execute(
+
+        "select t.id from topic t left join user_extra u on t.user_id = u.user_id "
+        "where u.is_shadow=0 and t.create_time > '%s' and t.create_time < '%s' and t.is_online=1 "
+        "and t.content_level >= %s and t.content_level <=  %s " % (numtime, numtime2, str(content_level_low), str(content_level_top)))
+
+    res = cursor.fetchall()
+
+    for i, in res:
+        cursor.execute(
+
+            "SELECT pictorial_id FROM community_pictorial_topic where topic_id=%s limit 1" % i)
+
+        pictorial_id = cursor.fetchall()
+        if pictorial_id:
+            topic_ids.append((i, pictorial_id[0]))
+        else:
+            topic_ids.append((i, 0))
+
+    return topic_ids
+
+
+def batch_handle(auto_click_list):
+    for topic_id in auto_click_list:
+        try:
+            cookies = login()
+            if cookies is not None:
+                # click(cookies, topic_id)
+                click.apply_async(args=(cookies, topic_id), eta=get_rand_time(hourup=1))
+        except:
+            pass
+
+
+def auto_click_per_2h_by_post():
+    # Posting triggers automatic likes (2-hour job)
+    auto_click_list = []
+    try:
+        # Posted within the last 2 hours: [1-3] likes
+        numtime1, numtime2 = time_conv_hour(0, 2)
+        topic_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=6)
+        for topic_id in topic_ids:
+            click_num = random.randint(1, 3)
+            for i in range(click_num):
+                auto_click_list.append(topic_id)
+
+    except:
+        logging_exception()
+        logging.error("catch exception,main:%s" % traceback.format_exc())
+
+    logging.info('auto_click_per_2h_by_post: len %s' % len(auto_click_list))
+    print('auto_click_per_2h_by_post: len %s' % len(auto_click_list))
+
+    total = len(auto_click_list)
+    limit = (total + 10) // 10
+    for start in range(0, total, limit):
+        batch = auto_click_list[start:start + limit]
+        t = Thread(target=batch_handle, args=[batch])
+        t.start()
diff --git a/vest/data/majia_user_ids.py b/vest/data/majia_user_ids.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d45a54ee340eefa80cc8eaafce6c81ce35fc2cd
--- /dev/null
+++ b/vest/data/majia_user_ids.py
@@ -0,0 +1,3 @@
+majia_user_ids_list = [241407406, 241407033, 241407154, 241407307, 241407284, 241407248, 241407179, 241407428, 241407470, 241407314, 241407385, 241407490, 241407375, 241407318, 241407508, 241407192, 241407131, 241407226, 241407174, 241407505, 241407358, 241407466, 241407452, 241407093, 241407289, 241407495, 241407405, 241407200, 241407330, 241407079, 241407207, 241407100, 241407116, 241407111, 241407302, 241407095, 241407086, 241407499, 241407519, 241407031, 241407288, 241407436, 241407310, 241407404, 241407493, 241407201, 241407084, 241407158, 241407327, 241407178, 241407164, 241407292, 241407387, 241407352, 241407482, 241407325, 241407256, 241407252, 241407485, 241407376, 241407242, 241407219, 241407166, 241407413, 241407343, 241407423, 241407468, 241407198, 241407170, 241407287, 241407138, 241407353, 241407065, 241407262, 241407501, 241407255, 241407512, 241407309, 241407494, 241407319, 241407373, 241407264, 241407471, 241407250, 241407479, 241407456, 241407443, 241407362, 241407157, 241407130, 241407075, 241407390, 241407036, 241407502, 241407092, 241407513, 241407136, 241407048, 241407448, 241407080, 241407459, 241407045, 241407407, 241407186, 241407083, 241407073, 241407191, 241407421, 241407062, 241407275, 241407347, 241407397, 241407052, 241407393, 241407150, 241407063, 241407212, 241407072, 241407500, 241407331, 241407339, 241407474, 241407378, 241407147, 241407316, 241407301, 241407516, 241407527, 241407233, 241407526, 241407365, 241407384, 241407349, 241407210, 241407162, 241407042, 241407057, 241407514, 241407398, 241407103, 241407509, 241407336, 241407335, 241407145, 241407429, 241407144, 241407068, 241407074, 241407077, 241407238, 241407112, 241407236, 241407245, 241407364, 241407101, 241407108, 241407355, 241407431, 241407247, 241407167, 241407050, 241407064, 241407276, 241407139, 241407120, 241407249, 241407097, 241407437, 241407377, 241407140, 241407475, 241407469, 241407168, 241407486, 241407263, 241407379, 241407243, 241407081, 241407114, 241407230, 241407169, 241407059, 241407354, 241407253, 241407106, 241407467, 241407206, 241407221, 241407481, 241407326, 241407185, 241407322, 241407411, 241407044, 241407285, 241407324, 241407522, 241407175, 241407151, 241407246, 241407433, 241407496, 241407234, 241407183, 241407524, 241407265, 241407146, 241407043, 241407217, 241407046, 241407229, 241407417, 241407124, 241407280, 241407137, 241407091, 241407132, 241407261, 241407115, 241407071, 241407165, 241407369, 241407418, 241407176, 241407040, 241407143, 241407058, 241407491, 241407434, 241407329, 241407462, 241407135, 241407338, 241407283, 241407094, 241407117, 241407239, 241407410, 241407435, 241407060, 241407420, 241407424, 241407308, 241407489, 241407286, 241407268, 241407311, 241407328, 241407232, 241407463, 241407304, 241407346, 241407076, 241407126, 241407177, 241407382, 241407438, 241407032, 241407515, 241407363, 241407189, 241407360, 241407282, 241407295, 241407109, 241407340, 241407333, 241407240, 241407457, 241407231, 241407163, 241407492, 241407220, 241407125, 241407182, 241407070, 241407293, 241407356, 241407190, 241407128, 241407039, 241407241, 241407451, 241407334, 241407270, 241407090, 241407412, 241407465, 241407119, 241407244, 241407487, 241407511, 241407196, 241407415, 241407054, 241407504, 241407300, 241407320, 241407447, 241407205, 241407460, 241407216, 241407389, 241407188, 241407313, 241407156, 241407113, 241407350, 241407525, 241407497, 241407211, 241407345, 241407394, 241407107, 241407266, 241407332, 241407197, 241407337, 241407085, 241407366, 241407305, 
241407401, 241407315, 241407089, 241407281, 241407386, 241407038, 241407351, 241407507, 241407484, 241407171, 241407082, 241407214, 241407419, 241407321, 241407409, 241407251, 241407426, 241407444, 241407432, 241407391, 241407357, 241407213, 241407368, 241407416, 241407342, 241407312, 241407303, 241407121, 241407099, 241407521, 241407259, 241407510, 241407049, 241407087, 241407279, 241407441, 241407388, 241407105, 241407478, 241407317, 241407142, 241407096, 241407422, 241407277, 241407141, 241407488, 241407297, 241407473, 241407110, 241407361, 241407306, 241407402, 241407215, 241407273, 241407155, 241407257, 241407454, 241407274, 241407123, 241407104, 241407208, 241407298, 241407399, 241407030, 241407069, 241407153, 241407453, 241407449, 241407440, 241407455, 241407223, 241407278, 241407400, 241407359, 241407088, 241407202, 241407254, 241407194, 241407159, 241407161, 241407061, 241407267, 241407372, 241407381, 241407374, 241407173, 241407392, 241407271, 241407341, 241407299, 241407446, 241407439, 241407523, 241407396, 241407227, 241407041, 241407067, 241407181, 241407344, 241407199, 241407203, 241407193, 241407127, 241407348, 241407118, 241407260, 241407209, 241407370, 241407291, 241407367, 241407380, 241407503, 241407037, 241407269, 241407450, 241407133, 241407371, 241407395, 241407122, 241407204, 241407430, 241407152, 241407148, 241407425, 241407160, 241407427, 241407296, 241407518, 241407480, 241407180, 241407272, 241407403, 241407102, 241407225, 241407195, 241407472, 241407056, 241407498, 241407218, 241407055, 241407323, 241407461, 241407098, 241407235, 241407187, 241407290, 241407520, 241407483, 241407172, 241407134, 241407506, 241407129, 241407476, 241407517, 241407237, 241407442, 241407184, 241407464, 241407529, 241407047, 241407053, 241407458, 241407078, 241407066, 241407294, 241407258, 241407035, 241407445, 241407149, 241407414, 241407222, 241407383, 241407408, 241407034, 241407051, 241407477, 241407224, 241407228, 241407528]
+
+majia_user_ids_dict = {'241407030': 's_sgMyOm@shadow.com', '241407031': 's_4HRYFK@shadow.com', '241407032': 's_KaRMJO@shadow.com', '241407033': 's_07ssdt@shadow.com', '241407034': 's_zMc7wq@shadow.com', '241407035': 's_zA014B@shadow.com', '241407036': 's_akgPyg@shadow.com', '241407037': 's_vKSOXA@shadow.com', '241407038': 's_oi7M2a@shadow.com', '241407039': 's_lOTICx@shadow.com', '241407040': 's_i4rFhk@shadow.com', '241407041': 's_UPqB4b@shadow.com', '241407042': 's_do9K3h@shadow.com', '241407043': 's_gXRMNW@shadow.com', '241407044': 's_GGopQR@shadow.com', '241407045': 's_be9Zi8@shadow.com', '241407046': 's_hDAJDM@shadow.com', '241407047': 's_yQJV10@shadow.com', '241407048': 's_axSkdT@shadow.com', '241407049': 's_QI5QFa@shadow.com', '241407050': 's_ERIRTt@shadow.com', '241407051': 's_zNA0qR@shadow.com', '241407052': 's_by2EKN@shadow.com', '241407053': 's_yRDyTe@shadow.com', '241407054': 's_mAu8xe@shadow.com', '241407055': 's_xNXLax@shadow.com', '241407056': 's_xkRtne@shadow.com', '241407057': 's_Dpguo5@shadow.com', '241407058': 's_i7ILAu@shadow.com', '241407059': 's_fjgn6l@shadow.com', '241407060': 's_iwSVzk@shadow.com', '241407061': 's_u39tAC@shadow.com', '241407062': 's_BqeGFr@shadow.com', '241407063': 's_cA91fz@shadow.com', '241407064': 's_ESqCxg@shadow.com', '241407065': 's_8G7dcC@shadow.com', '241407066': 's_yUirRE@shadow.com', '241407067': 's_upU0XW@shadow.com', '241407068': 's_e1gsjY@shadow.com', '241407069': 's_ShQyke@shadow.com', '241407070': 's_KzuAYn@shadow.com', '241407071': 's_hsG5eH@shadow.com', '241407072': 's_cF7TuX@shadow.com', '241407073': 's_bNzT3y@shadow.com', '241407074': 's_E4I3Xs@shadow.com', '241407075': 's_AdUmQr@shadow.com', '241407076': 's_JU1HJp@shadow.com', '241407077': 's_e6gQH3@shadow.com', '241407078': 's_YSpR4I@shadow.com', '241407079': 's_36xYLw@shadow.com', '241407080': 's_b5CSos@shadow.com', '241407081': 's_fGrHfu@shadow.com', '241407082': 's_OTNbWY@shadow.com', '241407083': 's_BmjUyS@shadow.com', '241407084': 's_5gjbBq@shadow.com', '241407085': 's_o1KuZK@shadow.com', '241407086': 's_4cYuPa@shadow.com', '241407087': 's_ql2jNt@shadow.com', '241407088': 's_TlQiUV@shadow.com', '241407089': 's_oD1Hmy@shadow.com', '241407090': 's_lRgLWN@shadow.com', '241407091': 's_hO4VFC@shadow.com', '241407092': 's_AviTFG@shadow.com', '241407093': 's_2Lago0@shadow.com', '241407094': 's_IN3CIK@shadow.com', '241407095': 's_49T1iu@shadow.com', '241407096': 's_QZ5xev@shadow.com', '241407097': 's_f0ULrl@shadow.com', '241407098': 's_XubfZG@shadow.com', '241407099': 's_PzH35A@shadow.com', '241407100': 's_3chHX1@shadow.com', '241407101': 's_Eh80vb@shadow.com', '241407102': 's_XAkiWi@shadow.com', '241407103': 's_DsRsnp@shadow.com', '241407104': 's_S4WZXB@shadow.com', '241407105': 's_QqYjNu@shadow.com', '241407106': 's_FMZSoT@shadow.com', '241407107': 's_NTvlOB@shadow.com', '241407108': 's_ei0zs6@shadow.com', '241407109': 's_KnFg4t@shadow.com', '241407110': 's_ropbJR@shadow.com', '241407111': 's_3Q98tn@shadow.com', '241407112': 's_EbB7e0@shadow.com', '241407113': 's_NdwviG@shadow.com', '241407114': 's_fIH36h@shadow.com', '241407115': 's_HrFCH6@shadow.com', '241407116': 's_3ixLfv@shadow.com', '241407117': 's_IqssSO@shadow.com', '241407118': 's_V1GHJz@shadow.com', '241407119': 's_Lt4V85@shadow.com', '241407120': 's_exsZWG@shadow.com', '241407121': 's_Py0oZ4@shadow.com', '241407122': 's_W4RqbA@shadow.com', '241407123': 's_s1L2lv@shadow.com', '241407124': 's_hgZehK@shadow.com', '241407125': 's_KU3D7m@shadow.com', '241407126': 's_JwVmFg@shadow.com', '241407127': 's_uZRiM7@shadow.com', 
'241407128': 's_LNhnOE@shadow.com', '241407129': 's_YcQsmL@shadow.com', '241407130': 's_AAXg2x@shadow.com', '241407131': 's_1N8qG8@shadow.com', '241407132': 's_hpC1IP@shadow.com', '241407133': 's_VWL5Pk@shadow.com', '241407134': 's_y8atXB@shadow.com', '241407135': 's_igjXaA@shadow.com', '241407136': 's_axJVqe@shadow.com', '241407137': 's_hhSSqd@shadow.com', '241407138': 's_8Evf2c@shadow.com', '241407139': 's_EvDs7H@shadow.com', '241407140': 's_F6FHfH@shadow.com', '241407141': 's_RGun9G@shadow.com', '241407142': 's_qVKF9Q@shadow.com', '241407143': 's_i6ffeZ@shadow.com', '241407144': 's_E0txbx@shadow.com', '241407145': 's_DVwDK3@shadow.com', '241407146': 's_gwVryk@shadow.com', '241407147': 's_Cm7btb@shadow.com', '241407148': 's_WP3s7w@shadow.com', '241407149': 's_ZdLDZo@shadow.com', '241407150': 's_C2PiMv@shadow.com', '241407151': 's_GPR0C1@shadow.com', '241407152': 's_wnrxFo@shadow.com', '241407153': 's_sNyLO4@shadow.com', '241407154': 's_08DlWX@shadow.com', '241407155': 's_rTS8qV@shadow.com', '241407156': 's_nd6HiH@shadow.com', '241407157': 's_a9cIf1@shadow.com', '241407158': 's_5I3fN3@shadow.com', '241407159': 's_TWwKNi@shadow.com', '241407160': 's_WrVSmD@shadow.com', '241407161': 's_Twwxb8@shadow.com', '241407162': 's_DJvlFY@shadow.com', '241407163': 's_KTcosq@shadow.com', '241407164': 's_5o4RJL@shadow.com', '241407165': 's_hsj9pq@shadow.com', '241407166': 's_7TOm5v@shadow.com', '241407167': 's_enXoFU@shadow.com', '241407168': 's_F9hH2t@shadow.com', '241407169': 's_fJD9SK@shadow.com', '241407170': 's_895iAz@shadow.com', '241407171': 's_oniMPE@shadow.com', '241407172': 's_y6GymP@shadow.com', '241407173': 's_UGN1aM@shadow.com', '241407174': 's_1RCzUi@shadow.com', '241407175': 's_GPok7I@shadow.com', '241407176': 's_hX41uJ@shadow.com', '241407177': 's_jYDXVu@shadow.com', '241407178': 's_5M8FPz@shadow.com', '241407179': 's_0KyxOu@shadow.com', '241407180': 's_wWtI7X@shadow.com', '241407181': 's_upwHbj@shadow.com', '241407182': 's_kWGe6S@shadow.com', '241407183': 's_GtpUFG@shadow.com', '241407184': 's_yMfJP3@shadow.com', '241407185': 's_g3zIuG@shadow.com', '241407186': 's_bLymnh@shadow.com', '241407187': 's_xvwj7J@shadow.com', '241407188': 's_n2rgzI@shadow.com', '241407189': 's_kfGtFE@shadow.com', '241407190': 's_lK7QJJ@shadow.com', '241407191': 's_bOQCNQ@shadow.com', '241407192': 's_1mypap@shadow.com', '241407193': 's_ux8AJL@shadow.com', '241407194': 's_twoHks@shadow.com', '241407195': 's_xgZqN4@shadow.com', '241407196': 's_MaChwe@shadow.com', '241407197': 's_NzQnI9@shadow.com', '241407198': 's_87PpDu@shadow.com', '241407199': 's_UTQ2Lb@shadow.com', '241407200': 's_2ZlLCx@shadow.com', '241407201': 's_5FAGd4@shadow.com', '241407202': 's_TNhLPD@shadow.com', '241407203': 's_uujHEb@shadow.com', '241407204': 's_wBqKNY@shadow.com', '241407205': 's_MrbRuR@shadow.com', '241407206': 's_FPb33o@shadow.com', '241407207': 's_3c7tUk@shadow.com', '241407208': 's_s7uz0U@shadow.com', '241407209': 's_v3TI1p@shadow.com', '241407210': 's_djjDaF@shadow.com', '241407211': 's_NjoRTW@shadow.com', '241407212': 's_CDooAN@shadow.com', '241407213': 's_pkMFSS@shadow.com', '241407214': 's_OU8QCt@shadow.com', '241407215': 's_rSlV4T@shadow.com', '241407216': 's_MtxuFr@shadow.com', '241407217': 's_H36e9F@shadow.com', '241407218': 's_xnXhe5@shadow.com', '241407219': 's_7nWGkq@shadow.com', '241407220': 's_KtWEu9@shadow.com', '241407221': 's_FpzO9Q@shadow.com', '241407222': 's_zhOQcV@shadow.com', '241407223': 's_T8Gcjy@shadow.com', '241407224': 's_ZsMr7d@shadow.com', '241407225': 's_XgWX4z@shadow.com', '241407226': 
's_1ozsiy@shadow.com', '241407227': 's_UPD6bx@shadow.com', '241407228': 's_zsvmqU@shadow.com', '241407229': 's_HfHwKX@shadow.com', '241407230': 's_FihASr@shadow.com', '241407231': 's_KRN7h9@shadow.com', '241407232': 's_Jmkcc9@shadow.com', '241407233': 's_d7TIgK@shadow.com', '241407234': 's_gtMV3b@shadow.com', '241407235': 's_XVsXeA@shadow.com', '241407236': 's_edvBYb@shadow.com', '241407237': 's_YiX7OV@shadow.com', '241407238': 's_EAYAMe@shadow.com', '241407239': 's_IR0Jmz@shadow.com', '241407240': 's_KphQ2M@shadow.com', '241407241': 's_lPRv0u@shadow.com', '241407242': 's_7FUcFf@shadow.com', '241407243': 's_FGgy48@shadow.com', '241407244': 's_ltsupK@shadow.com', '241407245': 's_Ee4QC1@shadow.com', '241407246': 's_gqBA3v@shadow.com', '241407247': 's_embU8E@shadow.com', '241407248': 's_0I4C0g@shadow.com', '241407249': 's_EzpzqV@shadow.com', '241407250': 's_98A1Z2@shadow.com', '241407251': 's_p5kFP1@shadow.com', '241407252': 's_6srtjM@shadow.com', '241407253': 's_FJyYaA@shadow.com', '241407254': 's_TSJPo5@shadow.com', '241407255': 's_8nvaAF@shadow.com', '241407256': 's_6rkYkC@shadow.com', '241407257': 's_rUnoMo@shadow.com', '241407258': 's_Z8PP5J@shadow.com', '241407259': 's_Q5LUMj@shadow.com', '241407260': 's_v26xAA@shadow.com', '241407261': 's_HqZEg7@shadow.com', '241407262': 's_8gSWmC@shadow.com', '241407263': 's_fCQcu1@shadow.com', '241407264': 's_91xnox@shadow.com', '241407265': 's_gvlFut@shadow.com', '241407266': 's_nyfbCS@shadow.com', '241407267': 's_uA9TOF@shadow.com', '241407268': 's_JLlz1L@shadow.com', '241407269': 's_vm9MFh@shadow.com', '241407270': 's_Lqxoqm@shadow.com', '241407271': 's_uhXH6g@shadow.com', '241407272': 's_Wyn2gW@shadow.com', '241407273': 's_rtCnmO@shadow.com', '241407274': 's_RyIPYo@shadow.com', '241407275': 's_bS8mIT@shadow.com', '241407276': 's_eUL2wx@shadow.com', '241407277': 's_rggaRa@shadow.com', '241407278': 's_T93ZyL@shadow.com', '241407279': 's_qnVpkI@shadow.com', '241407280': 's_hHpKxO@shadow.com', '241407281': 's_OgaFKI@shadow.com', '241407282': 's_kJWvzA@shadow.com', '241407283': 's_ilPCgr@shadow.com', '241407284': 's_0bCas9@shadow.com', '241407285': 's_gJ4l7J@shadow.com', '241407286': 's_jBC0ve@shadow.com', '241407287': 's_8e80N0@shadow.com', '241407288': 's_4jPHc1@shadow.com', '241407289': 's_2LjC8I@shadow.com', '241407290': 's_XXGEwN@shadow.com', '241407291': 's_VbZq9G@shadow.com', '241407292': 's_5rfk9c@shadow.com', '241407293': 's_kzuGZj@shadow.com', '241407294': 's_yUUJmV@shadow.com', '241407295': 's_Kmd5N3@shadow.com', '241407296': 's_wvsmbJ@shadow.com', '241407297': 's_ROiMbb@shadow.com', '241407298': 's_sAZ7L8@shadow.com', '241407299': 's_UJWzA6@shadow.com', '241407300': 's_mCazhk@shadow.com', '241407301': 's_cuQRSf@shadow.com', '241407302': 's_3Us8vf@shadow.com', '241407303': 's_pxACJM@shadow.com', '241407304': 's_jRQhe1@shadow.com', '241407305': 's_O8qfPf@shadow.com', '241407306': 's_rQC4Tn@shadow.com', '241407307': 's_09lKhD@shadow.com', '241407308': 's_J3uR9F@shadow.com', '241407309': 's_8vCW0n@shadow.com', '241407310': 's_53EyN7@shadow.com', '241407311': 's_jlNKoI@shadow.com', '241407312': 's_prpHwa@shadow.com', '241407313': 's_NBpqo6@shadow.com', '241407314': 's_0YjkyC@shadow.com', '241407315': 's_oD0SPa@shadow.com', '241407316': 's_cRZ820@shadow.com', '241407317': 's_QuSUc9@shadow.com', '241407318': 's_1aQRIO@shadow.com', '241407319': 's_8WpiMk@shadow.com', '241407320': 's_mFbRwb@shadow.com', '241407321': 's_P0Y9Tk@shadow.com', '241407322': 's_GakHBe@shadow.com', '241407323': 's_XQ3BZh@shadow.com', '241407324': 's_gJDa8J@shadow.com', 
'241407325': 's_6MhNqA@shadow.com', '241407326': 's_fvdNZW@shadow.com', '241407327': 's_5J2GxW@shadow.com', '241407328': 's_JluV3t@shadow.com', '241407329': 's_Iblrkj@shadow.com', '241407330': 's_335ZV4@shadow.com', '241407331': 's_CHiVph@shadow.com', '241407332': 's_nYtmvx@shadow.com', '241407333': 's_KPbWLr@shadow.com', '241407334': 's_lQKSDY@shadow.com', '241407335': 's_Du0nUG@shadow.com', '241407336': 's_DTILiI@shadow.com', '241407337': 's_NzzDIB@shadow.com', '241407338': 's_IJ4WKW@shadow.com', '241407339': 's_ChO3b1@shadow.com', '241407340': 's_kNYvxm@shadow.com', '241407341': 's_UJDVhN@shadow.com', '241407342': 's_pPYeUT@shadow.com', '241407343': 's_85Kgkq@shadow.com', '241407344': 's_uTPfeO@shadow.com', '241407345': 's_nO8pMn@shadow.com', '241407346': 's_Jsgrmu@shadow.com', '241407347': 's_BXqhZU@shadow.com', '241407348': 's_V11qVt@shadow.com', '241407349': 's_dIHw1i@shadow.com', '241407350': 's_nh6qPY@shadow.com', '241407351': 's_OKKdnO@shadow.com', '241407352': 's_68WhjX@shadow.com', '241407353': 's_8FXzfc@shadow.com', '241407354': 's_FJSZGg@shadow.com', '241407355': 's_EkQYST@shadow.com', '241407356': 's_LbXj8I@shadow.com', '241407357': 's_PewDvT@shadow.com', '241407358': 's_1wfiXX@shadow.com', '241407359': 's_tIpF2Z@shadow.com', '241407360': 's_KifGet@shadow.com', '241407361': 's_RpvPdS@shadow.com', '241407362': 's_a30n4A@shadow.com', '241407363': 's_KExWNT@shadow.com', '241407364': 's_eGi9Rv@shadow.com', '241407365': 's_De6CKT@shadow.com', '241407366': 's_o4Fr6f@shadow.com', '241407367': 's_vDC1eS@shadow.com', '241407368': 's_pl3E7A@shadow.com', '241407369': 's_HttzBh@shadow.com', '241407370': 's_vAhvMd@shadow.com', '241407371': 's_w1S8S8@shadow.com', '241407372': 's_UcMCt1@shadow.com', '241407373': 's_8ZpViX@shadow.com', '241407374': 's_UGjdzs@shadow.com', '241407375': 's_17kAoA@shadow.com', '241407376': 's_741wfB@shadow.com', '241407377': 's_f5Jj7b@shadow.com', '241407378': 's_CJiMB7@shadow.com', '241407379': 's_FfHEiT@shadow.com', '241407380': 's_VI2Imw@shadow.com', '241407381': 's_UdLIDf@shadow.com', '241407382': 's_k77N6c@shadow.com', '241407383': 's_zKJf4X@shadow.com', '241407384': 's_dGjQK4@shadow.com', '241407385': 's_15Du4D@shadow.com', '241407386': 's_oGmTi8@shadow.com', '241407387': 's_65gIdM@shadow.com', '241407388': 's_qOqqzJ@shadow.com', '241407389': 's_mUZqQt@shadow.com', '241407390': 's_aI9Ku7@shadow.com', '241407391': 's_PdBQu7@shadow.com', '241407392': 's_UhTVV5@shadow.com', '241407393': 's_bzRObE@shadow.com', '241407394': 's_ntnHXm@shadow.com', '241407395': 's_w3ZZJn@shadow.com', '241407396': 's_uMjxpT@shadow.com', '241407397': 's_BxSAPH@shadow.com', '241407398': 's_dsJUNE@shadow.com', '241407399': 's_sfLGzM@shadow.com', '241407400': 's_Tao00Z@shadow.com', '241407401': 's_o92L1i@shadow.com', '241407402': 's_rRrUE5@shadow.com', '241407403': 's_x5DTTo@shadow.com', '241407404': 's_55RYhl@shadow.com', '241407405': 's_2Yteeu@shadow.com', '241407406': 's_06fWcj@shadow.com', '241407407': 's_BEa7Nm@shadow.com', '241407408': 's_Zli3Rp@shadow.com', '241407409': 's_p1PEOV@shadow.com', '241407410': 's_ISOl1S@shadow.com', '241407411': 's_gFzMYy@shadow.com', '241407412': 's_lRjEm4@shadow.com', '241407413': 's_7UkikS@shadow.com', '241407414': 's_ZFISK2@shadow.com', '241407415': 's_MaqNBa@shadow.com', '241407416': 's_PoKDa9@shadow.com', '241407417': 's_hFMaHa@shadow.com', '241407418': 's_hveGbT@shadow.com', '241407419': 's_OXrBq7@shadow.com', '241407420': 's_IYyGo8@shadow.com', '241407421': 's_BP84Ls@shadow.com', '241407422': 's_rexvsp@shadow.com', '241407423': 
's_86vJCD@shadow.com', '241407424': 's_J2n2rf@shadow.com', '241407425': 's_wrajLj@shadow.com', '241407426': 's_p7moGs@shadow.com', '241407427': 's_WsbZFz@shadow.com', '241407428': 's_0LpqA0@shadow.com', '241407429': 's_DXhqau@shadow.com', '241407430': 's_wLJpFr@shadow.com', '241407431': 's_elsxmB@shadow.com', '241407432': 's_PbDamE@shadow.com', '241407433': 's_gsAitD@shadow.com', '241407434': 's_iAO3TP@shadow.com', '241407435': 's_ITbTqk@shadow.com', '241407436': 's_4O4rSo@shadow.com', '241407437': 's_f4PIek@shadow.com', '241407438': 's_kAafF1@shadow.com', '241407439': 's_UlbokC@shadow.com', '241407440': 's_StVyIN@shadow.com', '241407441': 's_Qny9jd@shadow.com', '241407442': 's_yLXjZH@shadow.com', '241407443': 's_A206OU@shadow.com', '241407444': 's_PA9kIT@shadow.com', '241407445': 's_ZCN9cu@shadow.com', '241407446': 's_ukKc8S@shadow.com', '241407447': 's_MhRJYd@shadow.com', '241407448': 's_aYvHUZ@shadow.com', '241407449': 's_sQTGcP@shadow.com', '241407450': 's_vqyczy@shadow.com', '241407451': 's_lqerll@shadow.com', '241407452': 's_2dIy2D@shadow.com', '241407453': 's_sqPBhm@shadow.com', '241407454': 's_rxeL87@shadow.com', '241407455': 's_T7Zllb@shadow.com', '241407456': 's_A1prFK@shadow.com', '241407457': 's_kqy0Ti@shadow.com', '241407458': 's_yRxvCl@shadow.com', '241407459': 's_bAf12Y@shadow.com', '241407460': 's_mSDYSZ@shadow.com', '241407461': 's_XQa53g@shadow.com', '241407462': 's_ifZ8Nd@shadow.com', '241407463': 's_JnVz5P@shadow.com', '241407464': 's_YQAkOm@shadow.com', '241407465': 's_LrWglN@shadow.com', '241407466': 's_20d8Bf@shadow.com', '241407467': 's_FOhoT3@shadow.com', '241407468': 's_87duIN@shadow.com', '241407469': 's_f7Wbt1@shadow.com', '241407470': 's_0rotjy@shadow.com', '241407471': 's_95vHay@shadow.com', '241407472': 's_xjCKlA@shadow.com', '241407473': 's_roJThz@shadow.com', '241407474': 's_cIoVmE@shadow.com', '241407475': 's_F7Kobr@shadow.com', '241407476': 's_Ye2HV6@shadow.com', '241407477': 's_Zq0esd@shadow.com', '241407478': 's_qtIoFr@shadow.com', '241407479': 's_9SUFAA@shadow.com', '241407480': 's_wWfMdw@shadow.com', '241407481': 's_fSjdY4@shadow.com', '241407482': 's_6COY94@shadow.com', '241407483': 's_Y4eWHx@shadow.com', '241407484': 's_oLysj9@shadow.com', '241407485': 's_6Sve8x@shadow.com', '241407486': 's_fai50t@shadow.com', '241407487': 's_LxFO8U@shadow.com', '241407488': 's_rha2wA@shadow.com', '241407489': 's_jb6wqN@shadow.com', '241407490': 's_16PqdV@shadow.com', '241407491': 's_I89v8b@shadow.com', '241407492': 's_kTP2tk@shadow.com', '241407493': 's_58S40h@shadow.com', '241407494': 's_8wiQXC@shadow.com', '241407495': 's_2n6PoJ@shadow.com', '241407496': 's_gsvhGu@shadow.com', '241407497': 's_njbBDy@shadow.com', '241407498': 's_xMzp0h@shadow.com', '241407499': 's_4g7YHw@shadow.com', '241407500': 's_cgbaLZ@shadow.com', '241407501': 's_8hDFBn@shadow.com', '241407502': 's_aOIRXe@shadow.com', '241407503': 's_vKHZZk@shadow.com', '241407504': 's_Mbo1lq@shadow.com', '241407505': 's_1RRDLt@shadow.com', '241407506': 's_YbKv8b@shadow.com', '241407507': 's_oLPWVd@shadow.com', '241407508': 's_1L5VHC@shadow.com', '241407509': 's_dt8GRU@shadow.com', '241407510': 's_q8Plkv@shadow.com', '241407511': 's_M4qo0a@shadow.com', '241407512': 's_8TOaIC@shadow.com', '241407513': 's_Axbdwo@shadow.com', '241407514': 's_DsgJyQ@shadow.com', '241407515': 's_KcE9m4@shadow.com', '241407516': 's_cxUoTi@shadow.com', '241407517': 's_yf5F2d@shadow.com', '241407518': 's_WWBin1@shadow.com', '241407519': 's_4hNpGq@shadow.com', '241407520': 's_Y0DdxY@shadow.com', '241407521': 's_q4AaZH@shadow.com', 
'241407522': 's_GnhyAn@shadow.com', '241407523': 's_ULfH0b@shadow.com', '241407524': 's_GUt0uv@shadow.com', '241407525': 's_nh7lJ6@shadow.com', '241407526': 's_dDXEx1@shadow.com', '241407527': 's_d6tSOl@shadow.com', '241407528': 's_ZUKIJB@shadow.com', '241407529': 's_YQizji@shadow.com'}
diff --git a/vest/data/topic_models.py b/vest/data/topic_models.py
index ea86011d20a5b3fffcaa34bde563cd361339155f..9036891b1a2948539d59a38a14c9d166fb47d16f 100644
--- a/vest/data/topic_models.py
+++ b/vest/data/topic_models.py
@@ -26,7 +26,7 @@ def get_edit_tag_id_list(topic_id):
 def get_tag_id_list(topic_id):
     try:
         tag_id_list = list(
-            TopicTag.objects.using(settings.SLAVE_DB_NAME).filter(topic_id=topic_id, tag_id=10332212).values_list(
+            TopicTag.objects.using(settings.SLAVE_DB_NAME).filter(topic_id=topic_id, tag_id=10328826).values_list(
                 "tag_id",
                 flat=True))
 
diff --git a/vest/follow/auto_follow_per_1d_by_post.py b/vest/follow/auto_follow_per_1d_by_post.py
new file mode 100644
index 0000000000000000000000000000000000000000..8af8e9fb2bd3033674a67ddd20cb0da320165f9a
--- /dev/null
+++ b/vest/follow/auto_follow_per_1d_by_post.py
@@ -0,0 +1,105 @@
+import pymysql
+import random
+import traceback
+import logging
+from threading import Thread
+from vest.request.auto_request import login, time_convs, follow
+from vest.request.auto_request import host, user, db, passwd
+from libs.error import logging_exception
+from libs.timelib import get_rand_time
+
+
+def get_commnet_id(numtime, numtime2, content_level_low=0, content_level_top=3):
+    pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
+    cursor = pc.cursor()
+
+    # After posting
+    cursor.execute(
+
+        "select t.user_id from topic t left join user_extra u on t.user_id = u.user_id "
+        "where u.is_shadow=0 and t.create_time > '%s' and t.create_time < '%s' "
+        "and t.content_level >= %s and t.content_level <=  %s " % (numtime, numtime2, str(content_level_low), str(content_level_top)))
+
+    res = cursor.fetchall()
+    return res and [i for i, in res] or []
+
+
+def batch_handle(auto_follow_list):
+    for user_id in auto_follow_list:
+        try:
+            cookies = login()
+            if cookies is not None:
+                # follow(cookies, user_id)
+                follow.apply_async(args=(cookies, user_id), eta=get_rand_time())
+
+        except:
+            pass
+
+
+def auto_follow_per_1d_by_post():
+    # Posting triggers automatic follower adds (daily job)
+    auto_follow_list = []
+    try:
+        # 0-3 star posts
+        # Posted 1 day ago: [2-6] followers
+        numtime1, numtime2 = time_convs(1, 1)
+        user_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=3)
+        for user_id in user_ids:
+            follow_num = random.randint(2, 6)
+            for i in range(follow_num):
+                auto_follow_list.append(user_id)
+
+        # Posted 2-15 days ago: [0-1] follower
+        numtime1, numtime2 = time_convs(2, 15)
+        user_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=3)
+        for user_id in user_ids:
+            follow_num = random.randint(0, 1)
+            for i in range(follow_num):
+                auto_follow_list.append(user_id)
+
+        # Posted 15+ days ago: [0-2] followers roughly every 11 days
+        numtime1, numtime2 = time_convs(2, 15)
+        user_ids = get_commnet_id('0', numtime2, content_level_low=0, content_level_top=3)
+        for user_id in user_ids:
+            follow_num = random.randint(1, 6)
+            if follow_num == 6:
+                auto_follow_list.append(user_id)
+
+        # 4-6 star posts
+        # Posted 1 day ago: [5-10] followers
+        numtime1, numtime2 = time_convs(1, 1)
+        user_ids = get_commnet_id(numtime2, numtime1, content_level_low=4, content_level_top=6)
+        for user_id in user_ids:
+            follow_num = random.randint(5, 10)
+            for i in range(follow_num):
+                auto_follow_list.append(user_id)
+
+        # Posted 2-15 days ago: [0-5] followers
+        numtime1, numtime2 = time_convs(2, 15)
+        user_ids = get_commnet_id(numtime2, numtime1, content_level_low=4, content_level_top=6)
+        for user_id in user_ids:
+            follow_num = random.randint(0, 5)
+            for i in range(follow_num):
+                auto_follow_list.append(user_id)
+
+        # Posted 15+ days ago: [0-2] followers roughly every 11 days
+        numtime1, numtime2 = time_convs(2, 15)
+        user_ids = get_commnet_id('0', numtime2, content_level_low=4, content_level_top=6)
+        for user_id in user_ids:
+            follow_num = random.randint(1, 6)
+            if follow_num == 6:
+                auto_follow_list.append(user_id)
+
+    except:
+        logging_exception()
+        logging.error("catch exception,main:%s" % traceback.format_exc())
+
+    logging.info('auto_follow_per_1d_by_post: len %s' % len(auto_follow_list))
+    print('auto_follow_per_1d_by_post: len %s' % len(auto_follow_list))
+
+    total = len(auto_follow_list)
+    limit = (total + 10) // 10
+    for start in range(0, total, limit):
+        batch = auto_follow_list[start:start + limit]
+        t = Thread(target=batch_handle, args=[batch])
+        t.start()
diff --git a/vest/follow/auto_follow_per_1d_by_regist.py b/vest/follow/auto_follow_per_1d_by_regist.py
new file mode 100644
index 0000000000000000000000000000000000000000..1bf83bf988bd65c877137ca4affa733e291fda5e
--- /dev/null
+++ b/vest/follow/auto_follow_per_1d_by_regist.py
@@ -0,0 +1,80 @@
+import pymysql
+import random
+import traceback
+import logging
+from threading import Thread
+from vest.request.auto_request import login, time_convs, follow
+from vest.request.auto_request import host, user, db, passwd
+from libs.error import logging_exception
+from libs.timelib import get_rand_time
+
+
+def get_commnet_id(numtime, numtime2):
+    pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
+    cursor = pc.cursor()
+
+    # After registration
+    cursor.execute(
+
+        "select  a.user_id from account_user a left join user_extra u on a.user_id = u.user_id "
+        "WHERE  a.create_time > '%s' and a.create_time < '%s' and u.is_shadow = 0 " % (numtime, numtime2))
+
+    res = cursor.fetchall()
+    return res and [i for i, in res] or []
+
+
+def batch_handle(auto_follow_list):
+    for user_id in auto_follow_list:
+        try:
+            cookies = login()
+            if cookies is not None:
+                # follow(cookies, user_id)
+                follow.apply_async(args=(cookies, user_id), eta=get_rand_time())
+
+        except:
+            pass
+
+
+def auto_follow_per_1d_by_regist():
+    # Registration triggers automatic follower adds (daily job)
+    auto_follow_list = []
+    try:
+        # Registered 1 day ago: [1-3] followers
+        numtime1, numtime2 = time_convs(1, 1)
+        user_ids = get_commnet_id(numtime2, numtime1)
+        for user_id in user_ids:
+            follow_num = random.randint(1, 3)
+            for i in range(follow_num):
+                auto_follow_list.append(user_id)
+
+
+        # Registered 2-10 days ago: [0-1] follower
+        numtime1, numtime2 = time_convs(2, 12)
+        user_ids = get_commnet_id(numtime2, numtime1)
+        for user_id in user_ids:
+            follow_num = random.randint(0, 1)
+            for i in range(follow_num):
+                auto_follow_list.append(user_id)
+
+        # Registered more than 10 days ago: [0-1] follower roughly every 12 days
+        numtime1, numtime2 = time_convs(1, 12)
+        user_ids = get_commnet_id('0', numtime2)
+        for user_id in user_ids:
+            follow_num = random.randint(1, 24)
+            if follow_num == 12:
+                auto_follow_list.append(user_id)
+
+    except:
+        logging_exception()
+        logging.error("catch exception,main:%s" % traceback.format_exc())
+
+
+    logging.info('auto_follow_per_1d_by_regist: len %s' % len(auto_follow_list))
+    print('auto_follow_per_1d_by_regist: len %s' % len(auto_follow_list))
+
+    total = len(auto_follow_list)
+    limit = (total + 10) // 10
+    for start in range(0, total, limit):
+        batch = auto_follow_list[start:start + limit]
+        t = Thread(target=batch_handle, args=[batch])
+        t.start()
diff --git a/vest/follow/auto_follow_per_2h_by_post_and_regist.py b/vest/follow/auto_follow_per_2h_by_post_and_regist.py
new file mode 100644
index 0000000000000000000000000000000000000000..786280dba94fb375bd512986254c040aef7134a3
--- /dev/null
+++ b/vest/follow/auto_follow_per_2h_by_post_and_regist.py
@@ -0,0 +1,75 @@
+import pymysql
+import random
+import traceback
+import logging
+from threading import Thread
+from vest.request.auto_request import login, time_conv_hour, follow
+from vest.request.auto_request import host, user, db, passwd
+from libs.error import logging_exception
+from libs.timelib import get_rand_time
+
+def get_commnet_id(numtime, numtime2, content_level_low=0, content_level_top=6):
+    pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
+    cursor = pc.cursor()
+
+    # After posting
+    cursor.execute(
+
+        "select t.user_id from topic t left join user_extra u on t.user_id = u.user_id "
+        "where u.is_shadow=0 and t.create_time > '%s' and t.create_time < '%s' "
+        "and t.content_level >= %s and t.content_level <=  %s " % (numtime, numtime2, str(content_level_low), str(content_level_top)))
+
+    res_post = cursor.fetchall()
+
+    # After registration
+    cursor.execute(
+
+        "select  a.user_id from account_user a left join user_extra u on a.user_id = u.user_id "
+        "WHERE  a.create_time > '%s' and a.create_time < '%s' and u.is_shadow = 0 " % (numtime, numtime2))
+
+    res_regist = cursor.fetchall()
+
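+    # Merge the two result sets: users who just posted and users who just registered get the same follower treatment.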
+    res = []
+    res.extend(res_regist)
+    res.extend(res_post)
+    return res and [i for i, in res] or []
+
+
+def batch_handle(auto_follow_list):
+    for user_id in auto_follow_list:
+        try:
+            cookies = login()
+            if cookies is not None:
+                # follow(cookies, user_id)
+                follow.apply_async(args=(cookies, user_id), eta=get_rand_time(hourup=1))
+
+        except:
+            pass
+
+
+def auto_follow_per_2h_by_post_and_regist():
+    # Posting or registration triggers automatic follower adds (2-hour job)
+    auto_follow_list = []
+    try:
+        # Posted or registered within the last 2 hours: [1-3] followers
+        numtime1, numtime2 = time_conv_hour(0, 2)
+        user_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=6)
+        for user_id in user_ids:
+            follow_num = random.randint(1, 3)
+            for i in range(follow_num):
+                auto_follow_list.append(user_id)
+
+
+    except:
+        logging_exception()
+        logging.error("catch exception,main:%s" % traceback.format_exc())
+
+    logging.info('auto_follow_per_2h_by_post_and_regist: len %s' % len(auto_follow_list))
+    print('auto_follow_per_2h_by_post_and_regist: len %s' % len(auto_follow_list))
+
+    total = len(auto_follow_list)
+    limit = (total + 10) // 10
+    for start in range(0, total, limit):
+        batch = auto_follow_list[start:start + limit]
+        t = Thread(target=batch_handle, args=[batch])
+        t.start()
diff --git a/vest/follow/auto_follow_per_5m_by_followed.py b/vest/follow/auto_follow_per_5m_by_followed.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ef4c798970a7060be724f1eb45a3f66b90673b9
--- /dev/null
+++ b/vest/follow/auto_follow_per_5m_by_followed.py
@@ -0,0 +1,40 @@
+import pymysql
+import traceback
+import logging
+from vest.request.auto_request import login, follow, time_conv_minute
+from vest.request.auto_request import host, user, db, passwd
+from vest.data.majia_user_ids import majia_user_ids_dict
+from libs.error import logging_exception
+from libs.timelib import get_rand_time
+
+
+def get_user_id(numtime):
+    pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
+    cursor = pc.cursor()
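+    # Follow relations created since numtime where the followed account is a shadow (puppet) user.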
+    cursor.execute(
+        "SELECT  f.user_id, f.follow_id FROM user_follow f left join user_extra e on f.follow_id = e.user_id "
+        "WHERE f.create_time >= '%s' and e.is_shadow = 1 " % numtime
+    )
+    data_comment = cursor.fetchall()
+    return data_comment and [i for i in data_comment] or []
+
+
+def auto_follow_per_5m_by_followed():
+    # If a puppet (马甲) account is followed by a real user, the puppet follows back, roughly 5 minutes after being followed
+    try:
+        numtime1, numtime2, c = time_conv_minute(1, 5)
+        users = get_user_id(numtime2)
+        try:
+            for followed in users:
+                user_id = followed[0]
+                cookies = login(str(majia_user_ids_dict.get(str(followed[1]))))
+                if cookies is not None:
+                    # follow(cookies, user_id)
+                    follow.apply_async(args=(cookies, user_id), eta=get_rand_time())
+
+        except:
+            pass
+
+    except:
+        logging_exception()
+        logging.error("catch exception,main:%s" % traceback.format_exc())
diff --git a/vest/pictorial/principal_online_comment1.py b/vest/pictorial/principal_online_comment1.py
index 0654da9ea077e0b89a6ccdcbc73bceaa0ade435e..41106605c24f51531e7afb8d0e6056bf7eac108a 100644
--- a/vest/pictorial/principal_online_comment1.py
+++ b/vest/pictorial/principal_online_comment1.py
@@ -59,6 +59,7 @@ def get_data(numtime1, numtime2):
 
 
 def principal_online_comment1():
+    # Automatically add comments to a pictorial based on its content and its existing comment count; source rows come from the community_pictorial_topic table
     try:
         logging.info("comment offline  pictorial")
         numtime1, numtime2, minute = time_conv_minute(240, 0)
diff --git a/vest/reply/auto_reply_per_1d_to_pictorial.py b/vest/reply/auto_reply_per_1d_to_pictorial.py
new file mode 100644
index 0000000000000000000000000000000000000000..18cc99dced9a0f4a2641cfa38696533e0d01ef33
--- /dev/null
+++ b/vest/reply/auto_reply_per_1d_to_pictorial.py
@@ -0,0 +1,98 @@
+import pymysql
+import random
+import traceback
+import logging
+from threading import Thread
+from vest.request.auto_request import login, pictorial_reply, get_majia, get_pictorial_comment, time_convs, \
+    judge_pictorial_info_get_comment
+
+from vest.request.auto_request import host, user, db, passwd
+from vest.data.topic_models import get_pictorial_tag_by_id
+from libs.timelib import get_rand_time
+
+"""
+New pictorial content (online pictorials only)
+Runs every half hour
+
+Within the past half hour, if
+
+1 new online topic was added (counting both puppet and ordinary users): insert {1,2} comments
+
+2-5 new online topics were added (counting both puppet and ordinary users): insert {2,3} comments
+
+more than 5 new online topics were added (counting both puppet and ordinary users): insert {3,5} comments
+
+(note: the comment authors must be puppet accounts)
+
+Within the past half hour, if
+
+the pictorial received new votes (from either puppet or ordinary users): insert {1,2} comments
+
+Comments are drawn from a dedicated pictorial comment list and are randomly attributed to puppet accounts
+
+Note: the topic's own author must not be the one commenting
+"""
+
+
+def get_data(numtime1, numtime2):
+    try:
+        pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
+        cursor = pc.cursor()
+        cursor.execute(
+            "SELECT id FROM community_pictorial WHERE is_online=1 and (create_time >= '%s' and create_time < '%s') " % (
+                numtime2, numtime1))
+        res = cursor.fetchall()
+
+        return res and [r for r, in res] or []
+
+    except:
+        logging.error("catch exception,get_data:%s" % traceback.format_exc())
+        return []
+
+
+def batch_handle(pictorial_id_list):
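+    # Log in with a puppet session, pick a comment for the pictorial, and schedule the reply at a random later time; failures are skipped.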
+    for pictorial_id in pictorial_id_list:
+        try:
+            cookies = login()
+            if cookies is not None:
+                comment = judge_pictorial_info_get_comment(pictorial_id)
+                # pictorial_reply(cookies, pictorial_id, comment)
+                pictorial_reply.apply_async(args=(cookies, pictorial_id, comment), eta=get_rand_time())
+
+        except:
+            pass
+
+
+def auto_reply_per_1d_to_pictorial():
+    # Automatically add comments to pictorials (daily job)
+    pictorial_id_list = []
+    try:
+        logging.info("comment offline  pictorial")
+        numtime1, numtime2 = time_convs(1, 1)
+        pictorial_ids = get_data(numtime1, numtime2)
+        for pictorial_id in pictorial_ids:
+            random_num = random.randint(0, 1)
+            for i in range(random_num):
+                pictorial_id_list.append(pictorial_id)
+
+        numtime1, numtime2 = time_convs(2, 6)
+        pictorial_ids = get_data(numtime1, numtime2)
+        for pictorial_id in pictorial_ids:
+            random_num = random.randint(0, 1)
+            for i in range(random_num):
+                pictorial_id_list.append(pictorial_id)
+
+    except:
+        logging.error("catch exception,main:%s" % traceback.format_exc())
+
+    logging.info('auto_reply_per_1d_to_pictorial: len %s' % len(pictorial_id_list))
+    print('auto_reply_per_1d_to_pictorial: len %s' % len(pictorial_id_list))
+
+    total = len(pictorial_id_list)
+    limit = (total + 10) // 10
+    for start in range(0, total, limit):
+        batch = pictorial_id_list[start:start + limit]
+        t = Thread(target=batch_handle, args=[batch])
+        t.start()
\ No newline at end of file
diff --git a/vest/reply/auto_reply_per_1d_to_topic.py b/vest/reply/auto_reply_per_1d_to_topic.py
new file mode 100644
index 0000000000000000000000000000000000000000..89686540c61ddf0a334cd16525b492e78dc8ab78
--- /dev/null
+++ b/vest/reply/auto_reply_per_1d_to_topic.py
@@ -0,0 +1,103 @@
+import pymysql
+import traceback
+import logging
+import json
+import random
+from threading import Thread
+from vest.request.auto_request import login, time_convs, get_answer_data, reply_answer, get_majia, \
+    set_reply_to_redis, judge_topic_info_get_comment, reply, reply2
+from vest.request.auto_request import host, user, db, passwd
+from libs.error import logging_exception
+from libs.timelib import get_rand_time
+
+
+def get_data(numtime, numtime2, content_level_low, content_level_top):
+    try:
+        pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
+        cursor = pc.cursor()
+        cursor.execute(
+            "SELECT id FROM topic WHERE is_online=1  and (create_time >= '%s' and create_time <= '%s' )"
+            "and content_level>= %s and content_level<= %s" % (
+                numtime2, numtime, str(content_level_low), str(content_level_top)))
+        res = cursor.fetchall()
+        return res and [r for r, in res] or []
+
+    except:
+        logging.error("catch exception,get_data:%s" % traceback.format_exc())
+        return []
+
+
+def batch_handle(topic_id_list):
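+    # If a canned comment matches the topic, schedule it directly; otherwise post a question-style comment now and schedule a second puppet's answer as a reply to it.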
+    for topic_id in topic_id_list:
+        try:
+            cookies = login()
+            if cookies is not None:
+                comment = judge_topic_info_get_comment(topic_id)
+                if comment:
+                    # reply(cookies, topic_id, comment)
+                    reply.apply_async(args=(cookies, topic_id, comment), eta=get_rand_time())
+                else:
+                    comment1, comment2 = get_answer_data()
+                    response = reply_answer(cookies, topic_id, comment1)
+                    response = json.loads(response)
+                    cookies = login()
+                    reply_id = response["data"]["id"]
+                    reply2.apply_async(args=(cookies, topic_id, comment2, reply_id), eta=get_rand_time())
+
+        except:
+            logging_exception()
+            logging.error("catch exception,main:%s" % traceback.format_exc())
+
+
+def auto_reply_per_1d_to_topic():
+    topic_id_list = []
+    try:
+        # 1-3 star and unstarred posts:
+        # Posted 1 day ago: [1-3] comments
+        numtime1, numtime2 = time_convs(1, 1)
+        topic_ids = get_data(numtime1, numtime2, 0, 3)
+        for topic_id in topic_ids:
+            random_num = random.randint(1, 3)
+            for num in range(random_num):
+                topic_id_list.append(topic_id)
+
+        # Posted 2-6 days ago: [0-1] comment
+        numtime1, numtime2 = time_convs(2, 6)
+        topic_ids = get_data(numtime1, numtime2, 0, 3)
+        for topic_id in topic_ids:
+            random_num = random.randint(0, 1)
+            for num in range(random_num):
+                topic_id_list.append(topic_id)
+
+        # 4-6 star posts:
+        # Posted 1 day ago: [1-6] comments
+        numtime1, numtime2 = time_convs(1, 1)
+        topic_ids = get_data(numtime1, numtime2, 4, 6)
+        for topic_id in topic_ids:
+            random_num = random.randint(1, 6)
+            for num in range(random_num):
+                topic_id_list.append(topic_id)
+
+        # Posted 2-6 days ago: [1-3] comments
+        numtime1, numtime2 = time_convs(2, 6)
+        topic_ids = get_data(numtime1, numtime2, 4, 6)
+        for topic_id in topic_ids:
+            random_num = random.randint(1, 3)
+            for num in range(random_num):
+                topic_id_list.append(topic_id)
+
+    except:
+        logging_exception()
+        logging.error("catch exception,main:%s" % traceback.format_exc())
+
+    logging.info('auto_reply_per_1d_to_topic: len %s' % len(topic_id_list))
+    print('auto_reply_per_1d_to_topic: len %s' % len(topic_id_list))
+
+    total = len(topic_id_list)
+    limit = (total + 10) // 10
+    for start in range(0, total, limit):
+        batch = topic_id_list[start:start + limit]
+        t = Thread(target=batch_handle, args=[batch])
+        t.start()
\ No newline at end of file
diff --git a/vest/reply/auto_reply_per_2h_to_topic.py b/vest/reply/auto_reply_per_2h_to_topic.py
new file mode 100644
index 0000000000000000000000000000000000000000..13e7362699585cab9656552e3576dc4361af1357
--- /dev/null
+++ b/vest/reply/auto_reply_per_2h_to_topic.py
@@ -0,0 +1,74 @@
+import pymysql
+import traceback
+import logging
+import json
+from threading import Thread
+from vest.request.auto_request import login, time_conv_hour, get_answer_data, reply_answer, get_majia, \
+    set_reply_to_redis, judge_topic_info_get_comment, reply, reply2
+from vest.request.auto_request import host, user, db, passwd
+from libs.error import logging_exception
+from libs.timelib import get_rand_time
+import random
+
+
+def get_data(numtime, numtime2):
+    try:
+        pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
+        cursor = pc.cursor()
+        cursor.execute(
+            "SELECT id FROM topic WHERE is_online=1  and (create_time >= '%s' and create_time <= '%s' )" % (
+                numtime2, numtime))
+        res = cursor.fetchall()
+        return res and [r for r, in res] or []
+
+    except:
+        logging.error("catch exception,get_data:%s" % traceback.format_exc())
+        return []
+
+
+def batch_handle(topic_id_list):
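+    # Same reply flow as the daily job, but delayed tasks are scheduled within the next hour (get_rand_time(hourup=1)).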
+    for topic_id in topic_id_list:
+        try:
+            cookies = login()
+            if cookies is not None:
+                comment = judge_topic_info_get_comment(topic_id)
+                if comment:
+                    # reply(cookies, topic_id, comment)
+                    reply.apply_async(args=(cookies, topic_id, comment), eta=get_rand_time(hourup=1))
+                else:
+                    comment1, comment2 = get_answer_data()
+                    response = reply_answer(cookies, topic_id, comment1)
+                    response = json.loads(response)
+                    cookies = login()
+                    reply_id = response["data"]["id"]
+                    reply2.apply_async(args=(cookies, topic_id, comment2, reply_id), eta=get_rand_time(hourup=1))
+
+        except:
+            logging_exception()
+            logging.error("catch exception,main:%s" % traceback.format_exc())
+
+
+def auto_reply_per_2h_to_topic():
+    topic_id_list = []
+    try:
+        numtime1, numtime2 = time_conv_hour(0, 2)
+        topic_ids = get_data(numtime1, numtime2)
+        for topic_id in topic_ids:
+            random_num = random.randint(1, 2)
+            for num in range(random_num):
+                topic_id_list.append(topic_id)
+
+    except:
+        logging_exception()
+        logging.error("catch exception,main:%s" % traceback.format_exc())
+
+
+    logging.info('auto_reply_per_2h_to_topic: len %s' % len(topic_id_list))
+    print('auto_reply_per_2h_to_topic: len %s' % len(topic_id_list))
+
+    total = len(topic_id_list)
+    limit = (total + 10) // 10
+    for start in range(0, total, limit):
+        batch = topic_id_list[start:start + limit]
+        t = Thread(target=batch_handle, args=[batch])
+        t.start()
\ No newline at end of file
diff --git a/vest/reply_answer/reply_comment1.py b/vest/reply_answer/reply_comment1.py
index 74b74ac307c7f17d380f68d11109116c2c419846..be067a7b473ec5b59ca45dd955298f8a6b50d729 100644
--- a/vest/reply_answer/reply_comment1.py
+++ b/vest/reply_answer/reply_comment1.py
@@ -10,13 +10,16 @@ def reply_comment1():
         redis_key1 = "cybertron:set_reply_id:one"
         redis_client = set_reply_to_redis()
         have_reply1 = redis_client.get(redis_key1)
-        result = json.loads(str(have_reply1, encoding="utf-8"))
-        if result:
-            for item in result:
-                majia_user_id = get_majia_reply(item["majia_user_id"])
-                cook = logins(majia_user_id)
-                reply2(cook, item["topic_id"], item["answer"], item["id"])
-            redis_client.delete(redis_key1)
+        if have_reply1 is not None:
+            result = json.loads(str(have_reply1, encoding="utf-8"))
+            if result:
+                for item in result:
+                    majia_user_id = get_majia_reply(item["majia_user_id"])
+                    cook = logins(majia_user_id)
+                    reply2(cook, item["topic_id"], item["answer"], item["id"])
+                redis_client.delete(redis_key1)
     except:
         logging_exception()
         logging.error("catch exception,main:%s" % traceback.format_exc())
diff --git a/vest/reply_answer/reply_comment2.py b/vest/reply_answer/reply_comment2.py
index 1f5661d09cc3096634d2a5542cdb758f04bdb2e7..50adbd200a3d0fc6f216f89168ef11e551490284 100644
--- a/vest/reply_answer/reply_comment2.py
+++ b/vest/reply_answer/reply_comment2.py
@@ -10,13 +10,16 @@ def reply_comment2():
         redis_key1 = "cybertron:set_reply_id:two"
         redis_client = set_reply_to_redis()
         have_reply1 = redis_client.get(redis_key1)
-        result = json.loads(str(have_reply1, encoding="utf-8"))
-        if result:
-            for item in result:
-                majia_user_id = get_majia_reply(item["majia_user_id"])
-                cook = logins(majia_user_id)
-                reply2(cook, item["topic_id"], item["answer"], item["id"])
-            redis_client.delete(redis_key1)
+        if have_reply1 is not None:
+            result = json.loads(str(have_reply1, encoding="utf-8"))
+            if result:
+                for item in result:
+                    majia_user_id = get_majia_reply(item["majia_user_id"])
+                    cook = logins(majia_user_id)
+                    reply2(cook, item["topic_id"], item["answer"], item["id"])
+                redis_client.delete(redis_key1)
     except:
         logging_exception()
         logging.error("catch exception,main:%s" % traceback.format_exc())
diff --git a/vest/reply_answer/reply_comment3.py b/vest/reply_answer/reply_comment3.py
index 0ec09e78182cbbfafa50fc820382fefd6462e7cd..5bfcf92cb98df12dca9b6d7848653ba72ff7e950 100644
--- a/vest/reply_answer/reply_comment3.py
+++ b/vest/reply_answer/reply_comment3.py
@@ -10,13 +10,16 @@ def reply_comment3():
         redis_key1 = "cybertron:set_reply_id:three"
         redis_client = set_reply_to_redis()
         have_reply1 = redis_client.get(redis_key1)
-        result = json.loads(str(have_reply1, encoding="utf-8"))
-        if result:
-            for item in result:
-                majia_user_id = get_majia_reply(item["majia_user_id"])
-                cook = logins(majia_user_id)
-                reply2(cook, item["topic_id"], item["answer"], item["id"])
-            redis_client.delete(redis_key1)
+        if have_reply1 is not None:
+            result = json.loads(str(have_reply1, encoding="utf-8"))
+            if result:
+                for item in result:
+                    majia_user_id = get_majia_reply(item["majia_user_id"])
+                    cook = logins(majia_user_id)
+                    reply2(cook, item["topic_id"], item["answer"], item["id"])
+                redis_client.delete(redis_key1)
     except:
         logging_exception()
         logging.error("catch exception,main:%s" % traceback.format_exc())
diff --git a/vest/reply_answer/reply_comment5.py b/vest/reply_answer/reply_comment5.py
index 2e8ae68b59bb028bab658c5d49b7e21295ac475c..878980732044c179f0274e9adc546a5645004246 100644
--- a/vest/reply_answer/reply_comment5.py
+++ b/vest/reply_answer/reply_comment5.py
@@ -10,13 +10,16 @@ def reply_comment5():
         redis_key1 = "cybertron:set_reply_id:five"
         redis_client = set_reply_to_redis()
         have_reply1 = redis_client.get(redis_key1)
-        result = json.loads(str(have_reply1, encoding="utf-8"))
-        if result:
-            for item in result:
-                majia_user_id = get_majia_reply(item["majia_user_id"])
-                cook = logins(majia_user_id)
-                reply2(cook, item["topic_id"], item["answer"], item["id"])
-            redis_client.delete(redis_key1)
+        if have_reply1 is not None:
+            result = json.loads(str(have_reply1, encoding="utf-8"))
+            if result:
+                for item in result:
+                    majia_user_id = get_majia_reply(item["majia_user_id"])
+                    cook = logins(majia_user_id)
+                    reply2(cook, item["topic_id"], item["answer"], item["id"])
+                redis_client.delete(redis_key1)
     except:
         logging_exception()
         logging.error("catch exception,main:%s" % traceback.format_exc())
diff --git a/vest/reply_answer/reply_comment7.py b/vest/reply_answer/reply_comment7.py
index 635dff3b146acc5a6f022463c1fa80177f526140..60affcc30b6f821bbff75808ac74368ad95b1344 100644
--- a/vest/reply_answer/reply_comment7.py
+++ b/vest/reply_answer/reply_comment7.py
@@ -10,13 +10,16 @@ def reply_comment7():
         redis_key1 = "cybertron:set_reply_id:seven"
         redis_client = set_reply_to_redis()
         have_reply1 = redis_client.get(redis_key1)
-        result = json.loads(str(have_reply1, encoding="utf-8"))
-        if result:
-            for item in result:
-                majia_user_id = get_majia_reply(item["majia_user_id"])
-                cook = logins(majia_user_id)
-                reply2(cook, item["topic_id"], item["answer"], item["id"])
-            redis_client.delete(redis_key1)
+        if have_reply1 is not None:
+            result = json.loads(str(have_reply1, encoding="utf-8"))
+            if result:
+                for item in result:
+                    majia_user_id = get_majia_reply(item["majia_user_id"])
+                    cook = logins(majia_user_id)
+                    reply2(cook, item["topic_id"], item["answer"], item["id"])
+                redis_client.delete(redis_key1)
     except:
         logging_exception()
         logging.error("catch exception,main:%s" % traceback.format_exc())
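The five `reply_comment*` entry points above differ only in the redis key they read. A hedged sketch of a shared helper they could all delegate to; `process_pending_replies` and its parameter list are assumptions, not existing code.

```python
import json
import logging
import traceback


def process_pending_replies(redis_client, redis_key, get_majia_reply, logins, reply2):
    # read the pending reply list for one key, replay each item, then clear the key
    try:
        pending = redis_client.get(redis_key)
        if pending is None:
            return
        items = json.loads(str(pending, encoding="utf-8"))
        if not items:
            return
        for item in items:
            majia_user_id = get_majia_reply(item["majia_user_id"])
            cook = logins(majia_user_id)
            reply2(cook, item["topic_id"], item["answer"], item["id"])
        redis_client.delete(redis_key)
    except Exception:
        logging.error("catch exception,process_pending_replies:%s" % traceback.format_exc())
```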
diff --git a/vest/request/auto_request.py b/vest/request/auto_request.py
index 95941c06588a03935525e763852d0d022c07b82f..04b1bac6aa2350cc196017fb690c185c3150df12 100644
--- a/vest/request/auto_request.py
+++ b/vest/request/auto_request.py
@@ -7,6 +7,7 @@ import logging
 import json
 import redis
 import smtplib
+from celery import shared_task
 from libs.cache import redis_client
 from email.mime.text import MIMEText
 from email.utils import formataddr
@@ -20,6 +21,7 @@ my_sender = 'lixiaofang@igengmei.com'
 my_pass = 'tg5AVKBB8jLQGBET'
 my_user6 = "lixiaofang@igengmei.com"
 
+auto_vote_url = settings.AUTO_VOTE_URL
 auto_click_url = settings.AUTO_CLICK_URL
 auto_reply_url = settings.AUTO_REPLY_URL
 auto_follow_url = settings.AUTO_FOLLOW_URL
@@ -82,9 +84,10 @@ def get_cookies(user_id):
         return None
 
 
-def login():
+def login(user_id=None):
     try:
-        user_id = get_majia()
+        if not user_id:
+            user_id = get_majia()
         logging.info("get user_id:%s" % user_id)
 
         cookies = get_cookies(user_id)
@@ -111,16 +114,32 @@ def logins(user_id):
         return None
 
 
+@shared_task
 def click(cookies_get, id):
+    # like/upvote a topic
     try:
-        post_dict = {
-            'type': 0,
-            'id': id
-        }
-        response = requests.post(url=auto_click_url,
-                                 cookies=cookies_get,
-                                 data=post_dict)
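+        # id is expected to be a (topic_id, pictorial_id) pair; pictorial_id may be empty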
+        topic_id = id[0]
+        pictorial_id = id[1]
+
+        if pictorial_id:
+            post_dict = {
+                "topic_id": topic_id,
+                "pictorial_id": pictorial_id
+            }
+            response = requests.post(url=auto_vote_url,
+                                     cookies=cookies_get,
+                                     data=post_dict)
 
+        else:
+            post_dict = {
+                'type': 0,
+                'id': topic_id
+            }
+            response = requests.post(url=auto_click_url,
+                                     cookies=cookies_get,
+                                     data=post_dict)
+
+        print(response.text)
         logging.info("response.text:%s" % response.text)
 
         get_error(response.text, "click", id)
@@ -129,6 +148,7 @@ def click(cookies_get, id):
         logging.error("catch exception,logins:%s" % traceback.format_exc())
 
 
+@shared_task
 def reply(cookies_get, id, content):
     try:
         post_dict = {
@@ -140,6 +160,7 @@ def reply(cookies_get, id, content):
                                  cookies=cookies_get,
                                  data=post_dict)
 
+        print(response.text)
         logging.info("response.text:%s" % response.text)
         get_error(response.text, "reply", id)
     except:
@@ -168,6 +189,16 @@ def time_conv_minute(minutest, minutest2):
         return None
 
 
+def time_conv_hour(hours1, hours2):
+    # return (now - hours1, now - hours2) as naive datetimes
+    try:
+        now = datetime.datetime.now()
+        yes_time = now - datetime.timedelta(hours=hours1)
+        yes_time2 = now - datetime.timedelta(hours=hours2)
+        return yes_time, yes_time2
+    except:
+        return None
+
+
 def time_now(minutest):
     try:
         now = datetime.datetime.now()
@@ -178,6 +209,11 @@ def time_now(minutest):
 
 
 def time_convs(numtime, numtime2):
+    '''
+    Example: numtime=1, numtime2=2 with now = 2019-10-09 10:35:50.231463
+    returns ('2019-10-08 23:59:59.000000', '2019-10-07 00:00:00.000000')
+    '''
     try:
         now = datetime.datetime.now()
         yes_time = now - datetime.timedelta(days=numtime)
@@ -190,7 +226,6 @@ def time_convs(numtime, numtime2):
 
         logging.info("get yes_time_str:%s" % yes_time_str)
         logging.info("get yes_time_str2:%s" % yes_time_str2)
-
         return yes_time_str, yes_time_str2
     except:
         return None
@@ -237,6 +272,7 @@ def get_comments():
         return None
 
 
+@shared_task
 def follow(cookies_get, id):
     try:
         post_dict = {
@@ -247,6 +283,7 @@ def follow(cookies_get, id):
                                  cookies=cookies_get,
                                  data=post_dict)
 
+        print(response.text)
         logging.info("response.text:%s" % response.text)
         get_error(response.text, "follow", id)
     except:
@@ -401,6 +438,7 @@ def set_reply_to_redis():
         logging.error("catch exception,logins:%s" % traceback.format_exc())
 
 
+@shared_task
 def reply2(cookies_get, id, content, replied_id):
     try:
         post_dict = {
@@ -413,6 +451,7 @@ def reply2(cookies_get, id, content, replied_id):
                                  cookies=cookies_get,
                                  data=post_dict)
 
+        print(response.text)
         logging.info("response.text:%s" % response.text)
         get_error(response.text, "reply2", id)
     except:
@@ -420,6 +459,7 @@ def reply2(cookies_get, id, content, replied_id):
         logging.error("catch exception,logins:%s" % traceback.format_exc())
 
 
+@shared_task
 def pictorial_reply(cookies_get, id, content):
     try:
         post_dict = {
@@ -431,6 +471,7 @@ def pictorial_reply(cookies_get, id, content):
                                  cookies=cookies_get,
                                  data=post_dict)
 
+        print(response.text)
         logging.info("response.text:%s" % response.text)
         get_error(response.text, "pictorial_reply", id)
     except:
@@ -574,8 +615,9 @@ def judge_topic_info_get_comment(topic_id):
                         comment = get_face_comment()
                     # 原始评论
                     else:
-
-                        comment = get_comment()
+                        # if this is left empty, a multi-level comment will be fetched instead
+                        if random.randint(0, 1):
+                            comment = get_comment()
 
         else:
             # 判断有没有商品信息
@@ -593,7 +635,9 @@ def judge_topic_info_get_comment(topic_id):
                     comment = get_face_comment()
                 # 原始评论
                 else:
-                    comment = get_comment()
+                    # if this is left empty, a multi-level comment will be fetched instead
+                    if random.randint(0, 1):
+                        comment = get_comment()
 
         logging.info("get judge_topic_info_get_comment:%s" % comment)
         return comment
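With the request helpers now declared as celery `shared_task`s, callers enqueue them rather than calling them inline. A minimal sketch of the intended usage, mirroring `auto_reply_per_2h_to_topic`; only `schedule_reply` itself is new here, the imports are the project's own.

```python
from libs.timelib import get_rand_time
from vest.request.auto_request import login, reply


def schedule_reply(topic_id, comment):
    cookies = login()
    if cookies is None:
        return
    # run on the celery worker at a random point within the next hour
    reply.apply_async(args=(cookies, topic_id, comment), eta=get_rand_time(hourup=1))
```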
diff --git a/vest/request/get_session.py b/vest/request/get_session.py
index 0e62cfced80f681d0fb3a94d26b828b86af7902e..eb887d5f9a4a5b758b67e2b5fa427d340bccf88b 100644
--- a/vest/request/get_session.py
+++ b/vest/request/get_session.py
@@ -16,6 +16,31 @@ def index_first():
         return None
 
 
+def get_cook_by_email(email):
+    try:
+        cookies = index_first()
+        post_dict = {
+            'account_type': 2,
+            'pwd': '123456',
+            'email': email
+        }
+        response = requests.post(
+            url=settings.LOGIN_URL,
+            data=post_dict,
+            cookies=cookies
+        )
+        headers = response.headers
+        print(response.text)
+        cook = headers['Set-Cookie'].split(";")
+        cook = cook[0].split('=')[1]
+        logging.info("response.text :%s" % response.text)
+        return cook
+    except:
+        logging.error("get_cook_by_email:%s" % traceback.format_exc())
+        return None
+
+
 def get_cook():
     try:
         data = open("/srv/apps/physical/vest/data/vest_user_email.txt")
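`get_cook_by_email` extracts the session id by string-splitting the `Set-Cookie` header. A slightly more defensive variant could read it from the response cookie jar instead; note the cookie name `"sessionid"` is an assumption here, not something the codebase confirms.

```python
import requests


def get_session_cookie(login_url, email, cookies=None):
    # log in with the shadow account and return its session cookie, or None if absent
    response = requests.post(
        url=login_url,
        data={"account_type": 2, "pwd": "123456", "email": email},
        cookies=cookies,
    )
    return response.cookies.get("sessionid")
```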
diff --git a/vest/test.py b/vest/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..1273f83d32e035306a9abc1592c199e516be6eeb
--- /dev/null
+++ b/vest/test.py
@@ -0,0 +1,181 @@
+import pymysql
+import datetime
+
+DATABASES = {
+    'default': {
+        'ENGINE': 'django.db.backends.mysql',
+        'NAME': 'alpha',
+        'USER': 'work',
+        'PASSWORD': 'Gengmei123!',
+        'HOST': '172.21.36.16',
+        'PORT': '3306',
+        'OPTIONS': {
+            "init_command": "SET foreign_key_checks = 0;",
+            "charset": "utf8mb4",
+        },
+    },
+    'master': {
+        'ENGINE': 'django.db.backends.mysql',
+        'NAME': 'alpha',
+        'USER': 'work',
+        'PASSWORD': 'Gengmei123!',
+        'HOST': '172.21.36.6',
+        'PORT': '3306',
+        'OPTIONS': {
+            "init_command": "SET foreign_key_checks = 0;",
+            "charset": "utf8mb4",
+        },
+    },
+    'face': {
+        'ENGINE': 'django.db.backends.mysql',
+        'NAME': 'face',
+        'USER': 'work',
+        'PASSWORD': 'Gengmei123!',
+        'HOST': '172.21.36.16',
+        'PORT': '3306',
+        # 'CONN_MAX_AGE': None,
+        'OPTIONS': {
+            "init_command": "SET foreign_key_checks = 0;",
+            "charset": "utf8mb4",
+        },
+    },
+    'commodity': {
+        'ENGINE': 'django.db.backends.mysql',
+        'NAME': 'commodity',
+        'USER': 'work',
+        'PASSWORD': 'Gengmei123!',
+        'HOST': '172.21.36.16',
+        'PORT': '3306',
+        # 'CONN_MAX_AGE': None,
+        'OPTIONS': {
+            "init_command": "SET foreign_key_checks = 0;",
+            "charset": "utf8mb4",
+        },
+    }
+
+}
+
+
+host = DATABASES['default']['HOST']
+
+user = DATABASES['default']['USER']
+
+port = DATABASES['default']['PORT']
+
+db = DATABASES['default']['NAME']
+
+passwd = DATABASES['default']['PASSWORD']
+
+def time_convs(numtime, numtime2):
+    '''
+    Example: numtime=1, numtime2=2 with now = 2019-10-09 10:35:50.231463
+    returns ('2019-10-08 23:59:59.000000', '2019-10-07 00:00:00.000000')
+    '''
+    try:
+        now = datetime.datetime.now()
+        yes_time = now - datetime.timedelta(days=numtime)
+        yes_time_str = yes_time.strftime('%Y-%m-%d')
+        yes_time_str = yes_time_str + ' 23:59:59.000000'
+
+        yes_time2 = now - datetime.timedelta(days=numtime2)
+        yes_time_str2 = yes_time2.strftime('%Y-%m-%d')
+        yes_time_str2 = yes_time_str2 + ' 00:00:00.000000'
+        return yes_time_str, yes_time_str2
+    except:
+        return None
+
+
+def get_commnet_id(numtime, numtime2):
+    pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
+    cursor = pc.cursor()
+
+    # 发贴后
+    cursor.execute(
+
+        "select  user_id from topic  WHERE  create_time > '%s' and create_time < '%s' " % (numtime, numtime2))
+
+    data_new_user = cursor.fetchall()
+
+    all_data = list(data_new_user)
+
+    user_id_list = []
+    for i in all_data:
+        # select is_shadow as well; user_id[0][1] below reads that column
+        cursor.execute(
+            "select user_id, is_shadow from user_extra where user_id = %s", (i[0],))
+        data = cursor.fetchall()
+        user_id = list(data)
+        # keep real users: is_shadow == 0, or no user_extra row at all
+        if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
+            user_id_list.append(i)
+
+    pc.close()
+    return user_id_list
+
+
+# numtime1, numtime2 = time_convs(1, 1)
+# user_id = get_commnet_id(numtime2, numtime1)
+
+
+
+
+
+# def time_conv_minute(minutest, minutest2):
+#     try:
+#         now = datetime.datetime.now()
+#         minute = datetime.datetime.now().minute
+#         yes_time = now - datetime.timedelta(minutes=minutest)
+#         yes_time2 = now - datetime.timedelta(minutes=minutest2)
+#         return yes_time, yes_time2, minute
+#     except:
+#         return None
+#
+# a, b, c = time_conv_minute(0, 5)
+#
+# print(a, b)
+
+
+import requests
+auto_follow_url = 'http://earth.gmapp.env/api/v1/follow'
+
+
+def follow(cookies_get, id):
+    post_dict = {
+        'type': 1,
+        'id': id
+    }
+    response = requests.post(url=auto_follow_url,
+                             cookies=cookies_get,
+                             data=post_dict)
+    print(response.text)
+
+
+def index_first():
+    try:
+        r1 = requests.get("http://earth.gmapp.env/api/account/login_pwd")
+        return r1.cookies.get_dict()
+    except:
+        return None
+
+
+def get_cook_by_email(email):
+    cookies = index_first()
+    post_dict = {
+        'account_type': 2,
+        'pwd': '123456',
+        'email': email
+    }
+    response = requests.post(
+        url="http://earth.gmapp.env/api/account/login_pwd",
+        data=post_dict,
+        cookies=cookies
+    )
+    headers = response.headers
+    # print(response.text)
+    cook = headers['Set-Cookie'].split(";")
+    cook = cook[0].split('=')[1]
+    return cook
+
+
+
+res = get_cook_by_email("s_gXRMNW@shadow.com")
+print(res)
\ No newline at end of file