Commit ab2eff9b authored by 吴升宇

merge majia

parents bac23d59 4e74803b
```
crontab:
    cp crontab.py /data/log/physical/app/crontab.py && python /data/log/physical/app/crontab.py && python /data/log/physical/app/crontabs.py
celery:
    celery -A physical worker -c 1 -Q vest -l debug
```
Strategy side of the service; ES access goes through `ESPerform`.
Container run command:
```
docker run -it -p 9999:9999 -d \
--volume=/etc/gm-config:/etc/gm-config \
--volume=/etc/resolv.conf:/etc/resolv.conf \
--volume=/srv/apps/physical/physical/settings_local.py:/srv/apps/physical/physical/settings_local.py \
--entrypoint="/bin/bash" physical-test:1.0 "-c" "source ~/.bashrc && celery -A physical worker -Q tapir-alpha --loglevel=DEBUG --maxtasksperchild 500 -c 62"
```
## CICD
Add a `.drone.yml` and configure the pipeline; pushing to a branch matching `like-pre/*` triggers it.
## Startup modes
### Mode 1: [program:physical-linucb] consumes Kafka data
A `while True` service that consumes Kafka data and maintains each device's tag_map in Redis.
```python
source /srv/envs/physical/bin/activate && python manage.py trans2es_data2es_parallel -S linucb
```
### Mode 2: [program:physical] RPC service
Starts the RPC service called by the backend; the interfaces are defined by the `bind` functions under the `search` directory.
```python
gunicorn gm_rpcd.wsgi:application --workers=1 --worker-class=gevent \
    --worker-connections=1024 \
    --bind=0.0.0.0:9999 \
    --user=gmuser \
    --chdir /srv/apps/physical/ \
    --timeout 600 \
    --log-level=debug \
    --error-logfile=/data/log/physical/app/gunicorn_error.log \
    --access-logfile=/data/log/physical/app/gunicorn_access.log
```
### Mode 3: [program:physical-celery] Celery worker
Syncs MySQL data to ES; entry point: injection.data_sync.tasks.write_to_es
Most of the sync work lives in the data pipeline and only a small part is here, so this service can be ignored for now; current requirements do not need it.
```python
celery -A physical worker -Q tapir-alpha --loglevel=DEBUG --maxtasksperchild 500 -c 62
```
### Mode 4: [program:physical-beat-celery] Celery beat scheduler (purpose unknown)
```python
celery -A physical beat
```
# Automated scripts
### Venus server-side scripts
Run via Celery beat.
Configuration: venus/setting/base.py
Scripts: venus/community/tasks/xxx.py
There are currently 5 scripts:
1. Save users' face-scan images daily: community.tasks.save_image_task.save_user_face_image
2. Auto-upvote comments daily at 03:00: community.tasks.crontab_reply_vote_task.crontab_reply_vote
3. Push interest tags to users: community.tasks.push_task.push_interest_tag_pictorial_to_user
4. Push experience tags to users: community.tasks.push_task.push_topic_draft_task
5. Push topic drafts: community.tasks.push_task.push_topic_draft_task
### Physical strategy-side scripts
Run on the Alp-Test-Cos-test001 server; inspect with `crontab -l`. The entries are generated by the following script:
```python
ontime_list = [
    "0 9 * * * source /srv/envs/physical/bin/activate && python /data/log/physical/app/crontab.py",
    "10 9 * * * source /srv/envs/physical/bin/activate && python /data/log/physical/app/crontabs.py",
    "0 9 * * * sh /data/log/cybertron/app/statistics_query.sh > /data/log/cybertron/app/statistics_query.log",
    "54 */2 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_click_per_2h_by_post",
    # "*/5 * * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m true_click_one",
    # "02,12,22,32,42,52 * * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m true_click_two",
    # "00,10,20,30,40,50 * * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m true_click_three",
    # "02,12,22,32,42,52 * * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m true_click_four",
    # "06,16,26,36,46,56 * * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m true_click_five",
    "0 14 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_star_urge",
    "0 10 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_urge1",
    "30 10 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_urge2",
    "0 10 * * 3 source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_lunch_app",
    "30 10 * * 3 source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_lunch_app2",
    # "*/5 * * * 1 source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_follow_per_5m_by_followed",
    "1 */2 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_follow_per_2h_by_post_and_regist",
    "0 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m get_login_session",
    "0 0 * * 3 source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m get_user_id",
    # "0 14,18,22 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m principal_online_comment1",
    "25 */2 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_reply_per_2h_to_topic",
    "0 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_click_per_1d_by_post",
    "1 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_follow_per_1d_by_regist",
    "2 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_follow_per_1d_by_post",
    "3 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_reply_per_1d_to_pictorial",
    "4 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_reply_per_1d_to_topic"
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m answer_reply1",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m answer_reply2",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m answer_reply3",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m answer_reply5",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m answer_reply7",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m yesterday_topic_reply",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m before_yesterday_topic_reply",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m three_days_ago_topic_reply",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m five_days_ago_topic_reply",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m seven_days_ago_reply",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m reply_comment1",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m reply_comment3",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m reply_comment2",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m reply_comment5",
    # "* * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m reply_comment7"
]

# Write the schedule to the file that later gets loaded into crontab.
data = open("/data/log/physical/app/conf.txt", "w")
# data = open("abc.txt", "w")
for var in ontime_list:
    data.write(var)
    data.write("\n")
data.close()
```
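The script only writes the schedule file; loading it into cron is a separate step. A minimal sketch, assuming the standard `crontab` CLI is available on the host (this install step is not shown in the repo):

```python
# Sketch (assumption, not in the repo): install the generated schedule
# into the current user's crontab via the standard crontab CLI.
import subprocess

subprocess.run(["crontab", "/data/log/physical/app/conf.txt"], check=True)
```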
......@@ -188,19 +188,22 @@ class ESPerform(object):
@classmethod
def get_search_results(cls, es_cli, sub_index_name, query_body, offset=0, size=10,
auto_create_index=False, doc_type="_doc", aggregations_query=False, is_suggest_request=False,
batch_search=False, routing=None):
batch_search=False, routing=None, if_official_index_name=False):
try:
assert (es_cli is not None)
official_index_name = cls.get_official_index_name(sub_index_name, "read")
index_exists = es_cli.indices.exists(official_index_name)
if not index_exists:
if not auto_create_index:
logging.error("index:%s is not existing,get_search_results error!" % official_index_name)
return None
else:
cls.create_index(es_cli, sub_index_name)
cls.put_index_mapping(es_cli, sub_index_name)
if if_official_index_name:
official_index_name = sub_index_name
else:
official_index_name = cls.get_official_index_name(sub_index_name, "read")
index_exists = es_cli.indices.exists(official_index_name)
if not index_exists:
if not auto_create_index:
logging.error("index:%s is not existing,get_search_results error!" % official_index_name)
return None
else:
cls.create_index(es_cli, sub_index_name)
cls.put_index_mapping(es_cli, sub_index_name)
logging.info("duan add,query_body:%s" % str(query_body).encode("utf-8"))
......@@ -401,6 +404,7 @@ class ESPerform(object):
{"term": {"content_level": 6}},
{"term": {"is_online": True}},
{"term": {"is_deleted": False}},
{"term": {"is_new_topic": False}},
{"terms": {"tag_list": tag_id}}
]
}
......@@ -411,9 +415,18 @@ class ESPerform(object):
}
},
"_source": {
"include": ["id", "user_id", "latest_reply_time"]
"include": ["id", "user_id", "latest_reply_time", "topic_ctr_30", "topic_ctr_all", "like_rate_30", "like_rate_all"]
},
"sort": [
{
"_script": {
"order": "desc",
"script": {
"inline": "10*doc['topic_ctr_30'].value+doc['like_rate_30'].value+2*doc['topic_ctr_all'].value+doc['like_rate_all'].value"
},
"type": "number"
}
},
{"latest_reply_time": {"order": "desc"}},
{"create_time_val": {"order": "desc"}},
{"language_type": {"order": "asc"}},
......@@ -439,17 +452,67 @@ class ESPerform(object):
topic_id_dict = dict()
for item in result_dict["hits"]:
topic_id_dict[str(item["_source"]["id"])] = item["_source"]["user_id"]
logging.info("get_tag_topic_list_dict:gyz" + str(q) + str(result_dict))
return topic_id_list, topic_id_dict
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return list(), dict()
topic_id_dict_latest_reply_time = list()
for item in result_dict["hits"]:
topic_id_dict_latest_reply_time.append([item["_source"]["id"], item["_source"]["latest_reply_time"]])
logging.info("topic_id_list:%s" % str(topic_id_dict))
@classmethod
def get_tag_new_topic_list(cls, tag_id, have_read_topic_id_list, size=10):
try:
functions_list = list()
for id in tag_id:
functions_list.append(
{
"filter": {"term": {"tag_list": id}},
"weight": 1
}
)
q = {
"query": {
"function_score": {
"query": {
"bool": {
"must": [
{"term": {"content_level": 6}},
{"term": {"is_online": True}},
{"term": {"is_deleted": False}},
{"term": {"is_new_topic": True}},
{"terms": {"tag_list": tag_id}}
]
}
},
"boost_mode": "sum",
"score_mode": "sum",
"functions": functions_list
}
},
"_source": {
"include": ["id", "user_id"]
},
"sort": [
{"latest_reply_time": {"order": "desc"}},
{"create_time_val": {"order": "desc"}},
{"language_type": {"order": "asc"}},
],
"collapse": {
"field": "user_id"
}
}
logging.info("linucb_tag_id_list_2_same_tagset_ids:" + str(tag_id))
logging.info("linucb_tag_id_list_2_same_tagset_ids_2_topics_detail:" + str(topic_id_dict_latest_reply_time))
if len(have_read_topic_id_list) > 0:
q["query"]["function_score"]["query"]["bool"]["must_not"] = {
"terms": {
"id": have_read_topic_id_list
}
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(), sub_index_name="topic-high-star",
query_body=q,
offset=0, size=size, routing="6")
return topic_id_list, topic_id_dict
topic_id_list = [item["_source"]["id"] for item in result_dict["hits"]]
return topic_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return list()
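Because `score_mode` is `sum` and each tag filter carries weight 1, topics matching more of the requested tags rank higher before the recency tie-breakers apply. A hedged usage sketch (ids are illustrative):

```python
# Sketch: fetch up to 10 unread new topics for a tag set; ids are made up.
new_topic_ids = ESPerform.get_tag_new_topic_list(
    tag_id=[101, 202],
    have_read_topic_id_list=[9001, 9002],  # excluded via the must_not clause
    size=10,
)
```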
......@@ -476,9 +539,18 @@ class ESPerform(object):
}
},
"_source": {
"include": ["id"]
"include": ["id", "real_user_activate_time", "create_time", "pictorial_ctr_30", "pictorial_ctr_all", "like_rate_30", "like_rate_all"]
},
"sort": [
{
"_script": {
"order": "desc",
"script": {
"inline": "10*doc['pictorial_ctr_30'].value+10*doc['like_rate_30'].value+3*doc['pictorial_ctr_all'].value+2*doc['like_rate_all'].value"
},
"type": "number"
}
},
{"real_user_activate_time": {"order": "desc"}},
{"create_time": {"order": "desc"}},
],
......@@ -495,8 +567,7 @@ class ESPerform(object):
offset=0, size=size)
pictorial_id_list = [item["_source"]["id"] for item in result_dict["hits"]]
# logging.info("topic_id_list:%s" % str(topic_id_list))
# topic_id_dict = [{str(item["_source"]["id"]):item["_source"]["user_id"]} for item in result_dict["hits"]]
logging.info("get_tag_pictorial_id_list:gyz" + str(q) + str(result_dict))
return pictorial_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
......
import pytz
import random
from datetime import datetime, timedelta
NOW = datetime.now()
def tzlc(dt, truncate_to_sec=True):
if dt is None:
return None
if truncate_to_sec:
dt = dt.replace(microsecond=0)
return pytz.timezone('Asia/Shanghai').localize(dt)
def eta_2_push_time(eta):
if eta:
eta = datetime.strptime(eta, '%Y-%m-%d %H:%M:%S')
eta = tzlc(eta)
return int((eta - datetime.fromtimestamp(0, pytz.timezone("UTC"))).total_seconds())
else:
push_time = None
return push_time
def get_rand_time(hourlow=0, hourup=13, minutelow=0, minuteup=60):
hours = random.randint(hourlow, hourup)
minutes = random.randint(minutelow, minuteup)
    # TODO: Redis automatically adds 8 hours, so hard-code subtracting 8 hours here for now
now_time = NOW + timedelta(hours=hours, minutes=minutes) - timedelta(hours=8)
time = eta_2_push_time(now_time.strftime("%Y-%m-%d %H:%M:%S"))
print(datetime.fromtimestamp(time))
return datetime.fromtimestamp(time)
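`get_rand_time` is what the vest scripts later in this diff use to spread Celery tasks over a random future window (note the hard-coded 8-hour offset above). A usage sketch; `click`, `cookies`, and `topic_id` come from those scripts:

```python
# Sketch: schedule the `click` task at a random time within roughly the next
# two hours (hourup=1 allows 0-1 hours plus 0-60 minutes), as the vest
# scripts do. cookies/topic_id are obtained as in their batch_handle().
from vest.request.auto_request import click
from libs.timelib import get_rand_time

click.apply_async(args=(cookies, topic_id), eta=get_rand_time(hourup=1))
```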
......@@ -8,6 +8,7 @@ import traceback
from libs.cache import redis_client
import json
import logging
from django.db import connection
def tzlc(dt, truncate_to_sec=True):
......@@ -58,3 +59,12 @@ def get_have_read_lin_pictorial_id_list(device_id,user_id,query_type):
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return list()
def is_connection_usable():
"""判断当前mysql的链接是否正常,不正常就close掉"""
try:
connection.connection.ping()
return True
except:
# logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return False
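A usage sketch mirroring how `get_same_tagset_ids` later in this diff uses it: ping the connection, and close a dead one so Django reconnects lazily on the next query.

```python
# Sketch: recover from a dropped MySQL connection before issuing ORM queries.
from django.db import connection
from libs.tools import is_connection_usable

if not is_connection_usable():
    connection.close()  # Django reopens the connection on the next query
```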
......@@ -2,6 +2,6 @@ from __future__ import unicode_literals, absolute_import, print_function
import pymysql
from _celery import app as celery_app
from vest import *
pymysql.install_as_MySQLdb()
#__all__ = ('celery_app',)
......@@ -9,6 +9,13 @@ class CeleryTaskRouter(object):
queue_task_map = {
"tapir-alpha": [
'injection.data_sync.tasks.write_to_es',
],
"vest": [
'vest.request.auto_request.click',
'vest.request.auto_request.reply',
'vest.request.auto_request.follow',
'vest.request.auto_request.reply2',
'vest.request.auto_request.pictorial_reply'
]
}
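For context, a class-based Celery router maps a task name to a queue via a `route_for_task` method; a minimal sketch of what this router presumably implements (the method body is elided from this diff):

```python
# Hedged sketch of the routing method; the actual implementation is not shown.
def route_for_task(self, task, args=None, kwargs=None):
    for queue_name, task_names in self.queue_task_map.items():
        if task in task_names:
            return {"queue": queue_name}
    return None  # None falls back to the default queue
```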
......
......@@ -12,6 +12,7 @@ from search.utils.common import *
from trans2es.models.pictorial import PictorialTopics
from libs.cache import redis_client
class TopicUtils(object):
@classmethod
......@@ -320,7 +321,7 @@ class TopicUtils(object):
'type': 'best_fields',
'operator': 'or',
'fields': ["content", "tag_name_list"],
"analyzer":"gm_default_index"
"analyzer": "gm_default_index"
}
query_function_score["boost_mode"] = "replace"
......@@ -335,13 +336,14 @@ class TopicUtils(object):
{"range": {"content_level": {"gte": 3, "lte": 6}}}
)
collection_redis_key_name="physical:official_tag_name_set"
collect_tag_name_set=set()
collection_redis_key_name = "physical:official_tag_name_set"
collect_tag_name_set = set()
body = {
'text': query,
'analyzer': "gm_default_search"
}
analyze_res = ESPerform.get_analyze_results(es_cli=ESPerform.get_cli(), sub_index_name="topic",query_body=body)
analyze_res = ESPerform.get_analyze_results(es_cli=ESPerform.get_cli(), sub_index_name="topic",
query_body=body)
for item in analyze_res["tokens"]:
token_word = item["token"]
# is_member = redis_client.sismember(collection_redis_key_name, token_word)
......@@ -357,10 +359,10 @@ class TopicUtils(object):
}
functions_list += [
{
"weight":10,
"filter":{
"term":{
"language_type":1
"weight": 10,
"filter": {
"term": {
"language_type": 1
}
}
},
......@@ -434,9 +436,9 @@ class TopicUtils(object):
topic_id_list.append(item["_source"]["id"])
if has_score:
return topic_id_list,ret_data_list,topic_score_list
return topic_id_list, ret_data_list, topic_score_list
else:
return topic_id_list,ret_data_list
return topic_id_list, ret_data_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
if has_score:
......@@ -444,7 +446,6 @@ class TopicUtils(object):
else:
return list(), list()
@classmethod
def userful_tag_topic_list(cls, user_id, have_read_topic_list, size,
index_type="topic-high-star", routing=None, useful_tag_list=[]):
......@@ -528,10 +529,9 @@ class TopicUtils(object):
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@classmethod
def get_linucb_topic_info_for_debug(cls, size,
index_type="topic-high-star", routing=None, linucb_topic_list=[]):
index_type="topic-high-star", routing=None, linucb_topic_list=[]):
try:
es_cli_obj = ESPerform.get_cli()
if len(linucb_topic_list) == 0:
......@@ -546,22 +546,22 @@ class TopicUtils(object):
}
}
q["_source"] = {
"includes": ["id","content_level","edit_tag_list"]
"includes": ["id", "content_level", "edit_tag_list"]
}
result_dict = ESPerform.get_search_results(es_cli_obj, sub_index_name=index_type, query_body=q,
size=size,
routing="6")
topic_id_dict = dict()
for item in result_dict["hits"]:
topic_id_dict.update({item["_source"]["id"]:{"content_level":item["_source"]["content_level"],"edit_tag_list":item["_source"]["edit_tag_list"]}})
topic_id_dict.update({item["_source"]["id"]: {"content_level": item["_source"]["content_level"],
"edit_tag_list": item["_source"]["edit_tag_list"]}})
return topic_id_dict
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@classmethod
def get_linucb_pictorial_info_for_debug(cls,size,linucb_pictorial_list = []):
def get_linucb_pictorial_info_for_debug(cls, size, linucb_pictorial_list=[]):
try:
q = {
"query": {
......@@ -593,8 +593,6 @@ class TopicUtils(object):
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return list()
@classmethod
def get_topic_detail_recommend_list(cls, user_id, topic_id, topic_tag_list, topic_pictorial_id, topic_user_id,
filter_topic_user_id, have_read_topic_list, offset, size, es_cli_obj=None,
......@@ -794,9 +792,9 @@ class TopicUtils(object):
# "includes": ["id", "pictorial_id", "user_id", "_score", "create_time", "content_level"]
q['sort'] = [
{"latest_reply_time": {"order": "desc"}},
# {"create_time": {"order": "desc"}}
]
{"latest_reply_time": {"order": "desc"}},
# {"create_time": {"order": "desc"}}
]
result_dict = ESPerform.get_search_results(es_cli_obj, sub_index_name=index_type, query_body=q, size=size,
routing=routing)
......@@ -1294,7 +1292,20 @@ class TopicUtils(object):
"query": v
}
})
elif k == "is_shadow":
if v == 0:
f.append({
"term": {
"is_shadow": False
}
})
else:
f.append({
"term": {
"is_shadow": True
}
})
elif k == "virtual_content_level":
f.append({
"match": {k: v}
......@@ -1328,7 +1339,19 @@ class TopicUtils(object):
}
}
})
elif k == "is_kol":
f.append({
"term": {
"user_is_kol": True
}
})
elif k == "is_edit":
f.append({
"term": {
"user_is_edit": True
}
})
# elif k == "pictorial_id":
# f.append({
# "nested": {
......
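The new `is_shadow` branch above turns a 0/1 query parameter into a boolean term filter, and `is_kol`/`is_edit` map onto the `user_is_kol`/`user_is_edit` fields. The two-way `is_shadow` branch reduces to a `bool()` cast; a sketch:

```python
# Sketch: equivalent of the two-branch is_shadow filter above.
def shadow_term(v):
    return {"term": {"is_shadow": bool(v)}}

assert shadow_term(0) == {"term": {"is_shadow": False}}
assert shadow_term(1) == {"term": {"is_shadow": True}}
```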
......@@ -303,7 +303,7 @@ def pictorial_topic(topic_id=-1, offset=0, size=10):
@bind("physical/search/pictorial_topic_sort")
def pictorial_topic_sort(pictorial_id=-1, offset=0, size=10, sort_type=PICTORIAL_TOPIC_SORT.HOT):
def pictorial_topic_sort(pictorial_id=-1, offset=0, size=10, sort_type=PICTORIAL_TOPIC_SORT.HOT, user_id=-1):
"""
    :remark Pictorial sort, popularity section
    Popularity is ordered by vote count, descending; ties are ordered by image-vote update time, oldest first
......@@ -420,7 +420,8 @@ def pictorial_topic_sort(pictorial_id=-1, offset=0, size=10, sort_type=PICTORIAL
pict_pictorial_ids_list = []
    # Get the ES client object
es_cli_obj = ESPerform.get_cli()
result_dict = ESPerform.get_search_results(es_cli_obj, "topic", q, offset, size)
result_dict = ESPerform.get_search_results(es_cli_obj, "topic", query_body=q, offset=offset, size=size)
# result_dict = ESPerform.get_search_results(es_cli=es_cli_obj, sub_index_name="mv-alpha-topic-prod-190905001", query_body=q, offset=offset, size=size,if_official_index_name=True)
# logging.info("get pictorial_topic_sort res:%s" % result_dict)
......
......@@ -11,9 +11,11 @@ from libs.cache import redis_client
from search.utils.common import *
from trans2es.models.tag import TopicTag, AccountUserTag, CommunityTagFollow, Tag
import time
from libs.tools import is_connection_usable
from trans2es.models.tag import CommunityTagSetRelation
from django.conf import settings
from libs.error import logging_exception
from django.db import connection
def get_highlight(fields=[]):
......@@ -235,27 +237,26 @@ def get_same_tagset_ids(tag_list):
:param tag_list:
:return:
"""
all_tag = list()
if isinstance(tag_list, int):
all_tag.append(tag_list)
else:
all_tag = tag_list
try:
all_tag = list()
if isinstance(tag_list, int):
all_tag.append(tag_list)
else:
all_tag = tag_list
if not is_connection_usable():
connection.close()
logging.error("unable mysql connection and close")
tag_set_list_id = list(
CommunityTagSetRelation.objects.filter(tag_id__in=all_tag, is_deleted=False).values_list("tag_set_id",
CommunityTagSetRelation.objects.using(settings.SLAVE1_DB_NAME).filter(tag_id__in=all_tag, is_deleted=False).values_list("tag_set_id",
flat=True))
tag_ids = list(
CommunityTagSetRelation.objects.filter(tag_set_id__in=tag_set_list_id, is_deleted=False).values_list(
CommunityTagSetRelation.objects.using(settings.SLAVE1_DB_NAME).filter(tag_set_id__in=tag_set_list_id, is_deleted=False).values_list(
"tag_id",
flat=True))
all_tag.extend(tag_ids)
logging.info("get_same_tagset_ids:%s" % str(all_tag))
return list(set(all_tag))
except:
logging_exception()
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
return all_tag
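`get_same_tagset_ids` expands the input tags to every tag sharing a tag set with them, now reading from the slave DB and recycling dead connections first. A worked sketch of the expansion on in-memory data:

```python
# Sketch: tags 5 and 9 share tag_set 2, so expanding [5] yields both.
relations = [(5, 2), (9, 2), (7, 3)]  # (tag_id, tag_set_id) rows

def expand(tags):
    tag_sets = {s for t, s in relations if t in tags}
    return sorted({t for t, s in relations if s in tag_sets} | set(tags))

print(expand([5]))  # -> [5, 9]
```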
import json
import logging
import datetime
from libs.cache import redis_client
from libs.error import logging_exception
from django.conf import settings
from trans2es.models.portrait_stat import LikeTopicStat
try:
ps = redis_client.pubsub()
ps.subscribe("new_topic_impression")
all_new_topic_impression_count_key = "all_new_topic_impression_count_key"
for item in ps.listen():
if item['type'] == 'message':
new_topic_ids = json.loads(item["data"])
            # The key may not exist on first run, so fall back to an empty dict.
            all_new_topic_impression_count = json.loads(redis_client.get(all_new_topic_impression_count_key) or "{}")
insert_topic_ids = []
for topic in new_topic_ids:
topic = str(topic)
if topic in all_new_topic_impression_count:
all_new_topic_impression_count[topic] = all_new_topic_impression_count[topic] + 1
if all_new_topic_impression_count[topic] > 100:
insert_topic_ids.append(int(topic))
all_new_topic_impression_count.pop(topic)
else:
all_new_topic_impression_count[topic] = 1
if insert_topic_ids:
insert_list = []
for topic in insert_topic_ids:
insert_list.append(
LikeTopicStat(create_time=datetime.datetime.today(), update_time=datetime.datetime.today(),
topic_id=topic, is_new_topic=0, topic_ctr_30=0.0, like_rate_30=0.0))
LikeTopicStat.objects.using(settings.MASTER_DB_NAME).bulk_create(insert_list)
logging.info("impressions count gt 100 topic ids" + str(insert_topic_ids))
json_all_new_topic_impression_count = json.dumps(all_new_topic_impression_count)
logging.info("all_new_topic_impression_count" + str(all_new_topic_impression_count))
redis_client.set(all_new_topic_impression_count_key, json_all_new_topic_impression_count)
except:
logging_exception()
logging.error("redis new topic sub error!")
......@@ -28,10 +28,13 @@ from trans2es.models.topic import Topic,ActionSumAboutTopic
from search.utils.common import *
from linucb.views.collect_data import CollectData
from injection.data_sync.tasks import sync_user_similar_score
import datetime
from trans2es.models.tag import Tag
from libs.cache import redis_client
from trans2es.models.tag import TopicTag
from libs.error import logging_exception
from trans2es.models.portrait_stat import LikeTopicStat
......@@ -215,6 +218,41 @@ class Command(BaseCommand):
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
def sub_redis_new_topic_ids(self):
try:
ps = redis_client.pubsub()
ps.subscribe("new_topic_impression")
all_new_topic_impression_count_key = "all_new_topic_impression_count_key"
for item in ps.listen():
if item['type'] == 'message':
new_topic_ids = json.loads(item["data"])
                    # The key may not exist on first run, so fall back to an empty dict.
                    all_new_topic_impression_count = json.loads(redis_client.get(all_new_topic_impression_count_key) or "{}")
insert_topic_ids = []
for topic in new_topic_ids:
topic = str(topic)
if topic in all_new_topic_impression_count:
all_new_topic_impression_count[topic] = all_new_topic_impression_count[topic] + 1
if all_new_topic_impression_count[topic] > 100:
insert_topic_ids.append(int(topic))
all_new_topic_impression_count.pop(topic)
else:
all_new_topic_impression_count[topic] = 1
if insert_topic_ids:
insert_list = []
for topic in insert_topic_ids:
insert_list.append(
LikeTopicStat(create_time=datetime.datetime.today(),
update_time=datetime.datetime.today(),
topic_id=topic, is_new_topic=0, topic_ctr_30=0.0, like_rate_30=0.0))
LikeTopicStat.objects.using(settings.MASTER_DB_NAME).bulk_create(insert_list)
logging.info("impressions count gt 100 topic ids" + str(insert_topic_ids))
json_all_new_topic_impression_count = json.dumps(all_new_topic_impression_count)
logging.info("all_new_topic_impression_count" + str(all_new_topic_impression_count))
redis_client.set(all_new_topic_impression_count_key, json_all_new_topic_impression_count)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
def handle(self, *args, **options):
try:
type_name_list = get_type_info_map().keys()
......@@ -241,5 +279,8 @@ class Command(BaseCommand):
if len(options["sync_type"]) and options["sync_type"]=="tagname":
self.sync_tag_collecction_name_set()
if len(options["sync_type"]) and options["sync_type"] == "new_topic_sub":
self.sub_redis_new_topic_ids()
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
......@@ -7,10 +7,12 @@ import traceback
import logging
from libs.es import ESPerform
from trans2es.type_info import get_type_info_map, TypeInfo
from vest.reply import true_comment_one, true_comment_two, true_comment_three, one_seven_topic_comment
from vest.reply import true_comment_one, true_comment_two, true_comment_three, one_seven_topic_comment,\
auto_reply_per_1d_to_pictorial, auto_reply_per_1d_to_topic, auto_reply_per_2h_to_topic
from vest.click import true_click_five, true_click_two, true_click_four, true_click_one, true_click_three, \
one_seven_star_topic
from vest.follow import auto_follow, auto_follow_new
one_seven_star_topic, auto_click_per_1d_by_post, auto_click_per_2h_by_post
from vest.follow import auto_follow, auto_follow_new, auto_follow_per_5m_by_followed, \
auto_follow_per_1d_by_regist, auto_follow_per_1d_by_post, auto_follow_per_2h_by_post_and_regist
from vest.urge import auto_star_urge, auto_lunch_app, auto_lunch_app2, auto_urge1, auto_urge2
from vest.fix import fix_no_comment_click
from vest.reply_answer import reply_comment2, reply_comment3, answer_reply2, answer_reply3, answer_reply1, \
......@@ -86,6 +88,10 @@ class Command(BaseCommand):
true_click_five.true_click_five()
if options["mvest"] == "one_seven_star_topic":
one_seven_star_topic.one_seven_star_topic()
if options["mvest"] == "auto_click_per_1d_by_post":
auto_click_per_1d_by_post.auto_click_per_1d_by_post()
if options["mvest"] == "auto_click_per_2h_by_post":
auto_click_per_2h_by_post.auto_click_per_2h_by_post()
# 评论
if options["mvest"] == "true_comment_one":
......@@ -114,6 +120,14 @@ class Command(BaseCommand):
auto_follow.auto_follow()
if options["mvest"] == "auto_follow_new":
auto_follow_new.auto_follow_new()
if options['mvest'] == "auto_follow_per_5m_by_followed":
auto_follow_per_5m_by_followed.auto_follow_per_5m_by_followed()
if options['mvest'] == "auto_follow_per_1d_by_regist":
auto_follow_per_1d_by_regist.auto_follow_per_1d_by_regist()
if options['mvest'] == "auto_follow_per_1d_by_post":
auto_follow_per_1d_by_post.auto_follow_per_1d_by_post()
if options['mvest'] == "auto_follow_per_2h_by_post_and_regist":
auto_follow_per_2h_by_post_and_regist.auto_follow_per_2h_by_post_and_regist()
# 补足
if options["mvest"] == "fix_no_comment_click":
......@@ -129,6 +143,15 @@ class Command(BaseCommand):
if options["mvest"] == "vest_click_reply":
vest_click_reply.vest_click_reply()
# 自动评论
if options["mvest"] == "auto_reply_per_1d_to_pictorial":
auto_reply_per_1d_to_pictorial.auto_reply_per_1d_to_pictorial()
if options["mvest"] == "auto_reply_per_2h_to_topic":
auto_reply_per_2h_to_topic.auto_reply_per_2h_to_topic()
if options["mvest"] == "auto_reply_per_1d_to_topic":
auto_reply_per_1d_to_topic.auto_reply_per_1d_to_topic()
# 榜单评论
if options["mvest"] == "principal_offline_comment1":
principal_offline_comment1.principal_offline_comment1()
......
......@@ -4,6 +4,11 @@
"id":{"type":"long"},
"is_online":{"type":"boolean"},//上线
"is_deleted":{"type":"boolean"},
"is_new_pictorial":{"type":"boolean"},
"pictorial_ctr_30":{"type": "double"},
"like_rate_30":{"type": "double"},
"pictorial_ctr_all":{"type": "double"},
"like_rate_all":{"type": "double"},
"is_recommend":{"type":"boolean"},
"name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_search"},
"description":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_search"},
......
import logging
import traceback
from django.db import models
from django.conf import settings
class LikeDeviceTagStat(models.Model):
    class Meta:
        verbose_name = u"30-day device tag stats"
        db_table = "like_device_tag_stat"
        unique_together = ("device_id", "tag_id")

    id = models.IntegerField(primary_key=True, verbose_name=u"primary key ID")
    create_time = models.DateTimeField(verbose_name=u'stat create time')
    update_time = models.DateTimeField(verbose_name=u'stat update time')
    device_id = models.CharField(verbose_name=u'device id', max_length=100)
    tag_id = models.IntegerField(verbose_name=u'tag id')
    tag_click_30 = models.IntegerField(verbose_name=u'clicks in the last 30 days')
    tag_impress_30 = models.IntegerField(verbose_name=u"impressions in the last 30 days")
    tag_ctr_30 = models.FloatField(verbose_name=u"CTR in the last 30 days")


class LikeTopicStat(models.Model):
    class Meta:
        verbose_name = u"30-day topic (answer) stats"
        db_table = "like_topic_stat"

    id = models.IntegerField(primary_key=True, verbose_name=u"primary key ID")
    create_time = models.DateTimeField(verbose_name=u'stat create time')
    update_time = models.DateTimeField(verbose_name=u'stat update time')
    topic_id = models.IntegerField(verbose_name=u'topic (answer) id', unique=True)
    is_new_topic = models.IntegerField(verbose_name=u"whether this is a new post")
    topic_ctr_30 = models.FloatField(verbose_name=u"topic CTR in the last 30 days")
    like_rate_30 = models.FloatField(verbose_name=u"topic like rate in the last 30 days")


class LikePictorialStat(models.Model):
    class Meta:
        verbose_name = u"30-day pictorial (question) stats"
        db_table = "like_pictorial_stat"

    id = models.IntegerField(primary_key=True, verbose_name=u"primary key ID")
    create_time = models.DateTimeField(verbose_name=u'stat create time')
    update_time = models.DateTimeField(verbose_name=u'stat update time')
    pictorial_id = models.IntegerField(verbose_name=u'pictorial (question) id', unique=True)
    is_new_pictorial = models.IntegerField(verbose_name=u"whether this is a new pictorial")
    pictorial_ctr_30 = models.FloatField(verbose_name=u"pictorial CTR in the last 30 days")
    like_rate_30 = models.FloatField(verbose_name=u"pictorial like rate in the last 30 days")
    pictorial_ctr_all = models.FloatField(verbose_name=u"all-time pictorial CTR")
    like_rate_all = models.FloatField(verbose_name=u"all-time pictorial like rate")
@classmethod
def get_pictorial_is_new(cls, pictorial_id):
try:
is_new_pictorial = LikePictorialStat.objects.using(settings.SLAVE1_DB_NAME).filter(pictorial_id=pictorial_id).values_list(
"is_new_pictorial", flat=True).first()
if is_new_pictorial == 0:
return False
else:
return True
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return True
@classmethod
def get_pictorial_ctr_30(cls, pictorial_id):
try:
pictorial_ctr_30 = LikePictorialStat.objects.using(settings.SLAVE1_DB_NAME).filter(pictorial_id=pictorial_id).values_list(
"pictorial_ctr_30", flat=True).first()
return pictorial_ctr_30
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return 0.0
@classmethod
def get_pictorial_like_rate_30(cls, pictorial_id):
try:
like_rate_30 = LikePictorialStat.objects.using(settings.SLAVE1_DB_NAME).filter(pictorial_id=pictorial_id).values_list(
"like_rate_30", flat=True).first()
return like_rate_30
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return 0.0
@classmethod
def get_pictorial_ctr_all(cls, pictorial_id):
try:
pictorial_ctr_all = LikePictorialStat.objects.using(settings.SLAVE1_DB_NAME).filter(pictorial_id=pictorial_id).values_list(
"pictorial_ctr_all", flat=True).first()
return pictorial_ctr_all
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return 0.0
@classmethod
def get_pictorial_like_rate_all(cls, pictorial_id):
try:
like_rate_all = LikePictorialStat.objects.using(settings.SLAVE1_DB_NAME).filter(pictorial_id=pictorial_id).values_list(
"like_rate_all", flat=True).first()
return like_rate_all
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return 0.0
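The five getters above differ only in the field they read; a sketch of a generic helper they could share (illustrative, not in the repo):

```python
def get_stat_field(pictorial_id, field, default=0.0):
    """Sketch (not in the repo): generic form of the five getters above."""
    try:
        val = (LikePictorialStat.objects.using(settings.SLAVE1_DB_NAME)
               .filter(pictorial_id=pictorial_id)
               .values_list(field, flat=True).first())
        return default if val is None else val
    except:
        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
        return default

# e.g. get_stat_field(42, "pictorial_ctr_30") replaces get_pictorial_ctr_30(42)
```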
......@@ -6,6 +6,7 @@ import logging
import traceback
from libs.tools import tzlc
from trans2es.models.topic import Topic
from trans2es.models.portrait_stat import LikePictorialStat
class PictorialTransfer(object):
......@@ -75,6 +76,11 @@ class PictorialTransfer(object):
res["real_user_activate_time"] = instance.get_real_user_activate_time()
res["edit_tag_id"] = instance.get_edit_tag_id()
res["edit_tag_name"] = instance.get_edit_tag_name(res["edit_tag_id"])
res["is_new_pictorial"] = LikePictorialStat.get_pictorial_is_new(instance.id)
res["pictorial_ctr_30"] = LikePictorialStat.get_pictorial_ctr_30(instance.id)
res["like_rate_30"] = LikePictorialStat.get_pictorial_like_rate_30(instance.id)
res["pictorial_ctr_all"] = LikePictorialStat.get_pictorial_ctr_all(instance.id)
res["like_rate_all"] = LikePictorialStat.get_pictorial_like_rate_all(instance.id)
logging.info("get data:%s" % res)
return res
except:
......
from .request import *
\ No newline at end of file
......@@ -28,6 +28,7 @@ def get_data(numtime, numtime2):
def yesterday_comment_one():
    # Automatically add comments to posts
try:
logging.info("comment one")
numtime, numtime2 = time_convs(1, 1)
......
import pymysql
import random
import traceback
import logging
from threading import Thread
from vest.request.auto_request import login, time_convs, click
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
from libs.timelib import get_rand_time
def get_commnet_id(numtime, numtime2, content_level_low=0, content_level_top=6):
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
topic_ids = []
cursor.execute(
"select t.id from topic t left join user_extra u on t.user_id = u.user_id "
"where u.is_shadow=0 and t.create_time > '%s' and t.create_time < '%s' and t.is_online=1 "
"and t.content_level >= %s and t.content_level <= %s " % (numtime, numtime2, str(content_level_low), str(content_level_top)))
res = cursor.fetchall()
for i, in res:
cursor.execute(
"SELECT pictorial_id FROM community_pictorial_topic where topic_id=%s limit 1" % i)
pictorial_id = cursor.fetchall()
if pictorial_id:
topic_ids.append((i, pictorial_id[0]))
else:
topic_ids.append((i, 0))
return topic_ids
def batch_handle(auto_click_list):
for topic_id in auto_click_list:
try:
cookies = login()
if cookies is not None:
click.apply_async(args=(cookies, topic_id), eta=get_rand_time())
# click(cookies, topic_id)
except:
pass
def auto_click_per_1d_by_post():
    # Post-triggered automatic likes
auto_click_list = []
try:
        # 1-3 stars and unstarred
        # Posts from 1 day ago: [2-6] likes
numtime1, numtime2 = time_convs(1, 1)
topic_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=3)
for topic_id in topic_ids:
click_num = random.randint(2, 6)
for i in range(click_num):
auto_click_list.append(topic_id)
        # Posts from 2-15 days ago: [0-2] likes
numtime1, numtime2 = time_convs(2, 15)
topic_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=3)
for topic_id in topic_ids:
click_num = random.randint(0, 2)
for i in range(click_num):
auto_click_list.append(topic_id)
        # Posts 15+ days old: [0-1] likes roughly every 6 days
numtime1, numtime2 = time_convs(2, 15)
topic_ids = get_commnet_id('0', numtime2, content_level_low=0, content_level_top=3)
for topic_id in topic_ids:
click_num = random.randint(1, 6)
if click_num == 6:
auto_click_list.append(topic_id)
        # 4-6 star posts
        # Posts from 1 day ago: [4-12] likes
numtime1, numtime2 = time_convs(1, 1)
topic_ids = get_commnet_id(numtime2, numtime1, content_level_low=4, content_level_top=6)
for topic_id in topic_ids:
click_num = random.randint(4, 12)
for i in range(click_num):
auto_click_list.append(topic_id)
        # Posts from 2-15 days ago: [0-6] likes
numtime1, numtime2 = time_convs(2, 15)
topic_ids = get_commnet_id(numtime2, numtime1, content_level_low=4, content_level_top=6)
for topic_id in topic_ids:
click_num = random.randint(0, 6)
for i in range(click_num):
auto_click_list.append(topic_id)
        # Posts 15+ days old: [0-3] likes roughly every 5 days
numtime1, numtime2 = time_convs(2, 15)
topic_ids = get_commnet_id('0', numtime2, content_level_low=4, content_level_top=6)
for topic_id in topic_ids:
click_num = random.randint(1, 3)
if click_num == 1:
auto_click_list.append(topic_id)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
logging.info('auto_click_per_1d_by_post: len %s' % len(auto_click_list))
print('auto_click_per_1d_by_post: len %s' % len(auto_click_list))
total = len(auto_click_list)
limit = (total + 10) // 10
for start in range(0, total, limit):
batch = auto_click_list[start:start + limit]
t = Thread(target=batch_handle, args=[batch])
t.start()
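The closing loop caps fan-out at roughly ten worker threads no matter how long the queue is; a worked example of the arithmetic:

```python
# Worked example of the batch split above: 95 queued likes give
# limit = (95 + 10) // 10 = 10, so ten threads each handle <= 10 items.
total = 95
limit = (total + 10) // 10
print(limit, list(range(0, total, limit)))  # 10 [0, 10, 20, ..., 90]
```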
import pymysql
import random
import traceback
import logging
from threading import Thread
from vest.request.auto_request import login, time_conv_hour, click
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
from libs.timelib import get_rand_time
def get_commnet_id(numtime, numtime2, content_level_low=0, content_level_top=6):
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
topic_ids = []
    # After posting
cursor.execute(
"select t.id from topic t left join user_extra u on t.user_id = u.user_id "
"where u.is_shadow=0 and t.create_time > '%s' and t.create_time < '%s' and t.is_online=1 "
"and t.content_level >= %s and t.content_level <= %s " % (numtime, numtime2, str(content_level_low), str(content_level_top)))
res = cursor.fetchall()
for i, in res:
cursor.execute(
"SELECT pictorial_id FROM community_pictorial_topic where topic_id=%s limit 1" % i)
pictorial_id = cursor.fetchall()
if pictorial_id:
topic_ids.append((i, pictorial_id[0]))
else:
topic_ids.append((i, 0))
return topic_ids
def batch_handle(auto_click_list):
for topic_id in auto_click_list:
try:
cookies = login()
if cookies is not None:
# click(cookies, topic_id)
click.apply_async(args=(cookies, topic_id), eta=get_rand_time(hourup=1))
except:
pass
def auto_click_per_2h_by_post():
    # Post-triggered automatic likes
auto_click_list = []
try:
        # Within 2 hours of posting: [1-3] likes
numtime1, numtime2 = time_conv_hour(0, 2)
topic_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=6)
for topic_id in topic_ids:
click_num = random.randint(1, 3)
for i in range(click_num):
auto_click_list.append(topic_id)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
logging.info('auto_click_per_2h_by_post: len %s' % len(auto_click_list))
print('auto_click_per_2h_by_post: len %s' % len(auto_click_list))
total = len(auto_click_list)
limit = (total + 10) // 10
for start in range(0, total, limit):
batch = auto_click_list[start:start + limit]
t = Thread(target=batch_handle, args=[batch])
t.start()
......@@ -26,7 +26,7 @@ def get_edit_tag_id_list(topic_id):
def get_tag_id_list(topic_id):
try:
tag_id_list = list(
TopicTag.objects.using(settings.SLAVE_DB_NAME).filter(topic_id=topic_id, tag_id=10332212).values_list(
TopicTag.objects.using(settings.SLAVE_DB_NAME).filter(topic_id=topic_id, tag_id=10328826).values_list(
"tag_id",
flat=True))
......
import pymysql
import random
import traceback
import logging
from threading import Thread
from vest.request.auto_request import login, time_convs, follow
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
from libs.timelib import get_rand_time
def get_commnet_id(numtime, numtime2, content_level_low=0, content_level_top=3):
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
    # After posting
cursor.execute(
"select t.user_id from topic t left join user_extra u on t.user_id = u.user_id "
"where u.is_shadow=0 and t.create_time > '%s' and t.create_time < '%s' "
"and t.content_level >= %s and t.content_level <= %s " % (numtime, numtime2, str(content_level_low), str(content_level_top)))
res = cursor.fetchall()
return res and [i for i, in res] or []
def batch_handle(auto_follow_list):
for user_id in auto_follow_list:
try:
cookies = login()
if cookies is not None:
# follow(cookies, user_id)
follow.apply_async(args=(cookies, user_id), eta=get_rand_time())
except:
pass
def auto_follow_per_1d_by_post():
    # Post-triggered automatic followers
auto_follow_list = []
try:
        # 0-3 stars
        # Posts from 1 day ago: [2-6] followers
numtime1, numtime2 = time_convs(1, 1)
user_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=3)
for user_id in user_ids:
follow_num = random.randint(2, 6)
for i in range(follow_num):
auto_follow_list.append(user_id)
        # Posts from 2-15 days ago: [0-1] followers
numtime1, numtime2 = time_convs(2, 15)
user_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=3)
for user_id in user_ids:
follow_num = random.randint(0, 1)
for i in range(follow_num):
auto_follow_list.append(user_id)
        # Posts 15+ days old: [0-2] followers roughly every 11 days
numtime1, numtime2 = time_convs(2, 15)
user_ids = get_commnet_id('0', numtime2, content_level_low=0, content_level_top=3)
for user_id in user_ids:
follow_num = random.randint(1, 6)
if follow_num == 6:
auto_follow_list.append(user_id)
        # 4-6 stars
        # Posts from 1 day ago: [5-10] followers
numtime1, numtime2 = time_convs(1, 1)
user_ids = get_commnet_id(numtime2, numtime1, content_level_low=4, content_level_top=6)
for user_id in user_ids:
follow_num = random.randint(5, 10)
for i in range(follow_num):
auto_follow_list.append(user_id)
        # Posts from 2-15 days ago: [0-5] followers
numtime1, numtime2 = time_convs(2, 15)
user_ids = get_commnet_id(numtime2, numtime1, content_level_low=4, content_level_top=6)
for user_id in user_ids:
follow_num = random.randint(0, 5)
for i in range(follow_num):
auto_follow_list.append(user_id)
        # Posts 15+ days old: [0-2] followers roughly every 11 days
numtime1, numtime2 = time_convs(2, 15)
user_ids = get_commnet_id('0', numtime2, content_level_low=4, content_level_top=6)
for user_id in user_ids:
follow_num = random.randint(1, 6)
if follow_num == 6:
auto_follow_list.append(user_id)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
logging.info('auto_follow_per_1d_by_post: len %s' % len(auto_follow_list))
print('auto_follow_per_1d_by_post: len %s' % len(auto_follow_list))
total = len(auto_follow_list)
limit = (total + 10) // 10
for start in range(0, total, limit):
batch = auto_follow_list[start:start + limit]
t = Thread(target=batch_handle, args=[batch])
t.start()
import pymysql
import random
import traceback
import logging
from threading import Thread
from vest.request.auto_request import login, time_convs, follow
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
from libs.timelib import get_rand_time
def get_commnet_id(numtime, numtime2):
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
    # After registration
cursor.execute(
"select a.user_id from account_user a left join user_extra u on a.user_id = u.user_id "
"WHERE a.create_time > '%s' and a.create_time < '%s' and u.is_shadow = 0 " % (numtime, numtime2))
res = cursor.fetchall()
return res and [i for i, in res] or []
def batch_handle(auto_follow_list):
for user_id in auto_follow_list:
try:
cookies = login()
if cookies is not None:
# follow(cookies, user_id)
follow.apply_async(args=(cookies, user_id), eta=get_rand_time())
except:
pass
def auto_follow_per_1d_by_regist():
    # Registration-triggered automatic followers
auto_follow_list = []
try:
        # Registered 1 day ago: [1-3] followers
        numtime1, numtime2 = time_convs(1, 1)
        user_ids = get_commnet_id(numtime2, numtime1)
for user_id in user_ids:
follow_num = random.randint(1, 3)
for i in range(follow_num):
auto_follow_list.append(user_id)
        # Registered 2-10 days ago: [0-1] followers
numtime1, numtime2 = time_convs(2, 12)
user_ids = get_commnet_id(numtime2, numtime1)
for user_id in user_ids:
follow_num = random.randint(0, 1)
for i in range(follow_num):
auto_follow_list.append(user_id)
        # Registered 10+ days ago: [0-1] followers roughly every 12 days
numtime1, numtime2 = time_convs(1, 12)
user_ids = get_commnet_id('0', numtime2)
for user_id in user_ids:
follow_num = random.randint(1, 24)
if follow_num == 12:
auto_follow_list.append(user_id)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
logging.info('auto_follow_per_1d_by_regist: len %s' % len(auto_follow_list))
print('auto_follow_per_1d_by_regist: len %s' % len(auto_follow_list))
total = len(auto_follow_list)
limit = (total + 10) // 10
for start in range(0, total, limit):
batch = auto_follow_list[start:start + limit]
t = Thread(target=batch_handle, args=[batch])
t.start()
import pymysql
import random
import traceback
import logging
from threading import Thread
from vest.request.auto_request import login, time_conv_hour, follow
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
from libs.timelib import get_rand_time
def get_commnet_id(numtime, numtime2, content_level_low=0, content_level_top=6):
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
    # After posting
cursor.execute(
"select t.user_id from topic t left join user_extra u on t.user_id = u.user_id "
"where u.is_shadow=0 and t.create_time > '%s' and t.create_time < '%s' "
"and t.content_level >= %s and t.content_level <= %s " % (numtime, numtime2, str(content_level_low), str(content_level_top)))
res_post = cursor.fetchall()
    # After registration
cursor.execute(
"select a.user_id from account_user a left join user_extra u on a.user_id = u.user_id "
"WHERE a.create_time > '%s' and a.create_time < '%s' and u.is_shadow = 0 " % (numtime, numtime2))
res_regist = cursor.fetchall()
res = []
res.extend(res_regist)
res.extend(res_post)
return res and [i for i, in res] or []
def batch_handle(auto_follow_list):
for user_id in auto_follow_list:
try:
cookies = login()
if cookies is not None:
# follow(cookies, user_id)
follow.apply_async(args=(cookies, user_id), eta=get_rand_time(hourup=1))
except:
pass
def auto_follow_per_2h_by_post_and_regist():
    # Post- and registration-triggered automatic followers
auto_follow_list = []
try:
        # Within 2 hours of posting or registering: [1-3] followers
numtime1, numtime2 = time_conv_hour(0, 2)
user_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=6)
for user_id in user_ids:
follow_num = random.randint(1, 3)
for i in range(follow_num):
auto_follow_list.append(user_id)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
logging.info('auto_follow_per_2h_by_post_and_regist: len %s' % len(auto_follow_list))
print('auto_follow_per_2h_by_post_and_regist: len %s' % len(auto_follow_list))
total = len(auto_follow_list)
limit = (total + 10) // 10
for start in range(0, total, limit):
batch = auto_follow_list[start:start + limit]
t = Thread(target=batch_handle, args=[batch])
t.start()
import pymysql
import traceback
import logging
from vest.request.auto_request import login, follow, time_conv_minute
from vest.request.auto_request import host, user, db, passwd
from vest.data.majia_user_ids import majia_user_ids_dict
from libs.error import logging_exception
from libs.timelib import get_rand_time
def get_user_id(numtime):
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT f.user_id, f.follow_id FROM user_follow f left join user_extra e on f.follow_id = e.user_id "
"WHERE f.create_time >= '%s' and e.is_shadow = 1 " % numtime
)
data_comment = cursor.fetchall()
return data_comment and [i for i in data_comment] or []
def auto_follow_per_5m_by_followed():
    # If a sockpuppet (majia) account gets followed by a user, it follows back, 5 minutes after being followed
try:
numtime1, numtime2, c = time_conv_minute(1, 5)
users = get_user_id(numtime2)
try:
            for row in users:
                user_id = row[0]
                cookies = login(str(majia_user_ids_dict.get(str(row[1]))))
if cookies is not None:
# follow(cookies, user_id)
follow.apply_async(args=(cookies, user_id), eta=get_rand_time())
except:
pass
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
......@@ -59,6 +59,7 @@ def get_data(numtime1, numtime2):
def principal_online_comment1():
    # Based on a pictorial's content and its comment counts, auto-add comments; data comes from the community_pictorial_topic table
try:
logging.info("comment offline pictorial")
numtime1, numtime2, minute = time_conv_minute(240, 0)
......
import pymysql
import random
import traceback
import logging
from threading import Thread
from vest.request.auto_request import login, pictorial_reply, get_majia, get_pictorial_comment, time_convs, \
judge_pictorial_info_get_comment
from vest.request.auto_request import host, user, db, passwd
from vest.data.topic_models import get_pictorial_tag_by_id
from libs.timelib import get_rand_time
"""
New content on a pictorial leaderboard (online content only)
Runs every half hour.
If, within the past half hour:
    1 new online post was added (from sockpuppet or regular users): insert {1,2} comments
    2-5 new online posts were added: insert {2,3} comments
    >5 new online posts were added: insert {3,5} comments
    (comment authors must be sockpuppet accounts)
If, within the past half hour:
    this leaderboard received new votes (from sockpuppet or regular users): insert {1,2} comments
Comments are drawn from a dedicated leaderboard-comment list and randomly tied to sockpuppet accounts.
The author of a post must never be the one commenting on it.
"""
def get_data(numtime1, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT id FROM community_pictorial WHERE is_online=1 and (create_time >= '%s' and create_time < '%s') " % (
numtime2, numtime1))
res = cursor.fetchall()
return res and [r for r, in res] or []
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def batch_handle(pictorial_id_list):
for pictorial_id in pictorial_id_list:
try:
cookies = login()
if cookies is not None:
comment = judge_pictorial_info_get_comment(pictorial_id)
# pictorial_reply(cookies, pictorial_id, comment)
pictorial_reply.apply_async(args=(cookies, pictorial_id, comment), eta=get_rand_time())
except:
pass
def auto_reply_per_1d_to_pictorial():
    # Auto-add replies to pictorials
pictorial_id_list = []
try:
logging.info("comment offline pictorial")
numtime1, numtime2 = time_convs(1, 1)
pictorial_ids = get_data(numtime1, numtime2)
for pictorial_id in pictorial_ids:
random_num = random.randint(0, 1)
for i in range(random_num):
pictorial_id_list.append(pictorial_id)
numtime1, numtime2 = time_convs(2, 6)
pictorial_ids = get_data(numtime1, numtime2)
for pictorial_id in pictorial_ids:
random_num = random.randint(0, 1)
for i in range(random_num):
pictorial_id_list.append(pictorial_id)
except:
logging.error("catch exception,main:%s" % traceback.format_exc())
    logging.info('auto_reply_per_1d_to_pictorial: len %s' % len(pictorial_id_list))
    print('auto_reply_per_1d_to_pictorial: len %s' % len(pictorial_id_list))
total = len(pictorial_id_list)
limit = (total + 10) // 10
for start in range(0, total, limit):
batch = pictorial_id_list[start:start + limit]
t = Thread(target=batch_handle, args=[batch])
t.start()
\ No newline at end of file
import pymysql
import traceback
import logging
import json
import random
from threading import Thread
from vest.request.auto_request import login, time_convs, get_answer_data, reply_answer, get_majia, \
set_reply_to_redis, judge_topic_info_get_comment, reply, reply2
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
from libs.timelib import get_rand_time
def get_data(numtime, numtime2, content_level_low, content_level_top):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT id FROM topic WHERE is_online=1 and (create_time >= '%s' and create_time <= '%s' )"
"and content_level>= %s and content_level<= %s" % (
numtime2, numtime, str(content_level_low), str(content_level_top)))
res = cursor.fetchall()
return res and [r for r, in res] or []
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def batch_handle(topic_id_list):
for topic_id in topic_id_list:
try:
cookies = login()
if cookies is not None:
comment = judge_topic_info_get_comment(topic_id)
if comment:
# reply(cookies, topic_id, comment)
reply.apply_async(args=(cookies, topic_id, comment), eta=get_rand_time())
else:
comment1, comment2 = get_answer_data()
response = reply_answer(cookies, topic_id, comment1)
response = json.loads(response)
cookies = login()
reply_id = response["data"]["id"]
reply2.apply_async(args=(cookies, topic_id, comment2, reply_id), eta=get_rand_time())
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
def auto_reply_per_1d_to_topic():
topic_id_list = []
try:
        # 1-3 stars and unstarred:
        # Posts from 1 day ago: [1-3] comments
numtime1, numtime2 = time_convs(1, 1)
topic_ids = get_data(numtime1, numtime2, 0, 3)
for topic_id in topic_ids:
random_num = random.randint(1, 3)
for num in range(random_num):
topic_id_list.append(topic_id)
        # Posts from 2-6 days ago: [0-1] comments
numtime1, numtime2 = time_convs(2, 6)
topic_ids = get_data(numtime1, numtime2, 0, 3)
for topic_id in topic_ids:
random_num = random.randint(0, 1)
for num in range(random_num):
topic_id_list.append(topic_id)
        # 4-6 stars:
        # Posts from 1 day ago: [1-6] comments
numtime1, numtime2 = time_convs(1, 1)
topic_ids = get_data(numtime1, numtime2, 4, 6)
for topic_id in topic_ids:
random_num = random.randint(1, 6)
for num in range(random_num):
topic_id_list.append(topic_id)
        # Posts from 2-6 days ago: [1-3] comments
numtime1, numtime2 = time_convs(2, 6)
topic_ids = get_data(numtime1, numtime2, 4, 6)
for topic_id in topic_ids:
random_num = random.randint(1, 3)
for num in range(random_num):
topic_id_list.append(topic_id)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
    logging.info('auto_reply_per_1d_to_topic: len %s' % len(topic_id_list))
    print('auto_reply_per_1d_to_topic: len %s' % len(topic_id_list))
total = len(topic_id_list)
limit = (total + 10) // 10
for start in range(0, total, limit):
batch = topic_id_list[start:start + limit]
t = Thread(target=batch_handle, args=[batch])
t.start()
\ No newline at end of file
import pymysql
import traceback
import logging
import json
from threading import Thread
from vest.request.auto_request import login, time_conv_hour, get_answer_data, reply_answer, get_majia, \
set_reply_to_redis, judge_topic_info_get_comment, reply, reply2
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
from libs.timelib import get_rand_time
import random
def get_data(numtime, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT id FROM topic WHERE is_online=1 and (create_time >= '%s' and create_time <= '%s' )" % (
numtime2, numtime))
res = cursor.fetchall()
return res and [r for r, in res] or []
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def batch_handle(topic_id_list):
for topic_id in topic_id_list:
try:
cookies = login()
if cookies is not None:
comment = judge_topic_info_get_comment(topic_id)
if comment:
# reply(cookies, topic_id, comment)
reply.apply_async(args=(cookies, topic_id, comment), eta=get_rand_time(hourup=1))
else:
comment1, comment2 = get_answer_data()
response = reply_answer(cookies, topic_id, comment1)
response = json.loads(response)
cookies = login()
reply_id = response["data"]["id"]
reply2.apply_async(args=(cookies, topic_id, comment2, reply_id), eta=get_rand_time(hourup=1))
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
def auto_reply_per_2h_to_topic():
topic_id_list = []
try:
numtime1, numtime2 = time_conv_hour(0, 2)
topic_ids = get_data(numtime1, numtime2)
for topic_id in topic_ids:
random_num = random.randint(1, 2)
for num in range(random_num):
topic_id_list.append(topic_id)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
    logging.info('auto_reply_per_2h_to_topic: len %s' % len(topic_id_list))
    print('auto_reply_per_2h_to_topic: len %s' % len(topic_id_list))
total = len(topic_id_list)
limit = (total + 10) // 10
for start in range(0, total, limit):
batch = topic_id_list[start:start + limit]
t = Thread(target=batch_handle, args=[batch])
t.start()
......@@ -10,13 +10,16 @@ def reply_comment1():
redis_key1 = "cybertron:set_reply_id:one"
redis_client = set_reply_to_redis()
have_reply1 = redis_client.get(redis_key1)
if have_reply1 is not None:
    result = json.loads(str(have_reply1, encoding="utf-8"))
    if result:
        for item in result:
            majia_user_id = get_majia_reply(item["majia_user_id"])
            cook = logins(majia_user_id)
            reply2(cook, item["topic_id"], item["answer"], item["id"])
    redis_client.delete(redis_key1)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
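# Hedged sketch of the producer side of this Redis handoff, inferred from the
# consumer above: set_reply_to_redis and the key name appear in the source, and
# the item fields are exactly the ones reply_comment1 reads back.
#
# items = [{"majia_user_id": majia_user_id, "topic_id": topic_id,
#           "answer": answer, "id": replied_comment_id}]
# redis_client = set_reply_to_redis()
# redis_client.set("cybertron:set_reply_id:one", json.dumps(items))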
......@@ -10,13 +10,16 @@ def reply_comment2():
redis_key1 = "cybertron:set_reply_id:two"
redis_client = set_reply_to_redis()
have_reply1 = redis_client.get(redis_key1)
if have_reply1 is not None:
    result = json.loads(str(have_reply1, encoding="utf-8"))
    if result:
        for item in result:
            majia_user_id = get_majia_reply(item["majia_user_id"])
            cook = logins(majia_user_id)
            reply2(cook, item["topic_id"], item["answer"], item["id"])
    redis_client.delete(redis_key1)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
......@@ -10,13 +10,16 @@ def reply_comment3():
redis_key1 = "cybertron:set_reply_id:three"
redis_client = set_reply_to_redis()
have_reply1 = redis_client.get(redis_key1)
if have_reply1 is not None:
    result = json.loads(str(have_reply1, encoding="utf-8"))
    if result:
        for item in result:
            majia_user_id = get_majia_reply(item["majia_user_id"])
            cook = logins(majia_user_id)
            reply2(cook, item["topic_id"], item["answer"], item["id"])
    redis_client.delete(redis_key1)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
......@@ -10,13 +10,16 @@ def reply_comment5():
redis_key1 = "cybertron:set_reply_id:five"
redis_client = set_reply_to_redis()
have_reply1 = redis_client.get(redis_key1)
if have_reply1 is not None:
    result = json.loads(str(have_reply1, encoding="utf-8"))
    if result:
        for item in result:
            majia_user_id = get_majia_reply(item["majia_user_id"])
            cook = logins(majia_user_id)
            reply2(cook, item["topic_id"], item["answer"], item["id"])
    redis_client.delete(redis_key1)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
......@@ -10,13 +10,16 @@ def reply_comment7():
redis_key1 = "cybertron:set_reply_id:seven"
redis_client = set_reply_to_redis()
have_reply1 = redis_client.get(redis_key1)
if have_reply1 is not None:
    result = json.loads(str(have_reply1, encoding="utf-8"))
    if result:
        for item in result:
            majia_user_id = get_majia_reply(item["majia_user_id"])
            cook = logins(majia_user_id)
            reply2(cook, item["topic_id"], item["answer"], item["id"])
    redis_client.delete(redis_key1)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
......@@ -7,6 +7,7 @@ import logging
import json
import redis
import smtplib
from celery import shared_task
from libs.cache import redis_client
from email.mime.text import MIMEText
from email.utils import formataddr
......@@ -20,6 +21,7 @@ my_sender = 'lixiaofang@igengmei.com'
my_pass = 'tg5AVKBB8jLQGBET'
my_user6 = "lixiaofang@igengmei.com"
auto_vote_url = settings.AUTO_VOTE_URL
auto_click_url = settings.AUTO_CLICK_URL
auto_reply_url = settings.AUTO_REPLY_URL
auto_follow_url = settings.AUTO_FOLLOW_URL
......@@ -82,9 +84,10 @@ def get_cookies(user_id):
return None
def login(user_id=None):
    try:
        # fall back to a random puppet ("majia") account when no user_id is given
        if not user_id:
            user_id = get_majia()
        logging.info("get user_id:%s" % user_id)
        cookies = get_cookies(user_id)
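# Usage sketch (hedged): both call styles work after this change.
#
# cookies = login()          # random majia (puppet) account, as before
# cookies = login("10086")   # explicit account id; "10086" is a made-up example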
......@@ -111,16 +114,32 @@ def logins(user_id):
return None
@shared_task
def click(cookies_get, id):
    # like/vote: id is now a (topic_id, pictorial_id) tuple; with a pictorial_id
    # the request goes to the vote endpoint, otherwise to the plain click endpoint
    try:
        topic_id = id[0]
        pictorial_id = id[1]
        if pictorial_id:
            post_dict = {
                "topic_id": topic_id,
                "pictorial_id": pictorial_id
            }
            response = requests.post(url=auto_vote_url,
                                     cookies=cookies_get,
                                     data=post_dict)
        else:
            post_dict = {
                'type': 0,
                'id': topic_id
            }
            response = requests.post(url=auto_click_url,
                                     cookies=cookies_get,
                                     data=post_dict)
print(response.text)
logging.info("response.text:%s" % response.text)
get_error(response.text, "click", id)
......@@ -129,6 +148,7 @@ def click(cookies_get, id):
logging.error("catch exception,logins:%s" % traceback.format_exc())
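# Dispatch sketch (hedged): callers now pack both ids into one tuple argument.
# apply_async/countdown are standard Celery; the ids here are made up.
#
# click.apply_async(args=(cookies, (topic_id, pictorial_id)), countdown=60)
# click.apply_async(args=(cookies, (topic_id, None)))   # plain click, no pictorial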
@shared_task
def reply(cookies_get, id, content):
try:
post_dict = {
......@@ -140,6 +160,7 @@ def reply(cookies_get, id, content):
cookies=cookies_get,
data=post_dict)
print(response.text)
logging.info("response.text:%s" % response.text)
get_error(response.text, "reply", id)
except:
......@@ -168,6 +189,16 @@ def time_conv_minute(minutest, minutest2):
return None
def time_conv_hour(minutest, minutest2):
    # despite the parameter names, both arguments are hours:
    # returns (now - minutest hours, now - minutest2 hours)
    try:
        now = datetime.datetime.now()
        yes_time = now - datetime.timedelta(hours=minutest)
        yes_time2 = now - datetime.timedelta(hours=minutest2)
        return yes_time, yes_time2
    except:
        return None
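# Worked example (clock value made up): with now = 2019-10-09 10:35,
# time_conv_hour(0, 2) returns (2019-10-09 10:35, 2019-10-09 08:35),
# which get_data() then uses as the (upper, lower) create_time bounds.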
def time_now(minutest):
try:
now = datetime.datetime.now()
......@@ -178,6 +209,11 @@ def time_now(minutest):
def time_convs(numtime, numtime2):
'''
params: 1, 2
datetime.now(): 2019-10-09 10:35:50.231463
return: 2019-10-08 23:59:59.000000 2019-10-07 00:00:00.000000
'''
try:
now = datetime.datetime.now()
yes_time = now - datetime.timedelta(days=numtime)
......@@ -190,7 +226,6 @@ def time_convs(numtime, numtime2):
logging.info("get yes_time_str:%s" % yes_time_str)
logging.info("get yes_time_str2:%s" % yes_time_str2)
return yes_time_str, yes_time_str2
except:
return None
......@@ -237,6 +272,7 @@ def get_comments():
return None
@shared_task
def follow(cookies_get, id):
try:
post_dict = {
......@@ -247,6 +283,7 @@ def follow(cookies_get, id):
cookies=cookies_get,
data=post_dict)
print(response.text)
logging.info("response.text:%s" % response.text)
get_error(response.text, "follow", id)
except:
......@@ -401,6 +438,7 @@ def set_reply_to_redis():
logging.error("catch exception,logins:%s" % traceback.format_exc())
@shared_task
def reply2(cookies_get, id, content, replied_id):
try:
post_dict = {
......@@ -413,6 +451,7 @@ def reply2(cookies_get, id, content, replied_id):
cookies=cookies_get,
data=post_dict)
print(response.text)
logging.info("response.text:%s" % response.text)
get_error(response.text, "reply2", id)
except:
......@@ -420,6 +459,7 @@ def reply2(cookies_get, id, content, replied_id):
logging.error("catch exception,logins:%s" % traceback.format_exc())
@shared_task
def pictorial_reply(cookies_get, id, content):
try:
post_dict = {
......@@ -431,6 +471,7 @@ def pictorial_reply(cookies_get, id, content):
cookies=cookies_get,
data=post_dict)
print(response.text)
logging.info("response.text:%s" % response.text)
get_error(response.text, "pictorial_reply", id)
except:
......@@ -574,8 +615,9 @@ def judge_topic_info_get_comment(topic_id):
comment = get_face_comment()
# original comment
else:
    # coin flip: when this leaves comment empty, the caller falls back to
    # posting a multi-level (nested) reply instead
    if random.randint(0, 1):
        comment = get_comment()
else:
# check whether the topic carries product (commodity) info
......@@ -593,7 +635,9 @@ def judge_topic_info_get_comment(topic_id):
comment = get_face_comment()
# original comment
else:
    # coin flip: an empty return makes the caller fall back to a
    # multi-level (nested) reply
    if random.randint(0, 1):
        comment = get_comment()
logging.info("get judge_topic_info_get_comment:%s" % comment)
return comment
......
......@@ -16,6 +16,31 @@ def index_first():
return None
def get_cook_by_email(email):
try:
cookies = index_first()
post_dict = {
'account_type': 2,
'pwd': '123456',
'email': email
}
response = requests.post(
url=settings.LOGIN_URL,
data=post_dict,
cookies=cookies
)
headers = response.headers
print(response.text)
cook = headers['Set-Cookie'].split(";")
cook = cook[0].split('=')[1]
logging.info("response.text :%s" % response.text)
return cook
except:
logging.error("index_first:%s" % traceback.format_exc())
return None
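# Hedged alternative to the Set-Cookie string surgery above: requests already
# parses cookies, so the first cookie's value can be read from the jar without
# any assumption about its name.
#
# cook = next(iter(response.cookies.get_dict().values()), None)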
def get_cook():
try:
data = open("/srv/apps/physical/vest/data/vest_user_email.txt")
......
import pymysql
import datetime
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'alpha',
'USER': 'work',
'PASSWORD': 'Gengmei123!',
'HOST': '172.21.36.16',
'PORT': '3306',
'OPTIONS': {
"init_command": "SET foreign_key_checks = 0;",
"charset": "utf8mb4",
},
},
'master': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'alpha',
'USER': 'work',
'PASSWORD': 'Gengmei123!',
'HOST': '172.21.36.6',
'PORT': '3306',
'OPTIONS': {
"init_command": "SET foreign_key_checks = 0;",
"charset": "utf8mb4",
},
},
'face': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'face',
'USER': 'work',
'PASSWORD': 'Gengmei123!',
'HOST': '172.21.36.16',
'PORT': '3306',
# 'CONN_MAX_AGE': None,
'OPTIONS': {
"init_command": "SET foreign_key_checks = 0;",
"charset": "utf8mb4",
},
},
'commodity': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'commodity',
'USER': 'work',
'PASSWORD': 'Gengmei123!',
'HOST': '172.21.36.16',
'PORT': '3306',
# 'CONN_MAX_AGE': None,
'OPTIONS': {
"init_command": "SET foreign_key_checks = 0;",
"charset": "utf8mb4",
},
}
}
host = DATABASES['default']['HOST']
user = DATABASES['default']['USER']
port = DATABASES['default']['PORT']
db = DATABASES['default']['NAME']
passwd = DATABASES['default']['PASSWORD']
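# Hedged sketch: these module-level values feed the raw pymysql connections used
# below; the try/finally close is an idiomatic variant, not what the code below does.
#
# pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
# try:
#     cursor = pc.cursor()
#     cursor.execute("SELECT id FROM topic WHERE is_online=1 LIMIT 1")
#     print(cursor.fetchall())
# finally:
#     pc.close()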
def time_convs(numtime, numtime2):
'''
params: 1, 2
datetime.now(): 2019-10-09 10:35:50.231463
return: 2019-10-08 23:59:59.000000 2019-10-07 00:00:00.000000
'''
try:
now = datetime.datetime.now()
yes_time = now - datetime.timedelta(days=numtime)
yes_time_str = yes_time.strftime('%Y-%m-%d')
yes_time_str = yes_time_str + ' 23:59:59.000000'
yes_time2 = now - datetime.timedelta(days=numtime2)
yes_time_str2 = yes_time2.strftime('%Y-%m-%d')
yes_time_str2 = yes_time_str2 + ' 00:00:00.000000'
return yes_time_str, yes_time_str2
except:
return None
def get_commnet_id(numtime, numtime2):
    pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
    cursor = pc.cursor()
    # authors who posted a topic inside the window
    cursor.execute(
        "select user_id from topic WHERE create_time > '%s' and create_time < '%s' " % (numtime, numtime2))
    data_new_user = cursor.fetchall()
    all_data = list(data_new_user)
    user_id_list = []
    for i in all_data:
        # also select is_shadow (and drop the is_shadow=0 filter) so the
        # user_id[0][1] check below has a column to read; the original selected
        # only user_id, which made that index an error
        cursor.execute(
            "select user_id, is_shadow from user_extra where user_id =" + str(i[0]))
        data = cursor.fetchall()
        user_id = list(data)
        # keep authors who are explicitly non-shadow or have no user_extra row
        if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
            user_id_list.append(i)
    pc.close()
    return user_id_list
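# Hedged single-query variant of the N+1 loop above; the LEFT JOIN keeps authors
# with no user_extra row, matching the len(user_id) == 0 branch, and returns the
# same one-column rows.
#
# cursor.execute(
#     "select t.user_id from topic t "
#     "left join user_extra e on e.user_id = t.user_id "
#     "where t.create_time > %s and t.create_time < %s "
#     "and (e.is_shadow = 0 or e.user_id is null)",
#     (numtime, numtime2))
# user_id_list = list(cursor.fetchall())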
# numtime1, numtime2 = time_convs(1, 1)
# user_id = get_commnet_id(numtime2, numtime1)
# def time_conv_minute(minutest, minutest2):
# try:
# now = datetime.datetime.now()
# minute = datetime.datetime.now().minute
# yes_time = now - datetime.timedelta(minutes=minutest)
# yes_time2 = now - datetime.timedelta(minutes=minutest2)
# return yes_time, yes_time2, minute
# except:
# return None
#
# a, b, c = time_conv_minute(0, 5)
#
# print(a, b)
import requests
auto_follow_url = 'http://earth.gmapp.env/api/v1/follow'
def follow(cookies_get, id):
post_dict = {
'type': 1,
'id': id
}
response = requests.post(url=auto_follow_url,
cookies=cookies_get,
data=post_dict)
print(response.text)
def index_first():
try:
r1 = requests.get("http://earth.gmapp.env/api/account/login_pwd")
return r1.cookies.get_dict()
except:
return None
def get_cook_by_email(email):
cookies = index_first()
post_dict = {
'account_type': 2,
'pwd': '123456',
'email': email
}
response = requests.post(
url="http://earth.gmapp.env/api/account/login_pwd",
data=post_dict,
cookies=cookies
)
headers = response.headers
# print(response.text)
cook = headers['Set-Cookie'].split(";")
cook = cook[0].split('=')[1]
return cook
res = get_cook_by_email("s_gXRMNW@shadow.com")
print(res)