Commit 66c78b42 authored by 吴升宇's avatar 吴升宇

Merge branch 'master' of git.wanmeizhensuo.com:alpha/physical into like-pre/r01

parents e146f846 ab172f78
Pipeline #3954 canceled with stage
...@@ -4,7 +4,7 @@ __pycache__/ ...@@ -4,7 +4,7 @@ __pycache__/
*~ *~
# C extensions # C extensions
*.so *.so
venv
# Distribution / packaging # Distribution / packaging
.Python .Python
.vscode .vscode
......
crontab: crontab:
cp crontab.py /data/log/physical/app/crontab.py && python /data/log/physical/app/crontab.py && python /data/log/physical/app/crontabs.py cp crontab.py /data/log/physical/app/crontab.py && python /data/log/physical/app/crontab.py && python /data/log/physical/app/crontabs.py
celery: celery:
celery -A physical worker -c 1 -Q tapir-alpha -l debug --max-tasks-per-child == 500 celery -A physical worker -c 1 -Q vest -l debug
...@@ -4,7 +4,7 @@ ontime_list = [ ...@@ -4,7 +4,7 @@ ontime_list = [
"0 9 * * * source /srv/envs/physical/bin/activate && python /data/log/physical/app/crontab.py", "0 9 * * * source /srv/envs/physical/bin/activate && python /data/log/physical/app/crontab.py",
"10 9 * * * source /srv/envs/physical/bin/activate && python /data/log/physical/app/crontabs.py", "10 9 * * * source /srv/envs/physical/bin/activate && python /data/log/physical/app/crontabs.py",
"0 9 * * * sh /data/log/cybertron/app/statistics_query.sh > /data/log/cybertron/app/statistics_query.log", "0 9 * * * sh /data/log/cybertron/app/statistics_query.sh > /data/log/cybertron/app/statistics_query.log",
"54 */2 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_click_per_2h_by_post", "54 */1 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_click_per_2h_by_post",
# "*/5 * * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m true_click_one", # "*/5 * * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m true_click_one",
# "02,12,22,32,42,52 * * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es-m true_click_two", # "02,12,22,32,42,52 * * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es-m true_click_two",
# "00,10,20,30,40,50 * * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m true_click_three", # "00,10,20,30,40,50 * * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m true_click_three",
...@@ -16,11 +16,11 @@ ontime_list = [ ...@@ -16,11 +16,11 @@ ontime_list = [
"0 10 * * 3 source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_lunch_app", "0 10 * * 3 source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_lunch_app",
"30 10 * * 3 source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_lunch_app2", "30 10 * * 3 source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_lunch_app2",
# "*/5 * * * 1 source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_follow_per_5m_by_followed", # "*/5 * * * 1 source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_follow_per_5m_by_followed",
"1 */2 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_follow_per_2h_by_post_and_regist", "1 */1 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_follow_per_2h_by_post_and_regist",
"0 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m get_login_session", "0 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m get_login_session",
"0 0 * * 3 source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m get_user_id", "0 0 * * 3 source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m get_user_id",
# "0 14,18,22 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m principal_online_comment1", # "0 14,18,22 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m principal_online_comment1",
"25 */2 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_reply_per_2h_to_topic", "25 */1 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_reply_per_2h_to_topic",
"0 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_click_per_1d_by_post", "0 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_click_per_1d_by_post",
"1 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_follow_per_1d_by_regist", "1 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_follow_per_1d_by_regist",
"2 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_follow_per_1d_by_post", "2 9 * * * source /srv/envs/physical/bin/activate && cd /srv/apps/physical && python manage.py trans2es_mapping2es -m auto_follow_per_1d_by_post",
......
...@@ -27,7 +27,7 @@ def get_rand_time(hourlow=0, hourup=13, minutelow=0, minuteup=60): ...@@ -27,7 +27,7 @@ def get_rand_time(hourlow=0, hourup=13, minutelow=0, minuteup=60):
hours = random.randint(hourlow, hourup) hours = random.randint(hourlow, hourup)
minutes = random.randint(minutelow, minuteup) minutes = random.randint(minutelow, minuteup)
# todo redis会自动给加8个小时,所以这边先写死减少8小时 # todo redis会自动给加8个小时,所以这边先写死减少8小时
now_time = NOW + timedelta(hours=hours, minutes=minutes) - timedelta(hours=8) now_time = NOW + timedelta(hours=hours, minutes=minutes)
time = eta_2_push_time(now_time.strftime("%Y-%m-%d %H:%M:%S")) time = eta_2_push_time(now_time.strftime("%Y-%m-%d %H:%M:%S"))
print(datetime.fromtimestamp(time, pytz.timezone('Asia/Shanghai'))) print(datetime.fromtimestamp(time, pytz.timezone('Asia/Shanghai')))
return datetime.fromtimestamp(time, pytz.timezone('Asia/Shanghai')) return datetime.fromtimestamp(time, pytz.timezone('Asia/Shanghai'))
......
This diff is collapsed.
...@@ -13,13 +13,6 @@ app = Celery('physical') ...@@ -13,13 +13,6 @@ app = Celery('physical')
# - namespace='CELERY' means all celery-related configuration keys # - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix. # should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY') app.config_from_object('django.conf:settings', namespace='CELERY')
app.conf.ONCE = {
'backend': 'celery_once.backends.Redis',
'settings': {
'url': settings.CELERY_BROKER_URL,
'default_timeout': 60 * 60
}
}
# Load task modules from all registered Django app configs. # Load task modules from all registered Django app configs.
......
...@@ -9,6 +9,8 @@ class CeleryTaskRouter(object): ...@@ -9,6 +9,8 @@ class CeleryTaskRouter(object):
queue_task_map = { queue_task_map = {
"tapir-alpha": [ "tapir-alpha": [
'injection.data_sync.tasks.write_to_es', 'injection.data_sync.tasks.write_to_es',
],
"vest": [
'vest.request.auto_request.click', 'vest.request.auto_request.click',
'vest.request.auto_request.reply', 'vest.request.auto_request.reply',
'vest.request.auto_request.follow', 'vest.request.auto_request.follow',
......
...@@ -14,7 +14,6 @@ gevent==1.3.7 ...@@ -14,7 +14,6 @@ gevent==1.3.7
pypinyin==0.34.1 pypinyin==0.34.1
numpy==1.16.2 numpy==1.16.2
lz4==2.1.6 lz4==2.1.6
celery_once==3.0.1
git+ssh://git@git.wanmeizhensuo.com/backend/gm-rpcd.git@master git+ssh://git@git.wanmeizhensuo.com/backend/gm-rpcd.git@master
git+ssh://git@git.wanmeizhensuo.com/backend/helios.git@master git+ssh://git@git.wanmeizhensuo.com/backend/helios.git@master
......
...@@ -16,6 +16,7 @@ from trans2es.models.tag import CommunityTagSetRelation ...@@ -16,6 +16,7 @@ from trans2es.models.tag import CommunityTagSetRelation
from django.conf import settings from django.conf import settings
from libs.error import logging_exception from libs.error import logging_exception
from django.db import connection from django.db import connection
from trans2es.models.account_reg_extra import AccountRegExtra
def get_highlight(fields=[]): def get_highlight(fields=[]):
...@@ -143,6 +144,27 @@ def choice_pictorial_push_tag(device_id, user_id): ...@@ -143,6 +144,27 @@ def choice_pictorial_push_tag(device_id, user_id):
return {"pictorial_tag_list": []} return {"pictorial_tag_list": []}
@bind("physical/search/lintag_by_user_id")
def get_lintags_by_user_id(user_id):
    """Return up to three LinUCB-recommended tags for *user_id*.

    Looks up the user's first online, non-deleted device in
    ``account_reg_extra`` and reads the cached tag recommendation for that
    device from redis.

    :param user_id: id of the account to look up.
    :return: dict ``{"lin_tag_list": [...]}`` with at most 3 tags; the list
        is empty when the user has no device, no cached tags exist, or any
        error occurs (best-effort endpoint, never raises).
    """
    try:
        devices = AccountRegExtra.objects.filter(
            user_id=user_id, is_online=True, is_deleted=False
        ).values_list("device_id", flat=True)
        if not devices:
            return {"lin_tag_list": []}

        # Key layout must match the writer side of the LinUCB recommender.
        redis_key = "physical:linucb:tag_recommend:device_id:" + str(devices[0])
        tag_data = redis_client.get(redis_key)
        if tag_data is None:
            return {"lin_tag_list": []}

        # Cached payload is a JSON list stored as utf-8 bytes.
        lintags = json.loads(str(tag_data, encoding="utf-8"))
        return {"lin_tag_list": lintags[:3]}
    except Exception:
        # Log and fall back to an empty result rather than propagate.
        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
        return {"lin_tag_list": []}
@bind("physical/search/choice_push_tag") @bind("physical/search/choice_push_tag")
def choice_push_tag(device_id, user_id): def choice_push_tag(device_id, user_id):
""" """
......
import datetime

from django.db import models


class AccountRegExtra(models.Model):
    """ORM mapping for the ``account_reg_extra`` device/user relation table."""

    class Meta:
        verbose_name = u"设备用户关系表"  # "device/user relation table"
        db_table = "account_reg_extra"

    # Explicit integer primary key (not an AutoField).
    id = models.IntegerField(verbose_name="主键ID", primary_key=True)
    # Online flag; queried together with is_deleted when resolving a user's device.
    is_online = models.BooleanField(verbose_name=u"是否上线")
    # Epoch default (1970-01-01) marks "never set"; evaluated once at import time.
    create_time = models.DateTimeField(verbose_name=u"创建时间", default=datetime.datetime.fromtimestamp(0))
    update_time = models.DateTimeField(verbose_name=u"更新时间", default=datetime.datetime.fromtimestamp(0))
    # Soft-delete flag; rows are filtered, not removed.
    is_deleted = models.BooleanField(verbose_name=u"")
    geo = models.CharField(verbose_name=u"", max_length=300)
    # Presumably the device model string — TODO confirm against the writer of this table.
    model = models.CharField(verbose_name=u"", max_length=64)
    device_id = models.CharField(verbose_name=u"设备ID", max_length=64)
    share_code = models.CharField(verbose_name=u"", max_length=64)
    user_id = models.IntegerField(verbose_name="用户ID")
# coding=utf8
import redis

from django.conf import settings


class _RedisWithoutprefixProxy(object):
    """Proxy around ``redis.StrictRedis`` that decodes the return value of a
    few read methods from ``bytes`` to ``str``.

    Keys are passed through unchanged ("without prefix"). All attributes not
    defined on the proxy itself are delegated to the wrapped client, so write
    methods such as ``set``/``expire`` behave exactly like redis-py's.
    """

    # Read methods whose bytes result is decoded before being returned.
    _hacked_methods = set([
        'get', 'mget', 'hget', 'hgetall', 'rpop'
    ])

    def __getattribute__(self, name):
        # Try normal attribute lookup on the proxy first; anything not found
        # here is delegated to the wrapped redis client.
        try:
            return super(_RedisWithoutprefixProxy, self).__getattribute__(name)
        except AttributeError:
            f = getattr(self.redis, name)
            if name in _RedisWithoutprefixProxy._hacked_methods:
                def wrapper(k, *args, **kwargs):
                    data = f(k, *args, **kwargs)
                    # py3.5 workaround: json.loads does NOT accept bytes
                    # there, so decode to str before handing data back.
                    if type(data) == bytes:
                        data = data.decode()
                    return data
                return wrapper
            return f

    def __init__(self, conf):
        # ``conf`` is a kwargs dict for redis.ConnectionPool (host/port/db/...).
        self.__pool = redis.ConnectionPool(**conf)
        self.redis = redis.StrictRedis(connection_pool=self.__pool)
# Module-level caches used by the vest auto-request tasks (imported in
# vest.request.auto_request) to deduplicate reply/follow/click actions.
reply_cache = _RedisWithoutprefixProxy(settings.REDIS_TOPIC['reply_cache'])
follow_cache = _RedisWithoutprefixProxy(settings.REDIS_TOPIC['follow_cache'])
click_cache = _RedisWithoutprefixProxy(settings.REDIS_TOPIC['click_cache'])
...@@ -42,7 +42,8 @@ def batch_handle(auto_click_list): ...@@ -42,7 +42,8 @@ def batch_handle(auto_click_list):
try: try:
cookies = login() cookies = login()
if cookies is not None: if cookies is not None:
click.apply_async(args=(cookies, topic_id), eta=get_rand_time()) time = get_rand_time()
click.apply_async(args=(cookies, topic_id), eta=time)
# click(cookies, topic_id) # click(cookies, topic_id)
except: except:
pass pass
......
...@@ -44,7 +44,8 @@ def batch_handle(auto_click_list): ...@@ -44,7 +44,8 @@ def batch_handle(auto_click_list):
cookies = login() cookies = login()
if cookies is not None: if cookies is not None:
# click(cookies, topic_id) # click(cookies, topic_id)
click.apply_async(args=(cookies, topic_id), eta=get_rand_time(hourup=1)) time = get_rand_time(hourup=0)
click.apply_async(args=(cookies, topic_id), eta=time)
except: except:
pass pass
...@@ -54,7 +55,7 @@ def auto_click_per_2h_by_post(): ...@@ -54,7 +55,7 @@ def auto_click_per_2h_by_post():
auto_click_list = [] auto_click_list = []
try: try:
# 发帖2小时内:[1-3]个点赞 # 发帖2小时内:[1-3]个点赞
numtime1, numtime2 = time_conv_hour(0, 2) numtime1, numtime2 = time_conv_hour(0, 1)
topic_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=6) topic_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=6)
for topic_id in topic_ids: for topic_id in topic_ids:
click_num = random.randint(1, 3) click_num = random.randint(1, 3)
......
...@@ -30,7 +30,8 @@ def batch_handle(auto_follow_list): ...@@ -30,7 +30,8 @@ def batch_handle(auto_follow_list):
cookies = login() cookies = login()
if cookies is not None: if cookies is not None:
# follow(cookies, user_id) # follow(cookies, user_id)
follow.apply_async(args=(cookies, user_id), eta=get_rand_time()) time = get_rand_time()
follow.apply_async(args=(cookies, user_id), eta=time)
except: except:
pass pass
......
...@@ -29,7 +29,8 @@ def batch_handle(auto_follow_list): ...@@ -29,7 +29,8 @@ def batch_handle(auto_follow_list):
cookies = login() cookies = login()
if cookies is not None: if cookies is not None:
# follow(cookies, user_id) # follow(cookies, user_id)
follow.apply_async(args=(cookies, user_id), eta=get_rand_time()) time = get_rand_time()
follow.apply_async(args=(cookies, user_id), eta=time)
except: except:
pass pass
......
...@@ -41,7 +41,8 @@ def batch_handle(auto_follow_list): ...@@ -41,7 +41,8 @@ def batch_handle(auto_follow_list):
cookies = login() cookies = login()
if cookies is not None: if cookies is not None:
# follow(cookies, user_id) # follow(cookies, user_id)
follow.apply_async(args=(cookies, user_id), eta=get_rand_time(hourup=1)) time = get_rand_time(hourup=0)
follow.apply_async(args=(cookies, user_id), eta=time)
except: except:
pass pass
...@@ -52,7 +53,7 @@ def auto_follow_per_2h_by_post_and_regist(): ...@@ -52,7 +53,7 @@ def auto_follow_per_2h_by_post_and_regist():
auto_follow_list = [] auto_follow_list = []
try: try:
# 发帖,注册后2小时内:[1-3]个粉丝 # 发帖,注册后2小时内:[1-3]个粉丝
numtime1, numtime2 = time_conv_hour(0, 2) numtime1, numtime2 = time_conv_hour(0, 1)
user_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=6) user_ids = get_commnet_id(numtime2, numtime1, content_level_low=0, content_level_top=6)
for user_id in user_ids: for user_id in user_ids:
follow_num = random.randint(1, 3) follow_num = random.randint(1, 3)
......
...@@ -57,7 +57,8 @@ def batch_handle(pictorial_id_list): ...@@ -57,7 +57,8 @@ def batch_handle(pictorial_id_list):
if cookies is not None: if cookies is not None:
comment = judge_pictorial_info_get_comment(pictorial_id) comment = judge_pictorial_info_get_comment(pictorial_id)
# pictorial_reply(cookies, pictorial_id, comment) # pictorial_reply(cookies, pictorial_id, comment)
pictorial_reply.apply_async(args=(cookies, pictorial_id, comment), eta=get_rand_time()) time = get_rand_time()
pictorial_reply.apply_async(args=(cookies, pictorial_id, comment), eta=time)
except: except:
pass pass
......
...@@ -35,14 +35,17 @@ def batch_handle(topic_id_list): ...@@ -35,14 +35,17 @@ def batch_handle(topic_id_list):
comment = judge_topic_info_get_comment(topic_id) comment = judge_topic_info_get_comment(topic_id)
if comment: if comment:
# reply(cookies, topic_id, comment) # reply(cookies, topic_id, comment)
reply.apply_async(args=(cookies, topic_id, comment), eta=get_rand_time()) time = get_rand_time()
reply.apply_async(args=(cookies, topic_id, comment), eta=time)
else: else:
comment1, comment2 = get_answer_data() comment1, comment2 = get_answer_data()
response = reply_answer(cookies, topic_id, comment1) response = reply_answer(cookies, topic_id, comment1)
response = json.loads(response) response = json.loads(response)
cookies = login() cookies = login()
reply_id = response["data"]["id"] reply_id = response["data"].get('id')
reply2.apply_async(args=(cookies, topic_id, comment2, reply_id), eta=get_rand_time()) if reply_id:
time = get_rand_time()
reply2.apply_async(args=(cookies, topic_id, comment2, reply_id), eta=time)
......
...@@ -34,14 +34,17 @@ def batch_handle(topic_id_list): ...@@ -34,14 +34,17 @@ def batch_handle(topic_id_list):
comment = judge_topic_info_get_comment(topic_id) comment = judge_topic_info_get_comment(topic_id)
if comment: if comment:
# reply(cookies, topic_id, comment) # reply(cookies, topic_id, comment)
reply.apply_async(args=(cookies, topic_id, comment), eta=get_rand_time(hourup=1)) time = get_rand_time(hourup=0)
reply.apply_async(args=(cookies, topic_id, comment), eta=time)
else: else:
comment1, comment2 = get_answer_data() comment1, comment2 = get_answer_data()
response = reply_answer(cookies, topic_id, comment1) response = reply_answer(cookies, topic_id, comment1)
response = json.loads(response) response = json.loads(response)
cookies = login() cookies = login()
reply_id = response["data"]["id"] reply_id = response["data"].get('id')
reply2.apply_async(args=(cookies, topic_id, comment2, reply_id), eta=get_rand_time(hourup=1)) if reply_id:
time = get_rand_time(hourup=0)
reply2.apply_async(args=(cookies, topic_id, comment2, reply_id), eta=time)
except: except:
logging_exception() logging_exception()
...@@ -51,7 +54,7 @@ def batch_handle(topic_id_list): ...@@ -51,7 +54,7 @@ def batch_handle(topic_id_list):
def auto_reply_per_2h_to_topic(): def auto_reply_per_2h_to_topic():
topic_id_list = [] topic_id_list = []
try: try:
numtime1, numtime2 = time_conv_hour(0, 2) numtime1, numtime2 = time_conv_hour(0, 1)
topic_ids = get_data(numtime1, numtime2) topic_ids = get_data(numtime1, numtime2)
for topic_id in topic_ids: for topic_id in topic_ids:
random_num = random.randint(1, 2) random_num = random.randint(1, 2)
......
import requests import requests
import time
import datetime import datetime
import random import random
import traceback import traceback
...@@ -8,12 +7,12 @@ import json ...@@ -8,12 +7,12 @@ import json
import redis import redis
import smtplib import smtplib
from celery import shared_task from celery import shared_task
from celery_once import QueueOnce
from libs.cache import redis_client from libs.cache import redis_client
from email.mime.text import MIMEText from email.mime.text import MIMEText
from email.utils import formataddr from email.utils import formataddr
from physical.settings_local import DATABASES from physical.settings_local import DATABASES
from physical.settings_local import REDIS_URL from physical.settings_local import REDIS_URL
from vest.cache.base import reply_cache, follow_cache, click_cache
from vest.data.topic_models import get_pictorial_tag_by_id, get_topic_product_info, get_edit_tag_id_list, \ from vest.data.topic_models import get_pictorial_tag_by_id, get_topic_product_info, get_edit_tag_id_list, \
get_category_tag_id, topic_has_image, get_tag_id_list get_category_tag_id, topic_has_image, get_tag_id_list
from django.conf import settings from django.conf import settings
...@@ -115,9 +114,16 @@ def logins(user_id): ...@@ -115,9 +114,16 @@ def logins(user_id):
return None return None
# @shared_task(retry_kwargs={'max_retries': 0}, base=QueueOnce, once={'graceful': True, 'unlock_before_run': True})
@shared_task(retry_kwargs={'max_retries': 0}) @shared_task(retry_kwargs={'max_retries': 0})
def click(cookies_get, id): def click(cookies_get, id):
click_key = 'click topic_id:%s, cookies_get: %s' % (str(cookies_get), str(id))
cache_count = click_cache.get(click_key)
if cache_count:
return
else:
click_cache.set(click_key, 1)
click_cache.expire(click_key, settings.CACHE_SECONDS)
# 点赞 # 点赞
try: try:
topic_id = id[0] topic_id = id[0]
...@@ -150,9 +156,15 @@ def click(cookies_get, id): ...@@ -150,9 +156,15 @@ def click(cookies_get, id):
logging.error("catch exception,logins:%s" % traceback.format_exc()) logging.error("catch exception,logins:%s" % traceback.format_exc())
# @shared_task(retry_kwargs={'max_retries': 0}, base=QueueOnce, once={'graceful': True, 'unlock_before_run': True})
@shared_task(retry_kwargs={'max_retries': 0}) @shared_task(retry_kwargs={'max_retries': 0})
def reply(cookies_get, id, content): def reply(cookies_get, id, content):
reply_key = 'click topic_id:%s, cookies_get: %s, content:%s' % (str(id), str(cookies_get), str(content))
cache_count = reply_cache.get(reply_key)
if cache_count:
return
else:
reply_cache.set(reply_key, 1)
reply_cache.expire(reply_key, settings.CACHE_SECONDS)
try: try:
post_dict = { post_dict = {
'topic_id': id, 'topic_id': id,
...@@ -275,9 +287,15 @@ def get_comments(): ...@@ -275,9 +287,15 @@ def get_comments():
return None return None
# @shared_task(retry_kwargs={'max_retries': 0}, base=QueueOnce, once={'graceful': True, 'unlock_before_run': True})
@shared_task(retry_kwargs={'max_retries': 0}) @shared_task(retry_kwargs={'max_retries': 0})
def follow(cookies_get, id): def follow(cookies_get, id):
follow_key = 'click user_id:%s, cookies_get: %s' % (str(id), str(cookies_get))
cache_count = follow_cache.get(follow_key)
if cache_count:
return
else:
follow_cache.set(follow_key, 1)
follow_cache.expire(follow_key, settings.CACHE_SECONDS)
try: try:
post_dict = { post_dict = {
'type': 1, 'type': 1,
...@@ -442,9 +460,16 @@ def set_reply_to_redis(): ...@@ -442,9 +460,16 @@ def set_reply_to_redis():
logging.error("catch exception,logins:%s" % traceback.format_exc()) logging.error("catch exception,logins:%s" % traceback.format_exc())
# @shared_task(retry_kwargs={'max_retries': 0}, base=QueueOnce, once={'graceful': True, 'unlock_before_run': True})
@shared_task(retry_kwargs={'max_retries': 0}) @shared_task(retry_kwargs={'max_retries': 0})
def reply2(cookies_get, id, content, replied_id): def reply2(cookies_get, id, content, replied_id):
reply_key = 'click topic_id:%s, cookies_get: %s, content:%s, replied_id: %s' % \
(str(id), str(cookies_get), str(content), str(replied_id))
cache_count = reply_cache.get(reply_key)
if cache_count:
return
else:
reply_cache.set(reply_key, 1)
reply_cache.expire(reply_key, settings.CACHE_SECONDS)
try: try:
post_dict = { post_dict = {
'topic_id': id, 'topic_id': id,
...@@ -464,9 +489,15 @@ def reply2(cookies_get, id, content, replied_id): ...@@ -464,9 +489,15 @@ def reply2(cookies_get, id, content, replied_id):
logging.error("catch exception,logins:%s" % traceback.format_exc()) logging.error("catch exception,logins:%s" % traceback.format_exc())
# @shared_task(retry_kwargs={'max_retries': 0}, base=QueueOnce, once={'graceful': True, 'unlock_before_run': True})
@shared_task(retry_kwargs={'max_retries': 0}) @shared_task(retry_kwargs={'max_retries': 0})
def pictorial_reply(cookies_get, id, content): def pictorial_reply(cookies_get, id, content):
reply_key = 'click topic_id:%s, cookies_get: %s, content:%s' % (str(id), str(cookies_get), str(content))
cache_count = reply_cache.get(reply_key)
if cache_count:
return
else:
reply_cache.set(reply_key, 1)
reply_cache.expire(reply_key, settings.CACHE_SECONDS)
try: try:
post_dict = { post_dict = {
'pictorial_id': id, 'pictorial_id': id,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment