Commit b86d6452 authored by 吴升宇

fix: add redis keys for task deduplication

parent 29407cf9
......@@ -4,7 +4,7 @@ __pycache__/
*~
# C extensions
*.so
venv
# Distribution / packaging
.Python
.vscode
......
crontab:
cp crontab.py /data/log/physical/app/crontab.py && python /data/log/physical/app/crontab.py && python /data/log/physical/app/crontabs.py
celery:
celery -A physical worker -c 1 -Q tapir-alpha -l debug --max-tasks-per-child=500
celery -A physical worker -c 1 -Q vest -l debug
......@@ -13,13 +13,6 @@ app = Celery('physical')
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
app.conf.ONCE = {
    'backend': 'celery_once.backends.Redis',
    'settings': {
        'url': settings.CELERY_BROKER_URL,
        'default_timeout': 60 * 60
    }
}
# Load task modules from all registered Django app configs.
......
......@@ -14,7 +14,6 @@ gevent==1.3.7
pypinyin==0.34.1
numpy==1.16.2
lz4==2.1.6
celery_once==3.0.1
git+ssh://git@git.wanmeizhensuo.com/backend/gm-rpcd.git@master
git+ssh://git@git.wanmeizhensuo.com/backend/helios.git@master
......
# coding=utf8
import redis

from django.conf import settings


class _RedisWithoutprefixProxy(object):
    """Proxy over StrictRedis that decodes bytes results of the methods in _hacked_methods."""

    _hacked_methods = set([
        'get', 'mget', 'hget', 'hgetall', 'rpop'
    ])

    def __getattribute__(self, name):
        try:
            return super(_RedisWithoutprefixProxy, self).__getattribute__(name)
        except AttributeError:
            f = getattr(self.redis, name)
            if name in _RedisWithoutprefixProxy._hacked_methods:
                def wrapper(k, *args, **kwargs):
                    data = f(k, *args, **kwargs)
                    # bug fix for py35: json.loads does not accept bytes, so decode to str first
                    if type(data) == bytes:
                        data = data.decode()
                    return data
                return wrapper
            return f

    def __init__(self, conf):
        self.__pool = redis.ConnectionPool(**conf)
        self.redis = redis.StrictRedis(connection_pool=self.__pool)


reply_cache = _RedisWithoutprefixProxy(settings.REDIS_TOPIC['reply_cache'])
follow_cache = _RedisWithoutprefixProxy(settings.REDIS_TOPIC['follow_cache'])
click_cache = _RedisWithoutprefixProxy(settings.REDIS_TOPIC['click_cache'])
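For reference, a minimal usage sketch of the proxy above. The shape of settings.REDIS_TOPIC shown in the comment is an assumption (the real values live in the project's Django settings and are not part of this diff); only the decode behaviour comes from the code itself.

# Assumed shape of settings.REDIS_TOPIC (hypothetical values, not from this commit):
#
#   REDIS_TOPIC = {
#       'reply_cache':  {'host': '127.0.0.1', 'port': 6379, 'db': 3},
#       'follow_cache': {'host': '127.0.0.1', 'port': 6379, 'db': 4},
#       'click_cache':  {'host': '127.0.0.1', 'port': 6379, 'db': 5},
#   }

from vest.cache.base import click_cache

# Writes pass straight through to StrictRedis; wrapped reads
# (get/mget/hget/hgetall/rpop) come back as str instead of bytes.
click_cache.set('some-key', 1)
click_cache.expire('some-key', 300)
assert click_cache.get('some-key') == '1'  # already decoded by the proxy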
import requests
import time
import datetime
import random
import traceback
......@@ -8,12 +7,12 @@ import json
import redis
import smtplib
from celery import shared_task
from celery_once import QueueOnce
from libs.cache import redis_client
from email.mime.text import MIMEText
from email.utils import formataddr
from physical.settings_local import DATABASES
from physical.settings_local import REDIS_URL
from vest.cache.base import reply_cache, follow_cache, click_cache
from vest.data.topic_models import get_pictorial_tag_by_id, get_topic_product_info, get_edit_tag_id_list, \
get_category_tag_id, topic_has_image, get_tag_id_list
from django.conf import settings
......@@ -115,9 +114,16 @@ def logins(user_id):
    return None


# @shared_task(retry_kwargs={'max_retries': 0}, base=QueueOnce, once={'graceful': True, 'unlock_before_run': True})
@shared_task(retry_kwargs={'max_retries': 0})
def click(cookies_get, id):
    # dedup via redis key (replaces the celery_once lock)
    click_key = 'click topic_id:%s, cookies_get: %s' % (id, cookies_get)
    cache_count = click_cache.get(click_key)
    if cache_count:
        return
    else:
        click_cache.set(click_key, 1)
        click_cache.expire(click_key, settings.cache_seconds)

    # like the topic
    try:
        topic_id = id[0]
......@@ -150,9 +156,15 @@ def click(cookies_get, id):
        logging.error("catch exception,logins:%s" % traceback.format_exc())


# @shared_task(retry_kwargs={'max_retries': 0}, base=QueueOnce, once={'graceful': True, 'unlock_before_run': True})
@shared_task(retry_kwargs={'max_retries': 0})
def reply(cookies_get, id, content):
    reply_key = 'reply topic_id:%s, cookies_get: %s, content:%s' % (id, cookies_get, content)
    cache_count = reply_cache.get(reply_key)
    if cache_count:
        return
    else:
        reply_cache.set(reply_key, 1)
        reply_cache.expire(reply_key, settings.cache_seconds)

    try:
        post_dict = {
            'topic_id': id,
......@@ -275,9 +287,15 @@ def get_comments():
    return None


# @shared_task(retry_kwargs={'max_retries': 0}, base=QueueOnce, once={'graceful': True, 'unlock_before_run': True})
@shared_task(retry_kwargs={'max_retries': 0})
def follow(cookies_get, id):
    follow_key = 'follow user_id:%s, cookies_get: %s' % (id, cookies_get)
    cache_count = follow_cache.get(follow_key)
    if cache_count:
        return
    else:
        follow_cache.set(follow_key, 1)
        follow_cache.expire(follow_key, settings.cache_seconds)

    try:
        post_dict = {
            'type': 1,
......@@ -442,9 +460,15 @@ def set_reply_to_redis():
        logging.error("catch exception,logins:%s" % traceback.format_exc())


# @shared_task(retry_kwargs={'max_retries': 0}, base=QueueOnce, once={'graceful': True, 'unlock_before_run': True})
@shared_task(retry_kwargs={'max_retries': 0})
def reply2(cookies_get, id, content, replied_id):
    reply_key = 'reply2 topic_id:%s, cookies_get: %s, content:%s' % (id, cookies_get, content)
    cache_count = reply_cache.get(reply_key)
    if cache_count:
        return
    else:
        reply_cache.set(reply_key, 1)
        reply_cache.expire(reply_key, settings.cache_seconds)

    try:
        post_dict = {
            'topic_id': id,
......@@ -464,9 +488,15 @@ def reply2(cookies_get, id, content, replied_id):
        logging.error("catch exception,logins:%s" % traceback.format_exc())


# @shared_task(retry_kwargs={'max_retries': 0}, base=QueueOnce, once={'graceful': True, 'unlock_before_run': True})
@shared_task(retry_kwargs={'max_retries': 0})
def pictorial_reply(cookies_get, id, content):
    reply_key = 'pictorial_reply pictorial_id:%s, cookies_get: %s, content:%s' % (id, cookies_get, content)
    cache_count = reply_cache.get(reply_key)
    if cache_count:
        return
    else:
        reply_cache.set(reply_key, 1)
        reply_cache.expire(reply_key, settings.cache_seconds)

    try:
        post_dict = {
            'pictorial_id': id,
......
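The tasks above all repeat the same get / set / expire sequence, which is not atomic: two workers can both see a missing key and both proceed. A possible consolidation, sketched under the assumption that the proxy's pass-through set accepts redis-py's nx/ex keywords; acquire_once is an illustrative name, not part of this commit.

# Illustrative helper, not part of this commit: collapse the check-then-set
# dedup into a single atomic SET NX EX call.
def acquire_once(cache, key, ttl):
    # StrictRedis.set(..., nx=True, ex=ttl) returns a truthy value only for
    # the first caller within `ttl` seconds; the proxy forwards `set` untouched.
    return bool(cache.set(key, 1, nx=True, ex=ttl))

# Usage inside a task body would then look like:
#   if not acquire_once(click_cache, click_key, settings.cache_seconds):
#       return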