Commit 86df2634 authored by 张宇

list_v2

parent ea0deec2
......@@ -9,6 +9,7 @@ from gm_rpcd.internals.configuration.model import Config, environ, from_property
from gm_rpcd.internals.configuration.model_base import literal, xml_text, xml_text_list, \
DefaultConfigPropertyWrapper, EnvironmentConfigProperty
DISPATCHER = None
GM_RPCD_APP_CONF_PATH_KEY = 'GM_RPCD_APP_CONF_PATH'
GM_RPCD_DEVELOP_CONF_PATH_KEY = 'GM_RPCD_DEVELOP_CONF_PATH'
......@@ -95,7 +96,8 @@ def setup_rpcd():
from gm_rpcd.internals.initializations import initialize
# expose to module scope to make dispatcher singleton
global DISPATCHER
DISPATCHER = initialize().dispatcher
if not DISPATCHER:
DISPATCHER = initialize().dispatcher
# avoid calling `setup_rpcd` repeatedly
global setup_rpcd
......
from __future__ import unicode_literals
from gm_types.error import ERROR as CODES
class GaiaRPCFaultException(Exception):
def __init__(self, error, message, data):
self.error = error
self.message = message
self.data = data
def __repr__(self):
return "Error %d, %s" % (self.error, self.message)
class UniError(GaiaRPCFaultException):
def __init__(self, message):
self.error = CODES.UNIVERSAL
self.message = message
self.data = None
class GaiaRPCSpecificExceptionBase(GaiaRPCFaultException):
error = None
default_message = None
def __init__(self, message=None, data=None):
error = self.error
if message is None:
message = self.default_message
super(GaiaRPCSpecificExceptionBase, self).__init__(error=error, message=message, data=data)
class RPCPermanentError(GaiaRPCSpecificExceptionBase):
error = 2
default_message = "Permanent Error"
class RPCTemporaryError(GaiaRPCSpecificExceptionBase):
error = 3
default_message = "Temporary Error"
class RPCValidationError(GaiaRPCSpecificExceptionBase):
error = 17
default_message = "Params/Result Validation Error"
class RPCLoginRequiredException(GaiaRPCSpecificExceptionBase):
error = 401
default_message = "Login Required"
class RPCPermissionDeniedException(GaiaRPCSpecificExceptionBase):
error = 1001
default_message = "Permission Denied"
class RPCTagRelationCycleException(RPCPermanentError):
error = CODES.TAG_RELATION_CYCLE
default_message = CODES.getDesc(error)
class RPCStaffRequiredException(GaiaRPCSpecificExceptionBase):
error = 1002
default_message = "Staff Required"
class RPCNotFoundException(GaiaRPCSpecificExceptionBase):
error = 1404
default_message = "Not Found"
class RPCIntegrityError(GaiaRPCSpecificExceptionBase):
error = 1601
default_message = "Integrity Error"
......@@ -73,7 +73,6 @@ class V1BatchView(View):
session_key=request_v1_value.get('session_key'),
environment=request_v1_value.get('environment'),
)
# print(DISPATCHER._method_table, DISPATCHER._method_table._MethodTable__method_map)
response = DISPATCHER.process_single_request(request)
response_v1_list.append(response_to_v1_json_value(response))
......
# -*- coding: utf-8 -*-
'''
gaia api module -> models with app_label = 'api'
'''
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
import hashlib
import json
import logging
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models, transaction, IntegrityError
from django.db.models import Q, DO_NOTHING
from django.utils import timezone
from gm_types.gaia import JUMP_TYPE, CONVERSATION_STATUS, DOCTOR_TYPE
from gm_types.gaia import MESSAGE_TYPE
from gm_types.msg import CONVERSATION_TYPE
from gm_upload.utils.image_utils import Picture
from message import message_signals
from tool.datetime_tool import get_timestamp_or_none
conversation_logger = logging.getLogger(__name__)
SPECIAL_MSG_TYPE = frozenset([MESSAGE_TYPE.SERVICE, MESSAGE_TYPE.DOCTOR_TOPIC, MESSAGE_TYPE.TEXT_WITH_URL,
MESSAGE_TYPE.DIARY, MESSAGE_TYPE.GIFT])
class ConversationManager(models.Manager):
def conversation_exists(self, *user_ids, **kwargs):
conversation_query = self.filter(uid_hash=Conversation.gen_uid_hash(user_ids))
return conversation_query.exists()
def get_conversation(self, *user_ids, **kwargs):
conversation_query = self.filter(uid_hash=Conversation.gen_uid_hash(user_ids))
if conversation_query.exists():
return conversation_query[0]
queries = (Q(user_status_set__user_id=user_id) for user_id in user_ids)
conversation_query = self.all()
for query in queries:
conversation_query = conversation_query.filter(query)
if conversation_query.exists():
conversation = conversation_query.latest('id')
else:
conversation_already_exists = False
with transaction.atomic(using=settings.MESSAGE_DB_NAME):
conversation = self.model(uid_hash=Conversation.gen_uid_hash(user_ids))
if kwargs.get('conversation_type') in CONVERSATION_TYPE:
conversation_type = kwargs['conversation_type']
else:
conversation_type = CONVERSATION_TYPE.MESSAGE
conversation.conversation_type = conversation_type
try:
conversation.save()
except IntegrityError:
conversation_already_exists = True
else:
for user_id in user_ids:
conversation.user_status_set.create(user_id=user_id, status=CONVERSATION_STATUS.OLD,
read_status=False, conversation_type=conversation_type)
if conversation_already_exists:
return self.filter(uid_hash=Conversation.gen_uid_hash(user_ids)).first()
# process extra
conversation_extra = {}
if 'conversation_type' in kwargs:
conversation_extra['conversation_type'] = kwargs['conversation_type']
message_signals.post_create_conversation(conversation, conversation_extra)
return conversation
class Conversation(models.Model):
class Meta:
verbose_name = u'私信对话'
verbose_name_plural = u'私信对话'
db_table = 'api_conversation'
app_label = 'api'
# user = models.ManyToManyField(User, through='ConversationUserStatus') # CHANGED: through ConversationUserStatus
created_time = models.DateTimeField(verbose_name=u'创建时间', default=timezone.now)
last_reply_time = models.DateTimeField(verbose_name=u'最后回复时间', default=timezone.now)
is_empty = models.BooleanField(verbose_name=u'是否是空对话', default=True)
uid_hash = models.CharField(verbose_name=u'用户id列表hash', max_length=32, unique=True)
last_msg_content_disp = models.TextField(verbose_name=u'最后消息内容', max_length=1024, default='',
blank=True) # duplicate for last msg content
# add at 7750
conversation_type = models.SmallIntegerField(verbose_name=u'对话类型', choices=CONVERSATION_TYPE, default=CONVERSATION_TYPE.MESSAGE)
objects = ConversationManager()
@staticmethod
def gen_uid_hash(user_ids=[]):
user_ids = sorted(map(int, user_ids))
uid_str = ','.join(map(str, user_ids))
return hashlib.md5(uid_str.encode('utf-8')).hexdigest()  # md5 requires bytes on Python 3
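# Illustrative example: for user_ids = [1024, 7] the ids are sorted to [7, 1024],
# joined into the string "7,1024", and its MD5 hex digest becomes the conversation's uid_hash.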
def user_ids(self):
uids = self.user_status_set.values_list('user_id', flat=True)
return sorted(uids)
def conversation_info_v2(self, cs):
"""格式化私信对话详细信息"""
return {
'id': self.id,
'last_reply_time': get_timestamp_or_none(self.last_reply_time),
'is_new': cs.read_status,
'unread_num': 0,
'text': self.last_msg_content_disp,
'user_ids': self.user_ids(),
'jump_type': JUMP_TYPE.CONVERSATION,
'conversation_type': self.conversation_type,
'is_star': cs.is_star,
'created_time': get_timestamp_or_none(self.created_time)
}
def conversation_info_v3(self, cs, user_ids=None):
"""格式化私信对话详细信息"""
c_user_ids = list(self.user_status_set.values_list('user_id', flat=True))
doctor_user_id = None
if user_ids:
user_ids = [int(x) for x in user_ids]  # materialize: map() is a one-shot iterator on Python 3
for c_user_id in c_user_ids:
if c_user_id in user_ids:
doctor_user_id = c_user_id
return {
'id': self.id,
'last_reply_time': get_timestamp_or_none(self.last_reply_time),
'is_new': cs.read_status,
'unread_num': 0,
'text': self.last_msg_content_disp,
'user_ids': self.user_ids(),
'jump_type': JUMP_TYPE.CONVERSATION,
'conversation_type': self.conversation_type,
'is_star': cs.is_star,
'comment': cs.comment,
'doctor_user_id': doctor_user_id
}
def conversation_info(self, user=None, cs=None, doctor_id=''):
info = {
'id': self.id,
'doctor_id': doctor_id,
'hospital_id': '',
'last_reply_time': get_timestamp_or_none(self.last_reply_time),
'is_new': False,
'unread_num': 0,
'text': '',
'user_ids': self.user_ids(),
'jump_type': JUMP_TYPE.CONVERSATION,
'conversation_type': self.conversation_type,
}
if user:
# update is_new
# <<<< old read way
# status_set = self.user_status_set.filter(user=user)
# status_set = status_set.filter(status=CONVERSATION_STATUS.NEW)
# if status_set.exists():
# info['is_new'] = True
# ====
if cs:
info['is_new'] = cs.read_status
else:
status_set = self.user_status_set.filter(user_id=user.id)
status_set = status_set.filter(read_status=True).first()
if status_set:
info['is_new'] = True
# >>>> new read way
# <<<< old read way
# latest_message = self.message_set.latest('id')
# info['text'] = latest_message.content_display()
# ====
info['text'] = self.last_msg_content_disp
# >>>> new read way
return info
def send_message(self, user, type_, content, update_self_status=True, send_time=None, body=None, is_system=0):
"""
Unified database logic for sending a message
:param content: here this refers to the text part of the message
"""
if not send_time:
send_time = timezone.now()
message = self.message_set.create(user_id=user.id, type=type_, content=content,
send_time=send_time, body=body, is_system=is_system)
if update_self_status:
# mark the sender's own conversation status as old
self.user_status_set.filter(user_id=user.id).update(
# <<<< to be removed
status=CONVERSATION_STATUS.OLD,
# >>>> to be removed
is_empty=False,
read_status=False,
last_reply_time=send_time
)
# mark the other users' conversation status in this conversation as new
self.user_status_set.exclude(user_id=user.id).update(
# <<<< to be removed
status=CONVERSATION_STATUS.NEW,
# >>>> to be removed
is_empty=False,
read_status=True,
last_reply_time=send_time
)
self.last_reply_time = send_time
self.is_empty = False
self.last_msg_content_disp = Message.content_display(type=type_, content=content, user=user)
self.save()
c_logger = "new message, sender: {}, content: {}".format(user._get_full_field_content_('username'), content)
conversation_logger.info(c_logger)
message_signals.post_touch_conversation(user=None, conversation=self)
return message
def get_target_user(self, user):
try:
cs = ConversationUserStatus.objects.filter(
conversation_id=self.id
).exclude(user_id=user.id).first()
if not cs:
raise User.DoesNotExist
target_user = User.objects.get(pk=cs.user_id)
except User.DoesNotExist:
target_user = None
return target_user
class ConversationUserStatus(models.Model):
class Meta:
verbose_name = u'用户私信状态'
verbose_name_plural = u'用户私信状态'
db_table = 'api_conversationuserstatus'
app_label = 'api'
index_together = [
('user_id', 'is_empty', 'last_reply_time'),
('user_id', 'is_empty', 'read_status', 'last_reply_time'),
]
unique_together = [
('user_id', 'conversation')
]
conversation = models.ForeignKey(Conversation, related_name=u'user_status_set', on_delete=DO_NOTHING)
user_id = models.IntegerField()
status = models.CharField(max_length=1, verbose_name=u'状态', choices=CONVERSATION_STATUS,
default=CONVERSATION_STATUS.NEW)
is_empty = models.BooleanField(default=True, verbose_name='是否空会话') # duplicate field in conversation
read_status = models.BooleanField(default=True,
verbose_name='是否未读') # True(1) for unread, False(0) for read, designed for sorting
last_reply_time = models.DateTimeField(verbose_name=u'最后回复时间',
default=timezone.now) # duplicate field in conversation
refer = models.CharField(max_length=64, verbose_name=u'用户从哪个页面打开私信', blank=True, default=None)
business_id = models.CharField(max_length=128, verbose_name=u'实体id', blank=True, default=None)
is_star = models.BooleanField(verbose_name=u'是否是星标私信', default=False)
# add at 7755, denormalized (redundant) field
conversation_type = models.SmallIntegerField(verbose_name=u'对话类型', choices=CONVERSATION_TYPE,
default=CONVERSATION_TYPE.MESSAGE)
# add at 7765, the remark was moved here from the markuser table
comment = models.CharField(verbose_name='备注内容', max_length=100, blank=True, null=True, default='')
class Message(models.Model):
MSG_PER_PAGE = 20
class Meta:
verbose_name = u'私信消息'
verbose_name_plural = u'私信消息'
app_label = 'api'
# TODO: the indexes for messages and conversations need to be reorganized
conversation = models.ForeignKey(Conversation, on_delete=DO_NOTHING)
# <<<< should be removed
user_id = models.IntegerField()
type = models.CharField(verbose_name=u'消息类型', max_length=16, default='txt')
content = models.TextField(verbose_name=u'消息内容', max_length=1024, default='', blank=True)
body = models.TextField(verbose_name=u'消息内容', max_length=1024, default='', blank=True)
# >>>> should be removed
send_time = models.DateTimeField(verbose_name=u'发送时间', default=timezone.now)
is_system = models.BooleanField(verbose_name=u'是否由系统主动发送', default=False)
is_read = models.BooleanField(verbose_name=u'接收者是否已读', default=False)
def __str__(self):  # __unicode__ is never called on Python 3 / Django 3
return u'【{}】{}'.format(MESSAGE_TYPE.getDesc(self.type), self.content)
@property
def message_detail(self):
return {
'send_time': get_timestamp_or_none(self.send_time),
'image': self.content if self.type == MESSAGE_TYPE.IMAGE else None,
'text': self.content if self.type == MESSAGE_TYPE.TEXT else None,
'audio': self.content if self.type == MESSAGE_TYPE.AUDIO else None,
'image_thumb': self.content if self.type == MESSAGE_TYPE.IMAGE else None
}
@classmethod
def get_msg_service_info(cls, service_id):
pass
# TODO: service
# try:
# service = Service.objects.get(pk=service_id)
# res = {
# 'id': service_id,
# 'title': u'在[更美]做整形优惠又放心',
# 'image': service.image_header,
# 'gengmei_price': service.lowest_gengmei_price,
# 'text': service.short_description,
# 'name': service.name,
# 'url': gm_protocol.get_service_detail(id=service_id),
# 'is_multiattribute': service.is_multiattribute,
# }
# except Service.DoesNotExist:
# res = {
# 'id': service_id,
# 'title': u'在[更美]做整形优惠又放心',
# 'image': u'',
# 'text': u'',
# 'url': u'',
# }
# return res
@classmethod
def get_msg_doctor_topic_info(cls, topic_id):
pass
# TODO: get_msg_doctor_topic_info
# res = {
# 'id': topic_id,
# 'title': u'更美发布',
# 'image': Picture.get_full_path(u'img/icon114.png'),
# 'text': u'',
# 'url': u'',
# }
# rpc = get_rpc_remote_invoker()
# try:
# topic_infos = rpc['topic/get_topic'](topic_id=topic_id).unwrap()
# if topic_infos:
# res['title'] = Message._who_sent(topic_infos.get('user_id')) + u'的更美发布'
# res['text'] = topic_infos.get('content')
# res['url'] = gm_protocol.get_topic_detail(id=topic_id)
# if topic_infos.get('image_url'):
# res['image'] = get_full_path(topic_infos.get('image_url'))
# else:
# res['image'] = get_full_path(u'img/icon114.png')
# except:
# logging_exception()
#
# return res
@classmethod
def get_msg_diary_info(cls, diary_id):
pass
# TODO: get_msg_diary_info
# if diary_id is None:
# return None
# rpc = get_rpc_remote_invoker()
# diary_info = rpc['diary/simple_data'](diary_id=diary_id, for_doctor=True).unwrap()
# result = {
# 'images': diary_info['images'],
# 'tags_new_era': diary_info['tags'],
# 'title': diary_info.get('title', ''),
# 'url': gm_protocol.get_diary_detail(id=diary_id),
# 'id': diary_id,
# }
# return result
@classmethod
def get_msg_coupon_info(cls, gift_id, channel_id):
"""
This version only supports doctor coupons for now.
"""
pass
# TODO: get_msg_coupon_info
# if not gift_id or not channel_id:
# return {}
# else:
# gift_id = int(gift_id)
# channel_id = int(channel_id)
# from api.tool.coupon_tool import _gift_id_to_doctor_send_coupon_info
# gift_list = _gift_id_to_doctor_send_coupon_info(gift_ids=[gift_id])
# default = {
# 'coupon_name': "该红包已下线",
# 'coupon_value': 0,
# 'coupon_threshold_desc': "该红包已下线",
# 'doctor_name': "",
# 'has_threshold_desc': "该红包已下线",
# 'end_time': "",
# 'get_coupon_data': {
# 'gift_id': gift_id,
# 'channel_id': settings.DOCTOR_BUSINESS_CHANNAL_ID,
# },
# 'doctor_coupon_use_desc': "",
# "doctor_coupon_use_type": "",
# }
# result = gift_list[0] if len(gift_list) == 1 else default
# if not result['doctor_coupon_use_type']:
# return result
#
# doctor_coupon_use_desc = {
# DOCTOR_USE_COUPON_TYPE.PART_GENERAL: "部分美购可用",
# DOCTOR_USE_COUPON_TYPE.DOCTOR_GENERAL: "医生通用券",
# DOCTOR_USE_COUPON_TYPE.FULL_PLATFORM_GENERAL: "全店通用",
# }[result['doctor_coupon_use_type']]
# result['doctor_coupon_use_desc'] = doctor_coupon_use_desc
#
# return result
@classmethod
def batch_get_toc_message_info(cls, messages, gevent_mode=True):
"""批量获取消息格式化后的数据
:param messages List[Message]:
:return:
"""
pass
# TODO: batch_get_toc_message_info
# if not messages:
# return []
# if not all(map(lambda x: isinstance(x, Message), messages)):
# raise ValueError('messages must be Message object')
# msg_id_to_msg_type_and_body = {m.id: {"msg_type": m.type, "body": m.body}
# for m in messages if m.type in SPECIAL_MSG_TYPE}
# msg_id_to_display_content = {}
# if msg_id_to_msg_type_and_body:
# if len(msg_id_to_msg_type_and_body) > 1 and gevent_mode:
# jobs = [
# gevent.spawn(cls.get_special_msg_content, item['msg_type'], item['body'])
# for item in msg_id_to_msg_type_and_body.values()
# ]
# gevent.joinall(jobs)
# for job, msg_id in zip(jobs, msg_id_to_msg_type_and_body.keys()):
# if job.exception is None:
# msg_id_to_display_content[msg_id] = job.value
# else:
# msg_id_to_display_content[msg_id] = u''
# else:
# for msg_id, item in msg_id_to_msg_type_and_body.items():
# msg_id_to_display_content[msg_id] = cls.get_special_msg_content(item['msg_type'], item['body'])
# result = []
# for msg in messages:
# _data = msg.to_dict(with_content=False)
# if msg.type in SPECIAL_MSG_TYPE:
# _data['content'] = msg_id_to_display_content[msg.id]
# else:
# _data['content'] = cls.get_general_msg_content(msg.type, msg.content)
# result.append(_data)
# return result
@classmethod
def get_general_msg_content(cls, type, content):
"""针对一般的消息内容, 只需要做一些简单的数据格式封装"""
if type in [ # the original three types
MESSAGE_TYPE.TEXT,
MESSAGE_TYPE.AUDIO,
MESSAGE_TYPE.IMAGE,
]:
content = {
'text': content if type == MESSAGE_TYPE.TEXT else u'',
'image': Picture.get_full_path(content, '-w') if type == MESSAGE_TYPE.IMAGE else u'',
'audio': Picture.get_full_path(content, '-audio') if type == MESSAGE_TYPE.AUDIO else u'',
'image_thumb': Picture.get_full_path(content, '-thumb') if type == MESSAGE_TYPE.IMAGE else u''
}
elif type in [ # these types are passed through as-is
MESSAGE_TYPE.CUSTOMER_SRV_CTRL,
]:
content = {'text': content}
else:
raise TypeError('msg type: {} not belong general msg type'.format(type))
return content
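# Illustrative example: get_general_msg_content(MESSAGE_TYPE.TEXT, u'hello') returns
# {'text': u'hello', 'image': u'', 'audio': u'', 'image_thumb': u''}, while
# CUSTOMER_SRV_CTRL content is passed through as {'text': content}.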
@classmethod
def get_special_msg_content(cls, type, body):
"""需要查表获取是调用其他的服务"""
content = {}
try:
body = json.loads(body)
except:
# TODO: logging
# logging_exception()
return content
if type == MESSAGE_TYPE.SERVICE:
content['service_info'] = cls.get_msg_service_info(service_id=body)
elif type == MESSAGE_TYPE.DOCTOR_TOPIC:
content['doctor_topic_info'] = cls.get_msg_doctor_topic_info(topic_id=body)
elif type == MESSAGE_TYPE.TEXT_WITH_URL:
content['text_with_url_info'] = {
'text': body['text'],
'url': body['url'],
}
elif type == MESSAGE_TYPE.DIARY:
content['diary_info'] = cls.get_msg_diary_info(diary_id=body)
elif type == MESSAGE_TYPE.GIFT:
content['coupon_info'] = cls.get_msg_coupon_info(
gift_id=body.get('gift_id'),
channel_id=body.get('channel_id')
)
else:
raise TypeError('msg type: {} not belong special msg type'.format(type))
return content
def to_dict(self, with_content=True):
data = {
'id': self.id,
'conversation_id': self.conversation_id,
'uid': self.user_id,
'send_time': get_timestamp_or_none(self.send_time),
'type': self.type, # message type
'content': {},
'is_read': self.is_read,
}
if with_content:
if self.type in SPECIAL_MSG_TYPE:
data['content'] = Message.get_special_msg_content(self.type, self.body)
else:
data['content'] = Message.get_general_msg_content(self.type, self.content)
return data
@classmethod
def msg_info(cls, m, user_ids=[]):
pass
# TODO:
# try:
# doctor = Doctor.objects.get(user__id=m['user_id'])
# doctor_id = doctor.id
# except Doctor.DoesNotExist:
# doctor_id = None
#
# data = {
# 'id': m['id'],
# 'uid': m['user_id'],
# 'send_time': get_timestamp_or_none(m['send_time']),
# 'reply_time': get_timestamp_or_none(m['send_time']),
# 'conversation_user_ids': user_ids, # 参与会话的用户id列表
# 'doctor_id': doctor_id, # 如果发信人不是doctor则为None
# 'type': m['type'], # 消息类型
# 'content': {},
# }
#
# # TODO 这个地方最好重构 返回对应类型和唯一标识(id?) 以便于外面的batch处理 或者服务拆分
# if m['type'] in [ # 以下类型直接透传
# MESSAGE_TYPE.CUSTOMER_SRV_CTRL,
# ]:
# data['content'] = m['content']
# elif m['type'] == MESSAGE_TYPE.SERVICE:
# data['content']['service_info'] = cls.get_msg_service_info(service_id=m['content']['service_id'])
# elif m['type'] == MESSAGE_TYPE.DOCTOR_TOPIC:
# data['content']['doctor_topic_info'] = cls.get_msg_doctor_topic_info(topic_id=m['content']['topic_id'])
# elif m['type'] == MESSAGE_TYPE.TEXT_WITH_URL:
# data['content']['text_with_url_info'] = {
# 'text': m['content']['text'],
# 'url': m['content']['url'],
# }
# elif m['type'] == MESSAGE_TYPE.DIARY:
# data['content']['diary_info'] = cls.get_msg_diary_info(diary_id=m['content'].get('diary_id', None))
# elif m['type'] == MESSAGE_TYPE.GIFT:
# data['content']['coupon_info'] = cls.get_msg_coupon_info(
# gift_id=m['content'].get('gift_id', None),
# channel_id=m['content'].get('channel_id', None)
# )
# elif m['type'] in [ # 最初的三种类型
# MESSAGE_TYPE.TEXT,
# MESSAGE_TYPE.AUDIO,
# MESSAGE_TYPE.IMAGE,
# ]:
# data['content'] = {
# 'text': m['content']['text'] if m['type'] == MESSAGE_TYPE.TEXT else u'',
# 'image': Picture.get_full_path(m['content']['image'], '-w') if m['type'] == MESSAGE_TYPE.IMAGE else u'',
# 'audio': Picture.get_full_path(m['content']['audio'], '-audio') if m['type'] == MESSAGE_TYPE.AUDIO else u'',
# 'image_thumb': Picture.get_full_path(m['content']['image'], '-thumb') if m['type'] == MESSAGE_TYPE.IMAGE else u''
# }
#
# return data
@classmethod
def _who_sent(cls, user):
pass
# TODO: _who_sent
# if isinstance(user, int):
# user = get_user_by_id(user)
#
# if user.id == 22: # 更美所长
# return user.last_name
#
# try:
# doctor = Doctor.objects.get(user=user)
# who = doctor.name
# if doctor.doctor_type == DOCTOR_TYPE.DOCTOR:
# who = who + '医生'
# except Doctor.DoesNotExist:
# who = user.last_name
# return who
@classmethod
def push_alert_text(cls, type, content, user):
if type != MESSAGE_TYPE.CUSTOMER_SRV_CTRL:
text = Message._who_sent(user)
else:
text = u'' # do not show anything in alert
if type == MESSAGE_TYPE.TEXT:
if content and len(content) > 50:
content = content[:50] + u'...'
text += u'私信你了哦:{}'.format(content)
elif type == MESSAGE_TYPE.AUDIO:
text += u'发来一段语音'
elif type == MESSAGE_TYPE.IMAGE:
text += u'发来一张图片'
elif type == MESSAGE_TYPE.SERVICE:
text += u'私信你:' + Message._who_sent(user) + '的更美美购'
elif type == MESSAGE_TYPE.DOCTOR_TOPIC:
text += u'私信你:' + Message._who_sent(user) + '的更美发布'
elif type == MESSAGE_TYPE.TEXT_WITH_URL:
text += u'私信你:' + content
elif type == MESSAGE_TYPE.CUSTOMER_SRV_CTRL:
text += u'' # do not show anything in alert
elif type == MESSAGE_TYPE.GIFT:
text += u'私信你:你有一个平台红包券,快来领取~'
elif type == MESSAGE_TYPE.DIARY:
text += u'私信你:推荐给你一个变美日记本,快来查看~'
else:
text += u'私信你'
return text
@classmethod
def content_display(cls, type, content, user):
if type in [MESSAGE_TYPE.TEXT, MESSAGE_TYPE.TEXT_WITH_URL, MESSAGE_TYPE.CUSTOMER_SRV_CTRL]:
text = content
elif type == MESSAGE_TYPE.AUDIO:
text = u'[语音]'
elif type == MESSAGE_TYPE.IMAGE:
text = u'[图片]'
elif type == MESSAGE_TYPE.SERVICE:
text = Message._who_sent(user) + u'的更美美购'
elif type == MESSAGE_TYPE.DOCTOR_TOPIC:
text = Message._who_sent(user) + u'的更美发布'
elif type == MESSAGE_TYPE.DIARY:
text = u'[更美平台用户变美日记]'
elif type == MESSAGE_TYPE.GIFT:
text = u'[更美红包券 优惠多多]'
else:
text = u''
return text
\ No newline at end of file
# -*- coding: utf-8 -*-
......@@ -37,6 +37,7 @@ INSTALLED_APPS = [
# 'django.contrib.sessions',
'django.contrib.messages',
# 'django.contrib.staticfiles',
'api'
]
MIDDLEWARE = [
......@@ -115,7 +116,7 @@ AUTH_PASSWORD_VALIDATORS = [
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
......@@ -203,6 +204,13 @@ LOGGING = {
'level': 'DEBUG' if DEBUG else 'WARNING',
'propagate': False
},
# debug
'kafka': {
'level': 'FATAL'
},
'sentry': {
'level': 'FATAL'
},
}
}
......@@ -222,3 +230,9 @@ try:
from courier.settings_local import *
except ModuleNotFoundError:
pass
COUNT_LIMIT = 100
ES_SEARCH_TIMEOUT = '10s'
DATABASE_ROUTERS = ['courier.db_routers.MessageRouter']
MESSAGE_DB_NAME = 'message'
import multiprocessing
workers = multiprocessing.cpu_count() + 1
bind = '0.0.0.0:8000'
bind = '0.0.0.0:8005'
proc_name = 'courier'
#pidfile = '/var/courier/gunicorn.pid'
timeout = 3600
preload_app = True
access_log_format = '%(t)s %(p)s %(h)s "%(r)s" %(s)s %(L)s %(b)s "%(f)s" "%(a)s"'
#pidfile = '/var/courier/gunicorn.pid'
#accesslog = '/var/log/courier/access.log'
#access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"' # default
#access_log_format = '"%({X-Real-IP}i)s" %(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
#errorlog = '/var/log/courier/error.log'
preload_app = True
#worker_class = 'gevent'
access_log_format = '%(t)s %(p)s %(h)s "%(r)s" %(s)s %(L)s %(b)s "%(f)s" "%(a)s"'
# -*- coding: utf-8 -*-
from . import signal_handlers # import for side effects: registers the signal handlers
# coding=utf-8
from __future__ import unicode_literals, print_function, absolute_import
class Signal(object):
def __init__(self, func=None):
self.__handlers = []
if func:
self.connect(func)
@classmethod
def create(cls, func):
return cls(func)
def connect(self, func):
self.__handlers.append(func)
return func
def __call__(self, *args, **kwargs):
for func in self.__handlers:
func(*args, **kwargs)
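# A minimal usage sketch (illustrative only): handlers attached via `connect` (or the
# `@Signal.create` / `@<signal>.connect` decorators used below) run in registration order
# whenever the signal instance is called.
#
# on_ping = Signal()
#
# @on_ping.connect
# def log_ping(payload):
#     print('ping:', payload)
#
# on_ping(payload={'ok': True})  # invokes log_ping(payload={'ok': True})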
@Signal.create
def post_create_conversation(conversation, conversation_extra):
pass
@Signal.create
def post_touch_conversation(user, conversation):
pass
@Signal.create
def post_send_message(conversation):
pass
\ No newline at end of file
# coding=utf-8
from __future__ import unicode_literals, print_function, absolute_import
from elasticsearch import TransportError
# from message.utils.es_abstract import get_esop, get_migrate_esop
# from rpc.tool.log_tool import logging_exception, conversation_logger
from . import message_signals
def sync_conversation(conversation, conversation_extra=None):
# from message.utils.conversation import get_conversation_head_from_conversation, get_conversation_head_bulk_action
# conversation_extra = conversation_extra or {}
# conversation_head = get_conversation_head_from_conversation(
# conversation,
# conversation_extra)
# action = get_conversation_head_bulk_action(conversation_head)
# get_esop().bulk_single(action)
pass
@message_signals.post_create_conversation.connect
def post_create_conversation(conversation, conversation_extra):
sync_conversation(conversation, conversation_extra)
@message_signals.post_touch_conversation.connect
def post_touch_conversation(user, conversation):
sync_conversation(conversation)
@message_signals.post_send_message.connect
def post_send_message(conversation):
sync_conversation(conversation)
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
from typing import List, Dict, Optional
@bind('courier/message/conversation/list')
def conversation_list():
return {}
from six import string_types
import dateutil.parser  # needed: `import dateutil` alone does not expose dateutil.parser
from django.db.models import Q
from gm_rpcd.all import bind, context
from gm_types.gaia import MESSAGE_ORDER_TYPE
from gm_types.msg import CONVERSATION_TYPE, CONVERSATION_ORDER
from adapter.rpcd.exceptions import RPCPermanentError
from api.models.message import ConversationUserStatus
from rpc import gaia_client
from search.utils import search_conversation_from_es
from services.unread.stat import UserUnread
ROUTER_PREFIX = 'courier/'
bind_prefix = lambda endpoint, **options: bind(ROUTER_PREFIX+endpoint, **options)
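# For example, bind_prefix('message/conversation/list_v2') registers the decorated function
# with gm_rpcd under the full method name 'courier/message/conversation/list_v2'.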
@bind_prefix('message/conversation/list_v2')
def batch_get_conversations_v2(user_ids: List[int],
offset: int,
size: int,
order_by: MESSAGE_ORDER_TYPE=MESSAGE_ORDER_TYPE.READ_STATUS,
read_status: Optional[bool]=None,
is_star: Optional[bool]=None,
conversation_type: Optional[CONVERSATION_TYPE]=None,
search_content: Optional[str]=None,
with_fields: Optional[List[str]]=None)\
-> Dict:
"""获取对话列表的ID, 注意和之前的v1版本不同的是, 这里只会返回conversation_ids
权限放在上层处理
:param user_ids: List[int] 用户ID列表
:param offset: int 偏移量
:param size: int 每页数量
:param order_by: MESSAGE_ORDER_TYPE 排序规则 详细看枚举值
:param read_status: Optional[bool] 读取状态
:param is_star: Optional[bool] 是否标星
:param conversation_type: CONVERSATION_TYPE 会话类型
:param search_content str: 搜索关键词 主要针对 user_name 和 message_content进行过滤
:return: Dict
"""
if not isinstance(user_ids, list):
raise RPCPermanentError
if not isinstance(size, int):
raise RPCPermanentError
if size <= 0 or size >= 50:
size = 50
if with_fields is None or not isinstance(with_fields, list):
with_fields = []
# without a search keyword, filter via MySQL; with a keyword, fetch the matching conversation_ids from ES
# ES filters
es_filters = {'user_ids': user_ids}
# Mysql filters
query_q = Q(user_id__in=user_ids)
ordering_by = list()
if read_status is not None and isinstance(read_status, bool):
query_q &= Q(read_status=read_status)
es_filters['multi_filter'] = {
'is_unread': read_status,
'user_ids': user_ids,
}
if is_star is not None and isinstance(is_star, bool):
query_q &= Q(is_star=is_star)
es_filters['is_star'] = is_star
# ES filtering is not supported for this part yet
# conversation_type has no special requirements for now
if conversation_type is not None and isinstance(conversation_type, list):
conversation_type = [ct for ct in conversation_type if ct in CONVERSATION_TYPE]
query_q &= Q(conversation_type__in=conversation_type)
es_sort_type = CONVERSATION_ORDER.UNREAD
if order_by == MESSAGE_ORDER_TYPE.LAST_REPLY_TIME:
ordering_by.append('-last_reply_time')
es_sort_type = CONVERSATION_ORDER.LAST_REPLY_TIME
elif order_by == MESSAGE_ORDER_TYPE.READ_STATUS:
ordering_by.extend(['-read_status', '-last_reply_time'])
if search_content is not None:
es_query = {
'content': search_content,
'user_last_name': search_content,
}
assert isinstance(search_content, string_types)
es_result = search_conversation_from_es(offset=offset, size=size, filters=es_filters,
query=es_query, sort_type=es_sort_type)
total_count = es_result['total_count']
_conversation_ids = es_result['conversation_ids']
conversation_user_status = ConversationUserStatus.objects.filter(
user_id__in=user_ids, conversation_id__in=_conversation_ids
).all()
# re-order to follow the ES result order
conversation_id_to_conversation_user_status = {cus.conversation_id: cus for cus in conversation_user_status}
conversation_user_status = []
for conversation_id in _conversation_ids:
if conversation_id_to_conversation_user_status.get(conversation_id):
conversation_user_status.append(conversation_id_to_conversation_user_status[conversation_id])
else:
total_count = ConversationUserStatus.objects.filter(query_q).count()
conversation_user_status = ConversationUserStatus.objects.prefetch_related('conversation').\
filter(query_q).order_by(*ordering_by)[offset:offset + size]
# conversation_info in database
conversation_info_list = [cs.conversation.conversation_info_v2(cs) for cs in conversation_user_status]
conversation_id_to_user_id = {cs.conversation_id: cs.user_id for cs in conversation_user_status}
cs_ids = [cs.id for cs in conversation_user_status]
conversation_ids = [cs.conversation_id for cs in conversation_user_status]
# this endpoint is currently only used for client-side private messages, so return unread_num
for c in conversation_info_list:
default = 1 if c['is_new'] else 0
c['unread_num'] = UserUnread(conversation_id_to_user_id[c['id']]).get_conversation_unread(c['id'], default=default)
if 'mark' in with_fields:
conversation_id_and_target_user_id = ConversationUserStatus.objects.filter(
conversation_id__in=conversation_ids
).exclude(id__in=cs_ids).values('conversation_id', 'user_id')
conversation_id_to_target_user_id = {item['conversation_id']: item['user_id']
for item in conversation_id_and_target_user_id}
mark_user_dict_list = gaia_client.get_message_mark_user(user_id_list=user_ids, target_user_id_list=list(conversation_id_to_target_user_id.values()))
_user_ids_to_comment = {(entry.get('user_id'), entry.get('target_user_id')): entry.get('comment') for entry in mark_user_dict_list}
_conversation_id_to_user_ids = {conversation_id: (user_id, conversation_id_to_target_user_id[conversation_id])
for conversation_id, user_id in conversation_id_to_user_id.items()
if conversation_id_to_target_user_id.get(conversation_id)}
for c in conversation_info_list:
c['comment'] = _user_ids_to_comment.get(_conversation_id_to_user_ids.get(c['id']), u'')  # .get avoids KeyError for conversations without a target user
# filter out items with empty user_ids
conversation_info_list = [c for c in conversation_info_list if len(c['user_ids']) > 1]
return {
'conversation_list': conversation_info_list,
'total_count': total_count,
}
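# Illustrative shape of the list_v2 response, derived from conversation_info_v2 above:
# {'conversation_list': [{'id': ..., 'last_reply_time': ..., 'is_new': ..., 'unread_num': ...,
#                         'text': ..., 'user_ids': [...], 'jump_type': ..., 'conversation_type': ...,
#                         'is_star': ..., 'created_time': ...,
#                         'comment': ...  # only when 'mark' is in with_fields
#                        }, ...],
#  'total_count': <int>}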
@bind_prefix('message/conversation/list_v3')
def message_conversation_list_v3(user_ids: List[int],
offset: int,
size: int,
order_by: CONVERSATION_ORDER=CONVERSATION_ORDER.LAST_REPLY_TIME,
last_reply_time_start: Optional[str]=None,
last_reply_time_end: Optional[str]=None,
reply_status: Optional[bool]=None,
is_star: Optional[bool]=None,
user_id: Optional[int]=None,
user_last_name: Optional[str]=None,
comment: Optional[str]=None) -> dict:
"""
获取会话列表, 与v2不同的是全部走es获取conversation_ids
:param user_ids: LIST[USER_ID]
:param offset: INT 偏移量
:param size: INT 每页数量
:param order_by: 排序规则
:param last_reply_time_start: str 最后回复开始时间筛选
:param last_reply_time_end: str 最后回复结束时间筛选
:param reply_status: bool 回复状态(新消息、旧消息)
:param is_star: bool
:param user_id: int 精确搜索用户ID
:param user_last_name: int 搜索用户昵称
:param comment: int 搜索备注
:return:
"""
if not isinstance(user_ids, list):
raise RPCPermanentError
if not isinstance(size, int):
raise RPCPermanentError
if size <= 0 or size >= 50:
size = 50
es_filters = {'user_ids': user_ids}
if is_star is not None and isinstance(is_star, bool):
es_filters['is_star'] = is_star
if last_reply_time_start is not None:
es_filters['last_reply_time_start_gte'] = dateutil.parser.parse('{}+0800'.format(last_reply_time_start)).isoformat()
if last_reply_time_end is not None:
es_filters['last_reply_time_end_lte'] = dateutil.parser.parse('{}+0800'.format(last_reply_time_end)).isoformat()
es_query = dict()
if user_id is not None:
es_query['user_id'] = user_id
if user_last_name:
es_query['user_last_name'] = user_last_name
if comment:
es_query['comment'] = comment
es_sort_type = order_by
# all
es_result_total = search_conversation_from_es(offset=offset, size=size, filters=es_filters,
query=es_query, sort_type=es_sort_type)
# replied
es_filters['multi_filter_status'] = {
'status': True,
'user_ids': user_ids,
}
es_result_reply = search_conversation_from_es(offset=offset, size=size, filters=es_filters,
query=es_query, sort_type=es_sort_type)
# not replied
es_filters['multi_filter_status'] = {
'status': False,
'user_ids': user_ids,
}
es_result_not_reply = search_conversation_from_es(offset=offset, size=size, filters=es_filters,
query=es_query, sort_type=es_sort_type)
# es_result = search_conversation_from_es(offset=offset, size=size, filters=es_filters,
# query=es_query, sort_type=es_sort_type)
if reply_status is None:
es_result = es_result_total
else:
if reply_status:
es_result = es_result_reply
else:
es_result = es_result_not_reply
_conversation_ids = es_result['conversation_ids']
cus_ids = []
for conversation_id in _conversation_ids:
sub_ids = ConversationUserStatus.objects.filter(
user_id__in=user_ids, conversation_id=conversation_id
).values_list('id', flat=True)
cus_ids.extend(list(sub_ids))
conversation_user_status = ConversationUserStatus.objects.filter(id__in=cus_ids).order_by('-last_reply_time')
conversation_info_list = [cs.conversation.conversation_info_v3(cs, user_ids) for cs in conversation_user_status]
conversation_id_to_user_id = {cs.conversation_id: cs.user_id for cs in conversation_user_status}
for c in conversation_info_list:
default = 1 if c['is_new'] else 0
c['unread_num'] = UserUnread(conversation_id_to_user_id[c['id']]
).get_conversation_unread(c['id'], default=default)
return {
'conversation_list': conversation_info_list,
'total_count': es_result_total['total_count'],
'reply_total': es_result_reply['total_count'],
'not_reply_total': es_result_not_reply['total_count']
}
pass
......@@ -5,13 +5,16 @@ git+ssh://git@git.wanmeizhensuo.com/backend/gm-rpcd.git
git+ssh://git@git.wanmeizhensuo.com/backend/helios.git
git+ssh://git@git.wanmeizhensuo.com/system/gm-tracer.git
#git+ssh://git@git.wanmeizhensuo.com/system/kafka-python.git
git+ssh://git@git.wanmeizhensuo.com/backend/gm-upload.git@master
jsonschema==2.5.1
Django==3.0.1
PyMySQL==0.9.3
mysqlclient==1.4.6
redis==3.3.11
django-redis==4.11.0
raven==6.10.0
elasticsearch==2.3.0
kafka-python==1.4.7
gunicorn==20.0.4
djangorestframework==3.11.0
\ No newline at end of file
# -*- coding: utf-8 -*-
from importlib import import_module
from django.conf import settings
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, load_backend
from django.utils.translation import LANGUAGE_SESSION_KEY
from django.utils.crypto import constant_time_compare
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.signals import user_logged_in, user_logged_out, user_login_failed
# copy from django.contrib.sessions.middleware.SessionMiddleware
_engine = import_module(settings.SESSION_ENGINE)
_SessionStore = _engine.SessionStore
# copy and modified from django.contrib.sessions.middleware.SessionMiddleware
def get_django_session(session_key):
django_session = _SessionStore(session_key)
return django_session
# copy and modified from django.contrib.auth.get_user
def get_user_from_django_session(django_session):
user = None
try:
user_id = django_session[SESSION_KEY]
backend_path = django_session[BACKEND_SESSION_KEY]
except KeyError:
pass
else:
if backend_path in settings.AUTHENTICATION_BACKENDS:
backend = load_backend(backend_path)
user = backend.get_user(user_id)
# Verify the session (modern Django always verifies when the backend supports it;
# the old MIDDLEWARE_CLASSES setting no longer exists in Django 3)
if hasattr(user, 'get_session_auth_hash'):
session_hash = django_session.get(HASH_SESSION_KEY)
session_hash_verified = session_hash and constant_time_compare(
session_hash,
user.get_session_auth_hash()
)
if not session_hash_verified:
django_session.flush()
user = None
return user or AnonymousUser()
# copy and modified from django.contrib.auth.login
def login(django_session, user):
session_auth_hash = ''
assert user is not None
if hasattr(user, 'get_session_auth_hash'):
session_auth_hash = user.get_session_auth_hash()
if SESSION_KEY in django_session:
if django_session[SESSION_KEY] != user.pk or (
session_auth_hash and
django_session.get(HASH_SESSION_KEY) != session_auth_hash):
# To avoid reusing another user's session, create a new, empty
# session if the existing session corresponds to a different
# authenticated user.
django_session.flush()
else:
django_session.cycle_key()
django_session[SESSION_KEY] = user.pk
django_session[BACKEND_SESSION_KEY] = user.backend
django_session[HASH_SESSION_KEY] = session_auth_hash
django_session.save()
user_logged_in.send(sender=user.__class__, request=None, user=user)
# copy and modified from django.contrib.auth.logout
def logout(django_session, user):
if hasattr(user, 'is_authenticated') and not user.is_authenticated:  # property (bool) on modern Django
user = None
user_logged_out.send(sender=user.__class__, request=None, user=user)
# remember language choice saved to session
# for backwards compatibility django_language is also checked (remove in 1.8)
language = django_session.get(LANGUAGE_SESSION_KEY, django_session.get('django_language'))
django_session.flush()
if language is not None:
django_session[LANGUAGE_SESSION_KEY] = language
user_logged_out.send(sender=user.__class__, request=None, user=user)
# coding=utf-8
import json
import hashlib
import six
import redis
from django.conf import settings
class _PoolMinx(object):
pool = redis.ConnectionPool(**settings.REDIS['view'])
client = redis.Redis(connection_pool=pool)
class ViewRecord(_PoolMinx):
def __init__(self, model_name):
self.model_name = model_name
def __getitem__(self, item):
return self.client.hget(self.model_name, item) or 0
def __setitem__(self, key, value):
return self.client.hset(self.model_name, key, value)
# class DiaryPV(_PoolMinx):
# _prefix = 'diary_pv'
#
# def get_k(self, diary_id):
# k = '%s:%s' % (self._prefix, diary_id)
# return k
#
# def get(self, diary_id):
# k = self.get_k(diary_id)
# return self.client.get(k)
#
# def set(self, diary_id, value):
# k = self.get_k(diary_id)
# return self.client.set(k, value)
#
# def incrby(self, diary_id, num=1):
# k = self.get_k(diary_id)
# self.client.incrby(k, num)
class _RedisProxy(object):
"""redis proxy add prefix automatically."""
__connect_class = redis.StrictRedis
__pool = redis.ConnectionPool(**settings.DEFAULT_REDIS)
_client = __connect_class(connection_pool=__pool)
# methods listed here are wrapped so their key argument gets the prefix automatically
_hacked_methods = [
'set', 'get', 'setex', 'hget', 'hset', 'hincrby', 'hdel', 'hgetall',
'smembers', 'sadd', 'incr', 'delete', 'expire', 'expireat', 'decr',
'lpush', 'lrange', 'lrem', 'llen', 'sadd', 'srem', 'scard',
'sismember', 'rpop', 'keys', 'rpush', 'lpop', 'hmset', 'hmget',
'exists', 'mget'
]
def __getattribute__(self, name):
attr = getattr(object.__getattribute__(self, '_client'), name)
if name in _RedisProxy._hacked_methods:
def newfunc(k, *args, **kwargs):
prefix = object.__getattribute__(self, 'prefix')
if isinstance(k, six.string_types):
k = prefix + ':' + k
elif isinstance(k, list):
k = [prefix + ':' + _k for _k in k]
else:
raise TypeError("must be str or list")
result = attr(k, *args, **kwargs)
return result
return newfunc
return attr
def __init__(self, prefix):
self.prefix = prefix
@classmethod
def get_client(cls, prefix=''):
return cls(prefix)
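# A minimal usage sketch (illustrative): each hacked method prepends the proxy's prefix to
# its key argument, so both calls below operate on the underlying Redis key 'user_cache:42'.
#
# user_cache = _RedisProxy.get_client('user_cache')
# user_cache.set('42', 'some-value')
# user_cache.get('42')  # reads 'user_cache:42'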
class _LiveRedisProxy(object):
"""redis proxy add prefix automatically."""
__connect_class = redis.StrictRedis
__pool = redis.ConnectionPool(**settings.LIVE_REDIS)
_client = __connect_class(connection_pool=__pool)
# methods listed here are wrapped so their key argument gets the prefix automatically
_hacked_methods = [
'set', 'get', 'setex', 'hget', 'hset', 'hincrby', 'hdel', 'hgetall',
'smembers', 'sadd', 'incr', 'delete', 'expire', 'decr',
'lpush', 'lrange', 'lrem', 'llen', 'sadd', 'srem', 'scard',
'sismember', 'rpop', 'keys', 'rpush', 'zadd', 'lpop', 'hmset'
]
def __getattribute__(self, name):
attr = getattr(_LiveRedisProxy._client, name)
if name in _LiveRedisProxy._hacked_methods:
def newfunc(k, *args, **kwargs):
prefix = object.__getattribute__(self, 'prefix')
k = prefix + ':' + k
result = attr(k, *args, **kwargs)
return result
return newfunc
return attr
def __init__(self, prefix):
self.prefix = prefix
@classmethod
def get_client(cls, prefix=''):
return cls(prefix)
class _PunishmentProxy(_RedisProxy):
_pool = redis.ConnectionPool(**settings.PUNISHMENT_REDIS)
_client = redis.StrictRedis(connection_pool=_pool)
def add_limit_num(key, timeout, max_num):
"""
Shared cache helper that limits the number of attempts within a fixed time window; the key should preferably include the module name
"""
times = get_limit_num(key)
if times < max_num:
limit_num_cache.setex(key, timeout, times + 1)
else:
pass
def get_limit_num(key):
res = limit_num_cache.get(key)
if res:
return int(res)
else:
return 0
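# Usage sketch (illustrative, key name hypothetical): allow at most 5 attempts per hour.
#
# key = 'sms:send:{}'.format(phone_number)
# if get_limit_num(key) >= 5:
#     ...  # reject the request
# else:
#     add_limit_num(key, timeout=3600, max_num=5)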
def make_cache_key(f, **kwargs):
method_url = "%s.%s?%s" % (f.__module__, f.__name__, json.dumps(kwargs, sort_keys=True))
method_md5 = hashlib.md5(method_url.encode('utf-8')).hexdigest()  # md5 requires bytes on Python 3
cache_key = "cache:%s:rpc:%s" % (method_md5, settings.VERSION)
return cache_key
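# Example (illustrative): make_cache_key(some_func, page=1, user_id=3) hashes the string
# "<module>.some_func?{"page": 1, "user_id": 3}" and returns a key shaped like
# "cache:<md5 hex>:rpc:<settings.VERSION>".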
_redis_proxy = _RedisProxy.get_client
_punishment_proxy = _PunishmentProxy.get_client
user_cache = _redis_proxy("user_cache")
push_cache = _redis_proxy("push_cache")
code_cache = _redis_proxy("code")
count_cache = _redis_proxy("count")
servicehome_cache = _redis_proxy('servicehome')
filter_cache = _redis_proxy('filter')
hospital_cache = _redis_proxy("hospital")
req_data_cache = _redis_proxy("req_data")
feedback_msg_cache = _redis_proxy("feedback_msg")
topic_sug_cache = _redis_proxy('topic_sug')
itemwiki_hot_cache = _redis_proxy('itemwiki_hot')
itemwiki_cache = _redis_proxy('itemwiki')
send_cache = _redis_proxy('sms')
consult_cache = _redis_proxy('consult')
service_template_images = _redis_proxy('service_template_images')
doctor_switch_cache = _redis_proxy('doctor_switch')
business_select_cache = _redis_proxy('business_select')
recommend_cache = _redis_proxy("recommend")
gadget_cache = _redis_proxy('gadget') # componentized cache for home / service-mall home / community home pages
index_func_cache = _redis_proxy('index_func') # home page function area
sideslip_cache = _redis_proxy('sideslip') # home page horizontal-scroll buttons
doctor_tags_cache = _redis_proxy('doctor_tags')
hospital_tags_cache = _redis_proxy('hospital_tags')
doctor_patient_cache = _redis_proxy('doctor_patient') # doctor-patient cache list
limit_num_cache = _redis_proxy('limit_num') # attempt-limit counters
data_transfer_cache = _redis_proxy('data_trans') # cache for search data-transfer
hospital_doctor_service_cache = _redis_proxy('hospital_doctor_service')
installment_cache = _RedisProxy('installment')
follow_cache = _RedisProxy('new_follow')
model_cache = _redis_proxy('model_c')
email_cache = _redis_proxy('email')
tag_child_tags_cache = _redis_proxy('tag_child_tags')
unread_cache = _redis_proxy('unread')
doctor_traffic_cache = _redis_proxy('real_traffic')
sleep_user_cache = _redis_proxy('sleep_user')
doctor_discount_cache = _redis_proxy('doctor_discount')
tag_top_sale_cache = _redis_proxy('tag_top_sale')
service_home_city_cache = _redis_proxy('service_home_city')
doctor_service_tags_distribution_cache = _redis_proxy('doctor_service_tags_distribution')
hospital_service_tags_distribution_cache = _redis_proxy('hospital_service_tags_distribution')
shop_cart_cache = _redis_proxy('shopcart')
service_info_list_cache = _redis_proxy('service_info_list')
wechatpub_cache = _redis_proxy('wxpub')
search_dic_cache = _redis_proxy('search_dic') # cache for the search-engine dictionary
sleep_action_fans_cache = _redis_proxy('sleep_action_fans_cache')
sleep_noaction_fans_cache = _redis_proxy('sleep_noaction_fans_cache')
# todo update in 7660: maintained by mimas, do NOT write to these any more! ******start*******
# _pool = redis.ConnectionPool(**settings.REDIS['vote_cache'])
# vote_cache = redis.StrictRedis(connection_pool=_pool)
diary_heat_score_cache = _redis_proxy('diary_heat') # todo: pending hera migration
take_sofa_diary_cache = _redis_proxy('take_sofa_diary') # todo: pending hera migration
# wechat_cache = _redis_proxy('wechat')
# diary_pv_cache = DiaryPV()
# todo update in 7660: maintained by mimas, do NOT write to these any more! ******end*******
page_cache = redis.StrictRedis(**settings.REDIS['page_cache'])
price_cache = _redis_proxy('price_cache')
service_feature_cache = _redis_proxy('service_feature_cache')
# add for young_doctor vote
yd_vote = _redis_proxy('yd_vote')
seo_cache = _redis_proxy('seo')
service_config_cache = _redis_proxy('service_config')
addfans_config_cache = _redis_proxy('addfans_config')
doctor_search_tags_cache = _redis_proxy('doctor_search_tags')
hospital_search_tags_cache = _redis_proxy('hospital_search_tags')
special_service_list_cache = _redis_proxy('special_service_list')
service_score_tag_avg = _redis_proxy('service_score_tag_avg')
share_diary_num_cache = _redis_proxy('share_diary_num_cache')
coupon_cache = _redis_proxy('coupon')
index_tab_cache = _redis_proxy('index_tab') # config for the horizontal-scroll tabs at the top of the home page
multitopic_cache = _redis_proxy('multitopic')
coupon_badge_cache = _redis_proxy('coupon_badge')
doctor_badge_cache = _redis_proxy('doctor_badge')
# high_quality_question_cache = _redis_proxy('high_quality_question_cache')
doctor_sell_amount_cache = _redis_proxy('doctor_sell_amount_cache')
hospital_sell_amount_cache = _redis_proxy('hospital_sell_amount_cache')
mid_autumn_answer_record_cache = _redis_proxy('mid_autumn_answer_record') # Mid-Autumn Festival event; only successful answers are recorded
punishment_cache = _punishment_proxy('pilot:m') # service (welfare) dimension
punishment_diary_cache = _punishment_proxy('pilot:d:m') # diary dimension
billapplyrecord_cache = _redis_proxy('billapplyrecord')
citycache = _redis_proxy('index_city')
popup_cache = _redis_proxy('popup')
rankcache = _redis_proxy("rank")
recommend_college_cache = _redis_proxy("recommend_colleges") # recommended colleges
serviceregister_activity_status_cache = _redis_proxy("serviceregister_activity_status") # service submission activity status
common_cache = _redis_proxy("common")
qq_city_cache = _redis_proxy("qq_city_name") # maps latitude/longitude to city name
city_info_cache = _redis_proxy("city_info") # city info
variety_cache = _redis_proxy("variety_vote")
groupbuy_cache = _redis_proxy('groupbuy')
lipstick_cache = _redis_proxy('lipstick')
diary_cache = _redis_proxy('diary')
sign_cache = _redis_proxy('sign')
new_spring_click = _redis_proxy('newspring_click')
wechat_auth_cache = _redis_proxy('wechat_auth')
hera_feeds_cache = _redis_proxy('hera_feeds') # home page phase-2 revamp, feed strategy
young_doc_cache = _redis_proxy('yd') # young doctor 1905
famous_doctors_cache = _redis_proxy('famous_doctors') # famous_doctors
browsing_message_cache = _redis_proxy('browsing_message') # merchants' daily proactive private messages
old_new_tag_mapping_cache = _redis_proxy('old_new_tag_mapping')
smark_rank_cache = _redis_proxy('smark_rank')
channel_cache=_redis_proxy('channel_cache')
slide_cache=_redis_proxy('slide_cache')
sign_rush_cache = _redis_proxy('sign_rush_cache')
sign_task_cache = _redis_proxy('sign_task_cache')
sku_stock_lock = _redis_proxy('sku_stock_lock')
conversation_cache = redis.StrictRedis(**settings.REDIS['conversation_cache'])
\ No newline at end of file
import contextlib
import logging
import threading
from django.contrib.auth.models import AnonymousUser
from django.conf import settings
from cached_property import cached_property
from gm_logging.utils import get_exception_logging_func
from helios.rpc import create_default_invoker
from adapter.rpcd.exceptions import RPCLoginRequiredException
from . import auth
# from .tool.log_tool import logging_exception, info_logger
info_logger = logging.getLogger(__name__)
exception_logger = logging.getLogger('exception_logger')
from raven.contrib.django.raven_compat.models import client as _sentry_client
logging_exception = get_exception_logging_func(exception_logger, _sentry_client)
class Session(object):
def __init__(self, session_key=None):
assert session_key is None or isinstance(session_key, str)
if session_key == '': # make empty session_key as empty session
session_key = None
django_session = auth.get_django_session(session_key)
django_user = auth.get_user_from_django_session(django_session)
self._django_session = django_session
self._django_user = django_user
def do_login(self, user):
auth.login(self._django_session, user)
self._django_user = user
self._django_session.save()
def do_logout(self):
auth.logout(self._django_session, self._django_user)
self._django_user = auth.AnonymousUser()
self._django_session.save()
@property
def session_key(self):
return self._django_session.session_key
@property
def has_login(self):
user = self.user
info_logger.info(user)
res = user.is_authenticated  # property (bool) on modern Django, not a method
return res
def login_required(self):
if not self.has_login:
raise RPCLoginRequiredException
def set_wechat_unionid(self, unionid):
if unionid:
sk = "wx_unionid"
self._django_session[sk] = unionid
self._django_session.save()
def get_wechat_unionid(self):
sk = "wx_unionid"
result = self._django_session.get(sk, None)
return result
def set_wechat_openid(self, wechat_appid, openid):
if wechat_appid and openid:
sk = "wx_openid_for_app_{}".format(wechat_appid)
self._django_session[sk] = openid
self._django_session.save()
def get_wechat_openid(self, wechat_appid):
result = None
if wechat_appid:
sk = "wx_openid_for_app_{}".format(wechat_appid)
result = self._django_session.get(sk, None)
return result
@property
def user_id(self):
return self.user.id
@property
def user(self):
user = self._django_user
return user if user.is_active else AnonymousUser()
@property
def groups(self):
return self.user.belong_groups.values_list('name', flat=True)
_base_invoker = create_default_invoker(
debug=settings.DEBUG
).with_config(
dump_curl=True
)
class NoCurrentContextError(Exception):
pass
def get_current_context_or_throw_exception():
context = ContextManager.get_active_context()
if context:
return context
raise NoCurrentContextError
def _do_get_rpc_remote_invoker():
context = ContextManager.get_active_context()
if context:
return context.rpc_remote
else:
return _base_invoker
def get_rpc_remote_invoker():
return _do_get_rpc_remote_invoker()
def get_gaia_local_invoker():
# TODO: gaia_local_invoker
context = ContextManager.get_active_context()
# if context:
return context.gaia_local
# else:
# from .nested_invoker import NestedInvoker
# return NestedInvoker(ctx=context)
class Context(object):
has_session = None
logger = None
def __init__(self, session_key, request=None):
self.__session_key = session_key
self.has_session = bool(session_key)
self._request = request
@cached_property
def session(self):
return Session(session_key=self.__session_key)
# @cached_property
# def gaia_local(self):
# from .nested_invoker import NestedInvoker
# return NestedInvoker(self)
@property
def rpc(self):
try:
raise Exception(u'should not use Context.rpc, use Context.gaia_local or Context.rpc_remote')
except Exception:
if settings.DEBUG:
raise
else:
logging_exception()
return self.gaia_local
@cached_property
def rpc_remote(self):
if self._request:
client_info = self._request.client_info
else:
client_info = None
return _base_invoker.with_config(
session_key=self.__session_key,
client_info=client_info,
)
class ContextManager(object):
_active_context_local = threading.local()
@classmethod
@contextlib.contextmanager
def with_active_context(cls, context):
"""
:type context: Context
"""
acl = cls._active_context_local
previous = getattr(acl, 'context', None)
acl.context = context
try:
yield
finally:
acl.context = previous
@classmethod
def get_active_context(cls):
"""
:rtype: Context | None
"""
return getattr(cls._active_context_local, 'context', None)
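# Usage sketch (illustrative): the dispatcher is expected to wrap each request roughly like
# this, so helpers such as get_rpc_remote_invoker() can see the active Context through the
# thread-local while the handler runs.
#
# ctx = Context(session_key=incoming_session_key, request=rpc_request)
# with ContextManager.with_active_context(ctx):
#     result = handler(**params)  # get_active_context() returns ctx in here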
class ConnectionInfo(object):
request = None
client_ip = None
def __init__(self, request, client_ip=None):
self.request = request
# Xing Ye tells me that there are these settings on proxy:
# proxy_set_header X-Real-IP $remote_addr;
# proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
if not client_ip:
try:
client_ip = request.META.get('HTTP_X_FORWARDED_FOR').split(',')[0]
except Exception:
pass
self.client_ip = client_ip
class Request(object):
method = None
params = None
session_key = None
environment = None
is_nested_call = None
context = None
method_info = None
request_info = None
def __init__(self, method, params, session_key, environment, is_nested_call=False):
self.method = method
self.params = params
self.session_key = session_key
self.environment = environment
self.is_nested_call = is_nested_call
self.context = Context(session_key=session_key, request=self)
@property
def client_info(self):
# return (self.environment or {}).get(u'client_info')
request_info = self.request_info
if request_info:
return request_info.get_client_info()
return None
def create_fake_context():
return Context(session_key=None)
# -*- coding: utf-8 -*-
from typing import List, Dict
from rpc.context import get_rpc_remote_invoker
def get_doctor_id_by_user_id(user_id: int) -> int:
result = get_rpc_remote_invoker()['gaia/get_doctor_id_by_user_id'](
user_id=user_id,
).unwrap()
return result.get('user_id')
def get_message_mark_user(user_id_list: List[int],
target_user_id_list: List[int]) \
-> List[Dict]:
return get_rpc_remote_invoker()['gaia/courier/mark_user'](
user_id_list=user_id_list,
target_user_id_list=target_user_id_list
).unwrap()
\ No newline at end of file
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
import itertools
import threading
import functools
import elasticsearch
import elasticsearch.helpers  # helpers is a submodule; `import elasticsearch` alone does not expose it
from django.conf import settings
from .config import config, ESDatabaseConfig, ESTableSchema, ESVersion, \
load_mapping
from .models import ESBulkAction
class ESClientManagerInterface(object):
def search(self, table, **kwargs):
raise NotImplementedError
def index(self, table, body, **kwargs):
raise NotImplementedError
def mget(self, table, body, **kwargs):
raise NotImplementedError
def bulk_single(self, action):
raise NotImplementedError
def helper_bulk(self, action_iter, **kwargs):
raise NotImplementedError
def helper_scan(self, table, **kwargs):
raise NotImplementedError
def alter_table(self, table, drop_if_exists=False):
raise NotImplementedError
def _create_es_client(hosts):
if settings.DEBUG:
es = elasticsearch.Elasticsearch(
hosts=hosts,
# no sniffing
sniff_on_start=False,
sniff_on_connection_fail=False
)
else:
es = elasticsearch.Elasticsearch(
hosts=hosts,
# sniff before doing anything
sniff_on_start=True,
# refresh nodes after a node fails to respond
sniff_on_connection_fail=True,
# and also every 60 seconds
sniffer_timeout=60,
sniff_timeout=1
)
return es
class ESClientManager(ESClientManagerInterface):
INDEX = 'index'
DOC_TYPE = 'doc_type'
def __init__(self, database_config: ESDatabaseConfig):
# database_config = config.get_database_config(database_name)
assert isinstance(database_config, ESDatabaseConfig)
self.client = _create_es_client(hosts=database_config.hosts)
self.database_config = database_config
def get_table_index(self, table):
return self.database_config[table].index
def get_table_doc_type(self, table):
return self.database_config[table].doc_type
def params_add_table(self, table, params):
assert isinstance(params, dict)
if self.INDEX in params or self.DOC_TYPE in params:
raise ValueError(
'params contains {} or {}'.format(self.INDEX, self.DOC_TYPE))
table_config = self.database_config[table]
params[self.INDEX] = table_config.index
params[self.DOC_TYPE] = table_config.doc_type
return params
def bulk_action_to_dict(self, bulk_action):
assert isinstance(bulk_action, ESBulkAction)
d = dict(bulk_action.params)
if bulk_action.table:
table_config = self.database_config[bulk_action.table]
d[bulk_action.INDEX] = table_config.index
d[bulk_action.DOC_TYPE] = table_config.doc_type
return d
def search(self, table, **kwargs):
assert isinstance(table, ESTableSchema)
return self.client.search(
index=self.get_table_index(table),
doc_type=self.get_table_doc_type(table),
**kwargs
)
def index(self, table, body, **kwargs):
assert isinstance(table, ESTableSchema)
return self.client.index(
index=self.get_table_index(table),
doc_type=self.get_table_doc_type(table),
body=body,
**kwargs
)
def mget(self, table, body, **kwargs):
assert isinstance(table, ESTableSchema)
return self.client.mget(
index=self.get_table_index(table),
doc_type=self.get_table_doc_type(table),
body=body,
**kwargs
)
def bulk_single(self, action):
assert isinstance(action, ESBulkAction)
action_dict = self.bulk_action_to_dict(action)
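        # expand_action splits one bulk action dict into (action_line, data_line);
        # the data line is None for actions that carry no source document (e.g. delete)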
expanded = elasticsearch.helpers.expand_action(action_dict)
if expanded[1] is None:
bulk_actions = (expanded[0],)
else:
bulk_actions = expanded
self.client.bulk(bulk_actions)
def helper_scan(self, table, **kwargs):
params = dict(kwargs)
self.params_add_table(table=table, params=params)
return elasticsearch.helpers.scan(
client=self.client,
**params
)
def helper_bulk(self, action_iter, **kwargs):
return elasticsearch.helpers.bulk(
client=self.client,
            actions=map(self.bulk_action_to_dict, action_iter),
**kwargs
)
def alter_table(self, table, drop_if_exists=False):
assert isinstance(table, ESTableSchema)
if self.database_config.es_version == ESVersion.V1:
mapping_name = table.mapping_v1_name
elif self.database_config.es_version == ESVersion.V2:
mapping_name = table.mapping_v2_name
else:
raise Exception('invalid es_version: {}'.format(
self.database_config.es_version))
mapping = load_mapping(mapping_name)
cl = self.client.indices
index = self.get_table_index(table)
doc_type = self.get_table_doc_type(table)
if not cl.exists(index=index):
cl.create(index=index)
if cl.exists_type(index=index, doc_type=doc_type) and drop_if_exists:
cl.delete_mapping(index=index, doc_type=doc_type)
return cl.put_mapping(index=[index], doc_type=doc_type, body=mapping)
class ESOperationType(object):
HOST_ONLY = 1
EXTENSIVE = 2
def es_operation_type_seletor(optype):
def decorator(f):
name = f.__name__
if optype == ESOperationType.HOST_ONLY:
@functools.wraps(f)
def wrapper(self, *args, **kwargs):
return getattr(self.default_operator, name)(*args, **kwargs)
return wrapper
elif optype == ESOperationType.EXTENSIVE:
@functools.wraps(f)
def wrapper(self, *args, **kwargs):
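                # the `return` inside `finally` overrides the host operator's return value
                # and also swallows any exception it raised, so the backup operator always
                # runs and its result is what gets returned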
try:
return getattr(self.default_operator, name)(*args, **kwargs)
finally:
return getattr(self.secondary_operator, name)(*args, **kwargs)
return wrapper
else:
raise Exception('invalid operation type: {}'.format(optype))
return decorator
class ESHostBackupClientManager(ESClientManager):
def __init__(self, default, secondary):
assert isinstance(default, ESClientManager)
assert isinstance(secondary, ESClientManager)
self.default_operator = default # for host
self.secondary_operator = secondary # for backup
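        # read operations (search/mget/helper_scan) are routed to the host cluster only;
        # write operations (index/bulk_single) are applied to the host and then the backup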
@es_operation_type_seletor(ESOperationType.HOST_ONLY)
def search(self, table, **kwargs):
raise NotImplementedError
@es_operation_type_seletor(ESOperationType.EXTENSIVE)
def index(self, table, body, **kwargs):
raise NotImplementedError
@es_operation_type_seletor(ESOperationType.HOST_ONLY)
def mget(self, table, body, **kwargs):
raise NotImplementedError
@es_operation_type_seletor(ESOperationType.EXTENSIVE)
def bulk_single(self, action):
raise NotImplementedError
@es_operation_type_seletor(ESOperationType.HOST_ONLY)
def helper_scan(self, table, **kwargs):
raise NotImplementedError
def alter_table(self, table, drop_if_exists=False):
        raise NotImplementedError('alter_table is not supported by ESHostBackupClientManager')
\ No newline at end of file
# -*- coding: utf-8 -*-
import json
import os
import re
import jsonschema
import six
from django.conf import settings
def load_mapping(mapping_name):
mapping_file_path = os.path.join(
settings.BASE_DIR,
'message', 'mapping', '%s.json' % (mapping_name,)
)
mapping = ''
with open(mapping_file_path, 'r') as f:
for line in f:
            # strip // comments before parsing the JSON
mapping += re.sub(r'//.*$', '', line)
mapping = json.loads(mapping)
return mapping
class ESVersion(object):
V1 = 'v1'
V2 = 'v2'
class ESTableSchema(object):
def __init__(self, table_name, mapping_v1_name, mapping_v2_name):
assert isinstance(table_name, six.string_types)
assert isinstance(mapping_v1_name, six.string_types)
self.table_name = table_name
self.mapping_v1_name = mapping_v1_name
self.mapping_v2_name = mapping_v2_name
def __repr__(self):
return '{}(table_name={})'.format(
self.__class__.__name__,
self.table_name,
)
def __reduce__(self):
raise Exception('unserializable')
def __reduce_ex__(self, *args, **kwargs):
raise Exception('unserializable')
table_message = ESTableSchema(
table_name='message',
mapping_v1_name='message.v1',
mapping_v2_name='message.v2',
)
table_conversation = ESTableSchema(
table_name='conversation',
mapping_v1_name='conversation.v1',
mapping_v2_name='conversation.v2',
)
table_schema_map = {
ts.table_name: ts
for ts in [table_conversation, table_message]
}
_config_schema = {
'$schema': 'http://json-schema.org/draft-04/schema#',
'type': 'object',
'properties': {
        'order': {
            'type': 'array',
            'items': {
                'type': 'string',
            },
            'minItems': 1,
            'maxItems': 2,
            'uniqueItems': True,
        },
'database': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'name': {'type': 'string'},
'es_version': {'enum': [ESVersion.V1, ESVersion.V2]},
'hosts': {'type': 'array'},
'table': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'name': {
'type': 'string',
'enum': list(table_schema_map),
},
'index': {'type': 'string'},
'doc_type': {'type': 'string'},
},
'required': ['name', 'index', 'doc_type'],
}
}
},
'required': ['name', 'es_version', 'hosts', 'table'],
},
},
},
'required': ['order', 'database'],
}
class ESTableConfig(object):
def __init__(self, name, index, doc_type):
assert isinstance(name, six.string_types)
assert isinstance(index, six.string_types)
assert isinstance(doc_type, six.string_types)
self.name = name
self.index = index
self.doc_type = doc_type
class ESDatabaseConfig(object):
def __init__(self, name, es_version, hosts, table_list):
self.name = name
self.es_version = es_version
self.hosts = hosts
self.table_list = table_list
self.__table_map = {
table.name: table
for table in table_list
}
def __getitem__(self, item):
assert isinstance(item, ESTableSchema)
return self.__table_map[item.table_name]
class Config(object):
def __init__(self, config_data):
jsonschema.validate(config_data, _config_schema)
self.config_data = config_data
order = config_data['order']
assert isinstance(order, list)
self.order = order
database_list = []
for database_config in config_data['database']:
table_list = []
for table_config in database_config['table']:
table_list.append(ESTableConfig(
name=table_config['name'],
index=table_config['index'],
doc_type=table_config['doc_type'],
))
database_list.append(ESDatabaseConfig(
name=database_config['name'],
es_version=database_config['es_version'],
hosts=database_config['hosts'],
table_list=table_list,
))
self.database_list = database_list
self.__database_map = {
database.name: database
for database in database_list
}
def get_database_config(self, name):
return self.__database_map[name]
config = Config(settings.ES_MSG)
# -*- coding: utf-8 -*-
class ESBulkAction(object):
INDEX = '_index'
DOC_TYPE = '_type'
def __init__(self, table=None, params=None):
params = params or {}
if self.INDEX in params or self.DOC_TYPE in params:
raise ValueError('params contains {} or {}'.format(self.INDEX, self.DOC_TYPE))
self.table = table
self.params = params
# -*- coding: utf-8 -*-
import threading
from .config import config
from .client import ESClientManager, ESHostBackupClientManager
_esop_instance_lock = threading.Lock()
_esop_instance = None # singleton
_esop_migrate_instance = None
def create_esop_for_database(database_name) -> ESClientManager:
database_config = config.get_database_config(database_name)
return ESClientManager(database_config=database_config)
def _create_esop() -> ESClientManager:
es_client_manager_list = [
create_esop_for_database(database_name)
for database_name in config.order
]
if len(es_client_manager_list) == 1:
return es_client_manager_list[0]
elif len(es_client_manager_list) == 2:
default, secondary = es_client_manager_list
return ESHostBackupClientManager(default=default, secondary=secondary)
else:
raise Exception('impossible')
def get_esop() -> ESClientManager:
    '''thread-safe singleton accessor (double-checked locking)'''
global _esop_instance
if _esop_instance is None:
with _esop_instance_lock:
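            # re-check inside the lock: another thread may have created the instance while we waited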
if _esop_instance is None:
_esop_instance = _create_esop()
return _esop_instance
# -*- coding: utf-8 -*-
import functools
from typing import Dict
from django.conf import settings
from gm_types.msg import CONVERSATION_TYPE, CONVERSATION_ORDER
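# limited_size(n) == min(settings.COUNT_LIMIT, n): caps the requested page size at the configured limit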
limited_size = functools.partial(min, settings.COUNT_LIMIT)
def search_conversation_from_es(offset=0,
size=50,
filters={},
query={},
sort_type=CONVERSATION_ORDER.LAST_REPLY_TIME)\
-> dict:
res = search_conversation_es(offset, size, filters, query, sort_type)
conversation_ids = [int(s['_id']) for s in res['hits']['hits']]
total_count = res['hits']['total']
return {
'total_count': total_count,
'conversation_ids': conversation_ids,
}
def search_conversation_es(offset: int=0,
size: int=50,
filters: Dict={},
query: Dict={},
sort_type:CONVERSATION_ORDER=CONVERSATION_ORDER.LAST_REPLY_TIME):
size = limited_size(size)
filter_element_list = []
query_element_list = []
for k, v in filters.items():
if k == 'user_ids':
filter_element_list.append({
'nested': {
'path': 'users',
'query': {
'terms': {'users.id': v}
}
}
})
elif k == 'multi_filter':
filter_element_list.append({
'nested': {
'path': 'users',
'query': {
'bool': {
'must': [
{'match': {'users.is_unread': v['is_unread']}},
{'terms': {'users.id': v['user_ids']}}
]
}
}
}
})
elif k == 'multi_filter_status':
filter_element_list.append({
'nested': {
'path': 'users',
'query': {
'bool': {
'must': [
{'match': {'users.status': v['status']}},
{'terms': {'users.id': v['user_ids']}}
]
}
}
}
})
elif k == 'conversation_type':
v = int(v)
local_filters = []
local_filters.append({
'term': {'conversation_type': v}
})
if v == CONVERSATION_TYPE.MESSAGE:
local_filters.append({
'bool': {
'must_not': {
'exists': {'field': 'conversation_type'}
}
}
})
filter_element_list.append({
'bool': {
'should': local_filters
}
})
elif k == 'is_star':
filter_element_list.append({
'bool': {
'must': {
'term': {'is_star_by_doctor': v}
}
}
})
elif k == 'last_reply_time_start_gte':
filter_element_list.append({
'bool': {
'must': {
'range': {'last_reply_time': {
"gte": v
}
}
}
}
})
elif k == 'last_reply_time_end_lte':
filter_element_list.append({
'bool': {
'must': {
'range': {'last_reply_time': {
"lte": v
}
}
}
}
})
elif k == 'status':
filter_element_list.append({
'bool': {
'must': {
'term': {'status': v}
}
}
})
for k, v in query.items():
if k == 'content' and v:
query_element_list.append({
'nested': {
'path': 'messages',
'score_mode': 'max',
'query': {
'match_phrase': {'messages.content.text': v}
}
}
})
if k == 'comment' and v:
query_element_list.append({
'nested': {
'path': 'users',
'score_mode': 'max',
'query': {
'match_phrase': {'users.comment': v}
}
}
})
if k == 'user_last_name' and v:
query_element_list.append({
'nested': {
'path': 'users',
'score_mode': 'max',
'query': {
'match_phrase': {'users.last_name': v}
}
}
})
if k == 'user_id' and v:
query_element_list.append({
'nested': {
'path': 'users',
'score_mode': 'max',
'query': {
'match_phrase': {'users.id': v}
}
}
})
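    # note: the 'filtered' query below is Elasticsearch 1.x query DSL; later versions
    # replaced it with a bool query using a filter clause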
es_query = {
'query': {
'filtered': {
'filter': {
'bool': {
'must': filter_element_list,
},
},
'query': {
'bool': {
'should': query_element_list,
}
}
},
},
}
if sort_type == CONVERSATION_ORDER.UNREAD:
es_query['sort'] = [
{'_script': {
'lang': settings.ES_SCRIPT_LANG,
'script_file': 'sort_conversation-default',
'type': 'number',
'params': {
'context_user_id': filters['user_id'] if 'user_id' in filters else -1,
},
'order': 'desc',
}},
{'last_reply_time': {'order': 'desc'}},
]
elif sort_type == CONVERSATION_ORDER.LAST_REPLY_TIME:
es_query['sort'] = [
{'last_reply_time': {'order': 'desc'}},
]
from search.eswrapper.config import table_conversation
from search.eswrapper.shortcuts import get_esop
res = get_esop().search(
table=table_conversation,
timeout=settings.ES_SEARCH_TIMEOUT,
body=es_query,
from_=offset,
size=size,
)
return res
# -*- coding: utf-8 -*-
#!/usr/bin/env python
# coding=utf-8
#!/usr/bin/env python
# coding=utf-8
import base64
class BaseUnread(object):
    _cache_time = 86400  # cache for one day
    _time_fmt = '%Y-%m-%d %H:%M:%S'  # storage format for the user's latest action time
    def __init__(self, user_id=None, doctor_id=None):
        assert any([user_id, doctor_id])
        self.user_id = user_id
        self.doctor_id = doctor_id
    def _reload_user(self):
        # lazily resolve user_id from doctor_id before user-level cache lookups
        if self.user_id is not None:
            return
        assert self.doctor_id
        from hippo.models import Doctor
        self.user_id = Doctor.objects.get(id=self.doctor_id).user_id
def gen_cache_key(self, event, *args, **kwargs):
"""
        1. When used for the generic user-level cache, pass is_user=True.
        2. Subclasses must initialize self.user_id or self.doctor_id.
"""
is_user = kwargs.get('is_user', False)
if is_user:
key = u'user:{}:{}'.format(self.user_id, event)
else:
key = u'doctor:{}:{}'.format(self.doctor_id, event)
return key
def gen_order_viewtime_key(self, pre):
"""
        Key used to cache the time the doctor last viewed the order list.
"""
return u'{}:time'.format(pre)
def gen_conversation_key(self, conversation_id=None):
"""
        Unread private-message counts are stored as a hash; return the (hash name, field key) pair.
"""
name = u'conversation:{}'.format(self.user_id)
return name, conversation_id
def gen_poll_channel(doctor_id):
"""
    channel_name used to connect to the poll service.
"""
doctor_id_str = base64.b64encode(doctor_id.encode('utf-8'))
return u"gaia:doctor:unread:{}".format(doctor_id_str)
def gen_poll_named_channel(user_id):
"""生成连接poll需要的channel_name
:param user_id: 用户ID
:rtype: unicode
"""
userid_base64 = base64.b64encode(user_id.encode('utf-8'))
return u"poll_channel:{}".format(userid_base64)
#!/usr/bin/env python
# coding=utf-8
import json
import datetime
import time
from django.contrib.auth.models import User
from gm_types.gaia import NOTIFY_EVENT, NOTIFY_OPERATION, DOCTOR_TYPE
from api.models import Doctor
from api.tool.user_tool import get_doctor_by_user_id
from hippo.tool.chain_hospital_tools import get_master_merchant
from rpc.all import get_rpc_remote_invoker
from rpc.cache import unread_cache
from rpc.tool.log_tool import doctor_unread_logger
from services.unread.base import BaseUnread, gen_poll_channel, gen_poll_named_channel
def noti_operation(event, operation, user_id=None, doctor_id=None, data=None):
"""
    Pass user_id when the action is performed by a User;
    pass doctor_id when the action is confirmed to come from a Doctor.
    At least one of user_id and doctor_id must be non-empty.
    1. When event == NOTIFY_EVENT.CONVERSATION (private message):
    data = {
        'conversation_id': 1518193,
        'send_uid': 1938785,
        'target_uid': 602329,
        'conversation_type': 1,
    }
    Note: only unread messages with conversation_type == 1 are handled.
"""
method = "{}_{}".format(event, operation)
unread_info = u'event_operation is {}, user_id is {}, doctor_id is {}, data is {}'.format(
method, user_id, doctor_id, json.dumps(data) if data else ''
)
doctor_unread_logger.info(unread_info)
    if event == NOTIFY_EVENT.CONVERSATION:  # private messages are addressed to a User
noti_unread = NotiUserUnread(user_id)
else:
noti_unread = NotiDoctorUnread(doctor_id)
handler = getattr(noti_unread, method)
handler(data=data)
def noti_poll(doctor_id, event, operation, content):
noti_content = {
'event': event,
'operation': operation,
'content': content,
'send_timestamp': time.time()
}
text = json.dumps(noti_content)
get_rpc_remote_invoker()['poll/named_channel/publish_text'](
named_channel=gen_poll_channel(doctor_id),
text=text,
).unwrap()
def noti_user_poll(user_id, event, operation, content):
"""发送消息通知对应的用户
:param user_id: 用户ID
:param event: 事件 example: conversation
:param operation: 操作 example: add
:param content: 数据内容
:rtype: None
"""
noti_content = {
'event': event,
'operation': operation,
'content': content,
}
text = json.dumps(noti_content)
get_rpc_remote_invoker()['poll/named_channel/publish_text'](
named_channel=gen_poll_named_channel(str(user_id)),
text=text,
).unwrap()
class _NotiUnread(BaseUnread):
_hacked_methods = []
def __getattribute__(self, name):
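        # names listed in _hacked_methods (e.g. 'conversation_add') are not defined as real
        # methods; they are resolved here and dispatched to self._operation(event, operation, ...)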
_hacked_methods = object.__getattribute__(self, '_hacked_methods')
if name in _hacked_methods:
def newfunc(*args, **kwargs):
_method = name.split('_')
_event = _method[0]
_operation = _method[1]
result = self._operation(_event, _operation, *args, **kwargs)
return result
return newfunc
else:
return super(_NotiUnread, self).__getattribute__(name)
def _operation(self, *args, **options):
raise NotImplementedError('subclasses of _NotiUnread must provide a _operation() method')
class NotiUserUnread(_NotiUnread):
_hacked_methods = [
        # conversation_clear clears the unread count of one specific conversation_id
'conversation_add', 'conversation_clear', # 'conversation_delete',
]
def __init__(self, user_id):
self.user_id = user_id
def _gen_user_cache_key(self, event):
"""
        !!! Users must obtain the cache_key through this method;
        do not call gen_cache_key directly.
"""
return self.gen_cache_key(event, is_user=True)
def _operation(self, event, operation, *args, **kwargs):
        if event == NOTIFY_EVENT.CONVERSATION:  # private message
            # every private-message operation carries data={'conversation_id': xxx, 'send_uid': xxx, 'target_uid': xxx}
data = kwargs['data']
            # track the unread count
name, key = self.gen_conversation_key(data['conversation_id'])
if operation == NOTIFY_OPERATION.ADD:
unread_cache.hincrby(name, key)
elif operation == NOTIFY_OPERATION.CLEAR:
_unread_num = unread_cache.hdel(name, key)
data['unread_num'] = _unread_num
unread_cache.delete(self._gen_user_cache_key(event))
doctor = get_doctor_by_user_id(self.user_id)
if doctor:
noti_poll(doctor.id, event, operation, data)
if doctor.doctor_type == DOCTOR_TYPE.DOCTOR:
                    # notify the institution administrator (officer)
doctor_office = Doctor.objects.filter(
doctor_type=DOCTOR_TYPE.OFFICER, hospital_id=doctor.hospital.id
).first()
if doctor_office:
noti_poll(doctor_office.id, event, operation, data)
_merchant = doctor.merchant
if _merchant:
                    # notify the merchant
_master_merchant = get_master_merchant(_merchant)
if _master_merchant:
noti_poll(_master_merchant.doctor.id, event, operation, data)
else:
noti_user_poll(self.user_id, event, operation, data)
class NotiDoctorUnread(_NotiUnread):
_hacked_methods = [
'order_add', 'order_delete', 'order_clear',
'refund_add', 'refund_delete', # 'refund_clear',
'reserve_add', 'reserve_delete', # 'reserve_clear',
'system_add',
'deal_add',
]
def __init__(self, doctor_id):
self.doctor_id = doctor_id
def _operation(self, event, operation, *args, **kwargs):
key = self.gen_cache_key(event, *args, **kwargs)
del_num = unread_cache.delete(key)
unread_info = u'NotiDoctorUnread._operation, key is {}, del_num is {}'.format(key, del_num)
doctor_unread_logger.info(unread_info)
if event == NOTIFY_EVENT.ORDER and operation == NOTIFY_OPERATION.CLEAR:
            # when the order list is viewed, clear the red-dot badge and record the view time
order_time_key = self.gen_order_viewtime_key(key)
now = datetime.datetime.now()
unread_cache.set(order_time_key, now.strftime(self._time_fmt))
elif event == NOTIFY_EVENT.REFUND:
            # when a refund is requested, the order status becomes WAIT_REFUNDED = ('6', 'refund in progress')
            # when the refund is approved/rejected the order status changes, so the unread count of the affected order must be cleared
order_del_num = unread_cache.delete(self.gen_cache_key(NOTIFY_EVENT.ORDER))
order_log = u'delete order cache when operate refund, doctor_id is {}, del_num is {}'.format(
self.doctor_id, order_del_num
)
doctor_unread_logger.info(order_log)
        # send the poll notification
noti_poll(self.doctor_id, event, operation, None)
#!/usr/bin/env python
# coding=utf-8
import datetime
from django.conf import settings
from api.models import Doctor, Order, RefundOrder, Reservation
from rpc.cache import unread_cache
from rpc.tool.log_tool import doctor_unread_logger
from gm_types.gaia import NOTIFY_EVENT
from gm_types.gaia import ORDER_STATUS, REFUND_STATUS, RESERVATION_STATUS
from services.unread.base import BaseUnread
class _Unread(BaseUnread):
"""
    1. When an event gets a new operation, delete the cached unread count and notify poll.
    2. get is driven by the client; if the cache is empty, compute the count once.
"""
    def _get_unread(self, event, *args, **kwargs):
        # returns (cache_key, unread); unread is None on a cache miss so callers recompute
        key = self.gen_cache_key(event, *args, **kwargs)
        unread = unread_cache.get(key)
        if unread is not None:
            unread = int(unread)
        else:
            unread_info = u'get_unread_from_cache, key is {}, cache is deleted'.format(key)
            doctor_unread_logger.info(unread_info)

        return key, unread
class UserUnread(_Unread):
def __init__(self, user_id):
self.user_id = user_id
def _get_user_unread(self, event, *args, **kwargs):
"""
        !!! Users must read unread counts via _get_user_unread;
        do not call _get_unread directly.
"""
return self._get_unread(event, *args, is_user=True, **kwargs)
def get_conversation_unread(self, conversation_id, default=0):
"""
        Return the unread count of a single conversation.
"""
name, key = self.gen_conversation_key(conversation_id)
unread = unread_cache.hget(name, key) or 0
if unread == 0 and default > 0:
unread = default
unread_cache.hsetnx(name, key, default)
else:
unread = int(unread)
return unread
def get_conversation_all_unread(self):
"""
        Return the total unread count across all conversations.
        1. First check whether the cached total exists (get).
        2. Otherwise hgetall the per-conversation counts and sum them once.
"""
self._reload_user()
key, unread = self._get_user_unread(NOTIFY_EVENT.CONVERSATION)
if unread is None:
name, _ = self.gen_conversation_key()
info = unread_cache.hgetall(name)
unread = 0
for (k, v) in info.items():
unread += int(v)
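            # cache the recomputed total; nx=True keeps any value written concurrently by another worker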
unread_cache.set(key, unread, ex=self._cache_time, nx=True)
return unread
@classmethod
def get_all_conversation_unread_num_by_ids(cls, user_ids):
"""批量获取所有未读数 针对拜博大集团优化"""
result = {}
cache_key_format = 'user:{}' + ':{}'.format(NOTIFY_EVENT.CONVERSATION)
cache_key_list = []
for user_id in user_ids:
_cache_key = cache_key_format.format(user_id)
cache_key_list.append(_cache_key)
if not cache_key_list:
return result
cache_res = unread_cache.mget(cache_key_list)
for user_id, unread_num in zip(user_ids, cache_res):
if unread_num is None:
unread_num = cls(user_id=user_id).get_conversation_all_unread()
result[user_id] = int(unread_num)
return result
class DoctorUnread(UserUnread):
def __init__(self, doctor_id):
self.user_id = None
self.doctor_id = doctor_id
def get_order_unread(self):
key, unread = self._get_unread(NOTIFY_EVENT.ORDER)
if unread is None:
orders = Order.objects.filter(service__doctor_id=self.doctor_id, status=ORDER_STATUS.PAID)
order_time_key = self.gen_order_viewtime_key(key)
time_record = unread_cache.get(order_time_key)
            if time_record:  # if the list was viewed before, only count orders paid since the last view
orders = orders.filter(pay_time__gt=datetime.datetime.strptime(time_record, self._time_fmt))
unread = orders.count()
unread_cache.set(key, unread, self._cache_time)
return unread
def get_refund_unread(self):
key, unread = self._get_unread(NOTIFY_EVENT.REFUND)
if unread is None:
            # doctors can only handle refunds from the last 48 hours
time_limit = datetime.datetime.now() - datetime.timedelta(hours=settings.DOCTOR_HANDLE_TIME)
unread = RefundOrder.objects.filter(
order__service__doctor_id=self.doctor_id,
status=REFUND_STATUS.PROCESSING,
lastest_apply_refund__gte=time_limit,
).count()
unread_cache.set(key, unread, self._cache_time)
return unread
def get_reserve_unread(self):
"""
        Unread count of reservations.
"""
key, unread = self._get_unread(NOTIFY_EVENT.RESERVE)
if unread is None:
unread = Reservation.objects.filter(
schedule__doctor_id=self.doctor_id,
status=RESERVATION_STATUS.RESERVING,
).count()
unread_cache.set(key, unread, self._cache_time)
return unread
@classmethod
def get_all_conversation_unread_num_by_ids(cls, doctor_ids):
"""批量获取所有未读数 针对拜博大集团优化"""
user_ids = list(Doctor.objects.filter(id__in=doctor_ids).values_list('user_id', flat=True))
return UserUnread.get_all_conversation_unread_num_by_ids(user_ids)
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
from datetime import datetime, date, timedelta, timezone
from typing import Union
def get_timestamp_epoch(the_time: Union[date, datetime, None]):
if the_time is None:
return None
if isinstance(the_time, datetime):
pass
elif isinstance(the_time, date):
the_time = datetime(the_time.year, the_time.month, the_time.day)
else:
raise TypeError(
"datetime.datetime or datetime.date expected. [%s]" % type(
the_time))
return int(the_time.timestamp())
def get_timestamp(the_time: Union[date, datetime]):
return get_timestamp_epoch(the_time)
def get_timestamp_or_none(the_time: Union[date, datetime, None]):
return get_timestamp(the_time) if the_time is not None else None
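# usage sketch (illustrative):
#   get_timestamp_or_none(None)              -> None
#   get_timestamp_or_none(date(2020, 1, 1))  -> epoch seconds for 2020-01-01 00:00 local time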