Commit ac80723e authored by 段英荣's avatar 段英荣

Merge branch 'mr/develop/dbmw' into 'master'

Mr/develop/dbmw

See merge request !65
parents 7e406f02 2a0afa2b
......@@ -36,12 +36,15 @@ RUN apk add --no-cache --virtual .build-deps \
mariadb-dev \
git \
openssh \
g++ \
\
&& apk add --no-cache jpeg-dev zlib-dev freetype-dev lcms2-dev openjpeg-dev tiff-dev tk-dev tcl-dev \
# 取消ssh第一次链接的确认
&& echo "StrictHostKeyChecking no" >> /etc/ssh/ssh_config \
&& apk add --no-cache mariadb-connector-c-dev libxml2-dev libxslt-dev librdkafka-dev \
&& pip install --no-cache-dir -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com -r /tmp/requirements.txt \
# && pip install --no-cache-dir -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com -r /tmp/requirements.txt \
&& pip install --no-cache-dir -i https://pypi-1609228582764:0ca4878446d984cb4d8046f980ea0bfadffec639@gengmei-pypi.pkg.coding.net/tob/pypi/simple pytest-runner==5.1 \
&& pip install --no-cache-dir -i https://pypi-1609228582764:0ca4878446d984cb4d8046f980ea0bfadffec639@gengmei-pypi.pkg.coding.net/tob/pypi/simple -r /tmp/requirements.txt \
&& mkdir -p /tmp/video_convert \
&& mkdir -p /data/log/mentha/app
......@@ -52,7 +55,8 @@ COPY . /srv/apps/mentha/
WORKDIR /srv/apps/mentha/
RUN cat requirements.txt | grep master > /tmp/gm-requirements.txt \
&& pip install --no-deps --upgrade -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com -r /tmp/gm-requirements.txt \
# && pip install --no-deps --upgrade -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com -r /tmp/gm-requirements.txt \
&& pip install --no-deps --upgrade -i https://pypi-1609228582764:0ca4878446d984cb4d8046f980ea0bfadffec639@gengmei-pypi.pkg.coding.net/tob/pypi/simple -r /tmp/gm-requirements.txt \
&& apk del .build-deps
CMD gunicorn gm_rpcd.wsgi:application -w 5 -k gevent -b 0.0.0.0:8000 --worker-tmp-dir /dev/shm
......
......@@ -7,6 +7,7 @@ from django.conf import settings
from data_sync.utils import to_epoch
from data_sync.utils import tzlc
from qa.models.answer import QuestionTag, Question, Answer, AnswerVote, AnswerReply, QuestionAnswer
from utils.brain_tools import user_tool
from utils.rpc import get_rpc_invoker
from qa.models.toutiao import by_content_type_id_get_keywords, get_content_star_keywords, get_content_title_keywords, \
get_content_star_first_keyword
......@@ -157,10 +158,10 @@ def get_questions(pks):
user_ids = list(queryset.values_list('user', flat=True))
question_ids = list(queryset.values_list('id', flat=True))
users = rpc['api/user/get_fundamental_info_by_user_ids'](user_ids=user_ids)
users = user_tool.get_fundamental_info_by_user_ids(user_ids, [])
tags = rpc['api/tag/info_by_ids'](tag_ids=tag_ids)
doctors = rpc['doctor/user/get_doctors'](user_ids=user_ids)
user_dict = {str(user['id']): user for user in users.unwrap()}
user_dict = {str(user['id']): user for user in users}
tag_dict = {str(tag['id']): tag for tag in tags.unwrap()}
doctor_list = doctors.unwrap()['doctors']
doctor_user_ids = [str(doctor.get('user') or doctor.get('user_id')) for doctor in doctor_list]
......@@ -306,33 +307,33 @@ def get_questions(pks):
res['score'] = Score.get_score(q)
res['operators_add_tags'] = get_tag_v3_operators_tags(content_id=q.id, content_type="question")
(need_refresh_data, second_demands_list, second_solutions_list, second_positions_list,
second_demands_ids_list,
second_solutions_ids_list, second_positions_ids_list,
first_demands_ids_list, first_solutions_ids_list, first_positions_ids_list, first_demands_list,
first_solutions_list, first_positions_list,
project_tags_list, project_tags_ids_list, first_classify_ids_list, first_classify_names_list,
second_classify_ids_list, second_classify_names_list) = get_tagv3_analysis_info(content_id=res["id"],
content_type="question")
if need_refresh_data:
item["tags_v3"] = list(project_tags_list)
item["first_demands"] = list(first_demands_list)
item["second_demands"] = list(second_demands_list)
item["first_solutions"] = list(first_solutions_list)
item["second_solutions"] = list(second_solutions_list)
item["positions"] = list(first_positions_list)
item["second_positions"] = list(second_positions_list)
item["tagv3_ids"] = list(project_tags_ids_list)
item["first_demands_ids"] = list(first_demands_ids_list)
item["second_demands_ids"] = list(second_demands_ids_list)
item["first_solutions_ids"] = list(first_solutions_ids_list)
item["second_solutions_ids"] = list(second_solutions_ids_list)
item["first_positions_ids"] = list(first_positions_ids_list)
item["second_positions_ids"] = list(second_positions_ids_list)
item["first_classify_ids"] = list(first_classify_ids_list)
item["first_classify_names"] = list(first_classify_names_list)
item["second_classify_ids"] = list(second_classify_ids_list)
item["second_classify_names"] = list(second_classify_names_list)
# (need_refresh_data, second_demands_list, second_solutions_list, second_positions_list,
# second_demands_ids_list,
# second_solutions_ids_list, second_positions_ids_list,
# first_demands_ids_list, first_solutions_ids_list, first_positions_ids_list, first_demands_list,
# first_solutions_list, first_positions_list,
# project_tags_list, project_tags_ids_list, first_classify_ids_list, first_classify_names_list,
# second_classify_ids_list, second_classify_names_list) = get_tagv3_analysis_info(content_id=res["id"],
# content_type="question")
# if need_refresh_data:
# item["tags_v3"] = list(project_tags_list)
# item["first_demands"] = list(first_demands_list)
# item["second_demands"] = list(second_demands_list)
# item["first_solutions"] = list(first_solutions_list)
# item["second_solutions"] = list(second_solutions_list)
# item["positions"] = list(first_positions_list)
# item["second_positions"] = list(second_positions_list)
# item["tagv3_ids"] = list(project_tags_ids_list)
# item["first_demands_ids"] = list(first_demands_ids_list)
# item["second_demands_ids"] = list(second_demands_ids_list)
# item["first_solutions_ids"] = list(first_solutions_ids_list)
# item["second_solutions_ids"] = list(second_solutions_ids_list)
# item["first_positions_ids"] = list(first_positions_ids_list)
# item["second_positions_ids"] = list(second_positions_ids_list)
# item["first_classify_ids"] = list(first_classify_ids_list)
# item["first_classify_names"] = list(first_classify_names_list)
# item["second_classify_ids"] = list(second_classify_ids_list)
# item["second_classify_names"] = list(second_classify_names_list)
results.append(res)
......
......@@ -5,6 +5,8 @@ import logging
import traceback
import datetime
import time
from utils.brain_tools import user_tool
from utils.pic import PictureTools
import redis, json
from cached_property import cached_property
......@@ -57,7 +59,7 @@ class UserManager(RPCMixin):
:param pk_list:
:return:
"""
return self.call_rpc('api/user/get_fundamental_info_by_user_ids', user_ids=pk_list)
return user_tool.get_fundamental_info_by_user_ids(pk_list, [])
class TagManager(RPCMixin):
......
......@@ -20,6 +20,7 @@ from helios.rpc import RPCFaultException
from gm_protocol import GmProtocol
from talos.services.other import get_user_lastest_device_app_version_by_user_id
from utils.brain_tools import user_tool
from utils.protocol import gm_protocol
from utils.rpc import rpc_client, logging_exception
from utils.push import push_task_to_user_multi, limit_get_comment_push, vote_push, send_applet_subscribe_msg
......@@ -254,7 +255,7 @@ def build_inviter_pool():
user_ids = list(filter(lambda y: y not in doctor_user_ids, user_ids))
# 通过请求 gaia 的方法,过滤掉马甲账号,需要先做一次预处理,user_is_puppet_dic:{"user_id": "is_puppet"}
user_infos = rpc_client["api/user/get_fundamental_info_by_user_ids"](user_ids=user_ids).unwrap()
user_infos = user_tool.get_fundamental_info_by_user_ids(user_ids,[])
user_is_puppet_dic = {user["id"]: user["is_puppet"] for user in user_infos}
user_ids = list(filter(lambda user_id: not user_is_puppet_dic[user_id], user_ids))
......
......@@ -7,8 +7,7 @@ git+ssh://git@git.wanmeizhensuo.com/gushitong/gm-test.git@master
git+ssh://git@git.wanmeizhensuo.com/backend/gm-dbmw-api.git@v0.1.2
git+ssh://git@git.wanmeizhensuo.com/backend/gm-dataquery.git@v0.2.5b5
git+ssh://git@git.wanmeizhensuo.com/backend/gm-crypto.git@master
git+ssh://git@git.wanmeizhensuo.com/backend/gm-pili.git@v2.0.3
git+ssh://git@git.wanmeizhensuo.com/system/kafka-python.git@master
git+ssh://git@git.wanmeizhensuo.com/backend/gm-pili.git@v2.0.9
mysqlclient==1.3.13
Django==1.9
......@@ -28,7 +27,6 @@ bs4==0.0.1
lxml==3.6.4
openpyxl==2.5.3
protobuf==3.6.0
pillow==5.3.0
pymysql==0.9.2
......@@ -38,10 +36,17 @@ celery_once==3.0.0
aiohttp==3.6.0
xlrd==1.2.0
git+ssh://git@git.wanmeizhensuo.com/system/gm-tracer.git@v0.1.3
gm-tracer==0.1.6
git+ssh://git@git.wanmeizhensuo.com/backend/helios.git@v0.7.5
git+ssh://git@git.wanmeizhensuo.com/backend/gm-logging.git@v0.8.5
git+ssh://git@git.wanmeizhensuo.com/backend/gm-rpcd.git@v0.2.5
git+ssh://git@git.wanmeizhensuo.com/backend/gm-logging.git@v0.9.0
git+ssh://git@git.wanmeizhensuo.com/backend/gm-rpcd.git@v0.2.7
git+ssh://git@git.wanmeizhensuo.com/rank/gm-text-miner.git@master
kafka-python==2.0.2
crc32c==2.2
lz4==2.2.1
sqlalchemy==1.3.20
six==1.15.0
brain-grpc-client==1.1.28
protobuf>=3.12.0
cryptography<3.4
......@@ -30,6 +30,7 @@ from gm_types.gaia import (
DOCTOR_TYPE,
)
from talos.cache.base import tractate_pv_cache, tractate_favor_count_cache, tractate_vote_count_cache
from utils.brain_tools import user_tool
from utils.rpc import RPCMixin
from utils.pic import PictureTools
from talos.rpc import get_current_rpc_invoker
......@@ -43,7 +44,7 @@ class UserManager(RPCMixin):
:param pk_list:
:return:
"""
return self.call_rpc('api/user/get_fundamental_info_by_user_ids', user_ids=pk_list)
return user_tool.get_fundamental_info_by_user_ids(pk_list, [])
class TagManager(RPCMixin):
......@@ -330,7 +331,7 @@ class Tractate(models.Model):
res_list = []
result = r['pims/catalog/usertags/ids2tag'](ids=list(ids)).unwrap()
res_list = result.get("result", [])
except :
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return res_list
......
......@@ -13,6 +13,8 @@ from talos.services.base import ServiceBase, RpcServiceModelCache
from talos.cache.service import talos_rpc_service_model_cache
from talos.services.models.user import User, SimpleUser
from talos.logger import cache_hit_logger
from utils.brain_tools import user_tool
from utils.rpc import get_current_user
class UserService(ServiceBase):
......@@ -61,7 +63,7 @@ class UserService(ServiceBase):
missing = cached_info.pop(cls.__cached_layer.missing_k)
if missing:
cache_hit_logger.info("diary: not cached user: {}".format(missing))
rpc_result = cls.call_rpc('api/user/get_fundamental_info_by_user_ids', user_ids=missing)
rpc_result = user_tool.get_fundamental_info_by_user_ids(missing, [])
if rpc_result:
for info in rpc_result:
user = User.from_dict(info)
......@@ -86,8 +88,7 @@ class UserService(ServiceBase):
@classmethod
def _get_user_data_by_person_ids(cls, person_ids):
person_ids = list(set(person_ids))
r = cls.call_rpc('api/user/get_fundamental_info_by_person_ids', person_ids=person_ids)
r = user_tool.get_fundamental_info_by_user_ids([], person_ids)
return cls._generate_user_object_from_rpc_user_data(result=r)
@classmethod
......@@ -100,7 +101,10 @@ class UserService(ServiceBase):
:return:
User object
"""
user_infos = cls.call_rpc('api/user/get_fundamental_info_by_user_ids')
user = get_current_user()
user_infos = []
if user:
user_infos = user_tool.get_fundamental_info_by_user_ids([user.id], [])
if not user_infos:
return User.get_anonymous_user()
......
from gm_upload.utils.image_utils import Picture
from google.protobuf import json_format
from utils.gevent_util import get_brain_client
from utils.rpc import get_current_user
from utils.rpc import logging_exception
client = get_brain_client(client_name='user')
def get_fundamental_info_by_user_ids(user_ids, person_ids, current_user_id=0, page_size=10):
    """Fetch fundamental user info from the brain gRPC service.

    Replacement for the legacy ``api/user/get_fundamental_info_by_user_ids``
    RPC (see the call sites migrated in this change).

    :param user_ids: iterable of user ids; each element is coerced to int
        before the gRPC call.
    :param person_ids: list of person ids, forwarded as-is.
    :param current_user_id: id of the requesting user. When 0 and no
        user_ids/person_ids are supplied, it is resolved from the current
        session user; if there is no session user either, an empty list is
        returned immediately.
    :param page_size: page size forwarded to ``getUserFundamentalInfo``
        (previously a hard-coded ``10``; kept as the default).
    :return: list of user dicts. Best-effort: on any failure the error is
        logged and whatever was accumulated so far (usually ``[]``) is
        returned — this function never raises.
    """
    fundamental_info = []
    try:
        # No explicit targets: fall back to the logged-in user, if any.
        if not user_ids and not person_ids and not current_user_id:
            user = get_current_user()
            current_user_id = user.id if user else 0
            if current_user_id == 0:
                return fundamental_info
        user_ids = [int(i) for i in user_ids]
        resp = client.getUserFundamentalInfo(current_user_id, user_ids, person_ids, page_size)
        result = json_format.MessageToDict(
            resp, including_default_value_fields=True, preserving_proto_field_name=True)
        for base_user in result["base_user"]:
            # Normalize the raw portrait value to a thumbnail URL.
            base_user["portrait"] = Picture.get_thumb_path(base_user["portrait"])
            # The service encodes "missing" as -1; expose None to callers.
            for k, v in base_user.items():
                if v == -1:
                    base_user[k] = None
            fundamental_info.append(base_user)
    except Exception:
        # Log and fall through to the best-effort return below.
        logging_exception()
    return fundamental_info
......@@ -2,8 +2,13 @@
from functools import wraps
from brain.device.v1.device_grpc_client import DeviceGrpcClient
from brain.gis.v1.gis_grpc_client import GisGrpcClient
from brain.user.v1.user_grpc_client import UserGrpcClient
from gm_tracer.context import register_tracer, current_tracer, get_current_span, active_span
from helios.rpc import get_mesh_info, set_mesh_info
from gm_rpcd.all import get_default_service
from gm_tracer import func_span
trace_info_attr_name = '_trace_attr_'
......@@ -70,3 +75,20 @@ def wrap_tracer(*args, **kwargs):
return _inner_tracer(args[0])
else:
return _inner_tracer
def get_brain_client(client_name='gis'):
    """Build a brain gRPC client for the requested service.

    :param client_name: one of ``'gis'``, ``'user'``, ``'device'``.
    :return: the matching gRPC client instance, or ``None`` when
        ``client_name`` is not recognized.
    """
    # Wrap the tracer in a function span and unwrap the underlying tracer.
    span = func_span(init_tracer())
    active_tracer = span.tracer
    host, port = get_default_service("brain")
    # Dispatch table instead of an if/elif chain; unknown names yield None.
    client_classes = {
        'gis': GisGrpcClient,
        'user': UserGrpcClient,
        'device': DeviceGrpcClient,
    }
    factory = client_classes.get(client_name)
    if factory is None:
        return None
    return factory(host, port, active_tracer)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment