Commit f2142cd4 authored by lixiaofang's avatar lixiaofang

Merge branch 'vest_edit_judge' into dev

parents b357ea40 15aef535
...@@ -83,3 +83,8 @@ gaia/rpcd.json
*.swp
dbmw_deploy/config.dir/
.idea/codeStyles/Project.xml
.idea/misc.xml
.idea/physical.iml
.idea/codeStyles/Project.xml
.idea/
\ No newline at end of file
<component name="ProjectCodeStyleConfiguration">
<code_scheme name="Project" version="173">
<DBN-PSQL>
<case-options enabled="false">
<option name="KEYWORD_CASE" value="lower" />
<option name="FUNCTION_CASE" value="lower" />
<option name="PARAMETER_CASE" value="lower" />
<option name="DATATYPE_CASE" value="lower" />
<option name="OBJECT_CASE" value="preserve" />
</case-options>
<formatting-settings enabled="false" />
</DBN-PSQL>
<DBN-SQL>
<case-options enabled="false">
<option name="KEYWORD_CASE" value="lower" />
<option name="FUNCTION_CASE" value="lower" />
<option name="PARAMETER_CASE" value="lower" />
<option name="DATATYPE_CASE" value="lower" />
<option name="OBJECT_CASE" value="preserve" />
</case-options>
<formatting-settings enabled="false">
<option name="STATEMENT_SPACING" value="one_line" />
<option name="CLAUSE_CHOP_DOWN" value="chop_down_if_statement_long" />
<option name="ITERATION_ELEMENTS_WRAPPING" value="chop_down_if_not_single" />
</formatting-settings>
</DBN-SQL>
<DBN-PSQL>
<case-options enabled="false">
<option name="KEYWORD_CASE" value="lower" />
<option name="FUNCTION_CASE" value="lower" />
<option name="PARAMETER_CASE" value="lower" />
<option name="DATATYPE_CASE" value="lower" />
<option name="OBJECT_CASE" value="preserve" />
</case-options>
<formatting-settings enabled="false" />
</DBN-PSQL>
<DBN-SQL>
<case-options enabled="false">
<option name="KEYWORD_CASE" value="lower" />
<option name="FUNCTION_CASE" value="lower" />
<option name="PARAMETER_CASE" value="lower" />
<option name="DATATYPE_CASE" value="lower" />
<option name="OBJECT_CASE" value="preserve" />
</case-options>
<formatting-settings enabled="false">
<option name="STATEMENT_SPACING" value="one_line" />
<option name="CLAUSE_CHOP_DOWN" value="chop_down_if_statement_long" />
<option name="ITERATION_ELEMENTS_WRAPPING" value="chop_down_if_not_single" />
</formatting-settings>
</DBN-SQL>
</code_scheme>
</component>
\ No newline at end of file
FROM python:3.6
ENV PATH="/usr/local/bin:$PATH"
COPY . /srv/apps/physical/
WORKDIR /root/.ssh/
ADD ssh/id_rsa .
ADD ./sources.list /etc/apt/sources.list
WORKDIR /srv/apps/physical/
RUN chmod -R 600 /root/.ssh/id_rsa \
&& echo "StrictHostKeyChecking no" >> /etc/ssh/ssh_config \
&& mkdir -p /data/log/physical/app
RUN apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 3B4FE6ACC0B21F32 \
&& apt-get -y update && apt-get -y install mysql-client libmysqlclient-dev \
&& apt-get clean \
&& apt-get autoclean \
&& rm -rf /var/lib/apt/lists/*
RUN pip3 install -i https://pypi.tuna.tsinghua.edu.cn/simple -r requirements.txt
CMD bash -c 'source ~/.bashrc && celery -A physical worker -Q tapir-alpha --loglevel=DEBUG --maxtasksperchild 500 -c 62'
strategy part like es perform
Container run command
docker run -it -p 9999:9999 -d \
--volume=/etc/gm-config:/etc/gm-config \
--volume=/etc/resolv.conf:/etc/resolv.conf \
--volume=/srv/apps/physical/physical/settings_local.py:/srv/apps/physical/physical/settings_local.py \
--entrypoint="/bin/bash" physical-test:1.0 "-c" "source ~/.bashrc && celery -A physical worker -Q tapir-alpha --loglevel=DEBUG --maxtasksperchild 500 -c 62"
...@@ -17,5 +17,7 @@
<element value="search.views.contrast_similar"/>
<element value="injection.data_sync.tasks"/>
<element value="search.views.contrast_similar"/>
<element value="search.views.search_hotword"/>
<element value="search.views.product"/>
</config>
</gm_rpcd_config>
import sys
from gm_rpcd.commands.utils import add_cwd_to_path
from gm_rpcd.internals.utils import serve
def main(args):
add_cwd_to_path()
from gm_rpcd.internals.configuration import config
config.is_develop_mode = True
config.freeze()
host = '127.0.0.1'
port = 9000
try:
first_arg = args[0]
except IndexError:
pass
else:
if ':' in first_arg:
host, port = first_arg.split(':')
port = int(port)
else:
port = int(first_arg)
print('Serving on {}:{}'.format(host, port))
serve(host=host, port=port)
if __name__ == '__main__':
main(sys.argv[1:])
# -*- coding: UTF-8 -*-
import logging
import traceback
import json
import pymysql
import threading
import random
import datetime
from celery import shared_task from celery import shared_task
from django.conf import settings from django.conf import settings
from django.core import serializers from django.core import serializers
from trans2es.type_info import get_type_info_map from trans2es.type_info import get_type_info_map
# from rpc.all import get_rpc_remote_invoker # from rpc.all import get_rpc_remote_invoker
from libs.es import ESPerform from libs.es import ESPerform
import logging
import traceback
from libs.cache import redis_client from libs.cache import redis_client
from trans2es.models.face_user_contrast_similar import FaceUserContrastSimilar,UserSimilarScore from trans2es.models.face_user_contrast_similar import FaceUserContrastSimilar, UserSimilarScore
import json from linucb.utils.register_user_tag import RegisterUserTag
from trans2es.models.tag import SettingsConfig, Tag
@shared_task @shared_task
def write_to_es(es_type, pk_list, use_batch_query_set=False): def write_to_es(es_type, pk_list, use_batch_query_set=False):
try: try:
pk_list = list(frozenset(pk_list)) pk_list = list(frozenset(pk_list))
type_info_map = get_type_info_map()
type_info = type_info_map[es_type] if es_type == "register_user_tag":
RegisterUserTag.get_register_user_tag(pk_list)
logging.info("duan add,es_type:%s" % str(es_type)) elif es_type == "attention_user_tag":
type_info.insert_table_by_pk_list( RegisterUserTag.get_user_attention_tag(pk_list)
sub_index_name=es_type, else:
pk_list=pk_list, type_info_map = get_type_info_map()
use_batch_query_set=use_batch_query_set, type_info = type_info_map[es_type]
es=ESPerform.get_cli()
) logging.info("consume es_type:%s" % str(es_type))
type_info.insert_table_by_pk_list(
sub_index_name=es_type,
pk_list=pk_list,
use_batch_query_set=use_batch_query_set,
es=ESPerform.get_cli()
)
except: except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc()) logging.error("catch exception,err_msg:%s" % traceback.format_exc())
...@@ -37,7 +48,7 @@ def sync_face_similar_data_to_redis(): ...@@ -37,7 +48,7 @@ def sync_face_similar_data_to_redis():
result_items = FaceUserContrastSimilar.objects.filter(is_online=True, is_deleted=False).distinct().values( result_items = FaceUserContrastSimilar.objects.filter(is_online=True, is_deleted=False).distinct().values(
"participant_user_id").values_list("participant_user_id", flat=True) "participant_user_id").values_list("participant_user_id", flat=True)
logging.info("duan add,begin sync_face_similar_data_to_redis!") logging.info("begin sync_face_similar_data_to_redis!")
redis_key_prefix = "physical:user_similar:participant_user_id:" redis_key_prefix = "physical:user_similar:participant_user_id:"
for participant_user_id in result_items: for participant_user_id in result_items:
...@@ -50,38 +61,74 @@ def sync_face_similar_data_to_redis(): ...@@ -50,38 +61,74 @@ def sync_face_similar_data_to_redis():
item_list = list() item_list = list()
for item in similar_result_items: for item in similar_result_items:
weight_score = int(item.similarity * 100)
item_list.append( item_list.append(
{ {
"contrast_user_id": item.contrast_user_id, "filter": {
"similarity": item.similarity "constant_score": {
"filter": {
"term": {"user_id": item.contrast_user_id}
}
}
},
"weight": weight_score * 2
} }
) )
if len(item_list) >= 100:
break
redis_client.set(redis_key, json.dumps(item_list)) redis_client.set(redis_key, json.dumps(item_list))
logging.info("duan add,participant_user_id:%d set data done!" % participant_user_id) logging.info("participant_user_id:%d set data done!" % participant_user_id)
except: except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc()) logging.error("catch exception,err_msg:%s" % traceback.format_exc())
@shared_task @shared_task
def sync_user_similar_score(): def sync_user_similar_score():
try: try:
results_items = UserSimilarScore.objects.filter(is_deleted=False).distinct().values("user_id").values_list("user_id",flat=True) results_items = UserSimilarScore.objects.filter(is_deleted=False).distinct().values("user_id").values_list(
"user_id", flat=True)
redis_key_prefix = "physical:user_similar_score:user_id:" redis_key_prefix = "physical:user_similar_score:user_id:"
logging.info("duan add,begin sync user_similar_score!") logging.info("begin sync user_similar_score!")
for user_id in results_items: for user_id in results_items:
redis_key = redis_key_prefix + str(user_id) redis_key = redis_key_prefix + str(user_id)
similar_results_items = UserSimilarScore.objects.filter(is_deleted=False,user_id=user_id).order_by("-score") similar_results_items = UserSimilarScore.objects.filter(is_deleted=False, user_id=user_id).order_by(
"-score")
item_list = list() item_list = list()
for item in similar_results_items: for item in similar_results_items:
contrast_user_id = item.contrast_user_id contrast_user_id = item.contrast_user_id
score = item.score score = item.score
item_list.append( item_list.append(
[contrast_user_id,score] [contrast_user_id, score]
) )
redis_client.set(redis_key, json.dumps(item_list)) redis_client.set(redis_key, json.dumps(item_list))
except: except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc()) logging.error("catch exception,err_msg:%s" % traceback.format_exc())
@shared_task
def get_tag_count():
try:
# Fetch the recommended hot search words
# results_registr_tag = list(set(SettingsConfig.objects.filter(is_deleted=False).values_list("val", flat=True)))
# tag_val_list =set()
# for item in results_registr_tag:
# for word in item.split():
# tag_val_list.add(word)
# Fetch the qualifying core tags
results_tag = list(
set(Tag.objects.filter(is_online=True, is_deleted=False, collection=1).values_list("id", flat=True)))
redis_registr_tag = "physical:search_hotword:results_registr_tag"
redis_tag = "physical:search_hotword:results_tag"
# redis_client.set(redis_registr_tag, list(results_registr_tag))
redis_client.set(redis_tag, list(results_tag))
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
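A minimal enqueue sketch for the Celery tasks above, assuming they live in injection.data_sync.tasks (the module registered in the gm_rpcd config earlier in this diff); the pk values are placeholders.
# Hedged usage sketch; the module path and pk values are assumptions for illustration.
from injection.data_sync.tasks import write_to_es, get_tag_count

# Re-index a few register_user_tag records asynchronously on the Celery worker queue.
write_to_es.delay("register_user_tag", [101, 102, 103])

# Refresh the hot-word / core-tag redis caches consumed by physical/search/search_hotword.
get_tag_count.delay()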
...@@ -4,6 +4,10 @@
from django.conf import settings
from pytz import timezone
from datetime import datetime
import traceback
from libs.cache import redis_client
import json
import logging
def tzlc(dt, truncate_to_sec=True):
...@@ -15,4 +19,23 @@ def tzlc(dt, truncate_to_sec=True):
if dt.tzinfo is None:
return timezone(settings.TIME_ZONE).localize(dt)
else:
return timezone(settings.TIME_ZONE).normalize(dt)
\ No newline at end of file
def get_have_read_topic_id_list(device_id, user_id, query_type):
try:
if user_id and int(user_id) > 0:
redis_key = "physical:home_recommend" + ":user_id:" + str(user_id) + ":query_type:" + str(query_type)
else:
redis_key = "physical:home_recommend" + ":device_id:" + str(device_id) + ":query_type:" + str(query_type)
have_read_topic_id_list = list()
redis_field_list = [b'have_read_topic_list']
redis_field_val_list = redis_client.hmget(redis_key, redis_field_list)
if redis_field_val_list[0]:
have_read_topic_id_list = list(json.loads(redis_field_val_list[0]))
return have_read_topic_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return list()
# -*- coding: UTF-8 -*-
# !/usr/bin/env python
import numpy as np
import redis
from libs.cache import redis_client
import logging
import traceback
import json
import pickle
from django.conf import settings
from trans2es.models.tag import AccountUserTag,CommunityTagFollow
from libs.es import ESPerform
import libs.tools as Tools
from search.utils.common import *
class RegisterUserTag(object):
linucb_device_id_matrix_redis_prefix = "physical:linucb:device_id:"
linucb_device_id_recommend_redis_prefix = "physical:linucb:tag_recommend:device_id:"
linucb_device_id_recommend_topic_id_prefix = "physical:linucb:topic_recommend:device_id:"
tag_topic_id_redis_prefix = "physical:tag_id:topic_id_list:"
linucb_user_id_matrix_redis_prefix = "physical:linucb:user_id:"
linucb_user_id_recommend_redis_prefix = "physical:linucb:tag_recommend:user_id:"
linucb_user_id_recommend_topic_id_prefix = "physical:linucb:topic_recommend:user_id:"
linucb_device_id_register_tag_topic_id_prefix = "physical:linucb:register_tag_topic_recommend:device_id:"
linucb_user_id_register_tag_topic_id_prefix = "physical:linucb:register_tag_topic_recommend:user_id:"
# Tags the user follows
linucb_register_user_tag_key = "physical:linucb:register_user_tag_info"
# # Tags the user follows
# linucb_user_attention_tag_key = "physical:linucb:user_attention_tag_info"
# # Tags the user likes (selected at registration + followed)
# linucb_user_like_tag_key = "physical:linucb:user_like_tag_info"
@classmethod
def get_user_attention_tag(cls, pk_list):
"""
:remark Fetch the tags a user follows
:param pk_list:
:return:
"""
try:
user_id_dict = dict()
query_results = CommunityTagFollow.objects.filter(pk__in=pk_list,is_deleted=False,is_online=True)
for item in query_results:
tag_id = item.tag_id
user_id = item.user_id
user_tag_list = list(CommunityTagFollow.objects.filter(user_id=user_id,is_deleted=False,is_online=True).values_list("tag_id", flat=True))
user_id_dict[user_id] = user_tag_list
user_register_tag_list = list(AccountUserTag.objects.filter(user=user_id,is_deleted=False).values_list("tag_id", flat=True))
user_id_dict[user_id].extend(user_register_tag_list)
for user_id in user_id_dict:
# redis_user_tag_id_data = redis_client.hget(cls.linucb_register_user_tag_key, user_id)
# redis_user_tag_id_list = json.loads(redis_user_tag_id_data) if redis_user_tag_id_data else []
# redis_user_tag_id_list.extend(user_id_dict[user_id])
redis_client.hset(cls.linucb_register_user_tag_key, user_id, json.dumps(list(set(user_id_dict[user_id]))))
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
@classmethod
def get_register_user_tag(cls,pk_list):
"""
:remark Tags the user selected at registration
:param pk_list:
:return:
"""
try:
user_id_dict = dict()
query_results = AccountUserTag.objects.filter(pk__in=pk_list)
for item in query_results:
tag_id = item.tag_id
user_id = item.user
user_tag_list = list(AccountUserTag.objects.filter(user=user_id).values_list("tag_id", flat=True))
user_id_dict[user_id] = user_tag_list
user_follow_tag_list = list(CommunityTagFollow.objects.filter(user_id=user_id,is_deleted=False,is_online=True).values_list("tag_id", flat=True))
user_id_dict[user_id].extend(user_follow_tag_list)
for user_id in user_id_dict:
# redis_user_tag_id_data = redis_client.hget(cls.linucb_register_user_tag_key, user_id)
# redis_user_tag_id_list = json.loads(redis_user_tag_id_data) if redis_user_tag_id_data else []
# redis_user_tag_id_list.extend(user_id_dict[user_id])
redis_client.hset(cls.linucb_register_user_tag_key, user_id, json.dumps(list(set(user_id_dict[user_id]))))
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
# -*- coding: UTF-8 -*-
# !/usr/bin/env python
import numpy as np
import redis
from libs.cache import redis_client
import logging
import traceback
import json
import pickle
from django.conf import settings
from trans2es.models.tag import CommunityTagFollow
from libs.es import ESPerform
import libs.tools as Tools
from search.utils.common import *
class UserAttentionTag(object):
linucb_device_id_matrix_redis_prefix = "physical:linucb:device_id:"
linucb_device_id_recommend_redis_prefix = "physical:linucb:tag_recommend:device_id:"
linucb_device_id_recommend_topic_id_prefix = "physical:linucb:topic_recommend:device_id:"
tag_topic_id_redis_prefix = "physical:tag_id:topic_id_list:"
linucb_user_id_matrix_redis_prefix = "physical:linucb:user_id:"
linucb_user_id_recommend_redis_prefix = "physical:linucb:tag_recommend:user_id:"
linucb_user_id_recommend_topic_id_prefix = "physical:linucb:topic_recommend:user_id:"
linucb_user_id_attention_tag_topic_id_prefix = "physical:linucb:attention_tag_topic_recommend:user_id:"
@classmethod
def get_register_user_tag(cls,pk_list):
try:
user_id_set = set()
query_results = CommunityTagFollow.objects.filter(pk__in=pk_list)
for item in query_results:
tag_id = item.tag_id
user_id = item.user_id
if user_id not in user_id_set:
user_id_set.add(user_id)
user_tag_list = CommunityTagFollow.objects.filter(user_id=user_id).order_by("-create_time").values_list("tag_id",flat=True)
have_read_topic_id_list = Tools.get_have_read_topic_id_list(-1, user_id,
TopicPageType.HOME_RECOMMEND)
recommend_topic_id_list = list()
cycle_num = int(10000/len(user_tag_list))
for index in range(0,cycle_num):
for tag_id in user_tag_list:
redis_tag_id_key = cls.tag_topic_id_redis_prefix + str(tag_id)
redis_tag_id_data = redis_client.get(redis_tag_id_key)
tag_topic_id_list = json.loads(redis_tag_id_data) if redis_tag_id_data else []
if not redis_tag_id_data:
tag_topic_id_list = ESPerform.get_tag_topic_list(tag_id)
redis_client.set(redis_tag_id_key,json.dumps(tag_topic_id_list))
redis_client.expire(redis_tag_id_key,1*24*60*60)
if len(tag_topic_id_list)>index:
for topic_id in tag_topic_id_list[index:]:
if topic_id not in have_read_topic_id_list and topic_id not in recommend_topic_id_list:
recommend_topic_id_list.append(topic_id)
break
redis_register_tag_topic_data = {
"data": json.dumps(recommend_topic_id_list),
"cursor": 0
}
redis_client.hmset(cls.linucb_user_id_attention_tag_topic_id_prefix,redis_register_tag_topic_data)
redis_client.expire(cls.linucb_user_id_attention_tag_topic_id_prefix,30*24*60*60)
topic_recommend_redis_key = cls.linucb_user_id_recommend_topic_id_prefix + str(user_id)
redis_recommend_topic_dict = redis_client.hgetall(topic_recommend_redis_key)
if len(redis_recommend_topic_dict)==0:
redis_data_dict = {
"data": json.dumps(recommend_topic_id_list),
"cursor":0
}
redis_client.hmset(topic_recommend_redis_key,redis_data_dict)
redis_client.expire(topic_recommend_redis_key,30*24*60*60)
else:
ori_recommend_topic_id_list = json.loads(redis_recommend_topic_dict["data"])
ori_recommend_cursor = redis_recommend_topic_dict["cursor"]
ori_index = 0
for new_recommend_index in range(0,len(recommend_topic_id_list),2):
pass
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
...@@ -20,11 +20,16 @@ class LinUCB: ...@@ -20,11 +20,16 @@ class LinUCB:
default_tag_list = list() default_tag_list = list()
@classmethod @classmethod
def get_default_tag_list(cls): def get_default_tag_list(cls,user_id):
try: try:
if len(cls.default_tag_list) == 0: if len(cls.default_tag_list) == 0:
cls.default_tag_list = Tag.objects.using(settings.SLAVE_DB_NAME).filter(is_online=True,collection=1).values_list("id",flat=True)[0:100] if user_id:
redis_tag_data = redis_client.hget("physical:linucb:register_user_tag_info", user_id)
cls.default_tag_list = json.loads(redis_tag_data) if redis_tag_data else []
if len(cls.default_tag_list) == 0:
cls.default_tag_list = Tag.objects.using(settings.SLAVE_DB_NAME).filter(is_online=True,collection=1).values_list("id",flat=True)[0:100]
return cls.default_tag_list return cls.default_tag_list
except: except:
......
# !/usr/bin/env python
# -*- coding: utf-8 -*-
DATABASE_APPS_MAPPING = {'face': 'face', 'commodity': 'commodity'}
class DBRouter:
...@@ -12,32 +13,46 @@ class DBRouter:
"""
Attempts to read user models go to users_db.
"""
if model._meta.app_label == 'face': # if model._meta.app_label == 'face':
return 'face' # return 'face'
if model._meta.app_label in DATABASE_APPS_MAPPING:
return DATABASE_APPS_MAPPING[model._meta.app_label]
return None return None
def db_for_write(self, model, **hints): def db_for_write(self, model, **hints):
""" """
Attempts to write user models go to users_db. Attempts to write user models go to users_db.
""" """
if model._meta.app_label == 'face': # if model._meta.app_label == 'face':
return 'face' # return 'face'
if model._meta.app_label in DATABASE_APPS_MAPPING:
return DATABASE_APPS_MAPPING[model._meta.app_label]
return None return None
def allow_relation(self, obj1, obj2, **hints): def allow_relation(self, obj1, obj2, **hints):
""" """
Allow relations if a model in the user app is involved. Allow relations if a model in the user app is involved.
""" # """
if obj1._meta.app_label == 'face' or \ db_obj1 = DATABASE_APPS_MAPPING.get(obj1._meta.app_label)
obj2._meta.app_label == 'face': db_obj2 = DATABASE_APPS_MAPPING.get(obj2._meta.app_label)
return True if db_obj1 and db_obj2:
return None if db_obj1 == db_obj2:
return True
else:
return False
else:
return None
def allow_migrate(self, db, app_label, model_name=None, **hints): def allow_migrate(self, db, app_label, model_name=None, **hints):
""" """
Make sure the auth app only appears in the 'users_db' Make sure the auth app only appears in the 'users_db'
database. database.
""" """
if app_label == 'face': if db in DATABASE_APPS_MAPPING.values():
return db == 'face' return DATABASE_APPS_MAPPING.get(app_label) == db
elif app_label in DATABASE_APPS_MAPPING:
return False
return None return None
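A hedged sketch of how a router like this is typically wired up in Django settings; the dotted module path below is an assumption, not taken from this diff.
# Hypothetical settings snippet; the module path to DBRouter is assumed for illustration.
DATABASE_ROUTERS = ["physical.db_router.DBRouter"]

# With DATABASE_APPS_MAPPING = {'face': 'face', 'commodity': 'commodity'}, reads and writes for
# models whose app_label is 'face' or 'commodity' are routed to the database alias of the same
# name, relations are only allowed between models mapped to the same database, and migrations
# for mapped apps are confined to their own alias.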
# coding=utf-8 # coding=utf-8
from __future__ import unicode_literals, print_function, absolute_import from __future__ import unicode_literals, print_function, absolute_import
import itertools
from django.conf import settings from django.conf import settings
import itertools
import logging import logging
......
...@@ -10,8 +10,5 @@ For the full list of settings and their values, see ...@@ -10,8 +10,5 @@ For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/ https://docs.djangoproject.com/en/1.10/ref/settings/
""" """
import os
from .log_settings import * from .log_settings import *
from datetime import timedelta
from celery.schedules import crontab
from .settings_local import * from .settings_local import *
...@@ -57,6 +57,7 @@ CELERYBEAT_SCHEDULE = { ...@@ -57,6 +57,7 @@ CELERYBEAT_SCHEDULE = {
'args': () 'args': ()
}, },
} }
""" """
MIDDLEWARE = [ MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware', 'django.middleware.security.SecurityMiddleware',
......
...@@ -9,7 +9,7 @@ from libs.es import ESPerform ...@@ -9,7 +9,7 @@ from libs.es import ESPerform
class GroupUtils(object): class GroupUtils(object):
@classmethod @classmethod
def get_group_query_result(cls,query,offset,size): def get_group_query_result(cls, query, offset, size):
try: try:
q = dict() q = dict()
...@@ -38,37 +38,92 @@ class GroupUtils(object): ...@@ -38,37 +38,92 @@ class GroupUtils(object):
} }
} }
q["_source"] = { q["_source"] = {
"includes":["id"] "includes": ["id"]
} }
return ESPerform.get_search_results(ESPerform.get_cli(), "group", q, offset, size) return ESPerform.get_search_results(ESPerform.get_cli(), "group", q, offset, size)
except: except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc()) logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"total_count":0, "hits":[]} return {"total_count": 0, "hits": []}
@classmethod @classmethod
def get_hot_pictorial_recommend_result_list(cls,offset,size,es_cli_obj=None): def get_hot_pictorial_recommend_result_list(cls, offset, size, es_cli_obj=None, attention_tag_list=[]):
try: try:
if not es_cli_obj: if not es_cli_obj:
es_cli_obj = ESPerform.get_cli() es_cli_obj = ESPerform.get_cli()
q = dict() functions_list = list()
q["query"] = { for tag_id in attention_tag_list:
"bool":{ functions_list.append({
"must":[ "filter": {
{"term": {"is_online": True}}, "constant_score": {
{"term":{"is_deleted": False}} "filter": {
] "term": {
"tag_id": tag_id
}
}
}
},
"weight": 20
})
if len(functions_list) >= 20:
break
functions_list.append(
{
"gauss": {
"create_time": {
"scale": "1d",
"decay": 0.99
}
},
"weight": 60
}
)
q = {
"query": {
"function_score": {
"query": {
"bool": {
"filter": [
{"term": {"is_online": True}},
{"term": {"is_deleted": False}},
{"term": {"effective": True}}
],
"must_not": [
{"term": {"is_default": 1}}
]
}
},
"score_mode": "sum",
"boost_mode": "sum",
"functions": functions_list
}
} }
} }
q["sort"] = [ q["sort"] = [
{"high_quality_topic_num":{"order":"desc"}} {
"_script": {
"type": "number",
"script": {
"lang": "expression",
"source": "_score+doc['offline_score']"
},
"order": "desc"
}
},
{
"_score": {
"order": "desc"
}
}
] ]
q["_source"] = { q["_source"] = {
"includes":["id"] "includes": ["id"]
} }
result_dict = ESPerform.get_search_results(es_cli_obj,"pictorial",q,offset,size) result_dict = ESPerform.get_search_results(es_cli_obj, "pictorial", q, offset, size)
pictorial_ids_list = [] pictorial_ids_list = []
if len(result_dict["hits"]) > 0: if len(result_dict["hits"]) > 0:
...@@ -80,7 +135,7 @@ class GroupUtils(object): ...@@ -80,7 +135,7 @@ class GroupUtils(object):
return [] return []
@classmethod @classmethod
def get_user_attention_pictorial_list(cls,user_id,offset=0,size=10,es_cli_obj=None): def get_user_attention_pictorial_list(cls, user_id, offset=0, size=10, es_cli_obj=None):
""" """
:remark: 获取用户关注小组列表 :remark: 获取用户关注小组列表
:return: :return:
...@@ -91,29 +146,31 @@ class GroupUtils(object): ...@@ -91,29 +146,31 @@ class GroupUtils(object):
q = dict() q = dict()
q["query"] = { q["query"] = {
"bool":{ "bool": {
"must":[ "must": [
{"term":{"is_online": True}}, {"term": {"is_online": True}},
{"term":{"user_id":user_id}}, {"term": {"user_id": user_id}},
{"term":{"is_deleted":False}} {"term": {"is_deleted": False}},
{"term": {"effective": True}}
] ]
} }
} }
q["_source"] = { q["_source"] = {
"includes":["attention_pictorial_id_list"] "includes": ["attention_pictorial_id_list"]
} }
result_dict = ESPerform.get_search_results(es_cli_obj,"user",q,offset,size) result_dict = ESPerform.get_search_results(es_cli_obj, "user", q, offset, size)
if len(result_dict["hits"])>0: if len(result_dict["hits"]) > 0:
return result_dict["hits"][0]["_source"]["attention_pictorial_id_list"] return result_dict["hits"][0]["_source"]["attention_pictorial_id_list"]
else: else:
return [] return []
except: except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc()) logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return [] return []
@classmethod @classmethod
def get_pictorial_ids_by_aggs(cls,pictorial_ids_list,es_cli_obj=None): def get_pictorial_ids_by_aggs(cls, pictorial_ids_list, es_cli_obj=None):
""" """
:remark:聚合查询获取小组列表 :remark:聚合查询获取小组列表
:param group_id_list: :param group_id_list:
...@@ -124,34 +181,96 @@ class GroupUtils(object): ...@@ -124,34 +181,96 @@ class GroupUtils(object):
es_cli_obj = ESPerform.get_cli() es_cli_obj = ESPerform.get_cli()
q = dict() q = dict()
q["size"]=0 q["size"] = 0
q["query"] = { q["query"] = {
"terms":{ "terms": {
"pictorial_id":pictorial_ids_list "pictorial_id": pictorial_ids_list
} }
} }
q["aggs"] = { q["aggs"] = {
"pictorial_ids":{ "pictorial_ids": {
"terms":{ "terms": {
"field":"pictorial_id" "field": "pictorial_id"
}, },
"aggs":{ "aggs": {
"max_date":{ "max_date": {
"max":{ "max": {
"field":"update_time_val" "field": "update_time_val"
} }
} }
} }
} }
} }
result_dict = ESPerform.get_search_results(es_cli_obj,"topic",q,aggregations_query=True) result_dict = ESPerform.get_search_results(es_cli_obj, "topic", q, aggregations_query=True)
buckets_list = result_dict["aggregations"]["pictorial_ids"]["buckets"] buckets_list = result_dict["aggregations"]["pictorial_ids"]["buckets"]
sorted_buckets_list = sorted(buckets_list,key=lambda item:item["max_date"]["value"],reverse=True) sorted_buckets_list = sorted(buckets_list, key=lambda item: item["max_date"]["value"], reverse=True)
sorted_pictorial_id_list = [item["key"] for item in sorted_buckets_list] sorted_pictorial_id_list = [item["key"] for item in sorted_buckets_list]
return sorted_pictorial_id_list return sorted_pictorial_id_list
except: except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc()) logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return [] return []
\ No newline at end of file
@classmethod
def get_search_pictorial_topic(cls, query, offset, size):
try:
q = dict()
multi_fields = {
'name': 4,
'description': 4,
'edit_tag_name': 4
}
query_fields = ['^'.join((k, str(v))) for (k, v) in multi_fields.items()]
multi_match = {
'query': query,
'type': 'cross_fields',
'operator': 'and',
'fields': query_fields,
"analyzer": "gm_default_index"
}
q['query'] = {
'bool': {
"must": [
{"term": {"is_online": True}},
{"term": {"is_deleted": False}},
{"term": {"is_default": 0}},
{"range": {"topic_id_list": {"gte": 0}}},
{"term": {"is_cover": True}}
],
"should": [
{'multi_match': multi_match}
],
"minimum_should_match": 1
}
}
q["_source"] = {
"includes": ["id", "is_online", "is_deleted", "is_default", "name", "tag_name", "description",
"is_cover",
"offline_score",
"is_default"]
}
q["sort"] = [
{
"real_user_activate_time": {
"order": "desc"
}
},
{
"_score": {
"order": "desc"
}
}
]
logging.info("get get_search_pictorial_topic:%s" % q)
es_cli_obj = ESPerform.get_cli()
result_dict = ESPerform.get_search_results(es_cli_obj, "pictorial", q, offset, size)
return result_dict
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import json
from libs.cache import redis_client
from libs.es import ESPerform
from search.utils.group import GroupUtils
from search.utils.common import GroupSortTypes
from libs.es import ESPerform
from trans2es.models.pictorial import PictorialTopics
class ProductUtils(object):
@classmethod
def get_product_sku(cls, query='', offset=0, size=10, filters={}):
try:
multi_fields = {
'cn_name': 2,
'en_name': 2,
'alias': 2,
'brand_cn_name': 2,
'brand_en_name': 2,
'brand_alias': 2,
'category_cn_name': 2,
"effect_cn_name": 2
}
query_fields = ['^'.join((k, str(v))) for (k, v) in multi_fields.items()]
multi_match = {
'query': query,
'type': 'cross_fields',
'operator': 'and',
'fields': query_fields,
}
sku_must_flter = cls.sku_filter(filters)
logging.info("get sku_must_flter:%s " % sku_must_flter)
logging.info("get sku_must_flter:%s " % type(sku_must_flter))
q = {
"query": {
"bool": {
"must": sku_must_flter
}
}
}
if query != '':
q = {
"query": {
"bool": {
"must": sku_must_flter,
"should": {
"multi_match": multi_match
},
"minimum_should_match": 1
}
}
}
q["sort"] = [{"comment_nums": {"order": "desc"}}, {"cn_name_sort": {"order": "asc"}}]
logging.info("get product query:%s" % q)
result_dict = ESPerform.get_search_results(ESPerform.get_cli(), sub_index_name="product", query_body=q,
offset=offset, size=size)
return result_dict
except:
logging.error("catch exception, query_sku:%s" % traceback.format_exc())
return []
@classmethod
def sku_filter(cls, filters):
"""处理过滤器部分。"""
logging.info("get filters:%s" % filters)
f = [
{'term': {"have_image": True}},
{'term': {"is_online": True}},
{"term": {"is_deleted": False}},
]
if not filters:
return f
for k, v in filters.items():
if v in (None, '', []):
continue
if k == "brand":
f.append({"term": {"brand_cn_name_pre": v}})
if k == "effect":
f.append({"term": {"effect_cn_name_pre": v}})
if k == "category":
f.append({"term": {"category_cn_name_pre": v}})
return f
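A minimal usage sketch for the SKU search helper above, assuming the same import path used by the query module further down; the query string and filter values are placeholders.
# Hedged usage sketch; the query string and filter values are placeholder assumptions.
from search.utils.product import ProductUtils

res = ProductUtils.get_product_sku(
    query="some product name",   # free text matched across cn/en name, brand, category and effect fields
    offset=0,
    size=10,
    filters={"brand": "SomeBrand", "category": "SomeCategory"},  # mapped to *_cn_name_pre term filters
)
sku_ids = [hit["_source"]["id"] for hit in res["hits"]] if res else []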
...@@ -25,7 +25,6 @@ def business_topic_search(filters, nfilters=None, sorts_by=None, offset=0, size= ...@@ -25,7 +25,6 @@ def business_topic_search(filters, nfilters=None, sorts_by=None, offset=0, size=
index_name="topic" index_name="topic"
) )
logging.info("get result_lsit:%s"%result_list)
topic_ids = [item["_source"]["id"] for item in result_list["hits"]] topic_ids = [item["_source"]["id"] for item in result_list["hits"]]
return {"topic_ids": topic_ids, "total_count": result_list["total_count"]} return {"topic_ids": topic_ids, "total_count": result_list["total_count"]}
except: except:
......
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import json
from libs.cache import redis_client
from libs.es import ESPerform
from search.utils.product import ProductUtils
from search.utils.common import GroupSortTypes
from libs.es import ESPerform
@bind("physical/search/query_product_sku")
def product_hot_sort(query='', offset=0, size=10, filters={}):
'''
Product SKU ranking
:param query:
:param offset:
:param size:
:param sort_type:
:param filters:
:return:
'''
try:
res = ProductUtils.get_product_sku(query=query, offset=offset, size=size, filters=filters)
product_list = []
res_hit = res["hits"]
for item in res_hit:
product_id = item["_source"]["id"]
product_list.append(product_id)
return {"product_hot_ids": product_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"product_hot_ids": []}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import random
import json
from libs.cache import redis_client
from libs.es import ESPerform
from search.utils.group import GroupUtils
from search.utils.common import GroupSortTypes
from libs.es import ESPerform
from trans2es.models.pictorial import PictorialTopics
from trans2es.models.tag import SettingsConfig, Tag
from libs.cache import redis_client
@bind("physical/search/search_hotword")
def search_hotword(device_id=-1):
"""
:remark: Hot inspiration words for the search page
Composed of: recommended hot search words (register_show_tag),
personalized tags (physical:linucb:tag_recommend:device_id:),
and core tags (community_tag, collection 1), which must be deduplicated
1. Randomly pick at most 6 recommended hot search words from the backend; if fewer than 6 exist, take whatever is available
2. Then randomly pick the same number of linUCB tags; if there are not enough, fall back to core tags. After deduplication, linUCB + core tags must match the count of recommended hot words
3. Finally shuffle everything into the hot inspiration list; tags beyond 3 rows are dropped, fewer than 3 rows is fine
:param query:
:param offset:
:param size:
:return:
"""
try:
all_tag_name_list = set()
# results_registr_tag = json.loads(redis_client.get("physical:search_hotword:results_registr_tag"))
results_tag = json.loads(redis_client.get("physical:search_hotword:results_tag"))
# First fetch the recommended hot search words
results_registr_tag = list(set(SettingsConfig.objects.filter(is_deleted=False,key=1).values_list("val", flat=True)))
tag_val_list = set()
for item in results_registr_tag:
for word in item.split():
tag_val_list.add(word)
tag_id_list = random.sample(range(0, len(tag_val_list)), 6)
for tag_id in tag_id_list:
tag_val = list(tag_val_list)[tag_id]
all_tag_name_list.add(tag_val)
logging.info("get all_tag_name_list:%s" % all_tag_name_list)
# Fetch the personalized (linUCB) tags
linucb_recommend_redis_prefix = "physical:linucb:tag_recommend:device_id:"
tag_recommend_redis_key = linucb_recommend_redis_prefix + str(device_id)
linucb_recommend_tag_data = redis_client.get(tag_recommend_redis_key)
linucb_recommend_tag_list = json.loads(linucb_recommend_tag_data) if linucb_recommend_tag_data else []
for item in linucb_recommend_tag_list:
results_tag_recommend = list(
set(Tag.objects.filter(id=item, is_online=True).values_list("name", flat=True)))
if results_tag_recommend:
all_tag_name_list.add(results_tag_recommend[0])
logging.info("get all_tag_name_list:%s" % all_tag_name_list)
if len(all_tag_name_list) == 12:
return {"recommend_tag_name": list(all_tag_name_list)}
# If there are not enough, fall back to core tags
if len(all_tag_name_list) < 12:
for i in range(0, 12):
tag_id = random.randint(1, len(results_tag))
results_tag_hexin = Tag.objects.filter(id=results_tag[tag_id], is_online=True,
collection=1).values_list("name",
flat=True)
if results_tag_hexin:
if results_tag_hexin[0] not in all_tag_name_list:
all_tag_name_list.add(results_tag_hexin[0])
logging.info("get all_tag_name_list:%s" % all_tag_name_list)
if len(all_tag_name_list) >= 12:
return {"recommend_tag_name": list(all_tag_name_list)}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_tag_name": []}
...@@ -10,6 +10,9 @@ from search.utils.topic import TopicUtils
from libs.es import ESPerform
from libs.cache import redis_client
from search.utils.common import *
from trans2es.models.tag import TopicTag,AccountUserTag,CommunityTagFollow,Tag
import time
from django.conf import settings
def get_highlight(fields=[]):
...@@ -24,33 +27,31 @@ def get_highlight(fields=[]): ...@@ -24,33 +27,31 @@ def get_highlight(fields=[]):
@bind("physical/search/query_tag") @bind("physical/search/query_tag")
def query_tag(query,offset,size): def query_tag(query,offset,size):
try: try:
""" if query:
q = { query = query.lower()
pre_q = {
"query":{ "query":{
"bool":{ "bool": {
"must":[ "must":[
{"term":{"is_online":True}}, {"term":{"name_pre": query}},
{"term": {"is_deleted": False}} {"term":{"is_online": True}}
], ]
"should":[
{"multi_match":{
"query": query,
"fields":["name"],
"operator":"and"}}
],
"minimum_should_match":1
} }
}, },
"sort":[
{"near_new_topic_num":{"order":"desc"}},
{'_score': {"order": "desc"}}
],
"_source": { "_source": {
"includes": ["id", "name"] "include": ["id", "name", "is_deleted", "is_online"]
} }
} }
q["highlight"] = get_highlight(["name"])
""" ret_list = list()
result_dict = ESPerform.get_search_results(ESPerform.get_cli(),sub_index_name="tag",query_body=pre_q,offset=0,size=1)
if len(result_dict["hits"])>0:
hitLight = u'<ems>%s</ems>' % query
result_dict["hits"][0]["_source"]["highlight"] = result_dict["hits"][0]["_source"]["name"].replace(query, hitLight)
ret_list.append(result_dict["hits"][0]["_source"])
size -= 1
q = { q = {
"suggest":{ "suggest":{
...@@ -58,6 +59,7 @@ def query_tag(query,offset,size): ...@@ -58,6 +59,7 @@ def query_tag(query,offset,size):
"prefix":query, "prefix":query,
"completion":{ "completion":{
"field":"suggest", "field":"suggest",
"size":size,
"contexts":{ "contexts":{
"is_online": [True], "is_online": [True],
"is_deleted": [False] "is_deleted": [False]
...@@ -73,7 +75,6 @@ def query_tag(query,offset,size): ...@@ -73,7 +75,6 @@ def query_tag(query,offset,size):
} }
} }
ret_list = list()
result_dict = ESPerform.get_search_results(ESPerform.get_cli(),sub_index_name="tag",query_body=q,offset=offset,size=size,is_suggest_request=True) result_dict = ESPerform.get_search_results(ESPerform.get_cli(),sub_index_name="tag",query_body=q,offset=offset,size=size,is_suggest_request=True)
for tips_item in result_dict["suggest"]["tips-suggest"]: for tips_item in result_dict["suggest"]["tips-suggest"]:
...@@ -121,3 +122,85 @@ def query_by_tag_type(tag_type_id,offset,size): ...@@ -121,3 +122,85 @@ def query_by_tag_type(tag_type_id,offset,size):
logging.error("catch exception,err_msg:%s" % traceback.format_exc()) logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"tag_list": []} return {"tag_list": []}
@bind("physical/search/choice_push_tag")
def choice_push_tag(device_id, user_id):
"""
:remark Pick tags for push notifications
:param device_id:
:param user_id:
:return:
"""
try:
redis_key_prefix = "physical:push_tag:user_id:"
redis_push_tag_key = redis_key_prefix + str(user_id)
redis_push_tag_data = redis_client.get(redis_push_tag_key)
redis_push_tag_dict = json.loads(redis_push_tag_data) if redis_push_tag_data else {}
now_sec = int(time.time())
valid_time = 8*7*24*60*60
ori_key_list = list(redis_push_tag_dict.keys())
for tag_id in ori_key_list:
if now_sec - redis_push_tag_dict[tag_id] >= valid_time:
redis_push_tag_dict.pop(tag_id)
redis_push_tag_list = list(redis_push_tag_dict.keys())
redis_push_tag_list = [int(item) for item in redis_push_tag_list]
account_user_tag_list = list(AccountUserTag.objects.filter(user=user_id,is_deleted=False).values_list("tag_id",flat=True))
community_tag_follow_list = list(CommunityTagFollow.objects.filter(user_id=user_id,is_online=True,is_deleted=False).values_list("tag_id",flat=True))
linucb_recommend_redis_prefix = "physical:linucb:tag_recommend:device_id:"
tag_recommend_redis_key = linucb_recommend_redis_prefix + str(device_id)
linucb_recommend_tag_data = redis_client.get(tag_recommend_redis_key)
linucb_recommend_tag_list = json.loads(linucb_recommend_tag_data) if linucb_recommend_tag_data else []
account_user_tag_list.extend(community_tag_follow_list)
account_user_tag_list.extend(linucb_recommend_tag_list)
unread_tag_list = list(set(account_user_tag_list) - set(redis_push_tag_list))
unread_tag_list = list(Tag.objects.filter(id__in=unread_tag_list, is_online=True, is_deleted=False).values_list("id",flat=True))
ret_tag_set = set()
if len(unread_tag_list)>0:
for tag_id in unread_tag_list:
valid_tag_topic_num = TopicTag.objects.filter(tag_id=tag_id,is_online=True).count()
if valid_tag_topic_num>100:
ret_tag_set.add(tag_id)
redis_push_tag_dict[tag_id] = now_sec
if len(ret_tag_set)>=1:
break
redis_client.set(redis_push_tag_key, json.dumps(redis_push_tag_dict))
return {"tag_list": list(ret_tag_set)}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"tag_list": []}
@bind("physical/search/identity_tag_name")
def identity_tag_name(topic_content):
try:
ret_tag_set = set()
redis_key_name = "physical:tag_name_set"
body = {
'text': topic_content,
'analyzer': "gm_default_index"
}
cli_info = settings.TAG_ES_INFO_LIST
res = ESPerform.get_analyze_results(es_cli=ESPerform.get_cli(cli_info=cli_info), sub_index_name="tag", query_body=body)
logging.info("duan add,res:%s" % str(res).encode("utf-8"))
for item in res["tokens"]:
token_word = item["token"]
is_member = redis_client.sismember(redis_key_name, token_word)
if is_member:
ret_tag_set.add(token_word)
return {"tag_name_list": list(ret_tag_set)}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"tag_name_list": []}
...@@ -14,7 +14,7 @@ from libs.es import ESPerform ...@@ -14,7 +14,7 @@ from libs.es import ESPerform
@bind("physical/search/recommend_user") @bind("physical/search/recommend_user")
def recommend_user(self_user_id,interesting_user_id,offset=0,size=10): def recommend_user(self_user_id, interesting_user_id, offset=0, size=10):
""" """
:remark 点关注推荐用户 :remark 点关注推荐用户
:param self_user_id: :param self_user_id:
...@@ -29,25 +29,29 @@ def recommend_user(self_user_id,interesting_user_id,offset=0,size=10): ...@@ -29,25 +29,29 @@ def recommend_user(self_user_id,interesting_user_id,offset=0,size=10):
if not isinstance(interesting_user_id, int): if not isinstance(interesting_user_id, int):
interesting_user_id = -1 interesting_user_id = -1
#获取es链接对象 # 获取es链接对象
es_cli_obj = ESPerform.get_cli() es_cli_obj = ESPerform.get_cli()
#获取关注用户列表 # 获取关注用户列表
(self_attention_user_id_list,recursion_attention_user_id_list) = UserUtils.get_attention_user_list([self_user_id,interesting_user_id],self_user_id,es_cli_obj) (self_attention_user_id_list, recursion_attention_user_id_list) = UserUtils.get_attention_user_list(
[self_user_id, interesting_user_id], self_user_id, es_cli_obj)
#去除自身及感兴趣的用户ID # 去除自身及感兴趣的用户ID
self_attention_user_id_list.append(self_user_id) self_attention_user_id_list.append(self_user_id)
self_attention_user_id_list.append(interesting_user_id) self_attention_user_id_list.append(interesting_user_id)
recommend_user_list = UserUtils.get_recommend_user_list(self_attention_user_id_list,recursion_attention_user_id_list,offset,size,es_cli_obj) recommend_user_list = UserUtils.get_recommend_user_list(self_attention_user_id_list,
recursion_attention_user_id_list, offset, size,
es_cli_obj)
return recommend_user_list return recommend_user_list
except: except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc()) logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return [] return []
@bind("physical/search/batch_recommend_user") @bind("physical/search/batch_recommend_user")
def batch_recommend_user(self_user_id,interesting_user_id_list,offset=0,size=10): def batch_recommend_user(self_user_id, interesting_user_id_list, offset=0, size=10):
""" """
:remark 点关注推荐用户 - 批量接口 :remark 点关注推荐用户 - 批量接口
:param self_user_id: :param self_user_id:
...@@ -57,23 +61,128 @@ def batch_recommend_user(self_user_id,interesting_user_id_list,offset=0,size=10) ...@@ -57,23 +61,128 @@ def batch_recommend_user(self_user_id,interesting_user_id_list,offset=0,size=10)
:return: :return:
""" """
try: try:
if not isinstance(self_user_id,int): if not isinstance(self_user_id, int):
self_user_id = -1 self_user_id = -1
#获取es链接对象 # 获取es链接对象
es_cli_obj = ESPerform.get_cli() es_cli_obj = ESPerform.get_cli()
#获取关注用户列表 # 获取关注用户列表
(need_filter_attention_user_id_list, attention_user_dict_list,attention_user_id_list) = UserUtils.get_batch_attention_user_list(interesting_user_id_list,self_user_id,es_cli_obj) (need_filter_attention_user_id_list, attention_user_dict_list,
attention_user_id_list) = UserUtils.get_batch_attention_user_list(interesting_user_id_list, self_user_id,
es_cli_obj)
# 去除自身及感兴趣的用户ID
#去除自身及感兴趣的用户ID
need_filter_attention_user_id_list.append(self_user_id) need_filter_attention_user_id_list.append(self_user_id)
recommend_user_dict = UserUtils.get_batch_recommend_user_dict(need_filter_attention_user_id_list=need_filter_attention_user_id_list,attention_user_id_list=attention_user_id_list,attention_user_dict_list=attention_user_dict_list,self_user_id=self_user_id,offset=offset,size=size,es_cli_obj=es_cli_obj) recommend_user_dict = UserUtils.get_batch_recommend_user_dict(
need_filter_attention_user_id_list=need_filter_attention_user_id_list,
attention_user_id_list=attention_user_id_list, attention_user_dict_list=attention_user_dict_list,
self_user_id=self_user_id, offset=offset, size=size, es_cli_obj=es_cli_obj)
logging.info("duan add,recommend_user_dict:%s" % str(recommend_user_dict)) logging.info("duan add,recommend_user_dict:%s" % str(recommend_user_dict))
return recommend_user_dict return recommend_user_dict
except: except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc()) logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {} return {}
\ No newline at end of file
@bind("physical/search/search_user")
def search_user(query="", offset=0, size=10):
"""
:remark Search users
:param query:
:param offset:
:param size:
:return:
Search field: user nickname
1. Query handling: fuzzy matching is allowed
2. Recall: all online users
3. Ranking: primary key - is_recommend, secondary key - topic (post) count
"""
try:
es_cli_obj = ESPerform.get_cli()
q = {}
# First fetch user IDs with an exact nickname match
sheer_user_id = []
q["query"] = {
"bool": {
"must": [
{"term": {
"nick_pre": query
}
}, {
"term": {
"is_recommend": True
}
}
]
}
}
q["sort"] = {
"count_topic": {
"order": "desc"
}
}
# que = {"query": {"term": {"nick_name_pre": query}}}
result_dict = ESPerform.get_search_results(es_cli_obj, "user", q, 0, 10)
res = result_dict["hits"]
if len(res) > 0:
sheer_user_id = [item["_source"]["user_id"] for item in res]
logging.info("get res:%s" % res)
# Then fetch user IDs with a fuzzy nickname match
multi_match = {
"fields": ["nick_name"],
"type": "cross_fields",
"operator": "and",
"query": query
}
q = {}
q["query"] = {
"bool": {
"must": [{
"multi_match": multi_match
}, {
"term": {
"is_online": True
}
}
]
}
}
q["sort"] = {
"is_recommend": {
"order": "desc"
},
"count_topic": {
"order": "desc"
},
}
logging.info("get q:%s" % q)
result_dict = ESPerform.get_search_results(es_cli_obj, "user", q, offset, size)
logging.info("get result_dict:%s" % result_dict)
search_user_id = []
res = result_dict["hits"]
if len(res) > 0:
search_user_id = [item["_source"]["user_id"] for item in res]
return {"sheer_user_id": sheer_user_id, "search_user_id": search_user_id}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"sheer_user_id": [], "search_user_id": []}
deb http://mirrors.aliyun.com/ubuntu/ bionic main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ bionic main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ bionic-security main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ bionic-security main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ bionic-updates main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ bionic-updates main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ bionic-backports main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ bionic-backports main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ bionic-proposed main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ bionic-proposed main restricted universe multiverse
-- Deploy flipr:sl_user_login_status to mysql
BEGIN;
CREATE TABLE `sl_user_login_status` (
`user_id` varchar(100) NOT NULL COMMENT '用户ID',
`is_shadow` tinyint(1) NOT NULL COMMENT '是否是马甲用户',
`first_visit_day` date COMMENT '首次日期',
`last_visit_day` date COMMENT '最后一次登陆日期',
`day_id` varchar(10) NOT NULL COMMENT '数据账期',
PRIMARY KEY (`user_id`),
INDEX `lv_day` (`last_visit_day`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='数据仓库推送表用户登录状态表';
-- XXX Add DDLs here.
COMMIT;
-- Revert flipr:sl_user_login_status from mysql
BEGIN;
DROP table sl_user_login_status;
-- XXX Add DDLs here.
COMMIT;
[core]
engine = mysql
# plan_file = sqitch.plan
# top_dir = .
# [engine "mysql"]
# target = db:mysql:
# registry = sqitch
# client = /usr/local/mysql/bin/mysql
%syntax-version=1.0.0
%project=flipr
%uri=https://github.com/sqitchers/sqitch-mysql-intro/
sl_user_login_status 2019-06-25T11:06:15Z Lxrent <lxrent@lxrentdeMacBook-Pro.local> # Data warehouse push table: user login status
-- Verify flipr:sl_user_login_status on mysql
BEGIN;
SELECT user_id FROM sl_user_login_status;
-- XXX Add verifications here.
ROLLBACK;
-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAvnSXGsqnlSxWVh9e4U40lWeV1f8DOD4B/atSjfxU9CJaSisc
xtmcLcyRY91lwx2OJJ0GvTx0DBifYBRsvpu8zrG33QQgr+StuJlIKUMlCXzaqsVt
u4wWgSLRks1nKFXVL4yIsKDxUAc60abHB3x9ygM+pS182iZAaW7yowe/E05IvnkU
jLifQRgQ2jLpgOmlxI+X2BDw988exIlwqfdErmEe06DMjuCbLnhbOUhD+Q88Xtn6
7UfxN4IiQTkRai405ZEhr5QKnqmx4RQF5Am+00p8caDksOQQYYQ0sKt+52rUnwfS
p7TQw6A+1QCZtaz3Sdbvhmace2mlC/Ekl2ZVCQIDAQABAoIBACEFkAl6J7JKnLGU
ANxptd5NtoIDFCjVP30fDlJF9OjbZ/JCJVwo8NZUCMVa5sD8x997RmbbK3fJcSC0
ieJGmGbyE2IDzDMTIDfIg+V7mdlaR4OidZM2B9TeF54vdPpeX3c+E5kkXpK0njMp
ioq2wAydoWf8ShB832Aod3Ni7XNIK6QtAQEWwJTPSbXLXx6+X8JIRoVFpZmNLkOA
MG1ElGPRH5fm92D/ZYfBMkDqPUFQF28f96MazD8EFDGRyOU/rpi4pIa+fZJYlIm1
ICzpk8UvSgg3xEWRhSvzoCixdxdVToX4JCZ8jSO6IlqDwa22WmB3rhjCVP3Ctbog
kStqizECgYEA8KRXxt78XVDKcW6Ydv2wmeN2JSQxxKgypU6Ux6amF8WY+0OThCdL
JDfva+ada04UVNdJ/dz3NDBr238e03pF8Z/gZp1NV3/m5rCARXrLIfxxCAVOae0P
nQVnBF270knUWM0vO6E+EhAzKlOcrTZXNQYdtGFic9IydNd1wXx98m0CgYEAypxK
JRny68YtKzwtaFlrgCkaJqGqExLglabskubFuh0g0878bLM4Ogpd514Z46ZUzUcv
859SQzGR48XGd7lUEZvQeAnCfalyl5dc/FDIiz/P2jiwjPvMGR6XVwWllJTWZwTc
H9TQ6ls3xigU9FO3ts0bEBqTVoGGl8xYWPHdq40CgYBbWDbNkaVAEsPVzQJo4KGh
uJsHPDb8DFC1OR/2rUaM7X/FmQAtAPFf/S+PyMlmiwvirJo0mCTqFZtLhXSBF//m
2SZILVvHZBCU7tiyBwuGihmpzsHWKZtsojlZie8awtWtI63TN8ClAKs7XOOzSFZQ
FVM/Plwt1NM8UPEtEgGI/QKBgQCT5w1VEiE7rfpRIme/Ue9gqiB17PO7Yr9hX245
Oo9CcnsBQ4sPSrET5XdxK0dY4JXtGDdP75DcjjYCgOCFi1tsU0MWLYG1u9l0AcGW
St5qkFWJ5nIzKKhv+d3eX7fkw9XTdD/AWNl9CsOnOqE5TlfA8O79jXja4EjBTSF9
JGp+DQKBgC04JCqYJ4jHTQLNCEh42tajL34K8VCltNWdHrAabnd9a16YqzsdVIZ/
xxOBghO9Xwhz666v8yh5TDGAR8XA9kCNbVxeDlqWP1oqWMpHXSwUN5Q7cH/l8M8F
YlQLOkFz4B9mSobZoiupYXS9mpe2kMase2FroYkTy6NFX8mKa93q
-----END RSA PRIVATE KEY-----
...@@ -7,6 +7,16 @@ import traceback
import logging
from libs.es import ESPerform
from trans2es.type_info import get_type_info_map, TypeInfo
from vest.reply import true_comment_one, true_comment_two, true_comment_three, one_seven_topic_comment
from vest.click import true_click_five, true_click_two, true_click_four, true_click_one, true_click_three, \
one_seven_star_topic
from vest.follow import auto_follow, auto_follow_new
from vest.urge import auto_star_urge, auto_lunch_app, auto_lunch_app2, auto_urge1, auto_urge2
from vest.fix import fix_no_comment_click
from vest.reply_answer import reply_comment2, reply_comment3, answer_reply2, answer_reply3, answer_reply1
from vest.request import get_session, auto_user_id
from vest.vest_majiauser import vest_click_reply
from vest.pictorial import principal_offline_comment1, principal_online_comment1,no_reply_principal
class Command(BaseCommand):
...@@ -27,10 +37,14 @@ class Command(BaseCommand):
make_option('-s', '--pks', dest='pks', help='specify sync pks, comma separated', metavar='PKS', default=''),
make_option('--streaming-slicing', dest='streaming_slicing', action='store_true', default=True),
make_option('--no-streaming-slicing', dest='streaming_slicing', action='store_false', default=True),
make_option('-m', '--mvest', dest='mvest', help='mvest reply comment', metavar='MVEST'),
)
def handle(self, *args, **options):
try:
logging.info("get-------------")
es_cli = ESPerform.get_cli()
type_name_list = get_type_info_map().keys()
...@@ -47,7 +61,6 @@ class Command(BaseCommand):
logging.info("begin create [%s] mapping!" % type_name)
ESPerform.put_index_mapping(es_cli, type_name, force_sync=True)
if len(options["indices_template"]):
template_file_name = options["indices_template"]
if ESPerform.put_indices_template(es_cli=es_cli, template_file_name=template_file_name,
...@@ -55,5 +68,84 @@ class Command(BaseCommand):
logging.info("put indices template suc!")
else:
logging.error("put indices template err!")
# Likes
if options["mvest"] == "true_click_one":
true_click_one.true_click_one()
if options["mvest"] == "true_click_two":
true_click_two.true_click_two()
if options["mvest"] == "true_click_three":
true_click_three.true_click_three()
if options["mvest"] == "true_click_four":
true_click_four.true_click_four()
if options["mvest"] == "true_click_five":
true_click_five.true_click_five()
if options["mvest"] == "one_seven_star_topic":
one_seven_star_topic.one_seven_star_topic()
# Comments
if options["mvest"] == "true_comment_one":
true_comment_one.true_comment_one()
if options["mvest"] == "true_comment_two":
true_comment_two.true_comment_two()
if options["mvest"] == "true_comment_three":
true_comment_three.true_comment_three()
if options["mvest"] == "one_seven_topic_comment":
one_seven_topic_comment.one_seven_topic_comment()
# Urge-for-update
if options["mvest"] == "auto_urge1":
auto_urge1.auto_urge1()
if options["mvest"] == "auto_urge2":
auto_urge2.auto_urge2()
if options["mvest"] == "auto_lunch_app":
auto_lunch_app.auto_lunch_app()
if options["mvest"] == "auto_lunch_app2":
auto_lunch_app2.auto_lunch_app2()
if options["mvest"] == "auto_star_urge":
auto_star_urge.auto_star_urge()
# Follow
if options["mvest"] == "auto_follow":
auto_follow.auto_follow()
if options["mvest"] == "auto_follow_new":
auto_follow_new.auto_follow_new()
# Backfill
if options["mvest"] == "fix_no_comment_click":
fix_no_comment_click.fix_no_comment_click()
# Second-level comments
if options["mvest"] == "answer_reply1":
answer_reply1.answer_reply1()
if options["mvest"] == "answer_reply2":
answer_reply2.answer_reply2()
if options["mvest"] == "answer_reply3":
answer_reply3.answer_reply3()
if options["mvest"] == "reply_comment2":
reply_comment2.reply_comment2()
if options["mvest"] == "reply_comment3":
reply_comment3.reply_comment3()
# Get session and user_id
if options["mvest"] == "get_login_session":
get_session.get_session()
if options["mvest"] == "get_user_id":
auto_user_id.auto_user_id()
# Sock-puppet accounts 3456
if options["mvest"] == "vest_click_reply":
vest_click_reply.vest_click_reply()
# Leaderboard comments
if options["mvest"] == "principal_offline_comment1":
principal_offline_comment1.principal_offline_comment1()
if options["mvest"] == "principal_online_comment1":
principal_online_comment1.principal_online_comment1()
if options["mvest"] == "no_reply_principal":
no_reply_principal.no_reply_principal()
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
...@@ -5,8 +5,8 @@
"is_online":{"type":"boolean"},//online
"is_deleted":{"type":"boolean"},
"is_recommend":{"type":"boolean"},
"name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_search"},
"description":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_search"},
"topic_num":{"type":"long"},
"creator_id":{"type":"long"},
"icon":{"type":"text"},
...@@ -14,8 +14,19 @@
"create_time":{"type":"date", "format":"date_time_no_millis"},
"update_time":{"type":"date", "format":"date_time_no_millis"},
"tag_id":{"type":"long"},
"tag_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_search"},
"topic_id_list":{"type":"long"},
"effective":{"type":"boolean"},
"offline_score":{"type":"long"},
"is_default":{"type":"long"},
"is_cover":{"type":"boolean"},
"topic_vote_number":{"type":"long"},
"activity_join":{"type":"long"},
"latest_real_reply_time":{"type":"date", "format":"date_time_no_millis"},
"latest_real_topic_time":{"type":"date", "format":"date_time_no_millis"},
"real_user_activate_time":{"type":"date", "format":"date_time_no_millis"},
"edit_tag_id":{"type":"long"},
"edit_tag_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_search"}
}
}
\ No newline at end of file
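The change above splits indexing and querying onto different custom analyzers (gm_default_index at index time, gm_default_search at search time). Both analyzers are defined in the index settings, which are not part of this diff; the sketch below only shows how their tokenization could be spot-checked, assuming a 7.x-style elasticsearch-py client, and the index name is a placeholder.

# Sketch: compare index-time vs search-time tokenization of the custom analyzers.
# Assumes the target index defines both gm_default_index and gm_default_search
# in its settings; host and index name are placeholders.
from elasticsearch import Elasticsearch

es = Elasticsearch(["http://127.0.0.1:9200"])

for analyzer in ("gm_default_index", "gm_default_search"):
    resp = es.indices.analyze(
        index="gm-dbmw-pictorial",  # placeholder index name
        body={"analyzer": analyzer, "text": "双眼皮恢复日记"},
    )
    print(analyzer, [t["token"] for t in resp["tokens"]])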
{
"dynamic":"strict",
"_routing": {"required": false},
"properties": {
"id":{"type":"long"}, //id
"is_online":{"type":"boolean"},//上线
"create_time":{"type":"date", "format":"date_time_no_millis"},
"update_time":{"type":"date", "format":"date_time_no_millis"},
"create_time_val":{"type":"long"},//创建时间
"update_time_val":{"type":"long"},//更新时间
"is_deleted":{"type":"boolean"}, //是否被删除
"price":{"type":"double"}, //价格
"cn_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"}, //商品名称
"en_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"alias":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"brand_cn_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"brand_en_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"brand_alias":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"category_cn_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"effect_cn_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"cn_name_pre":{"type": "text","analyzer":"keyword"}, //商品名称
"cn_name_sort":{"type": "text", "fielddata":"true"}, //商品名称
"en_name_pre":{"type": "text", "analyzer":"keyword"}, //商品原名
"alias_pre":{"type": "text", "analyzer":"keyword"},
"description":{"type":"text","analyzer":"keyword","search_analyzer":"keyword"},
"have_image":{"type":"boolean"},
"comment_nums":{"type":"long"},
"brand_cn_name_pre":{"type": "text", "analyzer":"keyword"}, //所属品牌名称
"brand_en_name_pre":{"type": "text", "analyzer":"keyword"}, //所属品牌原名
"brand_alias_pre":{"type": "text", "analyzer":"keyword"}, //所属品牌别名
"category_cn_name_pre":{"type": "text", "analyzer":"keyword"}, //所属类目的名称
"effect_cn_name_pre":{"type": "text", "analyzer":"keyword"}//所属功效的名称
}
}
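As a usage illustration of the product mapping above: the *_pre fields use the built-in keyword analyzer, so each value is indexed as a single token and matches only as a whole, while cn_name_sort enables fielddata so that text field can be sorted on. A hedged query sketch follows; the client setup and the index name are assumptions.

# Sketch: whole-value match on a keyword-analyzed field plus a sort on the
# fielddata-enabled cn_name_sort. Index name and brand value are placeholders.
from elasticsearch import Elasticsearch

es = Elasticsearch(["http://127.0.0.1:9200"])

body = {
    "query": {
        "bool": {
            "filter": [{"term": {"is_online": True}}],
            "must": [{"match": {"brand_cn_name_pre": "欧莱雅"}}],  # single-token exact match
        }
    },
    "sort": [{"cn_name_sort": {"order": "asc"}}],
    "size": 10,
}
resp = es.search(index="gm-dbmw-product", body=body)  # placeholder index name
print([hit["_source"]["cn_name"] for hit in resp["hits"]["hits"]])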
{
"dynamic":"strict",
"properties": {
"name":{"type":"keyword"}
}
}
\ No newline at end of file
...@@ -3,6 +3,8 @@
"properties": {
"id":{"type":"long"},
"suggest":{
"analyzer":"keyword",
"search_analyzer":"keyword",
"type":"completion",
"contexts":[
{
...@@ -18,6 +20,7 @@
]
},
"name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"name_pre":{"type":"text","analyzer":"keyword","search_analyzer":"keyword"},
"tag_type":{"type":"long"},
"collection":{"type":"long"},
"is_ai":{"type":"long"},
...
{
"dynamic":"strict",
"properties": {
"id":{"type":"long"},
"suggest":{
"analyzer":"keyword",
"search_analyzer":"keyword",
"type":"completion",
"contexts":[
{
"name":"is_online",
"type": "category",
"path": "is_online"
},
{
"name":"is_deleted",
"type": "category",
"path": "is_deleted"
}
]
},
"name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"name_pre":{"type":"text","analyzer":"keyword","search_analyzer":"keyword"},
"tag_type":{"type":"long"},
"collection":{"type":"long"},
"is_ai":{"type":"long"},
"is_own":{"type":"long"},
"is_online":{"type":"keyword"},//上线
"is_deleted":{"type":"keyword"},
"near_new_topic_num":{"type":"long","store": true}
}
}
\ No newline at end of file
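The suggest field above is a completion type carrying two category contexts (is_online, is_deleted), so suggest queries can filter candidates by those values; since is_online and is_deleted are mapped as keyword here, the context values are passed as strings. A hedged query sketch follows, with the index name and the exact string encoding of the context values assumed.

# Sketch: context-filtered completion suggest against the tag suggest field.
# Index name and the context value encoding ("True"/"False" vs "1"/"0") are
# assumptions -- check how the indexing code writes is_online/is_deleted.
from elasticsearch import Elasticsearch

es = Elasticsearch(["http://127.0.0.1:9200"])

body = {
    "suggest": {
        "tag-suggest": {
            "prefix": "双眼",
            "completion": {
                "field": "suggest",
                "size": 10,
                "contexts": {
                    "is_online": ["True"],    # assumed encoding
                    "is_deleted": ["False"],  # assumed encoding
                },
            },
        }
    }
}
resp = es.search(index="gm-dbmw-tag-v1", body=body)  # placeholder index name
for option in resp["suggest"]["tag-suggest"][0]["options"]:
    print(option["text"])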
{
"dynamic":"strict",
"_routing": {"required": true},
"properties": {
"id":{"type":"long"},
"is_online":{"type":"boolean"},//online
...@@ -13,8 +14,11 @@
"content_level":{"type":"text"},
"user_id":{"type":"long"},
"user_nick_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},//post author nickname
"user_nick_name_pre": {"type":"text","analyzer":"keyword"}, //nickname, not analyzed
"group_id":{"type":"long"}, //group id
"tag_list":{"type":"long"},//tag ids
"latest_reply_time":{"type":"date", "format":"date_time_no_millis"},
"useful_tag_list":{"type":"long"},//useful tag ids
"edit_tag_list":{"type":"long"},//editor tag ids
"tag_name_list":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"share_num":{"type":"long"},
...@@ -49,7 +53,22 @@
"type": "text",
"analyzer": "gm_default_index",
"search_analyzer": "gm_default_index"
},
"is_excellent":{"type": "long"},
"is_operation_home_recommend": {"type": "boolean"}, //whether recommended on the operations home page
"is_history": {"type": "boolean"}, //whether historical data
"related_billboard":{
"type":"nested",
"properties":{
"pictorial_id":{"type":"long"},
"topic_add_createtime":{"type":"long"},
"real_vote_cnt":{"type":"long"},
"virt_vote_cnt":{"type":"long"},
"total_vote_cnt":{"type":"long"}
}
}
}
}
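related_billboard above is a nested object, so per-pictorial vote counts have to be reached through nested query and nested sort clauses rather than flat field access. A hedged sketch for ranking topics within one pictorial by total votes follows; the index name and pictorial_id are placeholders.

# Sketch: filter topics belonging to a given pictorial via the nested
# related_billboard block and sort by that entry's total_vote_cnt.
from elasticsearch import Elasticsearch

es = Elasticsearch(["http://127.0.0.1:9200"])

pictorial_id = 123  # placeholder
nested_filter = {"term": {"related_billboard.pictorial_id": pictorial_id}}

body = {
    "query": {
        "nested": {
            "path": "related_billboard",
            "query": {"bool": {"filter": [nested_filter]}},
        }
    },
    "sort": [
        {
            "related_billboard.total_vote_cnt": {
                "order": "desc",
                "nested": {"path": "related_billboard", "filter": nested_filter},
            }
        }
    ],
    "size": 20,
}
resp = es.search(index="gm-dbmw-topic", body=body)  # placeholder index name
print([hit["_source"]["id"] for hit in resp["hits"]["hits"]])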