Commit ee0f36c8 authored by lixiaofang

Merge branch 'master' of git.wanmeizhensuo.com:alpha/physical

parents ce8cb6ba 20ffa800
...@@ -18,5 +18,6 @@
        <element value="injection.data_sync.tasks"/>
        <element value="search.views.contrast_similar"/>
        <element value="search.views.search_hotword"/>
+       <element value="search.views.product"/>
    </config>
</gm_rpcd_config>
...@@ -414,8 +414,7 @@ class ESPerform(object):
                    "include": ["id","user_id"]
                },
                "sort": [
-                    {"_score": {"order": "desc"}},
-                    {"latest_reply_time":{"order": "desc"}},
+                    {"latest_reply_time": {"order": "desc"}},
                    {"create_time_val": {"order": "desc"}},
                    {"language_type": {"order": "asc"}},
                ],
...
...@@ -22,15 +22,13 @@ def tzlc(dt, truncate_to_sec=True):
    return timezone(settings.TIME_ZONE).normalize(dt)

-def get_have_read_topic_id_list(device_id,user_id,query_type):
+def get_have_read_topic_id_list(device_id, user_id, query_type):
    try:
-        if user_id and int(user_id)>0:
+        if user_id and int(user_id) > 0:
            redis_key = "physical:home_recommend" + ":user_id:" + str(user_id) + ":query_type:" + str(query_type)
        else:
            redis_key = "physical:home_recommend" + ":device_id:" + str(device_id) + ":query_type:" + str(query_type)

        have_read_topic_id_list = list()
        redis_field_list = [b'have_read_topic_list']
        redis_field_val_list = redis_client.hmget(redis_key, redis_field_list)
...@@ -40,4 +38,4 @@ def get_have_read_topic_id_list(device_id,user_id,query_type):
        return have_read_topic_id_list
    except:
        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
        return list()
\ No newline at end of file
...@@ -94,7 +94,7 @@ class CollectData(object):
                #     }
                #     redis_client.hmset(click_recommend_redis_key, click_redis_data_dict)

-                tag_id_list = recommend_tag_list[0:100]
+                tag_id_list = recommend_tag_list[0:20]

                topic_recommend_redis_key = self.linucb_recommend_topic_id_prefix + str(device_id)
                # redis_topic_data_dict = redis_client.hgetall(topic_recommend_redis_key)
...@@ -159,7 +159,7 @@ class CollectData(object):
                    click_topic_tag_list = list()
                    if "on_click_feed_topic_card" == raw_val_dict["type"]:
-                        topic_id = raw_val_dict["params"]["business_id"] or raw_val_dict["params"]["topic_id"]
+                        topic_id = raw_val_dict["params"]["topic_id"]
                        device_id = raw_val_dict["device"]["device_id"]
                        user_id = raw_val_dict["user_id"] if "user_id" in raw_val_dict else None
...
# !/usr/bin/env python
# -*- coding: utf-8 -*-

+DATABASE_APPS_MAPPING = {'face': 'face', 'commodity': 'commodity'}


class DBRouter:
...@@ -12,32 +13,46 @@ class DBRouter:
        """
        Attempts to read user models go to users_db.
        """
-        if model._meta.app_label == 'face':
-            return 'face'
+        # if model._meta.app_label == 'face':
+        #     return 'face'
+        if model._meta.app_label in DATABASE_APPS_MAPPING:
+            return DATABASE_APPS_MAPPING[model._meta.app_label]
        return None

    def db_for_write(self, model, **hints):
        """
        Attempts to write user models go to users_db.
        """
-        if model._meta.app_label == 'face':
-            return 'face'
+        # if model._meta.app_label == 'face':
+        #     return 'face'
+        if model._meta.app_label in DATABASE_APPS_MAPPING:
+            return DATABASE_APPS_MAPPING[model._meta.app_label]
        return None

    def allow_relation(self, obj1, obj2, **hints):
        """
        Allow relations if a model in the user app is involved.
-        """
-        if obj1._meta.app_label == 'face' or \
-                obj2._meta.app_label == 'face':
-            return True
-        return None
+        # """
+        db_obj1 = DATABASE_APPS_MAPPING.get(obj1._meta.app_label)
+        db_obj2 = DATABASE_APPS_MAPPING.get(obj2._meta.app_label)
+        if db_obj1 and db_obj2:
+            if db_obj1 == db_obj2:
+                return True
+            else:
+                return False
+        else:
+            return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        """
        Make sure the auth app only appears in the 'users_db'
        database.
        """
-        if app_label == 'face':
-            return db == 'face'
+        if db in DATABASE_APPS_MAPPING.values():
+            return DATABASE_APPS_MAPPING.get(app_label) == db
+        elif app_label in DATABASE_APPS_MAPPING:
+            return False
        return None
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import json
from libs.cache import redis_client
from libs.es import ESPerform
from search.utils.group import GroupUtils
from search.utils.common import GroupSortTypes
from libs.es import ESPerform
from trans2es.models.pictorial import PictorialTopics
class ProductUtils(object):
@classmethod
def get_product_sku(cls, query='', offset=0, size=10, filters={}):
try:
multi_fields = {
'cn_name': 2,
'en_name': 2,
'alias': 2,
'brand_cn_name': 2,
'brand_en_name': 2,
'brand_alias': 2,
'category_cn_name': 2,
"effect_cn_name": 2
}
query_fields = ['^'.join((k, str(v))) for (k, v) in multi_fields.items()]
multi_match = {
'query': query,
'type': 'cross_fields',
'operator': 'and',
'fields': query_fields,
}
sku_must_flter = cls.sku_filter(filters)
logging.info("get sku_must_flter:%s " % sku_must_flter)
logging.info("get sku_must_flter:%s " % type(sku_must_flter))
q = {
"query": {
"bool": {
"must": sku_must_flter
}
}
}
if query != '':
q = {
"query": {
"bool": {
"must": sku_must_flter,
"should": {
"multi_match": multi_match
},
"minimum_should_match": 1
}
}
}
q["sort"] = [{"comment_nums": {"order": "desc"}}, {"cn_name_sort": {"order": "asc"}}]
logging.info("get product query:%s" % q)
result_dict = ESPerform.get_search_results(ESPerform.get_cli(), sub_index_name="product", query_body=q,
offset=offset, size=size)
return result_dict
except:
logging.error("catch exception, query_sku:%s" % traceback.format_exc())
return []
@classmethod
def sku_filter(cls, filters):
"""处理过滤器部分。"""
logging.info("get filters:%s" % filters)
f = [
{'term': {"have_image": True}},
{'term': {"is_online": True}},
{"term": {"is_deleted": False}},
]
if not filters:
return f
for k, v in filters.items():
if v in (None, '', []):
continue
if k == "brand":
f.append({"term": {"brand_cn_name_pre": v}})
if k == "effect":
f.append({"term": {"effect_cn_name_pre": v}})
if k == "category":
f.append({"term": {"category_cn_name_pre": v}})
return f
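Illustration (not part of the commit) of what sku_filter() returns for one made-up filters dict; the field names follow the product mapping added later in this commit:

# Sample input -- the values are fabricated for illustration.
sample_filters = {"brand": "SK-II", "effect": "", "category": "面霜"}

# sku_filter(sample_filters) yields the three base clauses plus one term clause
# per non-empty filter; "effect" is skipped because its value is empty.
expected = [
    {'term': {"have_image": True}},
    {'term': {"is_online": True}},
    {"term": {"is_deleted": False}},
    {"term": {"brand_cn_name_pre": "SK-II"}},
    {"term": {"category_cn_name_pre": "面霜"}},
]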
...@@ -173,29 +173,6 @@ class TopicUtils(object):
                }
            ]

-        if user_id and user_id > 0:
-            redis_key_prefix = "physical:user_similar:participant_user_id:"
-            similar_redis_key = redis_key_prefix + str(user_id)
-            redis_user_similar_data = redis_client.get(similar_redis_key)
-            user_similar_list = json.loads(redis_user_similar_data) if redis_user_similar_data else []
-            if len(user_similar_list) > 0:
-                functions_list.extend(user_similar_list)
-        if len(attention_user_id_list) > 0:
-            functions_list.append(
-                {
-                    "filter": {"constant_score": {"filter": {"terms": {"user_id": attention_user_id_list}}}},
-                    "weight": 100,
-                }
-            )
-        if len(attention_tag_list) > 0:
-            functions_list.append(
-                {
-                    "filter": {"bool": {
-                        "should": {"terms": {"tag_list": attention_tag_list}}}},
-                    "weight": 100
-                }
-            )
        query_function_score = {
            "query": {
                "bool": {
...@@ -283,16 +260,41 @@ class TopicUtils(object):
            query_function_score["query"]["bool"]["filter"].append(
                {"term": {"content_level": 6}}
            )
-        if not disable_collpase:
-            q["collapse"] = {
-                "field": "user_id"
-            }
        q["_source"] = {
            "includes": ["id"]
        }
        if query is None:
+            if user_id and user_id > 0:
+                redis_key_prefix = "physical:user_similar:participant_user_id:"
+                similar_redis_key = redis_key_prefix + str(user_id)
+                redis_user_similar_data = redis_client.get(similar_redis_key)
+                user_similar_list = json.loads(redis_user_similar_data) if redis_user_similar_data else []
+                if len(user_similar_list) > 0:
+                    functions_list.extend(user_similar_list)
+            if len(attention_user_id_list) > 0:
+                functions_list.append(
+                    {
+                        "filter": {"constant_score": {"filter": {"terms": {"user_id": attention_user_id_list}}}},
+                        "weight": 100,
+                    }
+                )
+            if len(attention_tag_list) > 0:
+                functions_list.append(
+                    {
+                        "filter": {"bool": {
+                            "should": {"terms": {"tag_list": attention_tag_list}}}},
+                        "weight": 100
+                    }
+                )
+            query_function_score["functions"] = functions_list
            q["query"]["function_score"] = query_function_score

+            if not disable_collpase:
+                q["collapse"] = {
+                    "field": "user_id"
+                }
            q["sort"] = [
                # {
                #     "_script": {
...@@ -346,7 +348,9 @@ class TopicUtils(object):
                },
                "minimum_should_match": 1,
                "should": [
-                    {'multi_match': multi_match},
+                    {'match_phrase': {"content":query}},
+                    {'match_phrase': {"tag_name_list": query}},
+                    # {'multi_match': multi_match},
                    {"term": {"tag_list": tag_id}},
                    {"term": {"user_nick_name_pre": query.lower()}}
                ]
...@@ -366,7 +370,9 @@ class TopicUtils(object):
                },
                "minimum_should_match": 1,
                "should": [
-                    {'multi_match': multi_match},
+                    {'match_phrase': {"content": query}},
+                    {'match_phrase': {"tag_name_list": query}},
+                    # {'multi_match': multi_match},
                    {"term": {"tag_list": tag_id}},
                    {"term": {"user_nick_name_pre": query.lower()}}
                ]
...@@ -386,7 +392,9 @@ class TopicUtils(object):
                },
                "minimum_should_match": 1,
                "should": [
-                    {'multi_match': multi_match},
+                    {'match_phrase': {"content": query}},
+                    {'match_phrase': {"tag_name_list": query}},
+                    # {'multi_match': multi_match},
                    {"term": {"tag_list": tag_id}},
                    {"term": {"user_nick_name_pre": query.lower()}}
                ]
...
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-from gm_rpcd.all import bind
import logging
import traceback
import json
+import time
+import datetime
+from libs.cache import redis_client
+from gm_rpcd.all import bind
from libs.cache import redis_client
from libs.es import ESPerform
from search.utils.group import GroupUtils
from search.utils.common import GroupSortTypes
from libs.es import ESPerform
from trans2es.models.pictorial import PictorialTopics
+from trans2es.models.pictorial import CommunityPictorialActivity
+from alpha_types.venus import PICTORIAL_ACTIVITY_SORT


@bind("physical/search/query_pictorial")
...@@ -333,6 +338,105 @@ def search_physical(query="", offset=0, size=10):
    return {"search_pictorial_ids": []}
@bind("physical/search/pictorial_activity")
def pictorial_activity_sort(activity_id=0, offset=0, size=10, sort_type=1):
"""
Ranking for a leaderboard activity, and ordering for its "latest" tab.
The ranking tab orders pictorials by the total vote count of their topics, descending.
The "latest" tab orders by the pictorial's create time.
Offline pictorials and pictorials without a cover are filtered out.
:param activity_id:
:param offset:
:param size:
:param sort_type:
:return:
"""
try:
now = datetime.datetime.now()
activity_status = CommunityPictorialActivity.objects.filter(id=activity_id).values_list("end_time",
flat=True).first()
now = int(time.mktime(now.timetuple()))
activity_status = int(time.mktime(activity_status.timetuple()))
logging.info("get now:%s" % now)
logging.info("get activity_status:%s" % activity_status)
pictorial_ids_list = []
if sort_type == 1:
redis_key = "pictorial:activity:vote:id:" + str(activity_id)
else:
redis_key = "pictorial:activity:create:id:" + str(activity_id)
if now < activity_status:
q = {
"query": {
"bool": {
"must": [
{
"term": {
"activity_join": activity_id
}
},
{
"term": {
"is_cover": True
}
},
{
"term": {
"is_online": True
}
}
]
}
}
}
q["sort"] = process_sort(sort_type)
es_cli_obj = ESPerform.get_cli()
result_dict = ESPerform.get_search_results(es_cli_obj, "pictorial", q, offset, size)
if len(result_dict["hits"]) > 0:
for item in result_dict["hits"]:
pictorial_id = item["_source"]["id"]
vote_num = item["_source"]["topic_vote_number"]
pictorial_ids_list.append({"pictorial_id": pictorial_id, "vote_num": vote_num})
redis_client.set(redis_key, json.dumps(pictorial_ids_list))
else:
redis_field_val_list = redis_client.get(redis_key)
pictorial_ids_list_all = json.loads(str(redis_field_val_list, encoding="utf-8"))
pictorial_ids_list = pictorial_ids_list_all[offset:size + offset]
return {"pictorial_activity_sort": pictorial_ids_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"pictorial_activity_sort": []}
def process_sort(sorts_by):
"""处理排序部分。"""
sort_rule = []
if isinstance(sorts_by, int):
if sorts_by == PICTORIAL_ACTIVITY_SORT.VOTE_NUM:
sort_rule.append({
"topic_vote_number": {
"order": "desc"
}
})
if sorts_by == PICTORIAL_ACTIVITY_SORT.CREATE_TIME:
sort_rule.append({
"create_time": {
"order": "desc"
}
})
logging.info("get picotirial:%s" % sort_rule)
return sort_rule
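Quick illustration of process_sort (not part of the commit); it assumes PICTORIAL_ACTIVITY_SORT.VOTE_NUM and PICTORIAL_ACTIVITY_SORT.CREATE_TIME are the integer constants exported by alpha_types.venus:

# Assuming VOTE_NUM is the sort_type used for the ranking tab and CREATE_TIME
# for the "latest" tab (the actual integer values live in alpha_types.venus).
print(process_sort(PICTORIAL_ACTIVITY_SORT.VOTE_NUM))
# -> [{"topic_vote_number": {"order": "desc"}}]
print(process_sort(PICTORIAL_ACTIVITY_SORT.CREATE_TIME))
# -> [{"create_time": {"order": "desc"}}]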
@bind("physical/search/pictorialid_topic") @bind("physical/search/pictorialid_topic")
def pictorial_topic_sort_peoplehot(pictorial_id=-1, user_id=-1, offset=0, size=10): def pictorial_topic_sort_peoplehot(pictorial_id=-1, user_id=-1, offset=0, size=10):
try: try:
...@@ -399,3 +503,28 @@ def pictorial_topic_sort_peoplehot(pictorial_id=-1, user_id=-1, offset=0, size=1 ...@@ -399,3 +503,28 @@ def pictorial_topic_sort_peoplehot(pictorial_id=-1, user_id=-1, offset=0, size=1
except: except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc()) logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"pict_pictorial_ids_list": []} return {"pict_pictorial_ids_list": []}
@bind("physical/search/pictorial_activity_sort")
def get_pictorial_activeity_sort(activity_id=0, pictorial_id=0):
try:
pictorial_activity_sort_only = []
pictorial_ids_list = pictorial_activity_sort(activity_id=activity_id, size=999, offset=0, sort_type=1)
logging.info("get pictorial_ids_list:%s" % pictorial_ids_list)
data = pictorial_ids_list.get("pictorial_activity_sort", [])
logging.info("get data:%s" % data)
if data:
pictorial_ids = []
for item in data:
pictorial_ids.append(item["pictorial_id"])
if pictorial_id in pictorial_ids:
pictorial_sort = pictorial_ids.index(pictorial_id)
pictorial_activity_sort_only.append(
{"pictorial_id": pictorial_id, "pictorial_sort": pictorial_sort + 1})
else:
pictorial_activity_sort_only.append({"pictorial_id": pictorial_id, "pictorial_sort": 1000})
return {"pictorial_activity_sort": pictorial_activity_sort_only}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"pictorial_activity_sort": []}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import json
from libs.cache import redis_client
from libs.es import ESPerform
from search.utils.product import ProductUtils
from search.utils.common import GroupSortTypes
from libs.es import ESPerform
@bind("physical/search/query_product_sku")
def product_hot_sort(query='', offset=0, size=10, filters={}):
'''
Rank product SKUs.
:param query:
:param offset:
:param size:
:param filters:
:return:
'''
try:
res = ProductUtils.get_product_sku(query=query, offset=offset, size=size, filters=filters)
product_list = []
res_hit = res["hits"]
for item in res_hit:
product_id = item["_source"]["id"]
product_list.append(product_id)
return {"product_hot_ids": product_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"product_hot_ids": []}
...@@ -49,7 +49,7 @@ def query_tag(query,offset,size):
    result_dict = ESPerform.get_search_results(ESPerform.get_cli(),sub_index_name="tag",query_body=pre_q,offset=0,size=1)
    if len(result_dict["hits"])>0:
        hitLight = u'<ems>%s</ems>' % query
-        result_dict["hits"][0]["_source"]["highlight"] = result_dict["hits"][0]["name"].replace(query, hitLight)
+        result_dict["hits"][0]["_source"]["highlight"] = result_dict["hits"][0]["_source"]["name"].replace(query, hitLight)
        ret_list.append(result_dict["hits"][0]["_source"])
        size -= 1
...
...@@ -19,6 +19,9 @@
        "effective":{"type":"boolean"},
        "offline_score":{"type":"long"},
        "is_default":{"type":"long"},
-        "is_cover":{"type":"boolean"}
+        "is_cover":{"type":"boolean"},
+        "topic_vote_number":{"type":"long"},
+        "activity_join":{"type":"long"}
    }
}
\ No newline at end of file
{
"dynamic":"strict",
"_routing": {"required": false},
"properties": {
"id":{"type":"long"}, //id
"is_online":{"type":"boolean"},//上线
"create_time":{"type":"date", "format":"date_time_no_millis"},
"update_time":{"type":"date", "format":"date_time_no_millis"},
"create_time_val":{"type":"long"},//创建时间
"update_time_val":{"type":"long"},//更新时间
"is_deleted":{"type":"boolean"}, //是否被删除
"price":{"type":"double"}, //价格
"cn_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"}, //商品名称
"en_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"alias":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"brand_cn_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"brand_en_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"brand_alias":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"category_cn_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"effect_cn_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"cn_name_pre":{"type": "text","analyzer":"keyword"}, //商品名称
"cn_name_sort":{"type": "text", "fielddata":"true"}, //商品名称
"en_name_pre":{"type": "text", "analyzer":"keyword"}, //商品原名
"alias_pre":{"type": "text", "analyzer":"keyword"},
"description":{"type":"text","analyzer":"keyword","search_analyzer":"keyword"},
"have_image":{"type":"boolean"},
"comment_nums":{"type":"long"},
"brand_cn_name_pre":{"type": "text", "analyzer":"keyword"}, //所属品牌名称
"brand_en_name_pre":{"type": "text", "analyzer":"keyword"}, //所属品牌原名
"brand_alias_pre":{"type": "text", "analyzer":"keyword"}, //所属品牌别名
"category_cn_name_pre":{"type": "text", "analyzer":"keyword"}, //所属类目的名称
"effect_cn_name_pre":{"type": "text", "analyzer":"keyword"}//所属功效的名称
}
}
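A minimal sketch (not taken from this repo) of registering the mapping above with the elasticsearch-py client; in the project itself index creation goes through ESPerform. The host, index name, and pre-7 mapping-type layout are all assumptions:

from elasticsearch import Elasticsearch

# Assumed host and index name; the "product" type matches the sub_index_name
# used by the new search view.
es = Elasticsearch(["127.0.0.1:9200"])
product_mapping = {
    "dynamic": "strict",
    "_routing": {"required": False},
    "properties": {
        "id": {"type": "long"},
        "is_online": {"type": "boolean"},
        "comment_nums": {"type": "long"},
        "cn_name": {"type": "text", "analyzer": "gm_default_index",
                    "search_analyzer": "gm_default_index"},
        "cn_name_sort": {"type": "text", "fielddata": True},
        # ... remaining fields exactly as in the mapping file above
    },
}
es.indices.create(index="gm-dbmw-product", body={"mappings": {"product": product_mapping}})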
...@@ -4,7 +4,7 @@ import logging
import traceback

from .tag import Tag
-from .topic import Topic
+from .topic import Topic, TopicBillBoard


class PictorialFollow(models.Model):
...@@ -78,7 +78,8 @@ class Pictorial(models.Model):
            for topic_id in topic_id_list:
                topic_id_object = Topic.objects.filter(id=int(topic_id)).first()
-                if topic_id_object and topic_id_object.is_online and int(topic_id_object.content_level) in [0, 3, 4, 5,6]:
+                if topic_id_object and topic_id_object.is_online and int(topic_id_object.content_level) in [0, 3, 4, 5,
+                                                                                                             6]:
                    effective_num += 1
                    if effective_num >= 5:
                        ret = True
...@@ -145,6 +146,31 @@ class Pictorial(models.Model):
            logging.error("catch exception,err_msg:%s" % traceback.format_exc())
            return False
def get_topic_vote_number(self):
try:
topic_vote = TopicBillBoard.objects.filter(pictorial_id=self.id).values("real_vote_cnt", "virt_vote_cnt")
total_vote_cnt = 0
if topic_vote:
for item in topic_vote:
total_vote_cnt += int(item["virt_vote_cnt"]) + int(item["real_vote_cnt"])
return total_vote_cnt
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return 0
def get_activity_join(self):
try:
activicy_list = list(CommunityPictorialActivityRelation.objects.filter(pictorial_id=self.id, is_online=True,
is_deleted=False).values_list(
"pictorial_activity_id", flat=True))
return activicy_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
class PictorialTag(models.Model):
    """画报关注标签"""
...@@ -160,3 +186,31 @@ class PictorialTag(models.Model):
    pictorial_id = models.BigIntegerField(verbose_name=u'画报ID', max_length=20)
    tag_id = models.BigIntegerField(verbose_name=u'标签ID', max_length=20)
    is_online = models.BooleanField(verbose_name=u'是否上线', max_length=1)
class CommunityPictorialActivity(models.Model):
"""榜单活动"""
class Meta:
verbose_name = u"画报标签"
app_label = "community"
db_table = "community_pictorial_activity"
start_time = models.DateTimeField(verbose_name=u'开始时间', default=datetime.datetime.fromtimestamp(0))
end_time = models.DateTimeField(verbose_name=u'结束时间', default=datetime.datetime.fromtimestamp(0))
is_online = models.BooleanField(verbose_name=u'是否上线', max_length=1)
is_deleted = models.BooleanField(verbose_name=u'是否被删除', max_length=1)
class CommunityPictorialActivityRelation(models.Model):
"""榜单活动关系"""
class Meta:
verbose_name = u"画报标签"
app_label = "community"
db_table = "community_pictorial_activity_relation"
is_online = models.BooleanField(verbose_name=u'是否上线', max_length=1)
is_deleted = models.BooleanField(verbose_name=u'是否被删除', max_length=1)
pictorial_activity_id = models.BigIntegerField(verbose_name=u'活动ID', max_length=20)
pictorial_id = models.BigIntegerField(verbose_name=u'榜单ID', max_length=20)
import datetime
from django.db import models
import logging
import traceback
from .tag import Tag
from .topic import Topic
class CommodityProduct(models.Model):
"""画报关注"""
class Meta:
verbose_name = u"商品"
app_label = "commodity"
db_table = "commodity_product"
id = models.IntegerField(verbose_name=u'商品ID', primary_key=True)
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
is_online = models.BooleanField(verbose_name=u'是否上线')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
cn_name = models.CharField(verbose_name=u'商品名称', max_length=64)
en_name = models.CharField(verbose_name=u'商品原名', max_length=64, default="")
alias = models.CharField(verbose_name=u'别名', max_length=64)
image = models.CharField(verbose_name=u'图片', max_length=120)
description = models.CharField(verbose_name=u'商品描述', max_length=200)
comment_nums = models.IntegerField(verbose_name=u'评论数', max_length=11)
price = models.IntegerField(verbose_name="价格", max_length=11)
def get_brand_name(self):
try:
brand_id = CommodityProductBrand.objects.filter(product_id=self.id, is_deleted=False).values_list(
"brand_id", flat=True)
result_name = CommodityBrand.objects.filter(id=brand_id, is_online=True, is_deleted=False).values("cn_name",
"en_name",
"alias").first()
return result_name
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_category_en_name(self):
try:
category_id = CommodityProductCategory.objects.filter(product_id=self.id, is_deleted=False).values_list(
"category_id", flat=True)
result_name = CommodityCategory.objects.filter(id=category_id, is_online=True,
is_deleted=False).values_list("cn_name", flat=True)
return list(result_name)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return ""
def get_effect_cn_name(self):
try:
effect_id = CommodityProductEffect.objects.filter(product_id=self.id, is_deleted=False).values_list(
"effect_id", flat=True)
result_name = CommodityEffect.objects.filter(id=effect_id, is_deleted=False).values_list("cn_name",
flat=True)
return list(result_name)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return ""
class CommodityBrand(models.Model):
"""品牌"""
class Meta:
verbose_name = u'品牌'
app_label = 'commodity'
db_table = 'commodity_brand'
id = models.IntegerField(verbose_name=u'品牌ID', primary_key=True)
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
is_online = models.BooleanField(verbose_name=u'是否上线')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
cn_name = models.CharField(verbose_name=u'品牌名称', max_length=64)
en_name = models.CharField(verbose_name=u'品牌原名', max_length=64)
alias = models.CharField(verbose_name=u'别名', max_length=64)
description = models.CharField(verbose_name=u'品牌描述', max_length=200)
class CommodityCategory(models.Model):
"""类目"""
class Meta:
verbose_name = u'类目'
app_label = 'commodity'
db_table = 'commodity_category'
id = models.IntegerField(verbose_name=u'品牌ID', primary_key=True)
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
is_online = models.BooleanField(verbose_name=u'是否上线')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
cn_name = models.CharField(verbose_name=u'品牌名称', max_length=64)
class CommodityEffect(models.Model):
"""功效"""
class Meta:
verbose_name = u'功效'
app_label = 'commodity'
db_table = 'commodity_effect'
id = models.IntegerField(verbose_name=u'品牌ID', primary_key=True)
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
is_deleted = models.BooleanField(verbose_name=u'是否删除')
cn_name = models.CharField(verbose_name=u'品牌名称', max_length=64)
class CommodityProductBrand(models.Model):
"""商品品牌关系"""
class Meta:
verbose_name = u'商品品牌关系'
app_label = 'commodity'
db_table = 'commodity_productbrand'
id = models.IntegerField(verbose_name=u'ID', primary_key=True)
product_id = models.BigIntegerField(verbose_name=u'商品ID')
brand_id = models.BigIntegerField(verbose_name=u'品牌ID')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
class CommodityProductCategory(models.Model):
"""画报关注"""
class Meta:
verbose_name = u"画报"
app_label = "commodity"
db_table = "commodity_productcategory"
id = models.IntegerField(verbose_name=u'关注ID', primary_key=True)
is_deleted = models.BooleanField(verbose_name=u'是否删除')
product_id = models.BigIntegerField(verbose_name=u'商品ID')
category_id = models.BigIntegerField(verbose_name=u'分类ID')
class CommodityProductEffect(models.Model):
"""画报关注标签"""
class Meta:
verbose_name = u"画报标签"
app_label = "commodity"
db_table = "commodity_producteffect"
id = models.IntegerField(verbose_name=u'关注ID', primary_key=True)
is_deleted = models.BooleanField(verbose_name=u'是否删除')
product_id = models.BigIntegerField(verbose_name=u'商品ID')
effect_id = models.BigIntegerField(verbose_name=u'功效ID')
...@@ -6,12 +6,10 @@ from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
-import datetime
from libs.es import ESPerform
from django.db import models
+import datetime
from alpha_types.venus import GRAP_PLATFORM

from .pick_topic import PickTopic
from .tag import TopicTag, Tag
from .user_extra import UserExtra
...@@ -321,18 +319,22 @@ class Topic(models.Model):
            datetime_list = list()
            if reply_query_results.count() > 0:
                for reply in reply_query_results:
-                    UserExtra_results = UserExtra.objects.using(settings.SLAVE_DB_NAME).filter(
-                        user_id=reply['user_id']).values('is_shadow')
-                    if UserExtra_results.count() > 0:
-                        if not UserExtra_results[0]['is_shadow']:
-                            datetime_list.append(reply['create_time'])
-                dt = max(datetime_list)
-                return datetime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
-                                         tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
+                    UserExtra_results = UserExtra.objects.using(settings.SLAVE_DB_NAME).filter(user_id=reply['user_id']).values('is_shadow')
+                    if UserExtra_results.count()>0:
+                        if not UserExtra_results[0]['is_shadow']:
+                            datetime_list.append(reply['create_time'])
+                if len(datetime_list)>0:
+                    dt = max(datetime_list)
+                    return datetime.datetime(dt.year,dt.month,dt.day,dt.hour, dt.minute, dt.second,tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
+                else:
+                    dt = self.create_time
+                    return datetime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
+                                             tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
            else:
-                return datetime.datetime(1980, 1, 1, 0, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
+                dt = self.create_time
+                return datetime.datetime(dt.year,dt.month,dt.day,dt.hour, dt.minute, dt.second,tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
        except:
-            return datetime.datetime(1980, 1, 1, 0, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
+            return datetime.datetime(1980,1,1,0,0,0,tzinfo=datetime.timezone(datetime.timedelta(hours=8)))


class TopicComplaint(models.Model):
...
...@@ -14,7 +14,7 @@ import sys
from libs.cache import redis_client
import copy
-from trans2es.models import topic, user, pick_celebrity, group, celebrity, tag, contrast_similar,pictorial
+from trans2es.models import topic, user, pick_celebrity, group, celebrity, tag, contrast_similar, pictorial, product
from trans2es.utils.user_transfer import UserTransfer
from trans2es.utils.pick_celebrity_transfer import PickCelebrityTransfer
from trans2es.utils.group_transfer import GroupTransfer
...@@ -24,7 +24,7 @@ from trans2es.utils.pictorial_transfer import PictorialTransfer
from trans2es.utils.celebrity_transfer import CelebrityTransfer
from trans2es.utils.tag_transfer import TagTransfer
from trans2es.utils.contrast_similar_transfer import Contrast_Similar_Transfer
+from trans2es.utils.product_transfer import ProductTransfer

__es = None
...@@ -160,12 +159,11 @@ class TypeInfo(object):
                    old_data["is_history"] = True
                    data_list.append(old_data)
-                    if int_ori_topic_star>=4:
+                    if int_ori_topic_star >= 4:
                        topic_data_high_star_list.append(old_data)
                    redis_client.hset(self.physical_topic_star, data["id"], data["content_level"])

-                if data["content_level"] and int(data["content_level"])>=4:
+                if data["content_level"] and int(data["content_level"]) >= 4:
                    topic_data_high_star_list.append(data)
            elif self.type == "tag" or self.type == "tag_v1":
                (res, begin_res) = data
...@@ -174,7 +173,7 @@ class TypeInfo(object):
            else:
                data_list.append(data)

-        return (data_list,topic_data_high_star_list)
+        return (data_list, topic_data_high_star_list)
    def elasticsearch_bulk_insert_data(self, sub_index_name, data_list, es=None):
...@@ -217,17 +216,17 @@ class TypeInfo(object):
        else:
            qs = self.model.objects.all()
        end = time.time()
-        time0=end-begin
+        time0 = end - begin

        begin = time.time()
        instance_list = qs.filter(pk__in=pk_list)
        end = time.time()
-        time1=end-begin
+        time1 = end - begin

        begin = time.time()
        data_list, topic_data_high_star_list = self.bulk_get_data(instance_list)
        end = time.time()
-        time2=end-begin
+        time2 = end - begin

        begin = time.time()
        # logging.info("get sub_index_name:%s"%sub_index_name)
...@@ -239,7 +238,7 @@ class TypeInfo(object):
            es=es,
        )

-        if sub_index_name=="topic":
+        if sub_index_name == "topic":
            self.elasticsearch_bulk_insert_data(
                sub_index_name="topic-star-routing",
                data_list=data_list,
...@@ -247,7 +246,7 @@ class TypeInfo(object):
        )

        # 同时写4星及以上的帖子
-        if len(topic_data_high_star_list)>0:
+        if len(topic_data_high_star_list) > 0:
            self.elasticsearch_bulk_insert_data(
                sub_index_name="topic-high-star",
                data_list=topic_data_high_star_list,
...@@ -255,9 +254,8 @@ class TypeInfo(object):
        )

        end = time.time()
-        time3=end-begin
-        logging.info("duan add,insert_table_by_pk_list time cost:%ds,%ds,%ds,%ds" % (time0,time1,time2,time3))
+        time3 = end - begin
+        logging.info("duan add,insert_table_by_pk_list time cost:%ds,%ds,%ds,%ds" % (time0, time1, time2, time3))

    def insert_table_chunk(self, sub_index_name, table_chunk, es=None):
        try:
...@@ -279,7 +277,7 @@ class TypeInfo(object):
                auto_create_index=True
            )

-            logging.info("es_helpers_bulk,sub_index_name:%s,data_list len:%d" % (sub_index_name,len(data_list)))
+            logging.info("es_helpers_bulk,sub_index_name:%s,data_list len:%d" % (sub_index_name, len(data_list)))

            stage_3_time = time.time()
            end_clock = time.clock()
...@@ -317,8 +315,8 @@ def get_type_info_map():
            name='topic-star',
            type='topic-star',
            model=topic.Topic,
-            query_deferred=lambda: topic.Topic.objects.all().query,#假的
-            get_data_func=TopicTransfer.get_topic_data,#假的
+            query_deferred=lambda: topic.Topic.objects.all().query,  # 假的
+            get_data_func=TopicTransfer.get_topic_data,  # 假的
            bulk_insert_chunk_size=100,
            round_insert_chunk_size=5,
            round_insert_period=2,
...@@ -467,7 +465,7 @@ def get_type_info_map():
            bulk_insert_chunk_size=100,
            round_insert_chunk_size=5,
            round_insert_period=2,
-        )
+        ),
        # TypeInfo(
        #     name="account_user_tag",  # 用户标签
        #     type="account_user_tag",
...@@ -478,6 +476,16 @@ def get_type_info_map():
        #     round_insert_chunk_size=5,
        #     round_insert_period=2,
        # )
TypeInfo(
name="product", # 商品
type="product",
model=product.CommodityProduct,
query_deferred=lambda: product.CommodityProduct.objects.all().query,
get_data_func=ProductTransfer.get_product_data,
bulk_insert_chunk_size=100,
round_insert_chunk_size=5,
round_insert_period=2,
)
    ]

    type_info_map = {
...@@ -487,4 +495,3 @@ def get_type_info_map():
    _get_type_info_map_result = type_info_map

    return type_info_map
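Conceptually (not literal code from this file), the new entry wires CommodityProduct rows into the same bulk pipeline as the other types: rows are fetched, converted by the registered get_data_func, and bulk-written to the matching sub index. A simplified sketch, where the map key and the TypeInfo attribute names are assumed from the constructor arguments above:

# Simplified sketch; attribute and key names are assumptions, not copied code.
info = get_type_info_map()["product"]
instances = info.model.objects.all()[:100]               # CommodityProduct rows
docs = [info.get_data_func(obj) for obj in instances]    # ProductTransfer.get_product_data
info.elasticsearch_bulk_insert_data(sub_index_name="product", data_list=docs)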
...@@ -68,6 +68,8 @@ class PictorialTransfer(object):
            res["offline_score"] = cls.get_offline_score(instance, res["topic_id_list"])
            res["is_default"] = instance.is_default
            res["is_cover"] = instance.get_is_cover(res["topic_id_list"])
+            res["topic_vote_number"] = instance.get_topic_vote_number()
+            res["activity_join"] = instance.get_activity_join()

            return res
        except:
            logging.error("catch exception,err_msg:%s" % traceback.format_exc())
...
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging
import traceback
import time
from libs.tools import tzlc
from pypinyin import lazy_pinyin
class ProductTransfer(object):
@classmethod
def get_product_data(cls, instance):
try:
res = dict()
res["id"] = instance.id
res["is_online"] = instance.is_online
res["is_deleted"] = instance.is_deleted
res["create_time"] = tzlc(instance.create_time)
res["update_time"] = tzlc(instance.update_time)
res["create_time_val"] = int(time.mktime(instance.create_time.timetuple()))
res["update_time_val"] = int(time.mktime(instance.update_time.timetuple()))
res["price"] = instance.price
res["cn_name_sort"] = ''
for i in lazy_pinyin(instance.cn_name):
res["cn_name_sort"] += str(i[0])
res["cn_name_pre"] = instance.cn_name
res["en_name_pre"] = instance.en_name
res["alias"] = instance.alias
res["cn_name"] = instance.cn_name
res["en_name"] = instance.en_name
res["alias_pre"] = instance.alias
res['description'] = instance.description
res["have_image"] = True if instance.image else False
res["comment_nums"] = instance.comment_nums
result_name = instance.get_brand_name()
if result_name:
res["brand_cn_name"] = result_name.get("cn_name", "")
res["brand_en_name"] = result_name.get("en_name", "")
res["brand_alias"] = result_name.get("alias", "")
res["brand_cn_name_pre"] = result_name.get("cn_name", "")
res["brand_en_name_pre"] = result_name.get("en_name", "")
res["brand_alias_pre"] = result_name.get("alias", "")
else:
res["brand_cn_name"] = ""
res["brand_en_name"] = ""
res["brand_alias"] = ""
res["brand_cn_name_pre"] = ""
res["brand_en_name_pre"] = ""
res["brand_alias_pre"] = ""
category_cn_name = instance.get_category_en_name()
if category_cn_name:
res["category_cn_name"] = category_cn_name
res["category_cn_name_pre"] = category_cn_name
else:
res["category_cn_name"] = []
res["category_cn_name_pre"] = []
effect_cn_name = instance.get_effect_cn_name()
if effect_cn_name:
res['effect_cn_name'] = effect_cn_name
res['effect_cn_name_pre'] = effect_cn_name
else:
res["effect_cn_name"] = []
res["effect_cn_name_pre"] = []
logging.info("get product:%s" % res)
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return dict()
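Illustration (not part of the commit) of the cn_name_sort derivation above: the first letter of each pinyin syllable, concatenated. The product name is a made-up sample:

from pypinyin import lazy_pinyin

cn_name = u"兰蔻小黑瓶"                      # made-up sample product name
cn_name_sort = "".join(s[0] for s in lazy_pinyin(cn_name))
# lazy_pinyin -> ['lan', 'kou', 'xiao', 'hei', 'ping'], so the sort key is "lkxhp"
# (assuming pypinyin's default dictionary for these characters).
assert cn_name_sort == "lkxhp"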