Commit c46b9ff4 authored by lixiaofang

add

parent af213e1d
@@ -4,42 +4,46 @@
import time
import logging
class GroupSortTypes(object):
    # sort by popularity (hot)
    HOT_RECOMMEND=0
    HOT_RECOMMEND = 0
    # sort by followed content
    ATTENTION_RECOMMEND=1
    ATTENTION_RECOMMEND = 1
class PickType(object):
    # celebrity leaderboard
    CELEBRITY_PICK=0
    CELEBRITY_PICK = 0
    # topic leaderboard
    TOPIC_PICK=1
    TOPIC_PICK = 1
class TopicDocumentField(object):
    """
    Topic index field information
    """
    ID="id",
    ID = "id",
    IS_ONLINE = "is_online",
    TAG_LIST = "tag_list"
class TopicPageType(object):
    # home page recommendation
    HOME_RECOMMEND=1
    HOME_RECOMMEND = 1
    # discover page
    FIND_PAGE=2
    FIND_PAGE = 2
def time_consuming_decorator(func):
    def time_consuming(*args, **kwargs):
        start_time = time.time()
        func(*args,**kwargs)
        func(*args, **kwargs)
        end_time = time.time()
        logging.info("func consuming time:%fs" % (end_time-start_time))
        logging.info("func consuming time:%fs" % (end_time - start_time))
    return time_consuming
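A side note on the timing decorator in this hunk: as written, the inner time_consuming wrapper calls func but discards its return value and metadata. A minimal sketch of a variant that preserves both (using functools.wraps; an editor's illustration, not part of this commit):

import functools
import logging
import time


def time_consuming_decorator(func):
    @functools.wraps(func)  # keep the wrapped function's name and docstring
    def time_consuming(*args, **kwargs):
        start_time = time.time()
        result = func(*args, **kwargs)  # capture the result instead of dropping it
        end_time = time.time()
        logging.info("func consuming time:%fs" % (end_time - start_time))
        return result
    return time_consuming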
@@ -59,8 +59,7 @@ def pictorial_sort(user_id=-1, sort_type=GroupSortTypes.HOT_RECOMMEND, offset=0,
        redis_tag_data = redis_client.hget("physical:linucb:register_user_tag_info", user_id)
        attention_tag_list = json.loads(redis_tag_data) if redis_tag_data else []
        pictorial_ids_list = GroupUtils.get_hot_pictorial_recommend_result_list(offset, size, es_cli_obj,
                                                                                attention_tag_list)
        pictorial_ids_list = GroupUtils.get_hot_pictorial_recommend_result_list(offset, size, es_cli_obj, attention_tag_list)
        return {"pictorial_recommend_ids": pictorial_ids_list}
    elif sort_type == GroupSortTypes.ATTENTION_RECOMMEND:
@@ -372,7 +372,8 @@ def topic_detail_page_recommend(device_id="", user_id=-1, topic_id=-1, topic_pic
    if len(topic_tag_list) != 0:
        topic_tag_result = TopicUtils.top_get_topic_detail_recommend_list(user_id, topic_id, have_read_topic_list,
                                                                          size, es_cli_obj,
                                                                          index_type="topic-test", routing="3,4,5,6",
                                                                          index_type="topic-test",
                                                                          routing="3,4,5,6",
                                                                          topic_tag_list=topic_tag_list)
        topic_tag_size = len(topic_tag_result)
        have_read_topic_list.extend(topic_tag_result)
@@ -498,7 +499,8 @@ def query_topic_by_user_similarity(topic_similarity_score_dict, offset=0, size=1
    must_topic_id_list = list(topic_similarity_score_dict.keys())
    topic_id_list = TopicUtils.get_recommend_topic_ids(tag_id=0, user_id=-1, offset=offset, size=size,
                                                       single_size=size,
                                                       must_topic_id_list=must_topic_id_list, index_type="topic-test",
                                                       must_topic_id_list=must_topic_id_list,
                                                       index_type="topic-test",
                                                       routing="4,5,6")
    return {"recommend_topic_ids": topic_id_list}
@@ -14,7 +14,7 @@ import sys
from libs.cache import redis_client
import copy
from trans2es.models import topic, user, pick_celebrity, group, celebrity, tag, contrast_similar,pictorial
from trans2es.models import topic, user, pick_celebrity, group, celebrity, tag, contrast_similar, pictorial
from trans2es.utils.user_transfer import UserTransfer
from trans2es.utils.pick_celebrity_transfer import PickCelebrityTransfer
from trans2es.utils.group_transfer import GroupTransfer
@@ -196,7 +196,7 @@ class TypeInfo(object):
                old_data["is_history"] = True
                data_list.append(old_data)
                if int_ori_topic_star>=4:
                if int_ori_topic_star >= 4:
                    topic_data_high_star_list.append(old_data)
                redis_client.hset(self.physical_topic_star, data["id"], data["content_level"])
            # data_list = [
@@ -207,12 +207,11 @@ class TypeInfo(object):
            # ]
            # ESPerform.es_helpers_bulk(ESPerform.get_cli(), data_list, "topic-star")
            if data["content_level"] and int(data["content_level"])>=4:
            if data["content_level"] and int(data["content_level"]) >= 4:
                topic_data_high_star_list.append(data)
            data_list.append(data)
        return (data_list,topic_data_high_star_list)
        return (data_list, topic_data_high_star_list)
    def elasticsearch_bulk_insert_data(self, sub_index_name, data_list, es=None):
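The hunk above also records each topic's content_level in a Redis hash (redis_client.hset(self.physical_topic_star, ...)), keyed by topic id. A minimal sketch of that hash write/read pattern with redis-py (hypothetical hash name standing in for self.physical_topic_star):

import redis

redis_client = redis.StrictRedis(host="127.0.0.1", port=6379)
# One field per topic id inside a single hash, mirroring the hset call above.
redis_client.hset("physical:topic_star", "123", "4")
star = redis_client.hget("physical:topic_star", "123")  # returns b"4", or None if absent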
@@ -255,17 +254,17 @@ class TypeInfo(object):
        else:
            qs = self.model.objects.all()
        end = time.time()
        time0=end-begin
        time0 = end - begin
        begin = time.time()
        instance_list = qs.filter(pk__in=pk_list)
        end = time.time()
        time1=end-begin
        time1 = end - begin
        begin = time.time()
        data_list, topic_data_high_star_list = self.bulk_get_data(instance_list)
        end = time.time()
        time2=end-begin
        time2 = end - begin
        begin = time.time()
        # logging.info("get sub_index_name:%s"%sub_index_name)
@@ -277,7 +276,7 @@ class TypeInfo(object):
            es=es,
        )
        if sub_index_name=="topic":
        if sub_index_name == "topic":
            self.elasticsearch_bulk_insert_data(
                sub_index_name="topic-star-routing",
                data_list=data_list,
@@ -285,7 +284,7 @@
            )
        # also write topics rated 4 stars and above at the same time
        if len(topic_data_high_star_list)>0:
        if len(topic_data_high_star_list) > 0:
            self.elasticsearch_bulk_insert_data(
                sub_index_name="topic-high-star",
                data_list=topic_data_high_star_list,
@@ -293,9 +292,8 @@
            )
        end = time.time()
        time3=end-begin
        logging.info("duan add,insert_table_by_pk_list time cost:%ds,%ds,%ds,%ds" % (time0,time1,time2,time3))
        time3 = end - begin
        logging.info("duan add,insert_table_by_pk_list time cost:%ds,%ds,%ds,%ds" % (time0, time1, time2, time3))
    def insert_table_chunk(self, sub_index_name, table_chunk, es=None):
        try:
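The time0..time3 measurements above follow a repeated begin/end pattern, and the %d conversions in the log line truncate the float durations to whole seconds. One alternative (an editor's sketch, not part of this commit) is a small timing context manager:

import logging
import time
from contextlib import contextmanager


@contextmanager
def timed(label):
    # Log the elapsed wall-clock time of the wrapped block.
    start = time.time()
    try:
        yield
    finally:
        logging.info("%s cost: %fs", label, time.time() - start)


# usage sketch:
# with timed("filter pk_list"):
#     instance_list = qs.filter(pk__in=pk_list)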
@@ -317,7 +315,7 @@ class TypeInfo(object):
                auto_create_index=True
            )
            logging.info("es_helpers_bulk,sub_index_name:%s,data_list len:%d" % (sub_index_name,len(data_list)))
            logging.info("es_helpers_bulk,sub_index_name:%s,data_list len:%d" % (sub_index_name, len(data_list)))
            stage_3_time = time.time()
            end_clock = time.clock()
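A note on the surrounding timing code: time.clock() (visible just below the changed logging line) was deprecated in Python 3.3 and removed in Python 3.8; on current interpreters the equivalent measurement would use time.perf_counter():

import time

start = time.perf_counter()
# ... bulk indexing work ...
elapsed = time.perf_counter() - start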
@@ -355,8 +353,8 @@ def get_type_info_map():
        name='topic-star',
        type='topic-star',
        model=topic.Topic,
        query_deferred=lambda: topic.Topic.objects.all().query,#fake (placeholder)
        get_data_func=TopicTransfer.get_topic_data,#fake (placeholder)
        query_deferred=lambda: topic.Topic.objects.all().query,  # fake (placeholder)
        get_data_func=TopicTransfer.get_topic_data,  # fake (placeholder)
        bulk_insert_chunk_size=100,
        round_insert_chunk_size=5,
        round_insert_period=2,
@@ -525,4 +523,3 @@ def get_type_info_map():
    _get_type_info_map_result = type_info_map
    return type_info_map
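The last hunk shows get_type_info_map storing its result in the module-level _get_type_info_map_result before returning it, i.e. a build-once cache. A simplified sketch of that memoization pattern (hypothetical body; the real function registers TypeInfo entries):

_get_type_info_map_result = None


def get_type_info_map():
    # Build the map on first call, then reuse the cached module-level result.
    global _get_type_info_map_result
    if _get_type_info_map_result is not None:
        return _get_type_info_map_result
    type_info_map = {}  # hypothetical: real code fills this with TypeInfo(...) entries
    _get_type_info_map_result = type_info_map
    return type_info_map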