Commit 414af3cf authored by lixiaofang

type_info

parent 68b92d75
@@ -14,7 +14,7 @@ import sys
from libs.cache import redis_client
import copy
from trans2es.models import topic, user, pick_celebrity, group, celebrity, tag, contrast_similar, pictorial
from trans2es.models import topic, user, pick_celebrity, group, celebrity, tag, contrast_similar,pictorial
from trans2es.utils.user_transfer import UserTransfer
from trans2es.utils.pick_celebrity_transfer import PickCelebrityTransfer
from trans2es.utils.group_transfer import GroupTransfer
@@ -196,7 +196,7 @@ class TypeInfo(object):
old_data["is_history"] = True
data_list.append(old_data)
if int_ori_topic_star >= 4:
if int_ori_topic_star>=4:
topic_data_high_star_list.append(old_data)
redis_client.hset(self.physical_topic_star, data["id"], data["content_level"])
# data_list = [
@@ -207,11 +207,12 @@ class TypeInfo(object):
# ]
# ESPerform.es_helpers_bulk(ESPerform.get_cli(), data_list, "topic-star")
if data["content_level"] and int(data["content_level"]) >= 4:
if data["content_level"] and int(data["content_level"])>=4:
topic_data_high_star_list.append(data)
data_list.append(data)
return (data_list, topic_data_high_star_list)
return (data_list,topic_data_high_star_list)
def elasticsearch_bulk_insert_data(self, sub_index_name, data_list, es=None):
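
The bulk_get_data hunk above keeps two result lists: every serialized document goes into data_list, while posts whose content_level is 4 or higher are also collected into topic_data_high_star_list, and the current star level is cached in a redis hash keyed by topic id. A minimal standalone sketch of that split (the serialization and field names are stand-ins, not the project's exact transfer code):

    def split_by_star(instances, redis_client, star_hash_key="physical:topic_star"):
        """Serialize each instance; posts rated 4 stars and above also go to a high-star list."""
        data_list = []
        topic_data_high_star_list = []
        for obj in instances:
            # stand-in for the real TopicTransfer serialization
            data = {"id": obj.id, "content_level": obj.content_level}
            data_list.append(data)
            if data["content_level"] and int(data["content_level"]) >= 4:
                topic_data_high_star_list.append(data)
            # cache the latest star level so later runs can compare against history
            redis_client.hset(star_hash_key, data["id"], data["content_level"])
        return data_list, topic_data_high_star_list
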
@@ -254,17 +255,17 @@ class TypeInfo(object):
else:
qs = self.model.objects.all()
end = time.time()
time0 = end - begin
time0=end-begin
begin = time.time()
instance_list = qs.filter(pk__in=pk_list)
end = time.time()
time1 = end - begin
time1=end-begin
begin = time.time()
data_list, topic_data_high_star_list = self.bulk_get_data(instance_list)
end = time.time()
time2 = end - begin
time2=end-begin
begin = time.time()
# logging.info("get sub_index_name:%s"%sub_index_name)
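
The begin = time.time() / timeN = end - begin bookkeeping repeated through this hunk could also be expressed as a small context manager; a sketch of that pattern, not the commit's own code:

    import time
    from contextlib import contextmanager

    @contextmanager
    def timed(results, label):
        """Record the wall-clock seconds spent in the wrapped block under results[label]."""
        begin = time.time()
        try:
            yield
        finally:
            results[label] = time.time() - begin

    # usage sketch
    timings = {}
    with timed(timings, "query"):
        time.sleep(0.01)  # stands in for qs.filter(pk__in=pk_list) from the hunk above
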
@@ -276,7 +277,7 @@ class TypeInfo(object):
es=es,
)
if sub_index_name == "topic":
if sub_index_name=="topic":
self.elasticsearch_bulk_insert_data(
sub_index_name="topic-star-routing",
data_list=data_list,
@@ -284,7 +285,7 @@ class TypeInfo(object):
)
# also write posts rated 4 stars and above at the same time
if len(topic_data_high_star_list) > 0:
if len(topic_data_high_star_list)>0:
self.elasticsearch_bulk_insert_data(
sub_index_name="topic-high-star",
data_list=topic_data_high_star_list,
@@ -292,8 +293,9 @@ class TypeInfo(object):
)
end = time.time()
time3 = end - begin
logging.info("duan add,insert_table_by_pk_list time cost:%ds,%ds,%ds,%ds" % (time0, time1, time2, time3))
time3=end-begin
logging.info("duan add,insert_table_by_pk_list time cost:%ds,%ds,%ds,%ds" % (time0,time1,time2,time3))
def insert_table_chunk(self, sub_index_name, table_chunk, es=None):
try:
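
Taken together, the two hunks above make insert_table_by_pk_list fan a single batch out to several indices: every topic batch is also written to topic-star-routing, the 4-star-and-up subset additionally goes to topic-high-star, and the four phase timings are logged at the end. A condensed sketch of that fan-out (bulk_insert stands in for the elasticsearch_bulk_insert_data method shown in the diff; the rest of the names are illustrative):

    def fan_out_insert(bulk_insert, sub_index_name, data_list, high_star_list, es=None):
        # primary index for this type
        bulk_insert(sub_index_name=sub_index_name, data_list=data_list, es=es)
        if sub_index_name == "topic":
            # every topic batch is also written to the routed star index
            bulk_insert(sub_index_name="topic-star-routing", data_list=data_list, es=es)
            # posts rated 4 stars and above get an extra copy in the high-star index
            if len(high_star_list) > 0:
                bulk_insert(sub_index_name="topic-high-star", data_list=high_star_list, es=es)
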
@@ -315,7 +317,7 @@ class TypeInfo(object):
auto_create_index=True
)
logging.info("es_helpers_bulk,sub_index_name:%s,data_list len:%d" % (sub_index_name, len(data_list)))
logging.info("es_helpers_bulk,sub_index_name:%s,data_list len:%d" % (sub_index_name,len(data_list)))
stage_3_time = time.time()
end_clock = time.clock()
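
One note on the timing in this hunk: time.clock() was deprecated in Python 3.3 and removed in 3.8, so end_clock = time.clock() fails on current interpreters; time.perf_counter() is the standard replacement when only relative durations are needed. A minimal sketch under that assumption:

    import time

    start_clock = time.perf_counter()   # replaces time.clock(), which was removed in Python 3.8
    # ... bulk insert work ...
    end_clock = time.perf_counter()
    elapsed = end_clock - start_clock   # only the difference is meaningful
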
@@ -353,8 +355,8 @@ def get_type_info_map():
name='topic-star',
type='topic-star',
model=topic.Topic,
query_deferred=lambda: topic.Topic.objects.all().query, # fake
get_data_func=TopicTransfer.get_topic_data, # fake
query_deferred=lambda: topic.Topic.objects.all().query,#fake
get_data_func=TopicTransfer.get_topic_data,#fake
bulk_insert_chunk_size=100,
round_insert_chunk_size=5,
round_insert_period=2,
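
In the topic-star entry above, query_deferred is a lambda so that the queryset's SQL object is built when the indexer calls it rather than when get_type_info_map() runs, and the # fake comments mark that this entry simply reuses the Topic model's query and transfer function. A short sketch of the deferral (assumes the project's Django settings and the imports at the top of this file):

    from trans2es.models import topic  # as in the imports hunk above

    # nothing touches the ORM until the indexer actually calls query_deferred()
    query_deferred = lambda: topic.Topic.objects.all().query
    sql_query = query_deferred()  # a fresh django.db.models.sql.Query on each call
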
@@ -523,3 +525,4 @@ def get_type_info_map():
_get_type_info_map_result = type_info_map
return type_info_map
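
The last hunk stores the finished map in the module-level _get_type_info_map_result before returning it, so later calls to get_type_info_map() can reuse the cached dict, presumably via an early-return guard at the top of the function that this hunk does not show. The same memoization pattern in isolation (build_entry is a hypothetical stand-in for constructing a TypeInfo entry):

    _result_cache = None   # module-level cache, mirroring _get_type_info_map_result

    def build_entry(name):
        # stand-in for constructing a TypeInfo(...) entry as in the hunks above
        return {"name": name}

    def get_map():
        global _result_cache
        if _result_cache is not None:
            return _result_cache  # already built: reuse the cached dict
        type_info_map = {name: build_entry(name) for name in ("topic", "topic-star")}
        _result_cache = type_info_map
        return type_info_map
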