"""日记本统计信息vote_count初始化。"""
import math
import time
from multiprocessing import Pool, Queue, Manager
from django.core.management.base import BaseCommand
from django.db.models import Q, Max, Count
from django import db
from talos.models.diary import DiaryExtra
def update_extra_info(queue, limit):
    """Update the statistics of up to ``limit`` diaries.

    The starting diary id is taken from the queue; after selecting its batch,
    each worker puts the largest diary id it processed back into the queue so
    the next worker can continue from there.
    """
    start_id = queue.get()
    # Order by the diary primary key so "pk > start_id" batching is deterministic.
    diary_extras = DiaryExtra.objects.filter(
        Q(diary__pk__gt=start_id)
    ).order_by('diary_id')[:limit]
    max_id = diary_extras.aggregate(max_id=Max('diary_id'))['max_id']
    if max_id is None:
        # Nothing left past start_id; hand the id back so the remaining
        # workers can also exit without blocking on an empty queue.
        queue.put(start_id)
        return
    queue.put(max_id)
    for extra_info in diary_extras:
        diary = extra_info.diary
        extra_info.vote_count = diary.vote_num
        extra_info.total_pv = diary.view_num
        extra_info.reply_count = diary.reply_num
        extra_info.topic_count = diary.topic_num
        extra_info.save()
class Command(BaseCommand):
    def handle(self, *args, **options):
        """Fully refresh the statistics stored in api_diary_extra."""
        print('------ starting -----')
        start_time = time.time()
        print("start at: ", start_time)
        queue = Manager().Queue(maxsize=4)
        queue.put(0)  # seed the queue so the first worker starts from id 0
        args_list = []
        per_num = 500
        count = DiaryExtra.objects.count()
        cnt = math.ceil(count / per_num)
        for _ in range(cnt):
            args_list.append((queue, per_num))
        # Close inherited DB connections so each forked worker opens its own.
        db.connections.close_all()
        pool = Pool(processes=4)
        pool.starmap(update_extra_info, args_list)
        pool.close()
        pool.join()
        end_time = time.time()
        print("end at: ", end_time)
        print('total time: {} s.'.format(end_time - start_time))
        print('Done!')
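
# Usage sketch. The command name depends on the filename under
# <app>/management/commands/, which is not shown here; "init_diary_extra"
# below is only an assumed example name:
#
#   python manage.py init_diary_extra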