Commit 6d93d07a authored by litaolemo

update

parent 2494f893
@@ -205,20 +205,19 @@ def single_thead(processe,name):
     for single_data in crawler.releaser_page_by_time(start_time=start_time,
                                                       end_time=int(now.timestamp() * 1e3), url=releaser_body["releaserUrl"],
                                                       allow=allow,proxies_num=proxies_num,**kwargs_dict):
-        print("111",single_data)
         count_has = True
         video_time = single_data.get("release_time")
         if video_time:
             if start_time < video_time:
                 if video_time < end_time:
-                    rds_1.hset("weibo", key=single_data["doc_id"], value=json.dumps(single_data))
+                    rds_save.hset(platform, key=single_data["doc_id"], value=json.dumps(single_data))
                     # data_list.append(single_data)
                 else:
                     count_false += 1
                     if count_false > allow*3:
                         break
             else:
-                rds_save.hset("weibo", key=single_data["doc_id"], value=json.dumps(single_data))
+                rds_save.hset(platform, key=single_data["doc_id"], value=json.dumps(single_data))
     # if len(data_list) >= 100:
     #     output_result(result_Lst=data_list,
     #                   platform=platform,
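For context, the change above drops a debug print and replaces the hard-coded "weibo" hash name (and the rds_1 client in one branch) with the platform variable when persisting crawled documents. Below is a minimal sketch of that pattern, assuming rds_save is a redis.Redis client and single_data carries a doc_id; the connection parameters and the save_doc helper are illustrative, not taken from the repository:

import json
import redis

# Illustrative connection; host/port/db are placeholders, not from the commit.
rds_save = redis.Redis(host="127.0.0.1", port=6379, db=0, decode_responses=True)

def save_doc(platform, single_data):
    # Hypothetical helper: store one crawled document in a per-platform Redis hash.
    # Keying the hash by `platform` instead of the literal "weibo" lets every
    # crawler process write into its own hash, which is what the diff changes.
    rds_save.hset(platform, single_data["doc_id"], json.dumps(single_data))

save_doc("weibo", {"doc_id": "4532881230001", "release_time": 1598000000000})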