Project: 郭羽 / serviceRec
Commit e5b1c88a, authored Aug 23, 2021 by 郭羽
get redis keys test
parent bec7bcab
Showing 1 changed file with 495 additions and 261 deletions

test/get_redis_keys_hash.py (+495, -261), view file @ e5b1c88a
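# get_redis_keys_hash.py
# Scratch script for auditing Redis keys across several instances: it SCANs
# keys by pattern to count them, pushes a 3-day TTL onto read-history keys,
# and collects the set-type keys that have no TTL.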
import redis
import json
import time
import pymysql
# from rediscluster import StrictRedisCluster

def getRedisConn():
    # Plain connection to the unauthenticated test instance.
    conn = redis.Redis(host="172.18.51.10", port=6379, db=0)
    # conn.execute_command()  # disabled: execute_command() needs a command name and would raise here
    return conn

def getRedisConn1():
    pool = redis.ConnectionPool(host="172.16.40.133", password="ReDis!GmTx*0aN6", port=6379, db=0)
    conn = redis.Redis(connection_pool=pool)
    return conn

def getRedisConn2():
    pool = redis.ConnectionPool(host="172.16.50.145", password="XfkMCCdWDIU%ls$h", port=6379, db=0)
    conn = redis.Redis(connection_pool=pool)
    return conn

def getRedisConn3():
    pool = redis.ConnectionPool(host="172.16.40.164", password="ReDis!GmTx*0aN12", port=6379, db=0)
    conn = redis.Redis(connection_pool=pool)
    return conn

def getRedisConn4():
    # conn = StrictRedisCluster(host="172.16.50.145", password="XfkMCCdWDIU%ls$h", port=6379, decode_responses=True)
    pool = redis.ConnectionPool(host="172.16.50.145", password="XfkMCCdWDIU%ls$h", port=6379, db=0)
    conn = redis.Redis(connection_pool=pool)
    return conn

# def getRedisConn5():
#     pool = redis.ConnectionPool(host="172.16.50.159", password="XfkMCCdWDIU%ls$h3", port=6379, db=0)
#     conn = redis.Redis(connection_pool=pool)
#     return conn

def getDeviceIds():
    ids_set = set()
    db = pymysql.connect(host='172.16.30.136', port=3306, user='doris_olap', passwd='bA27hXasdfswuolap',
                         db='doris_olap')
    sql = "select distinct cl_id from user_tag3_portrait;"
    cursor = db.cursor()
    cursor.execute(sql)
    datas = cursor.fetchall()
    for d in datas:
        device_id = str(d[0])
        if device_id and len(device_id) > 0:
            ids_set.add(str(d[0]))
    print("deviceIds size:{}".format(str(len(ids_set))))
    return ids_set

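# redis2(): cursor-based SCAN on the conn2 instance, counting the keys that match a single pattern.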
def redis2():
    sum = 0
    conn = getRedisConn2()
    # Patterns tried earlier (observed key counts in the trailing comments):
    # key = "feed:recommend:by:user:portrait*"
    # key = "doris:user_portrait:tag3:candidate_latest_dict:*"
    # key = "doris:user_portrait:tag3:candidate_list:*"  # 1
    # key = "device:latest:action:tag:names:update:*"  # 1831254
    # key = "device:latest:action:tags:update:*"  # 1698541
    # key = "feed:recommend:device_id:*"  # 1895536  @bind('doris/recommend/feed')
    # key = "feed:recommend:topic:device_id:*"  # 995546  @bind('doris/recommend/feed')
    # key = "user:service_portrait_tags2:cl_id:*"  # 21830
    # key = "doris:aichannel_smart_rank:tag3:read:device_id:*"  # 727
    # key = "doris:user_portrait:tag3:read_v2:device_id:*"  # 1658011, expiration time setting
    key = "aaa:diary*"  # 5
    cursor = 0
    keys_map = {}
    while True:
        del_datas = set()
        cursor, data = conn.scan(cursor=cursor, match=key, count=10000)
        for d in data:
            if len(d) > len(key) - 2:
                del_datas.add(str(d, encoding='utf-8'))
        sum += len(data)
        print(cursor, len(data), sum)
        if cursor == 0:
            break

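# redis1(): for each read-history key prefix in keys, SCAN the conn1 instance
# and set a 3-day TTL on every matching key.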
def redis1():
    keys = [
        'doris:query_compre_question_id:have_read_list',
        'query_wiki_have_read:',
        'doris:query_compre_answer:have_read_list:',
        'doris_feed:doctor_have_read:device_id:',
        'have_read_live_playback_device_id',
        'doris_feed:ganhuo_have_read:device_id:',
        'doris_feed:home_video_diary:device_id:',
        'doris_feed:home_video_answer:device_id:',
        'doris_feed:home_video_tractate:device_id:',
        'doris_feed:good_look_have_read:device_id:',
        'doris_feed:ganhuo_have_read:device_id:',
        'doris_feed:bendi_have_read:device_id:',
        'service:homepage_slide_tab:have_read:sku_ids:',
        'doris_feed:home_recommend_diary_v1:device_id:',
        'search:query_type:word:',
        'doris_search:',
        'servicehot:',
        'interest:',
        'device:',
        'gaia:hot_tractate_keyword:city_id:',
        'gaia:hot_wiki_keyword:city_id:',
    ]
    for key_k in keys:
        key = key_k + '*'
        sum = 0
        conn = getRedisConn1()
        pipline = conn.pipeline()
        # key = "have_reply_answer_comment*"  # 139285
        # key = "device_register_qa_read_set:869574031646601*"  # 139285
        # key = "doris_feed:*"  # 139285
        cursor = 0
        key_sum = 0
        while True:
            del_datas = set()
            cursor, data = conn.scan(cursor=cursor, match=key, count=10000)
            for d in data:
                dd = str(d, encoding='utf-8')
                if len(dd) > 0 and dd.startswith(key_k):
                    del_datas.add(str(d, encoding='utf-8'))
            pipline = conn.pipeline()
            for d in del_datas:
                # pipline.delete(d)
                pipline.expire(d, 60 * 60 * 24 * 3)
            pipline.execute()
            sum += len(data)
            key_sum += len(del_datas)
            print(cursor, len(data), sum)
            # if cursor == 0 or len(data) == 0:
            if cursor == 0:
                break
        print(key, key_sum)
        # break

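# redis4(): per-node SCAN of the conn4 cluster: first count keys under one candidate
# prefix, then build an inventory of key-name prefixes across the whole keyspace.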
def redis4():
    nodes = [
        '7877da182171e313bc9326729f82999d1b629c79',
        'a4d4034faa81b935c2fd583053105b37f1c92ff1',
        'a8835d4c987847302bac66c5fc17ee1faae91fa3',
        '5a48236679f22637508651530633c9cc2f56f489',
        'fc3715919081c2cf3b30a2f8defb055c03564fdc',
        'eec89d7480980749c998add484e80f23fe5022a4',
        'f3f1ec6df458a5093c31663517a3cadaed5ab29c',
        'd35c630aad0a8b7f579bf4100f2860401b5d4f52',
    ]
    keys = ['doris:user_portrait:tag3:candidate:tractate:device_id:']
    for key_k in keys:
        key = key_k + '*'
        sum = 0
        key_sum = 0
        conn = getRedisConn4()
        pipline = conn.pipeline()
        for node in nodes:
            cursor = "0"
            while True:
                del_datas = set()
                # Raw SCAN with the node id appended, so each node is scanned separately.
                cursor, data = conn.execute_command("scan {} match {} count {} {}".format(cursor, key, 10000, node))
                cursor = str(cursor, encoding='utf-8')
                for d in data:
                    dd = str(d, encoding='utf-8')
                    if len(dd) > 0 and dd.startswith(key_k):
                        del_datas.add(str(d, encoding='utf-8'))
                # pipline = conn.pipeline()
                # for d in del_datas:
                #     # pipline.delete(d)
                #     pipline.expire(d, 60 * 60 * 24 * 3)
                # pipline.execute()
                sum += len(data)
                key_sum += len(del_datas)
                print(node, cursor, len(data), sum)
                if cursor == "0":
                    break
        print(key, key_sum)

    # Full-keyspace inventory: collect the distinct key-name prefixes across all nodes.
    nodes = [
        '7877da182171e313bc9326729f82999d1b629c79',
        'a4d4034faa81b935c2fd583053105b37f1c92ff1',
        'a8835d4c987847302bac66c5fc17ee1faae91fa3',
        '5a48236679f22637508651530633c9cc2f56f489',
        'fc3715919081c2cf3b30a2f8defb055c03564fdc',
        'eec89d7480980749c998add484e80f23fe5022a4',
        'f3f1ec6df458a5093c31663517a3cadaed5ab29c',
        'd35c630aad0a8b7f579bf4100f2860401b5d4f52',
    ]
    conn = getRedisConn4()
    num = 0
    key_sum = 0
    res = {}
    keys_set = set()
    for node in nodes:
        cursor = "0"
        while True:
            cursor, data = conn.execute_command("scan {} match {} count {} {}".format(cursor, "*", 10000, node))
            cursor = str(cursor, encoding='utf-8')
            keys = []
            for d in data:
                key = str(d, encoding='utf-8')
                is_flag = True
                for kk in ['streaming:candidate:', 'tag3:user_portrait:topn:']:
                    if key.startswith(kk):
                        is_flag = False
                        break
                if is_flag:
                    end = key.split(":")[-1]
                    keys_set.add(key[0:len(key) - len(end)])
                    keys.append(key)
            key_sum += len(data)
            print("node:{},curor:{},key_sum:{},keys size:{}".format(node, str(cursor), str(key_sum), str(len(keys_set))))
            if cursor == "0":
                break
    print(list(keys_set)[0:100])

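# Entry point: scan every key on conn4, pipeline TYPE / OBJECT IDLETIME / TTL per key,
# and record the set-type keys that never expire in res.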
if __name__ == '__main__':
    conn = getRedisConn4()
    num = 0
    key_sum = 0
    res = {}
    cursor = 0
    while True:
        cursor, data = conn.scan(cursor=cursor, count=10000)
        pipline = conn.pipeline()
        keys = []
        for d in data:
            key = str(d, encoding='utf-8')
            keys.append(key)
            t = pipline.type(key)
            idle = pipline.object('idletime', key)
            ttl = pipline.ttl(key)
        # Replies come back as flat triples per key: TYPE, OBJECT IDLETIME, TTL.
        datass = pipline.execute()
        for i in range(0, len(datass), 3):
            t = str(datass[i], encoding='utf-8')
            if idle and ttl and t == "set" and datass[i + 2] == -1:
                idle = datass[i + 1]
                ttl = datass[i + 2]
                key = keys[i // 3]
                add_d = res.setdefault(t, {})
                add_dd = add_d.setdefault(key, {})
                add_dd["ttl"] = ttl
                add_dd["idle"] = idle
                add_d[key] = add_dd
                res[t] = add_d
                num += 1
        print("curor:{},key_sum:{},num:{},keys size:{},pipline size:{}".format(str(cursor), str(key_sum), str(num), str(len(data)), str(len(datass))))
        key_sum += len(data)
        if cursor == 0 or len(data) == 0:
            break
    # end = int(time.time())
    # print("conn:{},size:{},cost:{}s".format(str(index + 1), str(num), str(end - start)))
    # open('/tmp/redis_keys_hash_2.json', mode='w', encoding='utf-8').write(json.dumps(res, ensure_ascii=False))
#
#
#
# def redis5():
# key_sum = 0
# conn = getRedisConn5()
# conn4 = getRedisConn4()
# nodes=['d6b136db85c4c97c7bf9817f45bd4a2039e45fed'
# ,'1ea04348ffc7a9858b2f86f5ef3b6565125f43df'
# ,'9327036c8f0b4406f198f857b98198bd3472c85c'
# ,'b6f89870957ef22d2aff0c6fa18a6bb9401a8294'
# ,'4c5e23ed43c5b2bb4706a6335308920ac5990f94'
# ,'0927a69bd5df3007175e55bb3ff0ac11bb9cd816'
# ,'492a36ac916ed461a214d9c1e76ab21760a4b40d'
# ,'5d8b9b2d7814df5664958e10178947a2fbd07f90']
# for node in nodes:
# cursor = "0"
# while True:
# cursor, data = conn.execute_command("scan {} match {} count {} {}".format(cursor, "*", 10000, node))
# cursor = str(cursor, encoding='utf-8')
# for d in data:
# dd = str(d, encoding='utf-8')
# # print(dd)
# t = str(conn.type(dd),encoding='utf-8')
# if t == "string":
# conn4.set(dd,conn.get(dd))
# if t == "hash":
# conn4.hmset(dd,conn.hgetall(dd))
# key_sum += len(data)
# print(node, cursor, len(data), key_sum)
# if cursor == "0":
# break
#
#
# def write2ToRedis4_copy():
# res={"hash":0,"string":0,"list":0}
# key_sum = 0
# conn = getRedisConn2()
# conn4 = getRedisConn4()
# cursor = 0
# while True:
# cursor, data = conn.scan(cursor=cursor,match="*",count=10000)
# for d in data:
# dd = str(d, encoding='utf-8')
# # t = str(conn.type(dd), encoding='utf-8')
# # ttl = conn.ttl(dd)
# # print(dd)
# t = str(conn.type(dd), encoding='utf-8')
# # if t == "string":
# # conn4.set(dd, conn.get(dd))
# # if ttl:
# # conn4.expire(dd, ttl)
# # res["string"] += 1
# # if t == "hash":
# # conn4.hmset(dd, conn.hgetall(dd))
# # if ttl:
# # conn4.expire(dd, ttl)
# # res["hash"] += 1
# if t == "list":
# datas = [conn.lrange(dd, 0, -1)]
# conn4.lpush(dd, conn.lrange(dd,0,-1))
# print(dd)
# # if ttl:
# # conn4.expire(dd, ttl)
# res["list"] += 1
# key_sum += len(data)
# print(cursor, len(data), key_sum)
# if cursor == 0:
# break
#
# def write2ToRedis4():
# res={"hash":0,"string":0,"list":0}
# key_sum = 0
# conn = getRedisConn2()
# conn4 = getRedisConn4()
# pipeline4 = conn4.pipeline()
# cursor = 0
# while True:
# cursor, data = conn.scan(cursor=cursor,match="*",count=20000)
# keys = []
# for d in data:
# dd = str(d, encoding='utf-8')
# keys.append(dd)
# pipeline = conn.pipeline()
# for k in keys:
# pipeline.type(k)
# pip_datas1 = pipeline.execute()
# pipeline = conn.pipeline()
# for i,p in enumerate(pip_datas1):
# t = str(p, encoding='utf-8')
# dd = keys[i]
# pipeline.ttl(dd)
# if t == "string":
# pipeline.get(dd)
# elif t == "hash":
# pipeline.hgetall(dd)
# elif t == "list":
# pipeline.lrange(dd, 0, -1)
# else:
# pipeline.get("123456787654321")
# pip_datas2 = pipeline.execute()
# for i in range(0,len(pip_datas2),2):
# dd = keys[i//2]
# t = str(pip_datas1[i//2], encoding='utf-8')
# ttl = pip_datas2[i]
# v = pip_datas2[i+1]
# if t == "string":
# if v:
# pipeline4.set(dd, v)
# if ttl:
# pipeline4.expire(dd, ttl)
# res["string"] += 1
# if t == "hash":
# if v:
# pipeline4.hmset(dd, v)
# if ttl:
# pipeline4.expire(dd, ttl)
# res["hash"] += 1
# if t == "list":
# if v:
# pipeline4.lpush(dd, v)
# if ttl:
# pipeline4.expire(dd, ttl)
# res["list"] += 1
# pipeline4.execute()
# key_sum += len(data)
# print(cursor, len(data), key_sum)
# if cursor == 0:
# break
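For reference, a minimal sketch of the cursor-based SCAN plus pipelined EXPIRE pattern that redis1() and redis4() repeat. The host, password, pattern, and function name below are illustrative placeholders, not values taken from this repository.

import redis

def expire_matching_keys(host, password, pattern, ttl_seconds):
    # Hypothetical helper: connect, walk the keyspace with SCAN, and push a TTL
    # onto every key that matches the pattern.
    conn = redis.Redis(host=host, password=password, port=6379, db=0)
    cursor, touched = 0, 0
    while True:
        # SCAN returns (next_cursor, [keys]); a next_cursor of 0 ends the iteration.
        cursor, keys = conn.scan(cursor=cursor, match=pattern, count=10000)
        pipe = conn.pipeline()
        for key in keys:
            pipe.expire(key, ttl_seconds)
        pipe.execute()
        touched += len(keys)
        if cursor == 0:
            break
    return touched

# Example (placeholder values): give every key under a demo prefix a 3-day TTL.
# expire_matching_keys("127.0.0.1", None, "demo:prefix:*", 60 * 60 * 24 * 3)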