alpha / physical · Commits

Commit 90fd98d3, authored Dec 17, 2018 by 段英荣
Commit message: modify
Parent commit: 0646e3d4
Showing 2 changed files, with 50 additions and 74 deletions:

search/utils/user.py    +13 / -8
search/views/topic.py   +37 / -66
search/utils/user.py (view file @ 90fd98d3)
...
@@ -73,14 +73,6 @@ class UserUtils(object):
         recursion_attention_user_list = cls.___get_should_term_list(recursion_attention_user_id_list, field_name="user_id")

         functions_list = [
-            {
-                "filter":{
-                    "bool":{
-                        "should": recursion_attention_user_list
-                    }
-                },
-                "weight": 10
-            },
             {
                 "gauss": {
                     "latest_topic_time_val": {
...
@@ -91,6 +83,19 @@ class UserUtils(object):
                 }
             }
         ]
+        if len(recursion_attention_user_list) > 0:
+            functions_list.append(
+                {
+                    "filter":{
+                        "bool":{
+                            "should": recursion_attention_user_list
+                        }
+                    },
+                    "weight": 10
+                }
+            )

         query_function_score = {
             "query": {
                 "bool": {
...
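The net effect of the user.py change: the followed-user boost is no longer added to the Elasticsearch function_score functions unconditionally; it is appended only when the should-term list is non-empty, since a bool filter whose only clause is an empty "should" list behaves like match_all and would boost every document. A minimal Python sketch of the same guard (the gauss decay values and surrounding query shape are placeholders, not the project's actual code):

def build_functions_list(recursion_attention_user_list):
    # Time-decay scoring is always applied; the decay parameters here are
    # placeholders for the values hidden in the collapsed context above.
    functions_list = [
        {
            "gauss": {
                "latest_topic_time_val": {
                    "scale": "1d"
                }
            }
        }
    ]
    # Only boost topics from followed users when there is at least one
    # should term; an empty "should" would match (and boost) everything.
    if len(recursion_attention_user_list) > 0:
        functions_list.append({
            "filter": {"bool": {"should": recursion_attention_user_list}},
            "weight": 10,
        })
    return functions_list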
search/views/topic.py (view file @ 90fd98d3)
...
@@ -11,35 +11,23 @@ from libs.es import ESPerform
 from libs.cache import redis_client
 from search.utils.common import *

-@bind("physical/search/home_recommend")
-def home_recommend(session_id="",user_id=-1,offset=0,size=10):
-    """
-    :remark:首页推荐,目前只推荐日记 (homepage recommendation; currently only diary topics are recommended)
-    :param session_id:
-    :param user_id:
-    :param offset:
-    :param size:
-    :return:
-    """
+def get_home_recommend_topic_ids(user_id, session_id, offset, size, query=None):
     try:
         if not user_id:
             user_id = -1
         if not isinstance(session_id, str):
             session_id = ""
-        redis_key = "physical:home_recommend" + ":user_id:" + str(user_id) + ":session_id:" + session_id
+        if query is None:
+            redis_key = "physical:home_recommend" + ":user_id:" + str(user_id) + ":session_id:" + session_id
+        else:
+            redis_key = "physical:home_query" + ":user_id:" + str(user_id) + ":session_id:" + session_id + ":query:" + str(query)
         redis_field_list = [b'last_offset_num', b'unread_topic_id']
         for page_id in range(0, offset, size):
             redis_field_list.append(str(page_id))
         redis_field_val_list = redis_client.hmget(redis_key, redis_field_list)
         #logging.info("duan add,redis_field_list:%s,redis_field_val_list:%s" % (str(redis_field_list),str(redis_field_val_list)))
         #redis_val_dict = redis_client.hgetall(redis_key)
         last_offset_num = int(redis_field_val_list[0]) if redis_field_val_list[0] else -1
         recommend_topic_ids = []
-        topic_id_list = TopicUtils.get_recommend_topic_ids(user_id, offset, size*size)
+        topic_id_list = TopicUtils.get_recommend_topic_ids(user_id, offset, size*size, query)
         have_read_group_id_set = set()
         unread_topic_id_list = list()
         have_read_topic_id_set = set()
...
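As the hunk above shows, get_home_recommend_topic_ids keeps per-session pagination state in one Redis hash: last_offset_num is the last offset served, unread_topic_id holds topics deferred to later pages, and each already-served page stores its topic ids under its page offset. A minimal redis-py sketch of reading that state back (the client below stands in for the project's libs.cache.redis_client; the key is whatever redis_key was built above):

import json
import redis

redis_client = redis.StrictRedis()  # stand-in for libs.cache.redis_client

def load_recommend_state(redis_key, offset, size):
    # One hash per (user, session): last paged offset, deferred topic ids,
    # plus one field per page that has already been served.
    fields = [b"last_offset_num", b"unread_topic_id"]
    fields += [str(page_id) for page_id in range(0, offset, size)]
    values = redis_client.hmget(redis_key, fields)

    last_offset_num = int(values[0]) if values[0] else -1
    unread_topic_ids = json.loads(values[1]) if values[1] else []
    have_read_topic_ids = set()
    for page_val in values[2:]:
        if page_val:
            have_read_topic_ids |= set(json.loads(page_val))
    return last_offset_num, unread_topic_ids, have_read_topic_ids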
@@ -50,16 +38,13 @@ def home_recommend(session_id="",user_id=-1,offset=0,size=10):
         if (user_id > 0 and offset == last_offset_num) or user_id == -1:
             ori_unread_topic_id_list = json.loads(redis_field_val_list[1])
             topic_id_list = ori_unread_topic_id_list + topic_id_list
-            logging.info("duan add,redis_field_val_list_1111:%d" % len(redis_field_val_list))
             for have_read_item in redis_field_val_list[2:]:
-                logging.info("duan add,redis_field_val_list_OOOO:%s" % str(have_read_item))
                 if have_read_item:
                     have_read_topic_id_set = have_read_topic_id_set.union(json.loads(have_read_item))

         #if user_id==-1:
         #    topic_id_list = list(set([item["id"] for item in topic_id_list]).difference(have_read_topic_id_set))
-        logging.info("duan add,redis_field_val_list:%s,have_read_topic_id_set:%s" % (str(redis_field_val_list),str(have_read_topic_id_set)))

         for item in topic_id_list:
             if item["group_id"] in have_read_group_id_set:
                 unread_topic_id_list.append(item)
...
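The selection loop that starts here (its tail sits in the collapsed context, and the same logic appears in full in the removed home_query body in the next hunk) de-duplicates per group: a topic whose group_id has already produced a recommendation in this request is deferred to unread_topic_id_list, and the deferred list backfills the page when fewer than size ids were picked. A self-contained sketch of that selection, assuming each item is a dict with "id" and "group_id" keys:

def select_page(topic_id_list, have_read_topic_ids, size):
    # At most `size` ids, at most one topic per group, skipping ids this
    # session has already seen; same-group duplicates wait for later pages.
    recommend_topic_ids = []
    unread_topic_id_list = []
    seen_group_ids = set()
    for item in topic_id_list:
        if item["group_id"] in seen_group_ids:
            unread_topic_id_list.append(item)
            continue
        if item["id"] not in have_read_topic_ids:
            recommend_topic_ids.append(item["id"])
        if isinstance(item["group_id"], int) and item["group_id"] > 0:
            seen_group_ids.add(item["group_id"])
        have_read_topic_ids.add(item["id"])
        if len(recommend_topic_ids) >= size:
            break
    # Backfill from the deferred list when the page came up short.
    if len(recommend_topic_ids) < size and unread_topic_id_list:
        need = size - len(recommend_topic_ids)
        recommend_topic_ids += [item["id"] for item in unread_topic_id_list[:need]]
        unread_topic_id_list = unread_topic_id_list[need:]
    return recommend_topic_ids, unread_topic_id_list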
@@ -90,69 +75,48 @@ def home_recommend(session_id="",user_id=-1,offset=0,size=10):
             # 每个session key保存15分钟 (keep each session key for 15 minutes)
             redis_client.expire(redis_key, 15*60*60)
-        return {"recommend_topic_ids": recommend_topic_ids}
+        return recommend_topic_ids
     except:
         logging.error("catch exception,err_msg:%s" % traceback.format_exc())
-        return {"recommend_topic_ids": []}
+        return []

-@bind("physical/search/home_query")
-def home_query(session_id="",user_id=-1,query="",offset=0,size=10):
-    """
-    :remark:首页搜索,目前只推荐日记 (homepage search; currently only diary topics are recommended)
-    :param session_id:
-    :param user_id:
-    :param query:
-    :param offset:
-    :param size:
-    :return:
-    """
-    try:
-        if not user_id:
-            user_id = -1
-        if not isinstance(session_id, str):
-            session_id = ""
-        redis_key = "physical:home_query" + ":user_id:" + str(user_id) + ":session_id:" + session_id + ":query:" + str(query)
-        redis_val_dict = redis_client.hgetall(redis_key)
-        last_offset_num = int(redis_val_dict[b"last_offset_num"]) if b"last_offset_num" in redis_val_dict else -1
-        recommend_topic_ids = []
-        topic_id_list = TopicUtils.get_recommend_topic_ids(user_id, offset, size*size, query)
-        have_read_group_id_tuple = set()
-        unread_topic_id_list = list()
-        have_read_topic_id_tuple = set()
-        if len(redis_val_dict)>0 and offset>0 and user_id>0 and offset==last_offset_num:
-            topic_id_list = json.loads(redis_val_dict[b"unread_topic_id"]) + topic_id_list
-            have_read_topic_id_tuple = set(json.loads(redis_val_dict[b"have_read_topic_id"]))
-        for item in topic_id_list:
-            if item["group_id"] in have_read_group_id_tuple:
-                unread_topic_id_list.append(item)
-            else:
-                if item["id"] not in have_read_topic_id_tuple:
-                    recommend_topic_ids.append(item["id"])
-                if isinstance(item["group_id"],int) and item["group_id"]>0:
-                    have_read_group_id_tuple.add(item["group_id"])
-                have_read_topic_id_tuple.add(item["id"])
-            if len(recommend_topic_ids) >= size:
-                break
-        if len(recommend_topic_ids) < size and len(unread_topic_id_list)>0:
-            recommend_len = len(recommend_topic_ids)
-            offi_unread_topic_id = [item["id"] for item in unread_topic_id_list[:(size-recommend_len)]]
-            recommend_topic_ids = recommend_topic_ids + offi_unread_topic_id
-            unread_topic_id_list = unread_topic_id_list[(size-recommend_len):]
-        redis_dict = {
-            "unread_topic_id":json.dumps(unread_topic_id_list),
-            "have_read_topic_id":json.dumps(list(have_read_topic_id_tuple)),
-            "last_offset_num":offset+size
-        }
-        redis_client.hmset(redis_key,redis_dict)
-        # 每个session key保存15分钟 (keep each session key for 15 minutes)
-        redis_client.expire(redis_key,15*60*60)
+@bind("physical/search/home_recommend")
+def home_recommend(session_id="",user_id=-1,offset=0,size=10):
+    """
+    :remark:首页推荐,目前只推荐日记 (homepage recommendation; currently only diary topics are recommended)
+    :param session_id:
+    :param user_id:
+    :param offset:
+    :param size:
+    :return:
+    """
+    try:
+        recommend_topic_ids = get_home_recommend_topic_ids(user_id, session_id, offset, size)
+        return {"recommend_topic_ids": recommend_topic_ids}
+    except:
+        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
+        return {"recommend_topic_ids": []}
+
+@bind("physical/search/home_query")
+def home_query(session_id="",user_id=-1,query="",offset=0,size=10):
+    """
+    :remark:首页搜索,目前只推荐日记 (homepage search; currently only diary topics are recommended)
+    :param session_id:
+    :param user_id:
+    :param query:
+    :param offset:
+    :param size:
+    :return:
+    """
+    try:
+        """
+        if not user_id:
+            user_id=-1
...
@@ -215,7 +179,14 @@ def home_query(session_id="",user_id=-1,query="",offset=0,size=10):
         }
         redis_client.hmset(redis_key,redis_dict)
         # 每个session key保存15分钟 (keep each session key for 15 minutes)
         redis_client.expire(redis_key,15*60*60)
+        """
+        if not user_id:
+            user_id = -1
+        if not isinstance(session_id, str):
+            session_id = ""
+
+        recommend_topic_ids = get_home_recommend_topic_ids(user_id, session_id, offset, size, query)
         return {"recommend_topic_ids": recommend_topic_ids}
     except:
         logging.error("catch exception,err_msg:%s" % traceback.format_exc())
...
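Taken together, the topic.py change is an extraction refactor: home_recommend and home_query both delegate to the shared get_home_recommend_topic_ids helper (home_query forwarding its query), and the old home_query body is kept behind a triple-quoted string rather than deleted. One detail worth noting: the comment says each session key is kept for 15 minutes, but expire(redis_key, 15*60*60) is 54,000 seconds, i.e. 15 hours; 15*60 would match the comment. Condensed, the two endpoints after this commit look roughly like this (docstrings, input normalization, and the commented-out old body omitted):

@bind("physical/search/home_recommend")
def home_recommend(session_id="", user_id=-1, offset=0, size=10):
    # Thin wrapper: no query, so the helper uses the plain recommendation key.
    try:
        recommend_topic_ids = get_home_recommend_topic_ids(user_id, session_id, offset, size)
        return {"recommend_topic_ids": recommend_topic_ids}
    except:
        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
        return {"recommend_topic_ids": []}

@bind("physical/search/home_query")
def home_query(session_id="", user_id=-1, query="", offset=0, size=10):
    # Same wrapper with the search query forwarded to the shared helper.
    try:
        recommend_topic_ids = get_home_recommend_topic_ids(user_id, session_id, offset, size, query)
        return {"recommend_topic_ids": recommend_topic_ids}
    except:
        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
        return {"recommend_topic_ids": []}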