ffm-baseline: Commit 342779ca
authored Aug 05, 2020 by 赵威
update db
parent ef7819be
Showing 1 changed file with 14 additions and 15 deletions
eda/smart_rank/tag3_update_user_portrait_offline.py (+14, -15)
...
...
@@ -267,23 +267,22 @@ def update_tag3_user_portrait(cl_id):
            (len(first_solutions_score.keys()) > 0) or (len(second_solutions_score.keys()) > 0) or \
            (len(first_positions_score.keys()) > 0) or (len(second_positions_score.keys()) > 0) or \
            (len(projects_score.keys()) > 0):
        # TODO
        # redis_client.set(key, json.dumps(res))
        # redis_client.expire(key, 60 * 60 * 24 * 180)
        redis_client.set(key, json.dumps(res))
        redis_client.expire(key, 60 * 60 * 24 * 180)
        # write_user_portrait(cl_id, ",".join(first_solutions_score.keys()), ",".join(second_solutions_score.keys()),
        #                     ",".join(first_demands_score.keys()), ",".join(second_demands_score.keys()),
        #                     ",".join(first_positions_score.keys()), ",".join(second_positions_score.keys()),
        #                     ",".join(projects_score.keys()))
        write_user_portrait(cl_id, ",".join(first_solutions_score.keys()), ",".join(second_solutions_score.keys()),
                            ",".join(first_demands_score.keys()), ",".join(second_demands_score.keys()),
                            ",".join(first_positions_score.keys()), ",".join(second_positions_score.keys()),
                            ",".join(projects_score.keys()))
        body = {}
        for (k, v) in res.items():
            body[k] = list(v.keys())
        # body = {}
        # for (k, v) in res.items():
        #     body[k] = list(v.keys())
        body["device_id"] = cl_id
        body["last_modified"] = datetime.datetime.strftime(datetime.datetime.now(pytz.timezone("Asia/Shanghai")),
                                                           "%Y-%m-%dT%H:%M:%S.%f")[:-7] + "Z"
        es_insert_device_info(cl_id, body)
        # body["device_id"] = cl_id
        # body["last_modified"] = datetime.datetime.strftime(datetime.datetime.now(pytz.timezone("Asia/Shanghai")),
        #                                                    "%Y-%m-%dT%H:%M:%S.%f")[:-7] + "Z"
        # es_insert_device_info(cl_id, body)
        # # write_user_portrait_doris(cl_id, ",".join(first_solutions_score.keys()), ",".join(second_solutions_score.keys()),
        # #                           ",".join(first_demands_score.keys()), ",".join(second_demands_score.keys()),
...
...
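The hunk above caches the assembled portrait in Redis with a 180-day expiry and builds an Elasticsearch document stamped with Shanghai local time. Below is a minimal standalone sketch of that pattern, not the project's actual module: the Redis connection, the key layout, and the sample res dict are assumptions, and write_user_portrait / es_insert_device_info are project helpers not shown in this diff.

# Minimal sketch of the caching + ES-document pattern; connection details,
# key format and the example `res` are assumed for illustration only.
import datetime
import json

import pytz
import redis

redis_client = redis.StrictRedis(host="127.0.0.1", port=6379, db=0)  # assumed connection


def cache_user_portrait(cl_id, res):
    # Cache the raw portrait scores for 180 days, keyed by device id (key layout assumed).
    key = "user_portrait:tag3:device_id:" + cl_id
    redis_client.set(key, json.dumps(res))
    redis_client.expire(key, 60 * 60 * 24 * 180)

    # Keep only the tag names per category, mirroring body[k] = list(v.keys()) above.
    body = {k: list(v.keys()) for k, v in res.items()}
    body["device_id"] = cl_id
    # "%Y-%m-%dT%H:%M:%S.%f" followed by [:-7] drops the ".ffffff" suffix (7 chars),
    # leaving a second-precision Shanghai-local timestamp with a literal "Z" appended.
    now = datetime.datetime.now(pytz.timezone("Asia/Shanghai"))
    body["last_modified"] = now.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-7] + "Z"
    return body


# Example (hypothetical device id and scores):
# cache_user_portrait("some-device-id", {"projects": {"rhinoplasty": 0.7}})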
@@ -315,7 +314,7 @@ def consume_kafka():
    spark = SparkSession.builder.config(conf=sparkConf).enableHiveSupport().getOrCreate()
    spark.sparkContext.setLogLevel("WARN")
    spark.sparkContext.addPyFile("/srv/apps/ffm-baseline_git/eda/smart_rank/tool.py")
    spark.sparkContext.addPyFile("/srv/apps/ffm-baseline_git/eda/smart_rank/es_tool.py")
    # spark.sparkContext.addPyFile("/srv/apps/ffm-baseline_git/eda/smart_rank/es_tool.py")
    device_ids_lst_rdd = spark.sparkContext.parallelize(device_ids_lst, numSlices=1000)
...
...
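The second hunk touches the Spark driver setup in consume_kafka(), where helper modules are shipped to the executors with addPyFile before the device ids are parallelized. A rough sketch of that setup follows, assuming pyspark is installed; the SparkConf contents and the device id list are placeholders, not values from this repository.

# Driver-side setup sketch; conf values and device ids are placeholders.
from pyspark import SparkConf
from pyspark.sql import SparkSession

sparkConf = SparkConf().setAppName("tag3_update_user_portrait_offline")  # assumed app name
spark = SparkSession.builder.config(conf=sparkConf).enableHiveSupport().getOrCreate()
spark.sparkContext.setLogLevel("WARN")

# Ship helper modules to every executor so functions mapped over the RDD can import them.
spark.sparkContext.addPyFile("/srv/apps/ffm-baseline_git/eda/smart_rank/tool.py")
spark.sparkContext.addPyFile("/srv/apps/ffm-baseline_git/eda/smart_rank/es_tool.py")

# Many small partitions keep the per-device work evenly spread across executors.
device_ids_lst = ["dev-001", "dev-002"]  # placeholder device ids
device_ids_lst_rdd = spark.sparkContext.parallelize(device_ids_lst, numSlices=1000)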