黎涛 / meta_base_code / Commits
Commit 4ad33c30 authored Sep 17, 2020 by litaolemo
update
parent 46903334
Showing 2 changed files with 4 additions and 5 deletions (+4 -5)

new_user_analysis/new_user_has_protratit_rate.py   +1 -2
new_user_analysis/user_behavior_path.py            +3 -3
new_user_analysis/new_user_has_protratit_rate.py
@@ -105,7 +105,7 @@ for t in range(1, task_days):
    one_week_age_str = (now + datetime.timedelta(days=-7)).strftime("%Y%m%d")
    new_urser_device_id_sql = r"""
select t2.device_id as device_id from
-  (select device_id from online.ml_device_day_active_status where partition_date = '${today_str}' and active_type in (1,2)) t2
+  (select device_id from online.ml_device_day_active_status where partition_date = '{today_str}' and active_type in (1,2)) t2
LEFT JOIN
(
select distinct device_id
@@ -195,7 +195,6 @@ WHERE spam_pv.device_id IS NULL
    for count_user_count, res in enumerate(sql_res):
        # print(count, res)
        portratit_res = get_user_portrait_tag3_from_redis(res.device_id)
-        portratit_res
        # print(count_user_count, res, portratit_res)
        temp_count = 0
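Both hunks are small cleanups: the second drops a stray no-op `portratit_res` expression statement, and the first removes a leftover `$` from the `{today_str}` placeholder. Assuming the SQL template is rendered with Python's str.format (as the `.format(...)` call in the other file's diff suggests), a `${today_str}` placeholder would keep the literal `$` in the rendered query, so the partition_date filter could never match. A minimal sketch with a hypothetical date value:

# Minimal sketch (hypothetical date value): str.format replaces {today_str}
# but leaves a shell-style '$' prefix untouched in the rendered SQL.
old_fragment = "partition_date = '${today_str}'"
new_fragment = "partition_date = '{today_str}'"

today_str = "20200917"
print(old_fragment.format(today_str=today_str))  # partition_date = '$20200917'  -> matches no partition
print(new_fragment.format(today_str=today_str))  # partition_date = '20200917'   -> valid date literal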
new_user_analysis/user_behavior_path.py
@@ -86,7 +86,7 @@ spark.sql("CREATE TEMPORARY FUNCTION is_json AS 'com.gmei.hive.common.udf.UDFJso
spark.sql("CREATE TEMPORARY FUNCTION arrayMerge AS 'com.gmei.hive.common.udf.UDFArryMerge'")
task_list = []
-task_days = 30
+task_days = 8
name_dict = {
    "/api/my_order/recommend/service/list": "我的订单页推荐美购列表",
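Lowering `task_days` from 30 to 8 shrinks the per-day loop referenced by the later hunk headers (`for t in range(1, task_days):`) from 29 iterations to 7. A minimal sketch of the effect, assuming each iteration derives one partition date from its day offset (the loop body itself is not shown in this diff):

# Minimal sketch; the per-iteration date arithmetic is an assumption based on
# the hunk header "for t in range(1, task_days):" and the timedelta usage nearby.
import datetime

now = datetime.datetime.now()
task_days = 8  # was 30 before this commit

for t in range(1, task_days):
    partition_date = (now + datetime.timedelta(days=-t)).strftime("%Y%m%d")
    print(partition_date)  # 7 daily partitions instead of 29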
@@ -870,7 +870,7 @@ for t in range(1, task_days):
    one_week_age_str = (now + datetime.timedelta(days=-7)).strftime("%Y%m%d")
    new_urser_device_id_sql = r"""
select t2.device_id as device_id from
-  (select device_id from online.ml_device_day_active_status where partition_date = '${today_str}' and active_type in (1,2)) t2
+  (select device_id from online.ml_device_day_active_status where partition_date = '{today_str}' and active_type in (1,2)) t2
LEFT JOIN
(
select distinct device_id
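The unchanged context line above also shows how the script derives the compact YYYYMMDD strings used as partition_date values: shift `now` back seven days with a negative timedelta and format it. A small illustration with a fixed, hypothetical "now":

# Illustration of the one_week_age_str context line (the fixed "now" is hypothetical;
# the script uses the current time).
import datetime

now = datetime.datetime(2020, 9, 17)
one_week_age_str = (now + datetime.timedelta(days=-7)).strftime("%Y%m%d")
print(one_week_age_str)  # 20200910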
@@ -954,7 +954,7 @@ WHERE spam_pv.device_id IS NULL
left join
(select cl_id, concat_ws(',', collect_list(action)) as track from
(select * from online.bl_hdfs_maidian_updates where partition_date = {partition_date} and cl_id is not null ) group by cl_id) t2
-on t1.device_id = t2.cl_id""".format(partition_date=yesterday_str)
+on t1.device_id = t2.cl_id""".format(partition_date=today_str)
print(maidian_sql)
track_df = spark.sql(maidian_sql)
# track_df.createOrReplaceTempView("device_id_view")
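The last hunk re-points the maidian (click-log) query from `yesterday_str` to `today_str`, so `online.bl_hdfs_maidian_updates` is read from the current day's partition instead of the previous day's. A minimal sketch (hypothetical values) of how the inner template renders once `.format(partition_date=today_str)` is applied:

# Minimal sketch (hypothetical values) of the rendered partition filter.
maidian_template = (
    "select * from online.bl_hdfs_maidian_updates "
    "where partition_date = {partition_date} and cl_id is not null"
)

today_str = "20200917"  # hypothetical; the script computes this from the current date
maidian_sql = maidian_template.format(partition_date=today_str)
print(maidian_sql)
# select * from online.bl_hdfs_maidian_updates where partition_date = 20200917 and cl_id is not null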