宋柯 / meta_base_code · Commits · 9c51845d

Commit 9c51845d, authored Jun 17, 2021 by 郭羽
Personalized push metrics (个性化push指标)
parent b9fda69d
Showing 2 changed files with 394 additions and 0 deletions:
  task/ctr_push_strategy.py  +277 −0
  task/个性化push.sql  +117 −0
task/ctr_push_strategy.py · new file (0 → 100644) @ 9c51845d
import hashlib
import json
import pymysql
import xlwt, datetime
import redis
# from pyhive import hive
from maintenance.func_send_email_with_file import send_file_email
from typing import Dict, List
from elasticsearch_7 import Elasticsearch
from elasticsearch_7.helpers import scan
import sys
import time
from pyspark import SparkConf
from pyspark.sql import SparkSession, DataFrame
# from pyspark.sql.functions import lit
# import pytispark.pytispark as pti
def con_sql(sql):
    # Fetch rows from a table in the Doris OLAP database (从数据库的表里获取数据)
    db = pymysql.connect(host='172.16.50.175', port=3306, user='doris',
                         passwd='o5gbA27hXHHm', db='doris_olap')
    cursor = db.cursor()
    cursor.execute(sql)
    result = cursor.fetchall()
    db.close()
    return result
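# Usage sketch for con_sql (the query below is illustrative, not from this commit):
#   rows = con_sql("select day_id, all_receive_num from ctr_push_strategy limit 10")
#   for row in rows:
#       print(row)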
startTime = time.time()


def getSpark():
    sparkConf = SparkConf()
    sparkConf.set("spark.sql.crossJoin.enabled", True)
    sparkConf.set("spark.debug.maxToStringFields", "100")
    sparkConf.set("spark.tispark.plan.allow_index_double_read", False)
    sparkConf.set("spark.tispark.plan.allow_index_read", True)
    sparkConf.set("spark.hive.mapred.supports.subdirectories", True)
    sparkConf.set("spark.hadoop.mapreduce.input.fileinputformat.input.dir.recursive", True)
    sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    sparkConf.set("mapreduce.output.fileoutputformat.compress", False)
    sparkConf.set("mapreduce.map.output.compress", False)
    sparkConf.set("prod.gold.jdbcuri",
                  "jdbc:mysql://172.16.30.136/doris_prod?user=doris&password=o5gbA27hXHHm&rewriteBatchedStatements=true")
    sparkConf.set("prod.mimas.jdbcuri",
                  "jdbc:mysql://172.16.30.138/mimas_prod?user=mimas&password=GJL3UJe1Ck9ggL6aKnZCq4cRvM&rewriteBatchedStatements=true")
    sparkConf.set("prod.gaia.jdbcuri",
                  "jdbc:mysql://172.16.30.143/zhengxing?user=work&password=BJQaT9VzDcuPBqkd&rewriteBatchedStatements=true")
    sparkConf.set("prod.tidb.jdbcuri",
                  "jdbc:mysql://172.16.40.158:4000/eagle?user=st_user&password=aqpuBLYzEV7tML5RPsN1pntUzFy&rewriteBatchedStatements=true")
    # sparkConf.set("prod.jerry.jdbcuri",
    #               "jdbc:mysql://172.16.40.158:4000/jerry_prod?user=st_user&password=aqpuBLYzEV7tML5RPsN1pntUzFy&rewriteBatchedStatements=true")
    sparkConf.set("prod.tispark.pd.addresses", "172.16.40.158:2379")
    # the call below overrides the PD address set on the line above
    sparkConf.set("prod.tispark.pd.addresses", "172.16.40.170:4000")
    # sparkConf.set("prod.tidb.database", "jerry_prod")
    spark = (SparkSession.builder.config(conf=sparkConf)
             .config("spark.sql.extensions", "org.apache.spark.sql.TiExtensions")
             .config("spark.tispark.pd.addresses", "172.16.40.170:2379")
             .appName("search_meigou_ctr")
             .enableHiveSupport()
             .getOrCreate())
    spark.sql("ADD JAR hdfs:///user/hive/share/lib/udf/brickhouse-0.7.1-SNAPSHOT.jar")
    spark.sql("ADD JAR hdfs:///user/hive/share/lib/udf/hive-udf-1.0-SNAPSHOT.jar")
    spark.sql("CREATE TEMPORARY FUNCTION json_map AS 'brickhouse.udf.json.JsonMapUDF'")
    spark.sql("CREATE TEMPORARY FUNCTION is_json AS 'com.gmei.hive.common.udf.UDFJsonFormatCheck'")
    spark.sql("CREATE TEMPORARY FUNCTION arrayMerge AS 'com.gmei.hive.common.udf.UDFArryMerge'")
    return spark
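# Usage sketch: getSpark() yields a Hive-enabled SparkSession with the TiSpark
# extension configured, so callers can run Hive SQL directly, e.g.:
#   spark = getSpark()
#   spark.sql("show databases").show()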
def getSql(startDay, endDay):
    sql = """
--push received data
select day_id
      ,case when device_os_type = '' then '其他' else device_os_type end as device_os_type
      ,active_type
      ,sum(received_dev_num) as all_receive_num
      ,NVL(sum(case when push_type = '101' then received_dev_num end),0) as tractate_receive_num
      ,NVL(sum(case when push_type = '102' then received_dev_num end),0) as diary_receive_num
      ,NVL(sum(case when push_type = '103' then received_dev_num end),0) as answer_receive_num
      ,NVL(sum(case when push_type = '101' then click_dev_num end),0) as tractate_click_num
      ,NVL(sum(case when push_type = '102' then click_dev_num end),0) as diary_click_num
      ,NVL(sum(case when push_type = '103' then click_dev_num end),0) as answer_click_num
from
(
    SELECT t1.partition_day as day_id
          ,t1.device_os_type as device_os_type
          ,'老活' as active_type
          ,t1.push_type as push_type
          ,count(distinct t1.device_id) as received_dev_num
          ,count(t1.msg_id) as received_msg_num
          ,count(distinct t2.cl_id) as click_dev_num
          ,count(t2.msg_id) as click_msg_num
    FROM
    (   -- received devices (接收设备数)
        SELECT partition_day
              ,device_os_type
              ,push_type
              ,a.device_id
              ,a.msg_id
        FROM
        (
            select partition_day, device_id, msg_id
            from bl.bl_et_bg_trackingpushlog_inc_d
            where partition_day >= {startDay} and partition_day < {endDay}
            group by partition_day, device_id, msg_id
        )a
        JOIN
        (
            select device_id, device_os_type
            from online.ml_device_history_detail
            where partition_date >= {startDay} and partition_date < {endDay}
            and first_channel_source_type not in ('yqxiu1','yqxiu2','yqxiu3','yqxiu4','yqxiu5','mxyc1','mxyc2','mxyc3'
                ,'wanpu','jinshan','jx','maimai','zhuoyi','huatian','suopingjingling','mocha','mizhe','meika','lamabang'
                ,'js-az1','js-az2','js-az3','js-az4','js-az5','jfq-az1','jfq-az2','jfq-az3','jfq-az4','jfq-az5','toufang1'
                ,'toufang2','toufang3','toufang4','toufang5','toufang6','TF-toufang1','TF-toufang2','TF-toufang3','TF-toufang4'
                ,'TF-toufang5','tf-toufang1','tf-toufang2','tf-toufang3','tf-toufang4','tf-toufang5','benzhan','promotion_aso100'
                ,'promotion_qianka','promotion_xiaoyu','promotion_dianru','promotion_malioaso','promotion_malioaso-shequ'
                ,'promotion_shike','promotion_julang_jl03','promotion_zuimei')
            AND first_channel_source_type not like 'promotion\_jf\_%'
        )b
        on a.device_id = b.device_id
        JOIN
        (
            select msg_id, push_type, time_stamp
            from online.tl_hdfs_push2_new_view  -- incremental table (增量表)
            where partition_date >= {startDay} and partition_date < {endDay}
            group by msg_id, push_type, time_stamp
            union all
            SELECT msg_id, regexp_replace(labels['event'], '\\s+', '') AS push_type, time_stamp
            FROM online.tl_hdfs_push2_task_view
            WHERE partition_date >= {startDay} and partition_date < {endDay}
            AND labels['event_type'] = 'push'
            group by msg_id, regexp_replace(labels['event'], '\\s+', ''), time_stamp
        )c
        on a.msg_id = c.msg_id
        left join
        (
            select partition_date, device_id
            from online.ml_device_day_active_status
            where partition_date >= {startDay} and partition_date < {endDay}
            AND active_type in ('1','2','3')
            and device_os_type = 'ios'
        )d
        on a.partition_day = d.partition_date and a.device_id = d.device_id
        where d.device_id is null
        group by partition_day, device_os_type, push_type, a.device_id, a.msg_id
    )t1
    left join
    (   -- clicked devices (点击设备数)
        select partition_date, cl_id, params['message_id'] as msg_id, time_stamp
        from online.bl_hdfs_maidian_updates
        where partition_date >= {startDay} and partition_date < {endDay}
        and action = 'notification_open'
        group by partition_date, cl_id, params['message_id'], time_stamp
    )t2
    on t2.partition_date = t1.partition_day
    and t2.msg_id = t1.msg_id
    and t2.cl_id = t1.device_id
    group by t1.partition_day, t1.device_os_type, t1.push_type
)t
where day_id >= {startDay} and day_id < {endDay}
group by day_id, device_os_type, active_type
order by day_id, device_os_type, active_type
    """.format(startDay=startDay, endDay=endDay)
    return sql
def getCount(types, days):
    # Read per-day push counts for each content type from a Redis hash.
    assert isinstance(types, list)
    assert isinstance(days, list)
    REDIS_URL2 = 'redis://:ReDis!GmTx*0aN9@172.16.40.173:6379'
    redis_client2 = redis.StrictRedis.from_url(REDIS_URL2, decode_responses=True)
    key = "strategy:ctr:push:count:{}"
    res_d = {}
    for t in types:
        res = {}
        for day in days:
            count = redis_client2.hget(key.format(t), day)
            if count:
                res[day] = int(count)
        res_d[t] = res
    return res_d
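# Assumed Redis layout behind getCount, inferred from the key pattern above
# (hash field = yyyymmdd day id, value = pushed device count):
#   HSET strategy:ctr:push:count:diary 20210616 123456
#   getCount(['diary'], ['20210616'])  ->  {'diary': {'20210616': 123456}}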
if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('请输入day', flush=True)
        sys.exit(1)
    day = int(sys.argv[1])
    now = (datetime.datetime.now() + datetime.timedelta(days=0))
    endDay = now.strftime("%Y%m%d")
    startDay = (now + datetime.timedelta(days=-day)).strftime("%Y%m%d")
    print(startDay, endDay)

    # fetch per-day pushed-device counts from Redis (redis中获取push设备数)
    days = [(now + datetime.timedelta(days=-(i + 1))).strftime("%Y%m%d") for i in range(day)]
    push_count_d = getCount(['diary', 'tractate', 'answer'], days)

    sql = getSql(startDay, endDay)
    spark = getSpark()
    df = spark.sql(sql)
    # spam_pv_df.createOrReplaceTempView("dev_view")
    df.show(1)
    sql_res = df.collect()
    for res in sql_res:
        print(res)
        device_os_type = res.device_os_type
        active_type = res.active_type
        day_id = res.day_id
        all_receive_num = res.all_receive_num
        diary_click_num = res.diary_click_num
        tractate_click_num = res.tractate_click_num
        answer_click_num = res.answer_click_num
        diary_receive_num = res.diary_receive_num
        tractate_receive_num = res.tractate_receive_num
        answer_receive_num = res.answer_receive_num
        diary_push_num = push_count_d['diary'].setdefault(day_id, 0)
        tractate_push_num = push_count_d['tractate'].setdefault(day_id, 0)
        answer_push_num = push_count_d['answer'].setdefault(day_id, 0)
        # click rate = clicked devices / received devices, per content type
        diary_click_rate = round(diary_click_num / diary_receive_num, 2) if diary_receive_num != 0 else 0
        tractate_click_rate = round(tractate_click_num / tractate_receive_num, 2) if tractate_receive_num != 0 else 0
        answer_click_rate = round(answer_click_num / answer_receive_num, 2) if answer_receive_num != 0 else 0
        # cover rate = received devices of one type / all received devices
        diary_cover_rate = round(diary_receive_num / all_receive_num, 2) if all_receive_num != 0 else 0
        tractate_cover_rate = round(tractate_receive_num / all_receive_num, 2) if all_receive_num != 0 else 0
        answer_cover_rate = round(answer_receive_num / all_receive_num, 2) if all_receive_num != 0 else 0
        # receive rate = received devices / pushed devices (counts from Redis)
        diary_receive_rate = round(diary_receive_num / diary_push_num, 2) if diary_push_num != 0 else 0
        tractate_receive_rate = round(tractate_receive_num / tractate_push_num, 2) if tractate_push_num != 0 else 0
        answer_receive_rate = round(answer_receive_num / answer_push_num, 2) if answer_push_num != 0 else 0
        pid = hashlib.md5((day_id + device_os_type + active_type).encode("utf8")).hexdigest()
        insert_sql = """replace into ctr_push_strategy(
            day_id,device_os_type,active_type,pid,diary_click_num,tractate_click_num,answer_click_num,diary_receive_num,tractate_receive_num,answer_receive_num
            ,diary_click_rate,tractate_click_rate,answer_receive_rate,diary_cover_rate,tractate_cover_rate,answer_cover_rate
            ,diary_push_num,tractate_push_num,answer_push_num,all_receive_num)
            VALUES('{day_id}','{device_os_type}','{active_type}','{pid}',{diary_click_num},{tractate_click_num},{answer_click_num},{diary_receive_num},{tractate_receive_num},{answer_receive_num}
            ,{diary_click_rate},{tractate_click_rate},{answer_receive_rate},{diary_cover_rate},{tractate_cover_rate},{answer_cover_rate},{diary_push_num},{tractate_push_num},{answer_push_num},{all_receive_num});""" \
            .format(day_id=day_id, device_os_type=device_os_type, active_type=active_type, pid=pid,
                    diary_click_num=diary_click_num, tractate_click_num=tractate_click_num,
                    answer_click_num=answer_click_num,
                    diary_receive_num=diary_receive_num, tractate_receive_num=tractate_receive_num,
                    answer_receive_num=answer_receive_num,
                    diary_click_rate=diary_click_rate, tractate_click_rate=tractate_click_rate,
                    answer_receive_rate=answer_receive_rate,
                    diary_cover_rate=diary_cover_rate, tractate_cover_rate=tractate_cover_rate,
                    answer_cover_rate=answer_cover_rate,
                    diary_push_num=diary_push_num, tractate_push_num=tractate_push_num,
                    answer_push_num=answer_push_num, all_receive_num=all_receive_num)
        print(insert_sql)
        # cursor.execute("set names 'UTF8'")
        # db = pymysql.connect(host='172.16.50.175', port=3306, user='doris', passwd='o5gbA27hXHHm',
        #                      db='doris_olap')
        # cursor = db.cursor()
        # res = cursor.execute(insert_sql)
        # db.commit()
        # print(res)
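The MySQL write path above is left commented out and builds the REPLACE statement with str.format. The job is presumably launched as `spark-submit task/ctr_push_strategy.py 7`, where the argument is the look-back window in days per the __main__ block. Below is a minimal sketch, under the same hard-coded connection details, of that write done with pymysql parameter binding; write_row is a hypothetical helper, not part of this commit:

import pymysql

def write_row(row):
    # Hypothetical helper: insert one metrics row via parameter binding,
    # which avoids the quoting pitfalls of interpolating values with str.format.
    db = pymysql.connect(host='172.16.50.175', port=3306, user='doris',
                         passwd='o5gbA27hXHHm', db='doris_olap', charset='utf8')
    try:
        with db.cursor() as cursor:
            cols = list(row.keys())
            sql = "replace into ctr_push_strategy({}) VALUES ({})".format(
                ",".join(cols), ",".join(["%s"] * len(cols)))
            cursor.execute(sql, [row[c] for c in cols])
        db.commit()
    finally:
        db.close()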
task/个性化push.sql · new file (0 → 100644) @ 9c51845d
--push received data
select day_id as `日期`
      ,device_os_type as `设备类型`
      ,active_type as `活跃类型`
      ,NVL(CONCAT(ROUND(sum(case when push_type = '101' then click_dev_num end)
                        / sum(case when push_type = '101' then received_dev_num end) * 100, 2), '%'), 0) as `ctr帖子点击率`
      ,NVL(CONCAT(ROUND(sum(case when push_type = '102' then click_dev_num end)
                        / sum(case when push_type = '102' then received_dev_num end) * 100, 2), '%'), 0) as `ctr日记点击率`
      ,NVL(CONCAT(ROUND(sum(case when push_type = '103' then click_dev_num end)
                        / sum(case when push_type = '103' then received_dev_num end) * 100, 2), '%'), 0) as `ctr回答点击率`
      ,NVL(CONCAT(ROUND(sum(case when push_type = '111' then click_dev_num end)
                        / sum(case when push_type = '111' then received_dev_num end) * 100, 2), '%'), 0) as `非ctr帖子点击率`
      ,NVL(CONCAT(ROUND(sum(case when push_type = '112' then click_dev_num end)
                        / sum(case when push_type = '112' then received_dev_num end) * 100, 2), '%'), 0) as `非ctr日记点击率`
      ,NVL(CONCAT(ROUND(sum(case when push_type = '113' then click_dev_num end)
                        / sum(case when push_type = '113' then received_dev_num end) * 100, 2), '%'), 0) as `非ctr回答点击率`
      ,NVL(ROUND(sum(case when push_type = '101' then received_dev_num end)
                 / sum(case when push_type = '111' then received_dev_num end), 2), 0) as `ctr帖子接收设备数/非ctr帖子接收设备数`
      ,NVL(ROUND(sum(case when push_type = '102' then received_dev_num end)
                 / sum(case when push_type = '112' then received_dev_num end), 2), 0) as `ctr日记接收设备数/非ctr日记接收设备数`
      ,NVL(ROUND(sum(case when push_type = '103' then received_dev_num end)
                 / sum(case when push_type = '113' then received_dev_num end), 2), 0) as `ctr回答接收设备数/非ctr回答接收设备数`
      ,NVL(sum(case when push_type = '101' then received_dev_num end), 0) as `ctr帖子接收设备数`
      ,NVL(sum(case when push_type = '102' then received_dev_num end), 0) as `ctr日记接收设备数`
      ,NVL(sum(case when push_type = '103' then received_dev_num end), 0) as `ctr回答接收设备数`
      ,NVL(sum(case when push_type = '111' then received_dev_num end), 0) as `非ctr帖子接收设备数`
      ,NVL(sum(case when push_type = '112' then received_dev_num end), 0) as `非ctr日记接收设备数`
      ,NVL(sum(case when push_type = '113' then received_dev_num end), 0) as `非ctr回答接收设备数`
from
(
    SELECT t1.partition_day as day_id
          ,t1.device_os_type as device_os_type
          ,'老活' as active_type
          ,t1.push_type as push_type
          ,count(distinct t1.device_id) as received_dev_num
          ,count(t1.msg_id) as received_msg_num
          ,count(distinct t2.cl_id) as click_dev_num
          ,count(t2.msg_id) as click_msg_num
    FROM
    (   -- received devices (接收设备数)
        SELECT partition_day
              ,device_os_type
              ,push_type
              ,a.device_id
              ,a.msg_id
        FROM
        (
            select partition_day, device_id, msg_id
            from bl.bl_et_bg_trackingpushlog_inc_d
            where partition_day = regexp_replace(DATE_SUB(current_date, 1), '-', '')
            group by partition_day, device_id, msg_id
        )a
        JOIN
        (
            select device_id, device_os_type
            from online.ml_device_history_detail
            where partition_date = regexp_replace(DATE_SUB(current_date, 1), '-', '')
            and first_channel_source_type not in ('yqxiu1','yqxiu2','yqxiu3','yqxiu4','yqxiu5','mxyc1','mxyc2','mxyc3'
                ,'wanpu','jinshan','jx','maimai','zhuoyi','huatian','suopingjingling','mocha','mizhe','meika','lamabang'
                ,'js-az1','js-az2','js-az3','js-az4','js-az5','jfq-az1','jfq-az2','jfq-az3','jfq-az4','jfq-az5','toufang1'
                ,'toufang2','toufang3','toufang4','toufang5','toufang6','TF-toufang1','TF-toufang2','TF-toufang3','TF-toufang4'
                ,'TF-toufang5','tf-toufang1','tf-toufang2','tf-toufang3','tf-toufang4','tf-toufang5','benzhan','promotion_aso100'
                ,'promotion_qianka','promotion_xiaoyu','promotion_dianru','promotion_malioaso','promotion_malioaso-shequ'
                ,'promotion_shike','promotion_julang_jl03','promotion_zuimei')
            AND first_channel_source_type not like 'promotion\_jf\_%'
        )b
        on a.device_id = b.device_id
        JOIN
        (
            select msg_id, push_type, time_stamp
            from online.tl_hdfs_push2_new_view  -- incremental table (增量表)
            where partition_date = regexp_replace(DATE_SUB(current_date, 1), '-', '')
            group by msg_id, push_type, time_stamp
            union all
            SELECT msg_id, regexp_replace(labels['event'], '\\s+', '') AS push_type, time_stamp
            FROM online.tl_hdfs_push2_task_view
            WHERE partition_date = regexp_replace(DATE_SUB(current_date, 1), '-', '')
            AND labels['event_type'] = 'push'
            group by msg_id, regexp_replace(labels['event'], '\\s+', ''), time_stamp
        )c
        on a.msg_id = c.msg_id
        left join
        (
            select partition_date, device_id
            from online.ml_device_day_active_status
            where partition_date = regexp_replace(DATE_SUB(current_date, 1), '-', '')
            AND active_type in ('1','2','3')
            and device_os_type = 'ios'
        )d
        on a.partition_day = d.partition_date and a.device_id = d.device_id
        where d.device_id is null
        group by partition_day, device_os_type, push_type, a.device_id, a.msg_id
    )t1
    left join
    (   -- clicked devices (点击设备数)
        select partition_date, cl_id, params['message_id'] as msg_id, time_stamp
        from online.bl_hdfs_maidian_updates
        where partition_date = regexp_replace(DATE_SUB(current_date, 1), '-', '')
        and action = 'notification_open'
        group by partition_date, cl_id, params['message_id'], time_stamp
    )t2
    on t2.partition_date = t1.partition_day
    and t2.msg_id = t1.msg_id
    and t2.cl_id = t1.device_id
    group by t1.partition_day, t1.device_os_type, t1.push_type
)t
where day_id >= '20200626'
group by day_id, device_os_type, active_type
order by `日期`, `设备类型`, `活跃类型`
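This standalone report always targets yesterday's partitions: regexp_replace(DATE_SUB(current_date, 1), '-', '') turns the date into the yyyymmdd partition format the tables use. A minimal sketch of running it through the SparkSession built in the Python file, assuming the repo root is on PYTHONPATH; the scheduling itself is not part of this commit:

from task.ctr_push_strategy import getSpark

spark = getSpark()
with open("task/个性化push.sql", encoding="utf-8") as f:
    report_sql = f.read()
# The file holds a single SELECT, so it can be submitted as one statement.
spark.sql(report_sql).show(50, truncate=False)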