# -*- coding:UTF-8 -*-
# @Time : 2020/9/15 13:37
# @File : meigou_data.py
# @email : litao@igengmei.com
# @author : litao
import hashlib
import json
import pymysql
import xlwt, datetime
import redis
# from pyhive import hive
from maintenance.func_send_email_with_file import send_file_email
from typing import Dict, List
from elasticsearch_7 import Elasticsearch
from elasticsearch_7.helpers import scan
import sys
import time
from pyspark import SparkConf
from pyspark.sql import SparkSession, DataFrame
# from pyspark.sql.functions import lit
# import pytispark.pytispark as pti
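# This job computes daily card metrics for service ("美购") cards on the welfare_home feed:
# exposure, click, CPC and effective second-jump PV plus the derived ratios, broken down by
# partition_date, active_type, device_os_type and grey_type, and writes one row per group
# into the TiDB table jerry_prod.meigou_data.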
def con_sql(sql):
    # Fetch all rows for the given query from the TiDB/MySQL database
    db = pymysql.connect(host='172.16.40.158', port=4000, user='st_user', passwd='aqpuBLYzEV7tML5RPsN1pntUzFy',
                         db='jerry_prod')
    cursor = db.cursor()
    cursor.execute(sql)
    result = cursor.fetchall()
    db.close()
    return result
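# Spark / TiSpark setup. The "prod.*.jdbcuri" keys are not read anywhere in this file; they look
# like job-level metadata carried over from a shared template, while the spark.tispark.* and
# serializer settings are what the session built below actually consumes.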
startTime = time.time()
sparkConf = SparkConf()
sparkConf.set("spark.sql.crossJoin.enabled", True)
sparkConf.set("spark.debug.maxToStringFields", "100")
sparkConf.set("spark.tispark.plan.allow_index_double_read", False)
sparkConf.set("spark.tispark.plan.allow_index_read", True)
sparkConf.set("spark.hive.mapred.supports.subdirectories", True)
sparkConf.set("spark.hadoop.mapreduce.input.fileinputformat.input.dir.recursive", True)
sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
sparkConf.set("mapreduce.output.fileoutputformat.compress", False)
sparkConf.set("mapreduce.map.output.compress", False)
sparkConf.set("prod.gold.jdbcuri",
"jdbc:mysql://172.16.30.136/doris_prod?user=doris&password=o5gbA27hXHHm&rewriteBatchedStatements=true")
sparkConf.set("prod.mimas.jdbcuri",
"jdbc:mysql://172.16.30.138/mimas_prod?user=mimas&password=GJL3UJe1Ck9ggL6aKnZCq4cRvM&rewriteBatchedStatements=true")
sparkConf.set("prod.gaia.jdbcuri",
"jdbc:mysql://172.16.30.143/zhengxing?user=work&password=BJQaT9VzDcuPBqkd&rewriteBatchedStatements=true")
sparkConf.set("prod.tidb.jdbcuri",
"jdbc:mysql://172.16.40.158:4000/eagle?user=st_user&password=aqpuBLYzEV7tML5RPsN1pntUzFy&rewriteBatchedStatements=true")
sparkConf.set("prod.jerry.jdbcuri",
"jdbc:mysql://172.16.40.158:4000/jerry_prod?user=st_user&password=aqpuBLYzEV7tML5RPsN1pntUzFy&rewriteBatchedStatements=true")
sparkConf.set("prod.tispark.pd.addresses", "172.16.40.158:2379")
sparkConf.set("prod.tispark.pd.addresses", "172.16.40.170:4000")
sparkConf.set("prod.tidb.database", "jerry_prod")
sparkConf.setAppName("meigou_data")
spark = (SparkSession.builder.config(conf=sparkConf)
         .config("spark.sql.extensions", "org.apache.spark.sql.TiExtensions")
         .config("spark.tispark.pd.addresses", "172.16.40.170:2379")
         .appName("meigou_data").enableHiveSupport().getOrCreate())
# spark.sparkContext.setLogLevel("ERROR")
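# Register the Hive UDF jars referenced by the SQL below. json_map / is_json / arrayMerge are
# registered here but not used in this particular query; json_split (used in the second-jump
# subquery) is presumably already available as a permanent function from one of these jars.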
spark.sql("ADD JAR hdfs:///user/hive/share/lib/udf/brickhouse-0.7.1-SNAPSHOT.jar")
spark.sql("ADD JAR hdfs:///user/hive/share/lib/udf/hive-udf-1.0-SNAPSHOT.jar")
spark.sql("CREATE TEMPORARY FUNCTION json_map AS 'brickhouse.udf.json.JsonMapUDF'")
spark.sql("CREATE TEMPORARY FUNCTION is_json AS 'com.gmei.hive.common.udf.UDFJsonFormatCheck'")
spark.sql("CREATE TEMPORARY FUNCTION arrayMerge AS 'com.gmei.hive.common.udf.UDFArryMerge'")
task_list = []
task_days = 3
for t in range(0, task_days):
    day_num = 0 - t
    now = (datetime.datetime.now() + datetime.timedelta(days=day_num))
    last_30_day_str = (now + datetime.timedelta(days=-30)).strftime("%Y%m%d")
    today_str = now.strftime("%Y%m%d")
    yesterday_str = (now + datetime.timedelta(days=-1)).strftime("%Y%m%d")
    one_week_ago_str = (now + datetime.timedelta(days=-7)).strftime("%Y%m%d")
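    # The query joins, per day and per card:
    #   t1 - daily active devices, labelled '新增' (new) / '老活' (existing) and split into
    #        '灰度' (grey) / '非灰' (non-grey) buckets by the last hex digit of md5(device_id)
    #   t2 - precise exposures of service cards on the welfare_home '精选' tab
    #   t3 - card clicks on the same tab
    #   t4 - effective second jumps (add/buy/message actions on welfare_detail reached from welfare_home)
    #   b  - abnormal devices, which are filtered out
    # and aggregates PV counts and ratios by date, active_type, device OS type and grey bucket.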
sql_search_ctr = r"""
SELECT
t1.partition_date as partition_date
,active_type as active_type
,device_os_type as device_os_type
,grey_type as grey_type
,round(NVL(sum(click_pv),0)/NVL(sum(exp_pv),0),4) as clickpv_div_exposurepv--`卡片点击pv/卡片精准曝光pv(%)`
,round(NVL(sum(two_click_pv),0)/NVL(sum(exp_pv),0),4) as ertiaopv_div_exposurepv--`有效二跳pv/卡片精准曝光pv(%)`
,round(NVL(sum(two_click_pv),0)/NVL(sum(click_pv),0),4) as ertiaopv_div_clickpv --`有效二跳pv/卡片点击pv(%)`
,round(NVL(sum(cpc_exp_pv),0)/NVL(sum(exp_pv),0),4) as cpcpv_div_exposurepv --`cpc卡片曝光pv/卡片精准曝光pv(%)`
,NVL(sum(click_pv),0) as click_pv--`卡片点击pv`
,NVL(sum(exp_pv),0) as exp_pv--`卡片曝光pv`
,NVL(sum(two_click_pv),0) as two_click_pv--`有效二跳pv`
,NVL(sum(cpc_click_pv),0) as cpc_click_pv--`cpc卡片点击pv`
,NVL(sum(cpc_exp_pv),0) as cpc_exp_pv --`cpc卡片曝光pv`
FROM
(
SELECT partition_date
,device_os_type
,CASE WHEN active_type = '4' THEN '老活'
WHEN active_type IN ('1','2') THEN '新增' END AS active_type
,device_id
,CASE WHEN substr(md5(device_id),-1) in ('0','1','2','3','4','5','6','7') THEN '灰度' ELSE '非灰' END AS grey_type
FROM online.ml_device_day_active_status
WHERE partition_date>={yesterday_str}
AND partition_date<= {partition_date}
AND active_type IN ('1','2','4')
)t1
JOIN
(--精准曝光
SELECT cl_id,partition_date,card_id,count(1) as exp_pv,count(CASE WHEN get_json_object(exposure_card, '$.is_cpc')=1 THEN 1 END) as cpc_exp_pv
FROM online.ml_community_precise_exposure_detail
WHERE partition_date>={yesterday_str}
AND partition_date<= {partition_date}
AND action in ('page_precise_exposure','home_choiceness_card_exposure') --7745版本action改为page_precise_exposure
AND page_name in('welfare_home')
AND tab_name in ('精选')
AND card_content_type ='service'
and (get_json_object(exposure_card,'$.in_page_pos')='' or get_json_object(exposure_card,'$.in_page_pos') is null)
group by partition_date,cl_id,card_id
)t2
on t1.device_id=t2.cl_id and t1.partition_date=t2.partition_date
LEFT JOIN
(--卡片点击
SELECT cl_id,partition_date,params['card_id'] as card_id,count(1) as click_pv,count(CASE WHEN params['is_cpc']=1 THEN 1 END) as cpc_click_pv
FROM online.bl_hdfs_maidian_updates
WHERE partition_date>={yesterday_str}
AND partition_date<= {partition_date}
AND action='on_click_card'
AND params['tab_name']='精选'
AND params['page_name'] ='welfare_home'
AND params['card_content_type'] ='service'
GROUP BY cl_id,partition_date,params['card_id']
)t3
on t2.partition_date=t3.partition_date
and t2.cl_id=t3.cl_id
and t2.card_id=t3.card_id
LEFT JOIN
(--商祥二跳
SELECT cl_id,partition_date,params['service_id'] as service_id,count(1) as two_click_pv
FROM online.bl_hdfs_maidian_updates
WHERE partition_date>={yesterday_str}
AND partition_date<= {partition_date}
AND (referrer in ('welfare_home')
or (params['referrer_link'] like '%[%' and json_split(params['referrer_link'])[size(json_split(params['referrer_link']))-1] in ('welfare_home')))
AND ((action in ('welfare_multiattribute_click_add','welfare_multiattribute_click_buy') AND page_name = 'welfare_detail')
or action = 'welfare_detail_click_message')
GROUP BY cl_id,partition_date,params['service_id']
)t4
on t3.partition_date=t4.partition_date
and t3.cl_id=t4.cl_id
and t3.card_id=t4.service_id
LEFT JOIN
(
select distinct device_id
from ML.ML_D_CT_DV_DEVICECLEAN_DIMEN_D
where PARTITION_DAY = '{start_date}'
AND is_abnormal_device = 'true'
)b
on t1.device_id=b.device_id
where b.device_id is null or b.device_id =''
GROUP BY t1.partition_date
,grey_type,active_type,device_os_type
order by 1
""".format(partition_date=today_str, yesterday_str=yesterday_str)
    print(sql_search_ctr)
    search_ctr_df = spark.sql(sql_search_ctr)
    # spam_pv_df.createOrReplaceTempView("dev_view")
    search_ctr_df.show(1)
    sql_res = search_ctr_df.collect()
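    # Persist each aggregated row to TiDB. pid (md5 over the grouping dimensions) is presumably
    # the unique key of meigou_data, so REPLACE INTO overwrites the row on re-runs of the same day.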
    for res in sql_res:
        # print(res)
        partition_date = res.partition_date
        device_os_type = res.device_os_type
        active_type = res.active_type
        grey_type = res.grey_type
        clickpv_div_exposurepv = res.clickpv_div_exposurepv
        ertiaopv_div_exposurepv = res.ertiaopv_div_exposurepv
        ertiaopv_div_clickpv = res.ertiaopv_div_clickpv
        cpcpv_div_exposurepv = res.cpcpv_div_exposurepv
        click_pv = res.click_pv
        exp_pv = res.exp_pv
        two_click_pv = res.two_click_pv
        cpc_click_pv = res.cpc_click_pv
        cpc_exp_pv = res.cpc_exp_pv
        # pid: md5 over the grouping dimensions, used as the REPLACE INTO key
        pid = hashlib.md5(
            (partition_date + device_os_type + active_type + grey_type).encode("utf8")).hexdigest()
        db = pymysql.connect(host='172.16.40.158', port=4000, user='st_user', passwd='aqpuBLYzEV7tML5RPsN1pntUzFy',
                             db='jerry_prod')
        cursor = db.cursor()
        commit_sql = """replace into meigou_data(partition_date,device_os_type,active_type,grey_type,clickpv_div_exposurepv,
        ertiaopv_div_exposurepv,ertiaopv_div_clickpv,cpcpv_div_exposurepv,click_pv,exp_pv,two_click_pv,cpc_click_pv,cpc_exp_pv,
        pid
        ) VALUES('{partition_date}','{device_os_type}','{active_type}','{grey_type}',{clickpv_div_exposurepv},
        {ertiaopv_div_exposurepv},{ertiaopv_div_clickpv},{cpcpv_div_exposurepv},{click_pv},{exp_pv},{two_click_pv},
        {cpc_click_pv},{cpc_exp_pv},'{pid}'
        );""".format(partition_date=partition_date, device_os_type=device_os_type, active_type=active_type,
                     grey_type=grey_type, clickpv_div_exposurepv=clickpv_div_exposurepv,
                     ertiaopv_div_exposurepv=ertiaopv_div_exposurepv,
                     ertiaopv_div_clickpv=ertiaopv_div_clickpv, cpcpv_div_exposurepv=cpcpv_div_exposurepv,
                     click_pv=click_pv, exp_pv=exp_pv, two_click_pv=two_click_pv, cpc_click_pv=cpc_click_pv,
                     cpc_exp_pv=cpc_exp_pv, pid=pid)
        print(commit_sql)
        # cursor.execute("set names 'UTF8'")
        res = cursor.execute(commit_sql)
        db.commit()
        print(res)
        # cursor.executemany()
        db.close()