# -*- coding:UTF-8 -*-
# @Time  : 2020/9/15 13:37
# @File  : meigou_data.py
# @email : litao@igengmei.com
# @author : litao
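#
# Daily job: computes funnel metrics for welfare ("meigou") service cards on the
# welfare home page (precise exposure PV, click PV, valid second-jump PV and their
# CPC counterparts) from Hive event logs via Spark SQL, then upserts the aggregates
# into the TiDB table jerry_prod.meigou_data.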

import datetime
import hashlib
import time

import pymysql
# from pyhive import hive
from pyspark import SparkConf
from pyspark.sql import SparkSession


# from pyspark.sql.functions import lit
# import pytispark.pytispark as pti


def con_sql(sql):
    """Run a query against TiDB (jerry_prod) and return all rows."""
    db = pymysql.connect(host='172.16.40.158', port=4000, user='st_user', passwd='aqpuBLYzEV7tML5RPsN1pntUzFy',
                         db='jerry_prod')
    cursor = db.cursor()
    cursor.execute(sql)
    result = cursor.fetchall()
    db.close()
    return result


startTime = time.time()
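# Spark / TiSpark configuration. The prod.* keys are custom entries that carry the
# JDBC URIs of the production databases alongside the standard Spark settings.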
sparkConf = SparkConf()
sparkConf.set("spark.sql.crossJoin.enabled", True)
sparkConf.set("spark.debug.maxToStringFields", "100")
sparkConf.set("spark.tispark.plan.allow_index_double_read", False)
sparkConf.set("spark.tispark.plan.allow_index_read", True)
sparkConf.set("spark.hive.mapred.supports.subdirectories", True)
sparkConf.set("spark.hadoop.mapreduce.input.fileinputformat.input.dir.recursive", True)
sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
sparkConf.set("mapreduce.output.fileoutputformat.compress", False)
sparkConf.set("mapreduce.map.output.compress", False)
sparkConf.set("prod.gold.jdbcuri",
              "jdbc:mysql://172.16.30.136/doris_prod?user=doris&password=o5gbA27hXHHm&rewriteBatchedStatements=true")
sparkConf.set("prod.mimas.jdbcuri",
              "jdbc:mysql://172.16.30.138/mimas_prod?user=mimas&password=GJL3UJe1Ck9ggL6aKnZCq4cRvM&rewriteBatchedStatements=true")
sparkConf.set("prod.gaia.jdbcuri",
              "jdbc:mysql://172.16.30.143/zhengxing?user=work&password=BJQaT9VzDcuPBqkd&rewriteBatchedStatements=true")
sparkConf.set("prod.tidb.jdbcuri",
              "jdbc:mysql://172.16.40.158:4000/eagle?user=st_user&password=aqpuBLYzEV7tML5RPsN1pntUzFy&rewriteBatchedStatements=true")
sparkConf.set("prod.jerry.jdbcuri",
              "jdbc:mysql://172.16.40.158:4000/jerry_prod?user=st_user&password=aqpuBLYzEV7tML5RPsN1pntUzFy&rewriteBatchedStatements=true")
sparkConf.set("prod.tispark.pd.addresses", "172.16.40.158:2379")
sparkConf.set("prod.tispark.pd.addresses", "172.16.40.170:4000")
sparkConf.set("prod.tidb.database", "jerry_prod")

sparkConf.setAppName("meigou_data")
spark = (SparkSession.builder.config(conf=sparkConf).config("spark.sql.extensions", "org.apache.spark.sql.TiExtensions")
         .config("spark.tispark.pd.addresses", "172.16.40.170:2379").appName(
    "meigou_data ").enableHiveSupport().getOrCreate())
# spark.sparkContext.setLogLevel("ERROR")

spark.sql("ADD JAR hdfs:///user/hive/share/lib/udf/brickhouse-0.7.1-SNAPSHOT.jar")
spark.sql("ADD JAR hdfs:///user/hive/share/lib/udf/hive-udf-1.0-SNAPSHOT.jar")
spark.sql("CREATE TEMPORARY FUNCTION json_map AS 'brickhouse.udf.json.JsonMapUDF'")
spark.sql("CREATE TEMPORARY FUNCTION is_json AS 'com.gmei.hive.common.udf.UDFJsonFormatCheck'")
spark.sql("CREATE TEMPORARY FUNCTION arrayMerge AS 'com.gmei.hive.common.udf.UDFArryMerge'")

task_list = []
task_days = 3
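# Recompute the metrics for each of the last `task_days` days (today and the two preceding days).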
for t in range(task_days):
    now = datetime.datetime.now() - datetime.timedelta(days=t)
    last_30_day_str = (now + datetime.timedelta(days=-30)).strftime("%Y%m%d")
    today_str = now.strftime("%Y%m%d")
    yesterday_str = (now + datetime.timedelta(days=-1)).strftime("%Y%m%d")
    one_week_ago_str = (now + datetime.timedelta(days=-7)).strftime("%Y%m%d")
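    # The query below joins daily active devices (t1) with precise card exposures on the
    # welfare home page (t2), card clicks (t3) and second-jump actions on the service
    # detail page (t4), filters out abnormal devices (b), and aggregates PV counts and
    # ratios per partition_date, active_type, device_os_type and grey_type.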

    sql_search_ctr = r"""
SELECT
t1.partition_date as partition_date
,active_type as active_type
,device_os_type as device_os_type
,grey_type as grey_type
,round(NVL(sum(click_pv),0)/NVL(sum(exp_pv),0),4) as clickpv_div_exposurepv -- card click PV / precise card exposure PV (%)
,round(NVL(sum(two_click_pv),0)/NVL(sum(exp_pv),0),4) as ertiaopv_div_exposurepv -- valid second-jump PV / precise card exposure PV (%)
,round(NVL(sum(two_click_pv),0)/NVL(sum(click_pv),0),4) as ertiaopv_div_clickpv -- valid second-jump PV / card click PV (%)
,round(NVL(sum(cpc_exp_pv),0)/NVL(sum(exp_pv),0),4) as cpcpv_div_exposurepv -- CPC card exposure PV / precise card exposure PV (%)
,NVL(sum(click_pv),0) as click_pv -- card click PV
,NVL(sum(exp_pv),0) as exp_pv -- card exposure PV
,NVL(sum(two_click_pv),0) as two_click_pv -- valid second-jump PV
,NVL(sum(cpc_click_pv),0) as cpc_click_pv -- CPC card click PV
,NVL(sum(cpc_exp_pv),0) as cpc_exp_pv -- CPC card exposure PV

FROM
(
    SELECT partition_date
         ,device_os_type
         ,CASE WHEN active_type = '4'  THEN '老活'                              -- '老活' = returning active
               WHEN active_type  IN ('1','2')  THEN '新增' END AS active_type   -- '新增' = newly added
         ,device_id
         ,CASE WHEN substr(md5(device_id),-1) in ('0','1','2','3','4','5','6','7') THEN '灰度' ELSE '非灰' END AS grey_type  -- '灰度' = grey release, '非灰' = not grey
    FROM online.ml_device_day_active_status
    WHERE partition_date >= '{yesterday_str}'
    AND partition_date <= '{partition_date}'
    AND active_type IN ('1','2','4')
)t1
JOIN
(-- precise exposure
    SELECT cl_id,partition_date,card_id,count(1) as exp_pv,count(CASE WHEN get_json_object(exposure_card, '$.is_cpc')=1 THEN 1 END) as cpc_exp_pv
    FROM online.ml_community_precise_exposure_detail
    WHERE partition_date >= '{yesterday_str}'
    AND partition_date <= '{partition_date}'
    AND action in ('page_precise_exposure','home_choiceness_card_exposure') -- since version 7745 the action is page_precise_exposure
    AND page_name in('welfare_home')
    AND tab_name in ('精选')
    AND card_content_type ='service'
    and (get_json_object(exposure_card,'$.in_page_pos')='' or get_json_object(exposure_card,'$.in_page_pos') is null)
    group by partition_date,cl_id,card_id
)t2
on t1.device_id=t2.cl_id and t1.partition_date=t2.partition_date
LEFT JOIN
(-- card clicks
    SELECT  cl_id,partition_date,params['card_id'] as card_id,count(1) as click_pv,count(CASE WHEN params['is_cpc']=1 THEN 1 END) as cpc_click_pv
    FROM online.bl_hdfs_maidian_updates
    WHERE partition_date >= '{yesterday_str}'
    AND partition_date <= '{partition_date}'
    AND action='on_click_card'
    AND params['tab_name']='精选'
    AND params['page_name'] ='welfare_home'
    AND params['card_content_type'] ='service'
    GROUP BY cl_id,partition_date,params['card_id']
)t3
on t2.partition_date=t3.partition_date
and t2.cl_id=t3.cl_id
and t2.card_id=t3.card_id
LEFT JOIN
(-- second jump on the service (welfare) detail page
    SELECT cl_id,partition_date,params['service_id'] as service_id,count(1) as two_click_pv
    FROM online.bl_hdfs_maidian_updates
    WHERE partition_date >= '{yesterday_str}'
    AND partition_date <= '{partition_date}'
    AND (referrer in ('welfare_home') 
            or (params['referrer_link'] like '%[%' and json_split(params['referrer_link'])[size(json_split(params['referrer_link']))-1] in ('welfare_home')))
    AND ((action in ('welfare_multiattribute_click_add','welfare_multiattribute_click_buy') AND page_name = 'welfare_detail') 
            or action = 'welfare_detail_click_message') 
    GROUP BY cl_id,partition_date,params['service_id']
)t4
on t3.partition_date=t4.partition_date
and t3.cl_id=t4.cl_id
and t3.card_id=t4.service_id
LEFT JOIN
(
    select distinct device_id
    from ML.ML_D_CT_DV_DEVICECLEAN_DIMEN_D
    where PARTITION_DAY = '{start_date}'
    AND is_abnormal_device = 'true'
)b
on  t1.device_id=b.device_id
where b.device_id is null or b.device_id =''
GROUP BY t1.partition_date 
,grey_type,active_type,device_os_type
order by 1
   
    """.format(partition_date=today_str, yesterday_str=yesterday_str)

    print(sql_search_ctr)
    search_ctr_df = spark.sql(sql_search_ctr)
    # spam_pv_df.createOrReplaceTempView("dev_view")
    search_ctr_df.show(1)
    sql_res = search_ctr_df.collect()
    for res in sql_res:
        # print(res)
        partition_date = res.partition_date
        device_os_type = res.device_os_type
        active_type = res.active_type
        grey_type = res.grey_type
        clickpv_div_exposurepv = res.clickpv_div_exposurepv
        ertiaopv_div_exposurepv = res.ertiaopv_div_exposurepv
        ertiaopv_div_clickpv = res.ertiaopv_div_clickpv
        cpcpv_div_exposurepv = res.cpcpv_div_exposurepv
        click_pv = res.click_pv
        exp_pv = res.exp_pv
        two_click_pv = res.two_click_pv
        cpc_click_pv = res.cpc_click_pv
        cpc_exp_pv = res.cpc_exp_pv
        pid = hashlib.md5(
            (partition_date + device_os_type + active_type + grey_type).encode("utf8")).hexdigest()

        # Upsert one row per (partition_date, device_os_type, active_type, grey_type).
        # A parameterized query lets pymysql handle quoting and NULLs (the ratio
        # columns can be NULL when a denominator sums to 0).
        db = pymysql.connect(host='172.16.40.158', port=4000, user='st_user', passwd='aqpuBLYzEV7tML5RPsN1pntUzFy',
                             db='jerry_prod')
        cursor = db.cursor()
        commit_sql = """replace into meigou_data(partition_date,device_os_type,active_type,grey_type,clickpv_div_exposurepv,
        ertiaopv_div_exposurepv,ertiaopv_div_clickpv,cpcpv_div_exposurepv,click_pv,exp_pv,two_click_pv,cpc_click_pv,cpc_exp_pv,
        pid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"""
        values = (partition_date, device_os_type, active_type, grey_type,
                  clickpv_div_exposurepv, ertiaopv_div_exposurepv, ertiaopv_div_clickpv,
                  cpcpv_div_exposurepv, click_pv, exp_pv, two_click_pv, cpc_click_pv,
                  cpc_exp_pv, pid)
        print(commit_sql, values)
        affected_rows = cursor.execute(commit_sql, values)
        db.commit()
        print(affected_rows)
        db.close()  # close the per-row connection so connections are not leaked across iterations