# -*- coding:UTF-8 -*-
# @Time  : 2020/9/14 14:53
# @File  : meigou_huidu_huisu.py
# @email : litao@igengmei.com
# @author : litao
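"""
Backfill job ("huisu"/回溯) for the grey-release ("huidu"/灰度) experiment on service
("meigou"/美购) cards on the welfare home page.

For each of the previous days, it runs a Spark SQL query over the Hive warehouse that
splits active devices into grey / non-grey buckets (by the last hex digit of
md5(device_id)), joins precise card exposures, card clicks and effective second jumps,
filters out internal, spam, doctor, puppet and model accounts, and prints the resulting
CTR metrics per date / OS / active type / grey bucket.
"""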

import hashlib
import json

import pymysql
import xlwt
import datetime
import redis
# from pyhive import hive
from maintenance.func_send_email_with_file import send_file_email
from typing import Dict, List
from elasticsearch_7 import Elasticsearch
from elasticsearch_7.helpers import scan
import sys
import time
from pyspark import SparkConf
from pyspark.sql import SparkSession, DataFrame


# from pyspark.sql.functions import lit
# import pytispark.pytispark as pti


def con_sql(sql):
    # Fetch all rows for the given SQL statement from the doris_olap MySQL database.
    db = pymysql.connect(host='172.16.30.136', port=3306, user='doris_olap', passwd='bA27hXasdfswuolap',
                         db='doris_olap')
    cursor = db.cursor()
    cursor.execute(sql)
    result = cursor.fetchall()
    db.close()
    return result
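
# Note: con_sql is not called anywhere in this script. Example usage (hypothetical table name):
#   rows = con_sql("SELECT device_id FROM some_table LIMIT 10")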


startTime = time.time()
sparkConf = SparkConf()
sparkConf.set("spark.sql.crossJoin.enabled", True)
sparkConf.set("spark.debug.maxToStringFields", "100")
sparkConf.set("spark.tispark.plan.allow_index_double_read", False)
sparkConf.set("spark.tispark.plan.allow_index_read", True)
sparkConf.set("spark.hive.mapred.supports.subdirectories", True)
sparkConf.set("spark.hadoop.mapreduce.input.fileinputformat.input.dir.recursive", True)
sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
sparkConf.set("mapreduce.output.fileoutputformat.compress", False)
sparkConf.set("mapreduce.map.output.compress", False)
sparkConf.set("prod.gold.jdbcuri",
              "jdbc:mysql://172.16.30.136/doris_prod?user=doris&password=o5gbA27hXHHm&rewriteBatchedStatements=true")
sparkConf.set("prod.mimas.jdbcuri",
              "jdbc:mysql://172.16.30.138/mimas_prod?user=mimas&password=GJL3UJe1Ck9ggL6aKnZCq4cRvM&rewriteBatchedStatements=true")
sparkConf.set("prod.gaia.jdbcuri",
              "jdbc:mysql://172.16.30.143/zhengxing?user=work&password=BJQaT9VzDcuPBqkd&rewriteBatchedStatements=true")
sparkConf.set("prod.tidb.jdbcuri",
              "jdbc:mysql://172.16.40.158:4000/eagle?user=st_user&password=aqpuBLYzEV7tML5RPsN1pntUzFy&rewriteBatchedStatements=true")
# sparkConf.set("prod.jerry.jdbcuri",
              # "jdbc:mysql://172.16.40.158:4000/jerry_prod?user=st_user&password=aqpuBLYzEV7tML5RPsN1pntUzFy&rewriteBatchedStatements=true")
sparkConf.set("prod.tispark.pd.addresses", "172.16.40.158:2379")
sparkConf.set("prod.tispark.pd.addresses", "172.16.40.170:4000")
# sparkConf.set("prod.tidb.database", "jerry_prod")

sparkConf.setAppName("meigou_huidu_huisu")
spark = (SparkSession.builder
         .config(conf=sparkConf)
         .config("spark.sql.extensions", "org.apache.spark.sql.TiExtensions")
         .config("spark.tispark.pd.addresses", "172.16.40.170:2379")
         .appName("meigou_huidu_huisu")
         .enableHiveSupport()
         .getOrCreate())
# spark.sparkContext.setLogLevel("ERROR")

spark.sql("ADD JAR hdfs:///user/hive/share/lib/udf/brickhouse-0.7.1-SNAPSHOT.jar")
spark.sql("ADD JAR hdfs:///user/hive/share/lib/udf/hive-udf-1.0-SNAPSHOT.jar")
spark.sql("CREATE TEMPORARY FUNCTION json_map AS 'brickhouse.udf.json.JsonMapUDF'")
spark.sql("CREATE TEMPORARY FUNCTION is_json AS 'com.gmei.hive.common.udf.UDFJsonFormatCheck'")
spark.sql("CREATE TEMPORARY FUNCTION arrayMerge AS 'com.gmei.hive.common.udf.UDFArryMerge'")

task_list = []
task_days = 3
# Backfill loop: t = 1 .. task_days - 1, i.e. the previous task_days - 1 days
# (with task_days = 3: yesterday and the day before yesterday).
for t in range(1, task_days):
    day_num = -t
    now = datetime.datetime.now() + datetime.timedelta(days=day_num)
    last_30_day_str = (now + datetime.timedelta(days=-30)).strftime("%Y%m%d")  # currently unused
    today_str = now.strftime("%Y%m%d")
    yesterday_str = (now + datetime.timedelta(days=-1)).strftime("%Y%m%d")
    one_week_ago_str = (now + datetime.timedelta(days=-7)).strftime("%Y%m%d")  # currently unused

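    # The query below, over partitions [yesterday_str, today_str]:
    #   t1: active devices with OS, new/returning active type and grey/non-grey bucket
    #   t2: precise exposures of service cards on the welfare home "精选" tab (incl. CPC)
    #   t3: clicks on those cards (incl. CPC clicks)
    #   t4: effective second jumps (add/buy/message) on the service detail page
    #   a / b: devices and users to exclude (staff, spam devices, doctors, puppet/model
    #          accounts, devices that logged in to a doctor account)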
    sql_search_ctr = r"""
    SELECT
t1.partition_date as `日期`
,active_type as `活跃`
,device_os_type as `系统`
,grey_type as `灰度类型`
,round(NVL(sum(click_pv),0)/NVL(sum(exp_pv),0)*100,2) as `卡片点击pv/卡片精准曝光pv(%)`
,round(NVL(sum(two_click_pv),0)/NVL(sum(exp_pv),0)*100,2) as `有效二跳pv/卡片精准曝光pv(%)`
,round(NVL(sum(two_click_pv),0)/NVL(sum(click_pv),0)*100,2) as `有效二跳pv/卡片点击pv(%)`
,round(NVL(sum(cpc_exp_pv),0)/NVL(sum(exp_pv),0)*100,2) as `cpc卡片曝光pv/卡片精准曝光pv(%)`
,NVL(sum(click_pv),0) as `卡片点击pv`
,NVL(sum(exp_pv),0) as `卡片曝光pv`
,NVL(sum(two_click_pv),0) as `有效二跳pv`
,NVL(sum(cpc_click_pv),0) as `cpc卡片点击pv`
,NVL(sum(cpc_exp_pv),0) as `cpc卡片曝光pv`
FROM
(
    SELECT partition_date
         ,device_os_type
         ,CASE WHEN active_type = '4'  THEN '老活'
               WHEN active_type  IN ('1','2')  THEN '新增' END AS active_type
         ,device_id
         ,CASE WHEN substr(md5(device_id),-1) in ('0','1','2','3','4','5','6','7') THEN '灰度' ELSE '非灰' END AS grey_type 
    FROM online.ml_device_day_active_status
    WHERE partition_date>={start_day}
    AND partition_date<= {partition_day}
    AND active_type IN ('1','2','4')
)t1
JOIN
(--precise exposures
    SELECT cl_id,partition_date,card_id,count(1) as exp_pv,count(CASE WHEN get_json_object(exposure_card, '$.is_cpc')=1 THEN 1 END) as cpc_exp_pv
    FROM online.ml_community_precise_exposure_detail
    WHERE partition_date>={start_day}
    AND partition_date<= {partition_day}
    AND action in ('page_precise_exposure','home_choiceness_card_exposure') --since app version 7745 the action is page_precise_exposure
    AND page_name in('welfare_home')
    AND tab_name in ('精选')
    AND card_content_type ='service'
    and (get_json_object(exposure_card,'$.in_page_pos')='' or get_json_object(exposure_card,'$.in_page_pos') is null)
    group by partition_date,cl_id,card_id
)t2
on t1.device_id=t2.cl_id and t1.partition_date=t2.partition_date
LEFT JOIN
(--card clicks
    SELECT  cl_id,partition_date,params['card_id'] as card_id,count(1) as click_pv,count(CASE WHEN params['is_cpc']=1 THEN 1 END) as cpc_click_pv
    FROM online.bl_hdfs_maidian_updates
    WHERE partition_date>={start_day}
    AND partition_date<= {partition_day}
    AND action='on_click_card'
    AND params['tab_name']='精选'
    AND params['page_name'] ='welfare_home'
    AND params['card_content_type'] ='service'
    GROUP BY cl_id,partition_date,params['card_id']
)t3
on t2.partition_date=t3.partition_date
and t2.cl_id=t3.cl_id
and t2.card_id=t3.card_id
LEFT JOIN
(--effective second jump on the service detail page
    SELECT cl_id,partition_date,params['service_id'] as service_id,count(1) as two_click_pv
    FROM online.bl_hdfs_maidian_updates 
    WHERE partition_date>={start_day}
    AND partition_date<= {partition_day}
    AND (referrer in ('welfare_home') 
            or (params['referrer_link'] like '%[%' and json_split(params['referrer_link'])[size(json_split(params['referrer_link']))-1] in ('welfare_home')))
    AND ((action in ('welfare_multiattribute_click_add','welfare_multiattribute_click_buy') AND page_name = 'welfare_detail') 
            or action = 'welfare_detail_click_message') 
    GROUP BY cl_id,partition_date,params['service_id']
)t4
on t3.partition_date=t4.partition_date
and t3.cl_id=t4.cl_id
and t3.card_id=t4.service_id
LEFT JOIN
(
    SELECT distinct device_id
    FROM dim.dim_device_user_staff   --exclude internal staff users
    
    UNION ALL
    SELECT device_id
    FROM ml.ml_d_ct_dv_devicespam_d   --exclude spam devices
    WHERE partition_day={partition_day}
)a 
    on t1.device_id=a.device_id
LEFT JOIN
(
    SELECT partition_date,device_id
    FROM 
    (--first device_id that each user_id was active on that day
        SELECT user_id,partition_date,
                if(size(device_list) > 0, device_list [ 0 ], '') AS device_id
          FROM online.ml_user_updates
          WHERE partition_date>={start_day}
          AND partition_date<= {partition_day}
    )t1
    JOIN
    (  --doctor accounts
        SELECT distinct user_id
        FROM online.tl_hdfs_doctor_view
        WHERE partition_date = {partition_day}

        --puppet accounts / model users
        UNION ALL
        SELECT user_id
        FROM ml.ml_c_ct_ui_user_dimen_d
        WHERE partition_day = {partition_day}
        AND (is_puppet = 'true' or is_classifyuser = 'true')

        UNION ALL
        --company intranet users
        select distinct user_id
        from dim.dim_device_user_staff

        UNION ALL
        --devices that have logged in to a doctor account
        SELECT distinct t1.user_id
        FROM
        (
            SELECT user_id, v.device_id as device_id
            FROM online.ml_user_history_detail
                LATERAL VIEW EXPLODE(device_history_list) v AS device_id
            WHERE partition_date = {partition_day}
        )t1
        JOIN
        (
            SELECT device_id
            FROM online.ml_device_history_detail
            WHERE partition_date = {partition_day}
            AND is_login_doctor = '1'
        )t2
            ON t1.device_id = t2.device_id
    )t2
    on t1.user_id=t2.user_id
    group by partition_date,device_id
)b
on t1.partition_date=b.partition_date and t1.device_id=b.device_id
where (a.device_id is NULL or a.device_id ='')
and (b.device_id is null or b.device_id ='')
GROUP BY t1.partition_date 
,grey_type,active_type,device_os_type
order by 1
    """.format(partition_day=today_str,start_day=yesterday_str)

    print(sql_search_ctr)
    search_ctr_df = spark.sql(sql_search_ctr)
    # spam_pv_df.createOrReplaceTempView("dev_view")
    search_ctr_df.show(1)
    sql_res = search_ctr_df.collect()
    for res in sql_res:
        print(res)
        task_list.append(res)
print(task_list)
# cursor.executemany()
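
# Results are only printed; the commented-out executemany() call above suggests a MySQL
# write-back was planned but is not implemented here. Release the Spark session when done.
spark.stop()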