Commit 20a228e2 authored by 张彦钊's avatar 张彦钊

change

parent f0771557
......@@ -8,111 +8,111 @@ import time
from pyspark import StorageLevel
def click(x):
def all_click(x):
    """Count welfare-card click events for the day `x` days before today.

    Runs one COUNT(*) query per click source against
    online.bl_hdfs_maidian_updates and returns a list of the form
    [date, count_source_1, ..., count_source_5, total].

    Relies on the module-level `spark` session created in __main__.
    """
    date = (datetime.date.today() - datetime.timedelta(days=x)).strftime("%Y%m%d")

    def count_events(predicate):
        # One COUNT(*) over the day's partition, filtered by `predicate`.
        query = ("select count(*) from online.bl_hdfs_maidian_updates "
                 "where partition_date='{}' {}".format(date, predicate))
        return spark.sql(query).rdd.map(lambda r: r[0]).collect()[0]

    # (label printed for the operator, SQL filter selecting that click source)
    sources = [
        ("美购首页美购列表卡片点击",
         "and action = 'goto_welfare_detail' and params['from'] = 'welfare_home_list_item'"),
        ("美券相关的美购列表页美购卡片点击",
         "and action = 'goto_welfare_detail' and params['from'] = 'coupon_welfare_list'"),
        ("新美购首页-固定ICON美购卡片点击",
         "and action = 'goto_welfare_detail' and params['from'] = 'welfare_list'"),
        ("首页-品类模块点击跳转到品类聚合美购卡片点击",
         "and action = 'goto_welfare_detail' and params['from'] = 'category'"),
        # fixed label typo: was "serach"
        ("search",
         "and action = 'search_result_welfare_click_item'"),
    ]

    total = [date]
    clicks_total = 0  # renamed from `sum`, which shadowed the builtin
    for label, predicate in sources:
        print(label)
        count = count_events(predicate)
        total.append(count)
        clicks_total += count
    total.append(clicks_total)
    print(total)
    return total
def cpc_click(x):
    """Count CPC (params['is_cpc'] = '1') welfare-card clicks for the day
    `x` days before today.

    Mirrors all_click() but restricts every query to CPC traffic; the
    search-result source is additionally restricted to app versions
    7.14.0/7.15.0. Returns [date, count_source_1, ..., count_source_5, total].

    Relies on the module-level `spark` session created in __main__.

    Fixes over the previous version:
      * the search query collected a whole list instead of the scalar
        count and its result was never added to `cpc` or the running sum;
      * `sum` shadowed the builtin (renamed to `cpc_total`);
      * ~95 lines of dead commented-out Scala removed.
    """
    date = (datetime.date.today() - datetime.timedelta(days=x)).strftime("%Y%m%d")

    def count_cpc_events(predicate):
        # One COUNT(*) over the day's partition, filtered by `predicate`
        # plus the CPC flag.
        query = ("select count(*) from online.bl_hdfs_maidian_updates "
                 "where partition_date='{}' {} "
                 "and params['is_cpc'] = '1'".format(date, predicate))
        return spark.sql(query).rdd.map(lambda r: r[0]).collect()[0]

    # (label printed for the operator, SQL filter selecting that click source)
    sources = [
        ("美购首页美购列表卡片点击",
         "and action = 'goto_welfare_detail' and params['from'] = 'welfare_home_list_item'"),
        ("美券相关的美购列表页美购卡片点击",
         "and action = 'goto_welfare_detail' and params['from'] = 'coupon_welfare_list'"),
        ("新美购首页-固定ICON美购卡片点击",
         "and action = 'goto_welfare_detail' and params['from'] = 'welfare_list'"),
        ("首页-品类模块点击跳转到品类聚合美购卡片点击",
         "and action = 'goto_welfare_detail' and params['from'] = 'category'"),
        # fixed label typo: was "serach"
        ("search",
         "and action = 'search_result_welfare_click_item' "
         "and app['version'] in ('7.15.0','7.14.0')"),
    ]

    cpc = [date]
    cpc_total = 0
    for label, predicate in sources:
        print(label)
        count = count_cpc_events(predicate)
        cpc.append(count)
        cpc_total += count
    cpc.append(cpc_total)
    print(cpc)
    return cpc
......@@ -126,7 +126,16 @@ if __name__ == '__main__':
.set("spark.driver.maxResultSize", "8g").set("spark.sql.avro.compression.codec", "snappy")
spark = SparkSession.builder.config(conf=sparkConf).enableHiveSupport().getOrCreate()
# Collect per-day click stats for the last 25 days (yesterday back to
# 25 days ago) and print the flattened results.
# NOTE: removed the stale `click(1)` call — that function was renamed
# to all_click()/cpc_click() and no longer exists.
clicks = []
cpcs = []
for days_ago in range(1, 26):
    clicks.extend(all_click(days_ago))
    cpcs.extend(cpc_click(days_ago))
print("clicks")
print(clicks)
print("cpcs")
print(cpcs)
spark.stop()
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment