Commit 98da90ee authored by 张彦钊

Merge branch 'master' of git.wanmeizhensuo.com:ML/ffm-baseline

Add fetching of the city list
parents 0027e2d0 9715983a
......@@ -34,7 +34,7 @@ object GmeiConfig extends Serializable {
val sparkConf = new SparkConf
sparkConf.set("spark.sql.crossJoin.enabled", "true")
sparkConf.set("spark.debug.maxToStringFields", "100")
sparkConf.set("spark.sql.broadcastTimeout", "1000")
sparkConf.set("spark.sql.broadcastTimeout", "6000")
if (!sparkConf.contains("spark.master")) {
sparkConf.setMaster("local[3]")
......
......@@ -13,8 +13,7 @@ object Recommendation_strategy_all {
Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
Logger.getLogger("org.apache.eclipse.jetty.server").setLevel(Level.OFF)
case class Params(env: String = "dev",
date: String = "2018-08-01"
case class Params(env: String = "dev"
) extends AbstractParams[Params] with Serializable
val defaultParams = Params()
......@@ -24,9 +23,6 @@ object Recommendation_strategy_all {
opt[String]("env")
.text(s"the databases environment you used")
.action((x, c) => c.copy(env = x))
opt[String] ("date")
.text(s"the date you used")
.action((x,c) => c.copy(date = x))
note(
"""
|For example, the following command runs this app on a tidb dataset:
......@@ -54,6 +50,7 @@ object Recommendation_strategy_all {
import sc.implicits._
val stat_date = GmeiConfig.getMinusNDate(1)
println(stat_date)
//println(param.date)
val partition_date = stat_date.replace("-","")
val decive_id_oldUser = sc.sql(
......
......@@ -264,9 +264,61 @@ object NdDataInput {
tidb_input.show()
println(tidb_input.count())
}
}
}
object ServiceStat {
Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
Logger.getLogger("org.apache.eclipse.jetty.server").setLevel(Level.OFF)
case class Params(env: String = "dev"
) extends AbstractParams[Params] with Serializable
val defaultParams = Params()
val parser = new OptionParser[Params]("Feed_EDA") {
head("WeafareStat")
opt[String]("env")
.text(s"the databases environment you used")
.action((x, c) => c.copy(env = x))
note("winter is coming")
}
def main(args: Array[String]): Unit = {
parser.parse(args, defaultParams).map { param =>
GmeiConfig.setup(param.env)
val spark_env = GmeiConfig.getSparkSession()
val sc = spark_env._2
val ti = new TiContext(sc)
ti.tidbMapTable(dbName = "jerry_prod", tableName = "nd_data_meigou_cid")
ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
ti.tidbMapTable(dbName = "eagle", tableName = "feed_diary_boost")
val result00 = sc.sql(
s"""
|select a.cl_id as device_id,
|COALESCE(a.params['diary_id'], a.params['business_id'], 0) as diary_id,
|c.level1_id as level1_id
|from online.tl_hdfs_maidian_view a
|left join online.tl_hdfs_diary_tags_view b on COALESCE(a.params['diary_id'], a.params['business_id'], 0)=b.diary_id
|left join online.bl_tag_hierarchy_detail c on b.tag_id=c.id
|where a.partition_date > "20181112"
|and a.action="on_click_diary_card"
|and a.params["page_name"]="home"
|and a.cl_id != "NULL"
|and b.partition_date="20181119"
|and c.partition_date="20181119"
""".stripMargin
)
result00.collect.foreach(println)
}
}
}
}
\ No newline at end of file
This diff is collapsed.
......@@ -179,3 +179,174 @@ object strategy_other {
}
}
//The following section computes evaluation metrics for the recommendation system
//Information entropy describes how well the system surfaces long-tail, high-quality items (diaries)
//The Gini coefficient describes whether diary recommendations show a Matthew (rich-get-richer) effect
//(a standalone sketch of both metrics follows this object)
object evaluation_indicator_ {
Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
Logger.getLogger("org.apache.eclipse.jetty.server").setLevel(Level.OFF)
case class Params(env: String = "dev",
date: String = "2018-08-01"
) extends AbstractParams[Params] with Serializable
val defaultParams = Params()
val parser = new OptionParser[Params]("Feed_EDA") {
head("WeafareStat")
opt[String]("env")
.text(s"the databases environment you used")
.action((x, c) => c.copy(env = x))
opt[String] ("date")
.text(s"the date you used")
.action((x,c) => c.copy(date = x))
note(
"""
|For example, the following command runs this app on a tidb dataset:
|
| spark-submit --class com.gmei.WeafareStat ./target/scala-2.11/feededa-assembly-0.1.jar \
""".stripMargin +
s"| --env ${defaultParams.env}"
)
}
def main(args: Array[String]): Unit = {
parser.parse(args, defaultParams).map { param =>
GmeiConfig.setup(param.env)
val spark_env = GmeiConfig.getSparkSession()
val sc = spark_env._2
import sc.implicits._
//val stat_date = GmeiConfig.getMinusNDate(1)
//println(param.date)
val partition_date = param.date.replace("-","")
val device_id_oldUser = sc.sql(
s"""
|select distinct(device_id) as device_id
|from online.ml_device_day_active_status
|where active_type = '4'
|and first_channel_source_type not in ('yqxiu1','yqxiu2','yqxiu3','yqxiu4','yqxiu5','mxyc1','mxyc2','mxyc3'
| ,'wanpu','jinshan','jx','maimai','zhuoyi','huatian','suopingjingling','mocha','mizhe','meika','lamabang'
| ,'js-az1','js-az2','js-az3','js-az4','js-az5','jfq-az1','jfq-az2','jfq-az3','jfq-az4','jfq-az5','toufang1'
| ,'toufang2','toufang3','toufang4','toufang5','toufang6','TF-toufang1','TF-toufang2','TF-toufang3','TF-toufang4'
| ,'TF-toufang5','tf-toufang1','tf-toufang2','tf-toufang3','tf-toufang4','tf-toufang5','benzhan','promotion_aso100'
| ,'promotion_qianka','promotion_xiaoyu','promotion_dianru','promotion_malioaso','promotion_malioaso-shequ'
| ,'promotion_shike','promotion_julang_jl03')
|and partition_date ='${partition_date}'
""".stripMargin
)
device_id_oldUser.show()
device_id_oldUser.createOrReplaceTempView("device_id_old")
//Diary click count among clicking users whose device_id ends in 1
val clk_active_1 = sc.sql(
s"""
|select '${param.date}' as stat_date, count(jd.cid_id) as clk_active_1
|from data_feed_click jd inner join device_id_old
|on jd.device_id = device_id_old.device_id
|where (jd.cid_type = 'diary' or jd.cid_type = 'diary_video')
|and jd.device_id regexp '1$$'
|and jd.device_id not in (select device_id from bl_device_list)
|and jd.device_id not in (select device_id from blacklist)
|and jd.stat_date ='${param.date}'
""".stripMargin
)
//Diary exposure count among clicking users whose device_id ends in 1
val imp_active_1 = sc.sql(
s"""
|select '${param.date}' as stat_date, count(je.cid_id) as imp_active_1
|from data_feed_exposure je inner join device_id_old
|on je.device_id = device_id_old.device_id
|where je.cid_type = 'diary'
|and je.device_id in (select distinct(device_id) from data_feed_click where device_id regexp '1$$' and stat_date = '${param.date}')
|and je.device_id not in (select device_id from bl_device_list)
|and je.device_id not in (select device_id from blacklist)
|and je.stat_date ='${param.date}'
""".stripMargin
)
//Number of users with device_id ending in 1 who clicked diaries
val clk_diary_device = sc.sql(
s"""
|select '${param.date}' as stat_date, count(distinct(jd.device_id)) as clk_diary_device
|from data_feed_click jd inner join device_id_old
|on jd.device_id = device_id_old.device_id
|where (jd.cid_type = 'diary' or jd.cid_type = 'diary_video')
|and jd.device_id regexp '1$$'
|and jd.device_id not in (select device_id from bl_device_list)
|and jd.device_id not in (select device_id from blacklist)
|and jd.stat_date ='${param.date}'
""".stripMargin
)
//Diary click count among all clicking users
val clk_active_all = sc.sql(
s"""
|select '${param.date}' as stat_date, count(jd.cid_id) as clk_active_all
|from data_feed_click jd inner join device_id_old
|on jd.device_id = device_id_old.device_id
|where (jd.cid_type = 'diary' or jd.cid_type = 'diary_video')
|and jd.device_id not in (select device_id from bl_device_list)
|and jd.device_id not in (select device_id from blacklist)
|and jd.stat_date ='${param.date}'
""".stripMargin
)
//Diary exposure count among all clicking users
val imp_active_all = sc.sql(
s"""
|select '${param.date}' as stat_date, count(je.cid_id) as imp_active_all
|from data_feed_exposure je inner join device_id_old
|on je.device_id = device_id_old.device_id
|where je.cid_type = 'diary'
|and je.device_id in (select distinct(device_id) from data_feed_click where stat_date = '${param.date}')
|and je.device_id not in (select device_id from bl_device_list)
|and je.device_id not in (select device_id from blacklist)
|and je.stat_date ='${param.date}'
""".stripMargin
)
//Number of strategy-covered users who clicked diaries
val clk_diary_device_cover = sc.sql(
s"""
|select '${param.date}' as stat_date,count(distinct(device_id)) as clk_diary_device_cover
|from merge_queue_table
|where device_id in (select distinct(device_id) from data_feed_click where stat_date = '${param.date}')
""".stripMargin
)
//Total number of strategy-covered users
val device_all_cover = sc.sql(
s"""
|select '${param.date}' as stat_date,count(distinct(device_id)) as device_all_cover
|from merge_queue_table
""".stripMargin
)
val result = clk_active_1.join(imp_active_1,"stat_date")
.join(clk_active_all,"stat_date")
.join(imp_active_all,"stat_date")
.join(clk_diary_device,"stat_date")
.join(clk_diary_device_cover,"stat_date")
.join(device_all_cover,"stat_date")
result.show()
GmeiConfig.writeToJDBCTable(result, "strategy_other", SaveMode.Append)
}
}
}
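The comment block above this object promises information entropy and a Gini coefficient, but the hunk itself only assembles click and impression counts. Below is a minimal sketch of how the two metrics could be computed; it is not part of this commit, and it assumes a hypothetical DataFrame diaryClicks with columns (cid_id, clicks) aggregated from data_feed_click:

import org.apache.spark.sql.DataFrame

//Hypothetical helper, not in this repository: computes information entropy and
//the Gini coefficient over the click distribution of diaries.
def entropyAndGini(diaryClicks: DataFrame): (Double, Double) = {
  val counts = diaryClicks.select("clicks").collect().map(_.getLong(0).toDouble)
  val total = counts.sum
  val p = counts.map(_ / total)
  //Higher entropy = clicks spread across more long-tail diaries.
  val entropy = -p.filter(_ > 0).map(x => x * math.log(x)).sum
  //Gini closer to 1 = clicks concentrate on a few head diaries (Matthew effect).
  val sorted = p.sorted
  val n = sorted.length
  val gini = sorted.zipWithIndex.map { case (x, i) => (2.0 * (i + 1) - n - 1) * x }.sum / n
  (entropy, gini)
}

Under the same assumption, the input could be built with sc.sql("select cid_id, count(*) as clicks from data_feed_click group by cid_id") and passed straight to entropyAndGini.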
......@@ -54,7 +54,6 @@ object testt {
|from online.tl_hdfs_maidian_view
|where action="page_view"
|and params["page_name"]="diary_detail"
|and (params["out"]-params["in"])<7200
|and partition_date >='20180901'
""".stripMargin
)
......@@ -71,9 +70,6 @@ object testt {
}
}
......