Commit 28b30cc0 authored by 张彦钊

Merge branch 'master' of git.wanmeizhensuo.com:ML/ffm-baseline

change predict
parents 359518f7 da292eb3
@@ -182,3 +182,118 @@ object temp_analysis {
  }
}
object ARPU_COM {

  Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
  Logger.getLogger("org.apache.eclipse.jetty.server").setLevel(Level.OFF)

  case class Params(env: String = "dev",
                    date: String = "2018-08-01"
                   ) extends AbstractParams[Params] with Serializable

  val defaultParams = Params()

  val parser = new OptionParser[Params]("Feed_EDA") {
    head("WeafareStat")
    opt[String]("env")
      .text("the database environment to use")
      .action((x, c) => c.copy(env = x))
    opt[String]("date")
      .text("the date to use")
      .action((x, c) => c.copy(date = x))
    note(
      """
        |For example, the following command runs this app on a tidb dataset:
        |
        | spark-submit --class com.gmei.WeafareStat ./target/scala-2.11/feededa-assembly-0.1.jar \
      """.stripMargin +
        s"| --env ${defaultParams.env}"
    )
  }

  def main(args: Array[String]): Unit = {
    parser.parse(args, defaultParams).map { param =>
      GmeiConfig.setup(param.env)
      val spark_env = GmeiConfig.getSparkSession()
      val sc = spark_env._2

      // Map the TiDB tables this job reads into Spark SQL.
      val ti = new TiContext(sc)
      ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
      ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
      ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
      ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
      ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
      ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")

      import sc.implicits._

      // Yesterday's date, e.g. "2018-12-18" -> partition key "20181218".
      val stat_date = GmeiConfig.getMinusNDate(1)
      //println(param.date)
      val partition_date = stat_date.replace("-", "")

      // Devices flagged as spam traffic (hospital PV ratio >= 0.95).
      val agency_id = sc.sql(
        s"""
           |SELECT DISTINCT(cl_id) as device_id
           |FROM online.ml_hospital_spam_pv_day
           |WHERE partition_date >= '20180402'
           |AND partition_date <= '${partition_date}'
           |AND pv_ratio >= 0.95
           |UNION ALL
           |SELECT DISTINCT(cl_id) as device_id
           |FROM online.ml_hospital_spam_pv_month
           |WHERE partition_date >= '20171101'
           |AND partition_date <= '${partition_date}'
           |AND pv_ratio >= 0.95
         """.stripMargin
      )
      agency_id.createOrReplaceTempView("agency_id")

      // Devices on the manual blacklist.
      val blacklist_id = sc.sql(
        s"""
           |SELECT device_id
           |from blacklist
         """.stripMargin
      )
      blacklist_id.createOrReplaceTempView("blacklist_id")

      // Union of spam devices and blacklisted devices to exclude.
      val final_id = sc.sql(
        s"""
           |select device_id
           |from agency_id
           |UNION ALL
           |select device_id
           |from blacklist_id
         """.stripMargin
      )
      final_id.createOrReplaceTempView("final_id")

      // ARPU-style stats on paid orders: total payments, distinct paying
      // devices, and payments per paying device. Devices in final_id are
      // excluded via LEFT JOIN ... WHERE final_id.device_id IS NULL (anti-join).
      val diary_clk_all = sc.sql(
        s"""
           |select count(md.payment) as pay_all,
           |       count(distinct(md.device_id)) as pay_people,
           |       count(md.payment)/count(distinct(md.device_id))
           |from online.ml_meigou_order_detail md left join final_id
           |on md.device_id = final_id.device_id
           |where md.status='2'
           |and final_id.device_id is null
           |and md.partition_date = '20181218'
           |and md.pay_time is not null
           |and md.pay_time >= '2018-01-01'
         """.stripMargin
      )
      diary_clk_all.show(80)

    }
  }
}
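Side note (not part of the commit): the exclusion of spam and blacklisted devices above is the classic LEFT JOIN + IS NULL anti-join. A minimal sketch of the same pattern with the DataFrame API, assuming the same SparkSession `sc` and the `final_id` temp view registered above; column and table names are taken from the query, everything else is illustrative:

    import org.apache.spark.sql.functions.{count, countDistinct}

    // Orders and the devices to exclude, read from the same sources as above.
    val orders = sc.table("online.ml_meigou_order_detail")
    val excluded = sc.table("final_id")

    // "left_anti" keeps only orders whose device_id does NOT appear in
    // `excluded`, equivalent to LEFT JOIN ... WHERE excluded.device_id IS NULL.
    val cleanOrders = orders.join(excluded, Seq("device_id"), "left_anti")

    cleanOrders
      .where("status = '2' and pay_time is not null")
      .agg(
        count("payment").as("pay_all"),
        countDistinct("device_id").as("pay_people")
      )
      .show()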
@@ -66,13 +66,13 @@ object testt {
         |SELECT DISTINCT(cl_id) as device_id
         |FROM online.ml_hospital_spam_pv_day
         |WHERE partition_date >= '20180402'
-        |AND partition_date <= '20181203'
+        |AND partition_date <= '${partition_date}'
         |AND pv_ratio >= 0.95
         |UNION ALL
         |SELECT DISTINCT(cl_id) as device_id
         |FROM online.ml_hospital_spam_pv_month
         |WHERE partition_date >= '20171101'
-        |AND partition_date <= '20181203'
+        |AND partition_date <= '${partition_date}'
         |AND pv_ratio >= 0.95
       """.stripMargin
     )
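This hunk replaces the hardcoded upper bound '20181203' with the dynamic `${partition_date}`, so testt filters up to the most recent partition instead of a fixed day. The commit derives that value from GmeiConfig.getMinusNDate(1), as in ARPU_COM above; the helper below is only a hypothetical sketch of how such a "N days ago" partition key could be computed, not the actual GmeiConfig source:

    import java.time.LocalDate
    import java.time.format.DateTimeFormatter

    // Hypothetical helper: "yyyy-MM-dd" string for n days before today.
    def minusNDate(n: Int): String =
      LocalDate.now().minusDays(n).format(DateTimeFormatter.ofPattern("yyyy-MM-dd"))

    val stat_date = minusNDate(1)                    // e.g. "2018-12-18"
    val partition_date = stat_date.replace("-", "")  // e.g. "20181218", matches the partition key format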
...