Commit 3e1a1860 authored by 张彦钊

Merge branch 'master' of git.wanmeizhensuo.com:ML/ffm-baseline

Modify SQL
parents 37bd4e5e 802f65c4
@@ -3,7 +3,7 @@ import java.io.{File, PrintWriter, Serializable}
 import com.gmei.lib.AbstractParams
 import org.apache.log4j.{Level, Logger}
-import org.apache.spark.sql.{DataFrame, SaveMode, TiContext}
+import org.apache.spark.sql.{DataFrame, SaveMode}
 import scopt.OptionParser
@@ -47,9 +47,11 @@ object Data2FFM {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_test", tableName = "esmm_train_data")
-    ti.tidbMapTable(dbName = "jerry_test", tableName = "esmm_pre_data")
+    sc.sql("use jerry_test")
+
+    // val ti = new TiContext(sc)
+    // ti.tidbMapTable(dbName = "jerry_test", tableName = "esmm_train_data")
+    // ti.tidbMapTable(dbName = "jerry_test", tableName = "esmm_pre_data")
     val train_sep_date = GmeiConfig.getMinusNDate(10)
...
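This commit repeats one pattern across files: the TiSpark TiContext bootstrap (explicit tidbMapTable calls per table) is commented out and replaced by a single `use <database>` statement. Below is a minimal sketch of the resulting access pattern, assuming the TiDB tables are already exposed through the catalog the SparkSession sees (true of TiSpark 2.x, which drops per-table mapping); the builder stands in for the repo's GmeiConfig.getSparkSession() helper and is not the repo's actual code.

    import org.apache.spark.sql.SparkSession

    object UseDbSketch {
      def main(args: Array[String]): Unit = {
        // Stand-in for GmeiConfig.getSparkSession(); assumes the catalog
        // already knows the TiDB databases (e.g. TiSpark 2.x or a shared
        // metastore).
        val spark = SparkSession.builder()
          .appName("use-db-sketch")
          .enableHiveSupport()
          .getOrCreate()

        // Old style (TiSpark 1.x), as in the lines commented out above:
        //   val ti = new TiContext(spark)
        //   ti.tidbMapTable(dbName = "jerry_test", tableName = "esmm_train_data")

        // New style: select the database once, then query tables by bare name.
        spark.sql("use jerry_test")
        spark.sql("select * from esmm_train_data limit 10").show()
      }
    }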
@@ -3,7 +3,7 @@ package com.gmei
 import java.io.Serializable
 import com.gmei.WeafareStat.{defaultParams, parser}
-import org.apache.spark.sql.{SaveMode, TiContext}
+import org.apache.spark.sql.{SaveMode}
 import org.apache.log4j.{Level, Logger}
 import scopt.OptionParser
 import com.gmei.lib.AbstractParams
@@ -43,12 +43,14 @@ object Recommendation_strategy_all {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    sc.sql("use jerry_prod")
+
+    // val ti = new TiContext(sc)
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     val stat_date = GmeiConfig.getMinusNDate(1)
@@ -403,11 +405,11 @@ object Gini_coefficient {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "eagle", tableName = "src_mimas_prod_api_diary")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // val ti = new TiContext(sc)
+    // ti.tidbMapTable(dbName = "eagle", tableName = "src_mimas_prod_api_diary")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
     import sc.implicits._
@@ -419,7 +421,7 @@ object Gini_coefficient {
     val diary_id = sc.sql(
       s"""
          |select id as diary_id
-         |from src_mimas_prod_api_diary
+         |from eagle.src_mimas_prod_api_diary
          |where content_level >=3.5
          |and doctor_id is not null
       """.stripMargin
...
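The paired change above matters: with the TiContext per-table mapping gone, table names resolve through the session's current database, so the query against the eagle database must qualify the table as eagle.src_mimas_prod_api_diary. A short spark-shell-style illustration of the resolution rule; `spark` stands in for the session the code obtains from GmeiConfig.getSparkSession().

    spark.sql("use jerry_prod")

    // Bare names resolve inside the current database, jerry_prod:
    val clicks = spark.sql("select * from data_feed_click limit 5")

    // Tables in another database need the db prefix, as in the hunk above:
    val diaries = spark.sql(
      s"""
         |select id as diary_id
         |from eagle.src_mimas_prod_api_diary
         |where content_level >= 3.5
         |and doctor_id is not null
       """.stripMargin)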
@@ -3,7 +3,7 @@ package com.gmei
 import java.io.Serializable
 import com.gmei.WeafareStat.{defaultParams, parser}
-import org.apache.spark.sql.{SaveMode, TiContext}
+import org.apache.spark.sql.{SaveMode}
 import org.apache.log4j.{Level, Logger}
 import scopt.OptionParser
 import com.gmei.lib.AbstractParams
...
@@ -2,7 +2,7 @@ package com.gmei
 import java.io.Serializable
-import org.apache.spark.sql.{SaveMode, TiContext}
+import org.apache.spark.sql.{SaveMode}
 import org.apache.log4j.{Level, Logger}
 import scopt.OptionParser
 import com.gmei.lib.AbstractParams
...
@@ -4,7 +4,7 @@ import java.io.Serializable
 import org.apache.spark.sql.functions.udf
 import com.gmei.WeafareStat.{defaultParams, parser}
-import org.apache.spark.sql.{SaveMode, TiContext}
+import org.apache.spark.sql.{SaveMode}
 import org.apache.log4j.{Level, Logger}
 import scopt.OptionParser
 import com.gmei.lib.AbstractParams
@@ -49,13 +49,15 @@ object app_list {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    sc.sql("use jerry_prod")
+
+    // val ti = new TiContext(sc)
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     import sc.implicits._
@@ -218,10 +220,12 @@ object coincidence_xinyang {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "device_id_applist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "device_id_coincidence")
+    sc.sql("use jerry_prod")
+
+    // val ti = new TiContext(sc)
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "device_id_applist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "device_id_coincidence")
     //println(param.date)
...
@@ -411,11 +411,11 @@ object data_feed_exposure_precise {
       )
 //    GmeiConfig.writeToJDBCTable(result, table = "data_feed_exposure_precise", SaveMode.Append)
-//    GmeiConfig.writeToJDBCTable("jdbc:mysql://152.136.44.138:4000/jerry_prod?user=root&password=3SYz54LS9#^9sBvC&rewriteBatchedStatements=true",result, table="data_feed_exposure_precise",SaveMode.Append)
-    println("开始写入")
-    GmeiConfig.writeToJDBCTable("jerry.jdbcuri",result, table="data_feed_exposure_precise",SaveMode.Append)
-    println("写入完成")
+    GmeiConfig.writeToJDBCTable("jdbc:mysql://152.136.44.138:4000/jerry_prod?user=root&password=3SYz54LS9#^9sBvC&rewriteBatchedStatements=true",result, table="data_feed_exposure_precise",SaveMode.Append)
+//    println("开始写入")
+//    GmeiConfig.writeToJDBCTable("jerry.jdbcuri",result, table="data_feed_exposure_precise",SaveMode.Append)
+//    println("写入完成")
   }
...
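This hunk swaps the `jerry.jdbcuri` config key for a hard-coded TiDB connection string (TiDB speaks the MySQL wire protocol, hence the jdbc:mysql:// URL on port 4000). GmeiConfig.writeToJDBCTable is this repo's own helper and its body is not part of the diff; the sketch below only guesses at the shape of such a helper using Spark's standard JDBC writer. The hostname, credentials, and driver class are placeholders, not the production values.

    import java.util.Properties
    import org.apache.spark.sql.{DataFrame, SaveMode}

    object JdbcWriteSketch {
      // Hypothetical stand-in for GmeiConfig.writeToJDBCTable; the real
      // helper may differ. SaveMode.Append adds rows without touching
      // existing ones.
      def writeToJdbc(df: DataFrame, url: String, table: String, mode: SaveMode): Unit = {
        val props = new Properties()
        props.put("driver", "com.mysql.jdbc.Driver") // assumes MySQL Connector/J 5.x
        df.write.mode(mode).jdbc(url, table, props)
      }
    }

    // Usage with a placeholder endpoint:
    //   JdbcWriteSketch.writeToJdbc(result,
    //     "jdbc:mysql://tidb-host:4000/jerry_prod?user=***&password=***&rewriteBatchedStatements=true",
    //     "data_feed_exposure_precise", SaveMode.Append)

The rewriteBatchedStatements=true parameter kept in the new URL lets Connector/J coalesce the batched inserts Spark issues into multi-row statements, which is usually the difference between an acceptable and a very slow bulk Append.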
@@ -3,7 +3,7 @@ package com.gmei
 import java.io.Serializable
 import com.gmei.WeafareStat.{defaultParams, parser}
-import org.apache.spark.sql.{SaveMode, TiContext}
+import org.apache.spark.sql.{SaveMode}
 import org.apache.log4j.{Level, Logger}
 import scopt.OptionParser
 import com.gmei.lib.AbstractParams
...