Commit 50ffafe3 authored by 王志伟

Change connection

parent 7cac8d2d
@@ -48,12 +48,16 @@ object GmeiConfig extends Serializable {
     val spark = SparkSession
       .builder()
-      .config(sparkConf)
+      // .config(sparkConf)
       .appName("feededa")
       .enableHiveSupport()
+      .config("spark.tispark.pd.addresses","172.16.40.158:2379")
+      .config("spark.sql.extensions","org.apache.spark.sql.TiExtensions")
       .getOrCreate()
-    spark.sql("use online")
+    spark.sql("SET mapreduce.job.queuename=data")
+    spark.sql("SET mapred.input.dir.recursive=true")
+    spark.sql("SET hive.mapred.supports.subdirectories=true")
     spark.sql("ADD JAR hdfs:///user/hive/share/lib/udf/brickhouse-0.7.1-SNAPSHOT.jar")
     spark.sql("ADD JAR hdfs:///user/hive/share/lib/udf/hive-udf-1.0-SNAPSHOT.jar")
     spark.sql("CREATE TEMPORARY FUNCTION json_map AS 'brickhouse.udf.json.JsonMapUDF'")
...
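Note: the hunk above is where the connection change actually happens. Instead of building a TiContext by hand in each job, the session now registers TiSpark's TiExtensions and points it at the TiDB placement driver, and the Hive queue and recursive-read settings are applied right after the session is created. A minimal sketch of the resulting setup, assuming TiSpark 2.x semantics (where TiExtensions replaces the explicit TiContext) and standard Spark APIs; this is not the full GmeiConfig, only what this hunk configures:

import org.apache.spark.sql.SparkSession

// Sketch only: mirrors the new configuration in the hunk above.
def buildSession(): SparkSession = {
  val spark = SparkSession
    .builder()
    .appName("feededa")
    .enableHiveSupport()
    // Register TiSpark so TiDB databases and tables resolve through the catalog.
    .config("spark.tispark.pd.addresses", "172.16.40.158:2379")
    .config("spark.sql.extensions", "org.apache.spark.sql.TiExtensions")
    .getOrCreate()
  // Hive/MapReduce settings added by this commit:
  spark.sql("SET mapreduce.job.queuename=data")         // run on the "data" queue
  spark.sql("SET mapred.input.dir.recursive=true")      // read nested input dirs
  spark.sql("SET hive.mapred.supports.subdirectories=true")
  spark
}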
@@ -3,7 +3,7 @@ package com.gmei
 import java.io.Serializable
 import com.gmei.WeafareStat.{defaultParams, parser}
-import org.apache.spark.sql.{SaveMode, TiContext}
+import org.apache.spark.sql.{SaveMode}
 import org.apache.log4j.{Level, Logger}
 import scopt.OptionParser
 import com.gmei.lib.AbstractParams
@@ -43,12 +43,13 @@ object data_feed_exposure_precise {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     val stat_date = GmeiConfig.getMinusNDate(1)
...
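The same substitution repeats in every job below: the per-table ti.tidbMapTable registrations are commented out and replaced by a single sc.sql("use jerry_prod"). A short before/after sketch, assuming TiExtensions (configured above) makes TiDB databases visible in Spark's catalog so explicit table mapping is no longer needed; sc is the SparkSession half of the tuple returned by GmeiConfig.getSparkSession():

// Before: each TiDB table had to be mapped into Spark's catalog explicitly.
// val ti = new TiContext(sc)
// ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")

// After: select the TiDB database once; tables then resolve directly in SQL.
sc.sql("use jerry_prod")
val exposure = sc.sql("SELECT * FROM data_feed_exposure LIMIT 10") // illustrative query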
@@ -3,7 +3,8 @@ package com.gmei
 import java.io.Serializable
 import com.gmei.WeafareStat.{defaultParams, parser}
-import org.apache.spark.sql.{SaveMode, TiContext}
+import org.apache.spark.sql.{SaveMode}
+//import org.apache.spark.sql.{SaveMode, TiContext}
 import org.apache.log4j.{Level, Logger}
 import scopt.OptionParser
 import com.gmei.lib.AbstractParams
@@ -43,12 +44,13 @@ object find_bug {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     val stat_date = GmeiConfig.getMinusNDate(1)
@@ -342,12 +344,13 @@ object CTR_precise {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure_precise")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure_precise")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     val stat_date = GmeiConfig.getMinusNDate(1)
...
@@ -3,7 +3,8 @@ package com.gmei
 import java.io.Serializable
 import com.gmei.WeafareStat.{defaultParams, parser}
-import org.apache.spark.sql.{SaveMode, TiContext}
+import org.apache.spark.sql.{SaveMode}
+//import org.apache.spark.sql.{SaveMode, TiContext}
 import org.apache.log4j.{Level, Logger}
 import scopt.OptionParser
 import com.gmei.lib.AbstractParams
@@ -46,13 +47,14 @@ object temp_analysis {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     import sc.implicits._
@@ -209,13 +211,14 @@ object ARPU_COM {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     import sc.implicits._
@@ -331,13 +334,14 @@ object hospital_gengmei {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     import sc.implicits._
@@ -402,13 +406,14 @@ object meigou_xiaofei_renshu {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
@@ -543,13 +548,14 @@ object alpha_ctr {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     import sc.implicits._
@@ -708,13 +714,14 @@ object copy_database {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_test", tableName = "tl_hdfs_wiki_item_tag_view")
-    ti.tidbMapTable(dbName = "jerry_test", tableName = "Knowledge_network")
-    ti.tidbMapTable(dbName = "eagle", tableName = "src_mimas_prod_api_diary")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_test", tableName = "tl_hdfs_wiki_item_tag_view")
+    // ti.tidbMapTable(dbName = "jerry_test", tableName = "Knowledge_network")
+    // ti.tidbMapTable(dbName = "eagle", tableName = "src_mimas_prod_api_diary")
     import sc.implicits._
...
@@ -5,7 +5,8 @@ import java.text.SimpleDateFormat
 import breeze.linalg.split
 import com.gmei.WeafareStat.{defaultParams, parser}
-import org.apache.spark.sql.{Row, SaveMode, SparkSession, TiContext}
+import org.apache.spark.sql.{Row, SaveMode, SparkSession}
+//import org.apache.spark.sql.{Row, SaveMode, SparkSession, TiContext}
 import org.apache.log4j.{Level, Logger}
 import scopt.OptionParser
 import com.gmei.lib.AbstractParams
@@ -50,12 +51,13 @@ object temp_count {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     val stat_date = GmeiConfig.getMinusNDate(1)
@@ -213,12 +215,13 @@ object Repeated_content_recommendation {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure_precise")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure_precise")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     // val stat_date = GmeiConfig.getMinusNDate(1)
@@ -409,12 +412,13 @@ object Repeated_content_recommendation_moreday {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure_precise")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure_precise")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     val stat_date = GmeiConfig.getMinusNDate(1)
@@ -512,12 +516,13 @@ object GetHiveSearchData {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure_precise")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "GetHiveSearchData_CTR")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure_precise")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "GetHiveSearchData_CTR")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
     val stat_date = GmeiConfig.getMinusNDate(1)
@@ -795,11 +800,12 @@ object find_reason {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure_precise")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure_precise")
     // val stat_date = GmeiConfig.getMinusNDate(1)
     val stat_date=param.date
...
@@ -3,7 +3,8 @@ package com.gmei
 import java.io.Serializable
 import com.gmei.WeafareStat.{defaultParams, parser}
-import org.apache.spark.sql.{SaveMode, TiContext}
+//import org.apache.spark.sql.{SaveMode, TiContext}
+import org.apache.spark.sql.{SaveMode}
 import org.apache.log4j.{Level, Logger}
 import scopt.OptionParser
 import com.gmei.lib.AbstractParams
@@ -44,11 +45,12 @@ object testt {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
     val stat_date = GmeiConfig.getMinusNDate(1)
     // val stat_date=param.date
@@ -456,11 +458,12 @@ object diary_clk_card {
     val spark_env = GmeiConfig.getSparkSession()
     val sc = spark_env._2
-    val ti = new TiContext(sc)
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
-    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure_precise")
+    // val ti = new TiContext(sc)
+    sc.sql("use jerry_prod")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
+    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure_precise")
     val stat_date = GmeiConfig.getMinusNDate(1)
     // val stat_date=param.date
...
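Taken together, a job preamble after this commit looks like the sketch below. The names come from the diff itself; the final query is a hypothetical illustration (the stat_date filter column is assumed for the example, not confirmed by this commit):

val spark_env = GmeiConfig.getSparkSession()
val sc = spark_env._2                         // the SparkSession half of the returned tuple
sc.sql("use jerry_prod")                      // replaces the per-table tidbMapTable calls
val stat_date = GmeiConfig.getMinusNDate(1)   // yesterday, as used throughout the diff
// Hypothetical query for illustration only:
val clicks = sc.sql(s"SELECT * FROM data_feed_click WHERE stat_date = '$stat_date'")
clicks.show()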