package com.gmei

import java.io.Serializable

import org.apache.spark.sql.{SaveMode, TiContext}
import org.apache.log4j.{Level, Logger}
import scopt.OptionParser
import com.gmei.lib.AbstractParams

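/**
  * Exploratory TiSpark job: maps several jerry_prod / jerry_test TiDB tables into
  * the Spark catalog and, for each strategy suffix, runs a per-day aggregation of
  * session duration and distinct devices over online.tl_hdfs_maidian_view.
  */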
object testt {

  Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
  Logger.getLogger("org.apache.eclipse.jetty.server").setLevel(Level.OFF)

  case class Params(env: String = "dev") extends AbstractParams[Params] with Serializable

  val defaultParams = Params()

  val parser = new OptionParser[Params]("Feed_EDA") {
    head("testt")
    opt[String]("env")
      .text("the database environment to use")
      .action((x, c) => c.copy(env = x))
    //opt[String] ("date")
     // .text(s"the date you used")
     // .action((x,c) => c.copy(date = x))
    note(
      """
        |For example, the following command runs this app on a TiDB dataset:
        |
        | spark-submit --class com.gmei.testt ./target/scala-2.11/feededa-assembly-0.1.jar \
      """.stripMargin +
        s"   --env ${defaultParams.env}"
    )
  }

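  // Entry point: parse the CLI arguments, set up the environment via GmeiConfig,
  // and run the session-duration aggregation for each strategy suffix.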
  def main(args: Array[String]): Unit = {
    parser.parse(args, defaultParams).foreach { param =>
      GmeiConfig.setup(param.env)
      val spark_env = GmeiConfig.getSparkSession()
      val sc = spark_env._2 // the Spark session used for sql() calls below

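      // Map TiDB tables into the Spark catalog via TiContext so they can be
      // queried directly with sc.sql(...).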
      val ti = new TiContext(sc)
      ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
      ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
      ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
      ti.tidbMapTable(dbName = "jerry_test", tableName = "bl_device_list")
      ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")

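      // Strategy suffixes to iterate over. Note that the query below does not
      // filter by the suffix yet, so each iteration runs the same aggregation.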
      val strategies = Seq("3$","4$","5$","6$","7$","8$","c$","d$","e$","A$","B$","C$","D$")
      for (strategy <- strategies) {
        println(strategy)
        val get_data_dura = sc.sql(
          s"""
             |select partition_date, sum(params['duration']) as total_dur, count(distinct cl_id) as num
             |from online.tl_hdfs_maidian_view
             |where action = 'on_app_session_over'
             |group by partition_date
           """.stripMargin
        )
        get_data_dura.show()
      }
    }
  }

}