ML / ffm-baseline

Commit 7fc3bb53, authored Apr 19, 2019 by 王志伟
Parent: 1c5a1343

Fix bug
Showing 3 changed files with 17 additions and 15 deletions:

  +15 -13  eda/feededa/src/main/scala/com/gmei/Recommendation_strategy_all.scala
  +1  -1   eda/feededa/src/main/scala/com/gmei/Search_keywords_count.scala
  +1  -1   eda/feededa/src/main/scala/com/gmei/WeafareStat.scala
eda/feededa/src/main/scala/com/gmei/Recommendation_strategy_all.scala
@@ -3,7 +3,7 @@ package com.gmei
import java.io.Serializable
import com.gmei.WeafareStat.{defaultParams, parser}
import org.apache.spark.sql.{SaveMode, TiContext}
import org.apache.spark.sql.{SaveMode}
import org.apache.log4j.{Level, Logger}
import scopt.OptionParser
import com.gmei.lib.AbstractParams
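Note: the same single-line change to the org.apache.spark.sql import, with {SaveMode, TiContext} on one side and {SaveMode} on the other, appears in each of the three files in this commit; see the Search_keywords_count.scala and WeafareStat.scala hunks below.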
@@ -43,12 +43,14 @@ object Recommendation_strategy_all {
    val spark_env = GmeiConfig.getSparkSession()
    val sc = spark_env._2
    val ti = new TiContext(sc)
    ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
    ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
    sc.sql("use jerry_prod")
    // val ti = new TiContext(sc)
    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "diary_video")
    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "merge_queue_table")
    val stat_date = GmeiConfig.getMinusNDate(1)
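The hunk above registers the TiDB tables through TiContext before the job queries them with Spark SQL. A minimal, self-contained sketch of that pattern, assuming TiSpark is on the classpath and using a plain SparkSession in place of GmeiConfig.getSparkSession()._2 (which, from the surrounding code, appears to be a SparkSession); database and table names are taken from the diff:

    // Sketch only: mirrors the TiContext mapping pattern from the hunk above.
    import org.apache.spark.sql.{SparkSession, TiContext}

    object TidbMappingSketch {
      def main(args: Array[String]): Unit = {
        // Stand-in for GmeiConfig.getSparkSession()._2 in the original code.
        val spark = SparkSession.builder()
          .appName("tidb-mapping-sketch")
          .getOrCreate()

        // Each call exposes a TiDB table to Spark SQL under its table name.
        val ti = new TiContext(spark)
        ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
        ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")

        // Once mapped, the tables can be queried by name, as the job does after this block.
        spark.sql("select count(*) from data_feed_click").show()

        spark.stop()
      }
    }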
@@ -403,11 +405,11 @@ object Gini_coefficient {
    val spark_env = GmeiConfig.getSparkSession()
    val sc = spark_env._2
    val ti = new TiContext(sc)
    ti.tidbMapTable(dbName = "eagle", tableName = "src_mimas_prod_api_diary")
    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
    ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
    ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
    // val ti = new TiContext(sc)
    // ti.tidbMapTable(dbName = "eagle", tableName = "src_mimas_prod_api_diary")
    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_click")
    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "blacklist")
    // ti.tidbMapTable(dbName = "jerry_prod", tableName = "data_feed_exposure")
    import sc.implicits._
@@ -419,7 +421,7 @@ object Gini_coefficient {
    val diary_id = sc.sql(
      s"""
         |select id as diary_id
         |from src_mimas_prod_api_diary
         |from eagle.src_mimas_prod_api_diary
         |where content_level >=3.5
         |and doctor_id is not null
       """.stripMargin
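The one-line change in this hunk looks like the fix the commit message refers to: the diary query now qualifies src_mimas_prod_api_diary with the eagle database, consistent with the ti.tidbMapTable(dbName = "eagle", tableName = "src_mimas_prod_api_diary") mapping above, rather than referring to the table by its bare name.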
eda/feededa/src/main/scala/com/gmei/Search_keywords_count.scala
@@ -3,7 +3,7 @@ package com.gmei
import java.io.Serializable
import com.gmei.WeafareStat.{defaultParams, parser}
import org.apache.spark.sql.{SaveMode, TiContext}
import org.apache.spark.sql.{SaveMode}
import org.apache.log4j.{Level, Logger}
import scopt.OptionParser
import com.gmei.lib.AbstractParams
eda/feededa/src/main/scala/com/gmei/WeafareStat.scala
@@ -2,7 +2,7 @@ package com.gmei
import java.io.Serializable
import org.apache.spark.sql.{SaveMode, TiContext}
import org.apache.spark.sql.{SaveMode}
import org.apache.log4j.{Level, Logger}
import scopt.OptionParser
import com.gmei.lib.AbstractParams