Commit 019e780b
authored Jun 22, 2020 by 魏艺敏
update codes
parents f3a4c0fe f538306f
Showing 2 changed files with 12 additions and 3 deletions (+12 -3)
jobs/daily_recommend_strategy/daily_recommend_strategy.zip  +0 -0
shells/daily_recommend_strategy/daily_recommend_strategy.sh  +12 -3
jobs/daily_recommend_strategy/daily_recommend_strategy.zip
No preview for this file type
shells/daily_recommend_strategy/daily_recommend_strategy.sh
#!/bin/bash
# Variables are defined here so they are easy to change
hive="/opt/spark/spark-2.4.5-bin-hadoop2.7/bin/beeline -u jdbc:hive2://bj-gm-prod-cos-datacenter003:10010/online -n data"
#hive="/opt/spark/spark-2.4.5-bin-hadoop2.7/bin/beeline -u jdbc:hive2://bj-gm-prod-cos-datacenter003:10010 -n data"
hive="/opt/hive/hive-1.1.0-cdh5.16.1/bin/beeline -u jdbc:hive2://bj-gm-prod-cos-datacenter005:10000 -n data"
# If a date argument is passed in, use it; otherwise use the day before the current date
if [ -n "$1" ]; then
...
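The body of this if/else is collapsed in the diff. A minimal sketch of the pattern the comment describes, assuming the usual GNU date idiom and reusing the $partition_day name that appears in the INSERT statement further down; the exact assignments here are illustrative, not the script's actual lines:

    if [ -n "$1" ]; then
        # a date was passed in, e.g. 20200621
        partition_day=$1
    else
        # default to the day before the current date
        partition_day=$(date -d "1 day ago" +%Y%m%d)
    fi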
@@ -12,11 +13,19 @@ fi
sql="
SET mapreduce.job.queuename=pm;
SET mapreduce.map.memory.mb=8192;
SET mapreduce.map.java.opts=-Xmx8000m;
SET mapreduce.reduce.memory.mb=8192;
SET mapreduce.reduce.java.opts=-Xmx8000m;
set hive.auto.convert.join=true;
SET mapred.reduce.tasks=20;
SET role admin;
ADD JAR hdfs:///user/hive/share/lib/udf/hive-udf-1.0-SNAPSHOT.jar;
CREATE TEMPORARY FUNCTION convup AS 'com.gmei.hive.common.udf.UDFConvUpgrade';
INSERT OVERWRITE TABLE pm.tl_pm_recommend_strategy_d PARTITION (PARTITION_DAY = $partition_day)
SELECT
t1.partition_date as day_id,
t1.device_os_type as device_os_type,
...
@@ -236,7 +245,7 @@ FROM
FROM online.bl_hdfs_maidian_updates
WHERE partition_date>=regexp_replace(DATE_SUB(current_date,1) ,'-','')
AND partition_date<regexp_replace((current_date),'-','')
AND (get_json_object(params['extra_param'], '$.type')='交互栏'
or get_json_object(params['extra_param'], '$.jump_from')='msg_link')
AND action='on_click_card'
and params['card_content_type']='service'
...
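The diff does not show the line that finally submits the query. With the $hive variable defined at the top of the script, a common way to run it and propagate failures would be the following; this is a sketch under that assumption, not necessarily the exact line this script uses:

    # run the assembled query through beeline and fail the job on error
    $hive -e "$sql"
    if [ $? -ne 0 ]; then
        echo "daily_recommend_strategy failed"
        exit 1
    fi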