Commit e388a661 authored by yindanlei's avatar yindanlei

update codes

parents 6e9fff6e ab609662
This diff is collapsed.
<?xml version="1.0" encoding="UTF-8"?>
<!-- IntelliJ IDE setting: maps the listed SQL file to the SQL92 dialect
     for syntax highlighting/inspection. IDE-managed; safe to regenerate. -->
<project version="4">
<component name="SqlDialectMappings">
<file url="file://$PROJECT_DIR$/sqls/bak/demo_spark_param/demo_spark_param.sql" dialect="SQL92" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!-- Resolved previously-committed, unresolved git merge conflict markers
     (<<<<<<< HEAD / ======= / >>>>>>> 0a1e615b) which made this file
     non-well-formed XML. Resolution: union of both sides' components; the
     incoming side's changelist id (034b59f7...) is kept consistently in
     both ChangeListManager and TaskManager. IDE-managed workspace file. -->
<project version="4">
  <component name="ChangeListManager">
    <list default="true" id="034b59f7-f608-4144-b0af-f6951647bd51" name="Default Changelist" comment="">
      <change beforePath="$PROJECT_DIR$/lib/shell/hive.sh" beforeDir="false" afterPath="$PROJECT_DIR$/lib/shell/hive.sh" afterDir="false" />
    </list>
    <option name="SHOW_DIALOG" value="false" />
    <option name="HIGHLIGHT_CONFLICTS" value="true" />
    <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
    <option name="LAST_RESOLUTION" value="IGNORE" />
  </component>
  <component name="CodeStyleSettingsInfer">
    <option name="done" value="true" />
  </component>
  <component name="ComposerSettings">
    <execution>
      <executable />
    </execution>
  </component>
  <component name="Git.Settings">
    <option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
  </component>
  <component name="ProjectCodeStyleSettingsMigration">
    <option name="version" value="1" />
  </component>
  <component name="ProjectId" id="1dfQ0VEii7hX7vb7p4D7taxyaw0" />
  <component name="ProjectLevelVcsManager" settingsEditedManually="true" />
  <component name="ProjectViewState">
    <option name="hideEmptyMiddlePackages" value="true" />
    <option name="showExcludedFiles" value="true" />
    <option name="showLibraryContents" value="true" />
  </component>
  <component name="PropertiesComponent">
    <property name="RunOnceActivity.OpenProjectViewOnStart" value="true" />
    <property name="RunOnceActivity.ShowReadmeOnStart" value="true" />
    <property name="WebServerToolWindowFactoryState" value="false" />
    <property name="aspect.path.notification.shown" value="true" />
    <property name="last_opened_file_path" value="$PROJECT_DIR$" />
  </component>
  <component name="RunManager">
    <configuration default="true" type="ClojureREPL" factoryName="Local" activateToolWindowBeforeRun="false">
      <setting name="replType" value="NREPL" />
      <setting name="execution" value="LEININGEN" />
      <setting name="jvmArgs" value="" />
      <setting name="parameters" value="" />
      <setting name="workingDir" value="" />
      <setting name="profiles" value="" />
      <setting name="aliases" value="" />
      <setting name="fixLineNumbers" value="false" />
      <method v="2" />
    </configuration>
    <configuration default="true" type="ClojureREPL" factoryName="Remote" activateToolWindowBeforeRun="false">
      <setting name="host" value="" />
      <setting name="port" value="0" />
      <setting name="replType" value="SOCKET" />
      <setting name="configType" value="SPECIFY" />
      <setting name="fixLineNumbers" value="false" />
      <method v="2" />
    </configuration>
  </component>
  <component name="SvnConfiguration">
    <configuration />
  </component>
  <component name="TaskManager">
    <task active="true" id="Default" summary="Default task">
      <changelist id="034b59f7-f608-4144-b0af-f6951647bd51" name="Default Changelist" comment="" />
      <created>1592813444854</created>
      <option name="number" value="Default" />
      <option name="presentableId" value="Default" />
      <updated>1592813444854</updated>
      <workItem from="1592813446154" duration="2153000" />
    </task>
    <servers />
  </component>
  <component name="TypeScriptGeneratedFilesManager">
    <option name="version" value="1" />
  </component>
</project>
\ No newline at end of file
#!/bin/bash
# Switch the local checkout to master and pull the latest changes.
# (Fixes diff-render corruption that had duplicated the shebang and the
# `git pull` line on single lines, breaking the script.)
set -e
set -x
git checkout master
git pull origin master
\ No newline at end of file
#!/bin/bash
# Update the local wangyan branch from origin.
# -e: abort on the first failing command; -x: echo each command.
set -ex
git checkout wangyan
git pull origin wangyan
\ No newline at end of file
#!/bin/bash
# Update the local wangzhuo branch from origin.
# -e: abort on the first failing command; -x: echo each command.
set -ex
git checkout wangzhuo
git pull origin wangzhuo
\ No newline at end of file
#!/bin/bash
# Switch the local checkout to yindanlei and pull the latest changes.
# (Fixes diff-render corruption that had duplicated the shebang, checkout
# and pull commands on single lines, breaking the script.)
set -e
set -x
git checkout yindanlei
git pull origin yindanlei
\ No newline at end of file
#!/bin/bash
# Update the local zhaofei branch from origin.
# -e: abort on the first failing command; -x: echo each command.
set -ex
git checkout zhaofei
git pull origin zhaofei
\ No newline at end of file
#!/bin/bash
# Switch the local checkout to zhaojianwei and pull the latest changes.
# (Fixes diff-render corruption that had duplicated the shebang and the
# `git pull` line on single lines, breaking the script.)
set -e
set -x
git checkout zhaojianwei
git pull origin zhaojianwei
\ No newline at end of file
mazhimo
bi
sjxuwei
zhaojianwei
zhaofei
weiyimin
yindanlei
zhanghuixin
wangzhuo
wangyan
root
hadoop
\ No newline at end of file
demo=示例程序-demo
demo_bak=示例程序-demo_bak
demo_param=带参数的示例程序
demo_java=JAVA实例程序
meigou-detail-page=美购详情页
meigou-detail-page-dispense-pv=美购详情页分发pv
meigou-detail-page-dispense-uv=美购详情页分发uv
meigou-detail-page-source-pv=美购详情页来源pv
meigou-detail-page-source-uv=美购详情页来源uv
daily_content_data=内容日报-新
search_click_path=分搜索入口的搜索框点击数据
search_result_click_rate=分搜索词转化率数据
search_input_type=分搜索方式近7天数据
daily_recommend_strategy=首页推荐策略日报
...@@ -5,9 +5,6 @@ BI report project init. ...@@ -5,9 +5,6 @@ BI report project init.
部署节点:
bj-gm-prod-cos-dataweb005
临时文件目录
/data/bi-report/tmp
结果目录
/data/bi-report/result
...@@ -22,3 +19,11 @@ BI report project init. ...@@ -22,3 +19,11 @@ BI report project init.
mkdir -p /data/bi-report/logs
chown -R bi:bi /data/bi-report
开发过程中需要注意的事项:
1.在en-cn.properties中维护映射关系,则显示中文名,否则显示英文名
2.在azkaban中替换项目内容不需要重新配置定时任务;删除后重新上传,则需要重新配置定时任务
规范约定:
1.job文件的命名
必须以job1_01的方式命名,以便在azkaban中可以有序查看
开发过程中需要注意的事项:
1.在en-cn.properties中维护映射关系,则显示中文名,否则显示英文名
2.在编写类似insert into tableA select * from tableB语句时,不需要添加set role admin以及set mapreduce.job.queuename=data语句,而且不能带有";"符号
3.在azkaban中替换项目内容不需要重新配置定时任务;删除后重新上传,则需要重新配置定时任务
需求v1.0:
1.附件改为xlsx格式 【metabase支持csv、xlsx两种格式】 python代码实现 ok
2.指定发送人 优先级比较低【metabase默认为doctor】 重新写邮件发送代码 ok
3.更改附件的名 指定 中文 带账期【metabase默认取的是问题的名称】 指定附件的名称为中文,重新转换或者直接指定 ok
4.优化邮件内容,wps打开去掉 样式格式化【metabase中包含内容简略视图,而且有metabaselogo】 邮件内容格式化 python实现 ok
5.一个附件多个sheet 或者 一封邮件 多个附件【metabase默认支持指定多个问题、多个附件】 python发送邮件实现,一个文件多个sheet实现起来相对比较难 发送邮件时需要指定附件名称(带账期) ok
6.excel文件内容格式化 优先级比较低 【指定每个字段的最大长度】 使用python实现,尝试 ok
#step1.job
type=command
command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online ml_device_day_active_status
\ No newline at end of file
#step2.job
type=command
dependencies=step1
command=sh /home/bi/bi-report/lib/shell/hive2csv.sh demo_hive
\ No newline at end of file
#step1_5.job
type=command
command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online ml_device_day_active_status
\ No newline at end of file
#step1_4.job
type=command
command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online bl_hdfs_maidian_updates
\ No newline at end of file
#step2.job
type=command
dependencies=step1_1,step1_2,step1_3,step1_4
command=curl -X GET http://localhost:8553/api/report/sendEmail/zhaojianwei@igengmei.com/jianweizhao@yeah.net/demo_java
\ No newline at end of file
#step1_4.job
type=command
command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online bl_hdfs_maidian_updates
\ No newline at end of file
#step2.job
type=command
dependencies=step1_1,step1_2,step1_3,step1_4
command=curl -X GET http://localhost:8553/api/report/genExcel?name=demo_java
\ No newline at end of file
#step1_1.job
type=command
command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online tl_hdfs_api_tag_3_0_view
\ No newline at end of file
#step1_2.job
type=command
command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online tl_hdfs_api_tag_aggregate_view
\ No newline at end of file
#step1_4.job
type=command
command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online tl_hdfs_api_tag_attr_view
\ No newline at end of file
#step2.job
type=command
dependencies=step1_1,step1_2,step1_3,step1_4,step1_5,step1_6
command=sh /home/bi/bi-report/lib/shell/sparksql2email.sh demo_spark_param zhaojianwei zhaojianwei
\ No newline at end of file
#step2.job
type=command
command=curl -X GET http://localhost:8553/api/report/execSql?name=hive_insert
\ No newline at end of file
#step1_1.job
type=command
command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online bl_hdfs_maidian_updates
\ No newline at end of file
#step1_4.job
type=command
command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online ml_device_day_active_status
\ No newline at end of file
#step1_4.job
type=command
command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online ml_hospital_spam_pv_day
\ No newline at end of file
#step2.job
type=command
dependencies=step1_1,step1_2,step1_3,step1_4
command=curl -X GET http://localhost:8553/api/report/sendEmail/zhanghuixin@igengmei.com/mazhimo@igengmei.com,zhaojianwei@igengmei.com/meigou-detail-page
\ No newline at end of file
#step4.job
type=command
dependencies=step3
command=curl -X GET http://localhost:8553/api/report/sendEmail/liudi@igengmei.com,wangxin@igengmei.com,malinxi@igengmei.com,dengguangyu@igengmei.com/weiyimin@igengmei.com,zhaojianwei@igengmei.com,zhaofei@igengmei.com,yindanlei@igengmei.com/daily_content_data
\ No newline at end of file
#step4.job
type=command
dependencies=step3
command=curl -X GET http://localhost:8553/api/report/sendEmail/weiyimin@igengmei.com/zhaojianwei@igengmei.com/daily_recommend_strategy
\ No newline at end of file
#step2.job
type=command
dependencies=step1_1,step1_2
command=curl -X GET http://localhost:8553/api/report/execSql?name=search_related_insert
\ No newline at end of file
#step4.job
type=command
dependencies=step3
# NOTE(review): sibling jobs call this endpoint as /sendEmail/{to}/{cc}/{name}
# (three path segments); this URL has only two (to, name) — confirm the cc
# segment was not dropped by mistake.
command=curl -X GET http://localhost:8553/api/report/sendEmail/weiyimin@igengmei.com/search_related
\ No newline at end of file
...@@ -3,17 +3,14 @@ ...@@ -3,17 +3,14 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<packaging>pom</packaging> <packaging>pom</packaging>
<groupId>com.gmei.data</groupId> <groupId>com.gmei.data</groupId>
<artifactId>bi-report-service</artifactId> <artifactId>bi-report-service</artifactId>
<version>1.0.0-SNAPSHOT</version> <version>1.0.0-SNAPSHOT</version>
<name>bi-report-service</name> <name>bi-report-service</name>
<description>Demo project for Spring Boot</description> <description>Demo project for Spring Boot</description>
<modules> <modules>
<module>service-server</module> <module>service-server</module>
</modules> </modules>
<!--定义属性值--> <!--定义属性值-->
<properties> <properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
...@@ -21,7 +18,6 @@ ...@@ -21,7 +18,6 @@
<java.version>1.8</java.version> <java.version>1.8</java.version>
<spring.boot.version>2.0.0.RELEASE</spring.boot.version> <spring.boot.version>2.0.0.RELEASE</spring.boot.version>
</properties> </properties>
<!--Maven远程仓库设置 非必要,可以删除--> <!--Maven远程仓库设置 非必要,可以删除-->
<repositories> <repositories>
<repository> <repository>
...@@ -47,7 +43,6 @@ ...@@ -47,7 +43,6 @@
</snapshots> </snapshots>
</pluginRepository> </pluginRepository>
</pluginRepositories> </pluginRepositories>
<!-- 依赖声明 --> <!-- 依赖声明 -->
<dependencyManagement> <dependencyManagement>
<dependencies> <dependencies>
......
#!/usr/bin/env bash
# Trigger the "demo" report email via the local report-service REST API
# (path segments are: name=demo, to=zhaojianwei@igengmei.com,
# cc=jianweizhao@yeah.net).
curl -X GET http://localhost:8553/api/report/email/demo/zhaojianwei@igengmei.com/jianweizhao@yeah.net
\ No newline at end of file
#!/bin/bash
# Exit with the value returned by the finance oozie-task status endpoint,
# so a scheduler can gate downstream steps on it.
# NOTE(review): assumes the HTTP response body is a bare integer; an empty
# or non-numeric body would make `exit` misbehave — confirm the API contract.
exit `curl -X GET http://172.16.44.8:8970/api/oozie/task/finance`
\ No newline at end of file
#!/bin/bash
# Refresh the start_date/end_date in each finance workflow's job.properties
# to today's date, then (re)submit the workflow to oozie.
# Fix: added `set -e` so a failed `cd` (or sed/oozie failure) aborts the
# script instead of silently running the next steps in the wrong directory.
set -e

# Rewrite start_date/end_date in ./job.properties to today
# (start at T00:00+0800, end at T12:00+0800).
function replace_date(){
    # current values read from the properties file
    old_start_date=`cat job.properties | grep start_date | awk -F"=" '{print $2}'`
    old_end_date=`cat job.properties | grep end_date | awk -F"=" '{print $2}'`
    # new values based on today's date
    new_start_date=`date +"%Y-%m-%d"`"T00:00+0800"
    new_end_date=`date +"%Y-%m-%d"`"T12:00+0800"
    # in-place substitution
    sed -i "s/$old_start_date/$new_start_date/g" job.properties
    sed -i "s/$old_end_date/$new_end_date/g" job.properties
}

# Submit the workflow defined by ./job.properties to the oozie server.
function start_job(){
    export OOZIE_URL=http://bj-gm-prod-cos-datacenter005:11000/oozie
    /opt/oozie-4.1.0-cdh5.16.1/bin/oozie job -config job.properties -run
}

cd /opt/warehouse/workflow/tl_finance_financeaccount_view/
replace_date
start_job
cd ../tl_finance_financeexpand_view/
replace_date
start_job
cd ../tl_finance_financerevenue_view/
replace_date
start_job
\ No newline at end of file
...@@ -7,16 +7,13 @@ ...@@ -7,16 +7,13 @@
<groupId>com.gmei.data</groupId> <groupId>com.gmei.data</groupId>
<version>1.0.0-SNAPSHOT</version> <version>1.0.0-SNAPSHOT</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>report-service-server</artifactId> <artifactId>report-service-server</artifactId>
<properties> <properties>
<java.version>1.8</java.version> <java.version>1.8</java.version>
<scala.version>2.11</scala.version> <scala.version>2.11</scala.version>
<spark.version>2.4.5</spark.version> <spark.version>2.4.5</spark.version>
</properties> </properties>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
......
...@@ -34,4 +34,9 @@ public class BiReportEndpoint { ...@@ -34,4 +34,9 @@ public class BiReportEndpoint {
public int sendEmail(@PathVariable("to") String to,@PathVariable("cc") String cc,@PathVariable("name") String name){ public int sendEmail(@PathVariable("to") String to,@PathVariable("cc") String cc,@PathVariable("name") String name){
return biReportService.sendEmail(to,cc,name); return biReportService.sendEmail(to,cc,name);
} }
@RequestMapping(value = "/email/{name}/{to}/{cc}",method = RequestMethod.GET)
public void email(@PathVariable("name") String name,@PathVariable("to") String to,@PathVariable("cc") String cc) throws Exception{
biReportService.email(name,to,cc);
}
} }
...@@ -2,7 +2,8 @@ package com.gmei.data.gateway.server.service; ...@@ -2,7 +2,8 @@ package com.gmei.data.gateway.server.service;
public interface BiReportService { public interface BiReportService {
int execSql(String name); int execSql(String name);
int execSqlByHive(String name);
int genExcel(String name); int genExcel(String name);
int execSqlByHive(String name);
void email(String name, String to, String cc) throws Exception;
int sendEmail(String to,String cc,String name); int sendEmail(String to,String cc,String name);
} }
...@@ -4,5 +4,6 @@ import com.gmei.data.gateway.server.dto.ExcelGenDto; ...@@ -4,5 +4,6 @@ import com.gmei.data.gateway.server.dto.ExcelGenDto;
public interface SparksqlService { public interface SparksqlService {
int executeSql(String name); int executeSql(String name);
ExcelGenDto getResult(String name);
ExcelGenDto getResultDto(String name); ExcelGenDto getResultDto(String name);
} }
...@@ -34,6 +34,22 @@ public class BiReportServiceImpl implements BiReportService { ...@@ -34,6 +34,22 @@ public class BiReportServiceImpl implements BiReportService {
return hivesqlService.executeSql(name); return hivesqlService.executeSql(name);
} }
@Override
public void email(String name, String to, String cc) throws Exception{
ExcelGenDto resultDto = sparksqlService.getResult(name);
if(null == resultDto){
throw new Exception("获取查询结果失败,请核实!");
}else{
int genExcelRs = excelService.genExcel(resultDto,name);
if(Constants.SUCCESS == genExcelRs){
int rs = emailService.sendAttachmentsMail(to, cc, name);
if(rs == Constants.FAIL){
throw new Exception("收件人或抄送人邮件地址有误,未完全发送成功,请核实!");
}
}
}
}
@Override @Override
public int genExcel(String name){ public int genExcel(String name){
ExcelGenDto resultDto = sparksqlService.getResultDto(name); ExcelGenDto resultDto = sparksqlService.getResultDto(name);
...@@ -53,7 +69,11 @@ public class BiReportServiceImpl implements BiReportService { ...@@ -53,7 +69,11 @@ public class BiReportServiceImpl implements BiReportService {
}else{ }else{
int genExcelRs = excelService.genExcel(resultDto,name); int genExcelRs = excelService.genExcel(resultDto,name);
if(Constants.SUCCESS == genExcelRs){ if(Constants.SUCCESS == genExcelRs){
rs = emailService.sendAttachmentsMail(to,cc,name); try{
emailService.sendAttachmentsMail(to,cc,name);
}catch (Exception e){
rs = Constants.FAIL;
}
} }
} }
return rs; return rs;
......
...@@ -4,17 +4,24 @@ import com.gmei.data.gateway.server.common.Constants; ...@@ -4,17 +4,24 @@ import com.gmei.data.gateway.server.common.Constants;
import com.gmei.data.gateway.server.service.EmailService; import com.gmei.data.gateway.server.service.EmailService;
import com.gmei.data.gateway.server.utils.DateUtils; import com.gmei.data.gateway.server.utils.DateUtils;
import com.gmei.data.gateway.server.utils.ProperUtils; import com.gmei.data.gateway.server.utils.ProperUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.FileSystemResource; import org.springframework.core.io.FileSystemResource;
import org.springframework.mail.MailSendException;
import org.springframework.mail.javamail.JavaMailSender; import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.mail.javamail.MimeMessageHelper; import org.springframework.mail.javamail.MimeMessageHelper;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import javax.mail.Address;
import javax.mail.SendFailedException;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeMessage;
import java.io.File; import java.io.File;
import java.util.ArrayList;
import java.util.List;
@Component @Component
...@@ -29,35 +36,63 @@ public class EmailServiceImpl implements EmailService { ...@@ -29,35 +36,63 @@ public class EmailServiceImpl implements EmailService {
private String filePath; private String filePath;
@Value("${proper.file.path}") @Value("${proper.file.path}")
private String properFilePath; private String properFilePath;
@Value("${proper.base.path}")
private String properBasePath;
@Autowired @Autowired
private JavaMailSender mailSender; private JavaMailSender mailSender;
private static final Logger logger = LoggerFactory.getLogger(EmailServiceImpl.class); private static final Logger logger = LoggerFactory.getLogger(EmailServiceImpl.class);
private List<String> getInvalidAddresses(Throwable throwable) {
List<String> rsList = new ArrayList<String>();
if (throwable == null) {
return rsList;
}
if (throwable instanceof MailSendException) {
Exception[] exceptions = ((MailSendException) throwable).getMessageExceptions();
for (Exception exception : exceptions) {
if (exception instanceof SendFailedException) {
rsList = getStringAddress(((SendFailedException) exception).getInvalidAddresses());
}
}
}
if (throwable instanceof SendFailedException) {
rsList = getStringAddress(((SendFailedException) throwable).getInvalidAddresses());
}
return rsList;
}
private List<String> getStringAddress(Address[] addresses) {
List<String> invalid = new ArrayList<String>();
for (Address a : addresses) {
String address = ((InternetAddress) a).getAddress();
if (address != null && StringUtils.isNotEmpty(address)) {
invalid.add(address);
}
}
return invalid;
}
@Override @Override
public int sendAttachmentsMail(String to,String cc,String name){ public int sendAttachmentsMail(String to,String cc,String name){
int rs = Constants.SUCCESS; String[] toUsers = to.split(",");
String[] ccUsers = cc.split(",");
try {
String subject = ""; String subject = "";
String value = ProperUtils.getValue(properFilePath, name); String properPath = properBasePath + "pm/" + name + "/en-cn.properties";
logger.info("邮件主题属性文件目录为:{}",properPath);
String value = ProperUtils.getValue(properPath , name);
if(null != value){ if(null != value){
subject = value; subject = value;
}else{ }else{
subject = name; subject = name;
} }
MimeMessage message = mailSender.createMimeMessage(); MimeMessage message = mailSender.createMimeMessage();
try {
//MimeMessageHelper helper = new MimeMessageHelper(message, true);
MimeMessageHelper helper = new MimeMessageHelper(message, true,"UTF-8"); MimeMessageHelper helper = new MimeMessageHelper(message, true,"UTF-8");
helper.setFrom(from); helper.setFrom(from);
String[] toUsers = to.split(",");
helper.setTo(toUsers); helper.setTo(toUsers);
helper.setSubject(subject); helper.setSubject(subject);
helper.setText(content, true); helper.setText(content, true);
if (cc != null && !"".equals(cc.trim())) {
String[] ccUsers = cc.split(",");
helper.setCc(ccUsers); helper.setCc(ccUsers);
}
if (bcc != null && !"".equals(bcc.trim())) { if (bcc != null && !"".equals(bcc.trim())) {
String[] bccUsers = bcc.split(","); String[] bccUsers = bcc.split(",");
helper.setBcc(bccUsers); helper.setBcc(bccUsers);
...@@ -66,14 +101,33 @@ public class EmailServiceImpl implements EmailService { ...@@ -66,14 +101,33 @@ public class EmailServiceImpl implements EmailService {
String fileNameSrc = name + "-" + DateUtils.getYesterdayDateSimpleStr() + ".xlsx"; String fileNameSrc = name + "-" + DateUtils.getYesterdayDateSimpleStr() + ".xlsx";
FileSystemResource file = new FileSystemResource(new File(filePath + fileNameSrc)); FileSystemResource file = new FileSystemResource(new File(filePath + fileNameSrc));
helper.addAttachment(fileName, file); helper.addAttachment(fileName, file);
//helper.addAttachment(MimeUtility.encodeWord(file.getFilename(),"utf-8","B"), file);
mailSender.send(message); mailSender.send(message);
logger.info("带附件的邮件已经发送。"); }catch(Throwable e){
} catch (Exception e) { String validToUsers = "";
logger.error("发送带附件的邮件时发生异常!", e); List<String> invalid = getInvalidAddresses(e);
rs = Constants.FAIL; for(String toUser : toUsers){
if(invalid.contains(toUser)){
logger.error("剔除无效接收人邮箱地址:{}",toUser);
continue;
} }
return rs; validToUsers = validToUsers + toUser + ",";
}
String validCcUsers = "";
for(String ccUser : ccUsers){
if(invalid.contains(ccUser)){
logger.error("剔除无效抄送人邮箱地址:{}",ccUser);
continue;
}
validCcUsers = validCcUsers + ccUser + ",";
}
if (invalid.size() > 0) {
validToUsers = validToUsers.substring(0, validToUsers.length() - 1);
validCcUsers = validCcUsers.substring(0, validCcUsers.length() - 1);
sendAttachmentsMail(validToUsers,validCcUsers,name);
return Constants.FAIL;
}
}
return Constants.SUCCESS;
} }
@Override @Override
......
...@@ -37,8 +37,6 @@ public class ExcelServiceImpl implements ExcelService { ...@@ -37,8 +37,6 @@ public class ExcelServiceImpl implements ExcelService {
@Value("${excel.output.path}") @Value("${excel.output.path}")
private String filePath; private String filePath;
@Value("${proper.file.path}")
private String properFilePath;
private static final Logger logger = LoggerFactory.getLogger(ExcelServiceImpl.class); private static final Logger logger = LoggerFactory.getLogger(ExcelServiceImpl.class);
/** /**
...@@ -64,13 +62,13 @@ public class ExcelServiceImpl implements ExcelService { ...@@ -64,13 +62,13 @@ public class ExcelServiceImpl implements ExcelService {
rs = Constants.FAIL; rs = Constants.FAIL;
logger.error("Error!"); logger.error("Error!");
} }
// String subject = ""; //String subject = "";
// String value = ProperUtils.getValue(properFilePath, name); //String value = ProperUtils.getValue(properFilePath, name);
// if(null != value){ //if(null != value){
// subject = value; // subject = value;
// }else{ //}else{
// subject = name; // subject = name;
// } //}
FileOutputStream out = new FileOutputStream(filePath + name + "-" + DateUtils.getYesterdayDateSimpleStr() + ".xlsx"); FileOutputStream out = new FileOutputStream(filePath + name + "-" + DateUtils.getYesterdayDateSimpleStr() + ".xlsx");
wb.write(out); wb.write(out);
out.close(); out.close();
...@@ -158,7 +156,7 @@ public class ExcelServiceImpl implements ExcelService { ...@@ -158,7 +156,7 @@ public class ExcelServiceImpl implements ExcelService {
} }
}else{ }else{
cell.setCellValue(""); cell.setCellValue("");
logger.error("the value is null!"); logger.warn("the value is null!");
} }
} }
...@@ -172,9 +170,7 @@ public class ExcelServiceImpl implements ExcelService { ...@@ -172,9 +170,7 @@ public class ExcelServiceImpl implements ExcelService {
HSSFCellStyle style = workbook.createCellStyle(); HSSFCellStyle style = workbook.createCellStyle();
style.setAlignment(HSSFCellStyle.ALIGN_CENTER); style.setAlignment(HSSFCellStyle.ALIGN_CENTER);
style.setVerticalAlignment(HSSFCellStyle.VERTICAL_CENTER); style.setVerticalAlignment(HSSFCellStyle.VERTICAL_CENTER);
HSSFSheet sheet = workbook.createSheet("sheet"); HSSFSheet sheet = workbook.createSheet("sheet");
HSSFRow row0 = sheet.createRow(0); HSSFRow row0 = sheet.createRow(0);
HSSFCell cell_00 = row0.createCell(0); HSSFCell cell_00 = row0.createCell(0);
cell_00.setCellStyle(style); cell_00.setCellStyle(style);
......
...@@ -62,7 +62,6 @@ public class HivesqlServiceImpl implements HivesqlService { ...@@ -62,7 +62,6 @@ public class HivesqlServiceImpl implements HivesqlService {
String hql = sbf.toString(); String hql = sbf.toString();
hql = hql.replaceAll("#partition_date", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr())) hql = hql.replaceAll("#partition_date", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()))
.replaceAll("#partition_day", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr())); .replaceAll("#partition_day", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()));
Connection conn = null; Connection conn = null;
try{ try{
conn = DriverManager.getConnection(hiveSqlJdbcUrl,"bi",""); conn = DriverManager.getConnection(hiveSqlJdbcUrl,"bi","");
......
...@@ -29,10 +29,14 @@ public class SparksqlServiceImpl implements SparksqlService { ...@@ -29,10 +29,14 @@ public class SparksqlServiceImpl implements SparksqlService {
@Value("${sql.input.path}") @Value("${sql.input.path}")
private String sqlPath; private String sqlPath;
@Value("${sql.base.path}")
private String sqlBasePath;
@Value("${datasource.sparkSql.jdbcUrl}") @Value("${datasource.sparkSql.jdbcUrl}")
private String sparkSqlJdbcUrl; private String sparkSqlJdbcUrl;
@Value("${proper.file.path}") @Value("${proper.file.path}")
private String properFilePath; private String properFilePath;
@Value("${proper.base.path}")
private String properBasePath;
private static final Logger logger = LoggerFactory.getLogger(SparksqlServiceImpl.class); private static final Logger logger = LoggerFactory.getLogger(SparksqlServiceImpl.class);
@Override @Override
...@@ -67,13 +71,12 @@ public class SparksqlServiceImpl implements SparksqlService { ...@@ -67,13 +71,12 @@ public class SparksqlServiceImpl implements SparksqlService {
String hql = sbf.toString(); String hql = sbf.toString();
hql = hql.replaceAll("#partition_date", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr())) hql = hql.replaceAll("#partition_date", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()))
.replaceAll("#partition_day", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr())); .replaceAll("#partition_day", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()));
Connection conn = null; Connection conn = null;
try{ try{
conn = DriverManager.getConnection(sparkSqlJdbcUrl,"data",""); conn = DriverManager.getConnection(sparkSqlJdbcUrl,"data","");
Statement statement = conn.createStatement(); Statement statement = conn.createStatement();
statement.executeUpdate("ADD JAR hdfs:///user/hive/share/lib/udf/hive-udf-1.0-SNAPSHOT.jar"); //statement.executeUpdate("ADD JAR hdfs:///user/hive/share/lib/udf/hive-udf-1.0-SNAPSHOT.jar");
statement.executeUpdate("CREATE TEMPORARY FUNCTION convup AS 'com.gmei.hive.common.udf.UDFConvUpgrade'"); //statement.executeUpdate("CREATE TEMPORARY FUNCTION convup AS 'com.gmei.hive.common.udf.UDFConvUpgrade'");
int updateCount = statement.executeUpdate(hql); int updateCount = statement.executeUpdate(hql);
if(updateCount > 0){ if(updateCount > 0){
logger.info("*** Execute By Sparksql success, hql: {}",hql); logger.info("*** Execute By Sparksql success, hql: {}",hql);
...@@ -90,17 +93,96 @@ public class SparksqlServiceImpl implements SparksqlService { ...@@ -90,17 +93,96 @@ public class SparksqlServiceImpl implements SparksqlService {
} }
@Override @Override
public ExcelGenDto getResultDto(String name){ public ExcelGenDto getResult(String name) {
List<String> sheetNameList = new ArrayList<String>(); List<String> sheetNameList = new ArrayList<String>();
List<List<String>> headersList = new ArrayList<List<String>>(); List<List<String>> headersList = new ArrayList<List<String>>();
List<List<List>> rowsList = new ArrayList<List<List>>(); List<List<List>> rowsList = new ArrayList<List<List>>();
File sqlDir = new File(sqlBasePath + "pm/" + name + "/report");
File[] files = sqlDir.listFiles();
for(File file : files){
String fileName = file.getName();
String nameStr =fileName.replace(".sql","");
String subject = "";
String properPath = properBasePath + "pm/" + name + "/en-cn.properties";
logger.info("Sheets属性文件目录为:{}",properPath);
String value = ProperUtils.getValue(properPath , nameStr);
if(null != value){
subject = value;
}else{
subject = nameStr;
}
sheetNameList.add(subject);
List<String> headers = new ArrayList<String>();
List<List> rows = new ArrayList<List>();
BufferedReader reader = null;
StringBuffer sbf = new StringBuffer();
try {
reader = new BufferedReader(new FileReader(file));
String tempStr;
while ((tempStr = reader.readLine()) != null) {
sbf.append("\n").append(tempStr);
}
reader.close();
} catch (IOException e) {
e.printStackTrace();
logger.error(e.getMessage());
continue;
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException e1) {
e1.printStackTrace();
logger.error(e1.getMessage());
continue;
}
}
}
String hql = sbf.toString();
hql = hql.replace("${partition_date}", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()))
.replace("${partition_day}", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()))
.replace("$partition_date", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()))
.replace("$partition_day", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()))
.replace(";","");
Connection conn = null;
try{
conn = DriverManager.getConnection(sparkSqlJdbcUrl,"data","");
Statement statement = conn.createStatement();
ResultSet rs = statement.executeQuery(hql);
ResultSetMetaData metaData = rs.getMetaData();
int columnCount = metaData.getColumnCount();
for(int i=1;i<= columnCount;i++){
String columnName = metaData.getColumnName(i);
headers.add(columnName);
}
while(rs.next()){
List<String> rowVals = new ArrayList<String>();
for(int i=1;i<= columnCount;i++){
rowVals.add(rs.getString(i));
}
rows.add(rowVals);
}
}catch (Exception e){
e.printStackTrace();
logger.error(e.getMessage());
return null;
}
headersList.add(headers);
rowsList.add(rows);
}
return new ExcelGenDto(sheetNameList,headersList,rowsList);
}
@Override
public ExcelGenDto getResultDto(String name){
List<String> sheetNameList = new ArrayList<String>();
List<List<String>> headersList = new ArrayList<List<String>>();
List<List<List>> rowsList = new ArrayList<List<List>>();
File sqlDir = new File(sqlPath + name); File sqlDir = new File(sqlPath + name);
File[] files = sqlDir.listFiles(); File[] files = sqlDir.listFiles();
for(File file : files){ for(File file : files){
String fileName = file.getName(); String fileName = file.getName();
String nameStr =fileName.replace(".sql",""); String nameStr =fileName.replace(".sql","");
String subject = ""; String subject = "";
String value = ProperUtils.getValue(properFilePath, nameStr); String value = ProperUtils.getValue(properFilePath, nameStr);
if(null != value){ if(null != value){
...@@ -109,10 +191,8 @@ public class SparksqlServiceImpl implements SparksqlService { ...@@ -109,10 +191,8 @@ public class SparksqlServiceImpl implements SparksqlService {
subject = nameStr; subject = nameStr;
} }
sheetNameList.add(subject); sheetNameList.add(subject);
List<String> headers = new ArrayList<String>(); List<String> headers = new ArrayList<String>();
List<List> rows = new ArrayList<List>(); List<List> rows = new ArrayList<List>();
BufferedReader reader = null; BufferedReader reader = null;
StringBuffer sbf = new StringBuffer(); StringBuffer sbf = new StringBuffer();
try { try {
...@@ -140,7 +220,6 @@ public class SparksqlServiceImpl implements SparksqlService { ...@@ -140,7 +220,6 @@ public class SparksqlServiceImpl implements SparksqlService {
String hql = sbf.toString(); String hql = sbf.toString();
hql = hql.replaceAll("#partition_date", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr())) hql = hql.replaceAll("#partition_date", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()))
.replaceAll("#partition_day", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr())); .replaceAll("#partition_day", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()));
Connection conn = null; Connection conn = null;
try{ try{
conn = DriverManager.getConnection(sparkSqlJdbcUrl,"data",""); conn = DriverManager.getConnection(sparkSqlJdbcUrl,"data","");
......
...@@ -8,7 +8,6 @@ import java.util.Date; ...@@ -8,7 +8,6 @@ import java.util.Date;
* @author apple * @author apple
*/ */
public class DateUtils { public class DateUtils {
private static final String DATE_FORMATE_YMDHMS = "yyyy-MM-dd HH:mm:ss"; private static final String DATE_FORMATE_YMDHMS = "yyyy-MM-dd HH:mm:ss";
private static final String DATE_FORMATE_YMD_SIMPLE = "yyyyMMdd"; private static final String DATE_FORMATE_YMD_SIMPLE = "yyyyMMdd";
private static final String DATE_FORMATE_YMD = "yyyy-MM-dd"; private static final String DATE_FORMATE_YMD = "yyyy-MM-dd";
...@@ -29,14 +28,6 @@ public class DateUtils { ...@@ -29,14 +28,6 @@ public class DateUtils {
return new SimpleDateFormat(DATE_FORMATE_YMD).format(new Date()); return new SimpleDateFormat(DATE_FORMATE_YMD).format(new Date());
} }
/**
* 获取当前时间字符串
* @return
*/
// public static String getCurrentDateSimpleStr() {
// return new SimpleDateFormat(DATE_FORMATE_YMD_SIMPLE).format(new Date());
// }
/** /**
* 获取昨天的日期字符串 * 获取昨天的日期字符串
* @return * @return
......
...@@ -27,6 +27,8 @@ mail: ...@@ -27,6 +27,8 @@ mail:
sql: sql:
input: input:
path: /home/bi/bi-report/sqls/ path: /home/bi/bi-report/sqls/
base:
path: /home/bi/bi-report/
#---excel config--- #---excel config---
excel: excel:
...@@ -41,3 +43,5 @@ proper: ...@@ -41,3 +43,5 @@ proper:
file: file:
path: /home/bi/bi-report/conf/en-cn.properties path: /home/bi/bi-report/conf/en-cn.properties
#path: /Users/apple/git-repo/bi-report/conf/en-cn.properties #path: /Users/apple/git-repo/bi-report/conf/en-cn.properties
base:
path: /home/bi/bi-report/
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false" scan="true" scanPeriod="1 seconds"> <configuration debug="false" scan="true" scanPeriod="1 seconds">
<contextName>logback</contextName> <contextName>logback</contextName>
<property name="log.path" value="/data/bi-report/logs/bi-report-service.log"/> <property name="log.path" value="/data/bi-report/logs/bi-report-service.log"/>
<appender name="console" class="ch.qos.logback.core.ConsoleAppender"> <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder> <encoder>
<pattern>%d{HH:mm:ss.SSS} %contextName [%thread] %-5level %logger{36} - %msg%n</pattern> <pattern>%d{HH:mm:ss.SSS} %contextName [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder> </encoder>
</appender> </appender>
<appender name="file" class="ch.qos.logback.core.rolling.RollingFileAppender"> <appender name="file" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}</file> <file>${log.path}</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
...@@ -20,10 +17,8 @@ ...@@ -20,10 +17,8 @@
</pattern> </pattern>
</encoder> </encoder>
</appender> </appender>
<root level="info"> <root level="info">
<appender-ref ref="console"/> <appender-ref ref="console"/>
<appender-ref ref="file"/> <appender-ref ref="file"/>
</root> </root>
</configuration> </configuration>
\ No newline at end of file
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<h1 style="text-align:center;">北极星监控预警信息</h1>
<h2 style="color:red;">北极星监控预警</h2>
<h3>北极星监控预警</h3>
<h4>北极星监控预警</h4>
<h5>北极星监控预警</h5>
<h6>北极星监控预警</h6>
\ No newline at end of file
...@@ -21,7 +21,7 @@ public class EmailServiceTest { ...@@ -21,7 +21,7 @@ public class EmailServiceTest {
// @Test // @Test
// public void testSendAttachmentsMail() { // public void testSendAttachmentsMail() {
// emailService.sendAttachmentsMail("zhaojianwei@igengmei.com","jianweizhao@yeah.net","demo"); // emailService.sendAttachmentsMail("zhaojianwei@igengmei.com,zjw@igengmei.com","jianweizhao@yeah.net,aa@bb.com","demo");
// } // }
// //
// @Test // @Test
......
#coding: utf-8
#author:
#date:
#desc:send email with attach
import smtplib
import sys
import getopt
import os
import getpass
import time
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.header import Header
# --- email parameters: module-level state populated by processcmd() from argv ---
to_address = ""   # -t: comma-separated recipient list
subject = ""      # -s: mail subject
content = ""      # -c: mail body (HTML fragments are appended to this)
attach = ""       # -a: comma-separated attachment file paths
memail = ""       # -m: comma-separated Cc list
htable = ""       # -f: comma-separated CSV files to render as HTML tables
# --- user info ---
# Whitelist of OS users allowed to send mail, one username per line.
authfile = "/home/bi/bi-report/conf/auth.cf"
def readallow(filepath):
    """Read the whitelist file and return one entry per line, stripped.

    Args:
        filepath: path to a text file with one allowed username per line.

    Returns:
        list of stripped usernames (blank lines yield empty strings,
        matching the original behaviour).
    """
    usernames = []
    # Context manager closes the handle even on error (the original
    # opened the file and never closed it).
    with open(filepath, "r") as f:
        for line in f.readlines():
            usernames.append(line.strip())
    return usernames
def checkauth():
    """Return True when the current OS user appears in the auth whitelist."""
    global authfile
    allowed_users = readallow(authfile)
    current_user = getpass.getuser()
    return current_user in allowed_users
def isfile(filestr):
    """Check that *filestr* is an existing file with at least 2 lines.

    Attachments/tables shorter than two lines (header + one data row) are
    rejected so empty reports are not mailed out.

    Returns:
        True when the file exists and has >= 2 lines; False otherwise
        (the failure is also logged via write2file and echoed to stdout).
    """
    if os.path.isfile(filestr):
        # Context manager closes the handle (the original leaked it).
        with open(filestr, 'r') as fh:
            lines = fh.readlines()
        if len(lines) >= 2:
            return True
    # Reached both when the path is not a file and when it is too short.
    write2file("%s file is less than 2 lines or does not exist...." % getfilename(filestr))
    # print() function form behaves identically for a single argument in
    # Python 2 and also parses under Python 3.
    print("--->%s file is less than 2 lines or does not exist...." % getfilename(filestr))
    return False
def getfilename(filestr):
    """Return only the file-name component of *filestr*."""
    name_part = os.path.basename(filestr)
    return name_part
def makeaddress(addresses=None):
    """Build a ';'-separated recipient string from a comma-separated list.

    Entries that do not contain '@' past the first character get the
    default '@igengmei.com' domain appended.

    Args:
        addresses: optional comma-separated address string; when None the
            module-level global ``to_address`` is used, preserving the
            original zero-argument behaviour.

    Returns:
        ';'-joined normalized address string.
    """
    global to_address
    if addresses is None:
        addresses = to_address
    parts = []
    for address in addresses.split(","):
        # Note: '> 0' (not '>= 0') matches the original — a leading '@'
        # is treated as a bare username.
        if address.find("@") > 0:
            parts.append(address)
        else:
            parts.append("%s@igengmei.com" % address)
    return ";".join(parts)
def addattch(msgRoot):
    """Attach each file listed in the global ``attach`` to *msgRoot*.

    Returns:
        0 when there is nothing to attach; -1 when any file is missing or
        has fewer than 2 lines (remaining files are skipped); None when
        all attachments were added successfully.
    """
    global attach
    if attach.strip() == "":
        return 0
    for item in attach.split(","):
        if isfile(item):
            # Read raw bytes; MIMEText base64-encodes them as a generic
            # binary attachment.
            # NOTE(review): the file handle opened here is never closed.
            att = MIMEText(open('%s' % item, 'rb').read(), 'base64', 'utf-8')
            att["Content-Type"] = 'application/octet-stream'
            att["Content-Disposition"] = 'attachment; filename="%s"'% getfilename(item)
            msgRoot.attach(att)
        else:
            print "--->send mail failed, %s file is not exist or file row num less than 2" % getfilename(item)
            return -1
def makecc(cc_addresses=None):
    """Build a ';'-separated Cc string from a comma-separated list.

    Mirror of makeaddress() for the Cc list: entries without '@' past the
    first character get '@igengmei.com' appended.

    Args:
        cc_addresses: optional comma-separated address string; when None
            the module-level global ``memail`` is used, preserving the
            original zero-argument behaviour.

    Returns:
        ';'-joined normalized address string.

    Note:
        The original contained a stray debug ``print address_str`` that
        leaked the full Cc list to stdout and diverged from makeaddress();
        it has been removed.
    """
    global memail
    if cc_addresses is None:
        cc_addresses = memail
    parts = []
    for address in cc_addresses.split(","):
        if address.find("@") > 0:
            parts.append(address)
        else:
            parts.append("%s@igengmei.com" % address)
    return ";".join(parts)
def addsubject(msgRoot):
    """Fill in Subject / To / Cc on *msgRoot* and attach the HTML body.

    When the global ``htable`` names CSV files, each is validated and its
    HTML-table rendering is appended to the shared ``content`` global,
    followed by a fixed auto-mail footer.

    Returns:
        -1 when an htable file is missing or too short; True otherwise.
    """
    global subject
    global content
    global htable
    msgRoot['Subject'] = Header(subject, 'utf-8')
    msgRoot['To'] = makeaddress()
    msgRoot['Cc'] = makecc()
    if htable != "":
        # NOTE(review): indentation reconstructed from a flattened source —
        # assumed each file is validated then appended; confirm against the
        # original file.
        for ff in htable.split(","):
            if not isfile(ff):
                return -1
            hcontent = trans2table(ff)
            content += hcontent
    # Fixed footer appended to every outgoing mail body.
    content += "<br><br><br><br><font color=red size='3'>这是一封自动发出的邮件,邮件发送自大数据BI组。<br>请使用wps打开,appstore地址:https://appsto.re/cn/MAqWJ.i</font>"
    msg = MIMEText(content,'html','utf-8')
    msgRoot.attach(msg)
    return True
def trans2table(path):
    """Render a comma-separated text file as an HTML table.

    The first non-empty line is treated as the header row and rendered
    with a highlighted background; remaining lines are plain data rows.
    (The original incremented its row counter once per *cell*, so only
    the very first cell ever received header styling; the counter now
    advances once per row, styling the whole first row as intended.)

    Args:
        path: comma-separated list of file paths; only the first entry is
            rendered — the early return inside the loop is preserved from
            the original.

    Returns:
        HTML string for the table, or None when *path* is empty.
    """
    for ff in path.split(","):
        row_index = 0
        result = "<br><br><table border='1' style='border-collapse:collapse' bordercolor='#9D9D9D'>\n"
        # Context manager closes the handle (the original leaked it).
        with open(ff, 'r') as fh:
            for eachline in fh:
                if len(eachline.strip()) == 0:
                    continue
                result += " <tr>"
                for item in eachline.strip().split(","):
                    if row_index == 0:
                        result += "<td align='left' bgcolor='#8EA9DB' style='font-family:Microsoft YaHei;padding:2px 5px'>%s</td>" % item
                    else:
                        result += "<td align='left' style='font-family:Microsoft YaHei;padding:2px 5px'>%s</td>" % item
                # Advance once per ROW (bug fix; was once per cell).
                row_index += 1
                result += "</tr>\n"
        result += "</table>"
        return result
def sendmail():
    """Assemble and send the mail over SMTP (STARTTLS on port 25).

    Validates required parameters, builds the multipart message (HTML body
    plus attachments), sends to the combined To + Cc list, and logs the
    outcome. Returns 0 when any precondition fails; None on success.
    """
    global to_address
    # --- account info ---
    # SECURITY(review): credentials are hard-coded in source; consider
    # moving them to a protected config file.
    sender = 'youjiantongji@wanmeizhensuo.com'
    username = 'youjiantongji@wanmeizhensuo.com'
    password = 'mq37tgk8nGi6eg1Hgq388oro3itvdmPl'
    smtpserver = 'smtp.exmail.qq.com'
    server_port = 25
    # Abort early when -t / -s are missing.
    if not checkparam():
        return 0
    msgRoot = MIMEMultipart()
    if addsubject(msgRoot) == -1:
        return 0
    if addattch(msgRoot) == -1:
        return 0
    # Plain connection upgraded via STARTTLS before login.
    smtp = smtplib.SMTP(smtpserver, server_port)
    smtp.ehlo()
    smtp.starttls()
    smtp.login(username, password)
    # Envelope recipients = To + Cc (both ';'-separated strings).
    to_receiver = makeaddress()
    to_cc = makecc()
    smtp.sendmail(sender, to_receiver.split(";") + to_cc.split(";"), msgRoot.as_string())
    smtp.quit()
    print "send mail success...."
    write2file("send mail success")
    write2file("--------------------------------------------------------------------------------------")
def processcmd(cmd):
    """Parse command-line options into the module-level mail parameters.

    Args:
        cmd: argv slice, e.g. ``sys.argv[1:]``; recognized short options
            are -t, -s, -c, -a, -m, -f (each takes a value).

    Returns:
        -1 on a parse error (after printing the problem and the usage
        text); None on success, with the globals filled in.
    """
    global to_address
    global subject
    global content
    global attach
    global memail
    global htable
    try:
        options, args = getopt.getopt(cmd, "c:a:t:s:m:f:")
        for name, value in options:
            # The original used `name in ("-t")` — a substring test against
            # a plain string; proper one-element tuples are used instead.
            if name in ("-t",):
                to_address = value
            elif name in ("-s",):
                subject = value
            elif name in ("-c",):
                content = value
            elif name in ("-a",):
                attach = value
            elif name in ("-m",):
                memail = value
            elif name in ("-f",):
                htable = value
    except Exception as e:
        # `as` form is valid in Python 2.6+ and Python 3 (the original
        # comma form is Python-2-only).
        print("--->%s, remove this param" % str(e))
        usage()
        return -1
def checkparam():
global to_address
global subject
if to_address.strip() == "" or subject.strip() == "":
print "--->must input -t to_address -s subject, and the value can not be empty"
usage()
return False
else:
return True
def loginlog(command):
    """Append an invocation audit line (timestamp, user, argv) to the send log."""
    stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    caller = getpass.getuser()
    write2file("%s\tuser:%s\tcmd:%s" % (stamp, caller, command))
def processlog():
    """Append the parsed mail parameters to the send log, then a separator line."""
    global to_address
    global subject
    global content
    global attach
    stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    caller = getpass.getuser()
    # "toaddredd" typo kept: the log format is existing behaviour.
    entry = "%s\tuser:%s\ttoaddredd:%s\tsubject:%s\tcontent:%s\tattach:%s" % (
        stamp, caller, to_address, subject, content, attach)
    write2file(entry)
    write2file("--------------------------------------------------------------------------------------")
def write2file(sss):
    """Append *sss* as one line to the fixed send-mail log file.

    NOTE(review): the log path is hard-coded and its directory must
    already exist, otherwise the open raises IOError.
    """
    logfile = "/data/bi-report/logs/sendmail.log"
    # 'with' guarantees the handle is closed/flushed even when the write
    # raises (the original left the file open on exceptions).
    with open(logfile, 'a') as output:
        output.write(str(sss) + "\n")
def usage():
print 'sendmail usage:'
print ' -t: mail to (Required)'
print ' -s: subject (Required)'
print ' -m: cc address (Required)'
print ' -c: mail content (Optional)'
print ' -a: attach (Optional)'
print ' -f: trans file content to html table(Optional)'
if __name__ == '__main__':
    # Audit every invocation (timestamp, user, argv) before any checks.
    loginlog(sys.argv)
    # Only OS users whitelisted in auth.cf may send mail.
    if not checkauth():
        print "--->You do not have permission to send email, please contact fangguoqing@igengmei.com"
        sys.exit(0)
    # Need at least "-t <addr> -s <subject>" => argv length >= 3
    # (script name + two tokens).
    if len(sys.argv) >= 3:
        if processcmd(sys.argv[1:]) != -1:
            sendmail()
    else:
        usage()
\ No newline at end of file
#!/bin/bash
# Run the ETL SQL for one project against Hive (beeline), substituting the
# partition date into the script text before execution.
#
# Usage: <script> <project_name> [partition_date(yyyymmdd)]
#   partition_date defaults to yesterday when omitted.

# Beeline connection kept in a variable so the endpoint is easy to change.
hive="/opt/hive/hive-1.1.0-cdh5.16.1/bin/beeline -u jdbc:hive2://bj-gm-prod-cos-datacenter005:10000 -n data"

# At least one argument required: the project name (partition date optional).
if [ $# -lt 1 ]; then
    echo "Param project_name lost, please check again!"
    # Exit non-zero so schedulers see the failure (the original exited 0,
    # which reads as success).
    exit 1
fi

# Verify the project's SQL script exists.
project_name=$1
sql_path="/home/bi/bi-report/pm/$project_name/etl/$project_name.sql"
if [ ! -f "$sql_path" ]; then
    echo "Param project_name error, please check again!"
    # The original ran `exit \`$sql_path\``, which *executed* the SQL file
    # as a shell command just to obtain an exit status — replaced with a
    # plain failure code.
    exit 1
fi

# Use the supplied partition date, or default to yesterday.
if [ -n "$2" ] ;then
    partition_date=$2
else
    partition_date=`date --date="-1 day" +%Y%m%d`
fi

# Substitute the ${partition_day} placeholder throughout the SQL text.
tmp_sql=`cat ${sql_path}`
sql=${tmp_sql//'${partition_day}'/${partition_date}}
echo -e "${sql}"

# Execute the SQL.
$hive -e "$sql"
\ No newline at end of file
This diff is collapsed.
#!/bin/bash
# Run a named report SQL through Hive beeline and export the result as CSV
# under /data/bi-report/result/<name>_<today>.csv.
#
# Usage: <script> <sql_name> [billing_date(YYYY-MM-DD)]
source ~/.bashrc
source /etc/profile
if [ $# -lt 1 ]; then
    echo "Param error, please check again!"
    exit 1
fi
# Billing date: taken from the 2nd argument by default.
V_DATE=$2
partition_date=`date +"%Y%m%d"`
timestamp=`date +"%Y%m%d%H%M%S"`
# Timestamped temp copy of the SQL and the CSV output path.
tmp_file_name="/data/bi-report/tmp/tmp_"$timestamp".sql"
result_file_name="/data/bi-report/result/"$1"_"$partition_date".csv"
[ $? -eq 0 ] || exit 1
# When no billing date was supplied, fall back to the default T+1 date
# (yesterday).
# NOTE(review): ${V_DATE} is unquoted in the test below — an argument with
# whitespace would break it; confirm callers never pass one.
if [ -z ${V_DATE} ];then
V_DATE=`date --date="-1 day" +%Y-%m-%d`
fi
# Date helper variables substituted into the SQL template.
V_PARYMD=`date --date="$V_DATE" +%Y%m%d`
V_PARY_M_D=`date --date="$V_DATE" +%Y-%m-%d`
V_1DAYMD_AGO=`date --date="$V_PARYMD -1 day" +%Y%m%d`
V_7DAYMD_AGO=`date --date="$V_PARYMD -7 day" +%Y%m%d`
V_1DAY_M_D_AGO=`date --date="$V_PARYMD -1 day" +%Y-%m-%d`
V_7DAY_M_D_AGO=`date --date="$V_PARYMD -7 day" +%Y-%m-%d`
# Year / month / day components of the billing date.
V_PARYEAR=`date --date="$V_DATE" +%Y`
V_PARMONTH=`date --date="$V_DATE" +%m`
V_PARDAY=`date --date="$V_DATE" +%d`
# Print the resolved billing-date variables.
echo ""
echo "************************************"
echo "V_PARYMD: "$V_PARYMD
echo "V_PARY_M_D: "$V_PARY_M_D
echo "V_1DAYMD_AGO: "$V_1DAYMD_AGO
echo "V_7DAYMD_AGO: "$V_7DAYMD_AGO
echo "V_1DAY_M_D_AGO: "$V_1DAY_M_D_AGO
echo "V_7DAY_M_D_AGO: "$V_7DAY_M_D_AGO
echo "************************************"
echo ""
# Copy the SQL template, then substitute the $V_* placeholders in place.
cp "/home/bi/bi-report/sqls/"$1"/"$1".sql" $tmp_file_name
sed -i "s/\$V_PARYMD/$V_PARYMD/g" $tmp_file_name
sed -i "s/\$V_PARY_M_D/$V_PARY_M_D/g" $tmp_file_name
sed -i "s/\$V_1DAYMD_AGO/$V_1DAYMD_AGO/g" $tmp_file_name
sed -i "s/\$V_7DAYMD_AGO/$V_7DAYMD_AGO/g" $tmp_file_name
sed -i "s/\$V_1DAY_M_D_AGO/$V_1DAY_M_D_AGO/g" $tmp_file_name
sed -i "s/\$V_7DAY_M_D_AGO/$V_7DAY_M_D_AGO/g" $tmp_file_name
[ $? -eq 0 ] || exit 1
# Execute via beeline; CSV (with header) is written to the result file.
/opt/hive/hive-1.1.0-cdh5.16.1/bin/beeline -u jdbc:hive2://bj-gm-prod-cos-datacenter004:10000 -n data --outputformat=csv2 --showHeader=true -e "`cat $tmp_file_name`" > $result_file_name
rm -rf $tmp_file_name
exit $?
\ No newline at end of file
#!/bin/bash
# Run a named report SQL through Hive beeline (no CSV export — results go
# to stdout only).
#
# Usage: <script> <sql_name> [billing_date(YYYY-MM-DD)]
source ~/.bashrc
source /etc/profile
if [ $# -lt 1 ]; then
    echo "Param error, please check again!"
    exit 1
fi
# Billing date: taken from the 2nd argument by default.
V_DATE=$2
timestamp=`date +"%Y%m%d%H%M%S"`
# Timestamped temp copy of the SQL template.
tmp_file_name="/data/bi-report/tmp/tmp_"$timestamp".sql"
[ $? -eq 0 ] || exit 1
# When no billing date was supplied, fall back to the default T+1 date
# (yesterday).
if [ -z ${V_DATE} ];then
V_DATE=`date --date="-1 day" +%Y-%m-%d`
fi
# Date helper variables substituted into the SQL template.
V_PARYMD=`date --date="$V_DATE" +%Y%m%d`
V_PARY_M_D=`date --date="$V_DATE" +%Y-%m-%d`
V_1DAYMD_AGO=`date --date="$V_PARYMD -1 day" +%Y%m%d`
V_7DAYMD_AGO=`date --date="$V_PARYMD -7 day" +%Y%m%d`
V_1DAY_M_D_AGO=`date --date="$V_PARYMD -1 day" +%Y-%m-%d`
V_7DAY_M_D_AGO=`date --date="$V_PARYMD -7 day" +%Y-%m-%d`
# Year / month / day components of the billing date.
V_PARYEAR=`date --date="$V_DATE" +%Y`
V_PARMONTH=`date --date="$V_DATE" +%m`
V_PARDAY=`date --date="$V_DATE" +%d`
# Print the resolved billing-date variables.
echo ""
echo "************************************"
echo "V_PARYMD: "$V_PARYMD
echo "V_PARY_M_D: "$V_PARY_M_D
echo "V_1DAYMD_AGO: "$V_1DAYMD_AGO
echo "V_7DAYMD_AGO: "$V_7DAYMD_AGO
echo "V_1DAY_M_D_AGO: "$V_1DAY_M_D_AGO
echo "V_7DAY_M_D_AGO: "$V_7DAY_M_D_AGO
echo "************************************"
echo ""
# Copy the SQL template, then substitute the $V_* placeholders in place.
cp "/home/bi/bi-report/sqls/"$1"/"$1".sql" $tmp_file_name
sed -i "s/\$V_PARYMD/$V_PARYMD/g" $tmp_file_name
sed -i "s/\$V_PARY_M_D/$V_PARY_M_D/g" $tmp_file_name
sed -i "s/\$V_1DAYMD_AGO/$V_1DAYMD_AGO/g" $tmp_file_name
sed -i "s/\$V_7DAYMD_AGO/$V_7DAYMD_AGO/g" $tmp_file_name
sed -i "s/\$V_1DAY_M_D_AGO/$V_1DAY_M_D_AGO/g" $tmp_file_name
sed -i "s/\$V_7DAY_M_D_AGO/$V_7DAY_M_D_AGO/g" $tmp_file_name
[ $? -eq 0 ] || exit 1
# Execute via beeline.
/opt/hive/hive-1.1.0-cdh5.16.1/bin/beeline -u jdbc:hive2://bj-gm-prod-cos-datacenter004:10000 -n data -e "`cat $tmp_file_name`"
rm -rf $tmp_file_name
exit $?
\ No newline at end of file
#!/bin/bash
# Substitute date placeholders in an arbitrary SQL file and run it through
# the Spark Thrift Server via beeline, writing CSV output to a given path.
#
# Usage: <script> <sql_file_path> <output_csv_path> [billing_date(YYYY-MM-DD)]
source ~/.bashrc
source /etc/profile
if [ $# -lt 2 ]; then
    echo "Param error, please check again!"
    exit 1
fi
# Billing date: taken from the 3rd argument by default.
V_DATE=$3
# When no billing date was supplied, fall back to the default T+1 date
# (yesterday).
if [ -z ${V_DATE} ];then
V_DATE=`date --date="-1 day" +%Y-%m-%d`
fi
# Date helper variables substituted into the SQL template.
V_PARYMD=`date --date="$V_DATE" +%Y%m%d`
V_PARY_M_D=`date --date="$V_DATE" +%Y-%m-%d`
V_1DAYMD_AGO=`date --date="$V_PARYMD -1 day" +%Y%m%d`
V_7DAYMD_AGO=`date --date="$V_PARYMD -7 day" +%Y%m%d`
V_1DAY_M_D_AGO=`date --date="$V_PARYMD -1 day" +%Y-%m-%d`
V_7DAY_M_D_AGO=`date --date="$V_PARYMD -7 day" +%Y-%m-%d`
# Year / month / day components of the billing date.
V_PARYEAR=`date --date="$V_DATE" +%Y`
V_PARMONTH=`date --date="$V_DATE" +%m`
V_PARDAY=`date --date="$V_DATE" +%d`
# Print the resolved billing-date variables.
echo ""
echo "************************************"
echo "V_PARYMD: "$V_PARYMD
echo "V_PARY_M_D: "$V_PARY_M_D
echo "V_1DAYMD_AGO: "$V_1DAYMD_AGO
echo "V_7DAYMD_AGO: "$V_7DAYMD_AGO
echo "V_1DAY_M_D_AGO: "$V_1DAY_M_D_AGO
echo "V_7DAY_M_D_AGO: "$V_7DAY_M_D_AGO
echo "************************************"
echo ""
# Copy the SQL file, then substitute the $V_* placeholders in place.
# NOTE(review): tmp.sql is a fixed name in the current directory, so
# concurrent runs would clobber each other — confirm this script is never
# run in parallel.
#cp sqls/$1".sql" tmp.sql
cp $1 tmp.sql
sed -i "s/\$V_PARYMD/$V_PARYMD/g" tmp.sql
sed -i "s/\$V_PARY_M_D/$V_PARY_M_D/g" tmp.sql
sed -i "s/\$V_1DAYMD_AGO/$V_1DAYMD_AGO/g" tmp.sql
sed -i "s/\$V_7DAYMD_AGO/$V_7DAYMD_AGO/g" tmp.sql
sed -i "s/\$V_1DAY_M_D_AGO/$V_1DAY_M_D_AGO/g" tmp.sql
sed -i "s/\$V_7DAY_M_D_AGO/$V_7DAY_M_D_AGO/g" tmp.sql
[ $? -eq 0 ] || exit 1
# Execute via the Spark Thrift Server; CSV (with header) goes to $2.
/opt/spark/spark-2.4.5-bin-hadoop2.7/bin/beeline -u jdbc:hive2://bj-gm-prod-cos-datacenter003:10010/online -n data --outputformat=csv2 --showHeader=true -e "`cat tmp.sql`">$2
rm -rf tmp.sql
exit $?
\ No newline at end of file
#!/bin/bash
# Run a named report SQL through the Spark Thrift Server, export the result
# as CSV, then email it as an attachment via sendmail.py.
#
# Usage: <script> <sql_name> <to_list> <cc_list> [billing_date(YYYY-MM-DD)]
source ~/.bashrc
source /etc/profile
if [ $# -lt 3 ]; then
    echo "Param error, please check again!"
    exit 1
fi
# Billing date: taken from the 4th argument by default.
V_DATE=$4
partition_date=`date +"%Y%m%d"`
timestamp=`date +"%Y%m%d%H%M%S"`
# Timestamped temp copy of the SQL and the CSV output path.
tmp_file_name="/data/bi-report/tmp/tmp_"$timestamp".sql"
result_file_name="/data/bi-report/result/"$1"_"$partition_date".csv"
[ $? -eq 0 ] || exit 1
# When no billing date was supplied, fall back to the default T+1 date
# (yesterday).
if [ -z ${V_DATE} ];then
V_DATE=`date --date="-1 day" +%Y-%m-%d`
fi
# Date helper variables substituted into the SQL template.
V_PARYMD=`date --date="$V_DATE" +%Y%m%d`
V_PARY_M_D=`date --date="$V_DATE" +%Y-%m-%d`
V_1DAYMD_AGO=`date --date="$V_PARYMD -1 day" +%Y%m%d`
V_7DAYMD_AGO=`date --date="$V_PARYMD -7 day" +%Y%m%d`
V_1DAY_M_D_AGO=`date --date="$V_PARYMD -1 day" +%Y-%m-%d`
V_7DAY_M_D_AGO=`date --date="$V_PARYMD -7 day" +%Y-%m-%d`
# Year / month / day components of the billing date.
V_PARYEAR=`date --date="$V_DATE" +%Y`
V_PARMONTH=`date --date="$V_DATE" +%m`
V_PARDAY=`date --date="$V_DATE" +%d`
# Print the resolved billing-date variables.
echo ""
echo "************************************"
echo "V_PARYMD: "$V_PARYMD
echo "V_PARY_M_D: "$V_PARY_M_D
echo "V_1DAYMD_AGO: "$V_1DAYMD_AGO
echo "V_7DAYMD_AGO: "$V_7DAYMD_AGO
echo "V_1DAY_M_D_AGO: "$V_1DAY_M_D_AGO
echo "V_7DAY_M_D_AGO: "$V_7DAY_M_D_AGO
echo "************************************"
echo ""
# Copy the SQL template, then substitute the $V_* placeholders in place.
cp "/home/bi/bi-report/sqls/"$1"/"$1".sql" $tmp_file_name
sed -i "s/\$V_PARYMD/$V_PARYMD/g" $tmp_file_name
sed -i "s/\$V_PARY_M_D/$V_PARY_M_D/g" $tmp_file_name
sed -i "s/\$V_1DAYMD_AGO/$V_1DAYMD_AGO/g" $tmp_file_name
sed -i "s/\$V_7DAYMD_AGO/$V_7DAYMD_AGO/g" $tmp_file_name
sed -i "s/\$V_1DAY_M_D_AGO/$V_1DAY_M_D_AGO/g" $tmp_file_name
sed -i "s/\$V_7DAY_M_D_AGO/$V_7DAY_M_D_AGO/g" $tmp_file_name
[ $? -eq 0 ] || exit 1
# Execute via the Spark Thrift Server; CSV (with header) goes to the
# result file.
/opt/spark/spark-2.4.5-bin-hadoop2.7/bin/beeline -u jdbc:hive2://bj-gm-prod-cos-datacenter003:10010/online -n data --outputformat=csv2 --showHeader=true -e "`cat $tmp_file_name`"> $result_file_name
[ $? -eq 0 ] || exit 1
rm -rf $tmp_file_name
[ $? -eq 0 ] || exit 1
# Mail the CSV as an attachment to the given To/Cc lists.
to=$2
cc=$3
echo "to: "$to
echo "cc: "$cc
python2.7 /home/bi/bi-report/lib/python/sendmail.py -t $to -s 自动发送 -m $cc -a $result_file_name
exit $?
\ No newline at end of file
daily_content_data=内容日报
\ No newline at end of file
--*************************************************************** --***************************************************************
--*脚本名称: create_pm_c_op_co_content_dimen_d.sql --*脚本名称:
--*功能: 内容日报 --*功能: 内容日报
--*业务名称: pm --*业务名称: pm
--*输入数据: --*输入数据:
--*作者: weiyimin@igengmei.com --*作者: weiyimin@igengmei.com
--*更新时间: 2020-5-25 11:00 --*更新时间:
--*************************************************************** --***************************************************************
--设置全局变量&UDF --设置全局变量&UDF
......
#step2.job #step2.job
type=command type=command
dependencies=step1_1,step1_2,step1_3,step1_4,step1_5,step1_6,step1_7,step1_8,step1_9,step1_10,step1_11,step1_12,step1_13 dependencies=step1_1,step1_2,step1_3,step1_4,step1_5,step1_6,step1_7,step1_8,step1_9,step1_10,step1_11,step1_12,step1_13
command=curl -X GET http://localhost:8553/api/report/execSql?name=daily_content_data_insert command=/home/bi/bi-report/lib/shell/hive daily_content_data
\ No newline at end of file \ No newline at end of file
#step3.job
type=command
dependencies=step2
command=curl -X GET http://localhost:8553/api/report/email/daily_content_data/liudi@igengmei.com,wangxin@igengmei.com,dengguangyu@igengmei.com/weiyimin@igengmei.com,zhaofei@igengmei.com,yindanlei@igengmei.com
\ No newline at end of file
daily_push=push日报-策略方向
daily_push_click=push拉起设备数据
personal_push_received=push接收设备数据
\ No newline at end of file
--***************************************************************
--*Script name:
--*Purpose:       push daily report table (DDL)
--*Business:      pm
--*Input data:
--*Author:        weiyimin@igengmei.com
--*Updated:
--***************************************************************
--Set global variables & UDF
SET mapreduce.job.queuename=data;
--Switch to the target database
--NOTE(review): the original comment said "bl" but the statement uses pm.
USE pm;
--Create the report-layer internal table (one row per day/os/active-type).
CREATE TABLE IF NOT EXISTS pm.tl_pm_push_d
(
    day_id string comment '{"chs_name":"当天日期","description":"","etl":"","value":"","remark":""}',
    device_os_type string comment '{"chs_name":"设备类型","description":"","etl":"","value":"","remark":""}',
    active_type string comment '{"chs_name":"活跃类型","description":"","etl":"","value":"","remark":""}',
    dau BIGINT comment '{"chs_name":"DAU","description":"","etl":"","value":"","remark":""}',
    is_open_uv BIGINT comment '{"chs_name":"push开启设备数","description":"","etl":"","value":"","remark":""}',
    push_open_uv BIGINT comment '{"chs_name":"push拉起设备数","description":"","etl":"","value":"","remark":""}',
    push_open_pv BIGINT comment '{"chs_name":"push拉起人次","description":"","etl":"","value":"","remark":""}',
    push_open_2_uv BIGINT comment '{"chs_name":"push拉起有2跳设备数","description":"","etl":"","value":"","remark":""}',
    push_open_2_pv BIGINT comment '{"chs_name":"push拉起有2跳人次","description":"","etl":"","value":"","remark":""}',
    op_push_open_uv BIGINT comment '{"chs_name":"运营push拉起设备数","description":"","etl":"","value":"","remark":""}',
    op_push_open_pv BIGINT comment '{"chs_name":"运营push拉起人次","description":"","etl":"","value":"","remark":""}',
    op_push_open_2_uv BIGINT comment '{"chs_name":"运营push拉起有2跳设备数","description":"","etl":"","value":"","remark":""}',
    op_push_open_2_pv BIGINT comment '{"chs_name":"运营push拉起有2跳人次","description":"","etl":"","value":"","remark":""}',
    auto_push_open_uv BIGINT comment '{"chs_name":"除运营、个性化、签到、意愿外push拉起设备数","description":"","etl":"","value":"","remark":""}',
    auto_push_open_pv BIGINT comment '{"chs_name":"除运营、个性化、签到、意愿外push拉起人次","description":"","etl":"","value":"","remark":""}',
    auto_push_open_2_uv BIGINT comment '{"chs_name":"除运营、个性化、签到、意愿外push拉起有2跳设备数","description":"","etl":"","value":"","remark":""}',
    auto_push_open_2_pv BIGINT comment '{"chs_name":"除运营、个性化、签到、意愿外push拉起有2跳人次","description":"","etl":"","value":"","remark":""}',
    will_push_open_uv BIGINT comment '{"chs_name":"意愿探索push拉起设备数","description":"","etl":"","value":"","remark":""}',
    will_push_open_pv BIGINT comment '{"chs_name":"意愿探索push拉起人次","description":"","etl":"","value":"","remark":""}',
    will_push_open_2_uv BIGINT comment '{"chs_name":"意愿探索push拉起有2跳设备数","description":"","etl":"","value":"","remark":""}',
    will_push_open_2_pv BIGINT comment '{"chs_name":"意愿探索push拉起有2跳人次","description":"","etl":"","value":"","remark":""}',
    sign_push_open_uv BIGINT comment '{"chs_name":"签到push拉起设备数","description":"","etl":"","value":"","remark":""}',
    sign_push_open_pv BIGINT comment '{"chs_name":"签到push拉起人次","description":"","etl":"","value":"","remark":""}',
    sign_push_open_2_uv BIGINT comment '{"chs_name":"签到push拉起有2跳设备数","description":"","etl":"","value":"","remark":""}',
    sign_push_open_2_pv BIGINT comment '{"chs_name":"签到push拉起有2跳人次","description":"","etl":"","value":"","remark":""}',
    personal_push_open_uv BIGINT comment '{"chs_name":"个性化push拉起设备数","description":"","etl":"","value":"","remark":""}',
    personal_push_open_pv BIGINT comment '{"chs_name":"个性化push拉起人次","description":"","etl":"","value":"","remark":""}',
    personal_push_open_2_uv BIGINT comment '{"chs_name":"个性化push拉起有2跳设备数","description":"","etl":"","value":"","remark":""}',
    personal_push_open_2_pv BIGINT comment '{"chs_name":"个性化push拉起有2跳人次","description":"","etl":"","value":"","remark":""}',
    ctr_push_open_uv BIGINT comment '{"chs_name":"ctr_push拉起设备数","description":"","etl":"","value":"","remark":""}',
    ctr_push_open_pv BIGINT comment '{"chs_name":"ctr_push拉起人次","description":"","etl":"","value":"","remark":""}',
    ctr_push_open_2_uv BIGINT comment '{"chs_name":"ctr_push拉起有2跳设备数","description":"","etl":"","value":"","remark":""}',
    ctr_push_open_2_pv BIGINT comment '{"chs_name":"ctr_push拉起有2跳人次","description":"","etl":"","value":"","remark":""}',
    non_ctr_push_open_uv BIGINT comment '{"chs_name":"非ctr_push拉起设备数","description":"","etl":"","value":"","remark":""}',
    non_ctr_push_open_pv BIGINT comment '{"chs_name":"非ctr_push拉起人次","description":"","etl":"","value":"","remark":""}',
    non_ctr_push_open_2_uv BIGINT comment '{"chs_name":"非ctr_push拉起有2跳设备数","description":"","etl":"","value":"","remark":""}',
    non_ctr_push_open_2_pv BIGINT comment '{"chs_name":"非ctr_push拉起有2跳人次","description":"","etl":"","value":"","remark":""}',
    AI_push_open_uv BIGINT comment '{"chs_name":"AI_push拉起设备数","description":"","etl":"","value":"","remark":""}',
    AI_push_open_pv BIGINT comment '{"chs_name":"AI_push拉起人次","description":"","etl":"","value":"","remark":""}',
    AI_push_open_2_uv BIGINT comment '{"chs_name":"AI_push拉起有2跳设备数","description":"","etl":"","value":"","remark":""}',
    AI_push_open_2_pv BIGINT comment '{"chs_name":"AI_push拉起有2跳人次","description":"","etl":"","value":"","remark":""}',
    orignal_auto_push_open_uv BIGINT comment '{"chs_name":"除运营外push拉起设备数","description":"","etl":"","value":"","remark":""}',
    orignal_auto_push_open_pv BIGINT comment '{"chs_name":"除运营外push拉起人次","description":"","etl":"","value":"","remark":""}',
    orginal_auto_push_open_2_uv BIGINT comment '{"chs_name":"除运营外push拉起有2跳设备数","description":"","etl":"","value":"","remark":""}',
    orginal_auto_push_open_2_pv BIGINT comment '{"chs_name":"除运营外push拉起有2跳人次","description":"","etl":"","value":"","remark":""}'
)comment 'push日报'
PARTITIONED BY (PARTITION_DAY STRING comment '分区日期')
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\t'
COLLECTION ITEMS TERMINATED BY '\002'
MAP KEYS TERMINATED BY '\003'
LINES TERMINATED BY '\n'
STORED AS TEXTFILE;
\ No newline at end of file
--***************************************************************
--*Script name:
--*Purpose:       push receive data table (DDL)
--*Business:      pm
--*Input data:
--*Author:        weiyimin@igengmei.com
--*Updated:
--***************************************************************
--Set global variables & UDF
SET mapreduce.job.queuename=data;
--Switch to the target database
--NOTE(review): the original comment said "bl" but the statement uses pm.
USE pm;
--Create the report-layer internal table (one row per day/os/active-type/push-type).
CREATE TABLE IF NOT EXISTS pm.tl_pm_push_receive_d
(
    day_id string comment '{"chs_name":"当天日期","description":"","etl":"","value":"","remark":""}',
    device_os_type string comment '{"chs_name":"设备类型","description":"","etl":"","value":"","remark":""}',
    active_type string comment '{"chs_name":"活跃类型","description":"","etl":"","value":"","remark":""}',
    push_type BIGINT comment '{"chs_name":"push类型","description":"","etl":"","value":"","remark":""}',
    received_dev_num BIGINT comment '{"chs_name":"push接收设备数","description":"","etl":"","value":"","remark":""}',
    received_msg_num BIGINT comment '{"chs_name":"push接收消息数","description":"","etl":"","value":"","remark":""}',
    click_dev_num BIGINT comment '{"chs_name":"push点击设备数","description":"","etl":"","value":"","remark":""}',
    click_msg_num BIGINT comment '{"chs_name":"push点击消息数","description":"","etl":"","value":"","remark":""}'
)comment 'push接收数据'
PARTITIONED BY (PARTITION_DAY STRING comment '分区日期')
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\t'
COLLECTION ITEMS TERMINATED BY '\002'
MAP KEYS TERMINATED BY '\003'
LINES TERMINATED BY '\n'
STORED AS TEXTFILE;
\ No newline at end of file
This diff is collapsed.
#step1_2.job #step1_2.job
type=command type=command
command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online ml_hospital_spam_pv_day command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online tl_hdfs_push2_new_view
\ No newline at end of file \ No newline at end of file
#step1_5.job #step1_5.job
type=command type=command
command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online ml_device_day_active_status command=sh /home/bi/bi-report/lib/shell/waitsuccess.sh hive online tl_hdfs_api_pushtask2_view
\ No newline at end of file \ No newline at end of file
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment