Commit c1754fc6 authored by 魏艺敏

update

parents ca0537f5 b78cda04
File deleted
@@ -17,6 +17,12 @@ public class BiReportEndpoint {
         return biReportService.execSql(name);
     }
 
+    @ResponseBody
+    @RequestMapping(value = "/execSqlByHive")
+    public int execSqlByHive(@RequestParam String name){
+        return biReportService.execSqlByHive(name);
+    }
+
     @ResponseBody
     @RequestMapping(value = "/genExcel")
     public int genExcel(@RequestParam String name){
...
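
For reference, the new /execSqlByHive mapping responds to a plain HTTP GET with a name parameter. A minimal client sketch using java.net.http (JDK 11+); the base URL and report name are assumptions, only the path and parameter come from the diff above:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class ExecSqlByHiveClient {
        public static void main(String[] args) throws Exception {
            // Base URL is an assumption; the diff only defines the /execSqlByHive mapping.
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:8080/execSqlByHive?name=demo_report"))
                    .GET()
                    .build();
            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            // The service returns 0 on success and 1 on failure.
            System.out.println("status=" + response.statusCode() + " body=" + response.body());
        }
    }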
@@ -2,6 +2,7 @@ package com.gmei.data.gateway.server.service;
 
 public interface BiReportService {
     int execSql(String name);
+    int execSqlByHive(String name);
     int genExcel(String name);
     int sendEmail(String to,String cc,String name);
 }
 package com.gmei.data.gateway.server.service;
 
+import com.gmei.data.gateway.server.dto.ExcelGenDto;
+
 public interface HivesqlService {
     int executeSql(String name);
 }
@@ -3,5 +3,6 @@ package com.gmei.data.gateway.server.service;
 import com.gmei.data.gateway.server.dto.ExcelGenDto;
 
 public interface SparksqlService {
+    int executeSql(String name);
     ExcelGenDto getResultDto(String name);
 }
@@ -26,6 +26,11 @@ public class BiReportServiceImpl implements BiReportService {
     @Override
     public int execSql(String name){
+        return sparksqlService.executeSql(name);
+    }
+
+    @Override
+    public int execSqlByHive(String name){
         return hivesqlService.executeSql(name);
     }
...
@@ -5,9 +5,11 @@ import com.gmei.data.gateway.server.dto.ExcelGenDto;
 import com.gmei.data.gateway.server.service.ExcelService;
 import com.gmei.data.gateway.server.utils.DateUtils;
 import com.gmei.data.gateway.server.utils.ProperUtils;
+import org.apache.poi.hssf.usermodel.*;
 import org.apache.poi.ss.usermodel.BorderStyle;
 import org.apache.poi.ss.usermodel.HorizontalAlignment;
 import org.apache.poi.ss.usermodel.VerticalAlignment;
+import org.apache.poi.ss.util.CellRangeAddress;
 import org.apache.poi.xssf.usermodel.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -15,6 +17,7 @@ import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;
 
 import java.io.ByteArrayOutputStream;
+import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -156,5 +159,102 @@ public class ExcelServiceImpl implements ExcelService {
             logger.error("the value is null!");
         }
     }
+    /**
+     * Demo code for sketching the Excel report layout.
+     * @param args
+     */
+    public static void main(String[] args) {
+        HSSFWorkbook workbook = new HSSFWorkbook();
+        HSSFCellStyle style = workbook.createCellStyle();
+        style.setAlignment(HorizontalAlignment.CENTER);
+        style.setVerticalAlignment(VerticalAlignment.CENTER);
+        HSSFSheet sheet = workbook.createSheet("sheet");
+        HSSFRow row0 = sheet.createRow(0);
+        HSSFCell cell_00 = row0.createCell(0);
+        cell_00.setCellStyle(style);
+        cell_00.setCellValue("分类");
+        HSSFCell cell_01 = row0.createCell(1);
+        cell_01.setCellStyle(style);
+        cell_01.setCellValue("指标");
+        HSSFCell cell_02 = row0.createCell(2);
+        cell_02.setCellStyle(style);
+        cell_02.setCellValue("昨日值");
+        HSSFCell cell_03 = row0.createCell(3);
+        cell_03.setCellStyle(style);
+        cell_03.setCellValue("前日值");
+        HSSFCell cell_04 = row0.createCell(4);
+        cell_04.setCellStyle(style);
+        cell_04.setCellValue("前日环比");
+        HSSFCell cell_05 = row0.createCell(5);
+        cell_05.setCellStyle(style);
+        cell_05.setCellValue("上周同比");
+        HSSFCell cell_06 = row0.createCell(6);
+        cell_06.setCellStyle(style);
+        cell_06.setCellValue("上月日均值环比");
+        HSSFCell cell_07 = row0.createCell(7);
+        cell_07.setCellStyle(style);
+        cell_07.setCellValue("历史同期上月日均值同比");
+        HSSFCell cell_08 = row0.createCell(8);
+        cell_08.setCellStyle(style);
+        cell_08.setCellValue("预警");
+        HSSFRow row1 = sheet.createRow(1);
+        HSSFCell cell_10 = row1.createCell(0);
+        cell_10.setCellStyle(style);
+        cell_10.setCellValue("活跃情况");
+        HSSFCell cell_11 = row1.createCell(1);
+        cell_11.setCellStyle(style);
+        cell_11.setCellValue("DAU");
+        HSSFRow row2 = sheet.createRow(2);
+        HSSFCell cell_21 = row2.createCell(1);
+        cell_21.setCellStyle(style);
+        cell_21.setCellValue("新增");
+        HSSFRow row3 = sheet.createRow(3);
+        HSSFCell cell_31 = row3.createCell(1);
+        cell_31.setCellStyle(style);
+        cell_31.setCellValue("老活");
+        HSSFRow row4 = sheet.createRow(4);
+        HSSFCell cell_41 = row4.createCell(1);
+        cell_41.setCellStyle(style);
+        cell_41.setCellValue("新增次日app留存率(前日)");
+        HSSFRow row5 = sheet.createRow(5);
+        HSSFCell cell_51 = row5.createCell(1);
+        cell_51.setCellStyle(style);
+        cell_51.setCellValue("老活次日app留存率(前日)");
+        HSSFRow row6 = sheet.createRow(6);
+        HSSFCell cell_61 = row6.createCell(1);
+        cell_61.setCellStyle(style);
+        cell_61.setCellValue("新增单设备app时长");
+        HSSFRow row7 = sheet.createRow(7);
+        HSSFCell cell_71 = row7.createCell(1);
+        cell_71.setCellStyle(style);
+        cell_71.setCellValue("新增单设备app打开");
+        // Merge cells for the category column (4 args: first row, last row, first column, last column).
+        // Rows and columns are zero-based, and both endpoints are included in the merge.
+        // Here column 0 is merged across rows 1-7 so "活跃情况" spans all metric rows.
+        CellRangeAddress region = new CellRangeAddress(1, 7, 0, 0);
+        sheet.addMergedRegion(region);
+        try{
+            File file = new File("/Users/apple/Downloads/北极星指标_dev.xls");
+            FileOutputStream fout = new FileOutputStream(file);
+            workbook.write(fout);
+            fout.close();
+        }catch (Exception e){
+            e.printStackTrace();
+        }
+    }
 }
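
The demo above uses the legacy HSSF (.xls) model. Since the class already imports org.apache.poi.xssf.usermodel.*, the same header-and-merge layout can also be produced as .xlsx; a standalone sketch (the class name and output path are assumptions, not part of the commit):

    import java.io.FileOutputStream;
    import org.apache.poi.ss.usermodel.HorizontalAlignment;
    import org.apache.poi.ss.usermodel.VerticalAlignment;
    import org.apache.poi.ss.util.CellRangeAddress;
    import org.apache.poi.xssf.usermodel.*;

    public class XlsxLayoutDemo {
        public static void main(String[] args) throws Exception {
            XSSFWorkbook workbook = new XSSFWorkbook();
            XSSFCellStyle style = workbook.createCellStyle();
            style.setAlignment(HorizontalAlignment.CENTER);
            style.setVerticalAlignment(VerticalAlignment.CENTER);
            XSSFSheet sheet = workbook.createSheet("sheet");
            // Same header columns as the HSSF demo above.
            String[] headers = {"分类", "指标", "昨日值", "前日值", "前日环比",
                    "上周同比", "上月日均值环比", "历史同期上月日均值同比", "预警"};
            XSSFRow row0 = sheet.createRow(0);
            for (int i = 0; i < headers.length; i++) {
                XSSFCell cell = row0.createCell(i);
                cell.setCellStyle(style);
                cell.setCellValue(headers[i]);
            }
            // Merge the category column down rows 1-7, as in the demo.
            sheet.addMergedRegion(new CellRangeAddress(1, 7, 0, 0));
            try (FileOutputStream fout = new FileOutputStream("layout_demo.xlsx")) {
                workbook.write(fout);
            }
            workbook.close();
        }
    }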
 package com.gmei.data.gateway.server.service.impl;
 
 import com.gmei.data.gateway.server.service.HivesqlService;
+import com.gmei.data.gateway.server.utils.DateUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Value;
@@ -40,7 +41,7 @@ public class HivesqlServiceImpl implements HivesqlService {
             reader = new BufferedReader(new FileReader(file));
             String tempStr;
             while ((tempStr = reader.readLine()) != null) {
-                sbf.append("\t").append(tempStr);
+                sbf.append("\n").append(tempStr);
             }
             reader.close();
         } catch (IOException e) {
@@ -59,6 +60,8 @@ public class HivesqlServiceImpl implements HivesqlService {
             }
         }
         String hql = sbf.toString();
+        hql = hql.replaceAll("#partition_date", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()))
+                .replaceAll("#partition_day", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()));
 
         Connection conn = null;
         try{
@@ -68,13 +71,13 @@ public class HivesqlServiceImpl implements HivesqlService {
             statement.executeUpdate("set mapreduce.job.queuename=data");
             int updateCount = statement.executeUpdate(hql);
             if(updateCount > 0){
-                logger.info("*** Execute success, hql: {}",hql);
+                logger.info("*** Execute By Hive success, hql: {}",hql);
             }else{
-                logger.warn("*** Execute finish, hql: {}",hql);
+                logger.warn("*** Execute By Hive finish, hql: {}",hql);
             }
         }catch (Exception e){
             e.printStackTrace();
-            logger.error("*** Execute failed, error message: {}, hql: {}",e.getMessage(),hql);
+            logger.error("*** Execute By Hive failed, error message: {}, hql: {}",e.getMessage(),hql);
             return 1;
         }
     }
...
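
A note on the substitution added in this hunk: String.replaceAll treats its first argument as a regular expression and its second as a replacement pattern. #partition_date and #partition_day contain no regex metacharacters, and the formatted date contains no '$' or '\', so plain replaceAll is safe here (neither token is a prefix of the other, so the order of the two calls does not matter). A standalone sketch of the behavior, with a made-up date standing in for DateUtils.getYesterdayDateSimpleStr():

    public class PartitionSubstitutionDemo {
        public static void main(String[] args) {
            String hql = "INSERT OVERWRITE TABLE pm.tl_pm_content_d PARTITION (PARTITION_DAY = #partition_day)\n"
                    + "SELECT * FROM src WHERE partition_date = #partition_date";
            // Stand-in value; the real code calls DateUtils.getYesterdayDateSimpleStr().
            String yesterday = "20200101";
            hql = hql.replaceAll("#partition_date", String.format("'%s'", yesterday))
                     .replaceAll("#partition_day", String.format("'%s'", yesterday));
            // Both placeholders are now quoted literals, e.g. PARTITION_DAY = '20200101'.
            System.out.println(hql);
        }
    }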
@@ -2,6 +2,7 @@ package com.gmei.data.gateway.server.service.impl;
 
 import com.gmei.data.gateway.server.dto.ExcelGenDto;
 import com.gmei.data.gateway.server.service.SparksqlService;
+import com.gmei.data.gateway.server.utils.DateUtils;
 import com.gmei.data.gateway.server.utils.ProperUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -33,6 +34,58 @@ public class SparksqlServiceImpl implements SparksqlService {
     private String properFilePath;
 
     private static final Logger logger = LoggerFactory.getLogger(SparksqlServiceImpl.class);
+    @Override
+    public int executeSql(String name){
+        File sqlDir = new File(sqlPath + name);
+        File[] files = sqlDir.listFiles();
+        for(File file : files){
+            BufferedReader reader = null;
+            StringBuffer sbf = new StringBuffer();
+            try {
+                reader = new BufferedReader(new FileReader(file));
+                String tempStr;
+                while ((tempStr = reader.readLine()) != null) {
+                    sbf.append("\n").append(tempStr);
+                }
+                reader.close();
+            } catch (IOException e) {
+                e.printStackTrace();
+                logger.error(e.getMessage());
+                continue;
+            } finally {
+                if (reader != null) {
+                    try {
+                        reader.close();
+                    } catch (IOException e1) {
+                        e1.printStackTrace();
+                        logger.error(e1.getMessage());
+                        continue;
+                    }
+                }
+            }
+            String hql = sbf.toString();
+            hql = hql.replaceAll("#partition_date", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()))
+                    .replaceAll("#partition_day", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()));
+            Connection conn = null;
+            try{
+                conn = DriverManager.getConnection(sparkSqlJdbcUrl,"bi","");
+                Statement statement = conn.createStatement();
+                int updateCount = statement.executeUpdate(hql);
+                if(updateCount > 0){
+                    logger.info("*** Execute By Sparksql success, hql: {}",hql);
+                }else{
+                    logger.warn("*** Execute By Sparksql finish, hql: {}",hql);
+                }
+            }catch (Exception e){
+                e.printStackTrace();
+                logger.error("*** Execute By Sparksql failed, error message: {}, hql: {}",e.getMessage(),hql);
+                return 1;
+            }
+        }
+        return 0;
+    }
 
     @Override
     public ExcelGenDto getResultDto(String name){
         List<String> sheetNameList = new ArrayList<String>();
@@ -82,6 +135,9 @@ public class SparksqlServiceImpl implements SparksqlService {
             }
         }
         String hql = sbf.toString();
+        hql = hql.replaceAll("#partition_date", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()))
+                .replaceAll("#partition_day", String.format("'%s'", DateUtils.getYesterdayDateSimpleStr()));
 
         Connection conn = null;
         try{
             conn = DriverManager.getConnection(sparkSqlJdbcUrl,"data","");
...
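
Both new execution paths open a JDBC Connection and Statement that are never closed, so each call leaks a connection to the Thrift server. A try-with-resources variant of the execute step (a sketch only; the URL is a hypothetical stand-in for the injected sparkSqlJdbcUrl field, and the Hive JDBC driver is assumed to be on the classpath):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class JdbcExecSketch {
        // Hypothetical stand-in for the injected sparkSqlJdbcUrl.
        private static final String JDBC_URL = "jdbc:hive2://localhost:10000/default";

        static int execute(String hql) {
            // try-with-resources closes the statement and connection on every path.
            try (Connection conn = DriverManager.getConnection(JDBC_URL, "bi", "");
                 Statement statement = conn.createStatement()) {
                int updateCount = statement.executeUpdate(hql);
                if (updateCount > 0) {
                    System.out.println("Execute success");
                }
                return 0;
            } catch (Exception e) {
                e.printStackTrace();
                return 1;
            }
        }
    }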
 #!/bin/bash
 nohup java -jar /home/bi/bi-report/lib/java/bi-report-service/service-server/target/report-service-server-1.0.0-SNAPSHOT.jar >> /data/bi-report/logs/bi-report-service.out 2>&1 &
+tail -100f /data/bi-report/logs/bi-report-service.out
\ No newline at end of file
-INSERT OVERWRITE TABLE tl_pm_content_d PARTITION (PARTITION_DAY = #partition_day)
+INSERT OVERWRITE TABLE pm.tl_pm_content_d PARTITION (PARTITION_DAY = #partition_day)
 SELECT
...