Skip to content
Projects
Groups
Snippets
Help
Loading...
Sign in
Toggle navigation
S
strategy_data_sync
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
黎涛
strategy_data_sync
Commits
1e9e7c1c
Commit
1e9e7c1c
authored
Nov 16, 2020
by
litaolemo
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
update
parent
269a9bd8
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
209 additions
and
0 deletions
+209
-0
pom.xml
pom.xml
+144
-0
ServiceTransfer.scala
src/main/scala/com/gengmei/meigou/ServiceTransfer.scala
+65
-0
No files found.
pom.xml
0 → 100644
View file @
1e9e7c1c
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Build for strategy_data_sync: a Flink 1.11 streaming job compiled with Scala 2.11.
  All Flink artifacts are `provided` (supplied by the Flink cluster at runtime);
  the assembly plugin produces a fat jar with the remaining dependencies.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>strategy_data_sync</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <flink.version>1.11.0</flink.version>
        <scala.binary.version>2.11</scala.binary.version>
        <slf4j.version>1.7.30</slf4j.version>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
    </properties>

    <dependencies>
        <dependency>
            <groupId>redis.clients</groupId>
            <artifactId>jedis</artifactId>
            <!-- 版本号可根据实际情况填写 (version may be adjusted as needed) -->
            <version>2.8.0</version>
        </dependency>

        <!-- Flink artifacts: version and Scala suffix are driven by the
             properties above so an upgrade only touches one place. -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-scala_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-common</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-csv</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-scala-bridge_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
        <!-- FIX: was flink-connector-elasticsearch7_2.12 — mixing a Scala 2.12
             artifact into a 2.11 build causes binary incompatibility at runtime. -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-elasticsearch7_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-shaded-jackson -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-shaded-jackson</artifactId>
            <version>2.10.1-11.0</version>
        </dependency>
<!--        <dependency>-->
<!--            <groupId>org.slf4j</groupId>-->
<!--            <artifactId>slf4j-simple</artifactId>-->
<!--            <version>${slf4j.version}</version>-->
<!--            <scope>provided</scope>-->
<!--        </dependency>-->
    </dependencies>

    <build>
        <plugins>
            <!-- Compiles Scala sources. The deprecated org.scala-tools:maven-scala-plugin
                 that was also bound here has been removed: binding both plugins to
                 compile/testCompile made every build compile the sources twice. -->
            <plugin>
                <groupId>net.alchim31.maven</groupId>
                <artifactId>scala-maven-plugin</artifactId>
                <version>3.4.6</version>
                <executions>
                    <execution>
                        <!-- 声明绑定到maven的compile阶段 (bind to Maven's compile phase) -->
                        <goals>
                            <goal>compile</goal>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <!-- Packages a jar-with-dependencies during `package`. -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>3.0.0</version>
                <configuration>
                    <descriptorRefs>
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                </configuration>
                <executions>
                    <execution>
                        <id>make-assembly</id>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
\ No newline at end of file
src/main/scala/com/gengmei/meigou/ServiceTransfer.scala
0 → 100644
View file @
1e9e7c1c
package
com.gengmei.meigou
import
org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import
org.apache.flink.streaming.api.scala._
import
org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import
org.apache.flink.api.scala._
import
org.apache.flink.table.api.
{
DataTypes
,
EnvironmentSettings
}
import
org.apache.flink.table.descriptors.
{
Csv
,
Kafka
,
Schema
}
/**
 * Flink Table API job that mirrors rows from a MySQL JDBC table into a Kafka topic.
 *
 * Pipeline: declare `AreaTable` (JDBC source) and `kafkaTable` (Kafka/CSV sink)
 * via SQL DDL, then run a single `INSERT INTO ... SELECT` to stream the data.
 * A second JDBC table (`AreaTable1`) is declared but its use is commented out.
 */
object ServiceTransfer {

  def main(args: Array[String]): Unit = {
    // Bootstrap a streaming TableEnvironment on the Blink planner.
    val streamEnv = StreamExecutionEnvironment.getExecutionEnvironment
    val plannerSettings =
      EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()
    val tableEnv = StreamTableEnvironment.create(streamEnv, plannerSettings)

    // NOTE(review): connection URLs and credentials are hard-coded below —
    // they should be moved to configuration / environment variables.

    // JDBC source backed by MySQL table `test.table1`.
    val areaTableDdl =
      """
        |CREATE TABLE AreaTable (
        | id INT,
        | area_name STRING,
        | phone_prefix STRING
        |) WITH (
        | 'connector' = 'jdbc',
        | 'url' = 'jdbc:mysql://47.94.7.135:3306/test',
        | 'table-name' = 'table1',
        | 'username' = 'test',
        | 'password' = 'Test@1234'
        |)
      """.stripMargin
    tableEnv.executeSql(areaTableDdl)

    // Second JDBC table over `test.table2`; declared but currently unused
    // (registration and the MySQL->MySQL insert below are commented out).
    val areaTable1Ddl =
      """
        |CREATE TABLE AreaTable1 (
        | id INT,
        | area_name STRING,
        | phone_prefix STRING
        |) WITH (
        | 'connector' = 'jdbc',
        | 'url' = 'jdbc:mysql://47.94.7.135:3306/test',
        | 'table-name' = 'table2',
        | 'username' = 'test',
        | 'password' = 'Test@1234'
        |)
      """.stripMargin
    // tableEnv.executeSql(areaTable1Ddl)

    // Kafka sink: topic `topic_2`, rows serialized as CSV.
    val kafkaTableDdl =
      """
        |CREATE TABLE kafkaTable (
        | id INT,
        | area_name STRING,
        | phone_prefix STRING
        |) WITH (
        |'connector' = 'kafka',
        |'topic' = 'topic_2', -- required: topic name from which the table is read
        |'properties.bootstrap.servers' = '47.94.7.135:9092', -- required: specify the Kafka server connection string
        |'format' = 'csv'
        |)
        |""".stripMargin
    tableEnv.executeSql(kafkaTableDdl)

    // Stream every row of the JDBC source into the Kafka sink.
    // executeSql on an INSERT submits the job itself; no streamEnv.execute() needed.
    tableEnv.executeSql(
      "INSERT INTO kafkaTable SELECT id,area_name,phone_prefix from AreaTable"
    )
    // tableEnv.executeSql("INSERT INTO AreaTable1 SELECT id,area_name,phone_prefix from AreaTable")
  }
}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment