ML / ffm-baseline · Commits · cbbb26f3

Commit cbbb26f3, authored Mar 26, 2019 by 张彦钊
Commit message: 修改test文件 (modify the test file)
Parent: cf6bc33b

Showing 1 changed file, with 34 additions and 34 deletions:
tensnsorflow/test.py (+34 / -34)
import datetime
from pyspark.sql import HiveContext
from pyspark.context import SparkContext
from pyspark.conf import SparkConf
from pyspark.sql import SQLContext
from pyspark.sql import SparkSession
from py4j.java_gateway import java_import
import pytispark.pytispark as pti
# from pyspark.sql import HiveContext
# from pyspark.context import SparkContext
# from pyspark.conf import SparkConf
# from pyspark.sql import SQLContext
# from pyspark.sql import SparkSession
# from py4j.java_gateway import java_import
# import pytispark.pytispark as pti
import pandas as pd
import pymysql
...
@@ -25,35 +25,35 @@ def con_sql(db,sql):
    return df
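The hunk header above references a con_sql(db, sql) helper whose body is collapsed out of this view; only its trailing return df is shown. Given the pymysql and pandas imports at the top of the file, a plausible shape for it is the usual cursor-to-DataFrame pattern. This is a hypothetical sketch for orientation, not part of the diff:

# Hypothetical reconstruction of con_sql; the actual body is collapsed
# out of this diff view. Assumes db is an open pymysql connection.
def con_sql(db, sql):
    cursor = db.cursor()          # open a cursor on the connection
    cursor.execute(sql)           # run the query
    result = cursor.fetchall()    # pull all rows into memory
    df = pd.DataFrame(list(result))
    db.close()
    return df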
def test():
    conf = SparkConf().setAppName("My App").set("spark.io.compression.codec", "lzf")
    sc = SparkContext(conf=conf)
    hive_context = HiveContext(sc)
    hive_context.sql(''' select device["device_type"] from online.tl_hdfs_maidian_view
       where partition_date = '20181012' and action = "page_view"
       and params["page_name"] = "diary_detail" and params["referrer"] = "home" limit 10 ''').show(6)
# def test():
#     conf = SparkConf().setAppName("My App").set("spark.io.compression.codec", "lzf")
#     sc = SparkContext(conf=conf)
#     hive_context = HiveContext(sc)
#     hive_context.sql(''' select device["device_type"] from online.tl_hdfs_maidian_view
#        where partition_date = '20181012' and action = "page_view"
#        and params["page_name"] = "diary_detail" and params["referrer"] = "home" limit 10 ''').show(6)
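Worth noting: HiveContext is deprecated since Spark 2.0 in favor of SparkSession with Hive support, which the esmm_pre function below already uses. A minimal equivalent of test() on that API, assuming the same Hive metastore is reachable, would look like this sketch:

# Sketch only: the same query through the Spark 2.x SparkSession entry point.
spark = (SparkSession.builder
         .appName("My App")
         .config("spark.io.compression.codec", "lzf")
         .enableHiveSupport()
         .getOrCreate())
spark.sql(''' select device["device_type"] from online.tl_hdfs_maidian_view
    where partition_date = '20181012' and action = "page_view"
    and params["page_name"] = "diary_detail" and params["referrer"] = "home" limit 10 ''').show(6)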
def esmm_pre():
    yesterday = (datetime.date.today() - datetime.timedelta(days=1)).strftime("%Y-%m-%d")
    print(yesterday)

    spark = SparkSession.builder.enableHiveSupport().getOrCreate()
    # gw = SparkContext._gateway
    #
    # # Import TiExtensions
    # java_import(gw.jvm, "org.apache.spark.sql.TiContext")
    # Inject TiExtensions, and get a TiContext
    # ti = gw.jvm.TiExtensions.getInstance(spark._jsparkSession).getOrCreateTiContext(spark._jsparkSession)
    ti = pti.TiContext(spark)
    ti.tidbMapDatabase("jerry_test")
    # sql("use tpch_test")
    spark.sql("select count(*) from esmm_pre_data").show(6)
    # conf = SparkConf().setAppName("esmm_pre").set("spark.io.compression.codec", "lzf")
# def esmm_pre():
#     yesterday = (datetime.date.today() - datetime.timedelta(days=1)).strftime("%Y-%m-%d")
#     print(yesterday)
#
#     spark = SparkSession.builder.enableHiveSupport().getOrCreate()
#     # gw = SparkContext._gateway
#     #
#     # # Import TiExtensions
#     # java_import(gw.jvm, "org.apache.spark.sql.TiContext")
#     # Inject TiExtensions, and get a TiContext
#     # ti = gw.jvm.TiExtensions.getInstance(spark._jsparkSession).getOrCreateTiContext(spark._jsparkSession)
#     ti = pti.TiContext(spark)
#
#     ti.tidbMapDatabase("jerry_test")
#
#     # sql("use tpch_test")
#     spark.sql("select count(*) from esmm_pre_data").show(6)
#
#     # conf = SparkConf().setAppName("esmm_pre").set("spark.io.compression.codec", "lzf")
#
# spark.sql("""
# select concat(tmp1.device_id,",",tmp1.city_id) as device_city, tmp1.merge_queue from (select device_id,if(city_id='world','worldwide',city_id) city_id,similarity_cid as merge_queue from nd_device_cid_similarity_matrix
...
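For context on how a script like this is driven: pytispark's TiContext bridges Spark SQL to TiDB, and tidbMapDatabase("jerry_test") exposes that TiDB database's tables to Spark SQL queries such as the count over esmm_pre_data above. Running it requires the TiSpark jar on the Spark classpath. A hypothetical entry point, not part of this diff:

# Hypothetical entry point for tensnsorflow/test.py; the actual invocation
# is outside this diff. Assumes submission along the lines of:
#   spark-submit --jars tispark-core-<version>.jar test.py
if __name__ == '__main__':
    esmm_pre()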