Skip to content
Projects
Groups
Snippets
Help
Loading...
Sign in
Toggle navigation
M
meta_base_code
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
宋柯
meta_base_code
Commits
fe0390a9
Commit
fe0390a9
authored
Sep 04, 2020
by
litaolemo
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
update
parent
44f76a08
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
31 additions
and
31 deletions
+31
-31
search_strategy_d.py
task/search_strategy_d.py
+31
-31
No files found.
task/search_strategy_d.py
View file @
fe0390a9
...
@@ -718,35 +718,35 @@ for t in range(1, task_days):
...
@@ -718,35 +718,35 @@ for t in range(1, task_days):
# Debug dump: print every row returned by the preceding Spark SQL query.
for res in sql_res:
    print(res)

# Register the device-level dataframe so the aggregation SQL below
# (currently disabled) could read it as `data_table`.
device_df.createOrReplaceTempView("data_table")

# NOTE(review): the per-(device_type, active_type, channel_type) aggregation
# and the MySQL `replace into search_strategy_d` upsert are commented out in
# this revision — kept verbatim for reference. `finnal_df` / `instert_sql`
# are the original (misspelled) names; preserved as-is inside the disabled code.
# collects_sql = """
# SELECT device_type,active_type,channel_type,ROUND(if(NVL(sum(uv),0) <> 0 ,NVL(sum(search_core_pv),0)/NVL(sum(uv),0) ,0),5) as core_pv_division_uv,
# ROUND(if(NVL(sum(uv),0) <> 0 ,NVL(sum(search_pv),0)/NVL(sum(uv),0) , 0),5) as pv_division_uv
# FROM data_table GROUP BY device_type,active_type,channel_type
# """
# finnal_df = spark.sql(collects_sql)
# finnal_df.show(1, False)
# sql_res = finnal_df.collect()
# for res in sql_res:
#     # print(res)
#     device_type = res.device_type
#     active_type = res.active_type
#     channel_type = res.channel_type
#     core_pv_division_uv = res.core_pv_division_uv
#     pv_division_uv = res.pv_division_uv
#     # Deterministic row id: md5 over day + dimension values.
#     pid = hashlib.md5(
#         (today_str + device_type + active_type + channel_type).encode("utf8")).hexdigest()
#     instert_sql = """replace into search_strategy_d(
#     day_id,device_type,active_type,channel_type,core_pv_division_uv,pv_division_uv,pid
#     ) VALUES('{day_id}','{device_type}','{active_type}','{channel_type}',{core_pv_division_uv},{pv_division_uv},'{pid}');""".format(
#         day_id=today_str, device_type=device_type,
#         active_type=active_type, channel_type=channel_type, core_pv_division_uv=core_pv_division_uv,pv_division_uv=pv_division_uv,pid=pid
#     )
#     print(instert_sql)
#     # cursor.execute("set names 'UTF8'")
#     res = cursor.execute(instert_sql)
#     db.commit()
#     print(res)

# Release the MySQL connection once the task is finished.
db.close()
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment