Commit 55ea7c2a authored by litaolemo's avatar litaolemo

update

parent f5878fea
......@@ -122,36 +122,33 @@ spark.sql("CREATE TEMPORARY FUNCTION json_map AS 'brickhouse.udf.json.JsonMapUDF
spark.sql("CREATE TEMPORARY FUNCTION is_json AS 'com.gmei.hive.common.udf.UDFJsonFormatCheck'")
spark.sql("CREATE TEMPORARY FUNCTION arrayMerge AS 'com.gmei.hive.common.udf.UDFArryMerge'")
tableDF1 = spark.read.format("jdbc").\
option("url","jdbc:mysql://172.16.30.143/zhengxing").\
option("driver","com.mysql.jdbc.Driver").\
option("dbtable", "wiki_item").\
option("user", "work").\
option("password", "BJQaT9VzDcuPBqkd").load().createOrReplaceTempView("wiki_item")
tableDF2 = spark.read.format("jdbc").\
option("url","jdbc:mysql://172.16.30.143/zhengxing").\
option("driver","com.mysql.jdbc.Driver").\
option("dbtable", "wiki_product").\
option("user", "work").\
option("password", "BJQaT9VzDcuPBqkd").load().createOrReplaceTempView("wiki_product")
tableDF3 = spark.read.format("jdbc").\
option("url","jdbc:mysql://172.16.30.143/zhengxing").\
option("driver","com.mysql.jdbc.Driver").\
option("dbtable", "wiki_collect").\
option("user", "work").\
option("password", "BJQaT9VzDcuPBqkd").load().createOrReplaceTempView("wiki_collect")
tableDF4 = spark.read.format("jdbc").\
option("url","jdbc:mysql://172.16.30.143/zhengxing").\
option("driver","com.mysql.jdbc.Driver").\
option("dbtable", "wiki_brand").\
option("user", "work").\
option("password", "BJQaT9VzDcuPBqkd").load().createOrReplaceTempView("wiki_brand")
# NOTE(review): DB credentials are hard-coded in source — move them to a
# config/secrets store before wider distribution. Left as-is here because
# changing them would break the job.
_JDBC_URL = "jdbc:mysql://172.16.30.143/zhengxing"
_JDBC_USER = "work"
_JDBC_PASSWORD = "BJQaT9VzDcuPBqkd"


def _register_mysql_view(table_name):
    """Load MySQL table ``table_name`` over JDBC and register it as a Spark
    temp view of the same name.

    Returns None — ``createOrReplaceTempView`` has no return value, which
    matches what the original inline chains assigned to tableDF1..tableDF4.
    """
    return spark.read.format("jdbc") \
        .option("url", _JDBC_URL) \
        .option("driver", "com.mysql.jdbc.Driver") \
        .option("dbtable", table_name) \
        .option("user", _JDBC_USER) \
        .option("password", _JDBC_PASSWORD) \
        .load() \
        .createOrReplaceTempView(table_name)


# Keep the original variable names for compatibility with any later code;
# each is None, exactly as in the original (see docstring above).
tableDF1 = _register_mysql_view("wiki_item")
tableDF2 = _register_mysql_view("wiki_product")
tableDF3 = _register_mysql_view("wiki_collect")
tableDF4 = _register_mysql_view("wiki_brand")
# df = spark.read.jdbc(url="jdbc:mysql://172.16.30.143/zhengxing?user=work&password=BJQaT9VzDcuPBqkd&rewriteBatchedStatements=true",table="wiki_item")
# print(huidu_device_id_sql)
......@@ -298,7 +295,7 @@ for t in range(1, task_days):
sql_res = search_ctr_df.collect()
tag_names_list_week = []
tag_dict = get_all_tag()
for key_count,name in enumerate(sql_res):
for key_count, name in enumerate(sql_res):
# print(name)
keywords = name.query
if not keywords:
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment