Commit 1bddbcc3 authored by 赵威's avatar 赵威

update requirements.txt

parent ea3786db
......@@ -24,3 +24,6 @@ gevent==1.2.1
pymysql==0.10.1
gensim==3.8.3
pyspark==2.3.0
pytispark==2.0
from django.test import TestCase
# from django.test import TestCase
from pyspark import SparkConf
from pyspark.sql import SparkSession
from pytispark import pytispark as pti
# Create your tests here.
def get_spark(app_name="SERVICE FM CSV EXPORT",
              pd_addresses="172.16.40.170:2379",
              tidb_database="jerry_test"):
    """Create (or reuse) a Hive-enabled SparkSession wired to TiSpark.

    Args:
        app_name: Spark application name shown in the resource manager.
        pd_addresses: TiKV placement-driver address(es) for TiSpark.
        tidb_database: TiDB database mapped into the session's catalog.

    Returns:
        The active ``SparkSession`` with the TiDB database mapped via
        a ``TiContext``.
    """
    conf = SparkConf()
    # Spark config values are strings; use "true"/"false" consistently
    # instead of mixing Python booleans with string values.
    conf.set("spark.sql.crossJoin.enabled", "true")
    conf.set("spark.debug.maxToStringFields", "100")
    conf.set("spark.tispark.plan.allow_index_double_read", "false")
    conf.set("spark.tispark.plan.allow_index_read", "true")
    conf.set("spark.hive.mapred.supports.subdirectories", "true")
    conf.set("spark.hadoop.mapreduce.input.fileinputformat.input.dir.recursive", "true")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    conf.set("mapreduce.output.fileoutputformat.compress", "false")
    conf.set("mapreduce.map.output.compress", "false")
    spark = (SparkSession.builder
             .config(conf=conf)
             # TiExtensions + PD addresses enable reading TiDB tables directly.
             .config("spark.sql.extensions", "org.apache.spark.sql.TiExtensions")
             .config("spark.tispark.pd.addresses", pd_addresses)
             .appName(app_name)
             .enableHiveSupport()
             .getOrCreate())
    # Map the TiDB database so its tables resolve in spark.sql() queries.
    ti = pti.TiContext(spark)
    ti.tidbMapDatabase(tidb_database)
    return spark
if __name__ == "__main__":
    # Smoke test: open the TiSpark session and fetch one row from Hive.
    session = get_spark()
    sample = session.sql("select * from online.bl_hdfs_maidian_updates limit 1")
    sample.show()
# spark-submit --master yarn --deploy-mode client --queue root.strategy --driver-memory 16g --executor-memory 1g --executor-cores 1 --num-executors 70 --conf spark.default.parallelism=100 --conf spark.storage.memoryFraction=0.5 --conf spark.shuffle.memoryFraction=0.3 --conf spark.locality.wait=0 --jars /srv/apps/tispark-core-2.1-SNAPSHOT-jar-with-dependencies.jar,/srv/apps/spark-connector_2.11-1.9.0-rc2.jar,/srv/apps/mysql-connector-java-5.1.38.jar /srv/apps/strategy_embedding/word_vector/tests.py
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment