Commit 620301cd authored by litaolemo

update

parent c9187a48
@@ -30,15 +30,15 @@ startTime = time.time()
 sparkConf = SparkConf()
 sparkConf.set("spark.sql.crossJoin.enabled", True)
 sparkConf.set("spark.debug.maxToStringFields", "100")
-sparkConf.set("spark.tispark.plan.allow_index_double_read", False)
-sparkConf.set("spark.tispark.plan.allow_index_read", True)
-sparkConf.set("spark.hive.mapred.supports.subdirectories", True)
+# sparkConf.set("spark.tispark.plan.allow_index_double_read", False)
+# sparkConf.set("spark.tispark.plan.allow_index_read", True)
+# sparkConf.set("spark.hive.mapred.supports.subdirectories", True)
 # sparkConf.set("spark.sql.adaptive.enabled", True)
 # sparkConf.set("spark.sql.adaptive.skewedJoin.enabled", True)
 sparkConf.set("spark.shuffle.statistics.verbose", True)
 # sparkConf.set("spark.sql.adaptive.shuffle.targetPostShuffleInputSize", "67108864")
 # sparkConf.set("spark.sql.adaptive.shuffle.targetPostShuffleRowCount", "20000000")
-sparkConf.set("spark.hadoop.mapreduce.input.fileinputformat.input.dir.recursive", True)
+# sparkConf.set("spark.hadoop.mapreduce.input.fileinputformat.input.dir.recursive", True)
 # sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
 sparkConf.set("mapreduce.output.fileoutputformat.compress", False)
 sparkConf.set("mapreduce.map.output.compress", False)
...
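
For context, a minimal sketch of how a SparkConf configured this way is typically turned into a session in PySpark. The SparkSession builder and the enableHiveSupport() call are assumptions for illustration only; the commit itself touches nothing beyond the config lines above.

from pyspark import SparkConf
from pyspark.sql import SparkSession

# Settings carried over from the diff above (the commit leaves these enabled).
sparkConf = SparkConf()
sparkConf.set("spark.sql.crossJoin.enabled", True)
sparkConf.set("spark.debug.maxToStringFields", "100")

# Assumption: the surrounding script builds its session from this conf.
# enableHiveSupport() is assumed because Hive-related settings appear above.
spark = (
    SparkSession.builder
    .config(conf=sparkConf)
    .enableHiveSupport()
    .getOrCreate()
)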