Commit aa78de0d authored by 宋柯's avatar 宋柯

模型上线

parent 240e771c
...@@ -6,13 +6,13 @@ itemStatisticStartDays=$2 ...@@ -6,13 +6,13 @@ itemStatisticStartDays=$2
spark_mode=$3
content_type="service"
pythonFile="${path}/spark/featureEngSk.py"
# Persistent log location (this commit moved it out of the home directory).
log_file="/data/gmuser/logs/${content_type}_feature_csv_export.log"
mkdir -p "$(dirname "$log_file")"

# Drop the previous train/test feature exports before regenerating them.
# -rm -r -f replaces the deprecated -rmr and does not fail if the path is absent.
/opt/hadoop/bin/hdfs dfs -rm -r -f "/${content_type}_feature_v1_train"
/opt/hadoop/bin/hdfs dfs -rm -r -f "/${content_type}_feature_v1_test"

# spark-submit options shared by the local and yarn submission paths,
# kept in one array so the two branches cannot drift apart.
# NOTE(review): ${python_os}, ${path}, $trainDays, $itemStatisticStartDays are
# presumably set earlier in this script (lines 1-5, outside this hunk) — confirm.
common_opts=(
  --deploy-mode client
  --executor-memory 2g
  --executor-cores 1
  --conf spark.pyspark.python="${python_os}"
  --conf spark.default.parallelism=100
  --conf spark.storage.memoryFraction=0.5
  --conf spark.shuffle.memoryFraction=0.3
  --conf spark.locality.wait=0
  --jars /srv/apps/tispark-core-2.1-SNAPSHOT-jar-with-dependencies.jar,/srv/apps/spark-connector_2.11-1.9.0-rc2.jar,/srv/apps/mysql-connector-java-5.1.38.jar
)

if [ -n "${spark_mode}" ]; then
  # Any non-empty third argument selects single-host local mode.
  # 'local[8]' is quoted so the brackets are never glob-expanded.
  # 2>&1: Spark writes most diagnostics to stderr; capture them in the log too.
  /opt/spark/bin/spark-submit --master 'local[8]' --driver-memory 16g --num-executors 4 \
    "${common_opts[@]}" "${pythonFile}" "$trainDays" "$itemStatisticStartDays" > "$log_file" 2>&1
else
  /opt/spark/bin/spark-submit --master yarn --queue root.strategy --driver-memory 8g --num-executors 8 \
    "${common_opts[@]}" "${pythonFile}" "$trainDays" "$itemStatisticStartDays" > "$log_file" 2>&1
fi
#/opt/spark/bin/spark-submit --master local[4] --deploy-mode client --driver-memory 8g --executor-memory 2g --executor-cores 1 --num-executors 4 --conf spark.pyspark.python=/srv/envs/serviceRec/bin/python --conf spark.default.parallelism=100 --conf spark.storage.memoryFraction=0.5 --conf spark.shuffle.memoryFraction=0.3 --conf spark.locality.wait=0 --jars /srv/apps/tispark-core-2.1-SNAPSHOT-jar-with-dependencies.jar,/srv/apps/spark-connector_2.11-1.9.0-rc2.jar,/srv/apps/mysql-connector-java-5.1.38.jar ${pythonFile} $day_count
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment