
Compare commits

..

8 Commits

SHA1 Message Date
fd28b0d2f8 Update 'sparksubmitcache.sh' 2018-05-29 10:55:20 +00:00
87d459a204 Update 'sparksubmit.sh' 2018-05-29 10:55:07 +00:00
c442baa464 Update 'too_few_partitions-submit_bad.sh' 2018-05-29 10:54:37 +00:00
1daeb1f209 Delete 'spark_shell.sh' 2018-05-29 10:53:48 +00:00
ceefb04c27 Update 'spark-submit.sh' 2018-05-29 10:53:38 +00:00
ee8224b1c3 Delete 'pyspark.sh' 2018-05-29 10:53:20 +00:00
3868f4d01b Update 'mysql-submit.sh' 2018-05-29 10:52:41 +00:00
6a9fd2a4dd Update 'mysql-submit.sh' 2018-05-29 10:52:27 +00:00
7 changed files with 18 additions and 42 deletions

View File

@@ -1,5 +1,8 @@
-#!/bin/bash
+#!/usr/bin/env bash
+export HADOOP_CONF_DIR=$HOME/hadoop/conf
+export HADOOP_HOME=$HOME/hadoop
+export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HADOOP_HOME/lib/native
 export PYTHONIOENCODING=utf8
 /home/faculty/hrucinska/spark/bin/spark-submit\

View File

@@ -1,20 +0,0 @@
-#!/bin/bash
-. /etc/profile
-export JAVA_HOME="/usr/lib/jvm/java-8-oracle"
-/home/hc_client_bddm_prod/spark-2.3.0-bin-2.6.0-cdh5.6.0/bin/pyspark\
---master yarn\
---deploy-mode client\
---queue bddm_prod_users\
---name "OffersDailyCleanData"\
---num-executors 100\
---conf "spark.driver.memory=1g"\
---conf "spark.yarn.driver.memoryOverhead=600m"\
---conf "spark.executor.memory=2000m"\
---conf "spark.yarn.executor.memoryOverhead=1200m"\
---conf "spark.driver.extraClassPath=/opt/hive_extras/*:/opt/spark_extras/*"\
---conf "spark.yarn.appMasterEnv.JAVA_HOME=/opt/jre1.8.0"\
---conf "spark.executorEnv.JAVA_HOME=/opt/jre1.8.0"\
---packages mysql:mysql-connector-java:5.1.38
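
The deleted script above launched an interactive PySpark session on YARN. Purely as an illustration (not a file from this repository), the same YARN settings could be carried over into a spark-submit wrapper in the style of the other scripts in this compare; clean_offers.py is an assumed placeholder driver name.

#!/bin/bash
# Hypothetical spark-submit counterpart of the deleted pyspark.sh;
# clean_offers.py is an assumed placeholder, not part of this repository.
. /etc/profile
export JAVA_HOME="/usr/lib/jvm/java-8-oracle"
/home/hc_client_bddm_prod/spark-2.3.0-bin-2.6.0-cdh5.6.0/bin/spark-submit \
  --master yarn \
  --deploy-mode client \
  --queue bddm_prod_users \
  --name "OffersDailyCleanData" \
  --num-executors 100 \
  --conf "spark.driver.memory=1g" \
  --conf "spark.executor.memory=2000m" \
  --conf "spark.yarn.executor.memoryOverhead=1200m" \
  --packages mysql:mysql-connector-java:5.1.38 \
  clean_offers.py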

View File

@@ -1,4 +1,9 @@
-#!/bin/bash
+#!/usr/bin/env bash
+export HADOOP_CONF_DIR=$HOME/hadoop/conf
+export HADOOP_HOME=$HOME/hadoop
+export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HADOOP_HOME/lib/native
+export PYTHONIOENCODING=utf8
 $HOME/spark/bin/spark-submit\
 --master local[*]\

View File

@@ -1,19 +0,0 @@
-#!/bin/bash
-. /etc/profile
-export JAVA_HOME="/usr/lib/jvm/java-8-oracle"
-/home/hc_client_bddm_prod/spark-2.2.0-bin-2.6.0-cdh5.6.0/bin/spark-shell\
---master yarn\
---deploy-mode client\
---queue bddm_prod_users\
---name "OffersDailyCleanData"\
---num-executors 100\
---conf "spark.driver.memory=1g"\
---conf "spark.yarn.driver.memoryOverhead=600m"\
---conf "spark.executor.memory=2000m"\
---conf "spark.yarn.executor.memoryOverhead=1200m"\
---conf "spark.driver.extraClassPath=/opt/hive_extras/*:/opt/spark_extras/*"\
---conf "spark.yarn.appMasterEnv.JAVA_HOME=/opt/jre1.8.0"\
---conf "spark.executorEnv.JAVA_HOME=/opt/jre1.8.0"

View File

@@ -4,6 +4,7 @@ export HADOOP_CONF_DIR=$HOME/hadoop/conf
 export HADOOP_HOME=$HOME/hadoop
 export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HADOOP_HOME/lib/native
 export SPARK_JAVA_OPTS="-Dhdp.version=2.6.3.0-235"
+export PYTHONIOENCODING=utf8
 $HOME/spark/bin/spark-submit\
 --master yarn\
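
The export added in this hunk makes Python use UTF-8 for stdin/stdout/stderr even when output is redirected, which is the usual reason to set it in batch wrappers like these. A quick check, shown only as an illustration:

# With the variable set, Python's stdout encoding follows it even when piped
# (otherwise the locale default, or none at all, would apply).
PYTHONIOENCODING=utf8 python -c 'import sys; print(sys.stdout.encoding)' | cat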

View File

@@ -4,6 +4,7 @@ export HADOOP_CONF_DIR=$HOME/hadoop/conf
 export HADOOP_HOME=$HOME/hadoop
 export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HADOOP_HOME/lib/native
 export SPARK_JAVA_OPTS="-Dhdp.version=2.6.3.0-235"
+export PYTHONIOENCODING=utf8
 $HOME/spark/bin/spark-submit\
 --master yarn\

View File

@@ -1,4 +1,9 @@
-#!/bin/bash
+#!/usr/bin/env bash
+export HADOOP_CONF_DIR=$HOME/hadoop/conf
+export HADOOP_HOME=$HOME/hadoop
+export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HADOOP_HOME/lib/native
+export PYTHONIOENCODING=utf8
 $HOME/spark/bin/spark-submit\
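
Taken together, the modified wrappers in this compare converge on the same shape: a portable shebang, the Hadoop environment exports, PYTHONIOENCODING, and then spark-submit. A minimal sketch of that shape, with app.py as an assumed placeholder application:

#!/usr/bin/env bash
# Sketch only; app.py is an assumed placeholder, not a file from this repository.
export HADOOP_CONF_DIR=$HOME/hadoop/conf
export HADOOP_HOME=$HOME/hadoop
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HADOOP_HOME/lib/native
export PYTHONIOENCODING=utf8
$HOME/spark/bin/spark-submit \
  --master local[*] \
  app.py "$@"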