標籤:save require miss 函數 bug $? setting odi backspace
#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# Shell script for starting the Spark Shell REPL

# Detect whether we are running under Cygwin (JLine needs a TTY workaround there).
cygwin=false
case "$(uname)" in
  CYGWIN*) cygwin=true ;;
esac

# Enter posix mode for bash
set -o posix

## Global script variables
# Resolve the Spark installation directory (the parent of this script's bin/).
FWDIR="$(cd "$(dirname "$0")"/..; pwd)"

# Print usage and exit. Delegates to spark-submit --help, filtering out
# spark-submit's own "Usage: ..." lines so only the option list is shown:
#   Usage: spark-submit [options] <app jar | python file> [app arguments]
#   Usage: spark-submit --kill [submission ID] --master [spark://...]
#   Usage: spark-submit --status [submission ID] --master [spark://...]
function usage() {
  echo "Usage: ./bin/spark-shell [options]"
  "$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
  exit 0
}

# Show help when the argument list ends with --help or -h.
# (Inside [[ ]], "$@" is joined into a single word, so this is a suffix match.)
if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
  usage
fi

# utils.sh (sourced below) validates option arity and splits the arguments
# into two arrays:
#   SUBMISSION_OPTS -- options consumed by spark-submit itself:
#     key/value: --master --deploy-mode --class --name --jars --py-files
#                --files --conf --properties-file --driver-memory
#                --driver-java-options --driver-library-path
#                --driver-class-path --executor-memory --driver-cores
#                --total-executor-cores --executor-cores --queue
#                --num-executors --archives  (each must be followed by a value)
#     flags:     --verbose | -v | --supervise
#APPLICATION_OPTS參數包括除SUBMISSION_OPTS之外的參數
# utils.sh defines gatherSparkSubmitOpts, which splits "$@" into
# SUBMISSION_OPTS (spark-submit's own options) and APPLICATION_OPTS
# (everything else, passed through to the application).
source "$FWDIR/bin/utils.sh"

# Tell utils.sh which function prints this script's usage message.
# (The transcription had this assignment fused onto a comment line,
# which would leave the variable unset and make utils.sh abort.)
SUBMIT_USAGE_FUNCTION=usage
# Split "$@" into SUBMISSION_OPTS / APPLICATION_OPTS (defined in utils.sh).
gatherSparkSubmitOpts "$@"

# Launch the REPL: spark-submit --class org.apache.spark.repl.Main,
# forwarding submission options before and application options after
# the "spark-shell" primary-resource marker.
function main() {
  if $cygwin; then
    # Workaround for issue involving JLine and Cygwin
    # (see http://sourceforge.net/p/jline/bugs/40/).
    # If you're using the Mintty terminal emulator in Cygwin, may need to set the
    # "Backspace sends ^H" setting in "Keys" section of the Mintty options
    # (see https://github.com/sbt/sbt/issues/562).
    stty -icanon min 1 -echo > /dev/null 2>&1
    export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
    "$FWDIR"/bin/spark-submit --class org.apache.spark.repl.Main \
      "${SUBMISSION_OPTS[@]}" spark-shell "${APPLICATION_OPTS[@]}"
    stty icanon echo > /dev/null 2>&1
  else
    export SPARK_SUBMIT_OPTS
    "$FWDIR"/bin/spark-submit --class org.apache.spark.repl.Main \
      "${SUBMISSION_OPTS[@]}" spark-shell "${APPLICATION_OPTS[@]}"
  fi
}

# Copy restore-TTY-on-exit functions from Scala script so spark-shell exits
# properly even in binary distribution of Spark where Scala is not installed.
exit_status=127
saved_stty=""

# Restore stty settings (echo in particular).
function restoreSttySettings() {
  # Intentionally unquoted: $saved_stty holds the settings string from
  # `stty -g`, which must be passed as-is.
  stty $saved_stty
  saved_stty=""
}

function onExit() {
  if [[ "$saved_stty" != "" ]]; then
    restoreSttySettings
  fi
  exit $exit_status
}

# To re-enable echo if we are interrupted before completing.
trap onExit INT

# Save terminal settings.
saved_stty=$(stty -g 2>/dev/null)
# Clear on error so we don't later try to restore garbage.
# Fix: the transcribed `[[ ! $? ]]` is always false ($? is a non-empty
# string), so the clearing never happened; test the status numerically.
if [[ $? -ne 0 ]]; then
  saved_stty=""
fi

main "$@"

# Record the exit status lest it be overwritten:
# then re-enable echo and propagate the code.
exit_status=$?
onExit
utils.sh指令碼內容:
#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Gather all spark-submit options into SUBMISSION_OPTS; everything else
# goes into APPLICATION_OPTS. Requires SUBMIT_USAGE_FUNCTION to name the
# caller's usage function (invoked when a value-taking option has no value).
function gatherSparkSubmitOpts() {

  if [ -z "$SUBMIT_USAGE_FUNCTION" ]; then
    echo "Function for printing usage of $0 is not set." 1>&2
    echo "Please set usage function to shell variable 'SUBMIT_USAGE_FUNCTION' in $0" 1>&2
    exit 1
  fi

  # NOTE: If you add or remove spark-submit options,
  # modify NOT ONLY this script but also SparkSubmitArgument.scala
  SUBMISSION_OPTS=()
  APPLICATION_OPTS=()
  while (($#)); do
    case "$1" in
      # Options that take a value: consume the flag and its argument.
      --master | --deploy-mode | --class | --name | --jars | --py-files | --files | \
      --conf | --properties-file | --driver-memory | --driver-java-options | \
      --driver-library-path | --driver-class-path | --executor-memory | --driver-cores | \
      --total-executor-cores | --executor-cores | --queue | --num-executors | --archives)
        if [[ $# -lt 2 ]]; then
          "$SUBMIT_USAGE_FUNCTION"
          exit 1;
        fi
        SUBMISSION_OPTS+=("$1"); shift
        SUBMISSION_OPTS+=("$1"); shift
        ;;

      # Boolean flags: consume just the flag itself.
      --verbose | -v | --supervise)
        SUBMISSION_OPTS+=("$1"); shift
        ;;

      # Everything else belongs to the application.
      *)
        APPLICATION_OPTS+=("$1"); shift
        ;;
    esac
  done

  export SUBMISSION_OPTS
  export APPLICATION_OPTS
}
View Code
Spark-shell啟動指令碼解讀