Spark-shell啟動指令碼解讀

來源:互聯網
上載者:User

標籤:save   require   miss   函數   bug   $?   setting   odi   backspace   

#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# Shell script for starting the Spark Shell REPL

# Detect whether we are running under Cygwin (needs a JLine/terminal workaround below).
cygwin=false
case "$(uname)" in
  CYGWIN*) cygwin=true;;
esac

# Enter posix mode for bash
set -o posix

## Global script variables

# Spark installation directory: parent of the directory containing this script.
FWDIR="$(cd "$(dirname "$0")/.." && pwd)"

# Print usage and exit. Delegates to spark-submit --help, filtering out its own
# "Usage: spark-submit ..." lines so only the option listing is shown.
function usage() {
  echo "Usage: ./bin/spark-shell [options]"
  "$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
  exit 0
}

if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
  usage
fi

# utils.sh defines gatherSparkSubmitOpts, which validates the arguments and
# splits "$@" into two exported arrays:
#   SUBMISSION_OPTS  - options understood by spark-submit itself
#                      (K-V options such as --master, --class, --jars, ...
#                       and flags --verbose | -v | --supervise)
#   APPLICATION_OPTS - everything else, passed through to the application
source "$FWDIR"/bin/utils.sh
# Usage function that gatherSparkSubmitOpts invokes on malformed arguments.
SUBMIT_USAGE_FUNCTION=usage
gatherSparkSubmitOpts "$@"

# Launch the REPL via spark-submit with the REPL main class.
function main() {
  if $cygwin; then
    # Workaround for issue involving JLine and Cygwin
    # (see http://sourceforge.net/p/jline/bugs/40/).
    # If you're using the Mintty terminal emulator in Cygwin, may need to set the
    # "Backspace sends ^H" setting in "Keys" section of the Mintty options
    # (see https://github.com/sbt/sbt/issues/562).
    stty -icanon min 1 -echo > /dev/null 2>&1
    export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
    "$FWDIR"/bin/spark-submit --class org.apache.spark.repl.Main \
      "${SUBMISSION_OPTS[@]}" spark-shell "${APPLICATION_OPTS[@]}"
    stty icanon echo > /dev/null 2>&1
  else
    export SPARK_SUBMIT_OPTS
    "$FWDIR"/bin/spark-submit --class org.apache.spark.repl.Main \
      "${SUBMISSION_OPTS[@]}" spark-shell "${APPLICATION_OPTS[@]}"
  fi
}

# Copy restore-TTY-on-exit functions from Scala script so spark-shell exits properly even in
# binary distribution of Spark where Scala is not installed
exit_status=127
saved_stty=""

# restore stty settings (echo in particular)
function restoreSttySettings() {
  stty $saved_stty
  saved_stty=""
}

function onExit() {
  if [[ "$saved_stty" != "" ]]; then
    restoreSttySettings
  fi
  exit $exit_status
}

# to reenable echo if we are interrupted before completing.
trap onExit INT

# save terminal settings
saved_stty=$(stty -g 2>/dev/null)
# Clear on error so we don't later try to restore them.
# (Fixed: the scraped original tested `[[ ! $? ]]`, which is a non-empty-string
# test and therefore always false; the intent is "did stty -g fail?".)
if [[ $? -ne 0 ]]; then
  saved_stty=""
fi

main "$@"

# record the exit status lest it be overwritten:
# then reenable echo and propagate the code.
exit_status=$?
onExit

utils.sh指令碼內容:

 

 1 #!/usr/bin/env bash 2  3 # 4 # Licensed to the Apache Software Foundation (ASF) under one or more 5 # contributor license agreements.  See the NOTICE file distributed with 6 # this work for additional information regarding copyright ownership. 7 # The ASF licenses this file to You under the Apache License, Version 2.0 8 # (the "License"); you may not use this file except in compliance with 9 # the License.  You may obtain a copy of the License at10 #11 #    http://www.apache.org/licenses/LICENSE-2.012 #13 # Unless required by applicable law or agreed to in writing, software14 # distributed under the License is distributed on an "AS IS" BASIS,15 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.16 # See the License for the specific language governing permissions and17 # limitations under the License.18 #19 20 # Gather all all spark-submit options into SUBMISSION_OPTS21 function gatherSparkSubmitOpts() {22 23   if [ -z "$SUBMIT_USAGE_FUNCTION" ]; then24     echo "Function for printing usage of $0 is not set." 
1>&225     echo "Please set usage function to shell variable ‘SUBMIT_USAGE_FUNCTION‘ in $0" 1>&226     exit 127   fi28 29   # NOTE: If you add or remove spark-sumbmit options,30   # modify NOT ONLY this script but also SparkSubmitArgument.scala31   SUBMISSION_OPTS=()32   APPLICATION_OPTS=()33   while (($#)); do34     case "$1" in35       --master | --deploy-mode | --class | --name | --jars | --py-files | --files | 36       --conf | --properties-file | --driver-memory | --driver-java-options | 37       --driver-library-path | --driver-class-path | --executor-memory | --driver-cores | 38       --total-executor-cores | --executor-cores | --queue | --num-executors | --archives)39         if [[ $# -lt 2 ]]; then40           "$SUBMIT_USAGE_FUNCTION"41           exit 1;42         fi43         SUBMISSION_OPTS+=("$1"); shift44         SUBMISSION_OPTS+=("$1"); shift45         ;;46 47       --verbose | -v | --supervise)48         SUBMISSION_OPTS+=("$1"); shift49         ;;50 51       *)52         APPLICATION_OPTS+=("$1"); shift53         ;;54     esac55   done56 57   export SUBMISSION_OPTS58   export APPLICATION_OPTS59 }
View Code

 

Spark-shell啟動指令碼解讀

相關文章

聯繫我們

該頁面正文內容均來源於網絡整理,並不代表阿里雲官方的觀點,該頁面所提到的產品和服務也與阿里云無關,如果該頁面內容對您造成了困擾,歡迎寫郵件給我們,收到郵件我們將在5個工作日內處理。

如果您發現本社區中有涉嫌抄襲的內容,歡迎發送郵件至: info-contact@alibabacloud.com 進行舉報並提供相關證據,工作人員會在 5 個工作天內聯絡您,一經查實,本站將立刻刪除涉嫌侵權內容。

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.