[SPARK-11518][DEPLOY, WINDOWS] Handle spaces in Windows command scripts
Author: Jon Maurer <[email protected]>
Author: Jonathan Maurer <[email protected]>

Closes apache#10789 from tritab/cmd_updates.
tritab authored and srowen committed Feb 10, 2016
1 parent 9269036 commit 2ba9b6a
Showing 14 changed files with 27 additions and 30 deletions.
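
The theme of every hunk below is the same: cmd.exe splits unquoted command lines on spaces, so any %SPARK_HOME% or %~dp0 expansion that may contain a space (for example an install under C:\Program Files) must be quoted at the point of use. A minimal sketch of the failure mode, with a hypothetical install path:

    rem Hypothetical install path containing a space.
    set SPARK_HOME=C:\Program Files\spark

    rem Unquoted, the expansion splits at the space: cmd tries to run
    rem "C:\Program" with "Files\spark\bin\spark-class.cmd" as its argument.
    rem     cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.hive.beeline.BeeLine %*

    rem Quoted, the expanded path stays a single token.
    rem     cmd /V /E /C "%SPARK_HOME%\bin\spark-class.cmd" org.apache.hive.beeline.BeeLine %*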
2 changes: 1 addition & 1 deletion bin/beeline.cmd
@@ -18,4 +18,4 @@ rem limitations under the License.
 rem
 
 set SPARK_HOME=%~dp0..
-cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.hive.beeline.BeeLine %*
+cmd /V /E /C "%SPARK_HOME%\bin\spark-class.cmd" org.apache.hive.beeline.BeeLine %*
6 changes: 3 additions & 3 deletions bin/load-spark-env.cmd
@@ -27,16 +27,16 @@ if [%SPARK_ENV_LOADED%] == [] (
   if not [%SPARK_CONF_DIR%] == [] (
     set user_conf_dir=%SPARK_CONF_DIR%
   ) else (
-    set user_conf_dir=%~dp0..\conf
+    set user_conf_dir=..\conf
   )
 
   call :LoadSparkEnv
 )
 
 rem Setting SPARK_SCALA_VERSION if not already set.
 
-set ASSEMBLY_DIR2=%SPARK_HOME%/assembly/target/scala-2.11
-set ASSEMBLY_DIR1=%SPARK_HOME%/assembly/target/scala-2.10
+set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-2.11"
+set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-2.10"
 
 if [%SPARK_SCALA_VERSION%] == [] (
 
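
A cmd quirk visible in the ASSEMBLY_DIR hunks above: quotes on the right-hand side of a set assignment are stored as part of the variable's value, so every later expansion carries them along and callers must not add a second pair. A quick illustration (the variable name is made up for the demo):

    set DEMO_DIR="C:\Program Files\spark\lib"
    echo %DEMO_DIR%
    rem Prints the quotes too: "C:\Program Files\spark\lib"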
2 changes: 1 addition & 1 deletion bin/pyspark.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running PySpark. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %~dp0pyspark2.cmd %*
+cmd /V /E /C "%~dp0pyspark2.cmd" %*
4 changes: 2 additions & 2 deletions bin/pyspark2.cmd
@@ -20,7 +20,7 @@ rem
 rem Figure out where the Spark framework is installed
 set SPARK_HOME=%~dp0..
 
-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
 set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]
 
 rem Figure out which Python to use.
@@ -35,4 +35,4 @@ set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.9.1-src.zip;%PYTHONPATH%
 set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
 set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py
 
-call %SPARK_HOME%\bin\spark-submit2.cmd pyspark-shell-main --name "PySparkShell" %*
+call "%SPARK_HOME%\bin\spark-submit2.cmd" pyspark-shell-main --name "PySparkShell" %*
2 changes: 1 addition & 1 deletion bin/run-example.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running a Spark example. To avoid polluting
 rem the environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %~dp0run-example2.cmd %*
+cmd /V /E /C "%~dp0run-example2.cmd" %*
15 changes: 6 additions & 9 deletions bin/run-example2.cmd
@@ -20,12 +20,9 @@ rem
 set SCALA_VERSION=2.10
 
 rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0..\
+set SPARK_HOME=%~dp0..
 
-rem Export this as SPARK_HOME
-set SPARK_HOME=%FWDIR%
-
-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
 
 rem Test that an argument was given
 if not "x%1"=="x" goto arg_given
@@ -36,12 +33,12 @@ if not "x%1"=="x" goto arg_given
 goto exit
 :arg_given
 
-set EXAMPLES_DIR=%FWDIR%examples
+set EXAMPLES_DIR=%SPARK_HOME%\examples
 
 rem Figure out the JAR file that our examples were packaged into.
 set SPARK_EXAMPLES_JAR=
-if exist "%FWDIR%RELEASE" (
-  for %%d in ("%FWDIR%lib\spark-examples*.jar") do (
+if exist "%SPARK_HOME%\RELEASE" (
+  for %%d in ("%SPARK_HOME%\lib\spark-examples*.jar") do (
     set SPARK_EXAMPLES_JAR=%%d
   )
 ) else (
@@ -80,7 +77,7 @@ if "%~1" neq "" (
 )
 if defined ARGS set ARGS=%ARGS:~1%
 
-call "%FWDIR%bin\spark-submit.cmd" ^
+call "%SPARK_HOME%\bin\spark-submit.cmd" ^
   --master %EXAMPLE_MASTER% ^
   --class %EXAMPLE_CLASS% ^
   "%SPARK_EXAMPLES_JAR%" %ARGS%
2 changes: 1 addition & 1 deletion bin/spark-class.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running a Spark class. To avoid polluting
 rem the environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %~dp0spark-class2.cmd %*
+cmd /V /E /C "%~dp0spark-class2.cmd" %*
10 changes: 5 additions & 5 deletions bin/spark-class2.cmd
@@ -20,7 +20,7 @@ rem
 rem Figure out where the Spark framework is installed
 set SPARK_HOME=%~dp0..
 
-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
 
 rem Test that an argument was given
 if "x%1"=="x" (
@@ -32,9 +32,9 @@ rem Find assembly jar
 set SPARK_ASSEMBLY_JAR=0
 
 if exist "%SPARK_HOME%\RELEASE" (
-  set ASSEMBLY_DIR=%SPARK_HOME%\lib
+  set ASSEMBLY_DIR="%SPARK_HOME%\lib"
 ) else (
-  set ASSEMBLY_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%
+  set ASSEMBLY_DIR="%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%"
 )
 
 for %%d in (%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar) do (
@@ -50,7 +50,7 @@ set LAUNCH_CLASSPATH=%SPARK_ASSEMBLY_JAR%
 
 rem Add the launcher build dir to the classpath if requested.
 if not "x%SPARK_PREPEND_CLASSES%"=="x" (
-  set LAUNCH_CLASSPATH=%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCH_CLASSPATH%
+  set LAUNCH_CLASSPATH="%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCH_CLASSPATH%"
 )
 
 set _SPARK_ASSEMBLY=%SPARK_ASSEMBLY_JAR%
@@ -62,7 +62,7 @@ if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
 rem The launcher library prints the command to be executed in a single line suitable for being
 rem executed by the batch interpreter. So read all the output of the launcher into a variable.
 set LAUNCHER_OUTPUT=%temp%\spark-class-launcher-output-%RANDOM%.txt
-"%RUNNER%" -cp %LAUNCH_CLASSPATH% org.apache.spark.launcher.Main %* > %LAUNCHER_OUTPUT%
+"%RUNNER%" -cp "%LAUNCH_CLASSPATH%" org.apache.spark.launcher.Main %* > %LAUNCHER_OUTPUT%
 for /f "tokens=*" %%i in (%LAUNCHER_OUTPUT%) do (
   set SPARK_CMD=%%i
 )
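
The last spark-class2.cmd hunk sits inside the launcher handshake: the Java launcher prints the fully built command to a temp file, and the for /f loop reads that single line back into SPARK_CMD for execution. A standalone sketch of the read-back idiom, using a hypothetical demo file (usebackq is needed once the file name itself is quoted; note the committed script still expands %LAUNCHER_OUTPUT% unquoted, so a temp path with spaces would still trip it):

    rem Write a hypothetical one-line command, standing in for the launcher output.
    echo echo hello from the launched command> "%temp%\launcher-demo.txt"

    rem "tokens=*" keeps the entire line, spaces included, in %%i.
    for /f "usebackq tokens=*" %%i in ("%temp%\launcher-demo.txt") do (
      set DEMO_CMD=%%i
    )
    %DEMO_CMD%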
2 changes: 1 addition & 1 deletion bin/spark-shell.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running Spark shell. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %~dp0spark-shell2.cmd %*
+cmd /V /E /C "%~dp0spark-shell2.cmd" %*
2 changes: 1 addition & 1 deletion bin/spark-shell2.cmd
@@ -32,4 +32,4 @@ if "x%SPARK_SUBMIT_OPTS%"=="x" (
 set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true"
 
 :run_shell
-%SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main --name "Spark shell" %*
+"%SPARK_HOME%\bin\spark-submit2.cmd" --class org.apache.spark.repl.Main --name "Spark shell" %*
2 changes: 1 addition & 1 deletion bin/spark-submit.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running Spark submit. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %~dp0spark-submit2.cmd %*
+cmd /V /E /C spark-submit2.cmd %*
2 changes: 1 addition & 1 deletion bin/spark-submit2.cmd
@@ -24,4 +24,4 @@ rem disable randomized hash for string in Python 3.3+
 set PYTHONHASHSEED=0
 
 set CLASS=org.apache.spark.deploy.SparkSubmit
-%~dp0spark-class2.cmd %CLASS% %*
+"%~dp0spark-class2.cmd" %CLASS% %*
2 changes: 1 addition & 1 deletion bin/sparkR.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running SparkR. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %~dp0sparkR2.cmd %*
+cmd /V /E /C "%~dp0sparkR2.cmd" %*
4 changes: 2 additions & 2 deletions bin/sparkR2.cmd
@@ -20,7 +20,7 @@ rem
 rem Figure out where the Spark framework is installed
 set SPARK_HOME=%~dp0..
 
-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
 
 
-call %SPARK_HOME%\bin\spark-submit2.cmd sparkr-shell-main %*
+call "%SPARK_HOME%\bin\spark-submit2.cmd" sparkr-shell-main %*
