Skip to content

Commit 55ab777

Browse files
sarutak authored and JoshRosen committed
[SPARK-3870] EOL character enforcement
We have shell scripts and Windows batch files, so we should enforce proper EOL character. Author: Kousuke Saruta <[email protected]> Closes apache#2726 from sarutak/eol-enforcement and squashes the following commits: 9748c3f [Kousuke Saruta] Fixed make.bat 252de89 [Kousuke Saruta] Removed extra characters from make.bat 5b81c00 [Kousuke Saruta] Merge branch 'master' of git://git.apache.org/spark into eol-enforcement 8633ed2 [Kousuke Saruta] merge branch 'master' of git://git.apache.org/spark into eol-enforcement 5d630d8 [Kousuke Saruta] Merged ba10797 [Kousuke Saruta] Merge branch 'master' of git://git.apache.org/spark into eol-enforcement 7407515 [Kousuke Saruta] Merge branch 'master' of git://git.apache.org/spark into eol-enforcement 772fd4e [Kousuke Saruta] Normized EOL character in make.bat and compute-classpath.cmd ac7f873 [Kousuke Saruta] Added an entry for .gitattributes to .rat-excludes 1570e77 [Kousuke Saruta] Added .gitattributes
1 parent f1e7361 commit 55ab777

File tree

5 files changed

+369
-366
lines changed

5 files changed

+369
-366
lines changed

.gitattributes

+2
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
*.bat text eol=crlf
2+
*.cmd text eol=crlf

.rat-excludes

+1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
target
22
.gitignore
3+
.gitattributes
34
.project
45
.classpath
56
.mima-excludes

bin/compute-classpath.cmd

+117-117
Original file line numberDiff line numberDiff line change
@@ -1,117 +1,117 @@
1-
@echo off
2-
3-
rem
4-
rem Licensed to the Apache Software Foundation (ASF) under one or more
5-
rem contributor license agreements. See the NOTICE file distributed with
6-
rem this work for additional information regarding copyright ownership.
7-
rem The ASF licenses this file to You under the Apache License, Version 2.0
8-
rem (the "License"); you may not use this file except in compliance with
9-
rem the License. You may obtain a copy of the License at
10-
rem
11-
rem http://www.apache.org/licenses/LICENSE-2.0
12-
rem
13-
rem Unless required by applicable law or agreed to in writing, software
14-
rem distributed under the License is distributed on an "AS IS" BASIS,
15-
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16-
rem See the License for the specific language governing permissions and
17-
rem limitations under the License.
18-
rem
19-
20-
rem This script computes Spark's classpath and prints it to stdout; it's used by both the "run"
21-
rem script and the ExecutorRunner in standalone cluster mode.
22-
23-
rem If we're called from spark-class2.cmd, it already set enabledelayedexpansion and setting
24-
rem it here would stop us from affecting its copy of the CLASSPATH variable; otherwise we
25-
rem need to set it here because we use !datanucleus_jars! below.
26-
if "%DONT_PRINT_CLASSPATH%"=="1" goto skip_delayed_expansion
27-
setlocal enabledelayedexpansion
28-
:skip_delayed_expansion
29-
30-
set SCALA_VERSION=2.10
31-
32-
rem Figure out where the Spark framework is installed
33-
set FWDIR=%~dp0..\
34-
35-
rem Load environment variables from conf\spark-env.cmd, if it exists
36-
if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
37-
38-
rem Build up classpath
39-
set CLASSPATH=%SPARK_CLASSPATH%;%SPARK_SUBMIT_CLASSPATH%
40-
41-
if not "x%SPARK_CONF_DIR%"=="x" (
42-
set CLASSPATH=%CLASSPATH%;%SPARK_CONF_DIR%
43-
) else (
44-
set CLASSPATH=%CLASSPATH%;%FWDIR%conf
45-
)
46-
47-
if exist "%FWDIR%RELEASE" (
48-
for %%d in ("%FWDIR%lib\spark-assembly*.jar") do (
49-
set ASSEMBLY_JAR=%%d
50-
)
51-
) else (
52-
for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
53-
set ASSEMBLY_JAR=%%d
54-
)
55-
)
56-
57-
set CLASSPATH=%CLASSPATH%;%ASSEMBLY_JAR%
58-
59-
rem When Hive support is needed, Datanucleus jars must be included on the classpath.
60-
rem Datanucleus jars do not work if only included in the uber jar as plugin.xml metadata is lost.
61-
rem Both sbt and maven will populate "lib_managed/jars/" with the datanucleus jars when Spark is
62-
rem built with Hive, so look for them there.
63-
if exist "%FWDIR%RELEASE" (
64-
set datanucleus_dir=%FWDIR%lib
65-
) else (
66-
set datanucleus_dir=%FWDIR%lib_managed\jars
67-
)
68-
set "datanucleus_jars="
69-
for %%d in ("%datanucleus_dir%\datanucleus-*.jar") do (
70-
set datanucleus_jars=!datanucleus_jars!;%%d
71-
)
72-
set CLASSPATH=%CLASSPATH%;%datanucleus_jars%
73-
74-
set SPARK_CLASSES=%FWDIR%core\target\scala-%SCALA_VERSION%\classes
75-
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%repl\target\scala-%SCALA_VERSION%\classes
76-
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%mllib\target\scala-%SCALA_VERSION%\classes
77-
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%bagel\target\scala-%SCALA_VERSION%\classes
78-
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%graphx\target\scala-%SCALA_VERSION%\classes
79-
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%streaming\target\scala-%SCALA_VERSION%\classes
80-
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%tools\target\scala-%SCALA_VERSION%\classes
81-
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%sql\catalyst\target\scala-%SCALA_VERSION%\classes
82-
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%sql\core\target\scala-%SCALA_VERSION%\classes
83-
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%sql\hive\target\scala-%SCALA_VERSION%\classes
84-
85-
set SPARK_TEST_CLASSES=%FWDIR%core\target\scala-%SCALA_VERSION%\test-classes
86-
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%repl\target\scala-%SCALA_VERSION%\test-classes
87-
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%mllib\target\scala-%SCALA_VERSION%\test-classes
88-
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%bagel\target\scala-%SCALA_VERSION%\test-classes
89-
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%graphx\target\scala-%SCALA_VERSION%\test-classes
90-
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%streaming\target\scala-%SCALA_VERSION%\test-classes
91-
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%sql\catalyst\target\scala-%SCALA_VERSION%\test-classes
92-
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%sql\core\target\scala-%SCALA_VERSION%\test-classes
93-
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%sql\hive\target\scala-%SCALA_VERSION%\test-classes
94-
95-
if "x%SPARK_TESTING%"=="x1" (
96-
rem Add test clases to path - note, add SPARK_CLASSES and SPARK_TEST_CLASSES before CLASSPATH
97-
rem so that local compilation takes precedence over assembled jar
98-
set CLASSPATH=%SPARK_CLASSES%;%SPARK_TEST_CLASSES%;%CLASSPATH%
99-
)
100-
101-
rem Add hadoop conf dir - else FileSystem.*, etc fail
102-
rem Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts
103-
rem the configurtion files.
104-
if "x%HADOOP_CONF_DIR%"=="x" goto no_hadoop_conf_dir
105-
set CLASSPATH=%CLASSPATH%;%HADOOP_CONF_DIR%
106-
:no_hadoop_conf_dir
107-
108-
if "x%YARN_CONF_DIR%"=="x" goto no_yarn_conf_dir
109-
set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%
110-
:no_yarn_conf_dir
111-
112-
rem A bit of a hack to allow calling this script within run2.cmd without seeing output
113-
if "%DONT_PRINT_CLASSPATH%"=="1" goto exit
114-
115-
echo %CLASSPATH%
116-
117-
:exit
1+
@echo off
2+
3+
rem
4+
rem Licensed to the Apache Software Foundation (ASF) under one or more
5+
rem contributor license agreements. See the NOTICE file distributed with
6+
rem this work for additional information regarding copyright ownership.
7+
rem The ASF licenses this file to You under the Apache License, Version 2.0
8+
rem (the "License"); you may not use this file except in compliance with
9+
rem the License. You may obtain a copy of the License at
10+
rem
11+
rem http://www.apache.org/licenses/LICENSE-2.0
12+
rem
13+
rem Unless required by applicable law or agreed to in writing, software
14+
rem distributed under the License is distributed on an "AS IS" BASIS,
15+
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16+
rem See the License for the specific language governing permissions and
17+
rem limitations under the License.
18+
rem
19+
20+
rem This script computes Spark's classpath and prints it to stdout; it's used by both the "run"
21+
rem script and the ExecutorRunner in standalone cluster mode.
22+
23+
rem If we're called from spark-class2.cmd, it already set enabledelayedexpansion and setting
24+
rem it here would stop us from affecting its copy of the CLASSPATH variable; otherwise we
25+
rem need to set it here because we use !datanucleus_jars! below.
26+
if "%DONT_PRINT_CLASSPATH%"=="1" goto skip_delayed_expansion
27+
setlocal enabledelayedexpansion
28+
:skip_delayed_expansion
29+
30+
set SCALA_VERSION=2.10
31+
32+
rem Figure out where the Spark framework is installed
33+
set FWDIR=%~dp0..\
34+
35+
rem Load environment variables from conf\spark-env.cmd, if it exists
36+
if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
37+
38+
rem Build up classpath
39+
set CLASSPATH=%SPARK_CLASSPATH%;%SPARK_SUBMIT_CLASSPATH%
40+
41+
if not "x%SPARK_CONF_DIR%"=="x" (
42+
set CLASSPATH=%CLASSPATH%;%SPARK_CONF_DIR%
43+
) else (
44+
set CLASSPATH=%CLASSPATH%;%FWDIR%conf
45+
)
46+
47+
if exist "%FWDIR%RELEASE" (
48+
for %%d in ("%FWDIR%lib\spark-assembly*.jar") do (
49+
set ASSEMBLY_JAR=%%d
50+
)
51+
) else (
52+
for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
53+
set ASSEMBLY_JAR=%%d
54+
)
55+
)
56+
57+
set CLASSPATH=%CLASSPATH%;%ASSEMBLY_JAR%
58+
59+
rem When Hive support is needed, Datanucleus jars must be included on the classpath.
60+
rem Datanucleus jars do not work if only included in the uber jar as plugin.xml metadata is lost.
61+
rem Both sbt and maven will populate "lib_managed/jars/" with the datanucleus jars when Spark is
62+
rem built with Hive, so look for them there.
63+
if exist "%FWDIR%RELEASE" (
64+
set datanucleus_dir=%FWDIR%lib
65+
) else (
66+
set datanucleus_dir=%FWDIR%lib_managed\jars
67+
)
68+
set "datanucleus_jars="
69+
for %%d in ("%datanucleus_dir%\datanucleus-*.jar") do (
70+
set datanucleus_jars=!datanucleus_jars!;%%d
71+
)
72+
set CLASSPATH=%CLASSPATH%;%datanucleus_jars%
73+
74+
set SPARK_CLASSES=%FWDIR%core\target\scala-%SCALA_VERSION%\classes
75+
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%repl\target\scala-%SCALA_VERSION%\classes
76+
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%mllib\target\scala-%SCALA_VERSION%\classes
77+
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%bagel\target\scala-%SCALA_VERSION%\classes
78+
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%graphx\target\scala-%SCALA_VERSION%\classes
79+
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%streaming\target\scala-%SCALA_VERSION%\classes
80+
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%tools\target\scala-%SCALA_VERSION%\classes
81+
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%sql\catalyst\target\scala-%SCALA_VERSION%\classes
82+
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%sql\core\target\scala-%SCALA_VERSION%\classes
83+
set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%sql\hive\target\scala-%SCALA_VERSION%\classes
84+
85+
set SPARK_TEST_CLASSES=%FWDIR%core\target\scala-%SCALA_VERSION%\test-classes
86+
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%repl\target\scala-%SCALA_VERSION%\test-classes
87+
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%mllib\target\scala-%SCALA_VERSION%\test-classes
88+
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%bagel\target\scala-%SCALA_VERSION%\test-classes
89+
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%graphx\target\scala-%SCALA_VERSION%\test-classes
90+
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%streaming\target\scala-%SCALA_VERSION%\test-classes
91+
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%sql\catalyst\target\scala-%SCALA_VERSION%\test-classes
92+
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%sql\core\target\scala-%SCALA_VERSION%\test-classes
93+
set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%sql\hive\target\scala-%SCALA_VERSION%\test-classes
94+
95+
if "x%SPARK_TESTING%"=="x1" (
96+
rem Add test clases to path - note, add SPARK_CLASSES and SPARK_TEST_CLASSES before CLASSPATH
97+
rem so that local compilation takes precedence over assembled jar
98+
set CLASSPATH=%SPARK_CLASSES%;%SPARK_TEST_CLASSES%;%CLASSPATH%
99+
)
100+
101+
rem Add hadoop conf dir - else FileSystem.*, etc fail
102+
rem Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts
103+
rem the configurtion files.
104+
if "x%HADOOP_CONF_DIR%"=="x" goto no_hadoop_conf_dir
105+
set CLASSPATH=%CLASSPATH%;%HADOOP_CONF_DIR%
106+
:no_hadoop_conf_dir
107+
108+
if "x%YARN_CONF_DIR%"=="x" goto no_yarn_conf_dir
109+
set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%
110+
:no_yarn_conf_dir
111+
112+
rem A bit of a hack to allow calling this script within run2.cmd without seeing output
113+
if "%DONT_PRINT_CLASSPATH%"=="1" goto exit
114+
115+
echo %CLASSPATH%
116+
117+
:exit

python/docs/make.bat

+6-6
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
1-
@ECHO OFF
2-
3-
rem This is the entry point for running Sphinx documentation. To avoid polluting the
4-
rem environment, it just launches a new cmd to do the real work.
5-
6-
cmd /V /E /C %~dp0make2.bat %*
1+
@ECHO OFF
2+
3+
rem This is the entry point for running Sphinx documentation. To avoid polluting the
4+
rem environment, it just launches a new cmd to do the real work.
5+
6+
cmd /V /E /C %~dp0make2.bat %*

0 commit comments

Comments (0)