@@ -43,32 +43,22 @@ function delete_virtualenv() {
4343}
4444trap delete_virtualenv EXIT
4545
46- PYTHON_EXECS=()
47- # Some systems don't have pip or virtualenv - in those cases our tests won't work.
48- if hash virtualenv 2> /dev/null && [ ! -n " $USE_CONDA " ]; then
49- echo " virtualenv installed - using. Note if this is a conda virtual env you may wish to set USE_CONDA"
50- # test only against python3
51- if hash python3 2> /dev/null; then
52- PYTHON_EXECS=(' python3' )
53- else
54- echo " Python3 not installed on system, skipping pip installability tests"
55- exit 0
56- fi
57- elif hash conda 2> /dev/null; then
58- echo " Using conda virtual environments"
59- PYTHON_EXECS=(' 3.10' )
60- USE_CONDA=1
46+
47+ if [ -z " ${PYTHON_TO_TEST} " ]; then
48+ PYTHON_EXECUTABLE=" python3"
6149else
62- echo " Missing virtualenv & conda, skipping pip installability tests"
63- exit 0
50+ PYTHON_EXECUTABLE=" ${PYTHON_TO_TEST} "
6451fi
65- if ! hash pip 2> /dev/null; then
66- echo " Missing pip, skipping pip installability tests."
52+
53+ if ! hash " $PYTHON_EXECUTABLE " 2> /dev/null; then
54+ echo " Python executable $PYTHON_EXECUTABLE not installed on system, skipping pip installability tests"
6755 exit 0
6856fi
6957
58+ echo " Using Python executable: $PYTHON_EXECUTABLE "
59+
7060# Determine which version of PySpark we are building for archive name
71- PYSPARK_VERSION=$( python3 -c " exec(open('python/pyspark/version.py').read());print(__version__)" )
61+ PYSPARK_VERSION=$( $PYTHON_EXECUTABLE -c " exec(open('python/pyspark/version.py').read());print(__version__)" )
7262PYSPARK_DIST=" $FWDIR /python/dist/pyspark-$PYSPARK_VERSION .tar.gz"
7363# The pip install options we use for all the pip commands
7464PIP_OPTIONS=" --upgrade --no-cache-dir --force-reinstall --use-pep517"
@@ -80,64 +70,46 @@ PIP_COMMANDS=("pip install $PIP_OPTIONS $PYSPARK_DIST"
8070# In this test, explicitly exclude user sitepackages to prevent side effects
8171export PYTHONNOUSERSITE=1
8272
83- for python in " ${PYTHON_EXECS[@]} " ; do
84- for install_command in " ${PIP_COMMANDS[@]} " ; do
85- echo " Testing pip installation with python $python "
86- # Create a temp directory for us to work in and save its name to a file for cleanup
87- echo " Using $VIRTUALENV_BASE for virtualenv"
88- VIRTUALENV_PATH=" $VIRTUALENV_BASE " /$python
89- rm -rf " $VIRTUALENV_PATH "
90- if [ -n " $USE_CONDA " ]; then
91- conda create -y -p " $VIRTUALENV_PATH " python=$python numpy pandas pip setuptools
92- source activate " $VIRTUALENV_PATH " || conda activate " $VIRTUALENV_PATH "
93- else
94- mkdir -p " $VIRTUALENV_PATH "
95- virtualenv --python=$python " $VIRTUALENV_PATH "
96- source " $VIRTUALENV_PATH " /bin/activate
97- fi
98- # Upgrade pip & friends if using virtual env
99- if [ ! -n " $USE_CONDA " ]; then
100- pip install --upgrade pip wheel numpy
101- fi
102-
103- echo " Creating pip installable source dist"
104- cd " $FWDIR " /python
105- # Delete the egg info file if it exists, this can cache the setup file.
106- rm -rf pyspark.egg-info || echo " No existing egg info file, skipping deletion"
107- python3 packaging/classic/setup.py sdist
108-
109-
110- echo " Installing dist into virtual env"
111- cd dist
112- # Verify that the dist directory only contains one thing to install
113- sdists=(* .tar.gz)
114- if [ ${# sdists[@]} -ne 1 ]; then
115- echo " Unexpected number of targets found in dist directory - please cleanup existing sdists first."
116- exit -1
117- fi
118- # Do the actual installation
119- cd " $FWDIR "
120- $install_command
121-
122- cd /
123-
124- echo " Run basic sanity check on pip installed version with spark-submit"
125- spark-submit " $FWDIR " /dev/pip-sanity-check.py
126- echo " Run basic sanity check with import based"
127- python3 " $FWDIR " /dev/pip-sanity-check.py
128- echo " Run the tests for context.py"
129- python3 " $FWDIR " /python/pyspark/core/context.py
130-
131- cd " $FWDIR "
132-
133- # conda / virtualenv environments need to be deactivated differently
134- if [ -n " $USE_CONDA " ]; then
135- source deactivate || conda deactivate
136- else
137- deactivate
138- fi
139-
140- done
73+ for install_command in " ${PIP_COMMANDS[@]} " ; do
74+ # Create a temp directory for us to work in and save its name to a file for cleanup
75+ echo " Using $VIRTUALENV_BASE for virtualenv"
76+ VIRTUALENV_PATH=" $VIRTUALENV_BASE " /$PYTHON_EXECUTABLE
77+ rm -rf " $VIRTUALENV_PATH "
78+ $PYTHON_EXECUTABLE -m venv " $VIRTUALENV_PATH "
79+ source " $VIRTUALENV_PATH " /bin/activate
80+ pip install --upgrade pip wheel numpy setuptools
81+
82+ echo " Creating pip installable source dist"
83+ cd " $FWDIR " /python
84+ # Delete the egg info file if it exists, this can cache the setup file.
85+ rm -rf pyspark.egg-info || echo " No existing egg info file, skipping deletion"
86+ python3 packaging/classic/setup.py sdist
87+
88+ echo " Installing dist into virtual env"
89+ cd dist
90+ # Verify that the dist directory only contains one thing to install
91+ sdists=(* .tar.gz)
92+ if [ ${# sdists[@]} -ne 1 ]; then
93+ echo " Unexpected number of targets found in dist directory - please cleanup existing sdists first."
94+ exit 1
95+ fi
96+ # Do the actual installation
97+ cd " $FWDIR "
98+ $install_command
99+
100+ cd /
101+
102+ echo " Run basic sanity check on pip installed version with spark-submit"
103+ spark-submit " $FWDIR " /dev/pip-sanity-check.py
104+ echo " Run basic sanity check with import based"
105+ python3 " $FWDIR " /dev/pip-sanity-check.py
106+ echo " Run the tests for context.py"
107+ python3 " $FWDIR " /python/pyspark/core/context.py
108+
109+ cd " $FWDIR "
110+
111+ deactivate
112+
141113done
142114
143115exit 0
0 commit comments