
Commit 656e3c2

Drop support for python 3.7 (#1334)
Drop support for Python 3.7 and upgrade the minimum versions of some dependencies.
Parent: 9424e3f · Commit: 656e3c2

File tree: 10 files changed (+29, −37 lines)
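
The practical effect of raising `requires-python` to `>=3.8` is that pip refuses to install new releases on a Python 3.7 interpreter. As a rough illustration of that effect (a sketch, not code from this commit), an equivalent runtime guard would look like:

```python
import sys

# Illustrative guard mirroring the new pyproject.toml floor
# (requires-python = ">=3.8"); pip performs the real check at install time.
if sys.version_info < (3, 8):
    raise RuntimeError("enterprise_gateway requires Python >= 3.8")
```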

.github/workflows/build.yml (2 additions, 2 deletions)

@@ -13,7 +13,7 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest]
-        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
+        python-version: ["3.8", "3.9", "3.10", "3.11"]
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -93,7 +93,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
         with:
-          python_version: "3.7"
+          python_version: "3.8"
       - uses: jupyterlab/maintainer-tools/.github/actions/install-minimums@v1
       - name: Run the unit tests
        run: |
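
The first hunk shrinks the test matrix; since `os` has a single entry, the job count drops from five to four. A hypothetical expansion of the matrix semantics in Python (the names here are illustrative, the fan-out behavior is GitHub Actions'):

```python
# Hypothetical expansion of the updated CI matrix: GitHub Actions runs one
# job per (os, python-version) pair; the "3.7" job no longer exists.
oses = ["ubuntu-latest"]
python_versions = ["3.8", "3.9", "3.10", "3.11"]
jobs = [(o, py) for o in oses for py in python_versions]
assert len(jobs) == 4
```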

.pre-commit-config.yaml (1 addition, 1 deletion)

@@ -32,7 +32,7 @@ repos:
           [mdformat-gfm, mdformat-frontmatter, mdformat-footnote]

   - repo: https://github.com/psf/black
-    rev: 23.3.0
+    rev: 23.9.1
    hooks:
      - id: black

docs/source/developers/kernel-specification.md (2 additions, 2 deletions)

@@ -21,8 +21,8 @@ Here's an example from the [`spark_python_yarn_cluster`](https://github.com/jupy
   "env": {
     "SPARK_HOME": "/usr/hdp/current/spark2-client",
     "PYSPARK_PYTHON": "/opt/conda/bin/python",
-    "PYTHONPATH": "${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
-    "SPARK_OPTS": "--master yarn --deploy-mode cluster --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} --conf spark.yarn.submit.waitAppCompletion=false --conf spark.yarn.appMasterEnv.PYTHONUSERBASE=/home/${KERNEL_USERNAME}/.local --conf spark.yarn.appMasterEnv.PYTHONPATH=${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip --conf spark.yarn.appMasterEnv.PATH=/opt/conda/bin:$PATH ${KERNEL_EXTRA_SPARK_OPTS}",
+    "PYTHONPATH": "${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
+    "SPARK_OPTS": "--master yarn --deploy-mode cluster --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} --conf spark.yarn.submit.waitAppCompletion=false --conf spark.yarn.appMasterEnv.PYTHONUSERBASE=/home/${KERNEL_USERNAME}/.local --conf spark.yarn.appMasterEnv.PYTHONPATH=${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip --conf spark.yarn.appMasterEnv.PATH=/opt/conda/bin:$PATH ${KERNEL_EXTRA_SPARK_OPTS}",
     "LAUNCH_OPTS": ""
   },
   "argv": [

docs/source/developers/rest-api.rst (3 additions, 3 deletions)

@@ -178,7 +178,7 @@ the icon filenames to be used by the front-end application.
   "env": {
     "SPARK_HOME": "/usr/hdp/current/spark2-client",
     "PYSPARK_PYTHON": "/opt/conda/bin/python",
-    "PYTHONPATH": "${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
+    "PYTHONPATH": "${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
     "SPARK_OPTS": "--master yarn --deploy-mode client --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} ${KERNEL_EXTRA_SPARK_OPTS}",
     "LAUNCH_OPTS": ""
   },
@@ -215,8 +215,8 @@ the icon filenames to be used by the front-end application.
   "env": {
     "SPARK_HOME": "/usr/hdp/current/spark2-client",
     "PYSPARK_PYTHON": "/opt/conda/bin/python",
-    "PYTHONPATH": "${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
-    "SPARK_OPTS": "--master yarn --deploy-mode cluster --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} --conf spark.yarn.submit.waitAppCompletion=false --conf spark.yarn.appMasterEnv.PYTHONUSERBASE=/home/${KERNEL_USERNAME}/.local --conf spark.yarn.appMasterEnv.PYTHONPATH=${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip --conf spark.yarn.appMasterEnv.PATH=/opt/conda/bin:$PATH ${KERNEL_EXTRA_SPARK_OPTS}",
+    "PYTHONPATH": "${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
+    "SPARK_OPTS": "--master yarn --deploy-mode cluster --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} --conf spark.yarn.submit.waitAppCompletion=false --conf spark.yarn.appMasterEnv.PYTHONUSERBASE=/home/${KERNEL_USERNAME}/.local --conf spark.yarn.appMasterEnv.PYTHONPATH=${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip --conf spark.yarn.appMasterEnv.PATH=/opt/conda/bin:$PATH ${KERNEL_EXTRA_SPARK_OPTS}",
     "LAUNCH_OPTS": ""
   },
   "display_name": "Spark - Python (YARN Cluster Mode)",

docs/source/other/troubleshooting.md (11 additions, 18 deletions)

@@ -125,19 +125,19 @@ Scenario: **I'm trying to launch a (Python/Scala/R) kernel, but it failed with `

 ```
 Traceback (most recent call last):
-  File "/opt/conda/lib/python3.7/site-packages/tornado/web.py", line 1512, in _execute
+  File "/opt/conda/lib/python3.8/site-packages/tornado/web.py", line 1512, in _execute
     result = yield result
-  File "/opt/conda/lib/python3.7/site-packages/tornado/gen.py", line 1055, in run
+  File "/opt/conda/lib/python3.8/site-packages/tornado/gen.py", line 1055, in run
     value = future.result()
   ...
   ...
-  File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/kernels/remotemanager.py", line 125, in _launch_kernel
+  File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/kernels/remotemanager.py", line 125, in _launch_kernel
     return self.process_proxy.launch_process(kernel_cmd, **kw)
-  File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/processproxies/yarn.py", line 63, in launch_process
+  File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/processproxies/yarn.py", line 63, in launch_process
     self.confirm_remote_startup(kernel_cmd, **kw)
-  File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/processproxies/yarn.py", line 174, in confirm_remote_startup
+  File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/processproxies/yarn.py", line 174, in confirm_remote_startup
     ready_to_connect = self.receive_connection_info()
-  File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 565, in receive_connection_info
+  File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 565, in receive_connection_info
     raise e
 TypeError: Incorrect padding
 ```
@@ -166,17 +166,17 @@ Scenario: **I'm trying to launch a (Python/Scala/R) kernel with port range, but

 ```
 Traceback (most recent call last):
-  File "/opt/conda/lib/python3.7/site-packages/tornado/web.py", line 1511, in _execute
+  File "/opt/conda/lib/python3.8/site-packages/tornado/web.py", line 1511, in _execute
     result = yield result
-  File "/opt/conda/lib/python3.7/site-packages/tornado/gen.py", line 1055, in run
+  File "/opt/conda/lib/python3.8/site-packages/tornado/gen.py", line 1055, in run
     value = future.result()
   ....
   ....
-  File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 478, in __init__
+  File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 478, in __init__
     super(RemoteProcessProxy, self).__init__(kernel_manager, proxy_config)
-  File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 87, in __init__
+  File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 87, in __init__
     self._validate_port_range(proxy_config)
-  File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 407, in _validate_port_range
+  File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 407, in _validate_port_range
     "port numbers is (1024, 65535).".format(self.lower_port))
 RuntimeError: Invalid port range '1000..2000' specified. Range for valid port numbers is (1024, 65535).
 ```
@@ -214,13 +214,6 @@ This is usually seen when you are trying to use more resources then what is avai
 To address this issue, increase the amount of memory available for your Hadoop YARN application or another
 resource manager managing the kernel. For example, on Kubernetes, this may be a time when the kernel specification's [kernel-pod.yaml.j2](https://github.com/jupyter-server/enterprise_gateway/blob/main/etc/kernel-launchers/kubernetes/scripts/kernel-pod.yaml.j2) file should be extended with resource quotas.

-## Spark and Python Versions
-
-Scenario: **PySpark 2.4.x fails on Python 3.8**
-
-PySpark 2.4.x fails on Python 3.8 as described in [SPARK-29536](https://issues.apache.org/jira/browse/SPARK-29536).
-Use Python 3.7.x as the issue only seems to have been resolved on Spark 3.0.
-
 ## Kerberos

 Scenario: **I'm trying to use a notebook with user impersonation on a Kerberos enabled cluster, but it fails to authenticate.**
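
The port-range failure shown in the second traceback of this troubleshooting diff comes from the process proxy's validation in `processproxy.py`. A simplified re-creation of that check (assumed behavior, not the actual implementation):

```python
# Simplified sketch (assumed behavior) of the validation behind the
# "Invalid port range" error: only ports within 1024-65535 are accepted.
def validate_port_range(lower_port: int, upper_port: int) -> None:
    if lower_port < 1024 or upper_port > 65535:
        raise RuntimeError(
            f"Invalid port range '{lower_port}..{upper_port}' specified. "
            "Range for valid port numbers is (1024, 65535)."
        )


validate_port_range(1000, 2000)  # raises, matching the documented error
```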

etc/docker/demo-base/README.md (1 addition, 1 deletion)

@@ -4,7 +4,7 @@
 - Hadoop 2.7.7
 - Apache Spark 2.4.6
 - Java 1.8 runtime
-- Mini-conda latest (python 3.7) with R packages
+- Mini-conda latest (python 3.8) with R packages
 - Toree 0.4.0-incubating
 - `jovyan` service user, with system users `elyra`, `bob`, and `alice`. The jovyan uid is `1000` to match other jupyter
   images.

etc/kernelspecs/spark_python_yarn_client/kernel.json (1 addition, 1 deletion)

@@ -10,7 +10,7 @@
   "env": {
     "SPARK_HOME": "/usr/hdp/current/spark2-client",
     "PYSPARK_PYTHON": "/opt/conda/bin/python",
-    "PYTHONPATH": "${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
+    "PYTHONPATH": "${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
     "SPARK_OPTS": "--master yarn --deploy-mode client --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} ${KERNEL_EXTRA_SPARK_OPTS}",
     "LAUNCH_OPTS": ""
   },

etc/kernelspecs/spark_python_yarn_cluster/kernel.json (2 additions, 2 deletions)

@@ -10,8 +10,8 @@
   "env": {
     "SPARK_HOME": "/usr/hdp/current/spark2-client",
     "PYSPARK_PYTHON": "/opt/conda/bin/python",
-    "PYTHONPATH": "${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
-    "SPARK_OPTS": "--master yarn --deploy-mode cluster --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} --conf spark.yarn.submit.waitAppCompletion=false --conf spark.yarn.appMasterEnv.PYTHONUSERBASE=/home/${KERNEL_USERNAME}/.local --conf spark.yarn.appMasterEnv.PYTHONPATH=${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip --conf spark.yarn.appMasterEnv.PATH=/opt/conda/bin:$PATH --conf spark.yarn.maxAppAttempts=1 ${KERNEL_EXTRA_SPARK_OPTS}",
+    "PYTHONPATH": "${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
+    "SPARK_OPTS": "--master yarn --deploy-mode cluster --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} --conf spark.yarn.submit.waitAppCompletion=false --conf spark.yarn.appMasterEnv.PYTHONUSERBASE=/home/${KERNEL_USERNAME}/.local --conf spark.yarn.appMasterEnv.PYTHONPATH=${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip --conf spark.yarn.appMasterEnv.PATH=/opt/conda/bin:$PATH --conf spark.yarn.maxAppAttempts=1 ${KERNEL_EXTRA_SPARK_OPTS}",
     "LAUNCH_OPTS": ""
   },
   "argv": [

pyproject.toml (4 additions, 5 deletions)

@@ -15,25 +15,24 @@ classifiers = [
   "Programming Language :: Python",
   "Programming Language :: Python :: 3",
   "Programming Language :: Python :: 3 :: Only",
-  "Programming Language :: Python :: 3.7",
   "Programming Language :: Python :: 3.8",
   "Programming Language :: Python :: 3.9",
   "Programming Language :: Python :: 3.10",
   "Programming Language :: Python :: 3.11",
 ]
-requires-python = ">=3.7"
+requires-python = ">=3.8"
 dependencies = [
   "docker>=3.5.0",
   "future",
   "jinja2>=3.1",
   "jupyter_client>=6.1.12,<7", # Remove cap once EG supports kernel provisioners
   "jupyter_core>=4.7.0",
   "kubernetes>=18.20.0",
-  "jupyter_server>=1.3,<2.0", # Remove cap (increase floor) once EG suport kernel provisioners
+  "jupyter_server>=1.7,<2.0", # Remove cap (increase floor) once EG suport kernel provisioners
   "paramiko>=2.11",
   "pexpect>=4.8.0",
   "pycryptodomex>=3.9.7",
-  "pyzmq>=17.0,<25.0", # Pyzmq 25 removes deprecated code that jupyter_client 6 uses, remove if v6 gets updated
+  "pyzmq>=20.0,<25.0", # Pyzmq 25 removes deprecated code that jupyter_client 6 uses, remove if v6 gets updated
   "requests>=2.14.2",
   "tornado>=6.1",
   "traitlets>=5.3.0",
@@ -65,7 +64,7 @@ test = [
   "websocket-client"
 ]
 lint = [
-  "black[jupyter]==23.3.0",
+  "black[jupyter]==23.9.1",
   "mdformat>0.7",
   "mdformat-gfm>=0.3.5",
   "ruff==0.0.290"

requirements.yml (2 additions, 2 deletions)

@@ -7,14 +7,14 @@ dependencies:
   - jinja2>=3.1
   - jupyter_client>=6.1
   - jupyter_core>=4.6.0
-  - jupyter_server>=1.2
+  - jupyter_server>=1.7
   - paramiko>=2.1.2
   - pexpect>=4.2.0
   - pip
   - pre-commit
   - pycryptodomex>=3.9.7
   - python-kubernetes>=18.20.0
-  - pyzmq>=17.0.0
+  - pyzmq>=20.0.0
   - requests>=2.7,<3.0
   - tornado>=6.1
   - traitlets>=4.2.0
