Skip to content

Commit 9e3b738

Browse files
committed
Apply changes in PR #631
1 parent b485305 commit 9e3b738

File tree

7 files changed

+84
-4
lines changed

7 files changed

+84
-4
lines changed

build_artifacts/v2/v2.6/v2.6.0/Dockerfile

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -172,9 +172,7 @@ RUN if [[ -z $ARG_BASED_ENV_IN_FILENAME ]] ; \
172172
sed -i 's="Spark"="SparkMagic Spark"=g' /opt/conda/share/jupyter/kernels/sparkkernel/kernel.json && \
173173
sed -i 's="PySpark"="SparkMagic PySpark"=g' /opt/conda/share/jupyter/kernels/pysparkkernel/kernel.json && \
174174
# Configure RTC - disable jupyter_collaboration by default
175-
jupyter labextension disable @jupyter/collaboration-extension && \
176-
/etc/patches/apply_patches.sh && \
177-
sudo rm -rf /etc/patches
175+
jupyter labextension disable @jupyter/collaboration-extension
178176

179177
# Patch glue kernels to use kernel wrapper
180178
COPY patch_glue_pyspark.json /opt/conda/share/jupyter/kernels/glue_pyspark/kernel.json

build_artifacts/v2/v2.6/v2.6.0/dirs/etc/patches/apply_patches.sh

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,35 @@
22

33
set -eux
44

5+
# Check if parameter is provided
6+
if [ $# -ne 1 ]; then
7+
echo "Usage: $0 [smus|studio-ai]"
8+
exit 1
9+
fi
10+
11+
# Validate parameter
12+
case "$1" in
13+
"smus")
14+
PATCH_DIR="/etc/patches/smus"
15+
;;
16+
"studio-ai")
17+
PATCH_DIR="/etc/patches/studio-ai"
18+
;;
19+
*)
20+
echo "Error: Parameter must be either 'smus' or 'studio-ai'"
21+
exit 1
22+
;;
23+
esac
24+
25+
# Check if patch directory exists
26+
if [ ! -d "$PATCH_DIR" ]; then
27+
echo "Error: Patch directory $PATCH_DIR does not exist"
28+
exit 1
29+
fi
30+
531
# Patch files can be generated via "diff -u /path/to/original_file /path/to/new_file > XXX_bad_package.patch"
632
# See https://www.thegeekstuff.com/2014/12/patch-command-examples/
7-
for PATCHFILE in /etc/patches/*.patch; do
33+
for PATCHFILE in "$PATCH_DIR"/*.patch; do
834
[ -f "$PATCHFILE" ] || continue
935
echo "Applying $PATCHFILE"
1036
(cd "/opt/conda" && patch --strip=3 < "$PATCHFILE")
Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
--- /opt/conda/lib/python3.11/site-packages/sparkmagic/livyclientlib/command.py
2+
+++ /opt/conda/lib/python3.11/site-packages/sparkmagic/livyclientlib/command.py
3+
@@ -54,6 +54,7 @@
4+
try:
5+
session.wait_for_idle()
6+
data = {"code": self.code}
7+
+ data["kind"] = session.kind
8+
response = session.http_client.post_statement(session.id, data)
9+
statement_id = response["id"]
10+
output = self._get_statement_output(session, statement_id)
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
--- /opt/conda/lib/python3.11/site-packages/sparkmagic/utils/utils.py
2+
+++ /opt/conda/lib/python3.11/site-packages/sparkmagic/utils/utils.py
3+
@@ -94,15 +94,12 @@
4+
5+
6+
def get_sessions_info_html(info_sessions, current_session_id):
7+
- html = (
8+
- """<table>
9+
-<tr><th>ID</th><th>YARN Application ID</th><th>Kind</th><th>State</th><th>Spark UI</th><th>Driver log</th><th>User</th><th>Current session?</th></tr>"""
10+
- + "".join(
11+
- [session.get_row_html(current_session_id) for session in info_sessions]
12+
- )
13+
- + "</table>"
14+
- )
15+
-
16+
+ current_session = next((session for session in info_sessions if session.id == current_session_id), None)
17+
+ spark_ui_url = current_session.get_spark_ui_url()
18+
+ driver_log_url = current_session.get_driver_log_url()
19+
+
20+
+ from sagemaker_studio_dataengineering_sessions.sagemaker_base_session_manager.common.debugging_utils import get_sessions_info_html
21+
+ html = get_sessions_info_html(current_session.get_app_id(), spark_ui_url, driver_log_url)
22+
return html
23+
24+
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
--- /opt/conda/lib/python3.11/site-packages/hdijupyterutils/ipythondisplay.py
2+
+++ /opt/conda/lib/python3.11/site-packages/hdijupyterutils/ipythondisplay.py
3+
@@ -1,4 +1,4 @@
4+
-from IPython.core.display import display, HTML
5+
+from IPython.display import display, HTML
6+
from IPython import get_ipython
7+
import sys
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
--- /opt/conda/lib/python3.11/site-packages/botocore/data/endpoints.json
2+
+++ /opt/conda/lib/python3.11/site-packages/botocore/data/endpoints.json
3+
@@ -5404,6 +5404,9 @@
4+
"ap-northeast-3" : {
5+
"hostname" : "datazone.ap-northeast-3.api.aws"
6+
},
7+
+ "ap-south-1" : {
8+
+ "hostname" : "datazone.ap-south-1.api.aws"
9+
+ },
10+
"ap-south-2" : {
11+
"hostname" : "datazone.ap-south-2.api.aws"
12+
},

build_artifacts/v2/v2.6/v2.6.0/dirs/usr/local/bin/entrypoint-sagemaker-ui-jupyter-server

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,9 @@ set -e
66
# micromamba commands (e.g. using `micromamba activate` to activate environments)
77
eval "$(micromamba shell hook --shell=bash)"
88

9+
# apply patches for SMUS
10+
/etc/patches/apply_patches.sh smus && sudo rm -rf /etc/patches
11+
912
# Activate conda environment 'base', where supervisord is installed
1013
micromamba activate base
1114

0 commit comments

Comments (0)