From 7b7050ca4a2b5d06ce594009a95fc8c06edd5b35 Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Wed, 2 Apr 2025 21:19:44 +0000 Subject: [PATCH 01/15] Add unit tests for external packages --- .../test_artifacts/v2/fastapi.test.Dockerfile | 17 +++++ .../v2/ipywidgets.test.Dockerfile | 18 +++++ test/test_artifacts/v2/jinja2.test.Dockerfile | 12 +++ .../v2/jupyter-collaboration.test.Dockerfile | 13 +++- .../v2/jupyter-scheduler.test.Dockerfile | 17 +++++ .../v2/jupyter-server-proxy.test.Dockerfile | 16 ++++ .../v2/jupyterlab-git.test.Dockerfile | 14 +++- .../v2/jupyterlab-lsp.test.Dockerfile | 7 +- .../v2/langchain-aws.test.Dockerfile | 15 ++++ .../v2/langchain.test.Dockerfile | 28 +++++++ .../v2/notebook.test.Dockerfile | 13 +++- test/test_artifacts/v2/pyhive.test.Dockerfile | 17 +++++ .../v2/python-gssapi.test.Dockerfile | 19 +++++ .../v2/python-lsp-server.test.Dockerfile | 11 ++- test/test_artifacts/v2/run_pandas_tests.py | 4 +- test/test_artifacts/v2/s3fs.test.Dockerfile | 1 + .../v2/scikit-learn.test.Dockerfile | 23 ++++++ .../v2/scripts/run_fastapi_tests.sh | 9 +++ .../v2/scripts/run_ipywidgets_tests.sh | 4 + .../v2/scripts/run_jinja2_tests.sh | 74 +++++++++++++++++++ .../run_jupyter_collaboration_tests.sh | 5 ++ .../v2/scripts/run_jupyter_scheduler_tests.sh | 20 +++++ .../scripts/run_jupyter_server_proxy_tests.sh | 18 +++++ .../v2/scripts/run_jupyterlab_git_tests.sh | 9 +++ .../v2/scripts/run_jupyterlab_lsp_tests.sh | 69 +++++++++++++++++ .../v2/scripts/run_langchain_aws_tests.sh | 6 ++ .../v2/scripts/run_langchain_tests.sh | 8 ++ .../v2/scripts/run_notebook_tests.sh | 9 +++ .../v2/scripts/run_pyhive_tests.sh | 11 +++ .../v2/scripts/run_pysdk_tests.sh | 3 +- .../v2/scripts/run_python_gssapi_tests.sh | 11 +++ .../v2/scripts/run_python_lsp_server_tests.sh | 13 ++++ .../v2/scripts/run_s3fs_tests.sh | 11 ++- .../v2/scripts/run_supervisor_tests.sh | 8 ++ .../v2/scripts/run_torchvision_tests.sh | 17 +++++ .../v2/scripts/run_uvicorn_tests.sh | 9 +++ 
.../v2/scripts/run_xgboost_cpu_tests.sh | 11 +++ .../v2/scripts/run_xgboost_gpu_tests.sh | 9 +++ test/test_artifacts/v2/serve.test.Dockerfile | 6 -- .../v2/supervisor.test.Dockerfile | 21 ++++++ .../v2/tf-keras.test.Dockerfile | 31 ++++++++ .../v2/torchvision.test.Dockerfile | 17 +++++ .../test_artifacts/v2/uvicorn.test.Dockerfile | 19 +++++ .../v2/xgboost-cpu.test.Dockerfile | 17 +++++ .../v2/xgboost-gpu.test.Dockerfile | 18 +++++ test/test_dockerfile_based_harness.py | 33 +++++++-- 46 files changed, 719 insertions(+), 22 deletions(-) create mode 100644 test/test_artifacts/v2/fastapi.test.Dockerfile create mode 100644 test/test_artifacts/v2/ipywidgets.test.Dockerfile create mode 100644 test/test_artifacts/v2/jinja2.test.Dockerfile create mode 100644 test/test_artifacts/v2/jupyter-scheduler.test.Dockerfile create mode 100644 test/test_artifacts/v2/jupyter-server-proxy.test.Dockerfile create mode 100644 test/test_artifacts/v2/langchain.test.Dockerfile create mode 100644 test/test_artifacts/v2/pyhive.test.Dockerfile create mode 100644 test/test_artifacts/v2/python-gssapi.test.Dockerfile create mode 100644 test/test_artifacts/v2/scikit-learn.test.Dockerfile create mode 100644 test/test_artifacts/v2/scripts/run_fastapi_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_ipywidgets_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_jinja2_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_jupyter_collaboration_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_jupyter_scheduler_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_jupyter_server_proxy_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_jupyterlab_git_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_jupyterlab_lsp_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_langchain_aws_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_langchain_tests.sh create mode 100644 
test/test_artifacts/v2/scripts/run_notebook_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_pyhive_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_python_gssapi_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_python_lsp_server_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_supervisor_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_torchvision_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_uvicorn_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_xgboost_cpu_tests.sh create mode 100644 test/test_artifacts/v2/scripts/run_xgboost_gpu_tests.sh delete mode 100644 test/test_artifacts/v2/serve.test.Dockerfile create mode 100644 test/test_artifacts/v2/supervisor.test.Dockerfile create mode 100644 test/test_artifacts/v2/tf-keras.test.Dockerfile create mode 100644 test/test_artifacts/v2/torchvision.test.Dockerfile create mode 100644 test/test_artifacts/v2/uvicorn.test.Dockerfile create mode 100644 test/test_artifacts/v2/xgboost-cpu.test.Dockerfile create mode 100644 test/test_artifacts/v2/xgboost-gpu.test.Dockerfile diff --git a/test/test_artifacts/v2/fastapi.test.Dockerfile b/test/test_artifacts/v2/fastapi.test.Dockerfile new file mode 100644 index 00000000..5a54c11b --- /dev/null +++ b/test/test_artifacts/v2/fastapi.test.Dockerfile @@ -0,0 +1,17 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +RUN sudo apt-get update && \ + sudo apt-get install -y git && \ + git clone --recursive https://github.com/fastapi/fastapi + +WORKDIR "fastapi" + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_fastapi_tests.sh . 
+ +RUN chmod +x run_fastapi_tests.sh + +CMD ["./run_fastapi_tests.sh"] + diff --git a/test/test_artifacts/v2/ipywidgets.test.Dockerfile b/test/test_artifacts/v2/ipywidgets.test.Dockerfile new file mode 100644 index 00000000..b8a293d8 --- /dev/null +++ b/test/test_artifacts/v2/ipywidgets.test.Dockerfile @@ -0,0 +1,18 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +RUN sudo apt-get update && \ + sudo apt-get install -y git + +RUN git clone https://github.com/jupyter-widgets/ipywidgets.git + +WORKDIR "ipywidgets" + +RUN pip install jupyter nbconvert + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_ipywidgets_tests.sh ./ +RUN chmod +x run_ipywidgets_tests.sh + +CMD ["./run_ipywidgets_tests.sh"] diff --git a/test/test_artifacts/v2/jinja2.test.Dockerfile b/test/test_artifacts/v2/jinja2.test.Dockerfile new file mode 100644 index 00000000..32f6d9b5 --- /dev/null +++ b/test/test_artifacts/v2/jinja2.test.Dockerfile @@ -0,0 +1,12 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +RUN micromamba install -y -c conda-forge jinja2 + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_jinja2_tests.sh ./ + +RUN chmod +x run_jinja2_tests.sh + +CMD ["./run_jinja2_tests.sh"] \ No newline at end of file diff --git a/test/test_artifacts/v2/jupyter-collaboration.test.Dockerfile b/test/test_artifacts/v2/jupyter-collaboration.test.Dockerfile index 0b005de1..a4ed91e4 100644 --- a/test/test_artifacts/v2/jupyter-collaboration.test.Dockerfile +++ b/test/test_artifacts/v2/jupyter-collaboration.test.Dockerfile @@ -3,4 +3,15 @@ FROM $SAGEMAKER_DISTRIBUTION_IMAGE ARG MAMBA_DOCKERFILE_ACTIVATE=1 -CMD ["python", "-c", "import jupyter_collaboration; import jupyter_server_fileid; from jupyter_ydoc import YBlob; yblob = YBlob(); assert yblob.get() == b''; yblob.set(b'012'); assert yblob.get() == b'012'"] +RUN sudo apt-get update && \ + sudo apt-get install -y git + +RUN git clone 
--recursive https://github.com/jupyterlab/jupyter-collaboration.git + +WORKDIR "jupyter-collaboration" + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_jupyter_collaboration_tests.sh . + +RUN chmod +x run_jupyter_collaboration_tests.sh + +CMD ["./run_jupyter_collaboration_tests.sh"] diff --git a/test/test_artifacts/v2/jupyter-scheduler.test.Dockerfile b/test/test_artifacts/v2/jupyter-scheduler.test.Dockerfile new file mode 100644 index 00000000..0487bfa2 --- /dev/null +++ b/test/test_artifacts/v2/jupyter-scheduler.test.Dockerfile @@ -0,0 +1,17 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +RUN sudo apt-get update && \ + sudo apt-get install -y git && \ + git clone --recursive https://github.com/jupyter-server/jupyter-scheduler + +WORKDIR "jupyter-scheduler" + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_jupyter_scheduler_tests.sh . + +RUN chmod +x run_jupyter_scheduler_tests.sh + +CMD ["./run_jupyter_scheduler_tests.sh"] + diff --git a/test/test_artifacts/v2/jupyter-server-proxy.test.Dockerfile b/test/test_artifacts/v2/jupyter-server-proxy.test.Dockerfile new file mode 100644 index 00000000..f7e44e17 --- /dev/null +++ b/test/test_artifacts/v2/jupyter-server-proxy.test.Dockerfile @@ -0,0 +1,16 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +RUN sudo apt-get update && \ + sudo apt-get install -y git && \ + git clone --recursive https://github.com/jupyterhub/jupyter-server-proxy.git + +WORKDIR "jupyter-server-proxy" + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_jupyter_server_proxy_tests.sh . 
+ +RUN chmod +x run_jupyter_server_proxy_tests.sh + +CMD ["./run_jupyter_server_proxy_tests.sh"] diff --git a/test/test_artifacts/v2/jupyterlab-git.test.Dockerfile b/test/test_artifacts/v2/jupyterlab-git.test.Dockerfile index 7d5cbd96..3b9faf86 100644 --- a/test/test_artifacts/v2/jupyterlab-git.test.Dockerfile +++ b/test/test_artifacts/v2/jupyterlab-git.test.Dockerfile @@ -3,4 +3,16 @@ FROM $SAGEMAKER_DISTRIBUTION_IMAGE ARG MAMBA_DOCKERFILE_ACTIVATE=1 -CMD ["python", "-c", "import jupyterlab_git"] +RUN sudo apt-get update && \ + sudo apt-get install -y git + +RUN git clone --recursive https://github.com/jupyterlab/jupyterlab-git + +WORKDIR "jupyterlab-git" + +RUN micromamba install --freeze-installed -y -c conda-forge pytest + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_jupyterlab_git_tests.sh . +RUN chmod +x run_jupyterlab_git_tests.sh + +CMD ["./run_jupyterlab_git_tests.sh"] diff --git a/test/test_artifacts/v2/jupyterlab-lsp.test.Dockerfile b/test/test_artifacts/v2/jupyterlab-lsp.test.Dockerfile index c13df62a..21d66a8b 100644 --- a/test/test_artifacts/v2/jupyterlab-lsp.test.Dockerfile +++ b/test/test_artifacts/v2/jupyterlab-lsp.test.Dockerfile @@ -3,5 +3,8 @@ FROM $SAGEMAKER_DISTRIBUTION_IMAGE ARG MAMBA_DOCKERFILE_ACTIVATE=1 -CMD ["python", "-c", "import jupyter_lsp"] -CMD ["python", "-c", "import jupyterlab_lsp"] +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_jupyterlab_lsp_tests.sh ./ +RUN chmod +x run_jupyterlab_lsp_tests.sh + +CMD ["./run_jupyterlab_lsp_tests.sh"] + diff --git a/test/test_artifacts/v2/langchain-aws.test.Dockerfile b/test/test_artifacts/v2/langchain-aws.test.Dockerfile index 73353bb0..59d26ef3 100644 --- a/test/test_artifacts/v2/langchain-aws.test.Dockerfile +++ b/test/test_artifacts/v2/langchain-aws.test.Dockerfile @@ -3,6 +3,11 @@ FROM $SAGEMAKER_DISTRIBUTION_IMAGE ARG MAMBA_DOCKERFILE_ACTIVATE=1 +RUN sudo apt-get update && \ + sudo apt-get install -y git + +RUN git clone https://github.com/langchain-ai/langchain-aws 
/tmp/langchain-aws + CMD ["python", "-c", "import langchain_aws"] CMD ["python", "-c", "from langchain_aws import BedrockLLM"] CMD ["python", "-c", "from langchain_aws import ChatBedrock"] @@ -11,3 +16,13 @@ CMD ["python", "-c", "from langchain_aws import AmazonKendraRetriever"] CMD ["python", "-c", "from langchain_aws import AmazonKnowledgeBasesRetriever"] CMD ["python", "-c", "from langchain_aws import NeptuneAnalyticsGraph"] CMD ["python", "-c", "from langchain_aws import NeptuneGraph"] + + +WORKDIR "/tmp/langchain-aws" + +RUN pip install jupyter nbconvert + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_langchain_aws_tests.sh ./ +RUN chmod +x run_langchain_aws_tests.sh + +CMD ["./run_langchain_aws_tests.sh"] diff --git a/test/test_artifacts/v2/langchain.test.Dockerfile b/test/test_artifacts/v2/langchain.test.Dockerfile new file mode 100644 index 00000000..80309ab1 --- /dev/null +++ b/test/test_artifacts/v2/langchain.test.Dockerfile @@ -0,0 +1,28 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +ENV OPENBLAS_NUM_THREADS=1 + +RUN micromamba install -y -c conda-forge pytest + +# Create the test script +RUN echo '#!/bin/bash' > /home/sagemaker-user/run_langchain_tests.sh && \ + echo 'set -e' >> /home/sagemaker-user/run_langchain_tests.sh && \ + echo 'echo "Langchain version: $(python -c "import langchain; print(langchain.__version__)")"' >> /home/sagemaker-user/run_langchain_tests.sh && \ + echo 'langchain_path=$(python -c "import langchain; import os; print(os.path.dirname(langchain.__file__))")' >> /home/sagemaker-user/run_langchain_tests.sh && \ + echo 'test_path="${langchain_path}/tests/unit_tests"' >> /home/sagemaker-user/run_langchain_tests.sh && \ + echo 'if [ -d "$test_path" ]; then' >> /home/sagemaker-user/run_langchain_tests.sh && \ + echo ' pytest "$test_path" -v' >> /home/sagemaker-user/run_langchain_tests.sh && \ + echo 'else' >> /home/sagemaker-user/run_langchain_tests.sh && \ + echo ' 
echo "Unit tests directory not found at $test_path"' >> /home/sagemaker-user/run_langchain_tests.sh && \ + echo ' echo "Available files in langchain directory:"' >> /home/sagemaker-user/run_langchain_tests.sh && \ + echo ' find "$langchain_path" -name "*.py"' >> /home/sagemaker-user/run_langchain_tests.sh && \ + echo 'fi' >> /home/sagemaker-user/run_langchain_tests.sh + +RUN chmod +x /home/sagemaker-user/run_langchain_tests.sh + +WORKDIR /home/sagemaker-user + +CMD ["./run_langchain_tests.sh"] \ No newline at end of file diff --git a/test/test_artifacts/v2/notebook.test.Dockerfile b/test/test_artifacts/v2/notebook.test.Dockerfile index 9afb9e08..f0bb8576 100644 --- a/test/test_artifacts/v2/notebook.test.Dockerfile +++ b/test/test_artifacts/v2/notebook.test.Dockerfile @@ -3,4 +3,15 @@ FROM $SAGEMAKER_DISTRIBUTION_IMAGE ARG MAMBA_DOCKERFILE_ACTIVATE=1 -CMD ["python", "-c", "import notebook"] +RUN sudo apt-get update && \ + sudo apt-get install -y git && \ + git clone --recursive https://github.com/jupyter/notebook + +WORKDIR "notebook" + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_notebook_tests.sh . + +RUN chmod +x run_notebook_tests.sh + +CMD ["./run_notebook_tests.sh"] + diff --git a/test/test_artifacts/v2/pyhive.test.Dockerfile b/test/test_artifacts/v2/pyhive.test.Dockerfile new file mode 100644 index 00000000..8259c098 --- /dev/null +++ b/test/test_artifacts/v2/pyhive.test.Dockerfile @@ -0,0 +1,17 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +RUN sudo apt-get update && \ + sudo apt-get install -y git && \ + git clone --recursive https://github.com/dropbox/PyHive.git + +WORKDIR "PyHive" + +RUN micromamba install --freeze-installed -y -c conda-forge pytest + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_pyhive_tests.sh . 
+RUN chmod +x run_pyhive_tests.sh + +CMD ["./run_pyhive_tests.sh"] diff --git a/test/test_artifacts/v2/python-gssapi.test.Dockerfile b/test/test_artifacts/v2/python-gssapi.test.Dockerfile new file mode 100644 index 00000000..b4f17f61 --- /dev/null +++ b/test/test_artifacts/v2/python-gssapi.test.Dockerfile @@ -0,0 +1,19 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +RUN micromamba install -y --name base -c conda-forge pytest pytest-cov parameterized && \ + pip install k5test + +# run tests in the home directory +RUN echo '#!/bin/bash' > /home/sagemaker-user/run_python_gssapi_tests.sh && \ + echo 'import_path=$(python -c "import gssapi; print(gssapi.__file__)")' >> /home/sagemaker-user/run_python_gssapi_tests.sh && \ + echo 'gssapi_path=$(dirname "$import_path")' >> /home/sagemaker-user/run_python_gssapi_tests.sh && \ + echo 'pytest -v "$gssapi_path" "$@"' >> /home/sagemaker-user/run_python_gssapi_tests.sh + +RUN chmod +x /home/sagemaker-user/run_python_gssapi_tests.sh + +WORKDIR /home/sagemaker-user + +CMD ["./run_python_gssapi_tests.sh"] diff --git a/test/test_artifacts/v2/python-lsp-server.test.Dockerfile b/test/test_artifacts/v2/python-lsp-server.test.Dockerfile index eb73f0e0..3d1946d5 100644 --- a/test/test_artifacts/v2/python-lsp-server.test.Dockerfile +++ b/test/test_artifacts/v2/python-lsp-server.test.Dockerfile @@ -3,5 +3,12 @@ FROM $SAGEMAKER_DISTRIBUTION_IMAGE ARG MAMBA_DOCKERFILE_ACTIVATE=1 -# "Confirm that installation succeeded" by running this - https://github.com/python-lsp/python-lsp-server#installation -CMD ["pylsp", "--help"] +RUN sudo apt-get update && \ + sudo apt-get install -y git && \ + git clone --recursive https://github.com/python-lsp/python-lsp-server + +WORKDIR "python-lsp" + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_python_lsp_server_tests.sh . 
+ +RUN chmod +x run_python_lsp_server_tests.sh diff --git a/test/test_artifacts/v2/run_pandas_tests.py b/test/test_artifacts/v2/run_pandas_tests.py index a286443f..ee6dc383 100644 --- a/test/test_artifacts/v2/run_pandas_tests.py +++ b/test/test_artifacts/v2/run_pandas_tests.py @@ -24,7 +24,9 @@ "-m", "(not slow and not network and not db and not clipboard)", "-k", - "(not test_network and not s3 and not test_plain_axes)", + "(not test_network and not s3 and not test_plain_axes and not test_xsqlite_execute_closed_connection and not test_sql)", + "-W", + "ignore::PendingDeprecationWarning", "--no-strict-data-files", "--ignore", "pandas/tests/frame/test_arithmetic.py::TestFrameFlexArithmetic::test_floordiv_axis0_numexpr_path", diff --git a/test/test_artifacts/v2/s3fs.test.Dockerfile b/test/test_artifacts/v2/s3fs.test.Dockerfile index 8f3a0c68..5e182ad1 100644 --- a/test/test_artifacts/v2/s3fs.test.Dockerfile +++ b/test/test_artifacts/v2/s3fs.test.Dockerfile @@ -5,6 +5,7 @@ ARG MAMBA_DOCKERFILE_ACTIVATE=1 RUN git clone --recursive https://github.com/fsspec/s3fs.git WORKDIR "s3fs" + COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_s3fs_tests.sh . 
RUN chmod +x run_s3fs_tests.sh CMD ["./run_s3fs_tests.sh"] diff --git a/test/test_artifacts/v2/scikit-learn.test.Dockerfile b/test/test_artifacts/v2/scikit-learn.test.Dockerfile new file mode 100644 index 00000000..bb4e6a28 --- /dev/null +++ b/test/test_artifacts/v2/scikit-learn.test.Dockerfile @@ -0,0 +1,23 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +ENV OPENBLAS_NUM_THREADS=1 + +RUN micromamba install -y --name base -c conda-forge pytest pytest-cov + +RUN echo '#!/bin/bash' > /home/sagemaker-user/run_scikit_learn_tests.sh && \ + echo 'import_path=$(python -c "import sklearn; print(sklearn.__file__)")' >> /home/sagemaker-user/run_scikit_learn_tests.sh && \ + echo 'sklearn_path=$(dirname "$import_path")' >> /home/sagemaker-user/run_scikit_learn_tests.sh && \ + echo 'test_files=("test_base.py" "test_init.py")' >> /home/sagemaker-user/run_scikit_learn_tests.sh && \ + echo 'for test_file in "${test_files[@]}"; do' >> /home/sagemaker-user/run_scikit_learn_tests.sh && \ + echo ' echo "Running tests in $test_file"' >> /home/sagemaker-user/run_scikit_learn_tests.sh && \ + echo ' pytest -v "$sklearn_path/tests/$test_file"' >> /home/sagemaker-user/run_scikit_learn_tests.sh && \ + echo 'done' >> /home/sagemaker-user/run_scikit_learn_tests.sh + +RUN chmod +x /home/sagemaker-user/run_scikit_learn_tests.sh + +WORKDIR /home/sagemaker-user + +CMD ["./run_scikit_learn_tests.sh"] \ No newline at end of file diff --git a/test/test_artifacts/v2/scripts/run_fastapi_tests.sh b/test/test_artifacts/v2/scripts/run_fastapi_tests.sh new file mode 100644 index 00000000..85a372d1 --- /dev/null +++ b/test/test_artifacts/v2/scripts/run_fastapi_tests.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +pysdk_version=$(micromamba list | grep fastapi | tr -s ' ' | cut -d ' ' -f 3) + +git checkout tags/v$pysdk_version + +pip install -r requirements-tests.txt + +pytest tests/ -v -k "not test_tutorial" diff --git 
a/test/test_artifacts/v2/scripts/run_ipywidgets_tests.sh b/test/test_artifacts/v2/scripts/run_ipywidgets_tests.sh new file mode 100644 index 00000000..ba7afda2 --- /dev/null +++ b/test/test_artifacts/v2/scripts/run_ipywidgets_tests.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +jupyter nbconvert --execute --to python tests/test_borders.ipynb +jupyter nbconvert --execute --to python tests/test_sanitizer.ipynb diff --git a/test/test_artifacts/v2/scripts/run_jinja2_tests.sh b/test/test_artifacts/v2/scripts/run_jinja2_tests.sh new file mode 100644 index 00000000..e0cefa8a --- /dev/null +++ b/test/test_artifacts/v2/scripts/run_jinja2_tests.sh @@ -0,0 +1,74 @@ +#!/bin/bash + +set -e + +# Test 1: Basic template +python -c " +from jinja2 import Template +template = Template('Hello {{ name }}!') +result = template.render(name='John') +assert result == 'Hello John!', f'Expected \"Hello John!\", got \"{result}\"' +" + +# Test 2: Conditional statements +python -c " +from jinja2 import Template +template = Template('{% if user %}Hello, {{ user }}!{% else %}Hello, stranger!{% endif %}') +result1 = template.render(user='Alice') +result2 = template.render(user=None) +assert result1 == 'Hello, Alice!', f'Expected \"Hello, Alice!\", got \"{result1}\"' +assert result2 == 'Hello, stranger!', f'Expected \"Hello, stranger!\", got \"{result2}\"' +" + +# Test 3: Loops +python -c " +from jinja2 import Template +template = Template('{% for item in items %}{{ item }} {% endfor %}') +result = template.render(items=['apple', 'banana', 'cherry']) +assert result == 'apple banana cherry ', f'Expected \"apple banana cherry \", got \"{result}\"' +" + +# Test 4: Filters +python -c " +from jinja2 import Template +template = Template('{{ name|upper }}') +result = template.render(name='john') +assert result == 'JOHN', f'Expected \"JOHN\", got \"{result}\"' +" + +# Test 5: File system loader +echo "

Hello, {{ name }}!

" > /tmp/test_template.html +python -c " +from jinja2 import Environment, FileSystemLoader +import os +env = Environment(loader=FileSystemLoader('/tmp')) +template = env.get_template('test_template.html') +result = template.render(name='World') +assert result == '

Hello, World!

', f'Expected \"

Hello, World!

\", got \"{result}\"' +" +rm /tmp/test_template.html + +python -c " +from jinja2 import Template, TemplateSyntaxError +try: + Template('{% if %}') + assert False, 'Should have raised TemplateSyntaxError' +except TemplateSyntaxError: + print('Error handling test passed.') +" + +# Test 7: Template inheritance +# base template +echo "{% block content %}Default content{% endblock %}" > /tmp/base.html +# child template +echo "{% extends 'base.html' %}{% block content %}Child content{% endblock %}" > /tmp/child.html + +python -c " +from jinja2 import Environment, FileSystemLoader +env = Environment(loader=FileSystemLoader('/tmp')) +template = env.get_template('child.html') +result = template.render() +assert result == 'Child content', f'Expected \"Child content\", got \"{result}\"' +" + +rm /tmp/base.html /tmp/child.html diff --git a/test/test_artifacts/v2/scripts/run_jupyter_collaboration_tests.sh b/test/test_artifacts/v2/scripts/run_jupyter_collaboration_tests.sh new file mode 100644 index 00000000..9775ffc8 --- /dev/null +++ b/test/test_artifacts/v2/scripts/run_jupyter_collaboration_tests.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +pip install ".[test]" + +pytest tests/ -v diff --git a/test/test_artifacts/v2/scripts/run_jupyter_scheduler_tests.sh b/test/test_artifacts/v2/scripts/run_jupyter_scheduler_tests.sh new file mode 100644 index 00000000..90db6be8 --- /dev/null +++ b/test/test_artifacts/v2/scripts/run_jupyter_scheduler_tests.sh @@ -0,0 +1,20 @@ +#!/bin/bash +pysdk_version=$(micromamba list | grep jupyter-scheduler | tr -s ' ' | cut -d ' ' -f 3) + +git checkout tags/v$pysdk_version + + +pip install ".[test]" + +test_files=( + "jupyter_scheduler/tests/test_execution_manager.py" + "jupyter_scheduler/tests/test_handlers.py" + "jupyter_scheduler/tests/test_job_files_manager.py" + "jupyter_scheduler/tests/test_orm.py" + "jupyter_scheduler/tests/test_scheduler.py" +) + +for test_file in "${test_files[@]}"; do + echo "Running tests in $test_file" + pytest -v "$test_file" +done 
\ No newline at end of file diff --git a/test/test_artifacts/v2/scripts/run_jupyter_server_proxy_tests.sh b/test/test_artifacts/v2/scripts/run_jupyter_server_proxy_tests.sh new file mode 100644 index 00000000..8f14c50e --- /dev/null +++ b/test/test_artifacts/v2/scripts/run_jupyter_server_proxy_tests.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +pysdk_version=$(micromamba list | grep jupyter-server-proxy | tr -s ' ' | cut -d ' ' -f 3) + +git checkout tags/v$pysdk_version + +pip install ".[test]" + +test_files=( + "tests/test_config.py" + "tests/test_proxies.py" + "tests/test_utils.py" +) + +# Run each test file +for test_file in "${test_files[@]}"; do + pytest -v "$test_file" +done diff --git a/test/test_artifacts/v2/scripts/run_jupyterlab_git_tests.sh b/test/test_artifacts/v2/scripts/run_jupyterlab_git_tests.sh new file mode 100644 index 00000000..b2f4fa2a --- /dev/null +++ b/test/test_artifacts/v2/scripts/run_jupyterlab_git_tests.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +pysdk_version=$(micromamba list | grep jupyter-git | tr -s ' ' | cut -d ' ' -f 3) + +git checkout tags/v$pysdk_version + +pip install ".[test]" + +pytest -v jupyterlab_git/tests/ diff --git a/test/test_artifacts/v2/scripts/run_jupyterlab_lsp_tests.sh b/test/test_artifacts/v2/scripts/run_jupyterlab_lsp_tests.sh new file mode 100644 index 00000000..b7f5d652 --- /dev/null +++ b/test/test_artifacts/v2/scripts/run_jupyterlab_lsp_tests.sh @@ -0,0 +1,69 @@ +#!/bin/bash + +#imports test +python -c "import jupyter_lsp" || { echo "jupyter_lsp import test failed"; exit 1; } +python -c "import jupyterlab_lsp" || { echo "jupyterlab_lsp import test failed"; exit 1; } + +TEST_DIR=$(mktemp -d) +cd "$TEST_DIR" + +python - < test.py +def hello_world(): + print("Hello, World!") + return None +EOF + +# LSP functionality with the created file +python - < /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo 'set -e' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo 'import_path=$(python -c "import tf_keras; 
print(tf_keras.__file__)")' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo 'tf_keras_path=$(dirname "$import_path")' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo 'echo "tf-keras path: $tf_keras_path"' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo 'test_file="keras_doctest.py"' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo 'test_path=$(find "$tf_keras_path" -name "$test_file" -print -quit)' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo 'if [ -n "$test_path" ]; then' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo ' echo "Running test: $test_path"' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo ' pytest -v "$test_path"' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo 'else' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo ' echo "Test file not found: $test_file"' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo ' echo "Available files in tf_keras:"' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo ' find "$tf_keras_path" -name "*.py"' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo ' exit 1' >> /home/sagemaker-user/run_tf_keras_tests.sh && \ + echo 'fi' >> /home/sagemaker-user/run_tf_keras_tests.sh + +RUN chmod +x /home/sagemaker-user/run_tf_keras_tests.sh + +WORKDIR /home/sagemaker-user + +CMD ["./run_tf_keras_tests.sh"] \ No newline at end of file diff --git a/test/test_artifacts/v2/torchvision.test.Dockerfile b/test/test_artifacts/v2/torchvision.test.Dockerfile new file mode 100644 index 00000000..daa5006b --- /dev/null +++ b/test/test_artifacts/v2/torchvision.test.Dockerfile @@ -0,0 +1,17 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +RUN sudo apt-get update && \ + sudo apt-get install -y git && \ + git clone --recursive https://github.com/pytorch/vision + +WORKDIR "vision" + +RUN micromamba install -y -c conda-forge pytest + +COPY --chown=$MAMBA_USER:$MAMBA_USER 
scripts/run_torchvision_tests.sh . +RUN chmod +x run_torchvision_tests.sh +CMD ["./run_torchvision_tests.sh"] + diff --git a/test/test_artifacts/v2/uvicorn.test.Dockerfile b/test/test_artifacts/v2/uvicorn.test.Dockerfile new file mode 100644 index 00000000..409c3bf0 --- /dev/null +++ b/test/test_artifacts/v2/uvicorn.test.Dockerfile @@ -0,0 +1,19 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +RUN sudo apt-get update && \ + sudo apt-get install -y git && \ + git clone --recursive https://github.com/encode/uvicorn + +WORKDIR "uvicorn" + +RUN micromamba install --freeze-installed -y -c conda-forge pytest + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_uvicorn_tests.sh . + +RUN chmod +x run_uvicorn_tests.sh + +CMD ["./run_uvicorn_tests.sh"] + diff --git a/test/test_artifacts/v2/xgboost-cpu.test.Dockerfile b/test/test_artifacts/v2/xgboost-cpu.test.Dockerfile new file mode 100644 index 00000000..54694360 --- /dev/null +++ b/test/test_artifacts/v2/xgboost-cpu.test.Dockerfile @@ -0,0 +1,17 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +RUN sudo apt-get update && \ + sudo apt-get install -y git && \ + git clone --recursive https://github.com/dmlc/xgboost + +WORKDIR "xgboost" + +RUN micromamba install --freeze-installed -y -c conda-forge xgboost hypothesis loky pytest pytest-timeout + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_xgboost_cpu_tests.sh . 
+RUN chmod +x run_xgboost_cpu_tests.sh + +CMD ["./run_xgboost_cpu_tests.sh"] diff --git a/test/test_artifacts/v2/xgboost-gpu.test.Dockerfile b/test/test_artifacts/v2/xgboost-gpu.test.Dockerfile new file mode 100644 index 00000000..648b83c0 --- /dev/null +++ b/test/test_artifacts/v2/xgboost-gpu.test.Dockerfile @@ -0,0 +1,18 @@ +ARG SAGEMAKER_DISTRIBUTION_IMAGE +FROM $SAGEMAKER_DISTRIBUTION_IMAGE + +ARG MAMBA_DOCKERFILE_ACTIVATE=1 + +RUN sudo apt-get update && \ + sudo apt-get install -y git && \ + git clone --recursive https://github.com/dmlc/xgboost + +WORKDIR "xgboost" + +RUN micromamba install --freeze-installed -y -c conda-forge xgboost hypothesis loky pytest pytest-timeout + +COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_xgboost_gpu_tests.sh . + +RUN chmod +x run_xgboost_gpu_tests.sh + +CMD ["./run_xgboost_gpu_tests.sh"] diff --git a/test/test_dockerfile_based_harness.py b/test/test_dockerfile_based_harness.py index 4711dd58..a3dc36b2 100644 --- a/test/test_dockerfile_based_harness.py +++ b/test/test_dockerfile_based_harness.py @@ -37,7 +37,7 @@ ("jupyter-collaboration.test.Dockerfile", ["jupyter-collaboration"]), ("jupyter-dash.test.Dockerfile", ["jupyter-dash"]), ("jupyterlab-lsp.test.Dockerfile", ["jupyterlab-lsp"]), - ("python-lsp-server.test.Dockerfile", ["jupyter-lsp-server"]), + ("python-lsp-server.test.Dockerfile", ["python-lsp-server"]), ("sagemaker-code-editor.test.Dockerfile", ["sagemaker-code-editor"]), ("notebook.test.Dockerfile", ["notebook"]), ("glue-sessions.test.Dockerfile", ["aws-glue-sessions"]), @@ -56,7 +56,7 @@ "amazon_sagemaker_sql_editor.test.Dockerfile", ["amazon_sagemaker_sql_editor"], ), - ("serve.test.Dockerfile", ["langchain"]), + ("langchain.test.Dockerfile", ["langchain"]), ("langchain-aws.test.Dockerfile", ["langchain-aws"]), ("mlflow.test.Dockerfile", ["mlflow"]), ( @@ -67,8 +67,16 @@ ("s3fs.test.Dockerfile", ["s3fs"]), ("seaborn.test.Dockerfile", ["seaborn"]), ("sagemaker-recovery-mode.test.Dockerfile", 
["sagemaker-jupyterlab-extension"]), - ("s3fs.test.Dockerfile", ["s3fs"]), - ("seaborn.test.Dockerfile", ["seaborn"]), + ("jinja2.test.Dockerfile", ["jinja2"]), + ("uvicorn.test.Dockerfile", ["uvicorn"]), + ("fastapi.test.Dockerfile", ["fastapi"]), + ("scikit-learn.test.Dockerfile", ["scikit-learn"]), + ("jupyter-scheduler.test.Dockerfile", ["jupyter-scheduler"]), + ("jupyter-server-proxy.test.Dockerfile", ["jupyter-server-proxy"]), + ("ipywidgets.test.Dockerfile", ["ipywidgets"]), + ("supervisor.test.Dockerfile", ["supervisor"]), + ("python-gssapi.test.Dockerfile", ["python-gssapi"]), + ("xgboost-cpu.test.Dockerfile",["xgboost"]), ], ) def test_dockerfiles_for_cpu( @@ -100,10 +108,11 @@ def test_dockerfiles_for_cpu( ("pytorch.examples.Dockerfile", ["pytorch"]), ("tensorflow.examples.Dockerfile", ["tensorflow"]), ("glue-sessions.test.Dockerfile", ["aws-glue-sessions"]), + ("scikit-learn.test.Dockerfile", ["scikit-learn"]), ("jupyter-ai.test.Dockerfile", ["jupyter-ai"]), ("jupyter-dash.test.Dockerfile", ["jupyter-dash"]), ("jupyterlab-lsp.test.Dockerfile", ["jupyterlab-lsp"]), - ("python-lsp-server.test.Dockerfile", ["jupyter-lsp-server"]), + ("python-lsp-server.test.Dockerfile", ["python-lsp-server"]), ("sagemaker-code-editor.test.Dockerfile", ["sagemaker-code-editor"]), ("notebook.test.Dockerfile", ["notebook"]), ("glue-sessions.test.Dockerfile", ["aws-glue-sessions"]), @@ -124,6 +133,7 @@ def test_dockerfiles_for_cpu( ), ("serve.test.Dockerfile", ["langchain"]), ("langchain-aws.test.Dockerfile", ["langchain-aws"]), + ("langchain.test.Dockerfile", ["langchain"]), ("mlflow.test.Dockerfile", ["mlflow"]), ("sagemaker-mlflow.test.Dockerfile", ["sagemaker-mlflow"]), ( @@ -135,8 +145,19 @@ def test_dockerfiles_for_cpu( ("s3fs.test.Dockerfile", ["s3fs"]), ("seaborn.test.Dockerfile", ["seaborn"]), ("sagemaker-recovery-mode.test.Dockerfile", ["sagemaker-jupyterlab-extension"]), - ("s3fs.test.Dockerfile", ["s3fs"]), ("seaborn.test.Dockerfile", ["seaborn"]), + 
("jinja2.test.Dockerfile", ["jinja2"]), + ("uvicorn.test.Dockerfile", ["uvicorn"]), + ("fastapi.test.Dockerfile", ["fastapi"]), + ("torchvision.test.Dockerfile", ["torchvision"]), + ("jupyter-scheduler.test.Dockerfile", ["jupyter-scheduler"]), + ("ipywidgets.test.Dockerfile", ["ipywidgets"]), + ("supervisor.test.Dockerfile", ["supervisor"]), + ("tf-keras.test.Dockerfile",["tf-keras"]), + ("xgboost-gpu.test.Dockerfile", ["xgboost"]), + ("jupyter-collaboration.test.Dockerfile", ["jupyter-collaboration"]), + ("jupyter-server-proxy.test.Dockerfile", ["jupyter-server-proxy"]), + ], ) def test_dockerfiles_for_gpu( From 86e04debceeba64e767add5204c6e0920503868d Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Wed, 2 Apr 2025 22:33:10 +0000 Subject: [PATCH 02/15] fix version names in shell files **Description** **Motivation** **Testing Done** **Backwards Compatibility Criteria (if any)** --- test/test_artifacts/v2/scripts/run_fastapi_tests.sh | 4 ++-- test/test_artifacts/v2/scripts/run_jupyter_scheduler_tests.sh | 4 ++-- .../v2/scripts/run_jupyter_server_proxy_tests.sh | 4 ++-- test/test_artifacts/v2/scripts/run_jupyterlab_git_tests.sh | 4 ++-- test/test_artifacts/v2/scripts/run_notebook_tests.sh | 4 ++-- test/test_artifacts/v2/scripts/run_pyhive_tests.sh | 4 ++-- test/test_artifacts/v2/scripts/run_python_lsp_server_tests.sh | 4 ++-- test/test_artifacts/v2/scripts/run_supervisor_tests.sh | 4 ++-- test/test_artifacts/v2/scripts/run_torchvision_tests.sh | 4 ++-- test/test_artifacts/v2/scripts/run_uvicorn_tests.sh | 4 ++-- test/test_artifacts/v2/scripts/run_xgboost_cpu_tests.sh | 4 ++-- test/test_artifacts/v2/scripts/run_xgboost_gpu_tests.sh | 4 ++-- 12 files changed, 24 insertions(+), 24 deletions(-) diff --git a/test/test_artifacts/v2/scripts/run_fastapi_tests.sh b/test/test_artifacts/v2/scripts/run_fastapi_tests.sh index 85a372d1..960ebf8f 100644 --- a/test/test_artifacts/v2/scripts/run_fastapi_tests.sh +++ b/test/test_artifacts/v2/scripts/run_fastapi_tests.sh @@ -1,8 
+1,8 @@ #!/bin/bash -pysdk_version=$(micromamba list | grep fastapi | tr -s ' ' | cut -d ' ' -f 3) +fastapi_version=$(micromamba list | grep fastapi | tr -s ' ' | cut -d ' ' -f 3) -git checkout tags/v$pysdk_version +git checkout tags/v$fastapi_version pip install -r requirements-tests.txt diff --git a/test/test_artifacts/v2/scripts/run_jupyter_scheduler_tests.sh b/test/test_artifacts/v2/scripts/run_jupyter_scheduler_tests.sh index 90db6be8..7aa3b1c2 100644 --- a/test/test_artifacts/v2/scripts/run_jupyter_scheduler_tests.sh +++ b/test/test_artifacts/v2/scripts/run_jupyter_scheduler_tests.sh @@ -1,7 +1,7 @@ #!/bin/bash -pysdk_version=$(micromamba list | grep jupyter-scheduler | tr -s ' ' | cut -d ' ' -f 3) +jupyter_scheduler_version=$(micromamba list | grep jupyter-scheduler | tr -s ' ' | cut -d ' ' -f 3) -git checkout tags/v$pysdk_version +git checkout tags/v$jupyter_scheduler_version pip install ".[test]" diff --git a/test/test_artifacts/v2/scripts/run_jupyter_server_proxy_tests.sh b/test/test_artifacts/v2/scripts/run_jupyter_server_proxy_tests.sh index 8f14c50e..7e76ab57 100644 --- a/test/test_artifacts/v2/scripts/run_jupyter_server_proxy_tests.sh +++ b/test/test_artifacts/v2/scripts/run_jupyter_server_proxy_tests.sh @@ -1,8 +1,8 @@ #!/bin/bash -pysdk_version=$(micromamba list | grep jupyter-server-proxy | tr -s ' ' | cut -d ' ' -f 3) +jupyter_server_proxy_version=$(micromamba list | grep jupyter-server-proxy | tr -s ' ' | cut -d ' ' -f 3) -git checkout tags/v$pysdk_version +git checkout tags/v$jupyter_server_proxy_version pip install ".[test]" diff --git a/test/test_artifacts/v2/scripts/run_jupyterlab_git_tests.sh b/test/test_artifacts/v2/scripts/run_jupyterlab_git_tests.sh index b2f4fa2a..7022affe 100644 --- a/test/test_artifacts/v2/scripts/run_jupyterlab_git_tests.sh +++ b/test/test_artifacts/v2/scripts/run_jupyterlab_git_tests.sh @@ -1,8 +1,8 @@ #!/bin/bash -pysdk_version=$(micromamba list | grep jupyter-git | tr -s ' ' | cut -d ' ' -f 3) 
+jupyter_git_version=$(micromamba list | grep jupyter-git | tr -s ' ' | cut -d ' ' -f 3) -git checkout tags/v$pysdk_version +git checkout tags/v$jupyter_git_version pip install ".[test]" diff --git a/test/test_artifacts/v2/scripts/run_notebook_tests.sh b/test/test_artifacts/v2/scripts/run_notebook_tests.sh index 004a3a81..2e82c237 100644 --- a/test/test_artifacts/v2/scripts/run_notebook_tests.sh +++ b/test/test_artifacts/v2/scripts/run_notebook_tests.sh @@ -1,8 +1,8 @@ #!/bin/bash -pysdk_version=$(micromamba list | grep notebook | tr -s ' ' | cut -d ' ' -f 3) +notebook_version=$(micromamba list | grep notebook | tr -s ' ' | cut -d ' ' -f 3) -git checkout tags/v$pysdk_version +git checkout tags/v$notebook_version pip install ".[test]" diff --git a/test/test_artifacts/v2/scripts/run_pyhive_tests.sh b/test/test_artifacts/v2/scripts/run_pyhive_tests.sh index a4b2d4d1..9dd30a29 100644 --- a/test/test_artifacts/v2/scripts/run_pyhive_tests.sh +++ b/test/test_artifacts/v2/scripts/run_pyhive_tests.sh @@ -1,8 +1,8 @@ #!/bin/bash -pysdk_version=$(micromamba list | grep pyhive | tr -s ' ' | cut -d ' ' -f 3) +pyhive_version=$(micromamba list | grep pyhive | tr -s ' ' | cut -d ' ' -f 3) -git checkout tags/v$pysdk_version +git checkout tags/v$pyhive_version ./scripts/make_test_tables.sh diff --git a/test/test_artifacts/v2/scripts/run_python_lsp_server_tests.sh b/test/test_artifacts/v2/scripts/run_python_lsp_server_tests.sh index 723898eb..0fcb39a0 100644 --- a/test/test_artifacts/v2/scripts/run_python_lsp_server_tests.sh +++ b/test/test_artifacts/v2/scripts/run_python_lsp_server_tests.sh @@ -3,9 +3,9 @@ # "Confirm that installation succeeded" by running this - https://github.com/python-lsp/python-lsp-server#installation pylsp --help -pysdk_version=$(micromamba list | grep python-lsp-server | tr -s ' ' | cut -d ' ' -f 3) +python_lsp_server_version=$(micromamba list | grep python-lsp-server | tr -s ' ' | cut -d ' ' -f 3) -git checkout tags/v$pysdk_version +git checkout 
tags/v$python_lsp_server_version pip install ".[test]" diff --git a/test/test_artifacts/v2/scripts/run_supervisor_tests.sh b/test/test_artifacts/v2/scripts/run_supervisor_tests.sh index d8797cca..88692206 100644 --- a/test/test_artifacts/v2/scripts/run_supervisor_tests.sh +++ b/test/test_artifacts/v2/scripts/run_supervisor_tests.sh @@ -1,8 +1,8 @@ #!/bin/bash -pysdk_version=$(micromamba list | grep supervisor | tr -s ' ' | cut -d ' ' -f 3) +supervisor_version=$(micromamba list | grep supervisor | tr -s ' ' | cut -d ' ' -f 3) -git checkout tags/v$pysdk_version +git checkout tags/v$supervisor_version pytest -v supervisor/tests/ diff --git a/test/test_artifacts/v2/scripts/run_torchvision_tests.sh b/test/test_artifacts/v2/scripts/run_torchvision_tests.sh index 640208a6..6f844fa8 100644 --- a/test/test_artifacts/v2/scripts/run_torchvision_tests.sh +++ b/test/test_artifacts/v2/scripts/run_torchvision_tests.sh @@ -2,9 +2,9 @@ set -e -pysdk_version=$(micromamba list | grep torchvision | tr -s ' ' | cut -d ' ' -f 3) +torchvision_version=$(micromamba list | grep torchvision | tr -s ' ' | cut -d ' ' -f 3) -git checkout tags/v$pysdk_version +git checkout tags/v$torchvision_version test_files=( "test/test_utils.py" diff --git a/test/test_artifacts/v2/scripts/run_uvicorn_tests.sh b/test/test_artifacts/v2/scripts/run_uvicorn_tests.sh index dd779f14..09ba9cc4 100644 --- a/test/test_artifacts/v2/scripts/run_uvicorn_tests.sh +++ b/test/test_artifacts/v2/scripts/run_uvicorn_tests.sh @@ -1,8 +1,8 @@ #!/bin/bash -pysdk_version=$(micromamba list | grep uvicorn | tr -s ' ' | cut -d ' ' -f 3) +uvicorn_version=$(micromamba list | grep uvicorn | tr -s ' ' | cut -d ' ' -f 3) -git checkout tags/v$pysdk_version +git checkout tags/v$uvicorn_version pip install -r requirements.txt diff --git a/test/test_artifacts/v2/scripts/run_xgboost_cpu_tests.sh b/test/test_artifacts/v2/scripts/run_xgboost_cpu_tests.sh index 16c6582b..e57f0c19 100644 --- 
a/test/test_artifacts/v2/scripts/run_xgboost_cpu_tests.sh +++ b/test/test_artifacts/v2/scripts/run_xgboost_cpu_tests.sh @@ -1,8 +1,8 @@ #!/bin/bash -pysdk_version=$(micromamba list | grep py-xgboost | tr -s ' ' | cut -d ' ' -f 3) +xgboost_cpu_version=$(micromamba list | grep py-xgboost | tr -s ' ' | cut -d ' ' -f 3) -git checkout tags/v$pysdk_version +git checkout tags/v$xgboost_cpu_version python -m pytest tests/python/test_config.py -v -k "not test_nthread" python -m pytest tests/python/test_demos.py -v diff --git a/test/test_artifacts/v2/scripts/run_xgboost_gpu_tests.sh b/test/test_artifacts/v2/scripts/run_xgboost_gpu_tests.sh index 399c2ce8..5a4f0da5 100644 --- a/test/test_artifacts/v2/scripts/run_xgboost_gpu_tests.sh +++ b/test/test_artifacts/v2/scripts/run_xgboost_gpu_tests.sh @@ -1,8 +1,8 @@ #!/bin/bash -pysdk_version=$(micromamba list | grep py-xgboost | tr -s ' ' | cut -d ' ' -f 3) +xgboost_gpu_version=$(micromamba list | grep py-xgboost | tr -s ' ' | cut -d ' ' -f 3) -git checkout tags/v$pysdk_version +git checkout tags/v$xgboost_gpu_version python -m pytest tests/python-gpu/test_gpu_basic_models.py -v python -m pytest tests/python-gpu/test_gpu_data_iterator.py -v From dadfa423b3d2ccfb764a7876ae522d04213de47e Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Mon, 7 Apr 2025 11:55:20 -0700 Subject: [PATCH 03/15] Update test_dockerfile_based_harness.py Removed python-gssapi as it is failing which needs more robust kerberos setup --- test/test_dockerfile_based_harness.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/test/test_dockerfile_based_harness.py b/test/test_dockerfile_based_harness.py index a3dc36b2..c03e3fdf 100644 --- a/test/test_dockerfile_based_harness.py +++ b/test/test_dockerfile_based_harness.py @@ -75,7 +75,6 @@ ("jupyter-server-proxy.test.Dockerfile", ["jupyter-server-proxy"]), ("ipywidgets.test.Dockerfile", ["ipywidgets"]), ("supervisor.test.Dockerfile", ["supervisor"]), - ("python-gssapi.test.Dockerfile", 
["python-gssapi"]), ("xgboost-cpu.test.Dockerfile",["xgboost"]), ], ) @@ -131,9 +130,8 @@ def test_dockerfiles_for_cpu( "amazon_sagemaker_sql_editor.test.Dockerfile", ["amazon_sagemaker_sql_editor"], ), - ("serve.test.Dockerfile", ["langchain"]), - ("langchain-aws.test.Dockerfile", ["langchain-aws"]), ("langchain.test.Dockerfile", ["langchain"]), + ("langchain-aws.test.Dockerfile", ["langchain-aws"]), ("mlflow.test.Dockerfile", ["mlflow"]), ("sagemaker-mlflow.test.Dockerfile", ["sagemaker-mlflow"]), ( From 4953a524817bc209d6095a522a5d80680c917ce0 Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Mon, 7 Apr 2025 11:56:46 -0700 Subject: [PATCH 04/15] Delete test/test_artifacts/v2/langchain-aws.test.Dockerfile cannot be tested apart from imports which was already there due to credentials requirement --- .../v2/langchain-aws.test.Dockerfile | 28 ------------------- 1 file changed, 28 deletions(-) delete mode 100644 test/test_artifacts/v2/langchain-aws.test.Dockerfile diff --git a/test/test_artifacts/v2/langchain-aws.test.Dockerfile b/test/test_artifacts/v2/langchain-aws.test.Dockerfile deleted file mode 100644 index 59d26ef3..00000000 --- a/test/test_artifacts/v2/langchain-aws.test.Dockerfile +++ /dev/null @@ -1,28 +0,0 @@ -ARG SAGEMAKER_DISTRIBUTION_IMAGE -FROM $SAGEMAKER_DISTRIBUTION_IMAGE - -ARG MAMBA_DOCKERFILE_ACTIVATE=1 - -RUN sudo apt-get update && \ - sudo apt-get install -y git - -RUN git clone https://github.com/langchain-ai/langchain-aws /tmp/langchain-aws - -CMD ["python", "-c", "import langchain_aws"] -CMD ["python", "-c", "from langchain_aws import BedrockLLM"] -CMD ["python", "-c", "from langchain_aws import ChatBedrock"] -CMD ["python", "-c", "from langchain_aws import SagemakerEndpoint"] -CMD ["python", "-c", "from langchain_aws import AmazonKendraRetriever"] -CMD ["python", "-c", "from langchain_aws import AmazonKnowledgeBasesRetriever"] -CMD ["python", "-c", "from langchain_aws import NeptuneAnalyticsGraph"] -CMD ["python", "-c", "from langchain_aws 
import NeptuneGraph"] - - -WORKDIR "/tmp/langchain-aws" - -RUN pip install jupyter nbconvert - -COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_langchain_aws_tests.sh ./ -RUN chmod +x run_langchain_aws_tests.sh - -CMD ["./run_langchain_aws_tests.sh"] From ebf02d99852a3c05bf3a50babf403a3530a2c534 Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Mon, 7 Apr 2025 11:57:27 -0700 Subject: [PATCH 05/15] Delete test/test_artifacts/v2/python-gssapi.test.Dockerfile Need more robust kerberos setup. --- .../v2/python-gssapi.test.Dockerfile | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 test/test_artifacts/v2/python-gssapi.test.Dockerfile diff --git a/test/test_artifacts/v2/python-gssapi.test.Dockerfile b/test/test_artifacts/v2/python-gssapi.test.Dockerfile deleted file mode 100644 index b4f17f61..00000000 --- a/test/test_artifacts/v2/python-gssapi.test.Dockerfile +++ /dev/null @@ -1,19 +0,0 @@ -ARG SAGEMAKER_DISTRIBUTION_IMAGE -FROM $SAGEMAKER_DISTRIBUTION_IMAGE - -ARG MAMBA_DOCKERFILE_ACTIVATE=1 - -RUN micromamba install -y --name base -c conda-forge pytest pytest-cov parameterized && \ - pip install k5test - -# run tests in the home directory -RUN echo '#!/bin/bash' > /home/sagemaker-user/run_python_gssapi_tests.sh && \ - echo 'import_path=$(python -c "import gssapi; print(gssapi.__file__)")' >> /home/sagemaker-user/run_python_gssapi_tests.sh && \ - echo 'gssapi_path=$(dirname "$import_path")' >> /home/sagemaker-user/run_python_gssapi_tests.sh && \ - echo 'pytest -v "$gssapi_path" "$@"' >> /home/sagemaker-user/run_python_gssapi_tests.sh - -RUN chmod +x /home/sagemaker-user/run_python_gssapi_tests.sh - -WORKDIR /home/sagemaker-user - -CMD ["./run_python_gssapi_tests.sh"] From 151d0df58b3ad1ca7fbbaf033efcd6946c1df7a9 Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Mon, 7 Apr 2025 11:58:41 -0700 Subject: [PATCH 06/15] Delete test/test_artifacts/v2/scripts/run_python_gssapi_tests.sh Need robust kerberos setup --- 
.../v2/scripts/run_python_gssapi_tests.sh | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 test/test_artifacts/v2/scripts/run_python_gssapi_tests.sh diff --git a/test/test_artifacts/v2/scripts/run_python_gssapi_tests.sh b/test/test_artifacts/v2/scripts/run_python_gssapi_tests.sh deleted file mode 100644 index de6e67c5..00000000 --- a/test/test_artifacts/v2/scripts/run_python_gssapi_tests.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -import_path=$(python -c "import gssapi; print(gssapi.__file__)") -gssapi_path=$(dirname "$import_path") -pip install k - -echo "GSSAPI path: $gssapi_path" - -# Run all tests -pytest -v "$gssapi_path" - From e1ec32e6fdd3e4b448d161293c0ff8b04e6bd2f8 Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Mon, 7 Apr 2025 12:03:44 -0700 Subject: [PATCH 07/15] Delete test/test_artifacts/v2/scripts/run_langchain_aws_tests.sh Not testing any notebook from the repo, as we need credentials to run the models --- test/test_artifacts/v2/scripts/run_langchain_aws_tests.sh | 6 ------ 1 file changed, 6 deletions(-) delete mode 100644 test/test_artifacts/v2/scripts/run_langchain_aws_tests.sh diff --git a/test/test_artifacts/v2/scripts/run_langchain_aws_tests.sh b/test/test_artifacts/v2/scripts/run_langchain_aws_tests.sh deleted file mode 100644 index a48acbc2..00000000 --- a/test/test_artifacts/v2/scripts/run_langchain_aws_tests.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -set -e - -jupyter nbconvert --execute --to python samples/agents/agents_with_nova.ipynb - From 2341053a0d81304b1d5275b39fbbf08adc123f1c Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Mon, 7 Apr 2025 14:31:36 -0700 Subject: [PATCH 08/15] Delete test/test_artifacts/v2/scripts/run_langchain_tests.sh Making shell script at run time --- test/test_artifacts/v2/scripts/run_langchain_tests.sh | 8 -------- 1 file changed, 8 deletions(-) delete mode 100644 test/test_artifacts/v2/scripts/run_langchain_tests.sh diff --git a/test/test_artifacts/v2/scripts/run_langchain_tests.sh
b/test/test_artifacts/v2/scripts/run_langchain_tests.sh deleted file mode 100644 index a62d6208..00000000 --- a/test/test_artifacts/v2/scripts/run_langchain_tests.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -pip install ".[test]" -pip install blockbuster - -pytest -v libs/langchain/tests/unit_tests/test_formatting.py - - From 97eb99023f05a054e5e7ad70d11ae56284615299 Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Mon, 7 Apr 2025 16:02:34 -0700 Subject: [PATCH 09/15] Update ipywidgets.test.Dockerfile --- test/test_artifacts/v2/ipywidgets.test.Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/test_artifacts/v2/ipywidgets.test.Dockerfile b/test/test_artifacts/v2/ipywidgets.test.Dockerfile index b8a293d8..c2d3235a 100644 --- a/test/test_artifacts/v2/ipywidgets.test.Dockerfile +++ b/test/test_artifacts/v2/ipywidgets.test.Dockerfile @@ -3,6 +3,8 @@ FROM $SAGEMAKER_DISTRIBUTION_IMAGE ARG MAMBA_DOCKERFILE_ACTIVATE=1 +ENV OPENBLAS_NUM_THREADS=1 + RUN sudo apt-get update && \ sudo apt-get install -y git From 570ffd5157325c24513c7487f16343c8a3aff2a0 Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Mon, 7 Apr 2025 16:02:52 -0700 Subject: [PATCH 10/15] Update jinja2.test.Dockerfile --- test/test_artifacts/v2/jinja2.test.Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test/test_artifacts/v2/jinja2.test.Dockerfile b/test/test_artifacts/v2/jinja2.test.Dockerfile index 32f6d9b5..be28d8f0 100644 --- a/test/test_artifacts/v2/jinja2.test.Dockerfile +++ b/test/test_artifacts/v2/jinja2.test.Dockerfile @@ -3,10 +3,12 @@ FROM $SAGEMAKER_DISTRIBUTION_IMAGE ARG MAMBA_DOCKERFILE_ACTIVATE=1 +ENV OPENBLAS_NUM_THREADS=1 + RUN micromamba install -y -c conda-forge jinja2 COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_jinja2_tests.sh ./ RUN chmod +x run_jinja2_tests.sh -CMD ["./run_jinja2_tests.sh"] \ No newline at end of file +CMD ["./run_jinja2_tests.sh"] From 8794f01aad098b0320e4fa3322ae88c271ef22aa Mon Sep 17 00:00:00 2001 From: 
Bhavya Sharma Date: Mon, 7 Apr 2025 16:05:24 -0700 Subject: [PATCH 11/15] Update torchvision.test.Dockerfile --- test/test_artifacts/v2/torchvision.test.Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/test_artifacts/v2/torchvision.test.Dockerfile b/test/test_artifacts/v2/torchvision.test.Dockerfile index daa5006b..ccde14fd 100644 --- a/test/test_artifacts/v2/torchvision.test.Dockerfile +++ b/test/test_artifacts/v2/torchvision.test.Dockerfile @@ -3,6 +3,8 @@ FROM $SAGEMAKER_DISTRIBUTION_IMAGE ARG MAMBA_DOCKERFILE_ACTIVATE=1 +ENV OPENBLAS_NUM_THREADS=1 + RUN sudo apt-get update && \ sudo apt-get install -y git && \ git clone --recursive https://github.com/pytorch/vision From bc11dfc920527435e010df0749743b1965ce94bf Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Mon, 7 Apr 2025 16:05:54 -0700 Subject: [PATCH 12/15] Update xgboost-cpu.test.Dockerfile --- test/test_artifacts/v2/xgboost-cpu.test.Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/test_artifacts/v2/xgboost-cpu.test.Dockerfile b/test/test_artifacts/v2/xgboost-cpu.test.Dockerfile index 54694360..2b5cb01c 100644 --- a/test/test_artifacts/v2/xgboost-cpu.test.Dockerfile +++ b/test/test_artifacts/v2/xgboost-cpu.test.Dockerfile @@ -3,6 +3,8 @@ FROM $SAGEMAKER_DISTRIBUTION_IMAGE ARG MAMBA_DOCKERFILE_ACTIVATE=1 +ENV OPENBLAS_NUM_THREADS=1 + RUN sudo apt-get update && \ sudo apt-get install -y git && \ git clone --recursive https://github.com/dmlc/xgboost From 39bf7852cf33fcce25496a7fd93cb21374d5a74c Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Tue, 8 Apr 2025 15:51:19 -0700 Subject: [PATCH 13/15] Delete test/test_artifacts/v2/scripts/run_pyhive_tests.sh Right now pyhive is not running successfully. 
--- test/test_artifacts/v2/scripts/run_pyhive_tests.sh | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 test/test_artifacts/v2/scripts/run_pyhive_tests.sh diff --git a/test/test_artifacts/v2/scripts/run_pyhive_tests.sh b/test/test_artifacts/v2/scripts/run_pyhive_tests.sh deleted file mode 100644 index 9dd30a29..00000000 --- a/test/test_artifacts/v2/scripts/run_pyhive_tests.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -pyhive_version=$(micromamba list | grep pyhive | tr -s ' ' | cut -d ' ' -f 3) - -git checkout tags/v$pyhive_version - -./scripts/make_test_tables.sh - -pip install -r dev_requirements.txt - -pytest -v pyhive/tests/test_common.py \ No newline at end of file From 430f4ce6b0100f56876ec32003b61dc55e2eff92 Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Wed, 9 Apr 2025 11:09:01 -0700 Subject: [PATCH 14/15] Delete test/test_artifacts/v2/scripts/run_xgboost_gpu_tests.sh xgboost_gpu are not running successfully due to errors like no CUDA device available. 
--- test/test_artifacts/v2/scripts/run_xgboost_gpu_tests.sh | 9 --------- 1 file changed, 9 deletions(-) delete mode 100644 test/test_artifacts/v2/scripts/run_xgboost_gpu_tests.sh diff --git a/test/test_artifacts/v2/scripts/run_xgboost_gpu_tests.sh b/test/test_artifacts/v2/scripts/run_xgboost_gpu_tests.sh deleted file mode 100644 index 5a4f0da5..00000000 --- a/test/test_artifacts/v2/scripts/run_xgboost_gpu_tests.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -xgboost_gpu_version=$(micromamba list | grep py-xgboost | tr -s ' ' | cut -d ' ' -f 3) - -git checkout tags/v$xgboost_gpu_version - -python -m pytest tests/python-gpu/test_gpu_basic_models.py -v -python -m pytest tests/python-gpu/test_gpu_data_iterator.py -v -python -m pytest tests/python-gpu/test_gpu_prediction.py -v \ No newline at end of file From 125488c3a13b13ff2ed8001d42188c9f9d24181c Mon Sep 17 00:00:00 2001 From: Bhavya Sharma Date: Wed, 9 Apr 2025 11:09:16 -0700 Subject: [PATCH 15/15] Delete test/test_artifacts/v2/xgboost-gpu.test.Dockerfile --- .../v2/xgboost-gpu.test.Dockerfile | 18 ------------------ 1 file changed, 18 deletions(-) delete mode 100644 test/test_artifacts/v2/xgboost-gpu.test.Dockerfile diff --git a/test/test_artifacts/v2/xgboost-gpu.test.Dockerfile b/test/test_artifacts/v2/xgboost-gpu.test.Dockerfile deleted file mode 100644 index 648b83c0..00000000 --- a/test/test_artifacts/v2/xgboost-gpu.test.Dockerfile +++ /dev/null @@ -1,18 +0,0 @@ -ARG SAGEMAKER_DISTRIBUTION_IMAGE -FROM $SAGEMAKER_DISTRIBUTION_IMAGE - -ARG MAMBA_DOCKERFILE_ACTIVATE=1 - -RUN sudo apt-get update && \ - sudo apt-get install -y git && \ - git clone --recursive https://github.com/dmlc/xgboost - -WORKDIR "xgboost" - -RUN micromamba install --freeze-installed -y -c conda-forge xgboost hypothesis loky pytest pytest-timeout - -COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_xgboost_gpu_tests.sh . - -RUN chmod +x run_xgboost_gpu_tests.sh - -CMD ["./run_xgboost_gpu_tests.sh"]