4 changes: 2 additions & 2 deletions .gitignore
@@ -182,9 +182,9 @@ cython_debug/
.abstra/

# Visual Studio Code
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
# and can be added to the global gitignore or merged into this file. However, if you prefer,
# you could uncomment the following to ignore the entire vscode folder
# .vscode/

60 changes: 60 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,60 @@
repos:
- repo: https://github.com/checkmake/checkmake.git
# Or another commit hash or version
rev: 0.2.2
hooks:
# Use this hook to let pre-commit build checkmake in its sandbox
- id: checkmake
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
exclude: |
(?x)^(
.*\.svg
)$
- id: check-yaml
- id: check-added-large-files
- id: detect-private-key
- repo: https://github.com/python-poetry/poetry
rev: "2.2.1"
hooks:
- id: poetry-check
- repo: https://github.com/tox-dev/pyproject-fmt
rev: "v2.6.0"
hooks:
- id: pyproject-fmt
- repo: https://github.com/codespell-project/codespell
rev: v2.4.1
hooks:
- id: codespell
exclude: |
(?x)^(
.*\.lock |
.*\.json |
.*\.ipynb |
.*\.cppipe |
.*cosmicqc_paper_v1/.* # skip everything inside this folder
)$
- repo: https://github.com/executablebooks/mdformat
rev: 0.7.22
hooks:
- id: mdformat
additional_dependencies:
- mdformat-gfm
- repo: https://github.com/adrienverge/yamllint
rev: v1.37.1
hooks:
- id: yamllint
exclude: pre-commit-config.yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.13.1"
hooks:
- id: ruff-format
- id: ruff-check
- repo: https://gitlab.com/vojko.pribudic.foss/pre-commit-update
rev: v0.8.0
hooks:
- id: pre-commit-update
args: ["--keep", "mdformat", "--keep", "pre-commit-update", "--keep", "cffconvert"]
2 changes: 1 addition & 1 deletion .vscode/settings.json
@@ -1,4 +1,4 @@
{
// Make notebooks resolve relative paths from the repo root
"jupyter.notebookFileRoot": "${workspaceFolder}"
Reviewer comment (Member):
Consider not using this file, as it would inhibit testing the notebooks as if we were not using VS Code.

}
38 changes: 38 additions & 0 deletions Makefile
Reviewer comment (Member):
While I like the Makefile, using this on an analysis repo where a shell script within a module needs to call other scripts would get entirely too messy (given my assumption that each repo should have only a single Makefile, which lives at the repo root).

I also do not understand what is inherently different about this Makefile compared to a shell script.
The Makefile does not change how the Python code is run; the environments do, and those can be set up from a shell script.

The Makefile, IMO, is great for managing these workflows but does not help solve the problem that was initially identified: how to reduce redundancy and anti-patterns in Python code while retaining a way for the Python interpreter to know where it is so it can import modules and utils.

@@ -0,0 +1,38 @@
# Makefile for project
SHELL := /usr/bin/env bash
.SHELLFLAGS := -eu -o pipefail -c
.DEFAULT_GOAL := help

ENV_NAME ?= demo-vs-code-notebook-root
ENV_FILE ?= env.yml
ENV_UV_VER ?= 3.13

.PHONY: all clean test help env_install_or_update_uv env_install_or_update_conda run_code_uv run_code_conda run_code_uv_papermill run_code_conda_papermill

help: ## Show available targets (default)
@awk 'BEGIN {FS=":.*##"; printf "Usage:\n make <target>\n\nTargets:\n"} \
/^[a-zA-Z0-9_.-]+:.*##/ { printf " %-22s %s\n", $$1, $$2 }' $(MAKEFILE_LIST)

env_install_or_update_uv: ## Create or update uv env
uv pip install --python '$(ENV_UV_VER)' -e .

env_install_or_update_conda: ## Create or update the conda env
if conda env list | awk '{print $$1}' | grep -qx '$(ENV_NAME)'; then \
echo "Updating '$(ENV_NAME)' environment"; \
conda env update -n '$(ENV_NAME)' -f '$(ENV_FILE)' --prune; \
else \
echo "Creating '$(ENV_NAME)' environment"; \
conda env create -n '$(ENV_NAME)' -f '$(ENV_FILE)'; \
fi

run_code_uv: env_install_or_update_uv ## Run code via uv
uv run --python '$(ENV_UV_VER)' python src/notebooks/example.py

run_code_conda: env_install_or_update_conda ## Run code via conda
conda run -n '$(ENV_NAME)' python src/notebooks/example.py

run_code_uv_papermill: env_install_or_update_uv ## Run jupyter notebook via uv
uv run --python '$(ENV_UV_VER)' papermill src/notebooks/example.ipynb src/notebooks/example_output.ipynb

run_code_conda_papermill: env_install_or_update_conda ## Run jupyter notebook via conda
conda run -n '$(ENV_NAME)' python -m papermill src/notebooks/example.ipynb src/notebooks/example_output.ipynb
Reviewer comment (Member) on lines +28 to +38:
These only run for me when using the conda or uv env and not outside of the environment. This leads me to believe that the env is installing the `src` modules.

81 changes: 72 additions & 9 deletions README.md
@@ -1,22 +1,85 @@
# demo-vs-code-notebook-root

This repository demonstrates how to set a notebook root for VS Code through repository-level configuration.
It also demonstrates how to process notebook-derivative code (such as `.py` files generated from `.ipynb` files).
We write this content from the perspective of local development on Linux or macOS systems.
Processing may take place locally on those systems or be executed remotely in a SLURM high-performance computing (HPC) environment.

## Philosophy

We propose that it is best to develop your code from the perspective of the processor and from the context of the project/repo root (in that order).

A programming language or platform may have specific rules for processing code.
Python's processor is the interpreter, which parses and executes Python code.
Therefore, we abide by the Python interpreter's rules when creating Python code for the best outcome.

Code may be developed from the context of the project/repo root to increase consistency and reduce confusion.
Consider a project with nested files as follows:

```shell
project/
└── src/
├── a_dir/
│ └── file1.py
└── b_dir/
└── file2.py
```

We suggest programming from the perspective that both `file1.py` and `file2.py` have access to files starting from the directory `project/`.
This means `file1.py` may access `file2.py`, and vice versa.
We suggest paths found within `file1.py` and `file2.py` should be written relative to the directory `project/`.
For example, `file1.py` might reference `file2.py` using the path `src/b_dir/file2.py`.
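
As a minimal sketch of this convention (the files and contents here are hypothetical, not part of this repository), `file1.py` might read `file2.py` through a path written relative to `project/`, assuming the interpreter is launched from `project/`:

```python
# src/a_dir/file1.py -- hypothetical sketch; assumes the working directory is project/
from pathlib import Path

# the path is written relative to the project root, not relative to src/a_dir/
file2 = Path("src/b_dir/file2.py")

print(file2.read_text())
```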

These are not laws; they are a suggested philosophy to help you deduplicate and simplify your code.
If you need to break these suggestions, we recommend encapsulating the processor's working-directory context for each isolated occasion.
For example, consider the following:
Reviewer comment (Member) on lines +28 to +35:
This seems inconsistent with the Python script:
if programmed from the perspective of the `project/` directory, `from cow.say import personalized_cowsay` should actually be `from src.cow.say import personalized_cowsay`. That convention is described in this README but not carried out in the notebook/script.


```sh
(
# temporarily run from the script's own directory; the parent shell is unaffected
cd ./src/a_dir || exit
python file1.py
)
```

Because these commands run in a subshell, the directory change is temporary: the code is processed from the context of that directory, and the parent shell keeps its previous working directory.

## How Python handles package namespaces

A __namespace__ is a mapping from names to objects.
Programming languages use namespaces to keep names from colliding and to organize code. You already use several layers daily (a short sketch follows this list):

- Global / module namespace: names defined in a file (`foo.py`) live in that module’s namespace.
- Package namespace: names under a package (e.g., `cow.say`) live together under the `cow` namespace.
- Scope namespaces: function/class scopes create their own name maps.
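
The sketch below uses hypothetical names (not drawn from this repository) to show those layers in plain Python:

```python
# module namespace: names defined at the top level of a file, e.g. foo.py
GREETING = "moo"  # accessible as foo.GREETING after `import foo`


def shout(word: str) -> str:
    # scope namespace: `loud` exists only inside this function call
    loud = word.upper()
    return loud


# package namespace: after `import cow.say`, the name `say` lives under `cow`,
# and `cow.say.personalized_cowsay` resolves attribute by attribute.
print(shout(GREETING))  # prints "MOO"
```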

Python resolves code-level imports by scanning `sys.path` in order for a top-level package name.
A package can be either a "regular" package (a directory with `__init__.py`) or an implicit namespace package ([PEP 420](https://peps.python.org/pep-0420/)).
Your local code is not importable just because it exists on disk—the parent directory of the top-level package must be on `sys.path`.
The cleanest, most reproducible way to enable local code to be importable without custom `sys.path` overrides is to install your project so the environment’s site-packages contains a pointer to your source tree.
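
As a rough illustration (a diagnostic sketch, not code from this repository), you can ask the interpreter how it resolves a package such as `cow` after an editable install:

```python
# minimal sketch: inspect how the `cow` package resolves after `pip install -e .`
import importlib.util
import sys

spec = importlib.util.find_spec("cow")
if spec is None:
    print("`cow` is not importable; no sys.path entry contains it")
else:
    # for an editable install, the origin typically points back at the source tree
    print("found `cow` at:", spec.origin)

print("sys.path entries searched, in order:")
for entry in sys.path:
    print(" ", entry)
```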

## Development

This work requires `uv` or `conda` to install the environments and process code properly.
We presume the existence of `make`, a tool we will use to perform common development tasks (see the [`Makefile`](Makefile) for more).

VS Code Jupyter notebooks will by default treat their local directory as the root.
This creates challenges when attempting to load local data or code within the notebook because the kernal and Python won't have visibility of those files without customization.
This creates challenges when attempting to load local data or code within the notebook because the kernel and Python won't have visibility of those files without customization.
A common workaround is to use `sys.path` manipulation to add paths.
This leads to an anti-pattern: extra boilerplate in every notebook that must use files outside of its local path.
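
For illustration only, this is the kind of per-notebook boilerplate (a hypothetical cell, not taken from this repository) that the configuration described below avoids:

```python
# anti-pattern sketch: each notebook patches sys.path before importing local code
import sys
from pathlib import Path

# guess the repository root by walking up from the notebook's directory
repo_root = Path.cwd().parents[1]
sys.path.insert(0, str(repo_root))

# only after the patch above can local packages (e.g. `cow`) be imported
```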

This repository demonstrates how to use a simple file and related configuration found within [`.vscode/settings.json`](.vscode/settings.json) to configure VS Code Jupyter notebooks so that they use the root of the repository as their root.
This lets notebooks automatically use the root of the repository as their root and avoids adding `sys.path` manipulation within each notebook.

## Installation
1. Use either `make env_install_or_update_uv` or `make env_install_or_update_conda` to initialize an environment
1. Explore the local Python package `cow` and module `say` to better understand how they're structured in context with the notebook found within `src/notebooks/`.
1. Open the notebook under `src/notebooks/example.ipynb` within VS Code (when using VS Code, the notebook will automatically open with access to the project root folder).
1. Within the VS Code notebook window, select the kernel associated with the Python environment `demo-vs-code-notebook-root`.
1. Run the notebook to reproduce the local pathing result.

## Processing

This work requires `uv` to install the environment.
Any environment manager can be used to reproduce the results (`uv` was just used to build a quick example).
The following steps demonstrate how to process the code described above.
These steps invoke targets found within the `Makefile` in order to process code from the context of the environment.

1. Run `uv sync` to synchronize the `uv` environment locally.
2. Explore the local Python package `cow` and module `say` to better understand how they're structured in context with the notebook found within `src/notebooks/`.
2. Open the notebook under `src/notebooks/example.ipynb` within VS Code.
3. Within the VS Code notebook window, select kernal: `demo-vs-code-notebook-root` found under `.venv/bin/python`.
4. Run the notebook to reproduce the local pathing result.
1. Invoke Python files through related environments: `make run_code_uv` or `make run_code_conda`
1. Invoke Jupyter notebook files through related environments: `make run_code_uv_papermill` or `make run_code_conda_papermill`
14 changes: 14 additions & 0 deletions env.yml
@@ -0,0 +1,14 @@
# env file for demonstration conda environment
---
name: demo-vs-code-notebook-root
channels:
- conda-forge
dependencies:
- python=3.11
- pip
- ipykernel
- papermill
- pip:
- -e .
Reviewer comment (Member):
Is this something that needs to be done for all projects that have util scripts? Should utils actually be in a `src` dir and installed? Without this line, do imports fail? Are we actually importing modules from their path or from the env?

- cowsay
- poethepoet>=0.37.0
16 changes: 14 additions & 2 deletions pyproject.toml
@@ -4,7 +4,19 @@ version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.11"
classifiers = [
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
]
dependencies = [
"cowsay>=6.1",
"ipykernel>=6.30.1",
"cowsay>=6.1",
"ipykernel>=6.30.1",
"jupytext>=1.17.3",
"papermill>=2.6",
"poethepoet>=0.37",
]

[tool.ruff]
lint.per-file-ignores."src/notebooks/example.py" = [ "E402" ]
5 changes: 3 additions & 2 deletions src/cow/say.py
@@ -4,11 +4,12 @@

import cowsay


def personalized_cowsay(name: str) -> None:
"""
Returns a greeting message for the given name.

Parameters:
name (str): The name of the person to greet.
"""
cowsay.cow(f"Hello, {name}!")
50 changes: 38 additions & 12 deletions src/notebooks/example.ipynb
@@ -2,31 +2,57 @@
"cells": [
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 1,
"id": "05eabbf9",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"sys.executable: /opt/anaconda3/envs/demo-vs-code-notebook-root/bin/python\n",
"shutil.which('python'): /opt/anaconda3/envs/demo-vs-code-notebook-root/bin/python\n",
"version: 3.11.13 | packaged by conda-forge | (main, Jun 4 2025, 14:52:34) [Clang 18.1.8 ]\n"
]
}
],
"source": [
"# show interpreter information\n",
"import sys\n",
"import shutil\n",
"\n",
"print(\"sys.executable:\", sys.executable)\n",
"print(\"shutil.which('python'):\", shutil.which(\"python\"))\n",
"print(\"version:\", sys.version)"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "c7554bce",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" _____________\n",
"| Hello, d33bs! |\n",
" =============\n",
" ____________\n",
"| Hello, Mike! |\n",
" ============\n",
" \\\n",
" \\\n",
" \\\n",
" ^__^\n",
" (oo)\\_______\n",
" (__)\\ )\\/\\\n",
" ||----w |\n",
" || ||\n"
" ^__^\n",
" (oo)\\_______\n",
" (__)\\ )\\/\\\n",
" ||----w |\n",
" || ||\n"
]
}
],
"source": [
"from cow.say import personalized_cowsay\n",
"\n",
"personalized_cowsay(\"d33bs\")"
"personalized_cowsay(\"Mike\")"
]
}
],
@@ -46,7 +72,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.5"
"version": "3.11.13"
}
},
"nbformat": 4,
27 changes: 27 additions & 0 deletions src/notebooks/example.py
@@ -0,0 +1,27 @@
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.3'
# jupytext_version: 1.17.3
# kernelspec:
# display_name: demo-vs-code-notebook-root
# language: python
# name: python3
# ---

# %%
# show interpreter information
import sys
import shutil

print("sys.executable:", sys.executable)
print("shutil.which('python'):", shutil.which("python"))
print("version:", sys.version)

# %%
from cow.say import personalized_cowsay

personalized_cowsay("Mike")