Merged
85 changes: 85 additions & 0 deletions .github/workflows/build-docs.yml
@@ -0,0 +1,85 @@
name: build-docs

on:
push:
branches: [ main ]
pull_request:
branches: [ main ]

jobs:
doc-build:
runs-on: ubuntu-latest
env:
DISPLAY: ":99"
OPENBLAS_NUM_THREADS: 4
MNE_3D_BACKEND: pyvista
_MNE_BRAIN_TRACES_AUTO: false

steps:
- uses: actions/checkout@v4

- name: Set up Python 3.12
uses: actions/setup-python@v5
with:
python-version: 3.12

      - name: Merge with upstream
        run: |
          echo $(git log -1 --pretty=%B) | tee gitlog.txt
          echo "${{ github.event.pull_request.number }}" | tee merge.txt
          if [[ $(cat merge.txt) != "" ]]; then
            echo "Merging PR #$(cat merge.txt)";
            git remote add upstream https://github.com/AaltoImagingLanguage/conpy.git;
            git pull --ff-only upstream "refs/pull/$(cat merge.txt)/merge";
            git fetch upstream main;
          fi

- name: Install 3D rendering libraries
run: |
sudo apt-get update
sudo apt-get install libosmesa6 libglx-mesa0 libopengl0 libglx0 libdbus-1-3

- name: Spin up Xvfb
run: |
/sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -screen 0 1400x900x24 -ac +extension GLX +render -noreset;

- name: Install PyQt6 dependencies
run: |
sudo apt-get install qt6-base-dev libx11-xcb-dev libxcb-cursor0

- name: Cache Pip
id: cache-pip
uses: actions/cache@v4
with:
path: ~/.cache/pip
          key: pip-cache-${{ hashFiles('requirements-dev.txt') }}

- name: Install Python dependencies
run: |
python -m pip install --user --upgrade --progress-bar off pip wheel
python -m pip install --user --upgrade --progress-bar off -r requirements-dev.txt
python -m pip install --user -e .

# Look at what we have and fail early if there is some library conflict
- name: Check installation
run: |
which python
python -c "import mne; mne.sys_info()"
python -c "import numpy; numpy.show_config()"
python -c "import conpy"

- name: Download example data
run: |
python -c "import mne; mne.datasets.sample.data_path(download=True)"

# Build docs
- name: make html
run: |
cd doc;
make html;

- name: Save HTML as artifact
uses: actions/upload-artifact@v4
with:
name: doc-dev
path: doc/_build/html
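The rendering environment configured above (Xvfb on display :99 plus the pyvista backend) can be sanity-checked locally before pushing. A minimal sketch, assuming MNE-Python is installed and an X server or Xvfb is listening on the display named in DISPLAY; newer MNE releases spell the backend name "pyvistaqt":

# Sketch: confirm that MNE picks up the pyvista-based 3D backend that the
# workflow selects through the MNE_3D_BACKEND environment variable.
import os

os.environ.setdefault("DISPLAY", ":99")             # matches the Xvfb screen above
os.environ.setdefault("MNE_3D_BACKEND", "pyvista")  # matches the workflow env block

import mne

mne.viz.set_3d_backend(os.environ["MNE_3D_BACKEND"])
print(mne.viz.get_3d_backend())  # expect 'pyvista' (or 'pyvistaqt' on newer MNE)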
37 changes: 37 additions & 0 deletions .github/workflows/unit-tests.yml
@@ -0,0 +1,37 @@
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: unit-tests

on:
push:
branches: [ main ]
pull_request:
branches: [ main ]

jobs:
unit-tests:

runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
- name: Set up Python 3.12
uses: actions/setup-python@v5
with:
python-version: 3.12
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install ruff pytest tqdm pytest-cov
pip install -r requirements-dev.txt

- name: Lint with ruff
run: |
# stop the build if there are Python syntax errors or undefined names
ruff check conpy

- name: Test with pytest
run: |
pip install -e .
pytest --cov-report term-missing --cov conpy tests
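The same test step can be reproduced locally; a sketch of its programmatic equivalent, assuming an editable install (pip install -e .) and the tests/ directory used above:

# Sketch: programmatic equivalent of the pytest invocation in the workflow.
import sys

import pytest

sys.exit(pytest.main([
    "--cov=conpy",                # measure coverage of the conpy package
    "--cov-report=term-missing",  # report the line numbers that lack coverage
    "tests",                      # same test path as the CI step
]))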
20 changes: 20 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,20 @@
repos:
# Ruff conpy
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.13.1
hooks:
- id: ruff
name: ruff-lint
files: ^conpy/
- id: ruff-format
        files: ^conpy/|^doc/|^examples/

# Codespell
- repo: https://github.com/codespell-project/codespell
rev: v2.4.1
hooks:
- id: codespell
additional_dependencies:
- tomli
files: ^conpy/|^doc/|^examples/
types_or: [python, bib, rst]
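The files patterns above are regular expressions that pre-commit matches against repository-relative paths with re.search; a quick sketch of what the codespell filter selects (the paths are illustrative):

# Sketch: paths the codespell hook's `files` pattern would consider.
import re

files_pattern = re.compile(r"^conpy/|^doc/|^examples/")
for path in ["conpy/connectivity.py", "doc/index.rst", "tests/test_utils.py"]:
    status = "checked" if files_pattern.search(path) else "skipped"
    print(f"{path}: {status}")  # tests/ falls outside the listed trees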
54 changes: 0 additions & 54 deletions azure-pipelines.yml

This file was deleted.

26 changes: 12 additions & 14 deletions conpy/connectivity.py
@@ -28,7 +28,7 @@
from .viz import plot_connectivity


-class BaseConnectivity(object):
+class _BaseConnectivity(object):
"""Base class for connectivity objects.

Contains implementation of methods that are defined for all connectivity
@@ -98,6 +98,7 @@ def __init__(
self.source_degree = np.asarray(_compute_degree(pairs, n_sources))

def __repr__(self):
"""Obtain a string representation."""
return "<{} | n_sources={}, n_conns={}, subject={}>".format(
self.__class__.__name__, self.n_sources, self.n_connections, self.subject
)
@@ -212,7 +213,7 @@ def threshold(self, thresh, crit=None, direction="above", copy=False):
mask = crit < thresh
else:
raise ValueError(
-                'The direction parameter must be either "above" ' 'or "below".'
+                'The direction parameter must be either "above" or "below".'
)

if copy:
@@ -252,7 +253,7 @@ def is_compatible(self, other):
Whether the given connectivity object is compatible with this one.
"""
return (
-            isinstance(other, BaseConnectivity)
+            isinstance(other, _BaseConnectivity)
and other.n_sources == self.n_sources
and np.array_equal(other.pairs, self.pairs)
)
@@ -356,7 +357,7 @@ def _compute_degree(pairs, n_sources):
return out_degree, in_degree


-class VertexConnectivity(BaseConnectivity):
+class VertexConnectivity(_BaseConnectivity):
"""Estimation of connectivity between vertices.

Parameters
@@ -397,9 +398,7 @@ def __init__(
self, data, pairs, vertices, vertex_degree=None, subject=None, directed=False
):
if len(vertices) != 2:
-            raise ValueError(
-                "The `vertices` parameter should be a list of " "two arrays."
-            )
+            raise ValueError("The `vertices` parameter should be a list of two arrays.")

self.vertices = [np.asarray(v) for v in vertices]
n_vertices = len(self.vertices[0]) + len(self.vertices[1])
@@ -420,7 +419,7 @@ def make_stc(self, summary="sum", weight_by_degree=True):
summary : 'sum' | 'degree' | 'absmax'
How to summarize the adjacency data:

-            'sum' : sum the strenghts of both the incoming and outgoing connections
+            'sum' : sum the strengths of both the incoming and outgoing connections
for each source.
'degree': count the number of incoming and outgoing connections for each
source.
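For illustration, a hedged usage sketch of the summary modes documented above (con is assumed to be an existing VertexConnectivity instance):

# Sketch: collapse vertex-wise connectivity into a SourceEstimate.
stc_sum = con.make_stc(summary="sum", weight_by_degree=True)      # summed strengths
stc_deg = con.make_stc(summary="degree", weight_by_degree=False)  # connection counts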
@@ -474,7 +473,7 @@ def make_stc(self, summary="sum", weight_by_degree=True):

else:
raise ValueError(
-                'The summary parameter must be "degree", or ' '"sum", or "absmax".'
+                'The summary parameter must be "degree", or "sum", or "absmax".'
)

data = np.asarray(data, dtype="float").ravel()
@@ -578,8 +577,7 @@ def summary(c, f, t):
return np.abs(c[f, :][:, t]).max()
elif not isinstance(summary, types.FunctionType):
raise ValueError(
-            'The summary parameter must be "degree", "sum" '
-            '"absmax" or a function.'
+            'The summary parameter must be "degree", "sum", "absmax" or a function.'
)

logger.info("Computing out- and in-degree for each label...")
@@ -718,7 +716,7 @@ def to_original_src(
)


-class LabelConnectivity(BaseConnectivity):
+class LabelConnectivity(_BaseConnectivity):
"""Estimation of all-to-all connectivity, parcellated into labels.

Parameters
@@ -1001,7 +999,7 @@ def one_to_all_connectivity_pairs(src_or_fwd, ref_point, min_dist=0):
-------
vert_from : ndarray, shape (n_pairs,)
For each pair, the index of the first vertex. This is always the index
-        of the refence point.
+        of the reference point.
vert_to : ndarray, shape (n_pairs,)
For each pair, the index of the second vertex.
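A hedged sketch of how these return values are typically combined downstream (the conpy import path, the example vertex index, and the min_dist unit are assumptions):

# Sketch: pair a reference point with all sufficiently distant vertices.
import numpy as np
from conpy import one_to_all_connectivity_pairs

vert_from, vert_to = one_to_all_connectivity_pairs(fwd, ref_point=1000, min_dist=0.01)
pairs = np.vstack([vert_from, vert_to])  # shape (2, n_pairs)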

@@ -1236,7 +1234,7 @@ def dics_connectivity(

if n_orient == 1:
raise ValueError(
"A forward operator with free or tangential " "orientation must be used."
"A forward operator with free or tangential orientation must be used."
)
elif n_orient == 3:
# Convert forward to tangential orientation for more speed.
16 changes: 8 additions & 8 deletions conpy/forward.py
@@ -322,11 +322,11 @@ def restrict_src_to_vertices(
vert_no_lh, vert_no_rh = vertno_or_idx
if check_vertno:
if not (
-                np.all(np.in1d(vert_no_lh, src[0]["vertno"]))
-                and np.all(np.in1d(vert_no_rh, src[1]["vertno"]))
+                np.all(np.isin(vert_no_lh, src[0]["vertno"]))
+                and np.all(np.isin(vert_no_rh, src[1]["vertno"]))
):
raise ValueError(
"One or more vertices were not present in" " SourceSpaces."
"One or more vertices were not present in SourceSpaces."
)

else:
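The np.in1d to np.isin change tracks NumPy's deprecation of the former; for the 1-D vertex arrays used here the two behave identically. A quick sketch:

# Sketch: np.isin is the supported replacement for the deprecated np.in1d.
import numpy as np

vertno = np.array([2, 5, 7, 11])
requested = np.array([5, 11])
assert np.all(np.isin(requested, vertno))  # every requested vertex is present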
@@ -404,9 +404,9 @@ def _make_radial_coord_system(points, origin):
Parameters
----------
points : ndarray, shape (n_points, 3)
-        For each point, the XYZ carthesian coordinates.
+        For each point, the XYZ Cartesian coordinates.
origin : (x, y, z)
-        A tuple (or other array-like) containing the XYZ carthesian coordinates
+        A tuple (or other array-like) containing the XYZ Cartesian coordinates
of the point of origin. This can for example be the center of a sphere
fitted through the points.

@@ -497,7 +497,7 @@ def forward_to_tangential(fwd, center=None):
fwd : instance of Forward
The forward solution to convert.
center : tuple of float (x, y, z) | None
-        The carthesian coordinates of the center of the brain. By default, a
+        The Cartesian coordinates of the center of the brain. By default, a
sphere is fitted through all the points in the source space.

Returns
@@ -512,13 +512,13 @@

if fwd["sol"]["ncol"] // n_sources == 2:
raise ValueError(
"Forward solution already seems to be in tangential " "orientation."
"Forward solution already seems to be in tangential orientation."
)

# Compute two dipole directions tangential to a sphere that has its origin
# in the center of the brain.
if center is None:
-        _, center = _fit_sphere(fwd["source_rr"], disp=False)
+        _, center = _fit_sphere(fwd["source_rr"])
_, tan1, tan2 = _make_radial_coord_system(fwd["source_rr"], center)

# Make sure the forward solution is in head orientation for this
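A hedged usage sketch of the conversion (the variable names are assumptions):

# Sketch: reduce a free-orientation forward solution to two tangential
# orientations per source; passing the result through again would raise
# the "already seems to be in tangential orientation" error above.
from conpy import forward_to_tangential

fwd_tan = forward_to_tangential(fwd)  # sphere center fitted automatically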