diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml
new file mode 100644
index 0000000..7a86879
--- /dev/null
+++ b/.github/workflows/build-docs.yml
@@ -0,0 +1,85 @@
+name: build-docs
+
+on:
+  push:
+    branches: [ main ]
+  pull_request:
+    branches: [ main ]
+
+jobs:
+  doc-build:
+    runs-on: ubuntu-latest
+    env:
+      DISPLAY: ":99"
+      OPENBLAS_NUM_THREADS: 4
+      MNE_3D_BACKEND: pyvistaqt
+      _MNE_BRAIN_TRACES_AUTO: false
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Python 3.12
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.12"
+
+      - name: Merge with upstream
+        run: |
+          echo $(git log -1 --pretty=%B) | tee gitlog.txt
+          echo "${{ github.event.pull_request.number }}" | tee merge.txt
+          if [[ $(cat merge.txt) != "" ]]; then
+            echo "Merging $(cat merge.txt)";
+            git remote add upstream https://github.com/AaltoImagingLanguage/conpy.git;
+            git pull --ff-only upstream "refs/pull/$(cat merge.txt)/merge";
+            git fetch upstream main;
+          fi
+
+      - name: Install 3D rendering libraries
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y libosmesa6 libglx-mesa0 libopengl0 libglx0 libdbus-1-3
+
+      - name: Spin up Xvfb
+        run: |
+          /sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -screen 0 1400x900x24 -ac +extension GLX +render -noreset;
+
+      - name: Install PyQt6 dependencies
+        run: |
+          sudo apt-get install -y qt6-base-dev libx11-xcb-dev libxcb-cursor0
+
+      - name: Cache Pip
+        id: cache-pip
+        uses: actions/cache@v4
+        with:
+          path: ~/.cache/pip
+          key: pip-cache
+
+      - name: Install Python dependencies
+        run: |
+          python -m pip install --user --upgrade --progress-bar off pip wheel
+          python -m pip install --user --upgrade --progress-bar off -r requirements-dev.txt
+          python -m pip install --user -e .
+
+      # Look at what we have and fail early if there is some library conflict
+      - name: Check installation
+        run: |
+          which python
+          python -c "import mne; mne.sys_info()"
+          python -c "import numpy; numpy.show_config()"
+          python -c "import conpy"
+
+      - name: Download example data
+        run: |
+          python -c "import mne; mne.datasets.sample.data_path(download=True)"
+
+      # Build docs
+      - name: make html
+        run: |
+          cd doc;
+          make html;
+
+      - name: Save HTML as artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: doc-dev
+          path: doc/_build/html
diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml
new file mode 100644
index 0000000..5d474f8
--- /dev/null
+++ b/.github/workflows/unit-tests.yml
@@ -0,0 +1,37 @@
+# This workflow will install Python dependencies, run tests and lint with a single version of Python
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+name: unit-tests
+
+on:
+  push:
+    branches: [ main ]
+  pull_request:
+    branches: [ main ]
+
+jobs:
+  unit-tests:
+
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python 3.12
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.12"
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install ruff pytest tqdm pytest-cov
+          pip install -r requirements-dev.txt
+
+      - name: Lint with ruff
+        run: |
+          # stop the build if there are Python syntax errors or undefined names
+          ruff check conpy
+
+      - name: Test with pytest
+        run: |
+          pip install -e .
+          pytest --cov-report term-missing --cov conpy tests
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..dfada97
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,20 @@
+repos:
+  # Ruff linting and formatting
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.13.1
+    hooks:
+      - id: ruff
+        name: ruff-lint
+        files: ^conpy/
+      - id: ruff-format
+        files: ^conpy/|^doc/|^examples/
+
+  # Codespell
+  - repo: https://github.com/codespell-project/codespell
+    rev: v2.4.1
+    hooks:
+      - id: codespell
+        additional_dependencies:
+          - tomli
+        files: ^conpy/|^doc/|^examples/
+        types_or: [python, bib, rst]
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
deleted file mode 100644
index 247744c..0000000
--- a/azure-pipelines.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-trigger:
-- master
-
-jobs:
-- job: 'Test'
-  pool:
-    vmImage: 'Ubuntu-16.04'
-  steps:
-  - bash: |
-      echo "##vso[task.prependpath]$CONDA/bin"
-      sudo chown -R $USER /usr/share/miniconda
-      conda env create --quiet --file environment.yml
-      . /usr/share/miniconda/etc/profile.d/conda.sh
-      conda activate conpy
-    displayName: 'Setup python environment'
-
-  - script: python setup.py develop --user
-    displayName: 'Install conpy'
-
-  - bash: |
-      . /usr/share/miniconda/etc/profile.d/conda.sh
-      conda activate conpy
-      /sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -screen 0 1400x900x24 -ac +extension GLX +render -noreset
-      export DISPLAY=:99.0
-      python -c "import mne; mne.sys_info()"
-      python -c "from mayavi import mlab; import matplotlib.pyplot as plt; mlab.figure(); plt.figure()"
-    displayName: 'Start X Virtual Frame Buffer'
-
-  - script: |
-      . /usr/share/miniconda/etc/profile.d/conda.sh
-      conda activate conpy
-      python -c "import mne; print(mne.datasets.sample.data_path(update_path=True, verbose=False))";
-      python -c "import mne; print(mne.datasets.testing.data_path(update_path=True, verbose=False))";
-      pytest tests --doctest-modules --junitxml=junit/test-results.xml --cov=com --cov-report=xml --cov-report=html
-    displayName: 'Run unit tests'
-
-  - task: PublishTestResults@2
-    inputs:
-      testResultsFiles: '**/test-results.xml'
-      testRunTitle: 'Python $(python.version)'
-    condition: succeededOrFailed()
-
-  - bash: |
-      . /usr/share/miniconda/etc/profile.d/conda.sh
-      conda activate conpy
-      export DISPLAY=:99.0
-      cd doc
-      make html
-    displayName: 'Build documentation'
-
-  - task: PublishPipelineArtifact@0
-    inputs:
-      artifactName: 'doc'
-      targetPath: 'doc/_build/html'
diff --git a/conpy/connectivity.py b/conpy/connectivity.py
index 0018e64..44c4d26 100644
--- a/conpy/connectivity.py
+++ b/conpy/connectivity.py
@@ -28,7 +28,7 @@
 from .viz import plot_connectivity
 
 
-class BaseConnectivity(object):
+class _BaseConnectivity(object):
     """Base class for connectivity objects.
 
     Contains implementation of methods that are defined for all connectivity
@@ -98,6 +98,7 @@ def __init__(
         self.source_degree = np.asarray(_compute_degree(pairs, n_sources))
 
     def __repr__(self):
+        """Obtain a string representation."""
         return "<{} | n_sources={}, n_conns={}, subject={}>".format(
             self.__class__.__name__, self.n_sources, self.n_connections, self.subject
         )
@@ -212,7 +213,7 @@ def threshold(self, thresh, crit=None, direction="above", copy=False):
             mask = crit < thresh
         else:
             raise ValueError(
-                'The direction parameter must be either "above" ' 'or "below".'
+                'The direction parameter must be either "above" or "below".'
             )
 
         if copy:
@@ -252,7 +253,7 @@ def is_compatible(self, other):
             Whether the given connectivity object is compatible with this one.
         """
         return (
-            isinstance(other, BaseConnectivity)
+            isinstance(other, _BaseConnectivity)
            and other.n_sources == self.n_sources
             and np.array_equal(other.pairs, self.pairs)
         )
@@ -356,7 +357,7 @@ def _compute_degree(pairs, n_sources):
     return out_degree, in_degree
 
 
-class VertexConnectivity(BaseConnectivity):
+class VertexConnectivity(_BaseConnectivity):
     """Estimation of connectivity between vertices.
 
     Parameters
@@ -397,9 +398,7 @@ def __init__(
         self, data, pairs, vertices, vertex_degree=None, subject=None, directed=False
     ):
         if len(vertices) != 2:
-            raise ValueError(
-                "The `vertices` parameter should be a list of " "two arrays."
-            )
+            raise ValueError("The `vertices` parameter should be a list of two arrays.")
 
         self.vertices = [np.asarray(v) for v in vertices]
         n_vertices = len(self.vertices[0]) + len(self.vertices[1])
@@ -420,7 +419,7 @@ def make_stc(self, summary="sum", weight_by_degree=True):
         summary : 'sum' | 'degree' | 'absmax'
             How to summarize the adjacency data:
 
-            'sum' : sum the strenghts of both the incoming and outgoing connections
+            'sum' : sum the strengths of both the incoming and outgoing connections
             for each source.
             'degree': count the number of incoming and outgoing connections for
             each source.
@@ -474,7 +473,7 @@
 
         else:
             raise ValueError(
-                'The summary parameter must be "degree", or ' '"sum", or "absmax".'
+                'The summary parameter must be "degree", "sum", or "absmax".'
             )
 
         data = np.asarray(data, dtype="float").ravel()
@@ -578,8 +577,7 @@ def summary(c, f, t):
             return np.abs(c[f, :][:, t]).max()
     elif not isinstance(summary, types.FunctionType):
         raise ValueError(
-            'The summary parameter must be "degree", "sum" '
-            '"absmax" or a function.'
+            'The summary parameter must be "degree", "sum", "absmax", or a function.'
         )
 
     logger.info("Computing out- and in-degree for each label...")
@@ -718,7 +716,7 @@ def to_original_src(
     )
 
 
-class LabelConnectivity(BaseConnectivity):
+class LabelConnectivity(_BaseConnectivity):
     """Estimation of all-to-all connectivity, parcellated into labels.
 
     Parameters
@@ -1001,7 +999,7 @@ def one_to_all_connectivity_pairs(src_or_fwd, ref_point, min_dist=0):
     -------
     vert_from : ndarray, shape (n_pairs,)
         For each pair, the index of the first vertex. This is always the index
-        of the refence point.
+        of the reference point.
     vert_to : ndarray, shape (n_pairs,)
         For each pair, the index of the second vertex.
 
@@ -1236,7 +1234,7 @@ def dics_connectivity(
 
     if n_orient == 1:
         raise ValueError(
-            "A forward operator with free or tangential " "orientation must be used."
+            "A forward operator with free or tangential orientation must be used."
         )
     elif n_orient == 3:
         # Convert forward to tangential orientation for more speed.
diff --git a/conpy/forward.py b/conpy/forward.py
index b2f3a87..c449bf6 100644
--- a/conpy/forward.py
+++ b/conpy/forward.py
@@ -322,11 +322,11 @@ def restrict_src_to_vertices(
     vert_no_lh, vert_no_rh = vertno_or_idx
     if check_vertno:
         if not (
-            np.all(np.in1d(vert_no_lh, src[0]["vertno"]))
-            and np.all(np.in1d(vert_no_rh, src[1]["vertno"]))
+            np.all(np.isin(vert_no_lh, src[0]["vertno"]))
+            and np.all(np.isin(vert_no_rh, src[1]["vertno"]))
         ):
             raise ValueError(
-                "One or more vertices were not present in" " SourceSpaces."
+                "One or more vertices were not present in SourceSpaces."
             )
 
     else:
@@ -404,9 +404,9 @@
     Parameters
     ----------
     points : ndarray, shape (n_points, 3)
-        For each point, the XYZ carthesian coordinates.
+        For each point, the XYZ Cartesian coordinates.
     origin : (x, y, z)
-        A tuple (or other array-like) containing the XYZ carthesian coordinates
+        A tuple (or other array-like) containing the XYZ Cartesian coordinates
         of the point of origin. This can for example be the center of a sphere
         fitted through the points.
 
@@ -497,7 +497,7 @@ def forward_to_tangential(fwd, center=None):
     fwd : instance of Forward
         The forward solution to convert.
     center : tuple of float (x, y, z) | None
-        The carthesian coordinates of the center of the brain. By default, a
+        The Cartesian coordinates of the center of the brain. By default, a
         sphere is fitted through all the points in the source space.
 
     Returns
@@ -512,13 +512,13 @@
 
     if fwd["sol"]["ncol"] // n_sources == 2:
         raise ValueError(
-            "Forward solution already seems to be in tangential " "orientation."
+            "Forward solution already seems to be in tangential orientation."
         )
 
     # Compute two dipole directions tangential to a sphere that has its origin
     # in the center of the brain.
     if center is None:
-        _, center = _fit_sphere(fwd["source_rr"], disp=False)
+        _, center = _fit_sphere(fwd["source_rr"])
     _, tan1, tan2 = _make_radial_coord_system(fwd["source_rr"], center)
 
     # Make sure the forward solution is in head orientation for this
diff --git a/conpy/stats.py b/conpy/stats.py
index ed1c6e8..5d1745e 100644
--- a/conpy/stats.py
+++ b/conpy/stats.py
@@ -43,9 +43,7 @@ def group_connectivity_ttest(cond1, cond2, df=None, tail=None):
         p-values for all connections.
     """
     if len(cond1) != len(cond2):
-        raise ValueError(
-            "The number of subjects in each condition must be " "the same."
-        )
+        raise ValueError("The number of subjects in each condition must be the same.")
     n_subjects = len(cond1)
 
     # Check compatibility of the connection objects
@@ -53,8 +51,7 @@ def group_connectivity_ttest(cond1, cond2, df=None, tail=None):
     for con in cond1[1:] + cond2:
         if not np.array_equal(pairs1, con.pairs):
             raise ValueError(
-                "Not all Connectivity objects have the same "
-                "connection pairs defined."
+                "Not all Connectivity objects have the same connection pairs defined."
             )
 
     # Perform a paired t-test
@@ -126,7 +123,7 @@ def cluster_threshold(
     )
 
     # Restrict the connections to only those found in the big bundles
-    mask = np.in1d(clust_no, big_clusters)
+    mask = np.isin(clust_no, big_clusters)
     data = con.data[mask]
     pairs = [p[mask] for p in con.pairs]
 
@@ -177,7 +174,7 @@ def cluster_permutation_test(
         experimental condition. Each connectivity object should define the
         same connections.
     cluster_threshold : float
-        The threshold to use for forming the intial bundles. Only connections
+        The threshold to use for forming the initial bundles. Only connections
         with a t-value that is either higher than ``cluster_threshold`` or
         lower than ``-cluster_threshold`` are kept.
     tail : -1 | 0 | 1
@@ -236,7 +233,7 @@ def cluster_permutation_test(
         Only returned when ``return_details=True`` is specified.
     bundle_ps : ndarray, shape (n_bundles,) (optional)
         For each found bundle, the p-value based on the permutation test,
-        indicative for the likelyhood that the null-hypothesis holds.
+        indicative of the likelihood that the null hypothesis holds.
         Only returned when ``return_details=True`` is specified.
     H0 : ndarray, shape (n_permutations,) (optional)
         The maximum observed t-value during each random permutation.
@@ -253,22 +250,19 @@
        bioRxiv, 245530, 1-25. https://doi.org/10.1101/245530
     """
     if len(cond1) != len(cond2):
-        raise ValueError(
-            "The number of subjects in each condition must be " "the same."
-        )
+        raise ValueError("The number of subjects in each condition must be the same.")
     n_subjects = len(cond1)
 
     # Check compatibility of the connection objects
     for con in cond1 + cond2:
         if not isinstance(con, VertexConnectivity):
             raise ValueError(
-                "All connectivity objects must by of type " "VertexConnectivity."
+                "All connectivity objects must be of type VertexConnectivity."
             )
 
         if not np.array_equal(con.pairs, cond1[0].pairs):
             raise ValueError(
-                "Not all Connectivity objects have the same "
-                "connection pairs defined."
+                "Not all Connectivity objects have the same connection pairs defined."
             )
 
     if tail not in [-1, 0, 1]:
@@ -382,7 +376,7 @@ def _do_single_permutation(
     Parameters
     ----------
     Xs : ndarray, shape(n_subjects, n_connections)
-        The connectivity data: a constrast between two conditions.
+        The connectivity data: a contrast between two conditions.
     cluster_threshold : float
         The initial t-value threshold to prune connections.
     tail : -1 | 0 | 1
diff --git a/conpy/utils.py b/conpy/utils.py
index a2a22bd..1bbe2f8 100644
--- a/conpy/utils.py
+++ b/conpy/utils.py
@@ -14,12 +14,7 @@
 )
 from mne.utils import get_subjects_dir, warn
 
-try:
-    # MNE-Python 0.18 and up
-    from mne.rank import estimate_rank
-except ImportError:
-    # Older verions of MNE-Python
-    from mne.utils import estimate_rank
+from mne.rank import estimate_rank
 
 
 def _make_diagonal_noise_matrix(csd, reg):
@@ -74,11 +69,11 @@ def _find_indices_1d(haystack, needles, check_needles=True):
     haystack = np.asarray(haystack)
     needles = np.asarray(needles)
     if haystack.ndim != 1 or needles.ndim != 1:
-        raise ValueError("Both the haystack and the needles arrays should be " "1D.")
+        raise ValueError("Both the haystack and the needles arrays should be 1D.")
 
     if check_needles and len(np.setdiff1d(needles, haystack)) > 0:
         raise IndexError(
-            "One or more values where not present in the given " "haystack array."
+            "One or more values were not present in the given haystack array."
         )
 
     sorted_ind = np.argsort(haystack)
@@ -292,7 +287,7 @@ def reg_pinv(x, reg=0, rank="full", rcond=1e-15):
     # Warn the user if both all parameters were kept at their defaults and the
     # matrix is rank deficient.
     if rank_after < len(x) and reg == 0 and rank == "full" and rcond == 1e-15:
-        warn("Covariance matrix is rank-deficient and no regularization is " "done.")
+        warn("Covariance matrix is rank-deficient and no regularization is done.")
     elif isinstance(rank, int) and rank > len(x):
         raise ValueError(
             "Invalid value for the rank parameter (%d) given "
diff --git a/doc/Makefile b/doc/Makefile
index c45dc8f..12f327e 100644
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -42,7 +42,7 @@ help:
 clean:
	rm -rf $(BUILDDIR)/*
	rm -rf auto_examples/
-	rm -rf generated/*
+	rm -rf functions/*
	rm -rf modules/*
 
 html-noplot:
diff --git a/doc/_templates/layout.html b/doc/_templates/layout.html
deleted file mode 100644
index a99c912..0000000
--- a/doc/_templates/layout.html
+++ /dev/null
@@ -1,17 +0,0 @@
-{%- extends 'pydata_sphinx_theme/layout.html' %}
-
-{% block extrahead %}
-
-    {{ super() }}
-{% endblock %}
-
-{% block docs_body %}
-
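A note on the `np.in1d` -> `np.isin` swap that recurs throughout this diff (`restrict_src_to_vertices`, `cluster_threshold`, `_find_indices_1d`): `np.isin` has been the documented replacement since NumPy 1.13, and `np.in1d` was deprecated in NumPy 1.25 and removed in NumPy 2.0, so the swap future-proofs the code for the NumPy releases targeted by the Python 3.12 CI. A minimal sketch of the equivalence, using hypothetical stand-in arrays rather than real source-space data:

```python
import numpy as np

# Hypothetical stand-ins for the arrays used in forward.py; the real code
# passes requested vertex numbers and a source space's src[0]["vertno"].
vertno = np.array([10, 20, 30, 40])
requested = np.array([20, 25, 40])

# np.isin(a, b) returns a boolean mask over `a` that is True where the
# element occurs in `b` -- the same contract np.in1d had for 1-D input.
mask = np.isin(requested, vertno)
print(mask)          # [ True False  True]

# The guard in restrict_src_to_vertices() reduces this mask with np.all:
print(np.all(mask))  # False -> would raise the "not present" ValueError
```

To exercise the new tooling locally, `pre-commit run --all-files` runs the Ruff and codespell hooks, and `pytest --cov-report term-missing --cov conpy tests` mirrors the unit-test workflow.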