
Commit

update: improve tests for complexity markers
synchon committed Nov 14, 2023
1 parent 7e8e2a3 commit a12e50d
Showing 7 changed files with 127 additions and 162 deletions.
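The per-file diffs below repeat one pattern across the complexity-marker tests: guard the optional neurokit2 dependency with pytest.importorskip before importing the marker (hence the # noqa: E402 comments), import the marker from junifer.markers.complexity, use the renamed SPMAuditoryTestingDataGrabber, assert only that the output timeseries is two-dimensional instead of recomputing the ROI count via load_parcellation and NiftiLabelsMasker, and build the storage URI with pathlib instead of os.path.join. As a minimal sketch assembled from the fragments shown below, using HurstExponent as the example, the updated module looks roughly like this (the PARCELLATION value is an assumption; it sits in a collapsed part of the diff):

"""Sketch of the updated complexity-marker test pattern (HurstExponent)."""

from pathlib import Path

import pytest

# Skip the whole module if the optional dependency is not installed.
pytest.importorskip("neurokit2")

from junifer.datareader import DefaultDataReader  # noqa: E402
from junifer.markers.complexity import HurstExponent  # noqa: E402
from junifer.storage import SQLiteFeatureStorage  # noqa: E402
from junifer.testing.datagrabbers import (  # noqa: E402
    SPMAuditoryTestingDataGrabber,
)


# Set parcellation (assumed value; not visible in the diff excerpt)
PARCELLATION = "Schaefer100x17"


def test_compute() -> None:
    """Test HurstExponent compute()."""
    with SPMAuditoryTestingDataGrabber() as dg:
        # Fetch element data
        element_data = DefaultDataReader().fit_transform(dg["sub001"])
        # Initialize and compute the marker
        marker = HurstExponent(parcellation=PARCELLATION)
        feature_map = marker.fit_transform(element_data)
        # Assert the dimension of timeseries; no masker round-trip needed
        assert feature_map["BOLD"]["data"].ndim == 2


def test_store(tmp_path: Path) -> None:
    """Test HurstExponent store()."""
    with SPMAuditoryTestingDataGrabber() as dg:
        element_data = DefaultDataReader().fit_transform(dg["sub001"])
        marker = HurstExponent(parcellation=PARCELLATION)
        # Create storage from a pathlib.Path instead of os.path.join
        storage = SQLiteFeatureStorage(
            uri=tmp_path / "test_hurst_exponent.sqlite"
        )
        # Compute the marker and store
        marker.fit_transform(input=element_data, storage=storage)

Checking ndim == 2 keeps each test independent of nilearn and of how the parcellation resolves, which is what removes the load_parcellation and NiftiLabelsMasker imports throughout.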
43 changes: 19 additions & 24 deletions junifer/markers/complexity/tests/test_hurst_exponent.py
@@ -1,18 +1,23 @@
"""Provide test for Hurst exponent."""

# Authors: Amir Omidvarnia <[email protected]>
# Synchon Mandal <[email protected]>
# License: AGPL

import os
from pathlib import Path

from nilearn.maskers import NiftiLabelsMasker
import pytest

from junifer.data import load_parcellation
from junifer.datareader import DefaultDataReader
from junifer.markers import HurstExponent
from junifer.storage import SQLiteFeatureStorage
from junifer.testing.datagrabbers import SPMAuditoryTestingDatagrabber

pytest.importorskip("neurokit2")


from junifer.datareader import DefaultDataReader # noqa: E402
from junifer.markers.complexity import HurstExponent # noqa: E402
from junifer.storage import SQLiteFeatureStorage # noqa: E402
from junifer.testing.datagrabbers import ( # noqa: E402
SPMAuditoryTestingDataGrabber,
)


# Set parcellation
@@ -21,7 +26,7 @@

def test_compute() -> None:
"""Test HurstExponent compute()."""
with SPMAuditoryTestingDatagrabber() as dg:
with SPMAuditoryTestingDataGrabber() as dg:
# Fetch element
element = dg["sub001"]
# Fetch element data
@@ -30,17 +35,8 @@ def test_compute() -> None:
marker = HurstExponent(parcellation=PARCELLATION)
# Compute the marker
feature_map = marker.fit_transform(element_data)

# Load parcellation
test_parcellation, _, _ = load_parcellation(PARCELLATION)
# Compute the NiftiLabelsMasker
test_masker = NiftiLabelsMasker(test_parcellation)
test_ts = test_masker.fit_transform(element_data["BOLD"]["data"])
_, n_roi = test_ts.shape

# Assert the dimension of timeseries
_, n_roi2 = feature_map["BOLD"]["data"].shape
assert n_roi == n_roi2
# Assert the dimension of timeseries
assert feature_map["BOLD"]["data"].ndim == 2


def test_get_output_type() -> None:
@@ -58,17 +54,16 @@ def test_store(tmp_path: Path) -> None:
The path to the test directory.
"""
with SPMAuditoryTestingDatagrabber() as dg:
with SPMAuditoryTestingDataGrabber() as dg:
# Fetch element
element = dg["sub001"]
# Fetch element data
element_data = DefaultDataReader().fit_transform(element)
# Initialize the marker
marker = HurstExponent(parcellation=PARCELLATION)

# Create storage
# tmp_path = "/home/aomidvarnia/tmp"
storage_uri = os.path.join(tmp_path, "test_hurst_exponent.sqlite")
storage = SQLiteFeatureStorage(uri=storage_uri)
storage = SQLiteFeatureStorage(
uri=tmp_path / "test_hurst_exponent.sqlite"
)
# Compute the marker and store
marker.fit_transform(input=element_data, storage=storage)
41 changes: 17 additions & 24 deletions junifer/markers/complexity/tests/test_multiscale_entropy_auc.py
@@ -1,18 +1,22 @@
"""Provide test for the AUC of multiscale entropy."""

# Authors: Amir Omidvarnia <[email protected]>
# Synchon Mandal <[email protected]>
# License: AGPL

import os
from pathlib import Path

from nilearn.maskers import NiftiLabelsMasker
import pytest

from junifer.data import load_parcellation
from junifer.datareader import DefaultDataReader
from junifer.markers import MultiscaleEntropyAUC
from junifer.storage import SQLiteFeatureStorage
from junifer.testing.datagrabbers import SPMAuditoryTestingDatagrabber

pytest.importorskip("neurokit2")

from junifer.datareader import DefaultDataReader # noqa: E402
from junifer.markers.complexity import MultiscaleEntropyAUC # noqa: E402
from junifer.storage import SQLiteFeatureStorage # noqa: E402
from junifer.testing.datagrabbers import ( # noqa: E402
SPMAuditoryTestingDataGrabber,
)


# Set parcellation
@@ -21,7 +25,7 @@

def test_compute() -> None:
"""Test MultiscaleEntropyAUC compute()."""
with SPMAuditoryTestingDatagrabber() as dg:
with SPMAuditoryTestingDataGrabber() as dg:
# Fetch element
element = dg["sub001"]
# Fetch element data
@@ -30,17 +34,8 @@ def test_compute() -> None:
marker = MultiscaleEntropyAUC(parcellation=PARCELLATION)
# Compute the marker
feature_map = marker.fit_transform(element_data)

# Load parcellation
test_parcellation, _, _ = load_parcellation(PARCELLATION)
# Compute the NiftiLabelsMasker
test_masker = NiftiLabelsMasker(test_parcellation)
test_ts = test_masker.fit_transform(element_data["BOLD"]["data"])
_, n_roi = test_ts.shape

# Assert the dimension of timeseries
_, n_roi2 = feature_map["BOLD"]["data"].shape
assert n_roi == n_roi2
# Assert the dimension of timeseries
assert feature_map["BOLD"]["data"].ndim == 2


def test_get_output_type() -> None:
@@ -58,18 +53,16 @@ def test_store(tmp_path: Path) -> None:
The path to the test directory.
"""
with SPMAuditoryTestingDatagrabber() as dg:
with SPMAuditoryTestingDataGrabber() as dg:
# Fetch element
element = dg["sub001"]
# Fetch element data
element_data = DefaultDataReader().fit_transform(element)
# Initialize the marker
marker = MultiscaleEntropyAUC(parcellation=PARCELLATION)

# Create storage
storage_uri = os.path.join(
tmp_path, "test_multiscale_entropy_auc.sqlite"
storage = SQLiteFeatureStorage(
uri=tmp_path / "test_multiscale_entropy_auc.sqlite"
)
storage = SQLiteFeatureStorage(uri=storage_uri)
# Compute the marker and store
marker.fit_transform(input=element_data, storage=storage)
41 changes: 18 additions & 23 deletions junifer/markers/complexity/tests/test_perm_entropy.py
@@ -1,18 +1,22 @@
"""Provide test for permutation entropy."""

# Authors: Amir Omidvarnia <[email protected]>
# Synchon Mandal <[email protected]>
# License: AGPL

import os
from pathlib import Path

from nilearn.maskers import NiftiLabelsMasker
import pytest

from junifer.data import load_parcellation
from junifer.datareader import DefaultDataReader
from junifer.markers import PermEntropy
from junifer.storage import SQLiteFeatureStorage
from junifer.testing.datagrabbers import SPMAuditoryTestingDatagrabber

pytest.importorskip("neurokit2")

from junifer.datareader import DefaultDataReader # noqa: E402
from junifer.markers.complexity import PermEntropy # noqa: E402
from junifer.storage import SQLiteFeatureStorage # noqa: E402
from junifer.testing.datagrabbers import ( # noqa: E402
SPMAuditoryTestingDataGrabber,
)


# Set parcellation
@@ -21,7 +25,7 @@

def test_compute() -> None:
"""Test PermEntropy compute()."""
with SPMAuditoryTestingDatagrabber() as dg:
with SPMAuditoryTestingDataGrabber() as dg:
# Fetch element
element = dg["sub001"]
# Fetch element data
@@ -30,17 +34,8 @@ def test_compute() -> None:
marker = PermEntropy(parcellation=PARCELLATION)
# Compute the marker
feature_map = marker.fit_transform(element_data)

# Load parcellation
test_parcellation, _, _ = load_parcellation(PARCELLATION)
# Compute the NiftiLabelsMasker
test_masker = NiftiLabelsMasker(test_parcellation)
test_ts = test_masker.fit_transform(element_data["BOLD"]["data"])
_, n_roi = test_ts.shape

# Assert the dimension of timeseries
_, n_roi2 = feature_map["BOLD"]["data"].shape
assert n_roi == n_roi2
# Assert the dimension of timeseries
assert feature_map["BOLD"]["data"].ndim == 2


def test_get_output_type() -> None:
@@ -58,16 +53,16 @@ def test_store(tmp_path: Path) -> None:
The path to the test directory.
"""
with SPMAuditoryTestingDatagrabber() as dg:
with SPMAuditoryTestingDataGrabber() as dg:
# Fetch element
element = dg["sub001"]
# Fetch element data
element_data = DefaultDataReader().fit_transform(element)
# Initialize the marker
marker = PermEntropy(parcellation=PARCELLATION)
# Create storage
# Create storage
storage_uri = os.path.join(tmp_path, "test_perm_entropy.sqlite")
storage = SQLiteFeatureStorage(uri=storage_uri)
storage = SQLiteFeatureStorage(
uri=tmp_path / "test_perm_entropy.sqlite"
)
# Compute the marker and store
marker.fit_transform(input=element_data, storage=storage)
41 changes: 19 additions & 22 deletions junifer/markers/complexity/tests/test_range_entropy.py
@@ -1,18 +1,23 @@
"""Provide test for range entropy."""

# Authors: Amir Omidvarnia <[email protected]>
# Synchon Mandal <[email protected]>
# License: AGPL

import os
from pathlib import Path

from nilearn.maskers import NiftiLabelsMasker
import pytest

from junifer.data import load_parcellation
from junifer.datareader import DefaultDataReader
from junifer.markers import RangeEntropy
from junifer.storage import SQLiteFeatureStorage
from junifer.testing.datagrabbers import SPMAuditoryTestingDatagrabber

pytest.importorskip("neurokit2")


from junifer.datareader import DefaultDataReader # noqa: E402
from junifer.markers.complexity import RangeEntropy # noqa: E402
from junifer.storage import SQLiteFeatureStorage # noqa: E402
from junifer.testing.datagrabbers import ( # noqa: E402
SPMAuditoryTestingDataGrabber,
)


# Set parcellation
@@ -21,7 +26,7 @@

def test_compute() -> None:
"""Test RangeEntropy compute()."""
with SPMAuditoryTestingDatagrabber() as dg:
with SPMAuditoryTestingDataGrabber() as dg:
# Fetch element
element = dg["sub001"]
# Fetch element data
@@ -30,17 +35,8 @@ def test_compute() -> None:
marker = RangeEntropy(parcellation=PARCELLATION)
# Compute the marker
feature_map = marker.fit_transform(element_data)

# Load parcellation
test_parcellation, _, _ = load_parcellation(PARCELLATION)
# Compute the NiftiLabelsMasker
test_masker = NiftiLabelsMasker(test_parcellation)
test_ts = test_masker.fit_transform(element_data["BOLD"]["data"])
_, n_roi = test_ts.shape

# Assert the dimension of timeseries
_, n_roi2 = feature_map["BOLD"]["data"].shape
assert n_roi == n_roi2
# Assert the dimension of timeseries
assert feature_map["BOLD"]["data"].ndim == 2


def test_get_output_type() -> None:
@@ -58,15 +54,16 @@ def test_store(tmp_path: Path) -> None:
The path to the test directory.
"""
with SPMAuditoryTestingDatagrabber() as dg:
with SPMAuditoryTestingDataGrabber() as dg:
# Fetch element
element = dg["sub001"]
# Fetch element data
element_data = DefaultDataReader().fit_transform(element)
# Initialize the marker
marker = RangeEntropy(parcellation=PARCELLATION)
# Create storage
storage_uri = os.path.join(tmp_path, "test_range_entropy.sqlite")
storage = SQLiteFeatureStorage(uri=storage_uri)
storage = SQLiteFeatureStorage(
uri=tmp_path / "test_range_entropy.sqlite"
)
# Compute the marker and store
marker.fit_transform(input=element_data, storage=storage)
