Merged
Changes from 6 commits
4 changes: 2 additions & 2 deletions src/lightning/__setup__.py
@@ -94,7 +94,7 @@ def _setup_args() -> dict[str, Any]:
"include_package_data": True,
"zip_safe": False,
"keywords": ["deep learning", "pytorch", "AI"],
"python_requires": ">=3.9",
"python_requires": ">=3.10",
"entry_points": {
"console_scripts": [
"fabric = lightning.fabric.cli:_main",
@@ -123,9 +123,9 @@ def _setup_args() -> dict[str, Any]:
"Operating System :: OS Independent",
# Specify the Python versions you support here.
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
], # todo: consider aggregation/union of tags from particular packages
}
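The `python_requires` bump above is enforced by pip at install time, so Python 3.9 interpreters will no longer be offered releases built from this revision. As a rough illustration only (not code from this PR), an equivalent runtime guard would look like the following sketch.

```python
# Illustrative sketch -- mirrors python_requires=">=3.10"; not part of this PR.
import sys

if sys.version_info < (3, 10):
    raise RuntimeError(
        f"This package requires Python 3.10 or newer, found {sys.version.split()[0]}"
    )
```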
2 changes: 1 addition & 1 deletion src/lightning/pytorch/CHANGELOG.md
@@ -18,7 +18,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

### Removed

--
+- Removed support for Python 3.9 due to end-of-life status ([#21398](https://github.com/Lightning-AI/pytorch-lightning/pull/21398))

### Fixed

4 changes: 2 additions & 2 deletions src/lightning_fabric/__setup__.py
@@ -73,7 +73,7 @@ def _setup_args() -> dict[str, Any]:
"include_package_data": True,
"zip_safe": False,
"keywords": ["deep learning", "pytorch", "AI"],
"python_requires": ">=3.9",
"python_requires": ">=3.10",
"setup_requires": ["wheel"],
"install_requires": assistant.load_requirements(
_PATH_REQUIREMENTS, unfreeze="none" if _FREEZE_REQUIREMENTS else "all"
@@ -105,9 +105,9 @@ def _setup_args() -> dict[str, Any]:
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
],
}
4 changes: 2 additions & 2 deletions src/pytorch_lightning/__setup__.py
@@ -80,7 +80,7 @@ def _setup_args() -> dict[str, Any]:
"long_description_content_type": "text/markdown",
"zip_safe": False,
"keywords": ["deep learning", "pytorch", "AI"],
"python_requires": ">=3.9",
"python_requires": ">=3.10",
"setup_requires": ["wheel"],
# TODO: aggregate pytorch and lite requirements as we include its source code directly in this package.
# this is not a problem yet because lite's base requirements are all included in pytorch's base requirements
@@ -107,9 +107,9 @@ def _setup_args() -> dict[str, Any]:
"Operating System :: OS Independent",
# Specify the Python versions you support here.
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
],
}
4 changes: 2 additions & 2 deletions tests/tests_fabric/test_connector.py
@@ -77,7 +77,7 @@ def __instancecheck__(self, instance):
@pytest.mark.parametrize(
("accelerator", "devices"), [("tpu", "auto"), ("tpu", 1), ("tpu", [1]), ("tpu", 8), ("auto", 1), ("auto", 8)]
)
@RunIf(min_python="3.9") # mocking issue
@RunIf(min_python="3.10") # mocking issue
def test_accelerator_choice_tpu(accelerator, devices, tpu_available, monkeypatch):
monkeypatch.setattr(torch, "device", DeviceMock())

@@ -1031,7 +1031,7 @@ def get_defaults(cls):


@pytest.mark.parametrize("is_interactive", [False, True])
@RunIf(min_python="3.9") # mocking issue
@RunIf(min_python="3.10") # mocking issue
def test_connector_auto_selection(monkeypatch, is_interactive):
no_cuda = mock.patch("lightning.fabric.accelerators.cuda.num_cuda_devices", return_value=0)
single_cuda = mock.patch("lightning.fabric.accelerators.cuda.num_cuda_devices", return_value=1)
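The `RunIf(min_python=...)` marker touched in these tests gates each test on a minimum interpreter version. A minimal stand-in using plain pytest, assuming only version-based skipping (the real `RunIf` helper in the Lightning test suite supports many more conditions), would look like this:

```python
# Minimal stand-in for RunIf(min_python="3.10"); illustrative, not the real Lightning helper.
import sys

import pytest

requires_py310 = pytest.mark.skipif(
    sys.version_info < (3, 10), reason="requires Python >= 3.10"
)


@requires_py310
def test_runs_only_on_modern_python():
    # Skipped entirely on interpreters older than 3.10.
    assert sys.version_info >= (3, 10)
```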
2 changes: 1 addition & 1 deletion tests/tests_fabric/utilities/test_imports.py
@@ -71,7 +71,7 @@ def test_import_deepspeed_lazily():
assert subprocess.call([sys.executable, "-c", code]) == 0


@RunIf(min_python="3.9")
@RunIf(min_python="3.10")
def test_import_lightning_multiprocessing_start_method_not_set():
"""Regression test for avoiding the lightning import to set the multiprocessing context."""
package_name = "lightning_fabric" if "lightning.fabric" == "lightning_fabric" else "lightning"
2 changes: 1 addition & 1 deletion tests/tests_pytorch/accelerators/test_xla.py
@@ -310,7 +310,7 @@ def test_warning_if_tpus_not_used(tpu_available):
("2,", [2]),
],
)
-@RunIf(min_python="3.9") # mocking issue
+@RunIf(min_python="3.10") # mocking issue
def test_trainer_config_device_ids(devices, expected_device_ids, tpu_available, monkeypatch):
monkeypatch.setattr(lightning.fabric.accelerators.xla, "_using_pjrt", lambda: True)

@@ -65,7 +65,7 @@
@pytest.mark.parametrize(
("accelerator", "devices"), [("tpu", "auto"), ("tpu", 1), ("tpu", [1]), ("tpu", 8), ("auto", 1), ("auto", 8)]
)
-@RunIf(min_python="3.9") # mocking issue
+@RunIf(min_python="3.10") # mocking issue
def test_accelerator_choice_tpu(accelerator, devices, tpu_available, monkeypatch):
monkeypatch.setattr(torch, "device", DeviceMock())
if _IS_WINDOWS:
@@ -736,7 +736,7 @@ def test_gpu_accelerator_backend_choice_cuda(cuda_count_1):
assert isinstance(trainer.accelerator, CUDAAccelerator)


-@RunIf(min_python="3.9") # mocking issue
+@RunIf(min_python="3.10") # mocking issue
def test_gpu_accelerator_backend_choice_mps(mps_count_1, cuda_count_0):
trainer = Trainer(accelerator="gpu")
assert trainer._accelerator_connector._accelerator_flag == "mps"
@@ -809,7 +809,7 @@ def test_connector_with_tpu_accelerator_instance(tpu_available, monkeypatch):


@pytest.mark.parametrize("is_interactive", [False, True])
-@RunIf(min_python="3.9") # mocking issue
+@RunIf(min_python="3.10") # mocking issue
def test_connector_auto_selection(monkeypatch, is_interactive):
import lightning.fabric # avoid breakage with standalone package

2 changes: 1 addition & 1 deletion tests/tests_pytorch/utilities/test_imports.py
@@ -165,7 +165,7 @@ def test_import_deepspeed_lazily():
assert subprocess.call([sys.executable, "-c", code]) == 0


-@RunIf(min_python="3.9")
+@RunIf(min_python="3.10")
def test_import_lightning_multiprocessing_start_method_not_set():
"""Regression test for avoiding the lightning import to set the multiprocessing context."""
package_name = "pytorch_lightning" if "lightning.pytorch" == "pytorch_lightning" else "lightning"