Commit 540ebdd
switch to lightning release/non-dev sha based install for release, adjust epi-patch for docs build context
speediedan committed Dec 20, 2024
1 parent f698388 commit 540ebdd
Showing 7 changed files with 20 additions and 37 deletions.
CHANGELOG.md (24 changes: 4 additions & 20 deletions)
@@ -4,7 +4,7 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

-## [2.5.0] - 2024-XX-XX
+## [2.5.0] - 2024-12-20

### Added

@@ -16,30 +16,14 @@ name/pattern-based configuration instead of manually inspecting modules and appl
- FSDP2 'Auto' Plan Convenience Aliases, simplifying use of both composable and non-composable activation checkpointing APIs
- Flexible orchestration of advanced profiling combining multiple complementary PyTorch profilers with FTS ``MemProfiler``

-### Deprecated
-
-- removed support for PyTorch `2.1`
-
-## [2.4.1] - 2024-XX-XX
-
-### Added
-
-- Support for Lightning and PyTorch ``2.4.1``
-
-### Fixed
-
-- Added logic to more robustly condition depth-aligned checkpoint metadata updates to address edge-cases where `current_score` precisely equaled the `best_model_score` at multiple different depths. Resolved [#15](https://github.com/speediedan/finetuning-scheduler/issues/15).
-
## [2.4.1] - 2024-XX-XX

### Added

- Support for Lightning and PyTorch ``2.4.1``

### Fixed

- Added logic to more robustly condition depth-aligned checkpoint metadata updates to address edge-cases where `current_score` precisely equaled the `best_model_score` at multiple different depths. Resolved [#15](https://github.com/speediedan/finetuning-scheduler/issues/15).

+### Deprecated
+
+- As upstream PyTorch [has deprecated](https://github.com/pytorch/pytorch/issues/138506) official Anaconda channel builds, `finetuning-scheduler` will no longer be releasing conda builds. Installation of FTS via pip (irrespective of the virtual environment used) is the recommended installation approach.
+- removed support for PyTorch `2.1`

## [2.4.0] - 2024-08-15

requirements/base.txt (4 changes: 2 additions & 2 deletions)
@@ -1,4 +1,4 @@
-#lightning>=2.5.0,<2.5.1
+lightning>=2.5.0,<2.5.1
# the below is uncommented when master is targeting a specific pl dev master commit
-git+https://github.com/Lightning-AI/lightning.git@110d62185161cd0b11d8619336ddd139e5ee09dd#egg=lightning
+#git+https://github.com/Lightning-AI/lightning.git@110d62185161cd0b11d8619336ddd139e5ee09dd#egg=lightning
torch>=2.2.0
requirements/standalone_base.txt (4 changes: 2 additions & 2 deletions)
@@ -1,4 +1,4 @@
-#pytorch-lightning>=2.5.0,<2.5.1
+pytorch-lightning>=2.5.0,<2.5.1
# the below is uncommented when master is targeting a specific pl dev master commit
-git+https://github.com/Lightning-AI/pytorch-lightning.git@110d62185161cd0b11d8619336ddd139e5ee09dd#egg=pytorch-lightning
+#git+https://github.com/Lightning-AI/pytorch-lightning.git@110d62185161cd0b11d8619336ddd139e5ee09dd#egg=pytorch-lightning
torch>=2.2.0
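Note that both requirements files use the same toggle convention spelled out by the inline comment: exactly one of the two Lightning lines is active at a time. When master tracks a specific Lightning dev commit, the pinned release specifier is commented out in favor of the sha-based `git+` URL; this commit swaps the roles for release, so the published package depends on the released Lightning `2.5.0` range instead of a dev sha.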
setup.py (14 changes: 7 additions & 7 deletions)
@@ -131,15 +131,15 @@ def _setup_args(standalone: bool = False) -> Dict[str, Any]:
    )

    base_reqs = "standalone_base.txt" if standalone else "base.txt"
-    # install_requires = setup_tools._load_requirements(
-    #     _INSTALL_PATHS["require"], file_name=base_reqs, standalone=standalone
-    # )
    install_requires = setup_tools._load_requirements(
-        _INSTALL_PATHS["require"],
-        file_name=base_reqs,
-        standalone=standalone,
-        pl_commit="110d62185161cd0b11d8619336ddd139e5ee09dd",
+        _INSTALL_PATHS["require"], file_name=base_reqs, standalone=standalone
    )
+    # install_requires = setup_tools._load_requirements(
+    #     _INSTALL_PATHS["require"],
+    #     file_name=base_reqs,
+    #     standalone=standalone,
+    #     pl_commit="110d62185161cd0b11d8619336ddd139e5ee09dd",
+    # )
    base_setup["install_requires"] = install_requires
    return base_setup
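To make the toggle above concrete, here is a minimal sketch of what a `_load_requirements` helper with this call signature might do; the real implementation lives in the repo's `setup_tools` module, and the parsing details below (comment handling, the `@ git+` rewrite) are illustrative assumptions only:

```python
import os
from typing import List, Optional

def _load_requirements(path: str, file_name: str, standalone: bool = False,
                       pl_commit: Optional[str] = None) -> List[str]:
    """Hypothetical sketch: read a requirements file, optionally retargeting
    the Lightning dependency at a specific dev commit."""
    pkg = "pytorch-lightning" if standalone else "lightning"
    repo = f"https://github.com/Lightning-AI/{pkg}.git"
    reqs = []
    with open(os.path.join(path, file_name)) as req_file:
        for line in req_file:
            line = line.strip()
            if not line or line.startswith("#"):  # skip blank and toggled-off/comment lines
                continue
            if pl_commit and line.startswith(pkg):
                # swap the pinned release range for a sha-pinned dev install
                line = f"{pkg} @ git+{repo}@{pl_commit}"
            reqs.append(line)
    return reqs
```

With `pl_commit=None`, as the release branch now calls it, the pinned `lightning>=2.5.0,<2.5.1` line passes through unchanged.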

src/finetuning_scheduler/__about__.py (2 changes: 1 addition & 1 deletion)
@@ -1,7 +1,7 @@
import time

_this_year = time.strftime("%Y")
__version__ = "2.5.0.rc0"
__version__ = "2.5.0"
__author__ = "Dan Dale"
__author_email__ = "[email protected]"
__license__ = "Apache-2.0"
src/fts_examples/patching/dep_patch_shim.py (4 changes: 2 additions & 2 deletions)
@@ -4,7 +4,7 @@
from enum import Enum
from typing import NamedTuple, Tuple, Callable
from fts_examples.patching._patch_utils import lwt_compare_version
-
+from lightning.pytorch.cli import _JSONARGPARSE_SIGNATURES_AVAILABLE

class OSEnvToggle(NamedTuple):
    env_var_name: str
@@ -70,7 +70,7 @@ def _patch_lightning_jsonargparse():
# TODO: remove if lightning fixes `2.5.0` with a post or `2.6.0` is minimum
lightning_jsonargparse_patch = DependencyPatch(
    condition=(lwt_compare_version("lightning", operator.eq, "2.5.0"), sys.version_info >= (3, 12, 8),
-               lwt_compare_version("jsonargparse", operator.ge, "4.35.0") ),
+               lwt_compare_version("jsonargparse", operator.ge, "4.35.0"), _JSONARGPARSE_SIGNATURES_AVAILABLE),
    env_flag=OSEnvToggle("ENABLE_FTS_LIGHTNING_JSONARGPARSE_PATCH", default="1"),
    function=_patch_lightning_jsonargparse,
    patched_package='lightning',
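For context, a rough sketch of how a `DependencyPatch` gate like this is presumably evaluated (the function below is an assumption for illustration, not the shim's actual logic): every entry in the `condition` tuple must be truthy and the `OSEnvToggle` environment flag must resolve to enabled, so appending `_JSONARGPARSE_SIGNATURES_AVAILABLE` keeps the patch inert in contexts, such as the docs build, where jsonargparse signature support is absent:

```python
import os

def _should_apply(patch) -> bool:
    # hypothetical gate: all static conditions must hold...
    conditions_met = all(patch.condition)
    # ...and the env toggle must not be explicitly disabled ("1" assumed to mean enabled)
    toggle = patch.env_flag
    enabled = os.environ.get(toggle.env_var_name, toggle.default) == "1"
    return conditions_met and enabled

# hypothetical usage:
# if _should_apply(lightning_jsonargparse_patch):
#     lightning_jsonargparse_patch.function()
```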
src/fts_examples/patching/patched_lightning_jsonargparse.py (5 changes: 2 additions & 3 deletions)
@@ -1,13 +1,12 @@
from fts_examples.patching._patch_utils import _prepare_module_ctx
from lightning.pytorch.cli import LightningCLI  # noqa: F401

-
-globals().update(_prepare_module_ctx('lightning.pytorch.cli', globals()))
-
# we ignore these for the entire file since we're using our global namespace trickeration to patch
# ruff: noqa: F821
# pyright: reportUndefinedVariable=false

+globals().update(_prepare_module_ctx('lightning.pytorch.cli', globals()))
+
def _updated_parse_known_args_patch(self: ArgumentParser, args: Any = None, namespace: Any = None,
                                    intermixed: bool = False) -> tuple[Any, Any]:
    namespace, args = super(ArgumentParser, self)._parse_known_args(args, namespace,
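`_prepare_module_ctx` itself is not shown in this diff; a plausible sketch of the "global namespace trickeration" the retained comments describe (the helper name is real, its body below is assumed): merging the target module's namespace into this file's `globals()` lets redefinitions such as `_updated_parse_known_args_patch` resolve names like `ArgumentParser` and `Any` exactly as `lightning.pytorch.cli` would, which is also why the ruff/pyright suppressions above it are needed:

```python
import importlib
from typing import Any, Dict

def _prepare_module_ctx(module_name: str, current_globals: Dict[str, Any]) -> Dict[str, Any]:
    """Hypothetical sketch: surface a target module's namespace so functions
    redefined in a patching module resolve names as the original module would."""
    target = importlib.import_module(module_name)
    # only contribute names the patching module hasn't already defined itself
    return {name: obj for name, obj in vars(target).items() if name not in current_globals}
```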
