slight import refactor for rank_zero
speediedan committed Aug 15, 2024
1 parent 2b35455 commit 2e7be86
Showing 5 changed files with 5 additions and 6 deletions.
CHANGELOG.md (2 changes: 1 addition & 1 deletion)
@@ -4,7 +4,7 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
-## [2.4.0] - 2024-XX-XX
+## [2.4.0] - 2024-08-15
 
 ### Added
 
requirements/base.txt (1 change: 0 additions & 1 deletion)
@@ -2,4 +2,3 @@
 # the below is uncommented when master is targeting a specific pl dev master commit
 git+https://github.com/Lightning-AI/lightning.git@2064887b12dd934a5f9a2bf45897f29e3bfc74d1#egg=lightning
 torch>=2.1.0
-#mpmath<1.4.0 # temporary requirement to avoid installation of alpha version of mpmath
requirements/standalone_base.txt (1 change: 0 additions & 1 deletion)
@@ -2,4 +2,3 @@
 # the below is uncommented when master is targeting a specific pl dev master commit
 git+https://github.com/Lightning-AI/pytorch-lightning.git@2064887b12dd934a5f9a2bf45897f29e3bfc74d1#egg=pytorch-lightning
 torch>=2.1.0
-#mpmath<1.4.0 # temporary requirement to avoid installation of alpha version of mpmath
src/finetuning_scheduler/fts.py (4 changes: 2 additions & 2 deletions)
@@ -24,13 +24,13 @@
 
 import lightning.pytorch as pl
 import torch
-from lightning.fabric.utilities import rank_zero_info
+from lightning.fabric.utilities import rank_zero_info, rank_zero_warn
 from lightning.fabric.utilities.distributed import ReduceOp
 from lightning.pytorch.callbacks import BaseFinetuning
 from lightning.pytorch.strategies.strategy import Strategy
 from lightning.pytorch.trainer.states import TrainerFn
 from lightning.pytorch.utilities.exceptions import MisconfigurationException
-from lightning.pytorch.utilities.rank_zero import rank_zero_debug, rank_zero_warn
+from lightning.pytorch.utilities.rank_zero import rank_zero_debug
 
 from finetuning_scheduler.fts_supporters import (
     CallbackDepMixin,
src/finetuning_scheduler/strategy_adapters/base.py (3 changes: 2 additions & 1 deletion)
@@ -22,12 +22,13 @@
 
 import torch
 
-from lightning.fabric.utilities import rank_zero_info, rank_zero_debug
+from lightning.fabric.utilities import rank_zero_info
 from lightning.fabric.utilities.types import ReduceLROnPlateau
 from lightning.pytorch import LightningModule, Trainer
 from lightning.pytorch.callbacks import Callback
 from lightning.pytorch.callbacks import BaseFinetuning
 from lightning.pytorch.strategies.strategy import Strategy
+from lightning.pytorch.utilities.rank_zero import rank_zero_debug
 
 
 class StrategyAdapter:
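For context, a minimal usage sketch (not part of this commit; the helper function below is hypothetical) of the import convention the touched modules follow after this refactor: rank_zero_info and rank_zero_warn come from lightning.fabric.utilities, while rank_zero_debug comes from lightning.pytorch.utilities.rank_zero.

from lightning.fabric.utilities import rank_zero_info, rank_zero_warn
from lightning.pytorch.utilities.rank_zero import rank_zero_debug


def log_phase_transition(current_depth: int, max_depth: int) -> int:
    """Log a (hypothetical) fine-tuning phase transition on global rank zero only."""
    rank_zero_debug(f"Requested transition to schedule depth {current_depth}.")
    if current_depth > max_depth:
        rank_zero_warn(f"Depth {current_depth} exceeds max depth {max_depth}; clamping to {max_depth}.")
        current_depth = max_depth
    rank_zero_info(f"Now fine-tuning schedule phase {current_depth} of {max_depth}.")
    return current_depth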
