reclassified an info-level message to debug-level, updated changelog, reverted no-longer necessary FTS tutorial notebook CLI workaround
speediedan committed Jul 29, 2024
1 parent 04350f1 commit 2b35455
Showing 3 changed files with 9 additions and 13 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -9,11 +9,17 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
### Added

- Support for Lightning and PyTorch ``2.4.0``
- Support for Python ``3.12``

### Changed

- Changed the default value of the ``frozen_bn_track_running_stats`` option of the FTS callback constructor to ``True``.

### Deprecated

- removed support for PyTorch `2.0`
- removed support for Python `3.8`

## [2.3.3] - 2024-07-09

- Support for Lightning <= ``2.3.3`` (includes critical security fixes) and PyTorch <= ``2.3.1``
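
For readers of the changelog entry above, a minimal, hedged sketch of opting back out of the new default. It assumes only what the entry states, i.e. that frozen_bn_track_running_stats is an argument of the FinetuningScheduler callback constructor.

# Minimal sketch (not part of this commit): restore the pre-2.4.0 behavior
# by passing False explicitly to the FTS callback constructor.
from lightning.pytorch import Trainer
from finetuning_scheduler import FinetuningScheduler

fts = FinetuningScheduler(frozen_bn_track_running_stats=False)
trainer = Trainer(callbacks=[fts])
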
5 changes: 2 additions & 3 deletions src/finetuning_scheduler/strategy_adapters/base.py
@@ -22,13 +22,12 @@

import torch

-from lightning.fabric.utilities import rank_zero_info
+from lightning.fabric.utilities import rank_zero_info, rank_zero_debug
from lightning.fabric.utilities.types import ReduceLROnPlateau
from lightning.pytorch import LightningModule, Trainer
from lightning.pytorch.callbacks import Callback
from lightning.pytorch.callbacks import BaseFinetuning
from lightning.pytorch.strategies.strategy import Strategy
-from lightning.pytorch.utilities.rank_zero import rank_zero_debug


class StrategyAdapter:
@@ -327,7 +326,7 @@ def _module_specific_freezing(self, modules: torch.nn.Module) -> None:
            None
        """
        if self.fts_handle.frozen_bn_track_running_stats:
-           rank_zero_info("Since `frozen_bn_track_running_stats` is currently set to `True`, FinetuningScheduler"
+           rank_zero_debug("Since `frozen_bn_track_running_stats` is currently set to `True`, FinetuningScheduler"
                           " will set `track_running_stats` to `True` for all `BatchNorm` layers.")
        modules = BaseFinetuning.flatten_modules(modules)  # type: ignore[assignment]
        for mod in modules:
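
Since the reclassified message now surfaces only at debug verbosity, the behavior it describes is easy to miss. Below is a hedged, standalone illustration in plain PyTorch (not FTS internals) of what keeping track_running_stats=True on a frozen BatchNorm layer means.

# Standalone illustration (plain PyTorch, independent of FTS): a BatchNorm layer
# whose affine parameters are frozen can still update its running statistics
# while track_running_stats remains True.
import torch

bn = torch.nn.BatchNorm2d(3)
bn.weight.requires_grad_(False)  # "freeze" the affine parameters
bn.bias.requires_grad_(False)
bn.track_running_stats = True    # the setting FTS now applies to frozen BN layers

before = bn.running_mean.clone()
bn.train()
_ = bn(torch.randn(4, 3, 8, 8))  # a train-mode forward pass updates running stats
assert not torch.equal(before, bn.running_mean)
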
11 changes: 1 addition & 10 deletions src/fts_examples/stable/ipynb_src/fts_superglue_nb.py
@@ -253,22 +253,13 @@ def __init__(
        super().__init__()
        task_name = task_name if task_name in TASK_NUM_LABELS.keys() else DEFAULT_TASK
        self.text_fields = self.TASK_TEXT_FIELD_MAP[task_name]
-       self.init_hparams = {
-           "model_name_or_path": model_name_or_path,
-           "task_name": task_name,
-           "max_seq_length": max_seq_length,
-           "train_batch_size": train_batch_size,
-           "eval_batch_size": eval_batch_size,
-           "dataloader_kwargs": dataloader_kwargs,
-           "tokenizers_parallelism": tokenizers_parallelism,
-       }
        # starting with HF Datasets v3.x, trust_remote_code must be `True` https://bit.ly/hf_datasets_trust_remote_req
        self.trust_remote_code = True
-       self.save_hyperparameters(self.init_hparams)
        self.dataloader_kwargs = {
            "num_workers": dataloader_kwargs.get("num_workers", 0),
            "pin_memory": dataloader_kwargs.get("pin_memory", False),
        }
+       self.save_hyperparameters()
        os.environ["TOKENIZERS_PARALLELISM"] = "true" if self.hparams.tokenizers_parallelism else "false"
        self.tokenizer = AutoTokenizer.from_pretrained(
            self.hparams.model_name_or_path, use_fast=True, local_files_only=False
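
For context on the reverted workaround: save_hyperparameters() with no arguments captures all __init__ arguments automatically, which is why the manually assembled init_hparams dict is no longer needed. The retained trust_remote_code flag is presumably forwarded to HF Datasets when the data is loaded; a hedged sketch of that downstream call follows, with an illustrative task name.

# Hedged sketch (not part of this diff); "rte" is an illustrative SuperGLUE task,
# not necessarily the notebook's default.
import datasets

raw_datasets = datasets.load_dataset(
    "super_glue",
    "rte",
    trust_remote_code=True,  # per the comment above, required with HF Datasets v3.x
)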
