diff --git a/.github/workflows/build-docs.yaml b/.github/workflows/build-docs.yaml index ad45e7135..14c8b2800 100644 --- a/.github/workflows/build-docs.yaml +++ b/.github/workflows/build-docs.yaml @@ -15,10 +15,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Clone repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Clone docs repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: repository: Nixtla/docs ref: scripts diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f05864130..05983690e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -30,7 +30,7 @@ jobs: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_NIXTLA_TMP }} steps: - name: Clone repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Set up environment uses: mamba-org/setup-micromamba@f8b8a1e23a26f60a44c853292711bacfd3eac822 # v1.9.0 diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml index da8b66d3d..ee68ba5c4 100644 --- a/.github/workflows/lint.yaml +++ b/.github/workflows/lint.yaml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Clone repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Set up python uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # 5.2.0 diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index 7109edeb3..a070bdfe9 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -21,7 +21,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: 
actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Set up Python uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # 5.2.0 with: diff --git a/nbs/core.ipynb b/nbs/core.ipynb index 8810218cf..70069e10a 100644 --- a/nbs/core.ipynb +++ b/nbs/core.ipynb @@ -93,7 +93,7 @@ " StemGNN, PatchTST, TimesNet, TimeLLM, TSMixer, TSMixerx,\n", " MLPMultivariate, iTransformer,\n", " BiTCN, TiDE, DeepNPTS, SOFTS,\n", - " TimeMixer, KAN\n", + " TimeMixer, KAN, RMoK\n", ")\n", "from neuralforecast.common._base_auto import BaseAuto, MockTrial" ] }, @@ -245,7 +245,8 @@ " 
'deepnpts': DeepNPTS, 'autodeepnpts': DeepNPTS,\n", " 'softs': SOFTS, 'autosofts': SOFTS,\n", " 'timemixer': TimeMixer, 'autotimemixer': TimeMixer,\n", - " 'kan': KAN, 'autokan': KAN\n", + " 'kan': KAN, 'autokan': KAN,\n", + " 'rmok': RMoK, 'autormok': RMoK\n", "}" ] }, diff --git a/neuralforecast/core.py b/neuralforecast/core.py index 382714fc2..b0568a583 100644 --- a/neuralforecast/core.py +++ b/neuralforecast/core.py @@ -66,6 +66,7 @@ SOFTS, TimeMixer, KAN, + RMoK, ) from .common._base_auto import BaseAuto, MockTrial @@ -190,6 +191,8 @@ def _insample_times( "autotimemixer": TimeMixer, "kan": KAN, "autokan": KAN, + "rmok": RMoK, + "autormok": RMoK, } # %% ../nbs/core.ipynb 8