fix(axolotl): fix axolotl crash when no LOG_LEVEL set
philwinder committed Nov 13, 2024
1 parent 30b226a commit 1318217
Showing 1 changed file with 7 additions and 3 deletions.
10 changes: 7 additions & 3 deletions runner/axolotl_finetune_server.py
@@ -4,6 +4,11 @@
 import traceback
 import uuid
 from typing import List, Optional
+import os
+
+# Set default LOG_LEVEL if it doesn't exist to prevent axolotl from crashing
+if os.environ.get("LOG_LEVEL", "") == "":
+    os.environ["LOG_LEVEL"] = "INFO"
 
 import torch
 import transformers
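
An aside on this pattern: os.environ.setdefault expresses the same default in a single call when the variable is entirely unset. A minimal sketch, not part of this commit:

import os

# setdefault only writes the value when LOG_LEVEL is missing altogether.
os.environ.setdefault("LOG_LEVEL", "INFO")

Unlike the guard above, setdefault leaves an explicitly empty LOG_LEVEL="" untouched, so the empty-string check in the commit covers one more failure mode.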
@@ -149,7 +154,7 @@ def run_fine_tuning(
     add_fine_tuning_event(job_id, "info", "Fine-tuning job started.")
 
     parsed_cfg = unified_config(job_id, training_file, "")
 
     cli_args = TrainerCliArgs()
     dataset_meta = load_datasets(cfg=parsed_cfg, cli_args=cli_args)
 
@@ -361,7 +366,7 @@ async def healthz():
return {"status": "ok"}


def unified_config(job_id = "", training_file = "", lora_dir = ""):
def unified_config(job_id="", training_file="", lora_dir=""):
print("unified_content")
parsed_cfg = load_cfg("helix-llama3.2-instruct-1b-v1.yml")
parsed_cfg["sample_packing"] = False
@@ -378,7 +383,6 @@ def unified_config(job_id = "", training_file = "", lora_dir = ""):
parsed_cfg["datasets"][0]["roles"]["assistant"] = ["gpt"]
parsed_cfg["datasets"][0]["roles"]["system"] = ["system"]


if job_id != "":
# Monkeypatch mlflow for our own logging purposes
parsed_cfg["use_mlflow"] = True
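
The "Monkeypatch mlflow" comment refers to code outside the lines shown here. As a hypothetical sketch only (not the commit's actual patch), intercepting mlflow metric logging could look like this, reusing the add_fine_tuning_event helper seen earlier in the file:

import mlflow

def install_mlflow_patch(job_id):
    # Hypothetical: forward each mlflow metric into this server's own
    # event log, then fall through to the original implementation.
    original_log_metric = mlflow.log_metric

    def patched_log_metric(key, value, *args, **kwargs):
        add_fine_tuning_event(job_id, "metric", f"{key}={value}")
        return original_log_metric(key, value, *args, **kwargs)

    mlflow.log_metric = patched_log_metric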
