From 18fbb741eb3f4412efa769de5b5382aae0d2f1c3 Mon Sep 17 00:00:00 2001
From: Arsh Zahed
Date: Wed, 18 Dec 2024 11:53:07 -0800
Subject: [PATCH] Remove the default LoRA rank warning

---
 src/together/cli/api/finetune.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/src/together/cli/api/finetune.py b/src/together/cli/api/finetune.py
index 0eb2b00..7bc0274 100644
--- a/src/together/cli/api/finetune.py
+++ b/src/together/cli/api/finetune.py
@@ -197,10 +197,7 @@ def create(
             "batch_size": model_limits.lora_training.max_batch_size,
             "learning_rate": 1e-3,
         }
-        log_warn_once(
-            f"The default LoRA rank for {model} has been changed to {default_values['lora_r']} as the max available.\n"
-            f"Also, the default learning rate for LoRA fine-tuning has been changed to {default_values['learning_rate']}."
-        )
+
         for arg in default_values:
            arg_source = ctx.get_parameter_source("arg")  # type: ignore[attr-defined]
            if arg_source == ParameterSource.DEFAULT:
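
Note on the surrounding code: the loop kept by this patch relies on Click's parameter-source
tracking to apply model-specific defaults only when the user did not pass the corresponding
flag. The following is a minimal, self-contained sketch of that pattern, not the together CLI
itself; the command, option names, and default values here are hypothetical stand-ins for the
real `create` command and `model_limits.lora_training` values.

    import click
    from click.core import ParameterSource


    @click.command()
    @click.option("--lora-r", type=int, default=8, help="LoRA rank.")
    @click.option("--learning-rate", type=float, default=1e-5, help="Learning rate.")
    @click.pass_context
    def create(ctx: click.Context, lora_r: int, learning_rate: float) -> None:
        # Hypothetical model-specific defaults, standing in for values derived
        # from the model's LoRA training limits.
        default_values = {"lora_r": 64, "learning_rate": 1e-3}

        training_args = {"lora_r": lora_r, "learning_rate": learning_rate}
        for arg, value in default_values.items():
            # ParameterSource.DEFAULT means the option was not supplied on the
            # command line (or via env/prompt), so the model-specific default
            # can safely replace Click's built-in default.
            if ctx.get_parameter_source(arg) == ParameterSource.DEFAULT:
                training_args[arg] = value

        click.echo(training_args)


    if __name__ == "__main__":
        create()

Running the sketch without flags prints the model-specific defaults; passing, e.g.,
`--lora-r 16` keeps the user's value because its source is ParameterSource.COMMANDLINE.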