We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent fa58e5b commit 18fbb74 — Copy full SHA for 18fbb74
src/together/cli/api/finetune.py
@@ -197,10 +197,7 @@ def create(
197
"batch_size": model_limits.lora_training.max_batch_size,
198
"learning_rate": 1e-3,
199
}
200
- log_warn_once(
201
- f"The default LoRA rank for {model} has been changed to {default_values['lora_r']} as the max available.\n"
202
- f"Also, the default learning rate for LoRA fine-tuning has been changed to {default_values['learning_rate']}."
203
- )
+
204
for arg in default_values:
205
arg_source = ctx.get_parameter_source("arg") # type: ignore[attr-defined]
206
if arg_source == ParameterSource.DEFAULT:
0 commit comments