
Commit 5105c13

Fix default value for the LoRA flag (#166)
1 parent 1d234c9 commit 5105c13

File tree

2 files changed (+8, -2 lines)


pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@ build-backend = "poetry.masonry.api"
 
 [tool.poetry]
-version = "1.2.6"
+version = "1.2.7"
 name = "together"
 authors = [
     "Together AI <[email protected]>"
 ]

src/together/resources/finetune.py

Lines changed: 7 additions & 1 deletion
@@ -33,7 +33,7 @@ def create(
         n_checkpoints: int | None = 1,
         batch_size: int | None = 16,
         learning_rate: float | None = 0.00001,
-        lora: bool = True,
+        lora: bool = False,
         lora_r: int | None = 8,
         lora_dropout: float | None = 0,
         lora_alpha: float | None = 8,
@@ -108,6 +108,12 @@ def create(
             "The default value of batch size has been changed from 32 to 16 since together version >= 1.2.6"
         )
 
+        # TODO: Remove after next LoRA default change
+        log_warn(
+            "Some of the jobs run _directly_ from the together-python library might be trained using LoRA adapters. "
+            "The version range when this change occurred is from 1.2.3 to 1.2.6."
+        )
+
         return FinetuneResponse(**response.data)
 
     def list(self) -> FinetuneList:
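With this commit, the lora parameter defaults to False, so jobs created through the library run full-parameter fine-tuning unless LoRA is requested explicitly. Below is a minimal usage sketch of opting back in after upgrading, assuming the v1-style Together client; the training-file ID and model name are placeholder values, not taken from this commit.

from together import Together

client = Together()  # reads TOGETHER_API_KEY from the environment

# As of 1.2.7 the lora flag defaults to False, so a LoRA
# fine-tune must be requested explicitly.
job = client.fine_tuning.create(
    training_file="file-abc123",          # placeholder: ID of an uploaded training file
    model="meta-llama/Meta-Llama-3-8B",   # placeholder model name
    lora=True,                            # opt in; omit for full fine-tuning
    lora_r=8,
    lora_alpha=8,
)
print(job.id)

Per the warning added above, callers who relied on the lora=True default that was in effect between 1.2.3 and 1.2.6 should pass lora=True explicitly once they upgrade to 1.2.7.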
