Skip to content

Commit e11fbe4

Browse files
Merge pull request #338 from BenjaminBossan/fix-fine-tuning-vlm-peft-trl
FIX Error in VLM fine-tuning notebook
2 parents 0126a7a + bdb2054 commit e11fbe4

File tree

2 files changed

+6
-36
lines changed

2 files changed

+6
-36
lines changed

notebooks/en/fine_tuning_vlm_trl.ipynb

Lines changed: 3 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -607,7 +607,6 @@
607607
" if 'model' in globals(): del globals()['model']\n",
608608
" if 'processor' in globals(): del globals()['processor']\n",
609609
" if 'trainer' in globals(): del globals()['trainer']\n",
610-
" if 'peft_model' in globals(): del globals()['peft_model']\n",
611610
" if 'bnb_config' in globals(): del globals()['bnb_config']\n",
612611
" time.sleep(2)\n",
613612
"\n",
@@ -738,17 +737,9 @@
738737
"id": "ITmkRHWCKYjf",
739738
"outputId": "49440aaf-89a4-4810-ad40-eafe4582bab3"
740739
},
741-
"outputs": [
742-
{
743-
"name": "stdout",
744-
"output_type": "stream",
745-
"text": [
746-
"trainable params: 2,523,136 || all params: 8,293,898,752 || trainable%: 0.0304\n"
747-
]
748-
}
749-
],
740+
"outputs": [],
750741
"source": [
751-
"from peft import LoraConfig, get_peft_model\n",
742+
"from peft import LoraConfig\n",
752743
"\n",
753744
"# Configure LoRA\n",
754745
"peft_config = LoraConfig(\n",
@@ -758,13 +749,7 @@
758749
" bias=\"none\",\n",
759750
" target_modules=[\"q_proj\", \"v_proj\"],\n",
760751
" task_type=\"CAUSAL_LM\",\n",
761-
")\n",
762-
"\n",
763-
"# Apply PEFT model adaptation\n",
764-
"peft_model = get_peft_model(model, peft_config)\n",
765-
"\n",
766-
"# Print trainable parameters\n",
767-
"peft_model.print_trainable_parameters()"
752+
")"
768753
]
769754
},
770755
{

notebooks/zh-CN/fine_tuning_vlm_trl.ipynb

Lines changed: 3 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1120,7 +1120,6 @@
11201120
" if 'model' in globals(): del globals()['model']\n",
11211121
" if 'processor' in globals(): del globals()['processor']\n",
11221122
" if 'trainer' in globals(): del globals()['trainer']\n",
1123-
" if 'peft_model' in globals(): del globals()['peft_model']\n",
11241123
" if 'bnb_config' in globals(): del globals()['bnb_config']\n",
11251124
" time.sleep(2)\n",
11261125
"\n",
@@ -1243,17 +1242,9 @@
12431242
"id": "ITmkRHWCKYjf",
12441243
"outputId": "3ca824c9-4aca-4d5b-e942-7a1705939e08"
12451244
},
1246-
"outputs": [
1247-
{
1248-
"name": "stdout",
1249-
"output_type": "stream",
1250-
"text": [
1251-
"trainable params: 2,523,136 || all params: 8,293,898,752 || trainable%: 0.0304\n"
1252-
]
1253-
}
1254-
],
1245+
"outputs": [],
12551246
"source": [
1256-
"from peft import LoraConfig, get_peft_model\n",
1247+
"from peft import LoraConfig\n",
12571248
"\n",
12581249
"# Configure LoRA\n",
12591250
"peft_config = LoraConfig(\n",
@@ -1263,13 +1254,7 @@
12631254
" bias=\"none\",\n",
12641255
" target_modules=[\"q_proj\", \"v_proj\"],\n",
12651256
" task_type=\"CAUSAL_LM\",\n",
1266-
")\n",
1267-
"\n",
1268-
"# Apply PEFT model adaptation\n",
1269-
"peft_model = get_peft_model(model, peft_config)\n",
1270-
"\n",
1271-
"# Print trainable parameters\n",
1272-
"peft_model.print_trainable_parameters()"
1257+
")"
12731258
]
12741259
},
12751260
{

0 commit comments

Comments (0)