10 changes: 10 additions & 0 deletions src/open_r1/sft.py
@@ -91,6 +91,16 @@ def main(script_args, training_args, model_args):
     tokenizer = get_tokenizer(model_args, training_args)
     model = get_model(model_args, training_args)
 
+    # Qwen MoE: set ZeRO-3 leaf modules (lazy import; no effect for non-Qwen)
+    if getattr(getattr(model, "config", {}), "model_type", "") == "qwen3_moe":
+        try:
+            from transformers.models.qwen3_moe.modeling_qwen3_moe import Qwen3MoeSparseMoeBlock as _QwenSparseMoeBlock
+            import deepspeed
+            deepspeed.utils.set_z3_leaf_modules(model, [_QwenSparseMoeBlock])
+            logger.info("[MoE] ZeRO-3 leaf module setup for Qwen MoE model completed.")
+        except Exception as e:
+            logger.warning(f"[MoE] Skipped ZeRO-3 leaf module setup: {e}")
+
     if tokenizer.chat_template is None:
         logger.info("No chat template provided, defaulting to ChatML.")
         model, tokenizer = setup_chat_format(model, tokenizer, format="chatml")
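Why the leaf-module call is needed: under ZeRO-3, DeepSpeed partitions parameters across ranks and gathers them per-module through forward hooks. In a sparse MoE block the router sends each token to only a subset of experts, so on a given step some experts' hooks may never fire, which can confuse ZeRO-3's gather/prefetch bookkeeping and stall or hang training. set_z3_leaf_modules marks Qwen3MoeSparseMoeBlock as a "leaf", telling ZeRO-3 to gather the whole block's parameters, all experts included, in one shot rather than hooking each expert individually. A minimal standalone sketch of the same setup, assuming a transformers release with Qwen3 MoE support and deepspeed installed (the checkpoint name is illustrative, not part of this PR):

# Sketch: flag Qwen3 MoE blocks as ZeRO-3 leaf modules outside of sft.py.
import deepspeed
from transformers import AutoModelForCausalLM
from transformers.models.qwen3_moe.modeling_qwen3_moe import Qwen3MoeSparseMoeBlock

# Illustrative MoE checkpoint; any qwen3_moe model works the same way.
model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen3-30B-A3B")

# Mark every sparse-MoE block as a ZeRO-3 leaf; DeepSpeed returns the list of
# modules it matched (and raises if none are found).
leaf_modules = deepspeed.utils.set_z3_leaf_modules(model, [Qwen3MoeSparseMoeBlock])
print(f"Flagged {len(leaf_modules)} Qwen3MoeSparseMoeBlock instances as ZeRO-3 leaves")

The try/except in the diff keeps this best-effort: on setups without deepspeed (or an older transformers without the qwen3_moe module path), the import fails and training proceeds with only a warning.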