Skip to content

Commit 718bdc8

Browse files
committed
change related to warmup
1 parent 6d8b2c0 commit 718bdc8

File tree

1 file changed

+6
-9
lines changed

1 file changed

+6
-9
lines changed

tensorrt_llm/_torch/pyexecutor/model_engine.py

Lines changed: 6 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -570,15 +570,12 @@ def warmup(self, resource_manager: ResourceManager) -> None:
570570
# Reset the global cuda graph dummy request to None in warmup.
571571
self.cuda_graph_runner.padding_dummy_request = None
572572

573-
cp_type = self.mapping.cp_config.get('cp_type', None)
574-
if cp_type is not None:
575-
if cp_type in [
576-
CpType.ULYSSES, CpType.STAR, CpType.HELIX, CpType.RING
577-
]:
578-
logger.info(
579-
"[ModelEngine::warmup] Skipping warmup for cp_type: ",
580-
cp_type.name)
581-
return
573+
if self.mapping.cp_size > 1:
574+
cp_type = self.mapping.cp_config.get("cp_type", None)
575+
logger.info(
576+
f"[ModelEngine::warmup] Skipping warmup for cp_type: {None if cp_type is None else cp_type.name}."
577+
)
578+
return
582579

583580
self._run_torch_compile_warmup(resource_manager)
584581
self._run_autotuner_warmup(resource_manager)

0 commit comments

Comments (0)