Skip to content

Commit 09b5bdb

Browse files
Remove export_for_training
Differential Revision: D90114683. Pull Request resolved: #16434.
1 parent d7be289 commit 09b5bdb

File tree

7 files changed

+8
-10
lines changed

7 files changed

+8
-10
lines changed

.ci/scripts/wheel/test_windows.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -30,9 +30,7 @@ def test_model_xnnpack(model: Model, quantize: bool) -> None:
3030

3131
if quantize:
3232
quant_type = MODEL_NAME_TO_OPTIONS[str(model)].quantization
33-
model_instance = torch.export.export_for_training(
34-
model_instance, example_inputs
35-
)
33+
model_instance = torch.export.export(model_instance, example_inputs)
3634
model_instance = quantize_xnn(
3735
model_instance.module(), example_inputs, quant_type
3836
)

backends/samsung/utils/export_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ def quantize_module(
6363
quantizer = EnnQuantizer()
6464
quantizer.setup_quant_params(precision, is_per_channel, is_qat)
6565
logging.info("Export nn module for quantization...")
66-
exported_module = torch.export.export_for_training(module, inputs).module()
66+
exported_module = torch.export.export(module, inputs).module()
6767
DecomposeScaledDotProductAttention()(exported_module)
6868
logging.info("Quantizing the module...")
6969
annotated_module = prepare_pt2e(exported_module, quantizer)

examples/mediatek/model_export_scripts/gemma.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -393,7 +393,7 @@ def export_to_et_ir(
393393
max_num_token, max_cache_size, True
394394
)
395395
print("Getting pre autograd ATen Dialect Graph")
396-
pre_autograd_aten_dialect = torch.export.export_for_training(
396+
pre_autograd_aten_dialect = torch.export.export(
397397
model, example_inputs, dynamic_shapes=dynamic_shapes, strict=True
398398
).module() # NOTE: Will be replaced with export
399399
quantizer = NeuropilotQuantizer()

examples/mediatek/model_export_scripts/phi.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -338,7 +338,7 @@ def export_to_et_ir(
338338
max_num_token, max_cache_size, True
339339
)
340340
print("Getting pre autograd ATen Dialect Graph")
341-
pre_autograd_aten_dialect = torch.export.export_for_training(
341+
pre_autograd_aten_dialect = torch.export.export(
342342
model, example_inputs, dynamic_shapes=dynamic_shapes, strict=True
343343
).module() # NOTE: Will be replaced with export
344344
quantizer = NeuropilotQuantizer()

examples/mediatek/model_export_scripts/qwen.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -339,7 +339,7 @@ def export_to_et_ir(
339339
max_num_token, max_cache_size, True
340340
)
341341
print("Getting pre autograd ATen Dialect Graph")
342-
pre_autograd_aten_dialect = torch.export.export_for_training(
342+
pre_autograd_aten_dialect = torch.export.export(
343343
model, example_inputs, dynamic_shapes=dynamic_shapes, strict=True
344344
).module() # NOTE: Will be replaced with export
345345
quantizer = NeuropilotQuantizer()

examples/mediatek/model_export_scripts/whisper.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -410,7 +410,7 @@ def export_to_et_ir(
410410
max_num_token, max_cache_size, True
411411
)
412412
print("Getting pre autograd ATen Dialect Graph")
413-
pre_autograd_aten_dialect = torch.export.export_for_training(
413+
pre_autograd_aten_dialect = torch.export.export(
414414
model, example_inputs, dynamic_shapes=dynamic_shapes, strict=True
415415
).module() # NOTE: Will be replaced with export
416416
quantizer = NeuropilotQuantizer()
@@ -483,7 +483,7 @@ def export_encoder_to_et_ir(
483483
print(f"Exporting Encoder to PTE")
484484
example_inputs = model.get_example_inputs(num_mel_bins)
485485
print("Getting pre autograd ATen Dialect Graph")
486-
pre_autograd_aten_dialect = torch.export.export_for_training(
486+
pre_autograd_aten_dialect = torch.export.export(
487487
model, example_inputs, strict=True
488488
).module() # NOTE: Will be replaced with export
489489
quantizer = NeuropilotQuantizer()

exir/tests/test_dim_order_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,6 @@ def forward(self, t1, t2):
4444
y = torch.randn(5, 6)
4545
M(x, y)
4646

47-
expo_prog = torch.export.export_for_training(M, (x, y))
47+
expo_prog = torch.export.export(M, (x, y))
4848
edge_prog = to_edge_transform_and_lower(expo_prog)
4949
edge_prog.to_executorch()

0 commit comments

Comments (0)