@@ -15,7 +15,11 @@
 import torch
 from executorch.backends.xnnpack.partition.xnnpack_partitioner import XnnpackPartitioner
 from executorch.devtools import generate_etrecord
-from executorch.exir import EdgeCompileConfig, ExecutorchBackendConfig
+from executorch.exir import (
+    EdgeCompileConfig,
+    ExecutorchBackendConfig,
+    to_edge_transform_and_lower,
+)
 from executorch.extension.export_util.utils import export_to_edge, save_pte_program
 
 from ..models import MODEL_NAME_TO_MODEL
@@ -81,29 +85,27 @@
 
     model = model.eval()
     # pre-autograd export. eventually this will become torch.export
-    model = torch.export.export_for_training(model, example_inputs).module()
+    ep = torch.export.export_for_training(model, example_inputs)
+    model = ep.module()
 
     if args.quantize:
         logging.info("Quantizing Model...")
         # TODO(T165162973): This pass shall eventually be folded into quantizer
         model = quantize(model, example_inputs)
 
-    edge = export_to_edge(
-        model,
-        example_inputs,
-        edge_compile_config=EdgeCompileConfig(
+    edge = to_edge_transform_and_lower(
+        ep,
+        partitioner=[XnnpackPartitioner()],
+        compile_config=EdgeCompileConfig(
             _check_ir_validity=False if args.quantize else True,
             _skip_dim_order=True,  # TODO(T182187531): enable dim order in xnnpack
         ),
     )
-    logging.info(f"Exported graph:\n{edge.exported_program().graph}")
+    logging.info(f"Exported and lowered graph:\n{edge.exported_program().graph}")
 
     # this is needed for the ETRecord as lowering modifies the graph in-place
     edge_copy = copy.deepcopy(edge)
 
-    edge = edge.to_backend(XnnpackPartitioner())
-    logging.info(f"Lowered graph:\n{edge.exported_program().graph}")
-
     exec_prog = edge.to_executorch(
         config=ExecutorchBackendConfig(extract_delegate_segments=False)
     )
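For context, a minimal end-to-end sketch of the flow this diff migrates to, using a single to_edge_transform_and_lower call instead of the previous export_to_edge plus to_backend two-step. It only relies on the ExecuTorch calls already shown in the diff; SimpleLinear, its example inputs, and the output filename are hypothetical stand-ins rather than part of the original example.

# A minimal sketch, assuming the ExecuTorch APIs used in the diff above.
# SimpleLinear and its inputs are hypothetical stand-ins for the example models.
import torch

from executorch.backends.xnnpack.partition.xnnpack_partitioner import XnnpackPartitioner
from executorch.exir import (
    EdgeCompileConfig,
    ExecutorchBackendConfig,
    to_edge_transform_and_lower,
)


class SimpleLinear(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.linear = torch.nn.Linear(4, 2)

    def forward(self, x):
        return self.linear(x)


model = SimpleLinear().eval()
example_inputs = (torch.randn(1, 4),)

# Pre-autograd export, then transform and lower to the XNNPACK backend in one call.
ep = torch.export.export_for_training(model, example_inputs)
edge = to_edge_transform_and_lower(
    ep,
    partitioner=[XnnpackPartitioner()],
    compile_config=EdgeCompileConfig(_check_ir_validity=True),
)

# Serialize to an ExecuTorch program and write the .pte file.
exec_prog = edge.to_executorch(
    config=ExecutorchBackendConfig(extract_delegate_segments=False)
)
with open("simple_linear_xnnpack.pte", "wb") as f:
    f.write(exec_prog.buffer)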