Skip to content

Commit e86c9c9

Browse files
authored
Fix LLM fine-tuning examples import error on OSS
Differential Revision: D71001041 Pull Request resolved: #9168
1 parent 8f27305 commit e86c9c9

File tree

2 files changed

+13
-9
lines changed

2 files changed

+13
-9
lines changed

backends/vulkan/runtime/graph/ops/impl/BinaryOp.cpp

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -132,18 +132,21 @@ void add_binary_op_buffer_node(
132132
// Shader params buffers
133133
{},
134134
// Specialization Constants
135-
{graph.packed_dim_of(out), graph.packed_dim_of(in1), graph.packed_dim_of(in2)},
135+
{graph.packed_dim_of(out),
136+
graph.packed_dim_of(in1),
137+
graph.packed_dim_of(in2)},
136138
// Resizing Logic
137139
resize_binary_op_node,
138140
{},
139-
{{graph.sizes_pc_of(in1),
140-
graph.sizes_pc_of(in2),
141-
graph.strides_pc_of(out),
142-
graph.strides_pc_of(in1),
143-
graph.strides_pc_of(in2),
144-
graph.numel_pc_of(out),
145-
PushConstantDataInfo(&alpha_val, sizeof(float)),
146-
}}));
141+
{{
142+
graph.sizes_pc_of(in1),
143+
graph.sizes_pc_of(in2),
144+
graph.strides_pc_of(out),
145+
graph.strides_pc_of(in1),
146+
graph.strides_pc_of(in2),
147+
graph.numel_pc_of(out),
148+
PushConstantDataInfo(&alpha_val, sizeof(float)),
149+
}}));
147150
}
148151

149152
void add_binary_op_node(

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -93,6 +93,7 @@ flatc = "executorch.data.bin:flatc"
9393
# into core pip packages. Refactor out the necessary utils
9494
# or core models files into a separate package.
9595
"executorch.examples.apple.coreml.llama" = "examples/apple/coreml/llama"
96+
"executorch.examples.llm_pte_finetuning" = "examples/llm_pte_finetuning"
9697
"executorch.examples.models" = "examples/models"
9798
"executorch.exir" = "exir"
9899
"executorch.extension" = "extension"

0 commit comments

Comments (0)