
Commit 0246ac2

Merge branch 'main' into export-D68374585
2 parents: 84c76fc + a828307

669 files changed: +21987 / -10623 lines


.buckconfig

Lines changed: 3 additions & 0 deletions
@@ -39,3 +39,6 @@
 
 [buck2]
 restarter=true
+
+[oss]
+folly_cxx_tests = False

.ci/docker/build.sh

Lines changed: 5 additions & 0 deletions
@@ -29,6 +29,10 @@ case "${IMAGE_NAME}" in
     LINTRUNNER=""
     CLANG_VERSION=12
     ;;
+  executorch-ubuntu-22.04-gcc11-aarch64)
+    LINTRUNNER=""
+    GCC_VERSION=11
+    ;;
   executorch-ubuntu-22.04-linter)
     LINTRUNNER=yes
     CLANG_VERSION=12
@@ -44,6 +48,7 @@ case "${IMAGE_NAME}" in
   executorch-ubuntu-22.04-mediatek-sdk)
     MEDIATEK_SDK=yes
     CLANG_VERSION=12
+    ANDROID_NDK_VERSION=r27b
     ;;
   executorch-ubuntu-22.04-clang12-android)
     LINTRUNNER=""

.ci/docker/ci_commit_pins/pytorch.txt

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-27e35de6c288bffad1b4d18b393579c1d1a95547
+295f2ed4d103017f7e19a7b8263ece606cd629db

.ci/docker/common/install_android.sh

Lines changed: 1 addition & 0 deletions
@@ -70,6 +70,7 @@ install_sdk() {
   # These are the tools needed to build Android apps
   yes | /opt/cmdline-tools/bin/sdkmanager --sdk_root="${SDK_INSTALLATION_DIR}" --install "platforms;android-34"
   yes | /opt/cmdline-tools/bin/sdkmanager --sdk_root="${SDK_INSTALLATION_DIR}" --install "build-tools;33.0.1"
+  yes | /opt/cmdline-tools/bin/sdkmanager --sdk_root="${SDK_INSTALLATION_DIR}" --install "build-tools;35.0.0"
   # And some more tools for future emulator tests
   yes | /opt/cmdline-tools/bin/sdkmanager --sdk_root="${SDK_INSTALLATION_DIR}" --install "platform-tools"
   yes | /opt/cmdline-tools/bin/sdkmanager --sdk_root="${SDK_INSTALLATION_DIR}" --install "tools"

.ci/docker/common/install_conda.sh

Lines changed: 4 additions & 1 deletion
@@ -13,6 +13,9 @@ source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
 install_miniconda() {
   BASE_URL="https://repo.anaconda.com/miniconda"
   CONDA_FILE="Miniconda3-py${PYTHON_VERSION//./}_${MINICONDA_VERSION}-Linux-x86_64.sh"
+  if [[ $(uname -m) == "aarch64" ]]; then
+    CONDA_FILE="Miniconda3-py${PYTHON_VERSION//./}_${MINICONDA_VERSION}-Linux-aarch64.sh"
+  fi
 
   mkdir -p /opt/conda
   chown ci-user:ci-user /opt/conda
@@ -36,7 +39,7 @@ install_python() {
 
   # From https://github.com/pytorch/pytorch/blob/main/.ci/docker/common/install_conda.sh
   if [[ $(uname -m) == "aarch64" ]]; then
-    conda_install "openblas==0.3.28=*openmp*"
+    conda_install "openblas==0.3.29=*openmp*" -c conda-forge
   else
    conda_install mkl=2022.1.0 mkl-include=2022.1.0
   fi
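The ${PYTHON_VERSION//./} expansion strips the dots from the Python version when forming the installer name. A quick worked example with made-up version values (illustration only, not part of the CI script):

# Worked example of the installer-name construction above; the version values are made up.
PYTHON_VERSION=3.10
MINICONDA_VERSION=24.7.1-0
CONDA_FILE="Miniconda3-py${PYTHON_VERSION//./}_${MINICONDA_VERSION}-Linux-x86_64.sh"
if [[ $(uname -m) == "aarch64" ]]; then
  CONDA_FILE="Miniconda3-py${PYTHON_VERSION//./}_${MINICONDA_VERSION}-Linux-aarch64.sh"
fi
echo "${CONDA_FILE}"
# x86_64 prints:  Miniconda3-py310_24.7.1-0-Linux-x86_64.sh
# aarch64 prints: Miniconda3-py310_24.7.1-0-Linux-aarch64.sh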

.ci/docker/common/install_java.sh

Lines changed: 12 additions & 0 deletions
@@ -0,0 +1,12 @@
+#!/bin/bash
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+set -ex
+
+apt-get update
+
+apt-get install -y --no-install-recommends openjdk-17-jdk

.ci/docker/conda-env-ci.txt

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-cmake=3.22.1
+cmake=3.26.4
 ninja=1.10.2
 libuv
 llvm-openmp

.ci/docker/ubuntu/Dockerfile

Lines changed: 4 additions & 0 deletions
@@ -30,6 +30,10 @@ ARG BUCK2_VERSION
 COPY ./common/install_buck.sh install_buck.sh
 RUN bash ./install_buck.sh && rm install_buck.sh
 
+# Install java
+COPY ./common/install_java.sh install_java.sh
+RUN bash ./install_java.sh && rm install_java.sh
+
 # Setup user
 COPY ./common/install_user.sh install_user.sh
 RUN bash ./install_user.sh && rm install_user.sh
New file (path not shown in this view)

Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
+#!/bin/bash
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+set -ex
+
+if [[ -z "${PYTHON_EXECUTABLE:-}" ]]; then
+  PYTHON_EXECUTABLE=python3
+fi
+which "${PYTHON_EXECUTABLE}"
+
+build_android_test() {
+  pushd extension/android_test
+  ANDROID_HOME="${ANDROID_SDK:-/opt/android/sdk}" ./gradlew testDebugUnitTest
+  ANDROID_HOME="${ANDROID_SDK:-/opt/android/sdk}" ./gradlew build assembleAndroidTest
+  popd
+}
+
+collect_artifacts_to_be_uploaded() {
+  ARTIFACTS_DIR_NAME="$1"
+  # Collect Java library test
+  JAVA_LIBRARY_TEST_DIR="${ARTIFACTS_DIR_NAME}/library_test_dir"
+  mkdir -p "${JAVA_LIBRARY_TEST_DIR}"
+  cp extension/android_test/build/outputs/apk/debug/*.apk "${JAVA_LIBRARY_TEST_DIR}"
+  cp extension/android_test/build/outputs/apk/androidTest/debug/*.apk "${JAVA_LIBRARY_TEST_DIR}"
+}
+
+main() {
+  build_android_test
+  if [ -n "$ARTIFACTS_DIR_NAME" ]; then
+    collect_artifacts_to_be_uploaded ${ARTIFACTS_DIR_NAME}
+  fi
+}
+
+if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
+  main "$@"
+fi
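The artifact step in main() only runs when ARTIFACTS_DIR_NAME is non-empty. The stand-in function below illustrates that gate in isolation; it is not the script's own collect_artifacts_to_be_uploaded, just a runnable sketch of the same check.

#!/bin/bash
# Standalone illustration of the ARTIFACTS_DIR_NAME gate used in main() above.
maybe_collect() {
  if [ -n "${ARTIFACTS_DIR_NAME:-}" ]; then
    echo "would collect APKs into ${ARTIFACTS_DIR_NAME}/library_test_dir"
  else
    echo "ARTIFACTS_DIR_NAME not set; skipping artifact collection"
  fi
}
ARTIFACTS_DIR_NAME="" maybe_collect
ARTIFACTS_DIR_NAME="$(mktemp -d)" maybe_collect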

.ci/scripts/build_llama_android.sh

Lines changed: 4 additions & 1 deletion
@@ -42,6 +42,10 @@ install_executorch_and_backend_lib() {
 
 build_llama_runner() {
   echo "Building llama runner for Android..."
+  pushd extension/llm/tokenizers
+  echo "Updating tokenizers submodule"
+  git submodule update --init
+  popd
   ANDROID_ABI=arm64-v8a
   cmake -DBUCK2="${BUCK2}" \
     -DCMAKE_TOOLCHAIN_FILE="$ANDROID_NDK"/build/cmake/android.toolchain.cmake \
@@ -56,6 +60,5 @@ build_llama_runner() {
 
   cmake --build cmake-android-out/examples/models/llama -j4 --config Release
 }
-install_flatc_from_source
 install_executorch_and_backend_lib
 build_llama_runner
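The same pushd/popd tokenizers checkout is repeated in several scripts in this commit. Run from the ExecuTorch root, the one-liner below is a rough equivalent, sketched under the assumption that extension/llm/tokenizers is the registered submodule path:

# Rough equivalent of the pushd/popd pattern above, run from the repository root.
# --recursive also initializes the tokenizers submodule's own nested submodules.
git submodule update --init --recursive extension/llm/tokenizers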

.ci/scripts/gather_benchmark_configs.py

Lines changed: 2 additions & 1 deletion
@@ -263,7 +263,8 @@ def is_valid_huggingface_model_id(model_name: str) -> bool:
 def get_benchmark_configs() -> Dict[str, Dict]:  # noqa: C901
     """
     Gather benchmark configurations for a given set of models on the target operating system and devices.
-
+    CHANGE IF this function's return changed:
+    extract_model_info() in executorch/.github/scripts/extract_benchmark_results.py IF YOU CHANGE THE RESULT OF THIS FUNCTION.
     Args:
         None

.ci/scripts/gather_test_models.py

Lines changed: 4 additions & 0 deletions
@@ -5,6 +5,9 @@
 # This source code is licensed under the BSD-style license found in the
 # LICENSE file in the root directory of this source tree.
 
+# WARNING: The CI runner logic should directly be in the corresponding yml files
+# This file will be deleted once the reference in periodic.yml is deleted.
+
 import itertools
 import json
 import os
@@ -30,6 +33,7 @@
         "dl3": "linux.4xlarge.memory",
         "emformer_join": "linux.4xlarge.memory",
         "emformer_predict": "linux.4xlarge.memory",
+        "phi-4-mini": "linux.4xlarge.memory",
     }
 }

.ci/scripts/test_ane_static_llama.sh

Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
+#!/bin/bash
+# Copyright (c) Qualcomm Innovation Center, Inc.
+# All rights reserved
+#
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+set -exu
+
+source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
+
+export EXECUTORCH_ROOT="$(dirname "${BASH_SOURCE[0]}")/../.."
+
+if [[ -z "${PYTHON_EXECUTABLE:-}" ]]; then
+  PYTHON_EXECUTABLE=python3
+fi
+
+which "${PYTHON_EXECUTABLE}"
+
+# Update tokenizers submodule
+pushd $EXECUTORCH_ROOT/extension/llm/tokenizers
+echo "Update tokenizers submodule"
+git submodule update --init
+popd
+
+pushd $EXECUTORCH_ROOT/examples/apple/coreml/llama
+
+# Download stories llama110m artifacts
+download_stories_model_artifacts
+
+python export.py -n model.pte -p params.json -c stories110M.pt --seq_length 32 --max_seq_length 64 --dtype fp16 --coreml-quantize c4w
+
+popd
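If the script above is run by hand, the exported program lands next to export.py because of the pushd. A trivial follow-up check from the repository root, as an illustration only and assuming export.py writes the -n path relative to its working directory:

# Illustration: confirm the CoreML export produced the .pte named by -n above.
ls -lh examples/apple/coreml/llama/model.pte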
File renamed without changes.

.ci/scripts/test_llama.sh

Lines changed: 4 additions & 0 deletions
@@ -173,6 +173,10 @@ cmake_install_executorch_libraries() {
 
 cmake_build_llama_runner() {
   echo "Building llama runner"
+  pushd extension/llm/tokenizers
+  echo "Updating tokenizers submodule"
+  git submodule update --init
+  popd
   dir="examples/models/llama"
   retry cmake \
     -DCMAKE_INSTALL_PREFIX=cmake-out \
New file (path not shown in this view)

Lines changed: 85 additions & 0 deletions
@@ -0,0 +1,85 @@
+#!/bin/bash
+# Copyright (c) Qualcomm Innovation Center, Inc.
+# All rights reserved
+#
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+set -exu
+
+source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
+
+export EXECUTORCH_ROOT="$(dirname "${BASH_SOURCE[0]}")/../.."
+
+if [[ -z "${PYTHON_EXECUTABLE:-}" ]]; then
+  PYTHON_EXECUTABLE=python3
+fi
+
+which "${PYTHON_EXECUTABLE}"
+
+# Update tokenizers submodule
+pushd $EXECUTORCH_ROOT/extension/llm/tokenizers
+echo "Update tokenizers submodule"
+git submodule update --init
+popd
+
+# Install ET with CMake
+cmake -DPYTHON_EXECUTABLE=python \
+    -DCMAKE_INSTALL_PREFIX=cmake-out \
+    -DEXECUTORCH_ENABLE_LOGGING=1 \
+    -DCMAKE_BUILD_TYPE=Release \
+    -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
+    -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
+    -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
+    -DEXECUTORCH_BUILD_XNNPACK=OFF \
+    -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
+    -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
+    -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
+    -Bcmake-out .
+cmake --build cmake-out -j16 --target install --config Release
+
+# Install llama runner with torchao
+cmake -DPYTHON_EXECUTABLE=python \
+    -DCMAKE_PREFIX_PATH=$(python -c 'from distutils.sysconfig import get_python_lib; print(get_python_lib())') \
+    -DCMAKE_BUILD_TYPE=Release \
+    -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
+    -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
+    -DEXECUTORCH_BUILD_XNNPACK=OFF \
+    -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
+    -DEXECUTORCH_BUILD_TORCHAO=ON \
+    -Bcmake-out/examples/models/llama \
+    examples/models/llama
+cmake --build cmake-out/examples/models/llama -j16 --config Release
+
+# Download stories llama110m artifacts
+download_stories_model_artifacts
+
+echo "Creating tokenizer.bin"
+$PYTHON_EXECUTABLE -m extension.llm.tokenizer.tokenizer -t tokenizer.model -o tokenizer.bin
+
+# Export model
+LLAMA_CHECKPOINT=stories110M.pt
+LLAMA_PARAMS=params.json
+MODEL_OUT=model.pte
+TOKENIZER=tokenizer.bin
+
+# Set low-bit quantization parameters
+QLINEAR_BITWIDTH=3 # Can be 1-8
+QLINEAR_GROUP_SIZE=128 # Must be multiple of 16
+QEMBEDDING_BITWIDTH=4 # Can be 1-8
+QEMBEDDING_GROUP_SIZE=32 # Must be multiple of 16
+
+${PYTHON_EXECUTABLE} -m examples.models.llama.export_llama \
+    --checkpoint "${LLAMA_CHECKPOINT:?}" \
+    --params "${LLAMA_PARAMS:?}" \
+    -kv \
+    --use_sdpa_with_kv_cache \
+    --output_name=${MODEL_OUT} \
+    -qmode "torchao:8da${QLINEAR_BITWIDTH}w" \
+    --group_size ${QLINEAR_GROUP_SIZE} \
+    -E "torchao:${QEMBEDDING_BITWIDTH},${QEMBEDDING_GROUP_SIZE}" \
+    --disable_dynamic_shape \
+    -d fp32
+
+# Test run
+./cmake-out/examples/models/llama/llama_main --model_path=$MODEL_OUT --tokenizer_path=$TOKENIZER --prompt="Once upon a time,"
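The bitwidth and group-size comments above are the only documented constraints on the low-bit settings. The guard below is an illustrative check of those constraints, not something the CI script runs:

# Illustrative guard for the low-bit quantization settings above (not part of the CI script).
QLINEAR_BITWIDTH=3
QLINEAR_GROUP_SIZE=128
QEMBEDDING_BITWIDTH=4
QEMBEDDING_GROUP_SIZE=32
for bw in "${QLINEAR_BITWIDTH}" "${QEMBEDDING_BITWIDTH}"; do
  (( bw >= 1 && bw <= 8 )) || { echo "bitwidth ${bw} is outside 1-8" >&2; exit 1; }
done
for gs in "${QLINEAR_GROUP_SIZE}" "${QEMBEDDING_GROUP_SIZE}"; do
  (( gs % 16 == 0 )) || { echo "group size ${gs} is not a multiple of 16" >&2; exit 1; }
done
echo "quantization settings satisfy the documented constraints"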

.ci/scripts/test_model.sh

Lines changed: 37 additions & 4 deletions
@@ -100,6 +100,15 @@ test_model() {
     rm "./${MODEL_NAME}.pte"
     return # Skip running with portable executor runnner since portable doesn't support Qwen's biased linears.
   fi
+  if [[ "${MODEL_NAME}" == "phi-4-mini" ]]; then
+    # Install requirements for export_llama
+    bash examples/models/llama/install_requirements.sh
+    # Test export_llama script: python3 -m examples.models.llama.export_llama.
+    "${PYTHON_EXECUTABLE}" -m examples.models.llama.export_llama --model "${MODEL_NAME}" -c examples/models/llama/params/demo_rand_params.pth -p examples/models/phi-4-mini/config.json
+    run_portable_executor_runner
+    rm "./${MODEL_NAME}.pte"
+    return
+  fi
 
   # Export a basic .pte and run the model.
   "${PYTHON_EXECUTABLE}" -m examples.portable.scripts.export --model_name="${MODEL_NAME}" "${STRICT}"
@@ -164,6 +173,7 @@ test_model_with_qnn() {
   export LD_LIBRARY_PATH=$QNN_SDK_ROOT/lib/x86_64-linux-clang/
   export PYTHONPATH=$EXECUTORCH_ROOT/..
 
+  EXTRA_FLAGS=""
   if [[ "${MODEL_NAME}" == "dl3" ]]; then
     EXPORT_SCRIPT=deeplab_v3
   elif [[ "${MODEL_NAME}" == "mv3" ]]; then
@@ -176,6 +186,12 @@ test_model_with_qnn() {
     EXPORT_SCRIPT=inception_v3
   elif [[ "${MODEL_NAME}" == "vit" ]]; then
     EXPORT_SCRIPT=torchvision_vit
+  elif [[ "${MODEL_NAME}" == "mb" ]]; then
+    EXPORT_SCRIPT=mobilebert_fine_tune
+    EXTRA_FLAGS="--num_epochs 1"
+    pip install scikit-learn
+  elif [[ "${MODEL_NAME}" == "w2l" ]]; then
+    EXPORT_SCRIPT=wav2letter
   elif [[ "${MODEL_NAME}" == "edsr" ]]; then
     EXPORT_SCRIPT=edsr
     # Additional deps for edsr
@@ -189,13 +205,18 @@ test_model_with_qnn() {
   # TODO(guangyang): Make QNN chipset matches the target device
   QNN_CHIPSET=SM8450
 
-  "${PYTHON_EXECUTABLE}" -m examples.qualcomm.scripts.${EXPORT_SCRIPT} -b ${CMAKE_OUTPUT_DIR} -m ${QNN_CHIPSET} --compile_only
+  "${PYTHON_EXECUTABLE}" -m examples.qualcomm.scripts.${EXPORT_SCRIPT} -b ${CMAKE_OUTPUT_DIR} -m ${QNN_CHIPSET} --compile_only $EXTRA_FLAGS
   EXPORTED_MODEL=$(find "./${EXPORT_SCRIPT}" -type f -name "${MODEL_NAME}*.pte" -print -quit)
 }
 
+# Run CoreML tests.
+#
+# @param should_test If true, build and test the model using the coreml_executor_runner.
 test_model_with_coreml() {
-  if [[ "${BUILD_TOOL}" == "buck2" ]]; then
-    echo "coreml doesn't support buck2."
+  local should_test="$1"
+
+  if [[ "${BUILD_TOOL}" != "cmake" ]]; then
+    echo "coreml only supports cmake."
     exit 1
   fi
 
@@ -213,6 +234,14 @@ test_model_with_coreml() {
     echo "No .pte file found"
     exit 1
   fi
+
+  # Run the model
+  if [ "${should_test}" = true ]; then
+    echo "Testing exported model with coreml_executor_runner..."
+    local out_dir=$(mktemp -d)
+    COREML_EXECUTOR_RUNNER_OUT_DIR="${out_dir}" examples/apple/coreml/scripts/build_executor_runner.sh
+    "${out_dir}/coreml_executor_runner" --model_path "${EXPORTED_MODEL}"
+  fi
 }
 
 test_model_with_mps() {
@@ -231,7 +260,11 @@ elif [[ "${BACKEND}" == *"qnn"* ]]; then
   fi
 elif [[ "${BACKEND}" == *"coreml"* ]]; then
   echo "Testing ${MODEL_NAME} with coreml..."
-  test_model_with_coreml
+  should_test_coreml=false
+  if [[ "${BACKEND}" == *"test"* ]]; then
+    should_test_coreml=true
+  fi
+  test_model_with_coreml "${should_test_coreml}"
   if [[ $? -eq 0 ]]; then
     prepare_artifacts_upload
   fi
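The dispatch at the bottom of test_model.sh now enables the CoreML runner whenever the BACKEND string contains both "coreml" and "test". The loop below illustrates that check in isolation, with the two nested conditions collapsed into one; the "coreml-test" value is an assumption about how CI names the backend, not something this diff shows.

# Isolated illustration of the BACKEND gating added above; "coreml-test" is an assumed value.
for BACKEND in coreml coreml-test; do
  should_test_coreml=false
  if [[ "${BACKEND}" == *"coreml"* && "${BACKEND}" == *"test"* ]]; then
    should_test_coreml=true
  fi
  echo "${BACKEND}: should_test_coreml=${should_test_coreml}"
done
# Prints:
#   coreml: should_test_coreml=false
#   coreml-test: should_test_coreml=true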
