
Commit 1925d00

Merge branch 'main' into arm64_enablement
2 parents 5e61df8 + 966da7e commit 1925d00

File tree

3 files changed: +15 additions, −11 deletions

  .github/workflows/build-wheels-aarch64-linux.yml
  packaging/pre_build_script_arm64.sh
  torchvision/models/detection/anchor_utils.py

.github/workflows/build-wheels-aarch64-linux.yml

Lines changed: 1 addition & 1 deletion

@@ -25,7 +25,7 @@ jobs:
       os: linux-aarch64
       test-infra-repository: pytorch/test-infra
       test-infra-ref: main
-      with-cuda: disable
+      with-cuda: enable
   build:
     needs: generate-matrix
     strategy:

packaging/pre_build_script_arm64.sh

Lines changed: 5 additions & 5 deletions

@@ -13,8 +13,6 @@ export TRIPLET_FILE="triplets/arm64-windows.cmake"
 export PYTORCH_VERSION="$PYTORCH_VERSION"
 export CHANNEL="$CHANNEL"
 
-echo "channel: $CHANNEL"
-
 # Dependencies
 mkdir -p "$DOWNLOADS_DIR"
 mkdir -p "$DEPENDENCIES_DIR"
@@ -27,15 +25,17 @@ git clone https://github.com/microsoft/vcpkg.git
 cd vcpkg || exit
 ./bootstrap-vcpkg.sh
 
-# # Set vcpkg to only build release packages
+# Set vcpkg to only build release packages
+
 echo "set(VCPKG_BUILD_TYPE release)" >> "$TRIPLET_FILE"
 
 # Install dependencies using vcpkg
 ./vcpkg install libjpeg-turbo:arm64-windows --x-install-root="$DEPENDENCIES_DIR"
 ./vcpkg install libwebp:arm64-windows --x-install-root="$DEPENDENCIES_DIR"
 ./vcpkg install libpng[tools]:arm64-windows --x-install-root="$DEPENDENCIES_DIR"
 
-# Copy files using cp (replace robocopy)
+# Copy files using cp
+
 cp "$DEPENDENCIES_DIR/arm64-windows/lib/libpng16.lib" "$DEPENDENCIES_DIR/arm64-windows/lib/libpng.lib"
 cp "$DEPENDENCIES_DIR/arm64-windows/bin/libpng16.dll" "$DEPENDENCIES_DIR/arm64-windows/bin/libpng.dll"
 cp "$DEPENDENCIES_DIR/arm64-windows/bin/libpng16.pdb" "$DEPENDENCIES_DIR/arm64-windows/bin/libpng.pdb"
@@ -62,7 +62,7 @@ if [ "$CHANNEL" = "release" ]; then
     pip3 install --pre torch --index-url https://download.pytorch.org/whl/torch/
 elif [ "$CHANNEL" = "test" ]; then
     echo "Installing PyTorch version $PYTORCH_VERSION."
-    pip3 install torch=="$PYTORCH_VERSION"
+    pip3 install --pre torch=="$PYTORCH_VERSION" --index-url https://download.pytorch.org/whl/test
 else
     echo "CHANNEL is not set, installing PyTorch from nightly."
     pip3 install --pre torch --index-url https://download.pytorch.org/whl/nightly/cpu
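For context, the CHANNEL handling this script implements after the change can be summarized as follows. This is an illustrative Python rendering only, not part of the repository; the real logic stays in the shell script, and the function name is made up for the sketch.

# Illustrative rendering of the CHANNEL branches above -- not part of the repository.
import subprocess

def install_torch(channel: str, pytorch_version: str) -> None:
    """Mirror the pip3 invocations the shell script runs for each channel (hypothetical helper)."""
    if channel == "release":
        cmd = ["pip3", "install", "--pre", "torch",
               "--index-url", "https://download.pytorch.org/whl/torch/"]
    elif channel == "test":
        # The test channel now pins the version and pulls from the test index.
        cmd = ["pip3", "install", "--pre", f"torch=={pytorch_version}",
               "--index-url", "https://download.pytorch.org/whl/test"]
    else:
        # Default: nightly CPU wheels.
        cmd = ["pip3", "install", "--pre", "torch",
               "--index-url", "https://download.pytorch.org/whl/nightly/cpu"]
    subprocess.run(cmd, check=True)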

torchvision/models/detection/anchor_utils.py

Lines changed: 9 additions & 5 deletions

@@ -74,16 +74,20 @@ def generate_anchors(
         return base_anchors.round()
 
     def set_cell_anchors(self, dtype: torch.dtype, device: torch.device):
-        self.cell_anchors = [cell_anchor.to(dtype=dtype, device=device) for cell_anchor in self.cell_anchors]
+        return [cell_anchor.to(dtype=dtype, device=device) for cell_anchor in self.cell_anchors]
 
     def num_anchors_per_location(self) -> list[int]:
         return [len(s) * len(a) for s, a in zip(self.sizes, self.aspect_ratios)]
 
     # For every combination of (a, (g, s), i) in (self.cell_anchors, zip(grid_sizes, strides), 0:2),
     # output g[i] anchors that are s[i] distance apart in direction i, with the same dimensions as a.
-    def grid_anchors(self, grid_sizes: list[list[int]], strides: list[list[Tensor]]) -> list[Tensor]:
+    def grid_anchors(
+        self,
+        grid_sizes: list[list[int]],
+        strides: list[list[Tensor]],
+        cell_anchors: list[torch.Tensor],
+    ) -> list[Tensor]:
         anchors = []
-        cell_anchors = self.cell_anchors
         torch._assert(cell_anchors is not None, "cell_anchors should not be None")
         torch._assert(
             len(grid_sizes) == len(strides) == len(cell_anchors),
@@ -123,8 +127,8 @@ def forward(self, image_list: ImageList, feature_maps: list[Tensor]) -> list[Tensor]:
             ]
             for g in grid_sizes
         ]
-        self.set_cell_anchors(dtype, device)
-        anchors_over_all_feature_maps = self.grid_anchors(grid_sizes, strides)
+        cell_anchors = self.set_cell_anchors(dtype, device)
+        anchors_over_all_feature_maps = self.grid_anchors(grid_sizes, strides, cell_anchors)
         anchors: list[list[torch.Tensor]] = []
         for _ in range(len(image_list.image_sizes)):
             anchors_in_image = [anchors_per_feature_map for anchors_per_feature_map in anchors_over_all_feature_maps]
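For orientation, here is a minimal usage sketch of the refactored methods: set_cell_anchors now returns the dtype/device-converted cell anchors instead of mutating self.cell_anchors, and grid_anchors receives them as an explicit argument. This sketch is not part of the commit; the anchor sizes, aspect ratios, input resolution, and feature-map shape are made-up example values, and it assumes a torchvision build that already includes this change.

# Illustrative sketch only -- assumes a torchvision build containing this refactor.
import torch
from torchvision.models.detection.anchor_utils import AnchorGenerator

anchor_gen = AnchorGenerator(sizes=((32,),), aspect_ratios=((0.5, 1.0, 2.0),))

image_size = (224, 224)                      # assumed input resolution
feature_maps = [torch.rand(1, 8, 28, 28)]    # one feature level (stride 8), made-up shape
grid_sizes = [fm.shape[-2:] for fm in feature_maps]
strides = [
    [
        torch.tensor(image_size[0] // g[0], dtype=torch.int64),
        torch.tensor(image_size[1] // g[1], dtype=torch.int64),
    ]
    for g in grid_sizes
]

dtype, device = feature_maps[0].dtype, feature_maps[0].device
# set_cell_anchors() now returns the converted anchors rather than storing them on the module,
# and grid_anchors() takes them explicitly as its third argument.
cell_anchors = anchor_gen.set_cell_anchors(dtype, device)
anchors_per_level = anchor_gen.grid_anchors(grid_sizes, strides, cell_anchors)
print(anchors_per_level[0].shape)            # torch.Size([2352, 4]): 28 * 28 locations * 3 anchors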
