
Commit 12f4431

Tiny fixes to make gcc pedantic build happy (#8933)
### Summary

Tiny fixes to make the gcc 13 baremetal, pedantic build happy.

### Test plan

CI
1 parent 320d555 · commit 12f4431

File tree

9 files changed: +15 −11 lines changed


kernels/portable/cpu/op_constant_pad_nd.cpp

+1 −1

@@ -189,7 +189,7 @@ Tensor& constant_pad_nd_out(
       CTYPE value_v;
       ET_SWITCH_SCALAR_OBJ_TYPES(
           value_type, ctx, "constant_pad_nd.out", CTYPE_VALUE, [&]() {
-            CTYPE_VALUE val;
+            CTYPE_VALUE val = 0;
             utils::extract_scalar(value, &val);
             value_v = static_cast<CTYPE>(val);
           });
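This is the pattern behind most of the commit: under a pedantic GCC 13 build (warnings treated as errors), the compiler can emit -Wmaybe-uninitialized here, because utils::extract_scalar() fills `val` through a pointer and GCC cannot prove that every path through that call writes to it before `val` is read. Defaulting the scalar to 0 makes the later read provably defined. A minimal sketch of the warning class, using a hypothetical extract_like() stand-in rather than the real ExecuTorch helper:

// Minimal sketch (not ExecuTorch code). extract_like() stands in for
// utils::extract_scalar(): it writes through a pointer but can fail,
// so the compiler cannot prove *out is always written.
#include <cstdint>

// Hypothetical helper: on the failure path, *out is left untouched.
bool extract_like(int in, int64_t* out) {
  if (in < 0) {
    return false;
  }
  *out = in;
  return true;
}

int64_t before(int in) {
  int64_t val; // pedantic g++-13 may flag: 'val' may be used uninitialized
  extract_like(in, &val);
  return val; // read happens even when extract_like() did not write
}

int64_t after(int in) {
  int64_t val = 0; // the commit's fix: a harmless default
  extract_like(in, &val);
  return val;
}

The same one-line default is applied in op_fill.cpp, op_hardtanh.cpp, op_leaky_relu.cpp, op_masked_fill.cpp, and op_scalar_tensor.cpp below.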

kernels/portable/cpu/op_fill.cpp

+1 −1

@@ -45,7 +45,7 @@ Tensor& fill_scalar_out(
   ET_SWITCH_REALHBBF16_TYPES(a_type, ctx, "fill.Scalar_out", CTYPE_A, [&] {
     CTYPE_A b_casted;
     ET_SWITCH_SCALAR_OBJ_TYPES(b_type, ctx, "fill.Scalar_out", CTYPE_B, [&] {
-      CTYPE_B b_val;
+      CTYPE_B b_val = 0;
       utils::extract_scalar(b, &b_val);
       b_casted = static_cast<CTYPE_A>(b_val);
     });

kernels/portable/cpu/op_hardtanh.cpp

+2 −2

@@ -49,14 +49,14 @@ Tensor& hardtanh_out(
   ET_SWITCH_REALHBF16_TYPES(in_type, ctx, "hardtanh.out", CTYPE, [&]() {
     CTYPE min_casted;
     ET_SWITCH_SCALAR_OBJ_TYPES(min_type, ctx, "hardtanh.out", CTYPE_MIN, [&]() {
-      CTYPE_MIN min_val;
+      CTYPE_MIN min_val = 0;
       utils::extract_scalar(min, &min_val);
       min_casted = static_cast<CTYPE>(min_val);
     });

     CTYPE max_casted;
     ET_SWITCH_SCALAR_OBJ_TYPES(max_type, ctx, "hardtanh.out", CTYPE_MAX, [&]() {
-      CTYPE_MAX max_val;
+      CTYPE_MAX max_val = 0;
       utils::extract_scalar(max, &max_val);
       max_casted = static_cast<CTYPE>(max_val);
     });

kernels/portable/cpu/op_leaky_relu.cpp

+2 −2

@@ -45,10 +45,10 @@ Tensor& leaky_relu_out(
   ET_KERNEL_CHECK(ctx, in_type == out_type, InvalidArgument, out);

   ET_SWITCH_FLOATHBF16_TYPES(in_type, ctx, "leaky_relu.out", CTYPE, [&]() {
-    CTYPE negative_slope_casted;
+    CTYPE negative_slope_casted = 0;
     ET_SWITCH_SCALAR_OBJ_TYPES(
         sc_type, ctx, "leaky_relu.out", CTYPE_MIN, [&]() {
-          CTYPE_MIN negative_slope_val;
+          CTYPE_MIN negative_slope_val = 0;
           utils::extract_scalar(negative_slope, &negative_slope_val);
           negative_slope_casted = static_cast<CTYPE>(negative_slope_val);
         });

kernels/portable/cpu/op_masked_fill.cpp

+1 −1

@@ -46,7 +46,7 @@ Tensor& masked_fill_scalar_out(
       in_type, ctx, "masked_fill.Scalar_out", CTYPE, [&]() {
         ET_SWITCH_REAL_TYPES_AND(
             Bool, val_type, ctx, "masked_fill.Scalar_out", CTYPE_VAL, [&]() {
-              CTYPE_VAL value_v;
+              CTYPE_VAL value_v = 0;
               utils::extract_scalar(value, &value_v);
               CTYPE val = static_cast<CTYPE>(value_v);

kernels/portable/cpu/op_scalar_tensor.cpp

+1 −1

@@ -28,7 +28,7 @@ scalar_tensor_out(KernelRuntimeContext& ctx, const Scalar& s, Tensor& out) {
   ET_SWITCH_REAL_TYPES_AND3(
       Half, Bool, BFloat16, out_type, ctx, name, CTYPE, [&]() {
         ET_SWITCH_SCALAR_OBJ_TYPES(s_type, ctx, name, CTYPE_S, [&]() {
-          CTYPE_S val_s;
+          CTYPE_S val_s = 0;
           utils::extract_scalar(s, &val_s);
           out.mutable_data_ptr<CTYPE>()[0] = convert<CTYPE, CTYPE_S>(val_s);
         });

kernels/portable/cpu/util/repeat_util.cpp

+1 −1

@@ -68,7 +68,7 @@ bool check_repeat_args(
   }
   for (size_t i = 0; i < repeats.size(); i++) {
     ET_CHECK_OR_RETURN_FALSE(
-        reformat_self_size[i] * repeats[i] ==
+        reformat_self_size[i] * static_cast<size_t>(repeats[i]) ==
            static_cast<uint64_t>(out.size(i)),
        "Expect out size at dimension %zu is %" PRId64 ", but now is %zd",
        i,
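Here the diagnostic is a signed/unsigned mix rather than an uninitialized read: `repeats[i]` is a signed 64-bit value while `reformat_self_size[i]` is unsigned, so the multiplication implicitly converts the signed operand, which pedantic GCC builds flag (e.g. under -Wsign-conversion). The explicit static_cast leaves the arithmetic identical for non-negative repeats while making the conversion deliberate. A hedged sketch with stand-in types (the real containers differ):

// Illustrative types only; the real code multiplies an element of a
// size_t-valued shape array by an element of an int64_t repeats list.
#include <cstddef>
#include <cstdint>

bool size_matches(std::size_t self_size, int64_t repeat, uint64_t out_size) {
  // Implicit version: 'repeat' is silently converted to an unsigned type,
  // which -Wsign-conversion (and friends) report in pedantic builds.
  // return self_size * repeat == out_size;

  // Explicit version, matching the commit: state the conversion.
  return self_size * static_cast<std::size_t>(repeat) == out_size;
}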

runtime/executor/method_meta.cpp

+5 −1

@@ -233,7 +233,11 @@ bool MethodMeta::uses_backend(const char* backend_name) const {
   const auto delegates = s_plan_->delegates();
   for (size_t i = 0; i < delegates->size(); i++) {
     auto delegate = delegates->Get(i);
-    if (std::strcmp(delegate->id()->c_str(), backend_name) == 0) {
+    auto backend_name_len = std::strlen(backend_name);
+    auto delegate_id_len = delegate->id()->size();
+    if (backend_name_len == delegate_id_len &&
+        std::strncmp(delegate->id()->c_str(), backend_name, backend_name_len) ==
+            0) {
       return true;
     }
   }
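The strcmp-to-strncmp change gives the compiler an explicit bound on how far the comparison may read; the likely trigger is a string-read diagnostic (such as -Wstringop-overread) that GCC 13 raises when it cannot bound a strcmp over a pointer it cannot analyze, here the flatbuffer-backed delegate id. The length pre-check also short-circuits ids of different lengths without touching their bytes. A small sketch of the pattern, with a hypothetical id_equals() helper rather than the MethodMeta member:

#include <cstddef>
#include <cstring>

// Hypothetical helper mirroring the new check: the ids are equal only if
// their lengths match and the first `id_len` bytes compare equal. The
// explicit bound replaces strcmp's reliance on finding a terminator.
bool id_equals(const char* id, std::size_t id_len, const char* name) {
  std::size_t name_len = std::strlen(name);
  return name_len == id_len && std::strncmp(id, name, name_len) == 0;
}

Behavior is unchanged for properly terminated ids: strcmp already required full-length equality, so the rewrite only changes how that requirement is expressed to the compiler.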

tools/cmake/Utils.cmake

+1 −1

@@ -70,7 +70,7 @@ function(executorch_print_configuration_summary)
   message(STATUS "  EXECUTORCH_BUILD_EXTENSION_FLAT_TENSOR        : "
                  "${EXECUTORCH_BUILD_EXTENSION_FLAT_TENSOR}"
   )
-  message(STATUS "  EXECUTORCH_BUILD_EXTENSION_LLM : "
+  message(STATUS "  EXECUTORCH_BUILD_EXTENSION_LLM                : "
                  "${EXECUTORCH_BUILD_EXTENSION_LLM}"
   )
   message(STATUS "  EXECUTORCH_BUILD_EXTENSION_MODULE             : "
