
Commit 30eca6a

remove ET_TRY macros
1 parent f276bf0 commit 30eca6a
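
Every call site in this commit follows the same shape: the ET_ALLOCATE_*_OR_RETURN_ERROR macro is replaced by a direct call on the MemoryAllocator from the backend init context, an explicit nullptr check that returns Error::MemoryAllocationFailed, and (as before) a placement new to construct the object. A minimal sketch of the new pattern, where MyDelegate is a hypothetical delegate type and the surrounding function returns Result<DelegateHandle*> like the backend init() methods below; this is an illustration of the pattern, not code from any of the changed files:

    Result<DelegateHandle*> init_sketch(BackendInitContext& context) {
      // allocateInstance<T>() hands back aligned but uninitialized storage
      // for one T, or nullptr if the runtime arena is exhausted.
      auto* delegate =
          context.get_runtime_allocator()->allocateInstance<MyDelegate>();
      if (delegate == nullptr) {
        return Error::MemoryAllocationFailed;
      }
      // Construct in place; the matching destroy() must run the destructor.
      new (delegate) MyDelegate();
      return delegate;
    }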

12 files changed: 130 additions & 353 deletions


backends/apple/mps/runtime/MPSBackend.mm

Lines changed: 5 additions & 2 deletions
@@ -43,8 +43,11 @@ bool is_available() const override {
       BackendInitContext& context,
       FreeableBuffer* processed,
       ArrayRef<CompileSpec> compile_specs) const override {
-    auto executor = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-        context.get_runtime_allocator(), mps::delegate::MPSExecutor);
+    auto executor = context.get_runtime_allocator()->allocateInstance<mps::delegate::MPSExecutor>();
+    if (executor == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     // NOTE: Since we use placement new and since this type is not trivially
     // destructible, we must call the destructor manually in destroy().
     new (executor) mps::delegate::MPSExecutor;
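
The NOTE kept in context above applies to every delegate allocated this way: allocateInstance() only returns raw storage, so the object is constructed with placement new and the backend's destroy() has to invoke the destructor by hand. A hedged sketch of that pairing; the destroy() body shown here is illustrative, not the actual contents of MPSBackend.mm:

    void destroy(DelegateHandle* handle) const override {
      if (handle != nullptr) {
        auto* executor = static_cast<mps::delegate::MPSExecutor*>(handle);
        // The storage came from the runtime MemoryAllocator arena, so there
        // is nothing to free; only the destructor needs to run.
        executor->~MPSExecutor();
      }
    }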

backends/arm/runtime/EthosUBackend.cpp

Lines changed: 5 additions & 2 deletions
@@ -120,8 +120,11 @@ class EthosUBackend final : public ::executorch::runtime::BackendInterface {
     }
 
     MemoryAllocator* allocator = context.get_runtime_allocator();
-    ExecutionHandle* handle =
-        ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(allocator, ExecutionHandle);
+    ExecutionHandle* handle = allocator->allocateInstance<ExecutionHandle>();
+    if (handle == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     handle->processed = processed;
 
     // Return the same buffer we were passed - this data will be

backends/mediatek/runtime/NeuronBackend.cpp

Lines changed: 6 additions & 2 deletions
@@ -68,8 +68,12 @@ Result<DelegateHandle*> NeuronBackend::init(
       processed->size());
 
   MemoryAllocator* runtime_allocator = context.get_runtime_allocator();
-  NeuronExecuTorchDelegate* delegate = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-      runtime_allocator, NeuronExecuTorchDelegate);
+  NeuronExecuTorchDelegate* delegate =
+      runtime_allocator->allocateInstance<NeuronExecuTorchDelegate>();
+  if (delegate == nullptr) {
+    return Error::MemoryAllocationFailed;
+  }
+
   new (delegate) NeuronExecuTorchDelegate();
 
   if (delegate == nullptr) {

backends/qualcomm/runtime/QnnExecuTorchBackend.cpp

Lines changed: 4 additions & 2 deletions
@@ -66,8 +66,10 @@ Result<DelegateHandle*> QnnExecuTorchBackend::init(
 
   // Create QnnManager
   MemoryAllocator* runtime_allocator = context.get_runtime_allocator();
-  QnnManager* qnn_manager =
-      ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(runtime_allocator, QnnManager);
+  QnnManager* qnn_manager = runtime_allocator->allocateInstance<QnnManager>();
+  if (qnn_manager == nullptr) {
+    return Error::MemoryAllocationFailed;
+  }
 
   // NOTE: Since we use placement new and since this type is not trivially
   // destructible, we must call the destructor manually in destroy().

backends/vulkan/runtime/VulkanBackend.cpp

Lines changed: 5 additions & 2 deletions
@@ -510,8 +510,11 @@ class VulkanBackend final : public ::executorch::runtime::BackendInterface {
       BackendInitContext& context,
       FreeableBuffer* processed,
       ArrayRef<CompileSpec> compile_specs) const override {
-    ComputeGraph* compute_graph = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-        context.get_runtime_allocator(), ComputeGraph);
+    ComputeGraph* compute_graph =
+        context.get_runtime_allocator()->allocateInstance<ComputeGraph>();
+    if (compute_graph == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
 
     new (compute_graph) ComputeGraph(get_graph_config(compile_specs));

backends/xnnpack/runtime/XNNPACKBackend.cpp

Lines changed: 4 additions & 2 deletions
@@ -73,8 +73,10 @@ class XnnpackBackend final : public ::executorch::runtime::BackendInterface {
       BackendInitContext& context,
       FreeableBuffer* processed,
      ArrayRef<CompileSpec> compile_specs) const override {
-    auto executor = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-        context.get_runtime_allocator(), xnnpack::delegate::XNNExecutor);
+    auto executor = context.get_runtime_allocator()->allocateInstance<xnnpack::delegate::XNNExecutor>();
+    if (executor == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
 
 #ifdef ENABLE_XNNPACK_SHARED_WORKSPACE
     // This is needed to serialize access to xnn_create_runtime which is not

exir/backend/test/demos/rpc/ExecutorBackend.cpp

Lines changed: 47 additions & 17 deletions
@@ -72,8 +72,11 @@ class ExecutorBackend final : public ::executorch::runtime::BackendInterface {
     // `processed` contains an executorch program. Wrap it in a DataLoader that
     // will return the data directly without copying it.
     MemoryAllocator* runtime_allocator = context.get_runtime_allocator();
-    auto loader = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-        runtime_allocator, BufferDataLoader);
+    auto loader = runtime_allocator->allocateInstance<BufferDataLoader>();
+    if (loader == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     new (loader) BufferDataLoader(processed->data(), processed->size());
     // Can't free `processed` because the program will point into that memory.
 
@@ -84,8 +87,11 @@ class ExecutorBackend final : public ::executorch::runtime::BackendInterface {
     }
 
     // Move the Program off the stack.
-    auto client_program =
-        ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(runtime_allocator, Program);
+    auto client_program = runtime_allocator->allocateInstance<Program>();
+    if (client_program == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     new (client_program) Program(std::move(program_result.get()));
 
     Result<MethodMeta> method_meta = client_program->method_meta("forward");

@@ -97,35 +103,56 @@ class ExecutorBackend final : public ::executorch::runtime::BackendInterface {
     // Building all different allocators for the client executor
     auto num_memory_planned_buffers = method_meta->num_memory_planned_buffers();
 
-    Span<uint8_t>* memory_planned_buffers = ET_ALLOCATE_LIST_OR_RETURN_ERROR(
-        runtime_allocator, Span<uint8_t>, num_memory_planned_buffers);
+    Span<uint8_t>* memory_planned_buffers =
+        runtime_allocator->allocateList<Span<uint8_t>>(
+            num_memory_planned_buffers);
+    if (memory_planned_buffers == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
 
     for (size_t id = 0; id < num_memory_planned_buffers; ++id) {
       size_t buffer_size = static_cast<size_t>(
          method_meta->memory_planned_buffer_size(id).get());
-      uint8_t* buffer_i = ET_ALLOCATE_LIST_OR_RETURN_ERROR(
-          runtime_allocator, uint8_t, buffer_size);
+      uint8_t* buffer_i = runtime_allocator->allocateList<uint8_t>(buffer_size);
+      if (buffer_i == nullptr) {
+        return Error::MemoryAllocationFailed;
+      }
+
       memory_planned_buffers[id] = {buffer_i, buffer_size};
     }
 
-    auto client_planned_memory = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-        runtime_allocator, HierarchicalAllocator);
+    auto client_planned_memory =
+        runtime_allocator->allocateInstance<HierarchicalAllocator>();
+    if (client_planned_memory == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     new (client_planned_memory) HierarchicalAllocator(
         {memory_planned_buffers, num_memory_planned_buffers});
 
     // Allocate some memory from runtime allocator for the client executor, in
     // real case, like if it's an executor in dsp, it should allocate memory
     // dedicated to this specific hardware
-    auto client_method_allocator = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(
-        runtime_allocator, MemoryAllocator);
+    auto client_method_allocator =
+        runtime_allocator->allocateInstance<MemoryAllocator>();
+    if (client_method_allocator == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     const size_t kClientRuntimeMemorySize = 4 * 1024U;
-    auto runtime_pool = ET_ALLOCATE_OR_RETURN_ERROR(
-        runtime_allocator, kClientRuntimeMemorySize);
+    auto runtime_pool = runtime_allocator->allocate(kClientRuntimeMemorySize);
+    if (runtime_pool == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
     new (client_method_allocator) MemoryAllocator(
         kClientRuntimeMemorySize, static_cast<uint8_t*>(runtime_pool));
 
     auto client_memory_manager =
-        ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(runtime_allocator, MemoryManager);
+        runtime_allocator->allocateInstance<MemoryManager>();
+    if (client_memory_manager == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     new (client_memory_manager)
         MemoryManager(client_method_allocator, client_planned_memory);
 
@@ -140,8 +167,11 @@ class ExecutorBackend final : public ::executorch::runtime::BackendInterface {
       return method_res.error();
     }
 
-    auto client_method =
-        ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(runtime_allocator, Method);
+    auto client_method = runtime_allocator->allocateInstance<Method>();
+    if (client_method == nullptr) {
+      return Error::MemoryAllocationFailed;
+    }
+
     new (client_method) Method(std::move(method_res.get()));
 
     // Return the client method so it will be passed to `execute()` as
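
Besides allocateInstance(), this file also exercises the list and raw-byte variants of the allocator API under the same error convention. A standalone sketch of those two calls; allocator, n, and kPoolSize are placeholders for illustration, not the file's actual variables:

    Error plan_buffers_sketch(MemoryAllocator* allocator, size_t n) {
      // allocateList<T>(n) returns storage for n contiguous Ts, or nullptr.
      Span<uint8_t>* spans = allocator->allocateList<Span<uint8_t>>(n);
      if (spans == nullptr) {
        return Error::MemoryAllocationFailed;
      }
      for (size_t i = 0; i < n; ++i) {
        // allocate(nbytes) returns nbytes of raw, suitably aligned storage.
        const size_t kPoolSize = 4 * 1024U;
        uint8_t* pool = static_cast<uint8_t*>(allocator->allocate(kPoolSize));
        if (pool == nullptr) {
          return Error::MemoryAllocationFailed;
        }
        spans[i] = {pool, kPoolSize};
      }
      return Error::Ok;
    }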

runtime/core/memory_allocator.h

Lines changed: 0 additions & 159 deletions
@@ -198,165 +198,6 @@ class MemoryAllocator {
   int32_t prof_id_ = -1;
 };
 
-#if ET_HAVE_GNU_STATEMENT_EXPRESSIONS
-/**
- * Tries allocating from the specified MemoryAllocator*.
- *
- * - On success, returns a pointer to the allocated buffer.
- * - On failure, executes the provided code block, which must return or panic.
- *
- * Example:
- * @code
- *   char* buf = ET_TRY_ALLOCATE_OR(
- *       memory_allocator, bufsize, {
- *         *out_err = Error::MemoryAllocationFailed;
- *         return nullopt;
- *       });
- * @endcode
- */
-#define ET_TRY_ALLOCATE_OR(memory_allocator__, nbytes__, ...) \
-  ({ \
-    void* et_try_allocate_result = memory_allocator__->allocate(nbytes__); \
-    if (et_try_allocate_result == nullptr && nbytes__ > 0) { \
-      __VA_ARGS__ \
-      /* The args must return. */ \
-      ET_UNREACHABLE(); \
-    } \
-    et_try_allocate_result; \
-  })
-
-/**
- * Tries allocating an instance of type__ from the specified MemoryAllocator*.
- *
- * - On success, returns a pointer to the allocated buffer. Note that the memory
- *   will not be initialized.
- * - On failure, executes the provided code block, which must return or panic.
- *
- * Example:
- * @code
- *   char* buf = ET_TRY_ALLOCATE_INSTANCE_OR(
- *       memory_allocator,
- *       MyType,
- *       { *out_err = Error::MemoryAllocationFailed; return nullopt; });
- * @endcode
- */
-#define ET_TRY_ALLOCATE_INSTANCE_OR(memory_allocator__, type__, ...) \
-  ({ \
-    type__* et_try_allocate_result = \
-        memory_allocator__->allocateInstance<type__>(); \
-    if (et_try_allocate_result == nullptr) { \
-      __VA_ARGS__ \
-      /* The args must return. */ \
-      ET_UNREACHABLE(); \
-    } \
-    et_try_allocate_result; \
-  })
-
-/**
- * Tries allocating multiple elements of a given type from the specified
- * MemoryAllocator*.
- *
- * - On success, returns a pointer to the allocated buffer.
- * - On failure, executes the provided code block, which must return or panic.
- *
- * Example:
- * @code
- *   Tensor* tensor_list = ET_TRY_ALLOCATE_LIST_OR(
- *       memory_allocator, Tensor, num_tensors, {
- *         *out_err = Error::MemoryAllocationFailed;
- *         return nullopt;
- *       });
- * @endcode
- */
-#define ET_TRY_ALLOCATE_LIST_OR(memory_allocator__, type__, nelem__, ...) \
-  ({ \
-    type__* et_try_allocate_result = \
-        memory_allocator__->allocateList<type__>(nelem__); \
-    if (et_try_allocate_result == nullptr && nelem__ > 0) { \
-      __VA_ARGS__ \
-      /* The args must return. */ \
-      ET_UNREACHABLE(); \
-    } \
-    et_try_allocate_result; \
-  })
-#else // !ET_HAVE_GNU_STATEMENT_EXPRESSIONS
-/**
- * The recommended alternative for statement expression-incompatible compilers
- * is to directly allocate the memory.
- */
-#define ET_TRY_ALLOCATE_OR(memory_allocator__, nbytes__, ...) \
-  memory_allocator__->allocate(nbytes__);
-
-/**
- * The recommended alternative for statement expression-incompatible compilers
- * is to directly allocate the memory.
- */
-#define ET_TRY_ALLOCATE_INSTANCE_OR(memory_allocator__, type__, ...) \
-  memory_allocator__->allocateInstance<type__>();
-
-/**
- * The recommended alternative for statement expression-incompatible compilers
- * is to directly use allocate the memory.
- */
-#define ET_TRY_ALLOCATE_LIST_OR(memory_allocator__, type__, nelem__, ...) \
-  memory_allocator__->allocateList<type__>(nelem__);
-
-#endif // !ET_HAVE_GNU_STATEMENT_EXPRESSIONS
-
-/**
- * Tries allocating from the specified MemoryAllocator*.
- *
- * - On success, returns a pointer to the allocated buffer.
- * - On failure, returns `Error::MemoryAllocationFailed` from the calling
- *   function, which must be declared to return `executorch::runtime::Error`.
- *
- * Example:
- * @code
- *   char* buf = ET_ALLOCATE_OR_RETURN_ERROR(memory_allocator, bufsize);
- * @endcode
- */
-#define ET_ALLOCATE_OR_RETURN_ERROR(memory_allocator__, nbytes__) \
-  ET_TRY_ALLOCATE_OR(memory_allocator__, nbytes__, { \
-    return ::executorch::runtime::Error::MemoryAllocationFailed; \
-  })
-
-/**
- * Tries allocating an instance of type__ from the specified MemoryAllocator*.
- *
- * - On success, returns a pointer to the allocated buffer. Note that the memory
- *   will not be initialized.
- * - On failure, returns `Error::MemoryAllocationFailed` from the calling
- *   function, which must be declared to return `executorch::runtime::Error`.
- *
- * Example:
- * @code
- *   char* buf = ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(memory_allocator, MyType);
- * @endcode
- */
-#define ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(memory_allocator__, type__) \
-  ET_TRY_ALLOCATE_INSTANCE_OR(memory_allocator__, type__, { \
-    return ::executorch::runtime::Error::MemoryAllocationFailed; \
-  })
-
-/**
- * Tries allocating multiple elements of a given type from the specified
- * MemoryAllocator*.
- *
- * - On success, returns a pointer to the allocated buffer.
- * - On failure, returns `Error::MemoryAllocationFailed` from the calling
- *   function, which must be declared to return `executorch::runtime::Error`.
- *
- * Example:
- * @code
- *   Tensor* tensor_list = ET_ALLOCATE_LIST_OR_RETURN_ERROR(
- *       memory_allocator, Tensor, num_tensors);
- * @endcode
- */
-#define ET_ALLOCATE_LIST_OR_RETURN_ERROR(memory_allocator__, type__, nelem__) \
-  ET_TRY_ALLOCATE_LIST_OR(memory_allocator__, type__, nelem__, { \
-    return ::executorch::runtime::Error::MemoryAllocationFailed; \
-  })
-
 } // namespace runtime
 } // namespace executorch
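
Callers that used the ET_TRY_ALLOCATE_* variants with a custom failure block have no one-line replacement after this commit; they spell out the allocation and their own failure handling. A sketch of that migration, mirroring the out_err/nullopt shape from the deleted doc comments; my_allocator, MyType, and out_err are placeholders, not real identifiers from the codebase:

    // Before:
    //   MyType* buf = ET_TRY_ALLOCATE_INSTANCE_OR(
    //       my_allocator, MyType,
    //       { *out_err = Error::MemoryAllocationFailed; return nullopt; });
    // After:
    MyType* buf = my_allocator->allocateInstance<MyType>();
    if (buf == nullptr) {
      *out_err = Error::MemoryAllocationFailed;
      return nullopt;
    }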
