Skip to content

Commit 202b106

Browse files
Remove tags for arena cleanup nodes now that we have dedicated string cleanup blocks.
PiperOrigin-RevId: 603491012
1 parent 0a8409b commit 202b106

File tree

3 files changed

+33
-161
lines changed

3 files changed

+33
-161
lines changed

src/google/protobuf/arena.cc

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020
#include "absl/container/internal/layout.h"
2121
#include "absl/synchronization/mutex.h"
2222
#include "google/protobuf/arena_allocation_policy.h"
23+
#include "google/protobuf/arena_cleanup.h"
2324
#include "google/protobuf/arenaz_sampler.h"
2425
#include "google/protobuf/port.h"
2526
#include "google/protobuf/serial_arena.h"
@@ -136,9 +137,9 @@ std::vector<void*> SerialArena::PeekCleanupListForTesting() {
136137
ArenaBlock* b = head();
137138
if (b->IsSentry()) return res;
138139

139-
const auto peek_list = [&](const char* pos, const char* end) {
140-
while (pos != end) {
141-
pos += cleanup::PeekNode(pos, res);
140+
const auto peek_list = [&](char* pos, char* end) {
141+
for (; pos != end; pos += cleanup::Size()) {
142+
cleanup::PeekNode(pos, res);
142143
}
143144
};
144145

@@ -222,15 +223,14 @@ void* SerialArena::AllocateFromStringBlockFallback() {
222223
PROTOBUF_NOINLINE
223224
void* SerialArena::AllocateAlignedWithCleanupFallback(
224225
size_t n, size_t align, void (*destructor)(void*)) {
225-
size_t required = AlignUpTo(n, align) + cleanup::Size(destructor);
226+
size_t required = AlignUpTo(n, align) + cleanup::Size();
226227
AllocateNewBlock(required);
227228
return AllocateAlignedWithCleanup(n, align, destructor);
228229
}
229230

230231
PROTOBUF_NOINLINE
231232
void SerialArena::AddCleanupFallback(void* elem, void (*destructor)(void*)) {
232-
size_t required = cleanup::Size(destructor);
233-
AllocateNewBlock(required);
233+
AllocateNewBlock(cleanup::Size());
234234
AddCleanupFromExisting(elem, destructor);
235235
}
236236

@@ -324,8 +324,8 @@ void SerialArena::CleanupList() {
324324
char* limit = b->Limit();
325325
char* it = reinterpret_cast<char*>(b->cleanup_nodes);
326326
ABSL_DCHECK(!b->IsSentry() || it == limit);
327-
while (it < limit) {
328-
it += cleanup::DestroyNode(it);
327+
for (; it < limit; it += cleanup::Size()) {
328+
cleanup::DestroyNode(it);
329329
}
330330
b = b->next;
331331
} while (b);

src/google/protobuf/arena_cleanup.h

Lines changed: 19 additions & 145 deletions
Original file line numberDiff line numberDiff line change
@@ -33,169 +33,43 @@ void arena_destruct_object(void* object) {
3333
reinterpret_cast<T*>(object)->~T();
3434
}
3535

36-
// Tag defines the type of cleanup / cleanup object. This tag is stored in the
37-
// lowest 2 bits of the `elem` value identifying the type of node. All node
38-
// types must start with a `uintptr_t` that stores `Tag` in its low two bits.
39-
enum class Tag : uintptr_t {
40-
kDynamic = 0, // DynamicNode
41-
kString = 1, // TaggedNode (std::string)
42-
kCord = 2, // TaggedNode (absl::Cord)
43-
};
44-
45-
// DynamicNode contains the object (`elem`) that needs to be
36+
// CleanupNode contains the object (`elem`) that needs to be
4637
// destroyed, and the function to destroy it (`destructor`)
4738
// elem must be aligned at minimum on a 4 byte boundary.
48-
struct DynamicNode {
49-
uintptr_t elem;
39+
struct CleanupNode {
40+
void* elem;
5041
void (*destructor)(void*);
5142
};
5243

53-
// TaggedNode contains a `std::string` or `absl::Cord` object (`elem`) that
54-
// needs to be destroyed. The lowest 2 bits of `elem` contain the non-zero
55-
// `kString` or `kCord` tag.
56-
struct TaggedNode {
57-
uintptr_t elem;
58-
};
59-
60-
// EnableSpecializedTags() return true if the alignment of tagged objects
61-
// such as std::string allow us to poke tags in the 2 LSB bits.
62-
inline constexpr bool EnableSpecializedTags() {
63-
// For now we require 2 bits
64-
return alignof(std::string) >= 8 && alignof(absl::Cord) >= 8;
44+
inline ABSL_ATTRIBUTE_ALWAYS_INLINE CleanupNode* ToCleanup(void* pos) {
45+
return reinterpret_cast<CleanupNode*>(pos);
6546
}
6647

67-
// Adds a cleanup entry identified by `tag` at memory location `pos`.
68-
inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CreateNode(Tag tag, void* pos,
69-
const void* elem_raw,
48+
// Adds a cleanup entry at memory location `pos`.
49+
inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CreateNode(void* pos, void* elem,
7050
void (*destructor)(void*)) {
71-
auto elem = reinterpret_cast<uintptr_t>(elem_raw);
72-
if (EnableSpecializedTags()) {
73-
ABSL_DCHECK_EQ(elem & 3, 0ULL); // Must be aligned
74-
switch (tag) {
75-
case Tag::kString: {
76-
TaggedNode n = {elem | static_cast<uintptr_t>(Tag::kString)};
77-
memcpy(pos, &n, sizeof(n));
78-
return;
79-
}
80-
case Tag::kCord: {
81-
TaggedNode n = {elem | static_cast<uintptr_t>(Tag::kCord)};
82-
memcpy(pos, &n, sizeof(n));
83-
return;
84-
}
85-
86-
case Tag::kDynamic:
87-
default:
88-
break;
89-
}
90-
}
91-
DynamicNode n = {elem, destructor};
51+
CleanupNode n = {elem, destructor};
9252
memcpy(pos, &n, sizeof(n));
9353
}
9454

95-
// Optimization: performs a prefetch on `elem_address`.
96-
// Returns the size of the cleanup (meta) data at this address, allowing the
97-
// caller to advance cleanup iterators without needing to examine or know
98-
// anything about the underlying cleanup node or cleanup meta data / tags.
99-
inline ABSL_ATTRIBUTE_ALWAYS_INLINE size_t
100-
PrefetchNode(const void* elem_address) {
101-
if (EnableSpecializedTags()) {
102-
uintptr_t elem;
103-
memcpy(&elem, elem_address, sizeof(elem));
104-
if (static_cast<Tag>(elem & 3) != Tag::kDynamic) {
105-
return sizeof(TaggedNode);
106-
}
107-
}
108-
return sizeof(DynamicNode);
55+
// Optimization: performs a prefetch on the elem for the cleanup node at `pos`.
56+
inline ABSL_ATTRIBUTE_ALWAYS_INLINE void PrefetchNode(void* pos) {
10957
}
11058

111-
// Destroys the object referenced by the cleanup node at memory location `pos`.
112-
// Returns the size of the cleanup (meta) data at this address, allowing the
113-
// caller to advance cleanup iterators without needing to examine or know
114-
// anything about the underlying cleanup node or cleanup meta data / tags.
115-
inline ABSL_ATTRIBUTE_ALWAYS_INLINE size_t DestroyNode(const void* pos) {
116-
uintptr_t elem;
117-
memcpy(&elem, pos, sizeof(elem));
118-
if (EnableSpecializedTags()) {
119-
switch (static_cast<Tag>(elem & 3)) {
120-
case Tag::kString: {
121-
// Some compilers don't like fully qualified explicit dtor calls,
122-
// so use an alias to avoid having to type `::`.
123-
using T = std::string;
124-
reinterpret_cast<T*>(elem - static_cast<uintptr_t>(Tag::kString))->~T();
125-
return sizeof(TaggedNode);
126-
}
127-
case Tag::kCord: {
128-
using T = absl::Cord;
129-
reinterpret_cast<T*>(elem - static_cast<uintptr_t>(Tag::kCord))->~T();
130-
return sizeof(TaggedNode);
131-
}
132-
133-
case Tag::kDynamic:
134-
135-
default:
136-
break;
137-
}
138-
}
139-
static_cast<const DynamicNode*>(pos)->destructor(
140-
reinterpret_cast<void*>(elem - static_cast<uintptr_t>(Tag::kDynamic)));
141-
return sizeof(DynamicNode);
59+
// Destroys the object referenced by the cleanup node.
60+
inline ABSL_ATTRIBUTE_ALWAYS_INLINE void DestroyNode(void* pos) {
61+
CleanupNode* cleanup = ToCleanup(pos);
62+
cleanup->destructor(cleanup->elem);
14263
}
14364

14465
// Append in `out` the pointer to the to-be-cleaned object in `pos`.
145-
// Return the length of the cleanup node to allow the caller to advance the
146-
// position, like `DestroyNode` does.
147-
inline size_t PeekNode(const void* pos, std::vector<void*>& out) {
148-
uintptr_t elem;
149-
memcpy(&elem, pos, sizeof(elem));
150-
out.push_back(reinterpret_cast<void*>(elem & ~3));
151-
if (EnableSpecializedTags()) {
152-
switch (static_cast<Tag>(elem & 3)) {
153-
case Tag::kString:
154-
case Tag::kCord:
155-
return sizeof(TaggedNode);
156-
157-
case Tag::kDynamic:
158-
default:
159-
break;
160-
}
161-
}
162-
return sizeof(DynamicNode);
163-
}
164-
165-
// Returns the `tag` identifying the type of object for `destructor` or
166-
// kDynamic if `destructor` does not identify a well-known object type.
167-
inline ABSL_ATTRIBUTE_ALWAYS_INLINE Tag Type(void (*destructor)(void*)) {
168-
if (EnableSpecializedTags()) {
169-
if (destructor == &arena_destruct_object<std::string>) {
170-
return Tag::kString;
171-
}
172-
if (destructor == &arena_destruct_object<absl::Cord>) {
173-
return Tag::kCord;
174-
}
175-
}
176-
return Tag::kDynamic;
177-
}
178-
179-
// Returns the required size in bytes of the node type identified by `tag`.
180-
inline ABSL_ATTRIBUTE_ALWAYS_INLINE size_t Size(Tag tag) {
181-
if (!EnableSpecializedTags()) return sizeof(DynamicNode);
182-
183-
switch (tag) {
184-
case Tag::kDynamic:
185-
return sizeof(DynamicNode);
186-
case Tag::kString:
187-
return sizeof(TaggedNode);
188-
case Tag::kCord:
189-
return sizeof(TaggedNode);
190-
default:
191-
ABSL_DCHECK(false) << "Corrupted cleanup tag: " << static_cast<int>(tag);
192-
return sizeof(DynamicNode);
193-
}
66+
inline void PeekNode(void* pos, std::vector<void*>& out) {
67+
out.push_back(ToCleanup(pos)->elem);
19468
}
19569

196-
// Returns the required size in bytes of the node type for `destructor`.
197-
inline ABSL_ATTRIBUTE_ALWAYS_INLINE size_t Size(void (*destructor)(void*)) {
198-
return destructor == nullptr ? 0 : Size(Type(destructor));
70+
// Returns the required size for a cleanup node.
71+
constexpr ABSL_ATTRIBUTE_ALWAYS_INLINE size_t Size() {
72+
return sizeof(CleanupNode);
19973
}
20074

20175
} // namespace cleanup

src/google/protobuf/serial_arena.h

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -250,7 +250,7 @@ class PROTOBUF_EXPORT SerialArena {
250250
char* ret = ArenaAlignAs(align).CeilDefaultAligned(ptr());
251251
// See the comment in MaybeAllocateAligned re uintptr_t.
252252
if (PROTOBUF_PREDICT_FALSE(reinterpret_cast<uintptr_t>(ret) + n +
253-
cleanup::Size(destructor) >
253+
cleanup::Size() >
254254
reinterpret_cast<uintptr_t>(limit_))) {
255255
return AllocateAlignedWithCleanupFallback(n, align, destructor);
256256
}
@@ -265,9 +265,8 @@ class PROTOBUF_EXPORT SerialArena {
265265

266266
PROTOBUF_ALWAYS_INLINE
267267
void AddCleanup(void* elem, void (*destructor)(void*)) {
268-
size_t required = cleanup::Size(destructor);
269268
size_t has = static_cast<size_t>(limit_ - ptr());
270-
if (PROTOBUF_PREDICT_FALSE(required > has)) {
269+
if (PROTOBUF_PREDICT_FALSE(cleanup::Size() > has)) {
271270
return AddCleanupFallback(elem, destructor);
272271
}
273272
AddCleanupFromExisting(elem, destructor);
@@ -301,14 +300,13 @@ class PROTOBUF_EXPORT SerialArena {
301300

302301
PROTOBUF_ALWAYS_INLINE
303302
void AddCleanupFromExisting(void* elem, void (*destructor)(void*)) {
304-
cleanup::Tag tag = cleanup::Type(destructor);
305-
size_t n = cleanup::Size(tag);
303+
const size_t cleanup_size = cleanup::Size();
306304

307-
PROTOBUF_UNPOISON_MEMORY_REGION(limit_ - n, n);
308-
limit_ -= n;
305+
PROTOBUF_UNPOISON_MEMORY_REGION(limit_ - cleanup_size, cleanup_size);
306+
limit_ -= cleanup_size;
309307
MaybePrefetchBackwards(limit_);
310308
ABSL_DCHECK_GE(limit_, ptr());
311-
cleanup::CreateNode(tag, limit_, elem, destructor);
309+
cleanup::CreateNode(limit_, elem, destructor);
312310
}
313311

314312
// Prefetch the next kPrefetchForwardsDegree bytes after `prefetch_ptr_` and

0 commit comments

Comments
 (0)