1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
|
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_OBJECTS_VISITING_INL_H_
#define V8_HEAP_OBJECTS_VISITING_INL_H_
#include "src/base/logging.h"
#include "src/heap/mark-compact.h"
#include "src/heap/object-lock.h"
#include "src/heap/objects-visiting.h"
#include "src/objects/arguments.h"
#include "src/objects/data-handler-inl.h"
#include "src/objects/free-space-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-weak-refs-inl.h"
#include "src/objects/module-inl.h"
#include "src/objects/objects-body-descriptors-inl.h"
#include "src/objects/objects-inl.h"
#include "src/objects/oddball.h"
#include "src/objects/ordered-hash-table.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/synthetic-module-inl.h"
#include "src/objects/torque-defined-classes.h"
#include "src/objects/visitors.h"
#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-objects.h"
#endif // V8_ENABLE_WEBASSEMBLY
namespace v8 {
namespace internal {
// Primary template: for a generic visitor id nothing is known about where the
// object's map is allocated, so conservatively report "not read-only".
// Specializations generated from READ_ONLY_MAPS_VISITOR_ID_LIST below return
// true for visitor ids whose maps only ever live in read-only space.
template <VisitorId visitor_id>
inline bool ContainsReadOnlyMap(PtrComprCageBase, Tagged<HeapObject>) {
  return false;
}
// List of visitor ids that can only appear in read-only maps. Unfortunately,
// these are generally contained in all other lists. Adding an instance type
// here optimizes handling in visitors that do not need to visit Map objects
// with such visitor ids.
// Visitor ids whose maps are guaranteed to live in read-only space. Each
// entry expands (via DEFINE_READ_ONLY_MAP_SPECIALIZATION below) into a
// ContainsReadOnlyMap specialization that returns true.
#define READ_ONLY_MAPS_VISITOR_ID_LIST(V) \
  V(AccessorInfo)                         \
  V(AllocationSite)                       \
  V(BigInt)                               \
  V(BytecodeArray)                        \
  V(ByteArray)                            \
  V(CallHandlerInfo)                      \
  V(Cell)                                 \
  V(Code)                                 \
  V(DataHandler)                          \
  V(DataObject)                           \
  V(DescriptorArray)                      \
  V(EmbedderDataArray)                    \
  V(ExternalString)                       \
  V(FeedbackCell)                         \
  V(FeedbackMetadata)                     \
  V(FeedbackVector)                       \
  V(FixedArray)                           \
  V(FixedDoubleArray)                     \
  V(InstructionStream)                    \
  V(PreparseData)                         \
  V(PropertyArray)                        \
  V(PropertyCell)                         \
  V(PrototypeInfo)                        \
  V(ScopeInfo)                            \
  V(SeqOneByteString)                     \
  V(SeqTwoByteString)                     \
  V(SharedFunctionInfo)                   \
  V(ShortcutCandidate)                    \
  V(SlicedString)                         \
  V(SloppyArgumentsElements)              \
  V(Symbol)                               \
  V(ThinString)                           \
  V(TransitionArray)                      \
  V(UncompiledDataWithoutPreparseData)    \
  V(UncompiledDataWithPreparseData)       \
  V(WeakArrayList)                        \
  V(WeakFixedArray)

// Specializes ContainsReadOnlyMap for one visitor id to return true, with a
// debug check that the object's map really does live in read-only space.
#define DEFINE_READ_ONLY_MAP_SPECIALIZATION(VisitorIdType)                    \
  template <>                                                                 \
  inline bool ContainsReadOnlyMap<VisitorId::kVisit##VisitorIdType>(          \
      PtrComprCageBase cage_base, Tagged<HeapObject> object) {                \
    /* If you see this DCHECK fail we encountered a Map with a VisitorId that \
     * should have only ever appeared in read-only space. */                  \
    DCHECK(object->map(cage_base).InReadOnlySpace());                         \
    return true;                                                              \
  }
READ_ONLY_MAPS_VISITOR_ID_LIST(DEFINE_READ_ONLY_MAP_SPECIALIZATION)
#undef DEFINE_READ_ONLY_MAP_SPECIALIZATION
#undef READ_ONLY_MAPS_VISITOR_ID_LIST
// Constructs a heap visitor from explicit pointer-compression cage bases: one
// for regular objects and one for code objects.
template <typename ResultType, typename ConcreteVisitor>
HeapVisitor<ResultType, ConcreteVisitor>::HeapVisitor(
    PtrComprCageBase cage_base, PtrComprCageBase code_cage_base)
    : ObjectVisitorWithCageBases(cage_base, code_cage_base) {}
// Convenience constructor: the cage bases are derived from the isolate.
template <typename ResultType, typename ConcreteVisitor>
HeapVisitor<ResultType, ConcreteVisitor>::HeapVisitor(Isolate* isolate)
    : ObjectVisitorWithCageBases(isolate) {}
// Convenience constructor: the cage bases are derived from the heap.
template <typename ResultType, typename ConcreteVisitor>
HeapVisitor<ResultType, ConcreteVisitor>::HeapVisitor(Heap* heap)
    : ObjectVisitorWithCageBases(heap) {}
// Default cast used by the dispatch in Visit(): a checked T::cast. Concrete
// visitors may shadow this — e.g. ConcurrentHeapVisitor below substitutes
// unchecked casts for strings that can transition concurrently.
template <typename ResultType, typename ConcreteVisitor>
template <typename T>
Tagged<T> HeapVisitor<ResultType, ConcreteVisitor>::Cast(
    Tagged<HeapObject> object) {
  return T::cast(object);
}
// Entry point when only the object is known: load its map once and forward
// to the map-based Visit overload.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(
    Tagged<HeapObject> object) {
  const auto map = object->map(cage_base());
  return Visit(map, object);
}
// Central dispatch: switches over the map's visitor id and forwards to the
// matching Visit* method on the concrete visitor. Objects are cast through
// ConcreteVisitor::Cast so subclasses can customize casting (see
// ConcurrentHeapVisitor).
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(
    Tagged<Map> map, Tagged<HeapObject> object) {
  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  switch (map->visitor_id()) {
// One case per typed/Torque visitor id.
#define CASE(TypeName)               \
  case kVisit##TypeName:             \
    return visitor->Visit##TypeName( \
        map, ConcreteVisitor::template Cast<TypeName>(object));
    TYPED_VISITOR_ID_LIST(CASE)
    TORQUE_VISITOR_ID_LIST(CASE)
#undef CASE
    case kVisitShortcutCandidate:
      // Shortcut candidates are ConsStrings.
      return visitor->VisitShortcutCandidate(
          map, ConcreteVisitor::template Cast<ConsString>(object));
    case kVisitDataObject:
      return visitor->VisitDataObject(map, object);
    case kVisitJSObjectFast:
      return visitor->VisitJSObjectFast(
          map, ConcreteVisitor::template Cast<JSObject>(object));
    case kVisitJSApiObject:
      return visitor->VisitJSApiObject(
          map, ConcreteVisitor::template Cast<JSObject>(object));
    case kVisitStruct:
      return visitor->VisitStruct(map, object);
    case kVisitFreeSpace:
      return visitor->VisitFreeSpace(map, FreeSpace::cast(object));
    // Sentinel enumerators, not real visitor ids.
    case kDataOnlyVisitorIdCount:
    case kVisitorIdCount:
      UNREACHABLE();
  }
  UNREACHABLE();
  // Make the compiler happy.
  return ResultType();
}
// Visits the map slot of `host` if the concrete visitor requires it. The
// visitor id is used to statically skip maps known to live in read-only
// space when the visitor opts out of read-only map visitation.
template <typename ResultType, typename ConcreteVisitor>
template <VisitorId visitor_id>
void HeapVisitor<ResultType, ConcreteVisitor>::VisitMapPointerIfNeeded(
    Tagged<HeapObject> host) {
  // The map word must be an actual map here, not a forwarding pointer.
  DCHECK(!host->map_word(cage_base(), kRelaxedLoad).IsForwardingAddress());
  if constexpr (!ConcreteVisitor::ShouldVisitMapPointer()) {
    return;
  }
  if constexpr (!ConcreteVisitor::ShouldVisitReadOnlyMapPointer()) {
    // For visitor ids in READ_ONLY_MAPS_VISITOR_ID_LIST this is statically
    // true and the map slot is skipped.
    if (ContainsReadOnlyMap<visitor_id>(cage_base(), host)) {
      return;
    }
  }
  static_cast<ConcreteVisitor*>(this)->VisitMapPointer(host);
}
// Generates the typed Visit##TypeName methods. Each generated method:
//   1. checks (in debug builds) that the BodyDescriptor size agrees with the
//      size derived from the map,
//   2. visits the map pointer if the concrete visitor needs it,
//   3. iterates the object's body, and
//   4. returns the object size as ResultType.
// NOTE: the parameters must be Tagged<Map>/Tagged<TypeName> to match the
// declared member signatures and the Tagged<> style used by every other
// Visit* definition in this file (the body already uses `->` accessors).
#define VISIT(TypeName)                                                       \
  template <typename ResultType, typename ConcreteVisitor>                    \
  ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit##TypeName(       \
      Tagged<Map> map, Tagged<TypeName> object) {                             \
    ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);           \
    /* If you see the following DCHECK fail, then the size computation of     \
     * BodyDescriptor doesn't match the size return via obj.Size(). This is   \
     * problematic as the GC requires those sizes to match for accounting     \
     * reasons. The fix likely involves adding a padding field in the object  \
     * definitions. */                                                        \
    DCHECK_EQ(object->SizeFromMap(map),                                       \
              TypeName::BodyDescriptor::SizeOf(map, object));                 \
    visitor->template VisitMapPointerIfNeeded<VisitorId::kVisit##TypeName>(   \
        object);                                                              \
    const int size = TypeName::BodyDescriptor::SizeOf(map, object);           \
    TypeName::BodyDescriptor::IterateBody(map, object, size, visitor);        \
    return static_cast<ResultType>(size);                                     \
  }
TYPED_VISITOR_ID_LIST(VISIT)
TORQUE_VISITOR_ID_LIST(VISIT)
#undef VISIT
// A shortcut candidate is a ConsString; visit it like any other ConsString.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitShortcutCandidate(
    Tagged<Map> map, Tagged<ConsString> object) {
  ConcreteVisitor* const visitor = static_cast<ConcreteVisitor*>(this);
  return visitor->VisitConsString(map, object);
}
// Visits an object without tagged fields (kVisitDataObject). Only the map
// pointer — and, under the sandbox, external pointers in Foreign — may need
// visiting; the size comes directly from the map.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitDataObject(
    Tagged<Map> map, Tagged<HeapObject> object) {
  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  int size = map->instance_size();
  visitor->template VisitMapPointerIfNeeded<VisitorId::kVisitDataObject>(
      object);
#ifdef V8_ENABLE_SANDBOX
  // The following types have external pointers, which must be visited.
  // TODO(v8:10391) Consider adding custom visitor IDs for these and making
  // this block not depend on V8_ENABLE_SANDBOX.
  if (IsForeign(object, cage_base())) {
    Foreign::BodyDescriptor::IterateBody(map, object, size, visitor);
  }
#endif  // V8_ENABLE_SANDBOX
  return static_cast<ResultType>(size);
}
// Delegates to the common JSObject path using the fast body descriptor.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSObjectFast(
    Tagged<Map> map, Tagged<JSObject> object) {
  return VisitJSObjectSubclass<JSObject, JSObject::FastBodyDescriptor>(
      map, object);
}
// Delegates to the common JSObject path using the full body descriptor.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSApiObject(
    Tagged<Map> map, Tagged<JSObject> object) {
  return VisitJSObjectSubclass<JSObject, JSObject::BodyDescriptor>(
      map, object);
}
// Visits a generic Struct: size comes straight from the map and the body is
// described by StructBodyDescriptor.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitStruct(
    Tagged<Map> map, Tagged<HeapObject> object) {
  ConcreteVisitor* const concrete = static_cast<ConcreteVisitor*>(this);
  const int instance_size = map->instance_size();
  concrete->template VisitMapPointerIfNeeded<VisitorId::kVisitStruct>(object);
  StructBodyDescriptor::IterateBody(map, object, instance_size, concrete);
  return static_cast<ResultType>(instance_size);
}
// Free-space blocks have no body to iterate; only the map pointer may need
// visiting. The size field is read with a relaxed load.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitFreeSpace(
    Tagged<Map> map, Tagged<FreeSpace> object) {
  ConcreteVisitor* const concrete = static_cast<ConcreteVisitor*>(this);
  concrete->template VisitMapPointerIfNeeded<VisitorId::kVisitFreeSpace>(
      object);
  return static_cast<ResultType>(object->size(kRelaxedLoad));
}
// Common visitation path for JSObject subclasses. TBodyDescriptor determines
// both the full object size and how the body is iterated. Returns the full
// size (including slack) even though only the used portion is visited.
template <typename ResultType, typename ConcreteVisitor>
template <typename T, typename TBodyDescriptor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSObjectSubclass(
    Tagged<Map> map, Tagged<T> object) {
  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  visitor->template VisitMapPointerIfNeeded<VisitorId::kVisitJSObject>(object);
  const int size = TBodyDescriptor::SizeOf(map, object);
  const int used_size = map->UsedInstanceSize();
  DCHECK_LE(used_size, size);
  DCHECK_GE(used_size, JSObject::GetHeaderSize(map));
  // It is important to visit only the used field and ignore the slack fields
  // because the slack fields may be trimmed concurrently. For non-concurrent
  // visitors this merely is an optimization in that we only visit the actually
  // used fields.
  TBodyDescriptor::IterateBody(map, object, used_size, visitor);
  return size;
}
// A HeapVisitor variant whose string visitation can run concurrently with
// in-place string transitions (see EnableConcurrentVisitation() uses below).
template <typename ResultType, typename ConcreteVisitor>
ConcurrentHeapVisitor<ResultType, ConcreteVisitor>::ConcurrentHeapVisitor(
    Isolate* isolate)
    : HeapVisitor<ResultType, ConcreteVisitor>(isolate) {}
// Cast helper used under concurrent visitation. Defaults to a checked cast;
// specialized below (via UNCHECKED_CAST) to an unchecked cast for string
// types that may be transitioned by another thread.
template <typename T>
struct ConcurrentVisitorCastHelper {
  static V8_INLINE Tagged<T> Cast(Tagged<HeapObject> object) {
    return T::cast(object);
  }
};
// Generates ConcurrentVisitorCastHelper specializations that use unchecked
// casts for strings that can transition in place while being visited.
#define UNCHECKED_CAST(VisitorId, TypeName)                               \
  template <>                                                             \
  V8_INLINE Tagged<TypeName> ConcurrentVisitorCastHelper<TypeName>::Cast( \
      Tagged<HeapObject> object) {                                        \
    return TypeName::unchecked_cast(object);                              \
  }
SAFE_STRING_TRANSITION_SOURCES(UNCHECKED_CAST)
// Casts are also needed for unsafe ones for the initial dispatch in
// HeapVisitor.
UNSAFE_STRING_TRANSITION_SOURCES(UNCHECKED_CAST)
#undef UNCHECKED_CAST
// Cast hook for the dispatch in HeapVisitor::Visit. When concurrent
// visitation is enabled, strings may be transitioned by another thread (see
// VisitStringLocked below), so the helper's (possibly unchecked) cast is
// used; otherwise a regular checked cast suffices.
template <typename ResultType, typename ConcreteVisitor>
template <typename T>
Tagged<T> ConcurrentHeapVisitor<ResultType, ConcreteVisitor>::Cast(
    Tagged<HeapObject> object) {
  if constexpr (ConcreteVisitor::EnableConcurrentVisitation()) {
    return ConcurrentVisitorCastHelper<T>::Cast(object);
  } else {
    return T::cast(object);
  }
}
// For string types that may transition unsafely during concurrent
// visitation, visit under the object lock; otherwise fall through to the
// regular HeapVisitor implementation.
#define VISIT_AS_LOCKED_STRING(VisitorId, TypeName)                    \
  template <typename ResultType, typename ConcreteVisitor>             \
  ResultType                                                           \
  ConcurrentHeapVisitor<ResultType, ConcreteVisitor>::Visit##TypeName( \
      Tagged<Map> map, Tagged<TypeName> object) {                      \
    if constexpr (ConcreteVisitor::EnableConcurrentVisitation()) {     \
      return VisitStringLocked(object);                                \
    }                                                                  \
    return HeapVisitor<ResultType, ConcreteVisitor>::Visit##TypeName(  \
        map, object);                                                  \
  }
UNSAFE_STRING_TRANSITION_SOURCES(VISIT_AS_LOCKED_STRING)
#undef VISIT_AS_LOCKED_STRING
// Visits a string that another thread may transition in place. The string is
// locked for the duration of the visit; the map is re-read under the lock
// because the string may have transitioned before the lock was acquired, and
// dispatch then happens on the up-to-date visitor id.
// (Also removes a stray empty statement that followed the return.)
template <typename ResultType, typename ConcreteVisitor>
template <typename T>
ResultType
ConcurrentHeapVisitor<ResultType, ConcreteVisitor>::VisitStringLocked(
    Tagged<T> object) {
  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  SharedObjectLockGuard guard(object);
  // The object has been locked. At this point shared read access is
  // guaranteed but we must re-read the map and check whether the string has
  // transitioned.
  Tagged<Map> map = object->map(visitor->cage_base());
  int size;
  switch (map->visitor_id()) {
// One case per possible transition target; uses an unchecked cast since the
// dynamic type is established by the visitor id read under the lock.
#define UNSAFE_STRING_TRANSITION_TARGET_CASE(VisitorIdType, TypeName)         \
  case kVisit##VisitorIdType:                                                 \
    visitor                                                                   \
        ->template VisitMapPointerIfNeeded<VisitorId::kVisit##VisitorIdType>( \
            object);                                                          \
    size = TypeName::BodyDescriptor::SizeOf(map, object);                     \
    TypeName::BodyDescriptor::IterateBody(                                    \
        map, TypeName::unchecked_cast(object), size, visitor);                \
    break;
    UNSAFE_STRING_TRANSITION_TARGETS(UNSAFE_STRING_TRANSITION_TARGET_CASE)
#undef UNSAFE_STRING_TRANSITION_TARGET_CASE
    default:
      UNREACHABLE();
  }
  return static_cast<ResultType>(size);
}
// Visitor over new-space objects; result type is fixed to int (object size).
template <typename ConcreteVisitor>
NewSpaceVisitor<ConcreteVisitor>::NewSpaceVisitor(Isolate* isolate)
    : ConcurrentHeapVisitor<int, ConcreteVisitor>(isolate) {}
} // namespace internal
} // namespace v8
#endif // V8_HEAP_OBJECTS_VISITING_INL_H_
|