@@ -331,23 +331,37 @@ append_objects(PyObject *py_list, PyGC_Head *gc_list)
 
 #ifdef GC_DEBUG
 // validate_list checks list consistency. And it works as document
-// describing when expected_mask is set / unset.
+// describing when flags are expected to be set / unset.
+// `head` must be a doubly-linked gc list, although it's fine (expected!) if
+// the prev and next pointers are "polluted" with flags.
+// What's checked:
+// - The `head` pointers are not polluted.
+// - The objects' prev pointers' PREV_MASK_COLLECTING flags are all
+//   `prev_value` (PREV_MASK_COLLECTING for set, 0 for clear).
+// - The objects' next pointers' NEXT_MASK_UNREACHABLE flags are all
+//   `next_value` (NEXT_MASK_UNREACHABLE for set, 0 for clear).
+// - The prev and next pointers are mutually consistent.
 static void
-validate_list(PyGC_Head *head, uintptr_t expected_mask)
+validate_list(PyGC_Head *head, uintptr_t prev_value, uintptr_t next_value)
 {
+    assert((head->_gc_prev & PREV_MASK_COLLECTING) == 0);
+    assert((head->_gc_next & NEXT_MASK_UNREACHABLE) == 0);
     PyGC_Head *prev = head;
     PyGC_Head *gc = GC_NEXT(head);
     while (gc != head) {
-        assert(GC_NEXT(gc) != NULL);
-        assert(GC_PREV(gc) == prev);
-        assert((gc->_gc_prev & PREV_MASK_COLLECTING) == expected_mask);
+        PyGC_Head *trueprev = GC_PREV(gc);
+        PyGC_Head *truenext = (PyGC_Head *)(gc->_gc_next & ~NEXT_MASK_UNREACHABLE);
+        assert(truenext != NULL);
+        assert(trueprev == prev);
+        assert((gc->_gc_prev & PREV_MASK_COLLECTING) == prev_value);
+        assert((gc->_gc_next & NEXT_MASK_UNREACHABLE) == next_value);
         prev = gc;
-        gc = GC_NEXT(gc);
+        gc = truenext;
     }
     assert(prev == GC_PREV(head));
 }
 #else
-#define validate_list(x,y) do{}while(0)
+#define validate_list(x, y, z) do{}while(0)
 #endif
 
 
 /*** end of list stuff ***/
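
The checks above rely on a pointer-tagging trick: every PyGC_Head is at least pointer-aligned, so the low bits of its _gc_prev/_gc_next fields are guaranteed to be zero and can be borrowed as in-band flag storage (PREV_MASK_COLLECTING, NEXT_MASK_UNREACHABLE). A minimal self-contained sketch of the idea for readers who haven't seen it; node_t, FLAG_MARK, and the helpers are illustrative names, not CPython's:

#include <assert.h>
#include <stdint.h>

#define FLAG_MARK ((uintptr_t)1)   /* low bit is free on aligned pointers */

typedef struct node {
    uintptr_t next;   /* pointer bits | optional FLAG_MARK */
} node_t;

static node_t *real_next(const node_t *n) {
    return (node_t *)(n->next & ~FLAG_MARK);    /* strip the flag */
}

static int is_marked(const node_t *n) {
    return (n->next & FLAG_MARK) != 0;
}

static void set_next(node_t *n, node_t *next, int marked) {
    assert(((uintptr_t)next & FLAG_MARK) == 0); /* alignment guarantee */
    n->next = (uintptr_t)next | (marked ? FLAG_MARK : 0);
}

int main(void) {
    node_t a, b;
    set_next(&a, &b, 1);            /* tag the link a -> b */
    assert(real_next(&a) == &b);    /* pointer survives the tag */
    assert(is_marked(&a));          /* flag is recoverable too */
    return 0;
}

validate_list is the debug-build enforcement of exactly this discipline: it walks the real pointers and checks that every node's tag bits match what the caller says the list's current phase requires.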
@@ -434,6 +448,9 @@ visit_reachable(PyObject *op, PyGC_Head *reachable)
     const Py_ssize_t gc_refs = gc_get_refs(gc);
 
     // Ignore untracked objects and objects in other generation.
+    // This also skips objects "to the left" of the current position in
+    // move_unreachable's scan of the 'young' list - they've already been
+    // traversed, and no longer have the PREV_MASK_COLLECTING flag.
     if (gc->_gc_next == 0 || !gc_is_collecting(gc)) {
         return 0;
     }
@@ -445,7 +462,8 @@ visit_reachable(PyObject *op, PyGC_Head *reachable)
          * and move_unreachable will eventually get to it
          * again.
          */
-        // Manually unlink gc from unreachable list because
+        // Manually unlink gc from unreachable list because the list functions
+        // don't work right in the presence of NEXT_MASK_UNREACHABLE flags.
         PyGC_Head *prev = GC_PREV(gc);
         PyGC_Head *next = (PyGC_Head *)(gc->_gc_next & ~NEXT_MASK_UNREACHABLE);
         _PyObject_ASSERT(FROM_GC(prev),
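
To see why the generic list helpers can't be used here, consider what unlinking looks like when next pointers are tagged. A hedged sketch using the same toy scheme as above (dnode_t, FLAG_MARK, and unlink_flagged are illustrative names; the real code preserves the flag by copying gc's whole next field instead):

#include <assert.h>
#include <stdint.h>

#define FLAG_MARK ((uintptr_t)1)   /* stand-in for NEXT_MASK_UNREACHABLE */

typedef struct dnode {
    uintptr_t prev;   /* kept clean in this sketch */
    uintptr_t next;   /* pointer bits | optional FLAG_MARK */
} dnode_t;

/* Unlink 'gc' from a circular doubly-linked list whose next pointers carry
 * FLAG_MARK: the flag must be masked off before 'next' is usable as an
 * address, and re-applied when rewriting prev's next pointer. */
static void unlink_flagged(dnode_t *gc)
{
    dnode_t *prev = (dnode_t *)gc->prev;
    dnode_t *next = (dnode_t *)(gc->next & ~FLAG_MARK);
    prev->next = (uintptr_t)next | (prev->next & FLAG_MARK);
    next->prev = (uintptr_t)prev;
}

int main(void)
{
    dnode_t head, a;
    /* two-element ring: head <-> a, with flagged next pointers */
    head.next = (uintptr_t)&a | FLAG_MARK;    head.prev = (uintptr_t)&a;
    a.next    = (uintptr_t)&head | FLAG_MARK; a.prev    = (uintptr_t)&head;
    unlink_flagged(&a);
    assert((dnode_t *)(head.next & ~FLAG_MARK) == &head);
    assert(head.next & FLAG_MARK);   /* neighbor's flag survives */
    return 0;
}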
@@ -546,8 +564,9 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
             PyGC_Head *last = GC_PREV(unreachable);
             // NOTE: Since all objects in unreachable set has
             // NEXT_MASK_UNREACHABLE flag, we set it unconditionally.
-            // But this may set the flat to unreachable too.
-            // move_legacy_finalizers() should care about it.
+            // But this may pollute the unreachable list head's 'next' pointer
+            // too. That's semantically senseless but expedient here - the
+            // damage is repaired when this function ends.
             last->_gc_next = (NEXT_MASK_UNREACHABLE | (uintptr_t)gc);
             _PyGCHead_SET_PREV(gc, last);
             gc->_gc_next = (NEXT_MASK_UNREACHABLE | (uintptr_t)unreachable);
@@ -557,6 +576,8 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
     }
     // young->_gc_prev must be last element remained in the list.
     young->_gc_prev = (uintptr_t)prev;
+    // don't let the pollution of the list head's next pointer leak
+    unreachable->_gc_next &= ~NEXT_MASK_UNREACHABLE;
 }
 
 static void
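
The unconditional OR above is a deliberate speed/cleanliness trade: it avoids a branch per appended object at the cost of tagging the list head itself when 'last' happens to be the head (i.e. on the first append). A sketch of this append-then-repair pattern, reusing the toy dnode_t/FLAG_MARK names from the earlier sketches (illustrative, not CPython's helpers):

/* Append with the flag OR'ed in unconditionally: branch-free, but when the
 * list is empty 'last' IS the head, so the head's next pointer picks up
 * the flag too. */
static void append_flagged(dnode_t *head, dnode_t *gc)
{
    dnode_t *last = (dnode_t *)head->prev;
    last->next = FLAG_MARK | (uintptr_t)gc;   /* may pollute the head */
    gc->prev = (uintptr_t)last;
    gc->next = FLAG_MARK | (uintptr_t)head;
    head->prev = (uintptr_t)gc;
}

/* One mask at the end undoes any pollution, mirroring the last line of
 * move_unreachable above. */
static void repair_head(dnode_t *head)
{
    head->next &= ~FLAG_MARK;
}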
@@ -604,7 +625,7 @@ static void
 move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers)
 {
     PyGC_Head *gc, *next;
-    unreachable->_gc_next &= ~NEXT_MASK_UNREACHABLE;
+    assert((unreachable->_gc_next & NEXT_MASK_UNREACHABLE) == 0);
 
     /* March over unreachable.  Move objects with finalizers into
      * `finalizers`.
@@ -630,13 +651,13 @@ clear_unreachable_mask(PyGC_Head *unreachable)
     assert(((uintptr_t)unreachable & NEXT_MASK_UNREACHABLE) == 0);
 
     PyGC_Head *gc, *next;
-    unreachable->_gc_next &= ~NEXT_MASK_UNREACHABLE;
+    assert((unreachable->_gc_next & NEXT_MASK_UNREACHABLE) == 0);
     for (gc = GC_NEXT(unreachable); gc != unreachable; gc = next) {
         _PyObject_ASSERT((PyObject *)FROM_GC(gc), gc->_gc_next & NEXT_MASK_UNREACHABLE);
         gc->_gc_next &= ~NEXT_MASK_UNREACHABLE;
         next = (PyGC_Head *)gc->_gc_next;
     }
-    validate_list(unreachable, PREV_MASK_COLLECTING);
+    validate_list(unreachable, PREV_MASK_COLLECTING, 0);
 }
 
 /* A traversal callback for move_legacy_finalizer_reachable. */
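
The order inside that loop body matters: the flag has to be scrubbed from gc->_gc_next before the field can be reused as an address, because the tagged value is not a valid pointer. A sketch of the same strip-while-walking pass over the toy list from the earlier sketches (illustrative names):

/* Walk a flagged circular list once, scrubbing FLAG_MARK as we go. The
 * head's own next pointer must already be clean, as asserted above. */
static void strip_flags(dnode_t *head)
{
    assert((head->next & FLAG_MARK) == 0);
    for (dnode_t *gc = (dnode_t *)head->next, *next; gc != head; gc = next) {
        gc->next &= ~FLAG_MARK;        /* scrub this node's flag first */
        next = (dnode_t *)gc->next;    /* now safe to follow as a pointer */
    }
}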
@@ -1021,15 +1042,15 @@ show_stats_each_generations(struct _gc_runtime_state *state)
 
  * The "unreachable" list must be uninitialized (this function calls
    gc_list_init over 'unreachable').
-
+
 IMPORTANT: This function leaves 'unreachable' with the NEXT_MASK_UNREACHABLE
 flag set but it does not clear it to skip unnecessary iteration. Before the
 flag is cleared (for example, by using 'clear_unreachable_mask' function or
 by a call to 'move_legacy_finalizers'), the 'unreachable' list is not a normal
 list and we can not use most gc_list_* functions for it. */
 static inline void
 deduce_unreachable(PyGC_Head *base, PyGC_Head *unreachable) {
-    validate_list(base, 0);
+    validate_list(base, 0, 0);
     /* Using ob_refcnt and gc_refs, calculate which objects in the
      * container set are reachable from outside the set (i.e., have a
      * refcount greater than 0 when all the references within the
@@ -1046,7 +1067,8 @@ deduce_unreachable(PyGC_Head *base, PyGC_Head *unreachable) {
      */
     gc_list_init(unreachable);
     move_unreachable(base, unreachable);  // gc_prev is pointer again
-    validate_list(base, 0);
+    validate_list(base, 0, 0);
+    validate_list(unreachable, PREV_MASK_COLLECTING, NEXT_MASK_UNREACHABLE);
 }
 
 /* Handle objects that may have resurrected after a call to 'finalize_garbage', moving
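
Taken together, deduce_unreachable's postconditions define a strict call-ordering contract for its callers. A hedged sketch of a well-behaved caller; it leans on this file's static helpers, so it is only meaningful inside gcmodule.c, and example_caller is a hypothetical name, not a function in the module:

static void example_caller(PyGC_Head *base)
{
    PyGC_Head unreachable;
    deduce_unreachable(base, &unreachable);
    /* Every node in 'unreachable' still carries NEXT_MASK_UNREACHABLE here:
     * only flag-aware code (e.g. move_legacy_finalizers, per the comment
     * above) may walk it. */
    clear_unreachable_mask(&unreachable);
    /* Now a normal gc list again; the gc_list_* helpers are safe to use. */
}

This is the discipline collect() observes below, which is why its validate_list calls pass NEXT_MASK_UNREACHABLE expectations that differ before and after the flag is cleared.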
@@ -1123,7 +1145,7 @@ collect(struct _gc_runtime_state *state, int generation,
         old = GEN_HEAD(state, generation + 1);
     else
         old = young;
-    validate_list(old, 0);
+    validate_list(old, 0, 0);
 
     deduce_unreachable(young, &unreachable);
 
@@ -1156,8 +1178,8 @@ collect(struct _gc_runtime_state *state, int generation,
      */
     move_legacy_finalizer_reachable(&finalizers);
 
-    validate_list(&finalizers, 0);
-    validate_list(&unreachable, PREV_MASK_COLLECTING);
+    validate_list(&finalizers, 0, 0);
+    validate_list(&unreachable, PREV_MASK_COLLECTING, 0);
 
     /* Print debugging information. */
     if (state->debug & DEBUG_COLLECTABLE) {
@@ -1169,8 +1191,8 @@ collect(struct _gc_runtime_state *state, int generation,
     /* Clear weakrefs and invoke callbacks as necessary. */
     m += handle_weakrefs(&unreachable, old);
 
-    validate_list(old, 0);
-    validate_list(&unreachable, PREV_MASK_COLLECTING);
+    validate_list(old, 0, 0);
+    validate_list(&unreachable, PREV_MASK_COLLECTING, 0);
 
     /* Call tp_finalize on objects which have one. */
     finalize_garbage(&unreachable);
@@ -1208,7 +1230,7 @@ collect(struct _gc_runtime_state *state, int generation,
      * this if they insist on creating this type of structure.
      */
     handle_legacy_finalizers(state, &finalizers, old);
-    validate_list(old, 0);
+    validate_list(old, 0, 0);
 
     /* Clear free list only during the collection of the highest
      * generation */