@@ -1465,22 +1465,6 @@ protected void trace(PythonContext context, Object ptr, Reference ref, TruffleSt
 * means that it ensures the existence of a {@link PythonAbstractNativeObject} for each native
 * object) and then stored in a Java object array which is then attached to the primary object.
 * </p>
- * <p>
- * Further, the native references are <em>stolen</em>. This is important because otherwise we
- * would still keep potential reference cycles in native memory that the Java GC cannot collect.
- * As a consequence, the refcount of a native object may temporarily be lower than
- * {@link PythonAbstractObjectNativeWrapper#MANAGED_REFCNT MANAGED_REFCNT}. This is best
- * explained by an example: Assume there is a native object {@code p0} that has a field
- * {@code PyObject *obj}. If native object {@code p1} is assigned to {@code p0->obj}, an incref
- * needs to be done. Now, if the Python GC runs, we will replicate the reference to Java and do
- * a decref. Since the corresponding {@link PythonAbstractNativeObject} then exists, the refcount
- * will be at least {@link PythonAbstractObjectNativeWrapper#MANAGED_REFCNT MANAGED_REFCNT}.
- * Now, another object {@code p2} is assigned to {@code p0->obj}, which means the previous
- * {@code p1} will be decref'd. The refcount is at this point
- * {@link PythonAbstractObjectNativeWrapper#MANAGED_REFCNT MANAGED_REFCNT - 1} although there is
- * a managed reference to {@code p1}. This will be fixed in the next Python GC run as soon as we
- * see the update.
- * </p>
 */
@CApiBuiltin(ret = Void, args = {Pointer, Pointer, Int}, call = Ignored)
abstract static class PyTruffleObject_ReplicateNativeReferences extends CApiTernaryBuiltinNode {
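Note: the Javadoc deleted above describes the refcount arithmetic around stolen references. As a reading aid, here is a minimal standalone sketch of that arithmetic; the concrete value of MANAGED_REFCNT and the bare increments/decrements are placeholders for the real PythonAbstractObjectNativeWrapper constant and the CApiTransitions machinery, not GraalPy code:

    public final class StolenRefSketch {
        // Placeholder value; the real constant lives in PythonAbstractObjectNativeWrapper.
        static final long MANAGED_REFCNT = 10;

        public static void main(String[] args) {
            // p1 already has a managed mirror, so its refcount starts at MANAGED_REFCNT.
            long p1 = MANAGED_REFCNT;
            p1++;                                         // native code: p0->obj = p1 (incref)
            p1--;                                         // GC run: reference replicated to Java, native ref stolen (decref)
            System.out.println(p1 == MANAGED_REFCNT);     // true
            p1--;                                         // native code: p0->obj = p2, so p1 is decref'd
            System.out.println(p1 == MANAGED_REFCNT - 1); // true: temporarily below MANAGED_REFCNT
            // The next Python GC run sees the update and repairs the count.
        }
    }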
@@ -1533,38 +1517,13 @@ static Object doGeneric(Object pointer, Object listHead, int n,
            oldReferents = nativeSequenceStorage.getReplicatedNativeReferences();
            nativeSequenceStorage.setReplicatedNativeReferences(referents);
        }
-       // 1. Collect referents (traverse native list and resolve pointers)
+       // Collect referents (traverse native list and resolve pointers)
        Object cur = listHead;
        for (int i = 0; i < n; i++) {
            referents[i] = readObjectNode.read(cur, GraalPyGC_CycleNode__item);
            cur = readPointerNode.read(cur, GraalPyGC_CycleNode__next);
        }

-       /*
-        * 2. Compare old and new referents. We optimize for the case where the arrays are
-        * equal. In this case, we don't need to do anything. If the arrays differ, we will
-        * give the stolen reference back (by doing an incref) and steal the reference that
-        * is about to be replicated.
-        */
-       if (!arrayEquals(oldReferents, referents)) {
-           int oldLen = oldReferents != null ? oldReferents.length : 0;
-           int maxLen = Math.max(oldLen, referents.length);
-           for (int i = 0; i < maxLen; i++) {
-               Object oldReferent = i < oldLen ? oldReferents[i] : null;
-               Object referent = i < referents.length ? referents[i] : null;
-               assert oldReferent != null || referent != null;
-               if (oldReferent != referent) {
-                   if (oldReferent instanceof PythonAbstractNativeObject nativeObject) {
-                       long lItemPointer = coerceNativePointerToLongNode.execute(inliningTarget, nativeObject.getPtr());
-                       CApiTransitions.addNativeRefCount(lItemPointer, 1);
-                   }
-                   if (referent instanceof PythonAbstractNativeObject nativeObject) {
-                       long lItemPointer = coerceNativePointerToLongNode.execute(inliningTarget, nativeObject.getPtr());
-                       CApiTransitions.subNativeRefCount(lItemPointer, 1);
-                   }
-               }
-           }
-       }
        /*
         * As described above: Ensure that the 'old' replicated references are strong until
         * this point. Otherwise, weakly referenced managed objects could die.
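For reference, the deleted block above implemented a positional diff between the old and new referent arrays: identical arrays are the fast path, and for each changed slot the previously stolen reference is given back (incref) while the newly replicated one is stolen (decref). A self-contained sketch of that pattern, with hypothetical incref/decref stubs standing in for CApiTransitions.addNativeRefCount/subNativeRefCount, and assuming the original arrayEquals is an identity-based element comparison:

    final class ReferentDiffSketch {
        // Hypothetical stubs; the real code calls CApiTransitions.add/subNativeRefCount
        // with the item's native pointer.
        static void incref(Object o) { /* give a previously stolen reference back */ }
        static void decref(Object o) { /* steal the reference that is now replicated */ }

        // Assumed identity-based equivalent of the original arrayEquals helper.
        static boolean sameReferents(Object[] a, Object[] b) {
            if (a == null || a.length != b.length) {
                return false;
            }
            for (int i = 0; i < b.length; i++) {
                if (a[i] != b[i]) {
                    return false;
                }
            }
            return true;
        }

        static void adjust(Object[] oldReferents, Object[] referents) {
            if (sameReferents(oldReferents, referents)) {
                return; // fast path: nothing changed, keep the stolen references as they are
            }
            int oldLen = oldReferents != null ? oldReferents.length : 0;
            int maxLen = Math.max(oldLen, referents.length);
            for (int i = 0; i < maxLen; i++) {
                Object oldReferent = i < oldLen ? oldReferents[i] : null;
                Object referent = i < referents.length ? referents[i] : null;
                if (oldReferent != referent) {
                    if (oldReferent != null) {
                        incref(oldReferent); // this slot no longer holds the old referent
                    }
                    if (referent != null) {
                        decref(referent); // this slot now replicates a new referent
                    }
                }
            }
        }
    }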