@@ -41,6 +41,7 @@
package com.oracle.graal.python.builtins.objects.cext.capi.transitions;

import static com.oracle.graal.python.builtins.objects.cext.capi.PythonNativeWrapper.PythonAbstractObjectNativeWrapper.IMMORTAL_REFCNT;
+import static com.oracle.graal.python.builtins.objects.cext.capi.PythonNativeWrapper.PythonAbstractObjectNativeWrapper.MANAGED_REFCNT;

import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
@@ -82,6 +83,7 @@
import com.oracle.graal.python.builtins.objects.cext.structs.CStructAccess.AllocateNode;
import com.oracle.graal.python.builtins.objects.cext.structs.CStructAccess.FreeNode;
import com.oracle.graal.python.builtins.objects.cext.structs.CStructs;
+import com.oracle.graal.python.builtins.objects.floats.PFloat;
import com.oracle.graal.python.builtins.objects.getsetdescriptor.DescriptorDeleteMarker;
import com.oracle.graal.python.builtins.objects.tuple.PTuple;
import com.oracle.graal.python.nodes.PGuards;
@@ -102,6 +104,7 @@
import com.oracle.truffle.api.dsl.Bind;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.Cached.Exclusive;
+import com.oracle.truffle.api.dsl.Cached.Shared;
import com.oracle.truffle.api.dsl.GenerateCached;
import com.oracle.truffle.api.dsl.GenerateInline;
import com.oracle.truffle.api.dsl.GenerateUncached;
@@ -390,7 +393,7 @@ public static void pollReferenceQueue() {
             * free'd at context finalization.
             */
            long stubPointer = HandlePointerConverter.pointerToStub(reference.pointer);
-           if (subNativeRefCount(stubPointer, PythonAbstractObjectNativeWrapper.MANAGED_REFCNT) == 0) {
+           if (subNativeRefCount(stubPointer, MANAGED_REFCNT) == 0) {
                freeNativeStub(stubPointer);
            } else {
                /*
@@ -429,7 +432,7 @@ public static void pollReferenceQueue() {
     */
    private static void processNativeObjectReference(NativeObjectReference reference, NativeObjectReferenceArrayWrapper referencesToBeFreed) {
        LOGGER.fine(() -> PythonUtils.formatJString("releasing %s", reference.toString()));
-       if (subNativeRefCount(reference.pointer, PythonAbstractObjectNativeWrapper.MANAGED_REFCNT) == 0) {
+       if (subNativeRefCount(reference.pointer, MANAGED_REFCNT) == 0) {
            referencesToBeFreed.add(reference.pointer);
        }
    }
@@ -752,6 +755,7 @@ public static boolean pointsToPyHandleSpace(long pointer) {
    @GenerateUncached
    @GenerateInline
    @GenerateCached(false)
+   @ImportStatic(CApiGuards.class)
    public abstract static class FirstToNativeNode extends Node {

        public static long executeUncached(PythonAbstractObjectNativeWrapper wrapper, boolean immortal) {
@@ -765,55 +769,127 @@ public final long execute(Node inliningTarget, PythonAbstractObjectNativeWrapper
        public abstract long execute(Node inliningTarget, PythonAbstractObjectNativeWrapper wrapper, boolean immortal);

        @Specialization
-       static long doGeneric(Node inliningTarget, PythonAbstractObjectNativeWrapper wrapper, boolean immortal,
+       static long doPrimitiveNativeWrapper(Node inliningTarget, PrimitiveNativeWrapper wrapper, boolean immortal,
+                       @Shared @Cached(inline = false) CStructAccess.WriteDoubleNode writeDoubleNode,
+                       @Shared @Cached InlinedConditionProfile isFloatObjectProfile,
+                       @Shared @Cached AllocateNativeObjectStubNode allocateNativeObjectStubNode) {
+           boolean isFloat = isFloatObjectProfile.profile(inliningTarget, wrapper.isDouble());
+           CStructs ctype = isFloat ? CStructs.GraalPyFloatObject : CStructs.GraalPyObject;
+           Object type;
+           if (wrapper.isBool()) {
+               type = PythonBuiltinClassType.Boolean;
+           } else if (wrapper.isIntLike()) {
+               type = PythonBuiltinClassType.PInt;
+           } else if (isFloat) {
+               type = PythonBuiltinClassType.PFloat;
+           } else {
+               throw CompilerDirectives.shouldNotReachHere();
+           }
+           long taggedPointer = allocateNativeObjectStubNode.execute(inliningTarget, wrapper, type, ctype, immortal);
+
+           // allocate a native stub object (C type: GraalPy*Object)
+           if (isFloat) {
+               long realPointer = HandlePointerConverter.pointerToStub(taggedPointer);
+               writeDoubleNode.write(realPointer, CFields.GraalPyFloatObject__ob_fval, wrapper.getDouble());
+           }
+           return taggedPointer;
+       }
+
+       @Specialization(guards = "!isPrimitiveNativeWrapper(wrapper)")
+       static long doOther(Node inliningTarget, PythonAbstractObjectNativeWrapper wrapper, boolean immortal,
+                       @Cached(inline = false) CStructAccess.WriteLongNode writeLongNode,
+                       @Cached(inline = false) CStructAccess.WritePointerNode writePointerNode,
+                       @Shared @Cached(inline = false) CStructAccess.WriteDoubleNode writeDoubleNode,
+                       @Exclusive @Cached InlinedConditionProfile isVarObjectProfile,
+                       @Shared @Cached InlinedConditionProfile isFloatObjectProfile,
+                       @Cached GetClassNode getClassNode,
+                       @Shared @Cached AllocateNativeObjectStubNode allocateNativeObjectStubNode) {
+
+           assert !(wrapper instanceof TruffleObjectNativeWrapper);
+           assert !(wrapper instanceof PrimitiveNativeWrapper);
+
+           Object delegate = wrapper.getDelegate();
+           Object type = getClassNode.execute(inliningTarget, delegate);
+
+           CStructs ctype;
+           if (isVarObjectProfile.profile(inliningTarget, delegate instanceof PTuple)) {
+               ctype = CStructs.GraalPyVarObject;
+           } else if (isFloatObjectProfile.profile(inliningTarget, delegate instanceof Double || delegate instanceof PFloat)) {
+               ctype = CStructs.GraalPyFloatObject;
+           } else {
+               ctype = CStructs.GraalPyObject;
+           }
+
+           long taggedPointer = allocateNativeObjectStubNode.execute(inliningTarget, wrapper, type, ctype, immortal);
+
+           // allocate a native stub object (C type: GraalPy*Object)
+           if (ctype == CStructs.GraalPyVarObject) {
+               assert delegate instanceof PTuple;
+               SequenceStorage sequenceStorage = ((PTuple) delegate).getSequenceStorage();
+               long realPointer = HandlePointerConverter.pointerToStub(taggedPointer);
+               writeLongNode.write(realPointer, CFields.GraalPyVarObject__ob_size, sequenceStorage.length());
+               Object obItemPtr = 0L;
+               if (sequenceStorage instanceof NativeSequenceStorage nativeSequenceStorage) {
+                   obItemPtr = nativeSequenceStorage.getPtr();
+               }
+               writePointerNode.write(realPointer, CFields.GraalPyVarObject__ob_item, obItemPtr);
+           } else if (ctype == CStructs.GraalPyFloatObject) {
+               assert delegate instanceof Double || delegate instanceof PFloat;
+               long realPointer = HandlePointerConverter.pointerToStub(taggedPointer);
+               double fval;
+               if (delegate instanceof Double d) {
+                   fval = d;
+               } else {
+                   fval = ((PFloat) delegate).getValue();
+               }
+               writeDoubleNode.write(realPointer, CFields.GraalPyFloatObject__ob_fval, fval);
+           }
+
+           return taggedPointer;
+       }
+   }
+
+   @GenerateUncached
+   @GenerateInline
+   @GenerateCached(false)
+   abstract static class AllocateNativeObjectStubNode extends Node {
+
+       abstract long execute(Node inliningTarget, PythonAbstractObjectNativeWrapper wrapper, Object type, CStructs ctype, boolean immortal);
+
+       @Specialization
+       static long doGeneric(Node inliningTarget, PythonAbstractObjectNativeWrapper wrapper, Object type, CStructs ctype, boolean immortal,
                        @Cached(inline = false) GilNode gil,
                        @Cached(inline = false) CStructAccess.AllocateNode allocateNode,
                        @Cached(inline = false) CStructAccess.WriteLongNode writeLongNode,
-                       @Cached(inline = false) CStructAccess.WritePointerNode writePointerNode,
                        @Cached(inline = false) CStructAccess.WriteObjectNewRefNode writeObjectNode,
                        @Cached(inline = false) CStructAccess.WriteIntNode writeIntNode,
-                       @Cached InlinedConditionProfile isVarObjectProfile,
-                       @Cached InlinedExactClassProfile wrapperProfile,
-                       @Cached GetClassNode getClassNode,
                        @Cached CoerceNativePointerToLongNode coerceToLongNode) {

+           log(wrapper);
+           pollReferenceQueue();
+
+           long initialRefCount = immortal ? IMMORTAL_REFCNT : MANAGED_REFCNT;
+
+           // allocate a native stub object (C type: GraalPy*Object)
+           Object nativeObjectStub = allocateNode.alloc(ctype);
+
+           HandleContext handleContext = PythonContext.get(inliningTarget).nativeContext;
+           long stubPointer = coerceToLongNode.execute(inliningTarget, nativeObjectStub);
+           long taggedPointer = HandlePointerConverter.stubToPointer(stubPointer);
+
+           writeLongNode.write(stubPointer, CFields.PyObject__ob_refcnt, initialRefCount);
+           writeObjectNode.write(stubPointer, CFields.PyObject__ob_type, type);
+
+           // TODO(fa): this should not require the GIL (GR-51314)
            boolean acquired = gil.acquire();
            try {
-               log(wrapper);
-               assert !(wrapper instanceof TruffleObjectNativeWrapper);
-               pollReferenceQueue();
-
-               long initialRefCount = immortal ? IMMORTAL_REFCNT : PythonAbstractObjectNativeWrapper.MANAGED_REFCNT;
-
-               Object delegate = NativeToPythonNode.handleWrapper(inliningTarget, wrapperProfile, false, wrapper);
-               Object type = getClassNode.execute(inliningTarget, delegate);
-
-               // allocate a native stub object (C type: PyObject)
-               boolean isTuple = isVarObjectProfile.profile(inliningTarget, delegate instanceof PTuple);
-               Object nativeObjectStub = allocateNode.alloc(isTuple ? CStructs.GraalPyVarObject : CStructs.GraalPyObject);
-               writeLongNode.write(nativeObjectStub, CFields.PyObject__ob_refcnt, initialRefCount);
-               writeObjectNode.write(nativeObjectStub, CFields.PyObject__ob_type, type);
-               if (isTuple) {
-                   SequenceStorage sequenceStorage = ((PTuple) delegate).getSequenceStorage();
-                   writeLongNode.write(nativeObjectStub, CFields.PyVarObject__ob_size, sequenceStorage.length());
-                   Object obItemPtr = 0L;
-                   if (sequenceStorage instanceof NativeSequenceStorage nativeSequenceStorage) {
-                       obItemPtr = nativeSequenceStorage.getPtr();
-                   }
-                   writePointerNode.write(nativeObjectStub, CFields.GraalPyVarObject__ob_item, obItemPtr);
-               }
-               HandleContext handleContext = PythonContext.get(inliningTarget).nativeContext;
-               long stubPointer = coerceToLongNode.execute(inliningTarget, nativeObjectStub);
-               long taggedPointer = HandlePointerConverter.stubToPointer(stubPointer);
                int idx = nativeStubLookupReserve(handleContext);
                // We don't allow 'handleTableIndex == 0' to avoid that zeroed memory
                // accidentally maps to some valid object.
                assert idx > 0;
                writeIntNode.write(stubPointer, CFields.GraalPyObject__handle_table_index, idx);
                PythonObjectReference ref = PythonObjectReference.create(handleContext, wrapper, immortal, taggedPointer, idx);
                nativeStubLookupPut(handleContext, ref);
-
-               return logResult(taggedPointer);
            } catch (OverflowException e) {
                /*
                 * The OverflowException may be thrown by 'nativeStubLookupReserve' and indicates
@@ -825,6 +901,7 @@ static long doGeneric(Node inliningTarget, PythonAbstractObjectNativeWrapper wra
            } finally {
                gil.release(acquired);
            }
+           return logResult(taggedPointer);
        }
    }

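The two hunks above replace the single `doGeneric` specialization of `FirstToNativeNode` with `doPrimitiveNativeWrapper` and `doOther`, and both now choose a type-specific stub struct before delegating the common allocation to the new `AllocateNativeObjectStubNode`. The following dependency-free Java sketch only illustrates that selection; the enum and the `List`/`Double` checks are hypothetical stand-ins for `CStructs.GraalPy*Object` and the `PTuple`/`PFloat` instance checks, not GraalPy API:

```java
// Simplified illustration of the stub-struct selection added in this commit.
// StubStruct and the delegate checks below are hypothetical stand-ins.
enum StubStruct {
    GRAALPY_OBJECT,       // header only: ob_refcnt, ob_type, handle_table_index
    GRAALPY_VAR_OBJECT,   // header plus ob_size and ob_item (tuple delegates)
    GRAALPY_FLOAT_OBJECT  // header plus ob_fval (float delegates)
}

final class StubSelectionSketch {

    // a java.util.List stands in for "delegate instanceof PTuple" here
    static StubStruct select(Object delegate) {
        if (delegate instanceof java.util.List) {
            return StubStruct.GRAALPY_VAR_OBJECT;
        } else if (delegate instanceof Double) {
            return StubStruct.GRAALPY_FLOAT_OBJECT;
        } else {
            return StubStruct.GRAALPY_OBJECT;
        }
    }

    public static void main(String[] args) {
        System.out.println(select(java.util.List.of(1, 2)));   // GRAALPY_VAR_OBJECT
        System.out.println(select(3.14));                      // GRAALPY_FLOAT_OBJECT
        System.out.println(select("any other delegate"));      // GRAALPY_OBJECT
    }
}
```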
@@ -1283,7 +1360,7 @@ static Object handleWrapper(Node node, InlinedExactClassProfile wrapperProfile,
                 * *MUST* have done an incref and so the refcount must be greater than
                 * MANAGED_REFCNT.
                 */
-               assert objectNativeWrapper.getRefCount() > PythonAbstractObjectNativeWrapper.MANAGED_REFCNT;
+               assert objectNativeWrapper.getRefCount() > MANAGED_REFCNT;
                objectNativeWrapper.decRef();
            }
            if (profiledWrapper instanceof PrimitiveNativeWrapper primitive) {
@@ -1480,7 +1557,7 @@ private static Object createAbstractNativeObject(HandleContext handleContext, Ob
        NativeObjectReference ref = new NativeObjectReference(handleContext, result, pointer);
        nativeLookupPut(getContext(), pointer, ref);

-       long refCntDelta = PythonAbstractObjectNativeWrapper.MANAGED_REFCNT - (transfer ? 1 : 0);
+       long refCntDelta = MANAGED_REFCNT - (transfer ? 1 : 0);
        addNativeRefCount(pointer, refCntDelta);
        return result;
    }
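The remaining hunks only swap the qualified `PythonAbstractObjectNativeWrapper.MANAGED_REFCNT` for the statically imported `MANAGED_REFCNT` at the call sites that apply the reference-count bias. A minimal sketch of that bookkeeping follows, under the assumption (not stated in these hunks) that `MANAGED_REFCNT` is a large bias held while the managed side still owns the wrapper; the constant values below are illustrative placeholders, not the real ones:

```java
// Minimal model of the refcount bias used by subNativeRefCount(..., MANAGED_REFCNT)
// and addNativeRefCount(..., MANAGED_REFCNT - (transfer ? 1 : 0)).
final class RefCountSketch {
    static final long MANAGED_REFCNT = 0x1000;               // placeholder value
    static final long IMMORTAL_REFCNT = Long.MAX_VALUE / 2;  // placeholder value

    long refcnt;

    RefCountSketch(boolean immortal) {
        // mirrors 'initialRefCount = immortal ? IMMORTAL_REFCNT : MANAGED_REFCNT'
        this.refcnt = immortal ? IMMORTAL_REFCNT : MANAGED_REFCNT;
    }

    // native code increments and decrements on top of the managed bias
    void incRef() { refcnt++; }
    void decRef() { refcnt--; }

    // when the managed wrapper dies, the bias is subtracted; reaching 0 means no
    // native reference survived and the stub can be freed (compare
    // 'subNativeRefCount(stubPointer, MANAGED_REFCNT) == 0' above)
    boolean releaseManagedSide() {
        refcnt -= MANAGED_REFCNT;
        return refcnt == 0;
    }

    public static void main(String[] args) {
        RefCountSketch stub = new RefCountSketch(false);
        stub.incRef();                                   // native code takes a reference
        System.out.println(stub.releaseManagedSide());   // false: native still holds one
        stub.decRef();                                   // native reference dropped
        System.out.println(stub.refcnt == 0);            // true: stub memory can be freed
    }
}
```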