@@ -76,6 +76,10 @@ public distributed actor MyActor {
76
76
/// IRGen test fixture: a distributed method taking several parameter kinds
/// ([Int], Obj, String?, LargeStruct) and returning LargeStruct. The body
/// unconditionally traps — only the IR generated for its accessor/thunk is
/// exercised by the CHECK lines elsewhere in this file.
distributed func complex(_: [Int], _: Obj, _: String?, _: LargeStruct) -> LargeStruct {
  fatalError()
}
79
// Combination of direct and indirect arguments involving generic arguments.
/// IRGen test fixture: a generic distributed method whose parameters are
/// constrained to Codable (required for transport across the distributed
/// boundary). The body is intentionally empty — the CHECK lines for the
/// `genericArgs` accessor later in this file verify the generated IR,
/// including generic substitutions and witness-table passing.
distributed func genericArgs<T: Codable, U: Codable>(_: T, _: [U]) {
}
83
}
80
84
81
85
@available ( SwiftStdlib 5 . 6 , * )
@@ -336,6 +340,79 @@ public distributed actor MyOtherActor {
336
340
337
341
// CHECK: {{.*}} = call i1 (i8*, i1, ...) @llvm.coro.end.async({{.*}}, %swift.context* {{.*}}, %swift.error* {{.*}})
338
342
343
+
344
+ /// ---> Accessor for `genericArgs`
345
+
346
+ // CHECK: define internal swifttailcc void @"$s27distributed_actor_accessors7MyActorC11genericArgsyyx_Sayq_GtSeRzSERzSeR_SER_r0_lFTETF"(%swift.context* swiftasync %0, i8* [[ARG_BUF:%.*]], i8* [[ARG_TYPES:%.*]], i8* [[RESULT_BUF:%.*]], i8* [[GENERIC_SUBS:%.*]], i8* [[WITNESS_TABLES:%.*]], i32 [[NUM_WITNESS_TABLES:%.*]], %T27distributed_actor_accessors7MyActorC* [[ACTOR:%.*]])
347
+
348
+ /// ---> Load `T`
349
+
350
+ // CHECK: store i8* [[ARG_BUF]], i8** %offset
351
+ // CHECK-NEXT: [[ARG_TYPES_BUF:%.*]] = bitcast i8* [[ARG_TYPES]] to %swift.type**
352
+ // CHECK-NEXT: %elt_offset = load i8*, i8** %offset
353
+ // CHECK-NEXT: [[FIRST_ARG_TYPE_ADDR:%.*]] = getelementptr inbounds %swift.type*, %swift.type** [[ARG_TYPES_BUF]], i32 0
354
+ // CHECK-NEXT: %arg_type = load %swift.type*, %swift.type** [[FIRST_ARG_TYPE_ADDR]]
355
+ // CHECK: %size = load i32, i32* {{.*}}
356
+ // CHECK: %flags = load i32, i32* {{.*}}
357
+ // CHECK: [[ELT_PTR:%.*]] = ptrtoint i8* %elt_offset to i32
358
+ // CHECK-NEXT: [[START_ELT_ALIGN:%.*]] = add nuw i32 [[ELT_PTR]], %flags.alignmentMask
359
+ // CHECK-NEXT: [[ALIGNMENT:%.*]] = xor i32 %flags.alignmentMask, -1
360
+ // CHECK-NEXT: [[ALIGNED_ELT_PTR:%.*]] = and i32 [[START_ELT_ALIGN]], [[ALIGNMENT]]
361
+ // CHECK-NEXT: [[TYPED_ARG_0:%.*]] = inttoptr i32 [[ALIGNED_ELT_PTR]] to %swift.opaque*
362
+
363
+ /// Move offset to the next element
364
+
365
+ // CHECK: [[CUR_OFFSET:%.*]] = ptrtoint %swift.opaque* [[TYPED_ARG_0]] to i32
366
+ // CHECK-NEXT: [[NEXT_OFFSET:%.*]] = add i32 [[CUR_OFFSET]], %size
367
+ // CHECK-NEXT: [[NEXT_OFFSET_PTR:%.*]] = inttoptr i32 [[NEXT_OFFSET]] to i8*
368
+ // CHECK-NEXT: store i8* [[NEXT_OFFSET_PTR]], i8** %offset
369
+
370
+ /// ---> Load `[U]`
371
+
372
+ // CHECK: %elt_offset2 = load i8*, i8** %offset
373
+ // CHECK-NEXT: [[SECOND_ARG_TYPE_ADDR:%.*]] = getelementptr inbounds %swift.type*, %swift.type** [[ARG_TYPES_BUF]], i32 1
374
+ // CHECK-NEXT: %arg_type3 = load %swift.type*, %swift.type** [[SECOND_ARG_TYPE_ADDR]]
375
+ // CHECK: %size4 = load i32, i32* {{.*}}
376
+ // CHECK: %flags6 = load i32, i32* {{.*}}
377
+ // CHECK: [[ELT_PTR:%.*]] = ptrtoint i8* %elt_offset2 to i32
378
+ // CHECK-NEXT: [[START_ELT_ALIGN:%.*]] = add nuw i32 [[ELT_PTR]], %flags6.alignmentMask
379
+ // CHECK-NEXT: [[ALIGNMENT:%.*]] = xor i32 %flags6.alignmentMask, -1
380
+ // CHECK-NEXT: [[ALIGNED_ELT_PTR:%.*]] = and i32 [[START_ELT_ALIGN]], [[ALIGNMENT]]
381
+ // CHECK-NEXT: [[TYPED_ARG_1:%.*]] = inttoptr i32 [[ALIGNED_ELT_PTR]] to %swift.opaque*
382
+ // CHECK-NEXT: [[ARR_ARG_1:%.*]] = bitcast %swift.opaque* [[TYPED_ARG_1]] to %TSa*
383
+ // CHECK-NEXT: %._buffer = getelementptr inbounds %TSa, %TSa* [[ARR_ARG_1]], i32 0, i32 0
384
+ // CHECK-NEXT: %._buffer._storage = getelementptr inbounds %Ts12_ArrayBufferV, %Ts12_ArrayBufferV* %._buffer, i32 0, i32 0
385
+ // CHECK-NEXT: %._buffer._storage.rawValue = getelementptr inbounds %Ts14_BridgeStorageV, %Ts14_BridgeStorageV* %._buffer._storage, i32 0, i32 0
386
+ // CHECK-NEXT: [[TYPED_ARG_1:%.*]] = load %swift.bridge*, %swift.bridge** %._buffer._storage.rawValue
387
+
388
+ /// ---> Load generic argument substitutions from the caller-provided buffer
389
+
390
+ // CHECK: [[GENERIC_SUBS_BUF:%.*]] = bitcast i8* [[GENERIC_SUBS]] to %swift.type**
391
+ // CHECK-NEXT: [[SUB_T_ADDR:%.*]] = getelementptr inbounds %swift.type*, %swift.type** [[GENERIC_SUBS_BUF]], i32 0
392
+ // CHECK-NEXT: [[SUB_T:%.*]] = load %swift.type*, %swift.type** [[SUB_T_ADDR]]
393
+ // CHECK-NEXT: [[SUB_U_ADDR:%.*]] = getelementptr inbounds %swift.type*, %swift.type** [[GENERIC_SUBS_BUF]], i32 1
394
+ // CHECK-NEXT: [[SUB_U:%.*]] = load %swift.type*, %swift.type** [[SUB_U_ADDR]]
395
+
396
+ /// --> Load witness tables from caller-provided buffer
397
+
398
+ /// First, check whether the number of witness tables matches expected
399
+
400
+ // CHECK: [[IS_INCORRECT_WITNESSES:%.*]] = icmp ne i32 [[NUM_WITNESS_TABLES]], 4
401
+ // CHECK-NEXT: br i1 [[IS_INCORRECT_WITNESSES]], label %incorrect-witness-tables, label [[LOAD_WITNESS_TABLES:%.*]]
402
+ // CHECK: incorrect-witness-tables:
403
+ // CHECK-NEXT: unreachable
404
+
405
+ // CHECK: [[WITNESS_BUF:%.*]] = bitcast i8* [[WITNESS_TABLES]] to i8**
406
+ // CHECK-NEXT: [[T_ENCODABLE:%.*]] = getelementptr inbounds i8*, i8** [[WITNESS_BUF]], i32 0
407
+ // CHECK-NEXT: [[T_DECODABLE:%.*]] = getelementptr inbounds i8*, i8** [[WITNESS_BUF]], i32 1
408
+ // CHECK-NEXT: [[U_ENCODABLE:%.*]] = getelementptr inbounds i8*, i8** [[WITNESS_BUF]], i32 2
409
+ // CHECK-NEXT: [[U_DECODABLE:%.*]] = getelementptr inbounds i8*, i8** [[WITNESS_BUF]], i32 3
410
+
411
+ /// ---> Check that distributed thunk code is formed correctly
412
+
413
+ // CHECK: [[THUNK_RESULT:%.*]] = call { i8*, %swift.error* } (i32, i8*, i8*, ...) @llvm.coro.suspend.async.sl_p0i8p0s_swift.errorss({{.*}}, %swift.context* {{.*}}, %swift.opaque* [[TYPED_ARG_0]], %swift.bridge* [[TYPED_ARG_1]], %swift.type* [[SUB_T]], %swift.type* [[SUB_U]], i8** [[T_ENCODABLE]], i8** [[T_DECODABLE]], i8** [[U_ENCODABLE]], i8** [[U_DECODABLE]], %T27distributed_actor_accessors7MyActorC* [[ACTOR]])
414
+
415
+
339
416
/// ---> Thunk and distributed method for `MyOtherActor.empty`
340
417
341
418
/// Let's check that there is no offset allocation here since parameter list is empty
0 commit comments