@@ -2361,29 +2361,32 @@ fn store(cg: *CodeGen, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErr
         .error_union => {
             const pl_ty = ty.errorUnionPayload(zcu);
             if (!pl_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
-                return cg.store(lhs, rhs, Type.anyerror, 0);
+                return cg.store(lhs, rhs, Type.anyerror, offset);
             }
 
             const len = @as(u32, @intCast(abi_size));
+            assert(offset == 0);
             return cg.memcpy(lhs, rhs, .{ .imm32 = len });
         },
         .optional => {
             if (ty.isPtrLikeOptional(zcu)) {
-                return cg.store(lhs, rhs, Type.usize, 0);
+                return cg.store(lhs, rhs, Type.usize, offset);
             }
             const pl_ty = ty.optionalChild(zcu);
             if (!pl_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
-                return cg.store(lhs, rhs, Type.u8, 0);
+                return cg.store(lhs, rhs, Type.u8, offset);
             }
             if (pl_ty.zigTypeTag(zcu) == .error_set) {
-                return cg.store(lhs, rhs, Type.anyerror, 0);
+                return cg.store(lhs, rhs, Type.anyerror, offset);
             }
 
             const len = @as(u32, @intCast(abi_size));
+            assert(offset == 0);
             return cg.memcpy(lhs, rhs, .{ .imm32 = len });
         },
         .@"struct", .array, .@"union" => if (isByRef(ty, zcu, cg.target)) {
             const len = @as(u32, @intCast(abi_size));
+            assert(offset == 0);
             return cg.memcpy(lhs, rhs, .{ .imm32 = len });
         },
         .vector => switch (determineSimdStoreStrategy(ty, zcu, cg.target)) {
@@ -2407,6 +2410,7 @@ fn store(cg: *CodeGen, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErr
         },
         .pointer => {
             if (ty.isSlice(zcu)) {
+                assert(offset == 0);
                 // store pointer first
                 // lower it to the stack so we do not have to store rhs into a local first
                 try cg.emitWValue(lhs);
@@ -2421,6 +2425,7 @@ fn store(cg: *CodeGen, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErr
             }
         },
         .int, .@"enum", .float => if (abi_size > 8 and abi_size <= 16) {
+            assert(offset == 0);
             try cg.emitWValue(lhs);
             const lsb = try cg.load(rhs, Type.u64, 0);
             try cg.store(.stack, lsb, Type.u64, 0 + lhs.offset());
@@ -2430,6 +2435,7 @@ fn store(cg: *CodeGen, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErr
             try cg.store(.stack, msb, Type.u64, 8 + lhs.offset());
             return;
         } else if (abi_size > 16) {
+            assert(offset == 0);
             try cg.memcpy(lhs, rhs, .{ .imm32 = @as(u32, @intCast(ty.abiSize(zcu))) });
         },
         else => if (abi_size > 8) {
@@ -4438,9 +4444,6 @@ fn airOptionalPayloadPtrSet(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void
     const operand = try cg.resolveInst(ty_op.operand);
     const opt_ty = cg.typeOf(ty_op.operand).childType(zcu);
     const payload_ty = opt_ty.optionalChild(zcu);
-    if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
-        return cg.fail("TODO: Implement OptionalPayloadPtrSet for optional with zero-sized type {}", .{payload_ty.fmtDebug()});
-    }
 
     if (opt_ty.optionalReprIsPayload(zcu)) {
         return cg.finishAir(inst, operand, &.{ty_op.operand});
@@ -5407,31 +5410,40 @@ fn cmpOptionals(cg: *CodeGen, lhs: WValue, rhs: WValue, operand_ty: Type, op: st
     assert(operand_ty.hasRuntimeBitsIgnoreComptime(zcu));
     assert(op == .eq or op == .neq);
     const payload_ty = operand_ty.optionalChild(zcu);
+    assert(!isByRef(payload_ty, zcu, cg.target));
 
-    // We store the final result in here that will be validated
-    // if the optional is truly equal.
-    var result = try cg.ensureAllocLocal(Type.i32);
+    var result = try cg.allocLocal(Type.i32);
     defer result.free(cg);
 
+    var lhs_null = try cg.allocLocal(Type.i32);
+    defer lhs_null.free(cg);
+
     try cg.startBlock(.block, .empty);
+
+    try cg.addImm32(if (op == .eq) 0 else 1);
+    try cg.addLocal(.local_set, result.local.value);
+
     _ = try cg.isNull(lhs, operand_ty, .i32_eq);
+    try cg.addLocal(.local_tee, lhs_null.local.value);
     _ = try cg.isNull(rhs, operand_ty, .i32_eq);
-    try cg.addTag(.i32_ne); // inverse so we can exit early
-    try cg.addLabel(.br_if, 0);
+    try cg.addTag(.i32_ne);
+    try cg.addLabel(.br_if, 0); // only one is null
+
+    try cg.addImm32(if (op == .eq) 1 else 0);
+    try cg.addLocal(.local_set, result.local.value);
+
+    try cg.addLocal(.local_get, lhs_null.local.value);
+    try cg.addLabel(.br_if, 0); // both are null
 
     _ = try cg.load(lhs, payload_ty, 0);
     _ = try cg.load(rhs, payload_ty, 0);
-    const opcode = buildOpcode(.{ .op = .ne, .valtype1 = typeToValtype(payload_ty, zcu, cg.target) });
-    try cg.addTag(Mir.Inst.Tag.fromOpcode(opcode));
-    try cg.addLabel(.br_if, 0);
-
-    try cg.addImm32(1);
+    _ = try cg.cmp(.stack, .stack, payload_ty, op);
     try cg.addLocal(.local_set, result.local.value);
+
     try cg.endBlock();
 
-    try cg.emitWValue(result);
-    try cg.addImm32(0);
-    try cg.addTag(if (op == .eq) .i32_ne else .i32_eq);
+    try cg.addLocal(.local_get, result.local.value);
+
     return .stack;
 }