@@ -255,6 +255,34 @@ impl Cond {
                collector.reg_use(src1);
                collector.reg_use(src2);
            }
+
+            Cond::IfXeq32I32 { src1, src2 }
+            | Cond::IfXneq32I32 { src1, src2 }
+            | Cond::IfXslt32I32 { src1, src2 }
+            | Cond::IfXslteq32I32 { src1, src2 }
+            | Cond::IfXsgt32I32 { src1, src2 }
+            | Cond::IfXsgteq32I32 { src1, src2 }
+            | Cond::IfXeq64I32 { src1, src2 }
+            | Cond::IfXneq64I32 { src1, src2 }
+            | Cond::IfXslt64I32 { src1, src2 }
+            | Cond::IfXslteq64I32 { src1, src2 }
+            | Cond::IfXsgt64I32 { src1, src2 }
+            | Cond::IfXsgteq64I32 { src1, src2 } => {
+                collector.reg_use(src1);
+                let _: &mut i32 = src2;
+            }
+
+            Cond::IfXult32I32 { src1, src2 }
+            | Cond::IfXulteq32I32 { src1, src2 }
+            | Cond::IfXugt32I32 { src1, src2 }
+            | Cond::IfXugteq32I32 { src1, src2 }
+            | Cond::IfXult64I32 { src1, src2 }
+            | Cond::IfXulteq64I32 { src1, src2 }
+            | Cond::IfXugt64I32 { src1, src2 }
+            | Cond::IfXugteq64I32 { src1, src2 } => {
+                collector.reg_use(src1);
+                let _: &mut u32 = src2;
+            }
        }
    }

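The immediate in the new variants is not a register, so nothing is reported to the operand collector for it; the `let _: &mut i32 = src2;` and `let _: &mut u32 = src2;` bindings are compile-time type assertions only. A minimal standalone sketch of that idiom, using a hypothetical type that is not part of the patch:

// Illustrative only: the binding compiles to nothing at runtime, but stops
// compiling if the field's type ever changes, forcing this code to be revisited.
struct Example {
    imm: i32,
}

fn check(e: &mut Example) {
    let _: &mut i32 = &mut e.imm; // compile-time type check; no runtime effect
}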
@@ -263,7 +291,7 @@ impl Cond {
    /// Note that the offset encoded to jump by is filled in as 0 and it's
    /// assumed `MachBuffer` will come back and clean it up.
    pub fn encode(&self, sink: &mut impl Extend<u8>) {
-        match self {
+        match *self {
            Cond::If32 { reg } => encode::br_if32(sink, reg, 0),
            Cond::IfNot32 { reg } => encode::br_if_not32(sink, reg, 0),
            Cond::IfXeq32 { src1, src2 } => encode::br_if_xeq32(sink, src1, src2, 0),
@@ -278,6 +306,88 @@ impl Cond {
            Cond::IfXslteq64 { src1, src2 } => encode::br_if_xslteq64(sink, src1, src2, 0),
            Cond::IfXult64 { src1, src2 } => encode::br_if_xult64(sink, src1, src2, 0),
            Cond::IfXulteq64 { src1, src2 } => encode::br_if_xulteq64(sink, src1, src2, 0),
+
+            Cond::IfXeq32I32 { src1, src2 } => match i8::try_from(src2) {
+                Ok(src2) => encode::br_if_xeq32_i8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xeq32_i32(sink, src1, src2, 0),
+            },
+            Cond::IfXneq32I32 { src1, src2 } => match i8::try_from(src2) {
+                Ok(src2) => encode::br_if_xneq32_i8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xneq32_i32(sink, src1, src2, 0),
+            },
+            Cond::IfXslt32I32 { src1, src2 } => match i8::try_from(src2) {
+                Ok(src2) => encode::br_if_xslt32_i8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xslt32_i32(sink, src1, src2, 0),
+            },
+            Cond::IfXslteq32I32 { src1, src2 } => match i8::try_from(src2) {
+                Ok(src2) => encode::br_if_xslteq32_i8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xslteq32_i32(sink, src1, src2, 0),
+            },
+            Cond::IfXsgt32I32 { src1, src2 } => match i8::try_from(src2) {
+                Ok(src2) => encode::br_if_xsgt32_i8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xsgt32_i32(sink, src1, src2, 0),
+            },
+            Cond::IfXsgteq32I32 { src1, src2 } => match i8::try_from(src2) {
+                Ok(src2) => encode::br_if_xsgteq32_i8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xsgteq32_i32(sink, src1, src2, 0),
+            },
+            Cond::IfXult32I32 { src1, src2 } => match u8::try_from(src2) {
+                Ok(src2) => encode::br_if_xult32_u8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xult32_u32(sink, src1, src2, 0),
+            },
+            Cond::IfXulteq32I32 { src1, src2 } => match u8::try_from(src2) {
+                Ok(src2) => encode::br_if_xulteq32_u8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xulteq32_u32(sink, src1, src2, 0),
+            },
+            Cond::IfXugt32I32 { src1, src2 } => match u8::try_from(src2) {
+                Ok(src2) => encode::br_if_xugt32_u8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xugt32_u32(sink, src1, src2, 0),
+            },
+            Cond::IfXugteq32I32 { src1, src2 } => match u8::try_from(src2) {
+                Ok(src2) => encode::br_if_xugteq32_u8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xugteq32_u32(sink, src1, src2, 0),
+            },
+
+            Cond::IfXeq64I32 { src1, src2 } => match i8::try_from(src2) {
+                Ok(src2) => encode::br_if_xeq64_i8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xeq64_i32(sink, src1, src2, 0),
+            },
+            Cond::IfXneq64I32 { src1, src2 } => match i8::try_from(src2) {
+                Ok(src2) => encode::br_if_xneq64_i8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xneq64_i32(sink, src1, src2, 0),
+            },
+            Cond::IfXslt64I32 { src1, src2 } => match i8::try_from(src2) {
+                Ok(src2) => encode::br_if_xslt64_i8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xslt64_i32(sink, src1, src2, 0),
+            },
+            Cond::IfXslteq64I32 { src1, src2 } => match i8::try_from(src2) {
+                Ok(src2) => encode::br_if_xslteq64_i8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xslteq64_i32(sink, src1, src2, 0),
+            },
+            Cond::IfXsgt64I32 { src1, src2 } => match i8::try_from(src2) {
+                Ok(src2) => encode::br_if_xsgt64_i8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xsgt64_i32(sink, src1, src2, 0),
+            },
+            Cond::IfXsgteq64I32 { src1, src2 } => match i8::try_from(src2) {
+                Ok(src2) => encode::br_if_xsgteq64_i8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xsgteq64_i32(sink, src1, src2, 0),
+            },
+            Cond::IfXult64I32 { src1, src2 } => match u8::try_from(src2) {
+                Ok(src2) => encode::br_if_xult64_u8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xult64_u32(sink, src1, src2, 0),
+            },
+            Cond::IfXulteq64I32 { src1, src2 } => match u8::try_from(src2) {
+                Ok(src2) => encode::br_if_xulteq64_u8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xulteq64_u32(sink, src1, src2, 0),
+            },
+            Cond::IfXugt64I32 { src1, src2 } => match u8::try_from(src2) {
+                Ok(src2) => encode::br_if_xugt64_u8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xugt64_u32(sink, src1, src2, 0),
+            },
+            Cond::IfXugteq64I32 { src1, src2 } => match u8::try_from(src2) {
+                Ok(src2) => encode::br_if_xugteq64_u8(sink, src1, src2, 0),
+                Err(_) => encode::br_if_xugteq64_u32(sink, src1, src2, 0),
+            },
        }
    }

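All of the new `encode` arms share one size-selection pattern: narrow the 32-bit immediate to 8 bits when it fits and emit the short `*_i8`/`*_u8` form, otherwise fall back to the full `*_i32`/`*_u32` form. A minimal sketch of just that selection step, with hypothetical `emit_short`/`emit_long` closures standing in for the `encode::br_if_*` pairs (illustrative, not part of the patch):

// Illustrative only: pick the compact encoding when the signed immediate
// fits in one byte, mirroring the `i8::try_from` fallback above.
fn select_signed_imm(imm: i32, emit_short: impl FnOnce(i8), emit_long: impl FnOnce(i32)) {
    match i8::try_from(imm) {
        Ok(imm8) => emit_short(imm8), // short instruction form
        Err(_) => emit_long(imm),     // full 32-bit immediate form
    }
}

The unsigned arms do the same with `u8::try_from`, so an immediate of 100 takes the byte-sized encoding while 100_000 takes the 32-bit one.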
@@ -325,6 +435,28 @@ impl Cond {
                src1: src2,
                src2: src1,
            },
+
+            Cond::IfXeq32I32 { src1, src2 } => Cond::IfXneq32I32 { src1, src2 },
+            Cond::IfXneq32I32 { src1, src2 } => Cond::IfXeq32I32 { src1, src2 },
+            Cond::IfXslt32I32 { src1, src2 } => Cond::IfXsgteq32I32 { src1, src2 },
+            Cond::IfXslteq32I32 { src1, src2 } => Cond::IfXsgt32I32 { src1, src2 },
+            Cond::IfXult32I32 { src1, src2 } => Cond::IfXugteq32I32 { src1, src2 },
+            Cond::IfXulteq32I32 { src1, src2 } => Cond::IfXugt32I32 { src1, src2 },
+            Cond::IfXsgt32I32 { src1, src2 } => Cond::IfXslteq32I32 { src1, src2 },
+            Cond::IfXsgteq32I32 { src1, src2 } => Cond::IfXslt32I32 { src1, src2 },
+            Cond::IfXugt32I32 { src1, src2 } => Cond::IfXulteq32I32 { src1, src2 },
+            Cond::IfXugteq32I32 { src1, src2 } => Cond::IfXult32I32 { src1, src2 },
+
+            Cond::IfXeq64I32 { src1, src2 } => Cond::IfXneq64I32 { src1, src2 },
+            Cond::IfXneq64I32 { src1, src2 } => Cond::IfXeq64I32 { src1, src2 },
+            Cond::IfXslt64I32 { src1, src2 } => Cond::IfXsgteq64I32 { src1, src2 },
+            Cond::IfXslteq64I32 { src1, src2 } => Cond::IfXsgt64I32 { src1, src2 },
+            Cond::IfXult64I32 { src1, src2 } => Cond::IfXugteq64I32 { src1, src2 },
+            Cond::IfXulteq64I32 { src1, src2 } => Cond::IfXugt64I32 { src1, src2 },
+            Cond::IfXsgt64I32 { src1, src2 } => Cond::IfXslteq64I32 { src1, src2 },
+            Cond::IfXsgteq64I32 { src1, src2 } => Cond::IfXslt64I32 { src1, src2 },
+            Cond::IfXugt64I32 { src1, src2 } => Cond::IfXulteq64I32 { src1, src2 },
+            Cond::IfXugteq64I32 { src1, src2 } => Cond::IfXult64I32 { src1, src2 },
        }
    }
}
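Unlike the register-register arms above, which flip the predicate and also swap `src1`/`src2`, the register-immediate arms only flip the predicate: the immediate stays on the right-hand side, so `!(x < imm)` becomes `x >= imm`, `!(x <= imm)` becomes `x > imm`, and so on. A small standalone check of that identity (illustrative, not part of the patch):

// Illustrative only: inverting a comparison against an immediate never
// needs an operand swap; negation maps each predicate to its complement.
fn inversion_holds(x: i32, imm: i32) -> bool {
    (!(x < imm) == (x >= imm)) && (!(x <= imm) == (x > imm)) && (!(x == imm) == (x != imm))
}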
@@ -370,6 +502,66 @@ impl fmt::Display for Cond {
            Cond::IfXulteq64 { src1, src2 } => {
                write!(f, "if_xulteq64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
+            Cond::IfXeq32I32 { src1, src2 } => {
+                write!(f, "if_xeq32_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXneq32I32 { src1, src2 } => {
+                write!(f, "if_xneq32_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXslt32I32 { src1, src2 } => {
+                write!(f, "if_xslt32_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXslteq32I32 { src1, src2 } => {
+                write!(f, "if_xslteq32_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXsgt32I32 { src1, src2 } => {
+                write!(f, "if_xsgt32_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXsgteq32I32 { src1, src2 } => {
+                write!(f, "if_xsgteq32_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXult32I32 { src1, src2 } => {
+                write!(f, "if_xult32_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXulteq32I32 { src1, src2 } => {
+                write!(f, "if_xulteq32_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXugt32I32 { src1, src2 } => {
+                write!(f, "if_xugt32_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXugteq32I32 { src1, src2 } => {
+                write!(f, "if_xugteq32_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXeq64I32 { src1, src2 } => {
+                write!(f, "if_xeq64_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXneq64I32 { src1, src2 } => {
+                write!(f, "if_xneq64_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXslt64I32 { src1, src2 } => {
+                write!(f, "if_xslt64_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXslteq64I32 { src1, src2 } => {
+                write!(f, "if_xslteq64_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXsgt64I32 { src1, src2 } => {
+                write!(f, "if_xsgt64_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXsgteq64I32 { src1, src2 } => {
+                write!(f, "if_xsgteq64_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXult64I32 { src1, src2 } => {
+                write!(f, "if_xult64_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXulteq64I32 { src1, src2 } => {
+                write!(f, "if_xulteq64_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXugt64I32 { src1, src2 } => {
+                write!(f, "if_xugt64_i32 {}, {src2}", reg_name(**src1))
+            }
+            Cond::IfXugteq64I32 { src1, src2 } => {
+                write!(f, "if_xugteq64_i32 {}, {src2}", reg_name(**src1))
+            }
        }
    }
}