@@ -90,7 +90,7 @@ uintptr_t dynarecDF(dynarec_arm_t* dyn, uintptr_t addr, uintptr_t ip, int ninst,
             } else {
                 VCMP_F64(v1, v2);
             }
-            FCOMI(x1, x2, x3, x14, v1, v2, ST_IS_F(0));
+            FCOMI(x1, x2);
             X87_POP_OR_FAIL(dyn, ninst, x3);
             break;
         case 0xF0:
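For reference, the result that the slimmed-down FCOMI(x1, x2) macro still has to materialize is fixed by the x86 spec: FCOMI/FUCOMIP set ZF/PF/CF from the relation of ST0 and STi (all three set for an unordered compare) and clear OF/SF/AF. A minimal standalone model of that mapping, in plain C; the struct and function names are invented for illustration and are not box86 code:

#include <math.h>

/* ZF/PF/CF exactly as FCOMI/FUCOMIP define them. */
typedef struct { int zf, pf, cf; } x87_cmp_flags;

static x87_cmp_flags fcomi_model(double st0, double sti)
{
    x87_cmp_flags f = {0, 0, 0};
    if (isnan(st0) || isnan(sti)) {
        f.zf = f.pf = f.cf = 1;   /* unordered: ARM VCMP yields C=1, V=1 */
    } else if (st0 < sti) {
        f.cf = 1;                 /* below: ARM N=1 */
    } else if (st0 == sti) {
        f.zf = 1;                 /* equal: ARM Z=1 */
    }                             /* above: all three stay 0 */
    return f;                     /* OF/SF/AF are cleared by the insn */
}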
@@ -111,7 +111,7 @@ uintptr_t dynarecDF(dynarec_arm_t* dyn, uintptr_t addr, uintptr_t ip, int ninst,
             } else {
                 VCMP_F64(v1, v2);
             }
-            FCOMI(x1, x2, x3, x14, v1, v2, ST_IS_F(0));
+            FCOMI(x1, x2);
             X87_POP_OR_FAIL(dyn, ninst, x3);
             break;
@@ -186,9 +186,9 @@ uintptr_t dynarecDF(dynarec_arm_t* dyn, uintptr_t addr, uintptr_t ip, int ninst,
             BIC_IMM8(x3, x3, 0b10011111, 0);
             VMSR(x3);
             if (ST_IS_F(0)) {
-                VCVTR_S32_F32(s0, v1);
+                VCVT_S32_F32(s0, v1);
             } else {
-                VCVTR_S32_F64(s0, v1);
+                VCVT_S32_F64(s0, v1);
             }
             VMRS(x3);   // get the FPSCR reg and test FPU exception (invalid operation only)
             VMOVfrV(ed, s0);
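The only change in this hunk is dropping the R suffix: VCVTR_S32_F64 rounds according to the RMode bits currently held in FPSCR, while plain VCVT_S32_F64 always rounds toward zero. A quick C analogy of the difference, assuming round-to-nearest is the active mode:

#include <fenv.h>
#include <math.h>
#include <stdio.h>

int main(void)
{
    double d = 2.7;
    fesetround(FE_TONEAREST);   /* what the FPSCR RMode bits might hold */
    printf("%ld\n", lrint(d));  /* VCVTR-like: 3, uses the current mode */
    printf("%d\n", (int)d);     /* VCVT-like: 2, always toward zero */
    return 0;
}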
@@ -337,12 +337,10 @@ uintptr_t dynarecDF(dynarec_arm_t* dyn, uintptr_t addr, uintptr_t ip, int ninst,
                     STR_IMM9(x3, ed, 4);
                 }
             } else {
-                #if 1
                 v1 = x87_get_st(dyn, ninst, x2, x3, 0, NEON_CACHE_ST_D);
                 //addr = geted(dyn, addr, ninst, nextop, &ed, x1, &fixedaddress, 0, 0, 0, NULL);
                 fpu_get_scratch_double(dyn);    // to allocate v0
                 v2 = fpu_get_scratch_double(dyn);
-                #if 1
                 // get TOP
                 LDR_IMM9(x14, xEmu, offsetof(x86emu_t, top));
                 int a = 0 - dyn->n.x87stack;
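The TOP computation kept here mirrors the ring arithmetic of the x87 register stack: eight slots, with the dynarec's deferred pushes and pops tracked in dyn->n.x87stack and folded in before masking. A sketch of the index the emitted SUB/ADD/AND sequence computes; the helper name is hypothetical:

/* Hypothetical model of the emitted sequence: the x87 stack is a ring
   of 8 registers, so the effective slot of ST(i) is the current TOP,
   corrected by the deferred stack delta, masked to 3 bits. */
static int st_slot(int top, int x87stack, int i)
{
    int a = i - x87stack;   /* here i == 0, so a = 0 - dyn->n.x87stack */
    return (top + a) & 7;   /* (emu->top + i) & 7, wrapped in the ring */
}

In the deleted block below, that slot is then scaled by 16 (the LSL #4 in ADD_REG_LSL_IMM5), since each fpu_ll entry holds two int64 values: ref, then ll.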
@@ -375,74 +373,6 @@ uintptr_t dynarecDF(dynarec_arm_t* dyn, uintptr_t addr, uintptr_t ip, int ninst,
                 MARK3;
                 STR_IMM9(x2, ed, 0);
                 STR_IMM9(x3, ed, 4);
-                #else
-                v0 = fpu_get_scratch_double(dyn);
-                s0 = fpu_get_scratch_single(dyn);
-                // check STll(0).ref==ST(0).q so emu->fpu_ll[emu->top].ref == emu->mmx87[emu->top]
-                // get TOP
-                LDR_IMM9(x14, xEmu, offsetof(x86emu_t, top));
-                int a = 0 - dyn->n.x87stack;
-                if (a < 0) {
-                    SUB_IMM8(x14, x14, -a);
-                    AND_IMM8(x14, x14, 7);  // (emu->top + i)&7
-                } else if (a > 0) {
-                    ADD_IMM8(x14, x14, a);
-                    AND_IMM8(x14, x14, 7);  // (emu->top + i)&7
-                }
-                ADD_REG_LSL_IMM5(x14, xEmu, x14, 4);    // each fpu_ll is 2 int64: ref then ll
-                MOVW(x2, offsetof(x86emu_t, fpu_ll));   // can be optimized?
-                ADD_REG_LSL_IMM5(x14, x14, x2, 0);
-                VLDR_64(v2, x14, 0);
-                VCEQ_32(v2, v2, v1);    // compare
-                VMOVfrV_D(x2, x3, v2);
-                ANDS_REG_LSL_IMM5(x2, x2, x3, 0);   // if NE then the values are the same!
-                B_MARK(cEQ);    // do the i64 conversion
-                // memcpy(ed, &STll(0).ll, sizeof(int64_t));
-                LDRD_IMM8(x2, x14, 8);  // load ll
-                B_MARK3(c__);
-                MARK;
-                VEOR(v0, v0, v0);
-                MOVW(x2, 0x41F0);
-                VMOVtoDx_16(v0, 3, x2);     // v0 = (1<<32) as double
-                VMOVfrDx_32(x14, v1, 1);    // get high part to extract sign
-                VABS_F64(v1, v1);           // ST0 will be popped, so lost...
-                VDIV_F64(v2, v1, v0);       // v2 = abs(ST0)/(1<<32) : so 32-bit high part
-                MSR_nzcvq_0();
-                VMRS(x2);                   // get fpscr
-                ORR_IMM8(x3, x2, 0b010, 9); // enable exceptions
-                BIC_IMM8(x3, x3, 0b10011111, 0);
-                VMSR(x3);
-                VCVT_U32_F64(s0, v2);       // convert high part to U32
-                VMRS(x3);                   // get the FPSCR reg and test FPU exception (invalid operation only)
-                VMSR(x2);                   // put back fpscr
-                TSTS_IMM8_ROR(x3, 0b00000001, 0);
-                B_MARK2(cEQ);               // no overflow...
-                MARKLOCK;
-                MOV_IMM(x3, 0b10, 1);       // 0x80000000
-                MOVW(x2, 0);
-                B_MARK3(c__);
-                MARK2;                      // continue conversion, it fits an int64!
-                VCVT_F64_U32(v2, s0);       // int part now
-                VMLS_F64(v1, v2, v0);       // compute low part
-                VMOVfrV(x3, s0);            // transfer high part
-                TSTS_IMM8_ROR(x14, 0b10, 1);    // test high part with 0x80000000
-                B_MARKLOCK(cNE);            // int overflow...
-                VCVT_U32_F64(s0, v1);       // convert low part
-                VMOVfrV(x2, s0);            // transfer low part
-                TSTS_IMM8_ROR(x14, 0b10, 1);    // 0x80000000
-                B_MARK3(cEQ);
-                RSBS_IMM8(x2, x2, 0);       // NEG(i64)
-                RSC_IMM8(x3, x3, 0);
-                MARK3;
-                STR_IMM9(x2, x1, 0);
-                STR_IMM9(x3, x1, 4);
-                #endif
-                #else
-                MESSAGE(LOG_DUMP, "Need Optimization\n");
-                x87_forget(dyn, ninst, x2, x3, 0);
-                if (ed != x1) { MOV_REG(x1, ed); }
-                CALL(arm_fistp64, -1, 0);
-                #endif
             }
             X87_POP_OR_FAIL(dyn, ninst, x3);
             break;
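The block deleted above was a dead inline double-to-int64 conversion (plus a second #else alternative that simply called the arm_fistp64 helper). VFPv3 has no direct f64-to-s64 instruction, so the inline path split |ST0| around 2^32 (the 0x41F0 constant is the top half of 2^32 as a double), converted each 32-bit half separately, then reapplied the sign, collapsing overflow to 0x8000000000000000 the way x86 FISTP signals an integer indefinite. A hedged C model of that algorithm, with the helper name invented for illustration:

#include <math.h>
#include <stdint.h>

/* Model of the deleted inline path: split |x| around 2^32, convert the
   halves with truncating 32-bit conversions (like VCVT_U32_F64), then
   recombine and reapply the sign. Out-of-range input collapses to
   INT64_MIN, the x86 "integer indefinite". */
static int64_t fistp64_model(double x)
{
    const double two32 = 4294967296.0;  /* 0x41F0000000000000 as a double */
    double ax = fabs(x);
    double hi_d = ax / two32;
    if (hi_d >= 2147483648.0)           /* |x| >= 2^63: high part overflows */
        return INT64_MIN;
    uint32_t hi = (uint32_t)hi_d;                        /* truncated high part */
    uint32_t lo = (uint32_t)(ax - (double)hi * two32);   /* remainder, < 2^32 */
    int64_t r = ((int64_t)hi << 32) | lo;
    return (x < 0.0) ? -r : r;          /* sign came from the raw high word */
}

Note the model truncates like the deleted code did, rather than honoring the x87 rounding control; that approximation is presumably part of why the path was dropped in favor of the version kept above.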