@@ -433,6 +433,19 @@ static struct range range_refine(enum num_t x_t, struct range x, enum num_t y_t,
 
         y_cast = range_cast(y_t, x_t, y);
 
+        /* If we know that
+         *   - *x* is in the range of a signed 32-bit value, and
+         *   - the *y_cast* range is 32-bit signed non-negative,
+         * then the *x* range can be improved with *y_cast* such that the
+         * *x* range is 32-bit signed non-negative. Otherwise, if the new
+         * range for *x* allowed the upper 32 bits to be 0xffffffff, the
+         * eventual new range for *x* would fall outside the signed 32-bit
+         * range, violating the original *x* range.
+         */
+        if (x_t == S64 && y_t == S32 && y_cast.a <= S32_MAX && y_cast.b <= S32_MAX &&
+            (s64)x.a >= S32_MIN && (s64)x.b <= S32_MAX)
+                return range_improve(x_t, x, y_cast);
+
         /* the case when new range knowledge, *y*, is a 32-bit subregister
          * range, while previous range knowledge, *x*, is a full register
          * 64-bit range, needs special treatment to take into account upper 32
@@ -2108,6 +2121,9 @@ static struct subtest_case crafted_cases[] = {
         {S32, U32, {(u32)S32_MIN, 0}, {0, 0}},
         {S32, U32, {(u32)S32_MIN, 0}, {(u32)S32_MIN, (u32)S32_MIN}},
         {S32, U32, {(u32)S32_MIN, S32_MAX}, {S32_MAX, S32_MAX}},
+        {S64, U32, {0x0, 0x1f}, {0xffffffff80000000ULL, 0x000000007fffffffULL}},
+        {S64, U32, {0x0, 0x1f}, {0xffffffffffff8000ULL, 0x0000000000007fffULL}},
+        {S64, U32, {0x0, 0x1f}, {0xffffffffffffff80ULL, 0x000000000000007fULL}},
 };
 
 /* Go over crafted hard-coded cases. This is fast, so we do it as part of
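
For readers skimming the diff, here is a minimal standalone sketch of the idea behind the new branch in range_refine(). It is not the selftest code itself: the struct range layout, the S32_MIN/S32_MAX constants and the s64_intersect() helper below are simplified local stand-ins, and the selftest's real range_cast()/range_improve() helpers are domain-aware in ways this demo is not.

```c
/* demo_range_refine.c - a sketch of the S64/S32 refinement guard, not the selftest. */
#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

typedef uint64_t u64;
typedef int64_t s64;

#define S32_MIN ((s64)INT32_MIN)
#define S32_MAX ((s64)INT32_MAX)

/* Bounds stored as u64 and reinterpreted per domain; this mirrors the
 * selftest's convention but is an assumption of this sketch.
 */
struct range { u64 a, b; };

/* Hypothetical helper: intersect two ranges in the signed 64-bit domain.
 * The real range_improve() is domain-aware; this stand-in only covers the
 * S64 case needed for the demo.
 */
static struct range s64_intersect(struct range x, struct range y)
{
        struct range r = {
                .a = (u64)((s64)x.a > (s64)y.a ? (s64)x.a : (s64)y.a),
                .b = (u64)((s64)x.b < (s64)y.b ? (s64)x.b : (s64)y.b),
        };
        return r;
}

int main(void)
{
        /* *x*: a 64-bit range already known to fit in s32, here [-5, 100]. */
        struct range x = { .a = (u64)(s64)-5, .b = 100 };
        /* *y_cast*: a 32-bit signed non-negative range widened to 64 bits, [0, 0x1f]. */
        struct range y_cast = { .a = 0x0, .b = 0x1f };

        /* The guard added by the patch: refine only when *x* fits in the signed
         * 32-bit window and *y_cast* is 32-bit signed non-negative, so the
         * result cannot escape the original signed 32-bit *x* range.
         */
        if ((s64)x.a >= S32_MIN && (s64)x.b <= S32_MAX &&
            y_cast.a <= (u64)S32_MAX && y_cast.b <= (u64)S32_MAX) {
                struct range r = s64_intersect(x, y_cast);

                /* Prints: refined x = [0x0, 0x1f] */
                printf("refined x = [0x%" PRIx64 ", 0x%" PRIx64 "]\n", r.a, r.b);
        }
        return 0;
}
```

The three crafted cases added above pair a small [0x0, 0x1f] range with 64-bit ranges of the form [S32_MIN, S32_MAX], [S16_MIN, S16_MAX] and [S8_MIN, S8_MAX] widened to s64, i.e. exactly what a 32-, 16- or 8-bit signed value looks like after sign extension to 64 bits.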