/* Copyright (c) 2019-2025 Griefer@Work                                       *
 *                                                                            *
 * This software is provided 'as-is', without any express or implied          *
 * warranty. In no event will the authors be held liable for any damages      *
 * arising from the use of this software.                                     *
 *                                                                            *
 * Permission is granted to anyone to use this software for any purpose,      *
 * including commercial applications, and to alter it and redistribute it     *
 * freely, subject to the following restrictions:                             *
 *                                                                            *
 * 1. The origin of this software must not be misrepresented; you must not    *
 *    claim that you wrote the original software. If you use this software    *
 *    in a product, an acknowledgement (see the following) in the product     *
 *    documentation is required:                                              *
 *    Portions Copyright (c) 2019-2025 Griefer@Work                           *
 * 2. Altered source versions must be plainly marked as such, and must not be *
 *    misrepresented as being the original software.                          *
 * 3. This notice may not be removed or altered from any source distribution. *
 */
/* (>) Standard: ISO C23 (ISO/IEC 9899:2024) */
#ifndef _STDCKDINT_H
#define _STDCKDINT_H 1

#include "__stdinc.h"

/* C23 (7.20.1) feature-test macro: announces that this header conforms
 * to the C23 <stdckdint.h> specification. */
#define __STDC_VERSION_STDCKDINT_H__ 202311L

/* Preferred implementation: when the compiler provides the arithmetic
 * overflow intrinsics (clang & friends via __has_builtin, or GCC >= 5),
 * map the standard ckd_* macros directly onto them. Each macro stores the
 * wrapped result in "*r" and yields non-zero iff the mathematically exact
 * result did not fit the type of "*r".
 * NOTE(review): "__ibmxl__" / "__INTEL_COMPILER" are excluded, presumably
 * because they advertise GCC compatibility without reliably providing
 * these builtins -- TODO confirm.
 * NOTE(review): assumes "__stdinc.h" supplies a no-op "__has_builtin"
 * fallback for compilers that lack it -- verify. */
#if ((__has_builtin(__builtin_add_overflow) && !defined(__ibmxl__)) || \
     (defined(__GNUC__) && (__GNUC__ >= 5) && !defined(__INTEL_COMPILER)))
#define ckd_add(r, a, b) ((__BOOL)__builtin_add_overflow(a, b, r))
#define ckd_sub(r, a, b) ((__BOOL)__builtin_sub_overflow(a, b, r))
#define ckd_mul(r, a, b) ((__BOOL)__builtin_mul_overflow(a, b, r))
#else /* __GNUC__... */
/* Fallback: implement the ckd_* macros on top of <hybrid/__overflow.h>.
 * Doing so requires picking the signed- or unsigned- overflow primitive
 * based on the signedness of the *result* operand "*r". */
#include <hybrid/__overflow.h>
#include <hybrid/typecore.h>

/* Figure out some way of detecting if the type of a macro argument is signed... */
#ifdef __COMPILER_HAVE_C11_GENERIC
/* C11 "_Generic" version: build an extra ", <type>: 1" association for each
 * additional signed integer type, but only when that type actually exists
 * and is a *distinct* type on this compiler (a duplicate association inside
 * "_Generic" would be a compile-time error). */
#ifdef __COMPILER_HAVE_LONGLONG
#define __PRIVATE_CDK_ISSIGNED_LL , long long: 1
#else /* __COMPILER_HAVE_LONGLONG */
#define __PRIVATE_CDK_ISSIGNED_LL /* nothing */
#endif /* !__COMPILER_HAVE_LONGLONG */
#ifdef __COMPILER_INT8_IS_UNIQUE_TYPE
#define __PRIVATE_CDK_ISSIGNED_I8 , __int8: 1
#else /* __COMPILER_INT8_IS_UNIQUE_TYPE */
#define __PRIVATE_CDK_ISSIGNED_I8 /* nothing */
#endif /* !__COMPILER_INT8_IS_UNIQUE_TYPE */
#ifdef __COMPILER_INT16_IS_UNIQUE_TYPE
#define __PRIVATE_CDK_ISSIGNED_I16 , __int16: 1
#else /* __COMPILER_INT16_IS_UNIQUE_TYPE */
#define __PRIVATE_CDK_ISSIGNED_I16 /* nothing */
#endif /* !__COMPILER_INT16_IS_UNIQUE_TYPE */
#ifdef __COMPILER_INT32_IS_UNIQUE_TYPE
#define __PRIVATE_CDK_ISSIGNED_I32 , __int32: 1
#else /* __COMPILER_INT32_IS_UNIQUE_TYPE */
#define __PRIVATE_CDK_ISSIGNED_I32 /* nothing */
#endif /* !__COMPILER_INT32_IS_UNIQUE_TYPE */
#ifdef __COMPILER_INT64_IS_UNIQUE_TYPE
#define __PRIVATE_CDK_ISSIGNED_I64 , __int64: 1
#else /* __COMPILER_INT64_IS_UNIQUE_TYPE */
#define __PRIVATE_CDK_ISSIGNED_I64 /* nothing */
#endif /* !__COMPILER_INT64_IS_UNIQUE_TYPE */
#ifdef __INT128_TYPE__
#define __PRIVATE_CDK_ISSIGNED_I128 , __INT128_TYPE__: 1
#else /* __INT128_TYPE__ */
#define __PRIVATE_CDK_ISSIGNED_I128 /* nothing */
#endif /* !__INT128_TYPE__ */
/* >> __PRIVATE_CDK_ISSIGNED(x): evaluate to 1 if "x" has a signed integer
 * type, and to 0 otherwise.
 * Note: plain "char" and "_Bool" fall into "default: 0"; C23 disallows them
 * as ckd_* result types to begin with, so they never need classification. */
#define __PRIVATE_CDK_ISSIGNED(x) \
	_Generic((x), signed char: 1, short: 1, int: 1, long: 1 __PRIVATE_CDK_ISSIGNED_LL __PRIVATE_CDK_ISSIGNED_I8 __PRIVATE_CDK_ISSIGNED_I16 __PRIVATE_CDK_ISSIGNED_I32 __PRIVATE_CDK_ISSIGNED_I64 __PRIVATE_CDK_ISSIGNED_I128, default: 0)
#elif defined(__COMPILER_HAVE_TYPEOF) && !defined(__NO_builtin_types_compatible_p)
/* GNU fallback: "__typeof__" + "__builtin_types_compatible_p". Each helper
 * below appends one "|| __builtin_types_compatible_p(T, <type>)" clause,
 * again only for types that exist as distinct types on this compiler. */
#ifdef __COMPILER_HAVE_LONGLONG
#define __PRIVATE_CDK_ISSIGNED_LL(T) || __builtin_types_compatible_p(T, long long)
#else /* __COMPILER_HAVE_LONGLONG */
#define __PRIVATE_CDK_ISSIGNED_LL(T) /* nothing */
#endif /* !__COMPILER_HAVE_LONGLONG */
#ifdef __COMPILER_INT8_IS_UNIQUE_TYPE
#define __PRIVATE_CDK_ISSIGNED_I8(T) || __builtin_types_compatible_p(T, __int8)
#else /* __COMPILER_INT8_IS_UNIQUE_TYPE */
#define __PRIVATE_CDK_ISSIGNED_I8(T) /* nothing */
#endif /* !__COMPILER_INT8_IS_UNIQUE_TYPE */
#ifdef __COMPILER_INT16_IS_UNIQUE_TYPE
#define __PRIVATE_CDK_ISSIGNED_I16(T) || __builtin_types_compatible_p(T, __int16)
#else /* __COMPILER_INT16_IS_UNIQUE_TYPE */
#define __PRIVATE_CDK_ISSIGNED_I16(T) /* nothing */
#endif /* !__COMPILER_INT16_IS_UNIQUE_TYPE */
#ifdef __COMPILER_INT32_IS_UNIQUE_TYPE
#define __PRIVATE_CDK_ISSIGNED_I32(T) || __builtin_types_compatible_p(T, __int32)
#else /* __COMPILER_INT32_IS_UNIQUE_TYPE */
#define __PRIVATE_CDK_ISSIGNED_I32(T) /* nothing */
#endif /* !__COMPILER_INT32_IS_UNIQUE_TYPE */
#ifdef __COMPILER_INT64_IS_UNIQUE_TYPE
#define __PRIVATE_CDK_ISSIGNED_I64(T) || __builtin_types_compatible_p(T, __int64)
#else /* __COMPILER_INT64_IS_UNIQUE_TYPE */
#define __PRIVATE_CDK_ISSIGNED_I64(T) /* nothing */
#endif /* !__COMPILER_INT64_IS_UNIQUE_TYPE */
#ifdef __INT128_TYPE__
#define __PRIVATE_CDK_ISSIGNED_I128(T) || __builtin_types_compatible_p(T, __INT128_TYPE__)
#else /* __INT128_TYPE__ */
#define __PRIVATE_CDK_ISSIGNED_I128(T) /* nothing */
#endif /* !__INT128_TYPE__ */
/* >> __PRIVATE_CDK_ISSIGNED_T(T): 1 if "T" is a signed integer type, else 0 */
#define __PRIVATE_CDK_ISSIGNED_T(T) \
	(__builtin_types_compatible_p(T, signed char) || \
	 __builtin_types_compatible_p(T, short) || \
	 __builtin_types_compatible_p(T, int) || \
	 __builtin_types_compatible_p(T, long) \
	 __PRIVATE_CDK_ISSIGNED_LL(T) \
	 __PRIVATE_CDK_ISSIGNED_I8(T) \
	 __PRIVATE_CDK_ISSIGNED_I16(T) \
	 __PRIVATE_CDK_ISSIGNED_I32(T) \
	 __PRIVATE_CDK_ISSIGNED_I64(T) \
	 __PRIVATE_CDK_ISSIGNED_I128(T))
#define __PRIVATE_CDK_ISSIGNED(x) __PRIVATE_CDK_ISSIGNED_T(__typeof__(x))
#elif !defined(__cplusplus)
/* Last-resort (C only): "(0 ? (x) : -1)" has the arithmetically balanced
 * type of "x" and "int", so "< 0" is false iff that balanced type is
 * unsigned. Because types narrower than "int" undergo integer promotion
 * (to *signed* "int") first, both variants below are inexact for small
 * integer types -- in opposite directions. */
/* WARNING: This version incorrectly returns "false" for "signed char" and "short"
 *          (they promote to "int", pass the "< 0" test, but then fail the
 *          sizeof() filter and so get classified as unsigned) */
#if 1
#define __PRIVATE_CDK_ISSIGNED(x) ((0 ? (x) : -1) < 0 && (sizeof(x) >= __SIZEOF_INT__))
#else /* WARNING: This version incorrectly returns "true" for "unsigned char" and "unsigned short" */
#define __PRIVATE_CDK_ISSIGNED(x) ((0 ? (x) : -1) < 0)
#endif
#endif /* ... */

#ifdef __PRIVATE_CDK_ISSIGNED
/* C fallback: statically dispatch to the signed or unsigned overflow
 * primitive from <hybrid/__overflow.h>, based on the signedness of the
 * result object "*(r)". Evaluates to non-zero on overflow. */
#ifdef __NO_builtin_choose_expr
/* Plain "?:" variant: both branches are always type-checked, which
 * presumably relies on the __hybrid_overflow_* helpers tolerating a
 * result pointer of either signedness -- TODO confirm against
 * <hybrid/__overflow.h>. */
#define ckd_add(r, a, b) ((__BOOL)(__PRIVATE_CDK_ISSIGNED(*(r)) ? __hybrid_overflow_sadd(a, b, r) : __hybrid_overflow_uadd(a, b, r)))
#define ckd_sub(r, a, b) ((__BOOL)(__PRIVATE_CDK_ISSIGNED(*(r)) ? __hybrid_overflow_ssub(a, b, r) : __hybrid_overflow_usub(a, b, r)))
#define ckd_mul(r, a, b) ((__BOOL)(__PRIVATE_CDK_ISSIGNED(*(r)) ? __hybrid_overflow_smul(a, b, r) : __hybrid_overflow_umul(a, b, r)))
#else /* __NO_builtin_choose_expr */
/* "__builtin_choose_expr" variant: unlike "?:", the expression takes on
 * exactly the type of the selected operand (no type unification between
 * the two branches). */
#define ckd_add(r, a, b) ((__BOOL)__builtin_choose_expr(__PRIVATE_CDK_ISSIGNED(*(r)), __hybrid_overflow_sadd(a, b, r), __hybrid_overflow_uadd(a, b, r)))
#define ckd_sub(r, a, b) ((__BOOL)__builtin_choose_expr(__PRIVATE_CDK_ISSIGNED(*(r)), __hybrid_overflow_ssub(a, b, r), __hybrid_overflow_usub(a, b, r)))
#define ckd_mul(r, a, b) ((__BOOL)__builtin_choose_expr(__PRIVATE_CDK_ISSIGNED(*(r)), __hybrid_overflow_smul(a, b, r), __hybrid_overflow_umul(a, b, r)))
#endif /* !__NO_builtin_choose_expr */
| 132 | +#elif defined(__cplusplus) |
| 133 | +#include "__stdcxx.h" |
| 134 | +__CXXDECL_BEGIN |
| 135 | +#undef ckd_add |
| 136 | +#undef ckd_sub |
| 137 | +#undef ckd_mul |
| 138 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_add(signed char *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_sadd(__a, __b, __r); } |
| 139 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_sub(signed char *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_ssub(__a, __b, __r); } |
| 140 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_mul(signed char *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_smul(__a, __b, __r); } |
| 141 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_add(unsigned char *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_uadd(__a, __b, __r); } |
| 142 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_sub(unsigned char *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_usub(__a, __b, __r); } |
| 143 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_mul(unsigned char *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_umul(__a, __b, __r); } |
| 144 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_add(short *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_sadd(__a, __b, __r); } |
| 145 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_sub(short *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_ssub(__a, __b, __r); } |
| 146 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_mul(short *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_smul(__a, __b, __r); } |
| 147 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_add(unsigned short *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_uadd(__a, __b, __r); } |
| 148 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_sub(unsigned short *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_usub(__a, __b, __r); } |
| 149 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_mul(unsigned short *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_umul(__a, __b, __r); } |
| 150 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_add(int *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_sadd(__a, __b, __r); } |
| 151 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_sub(int *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_ssub(__a, __b, __r); } |
| 152 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_mul(int *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_smul(__a, __b, __r); } |
| 153 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_add(unsigned int *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_uadd(__a, __b, __r); } |
| 154 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_sub(unsigned int *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_usub(__a, __b, __r); } |
| 155 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_mul(unsigned int *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_umul(__a, __b, __r); } |
| 156 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_add(long *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_sadd(__a, __b, __r); } |
| 157 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_sub(long *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_ssub(__a, __b, __r); } |
| 158 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_mul(long *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_smul(__a, __b, __r); } |
| 159 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_add(unsigned long *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_uadd(__a, __b, __r); } |
| 160 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_sub(unsigned long *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_usub(__a, __b, __r); } |
| 161 | +template<class __Ta, class __Tb> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1) __BOOL ckd_mul(unsigned long *__r, __Ta __a, __Tb __b) { return __hybrid_overflow_umul(__a, __b, __r); } |
/* Additional overloads for compiler-specific / fixed-width integer types.
 * Each group is gated on that type actually being *distinct* from the basic
 * types handled above (otherwise these definitions would be redefinitions).
 * The generator macro is private to this section and #undef'd afterwards. */
#define __PRIVATE_CKD_DEFINE_PAIR(Ts, Tu)                                                                 \
	template<class __TLhs, class __TRhs> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1)      \
	__BOOL ckd_add(Ts *__dst, __TLhs __x, __TRhs __y) { return __hybrid_overflow_sadd(__x, __y, __dst); } \
	template<class __TLhs, class __TRhs> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1)      \
	__BOOL ckd_sub(Ts *__dst, __TLhs __x, __TRhs __y) { return __hybrid_overflow_ssub(__x, __y, __dst); } \
	template<class __TLhs, class __TRhs> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1)      \
	__BOOL ckd_mul(Ts *__dst, __TLhs __x, __TRhs __y) { return __hybrid_overflow_smul(__x, __y, __dst); } \
	template<class __TLhs, class __TRhs> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1)      \
	__BOOL ckd_add(Tu *__dst, __TLhs __x, __TRhs __y) { return __hybrid_overflow_uadd(__x, __y, __dst); } \
	template<class __TLhs, class __TRhs> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1)      \
	__BOOL ckd_sub(Tu *__dst, __TLhs __x, __TRhs __y) { return __hybrid_overflow_usub(__x, __y, __dst); } \
	template<class __TLhs, class __TRhs> __FORCELOCAL __ATTR_ARTIFICIAL __ATTR_WUNUSED __ATTR_OUT(1)      \
	__BOOL ckd_mul(Tu *__dst, __TLhs __x, __TRhs __y) { return __hybrid_overflow_umul(__x, __y, __dst); }
#ifdef __COMPILER_HAVE_LONGLONG
__PRIVATE_CKD_DEFINE_PAIR(__LONGLONG, __ULONGLONG)
#endif /* __COMPILER_HAVE_LONGLONG */
#ifdef __COMPILER_INT8_IS_UNIQUE_TYPE
__PRIVATE_CKD_DEFINE_PAIR(__int8, unsigned __int8)
#endif /* __COMPILER_INT8_IS_UNIQUE_TYPE */
#ifdef __COMPILER_INT16_IS_UNIQUE_TYPE
__PRIVATE_CKD_DEFINE_PAIR(__int16, unsigned __int16)
#endif /* __COMPILER_INT16_IS_UNIQUE_TYPE */
#ifdef __COMPILER_INT32_IS_UNIQUE_TYPE
__PRIVATE_CKD_DEFINE_PAIR(__int32, unsigned __int32)
#endif /* __COMPILER_INT32_IS_UNIQUE_TYPE */
#ifdef __COMPILER_INT64_IS_UNIQUE_TYPE
__PRIVATE_CKD_DEFINE_PAIR(__int64, unsigned __int64)
#endif /* __COMPILER_INT64_IS_UNIQUE_TYPE */
#ifdef __INT128_TYPE__
__PRIVATE_CKD_DEFINE_PAIR(__INT128_TYPE__, __UINT128_TYPE__)
#endif /* __INT128_TYPE__ */
#undef __PRIVATE_CKD_DEFINE_PAIR
/* Self-referential defines: C23 specifies ckd_* as macros, so make the
 * (overloaded) function names visible to "#ifdef ckd_add"-style checks. */
#define ckd_add ckd_add
#define ckd_sub ckd_sub
#define ckd_mul ckd_mul
__CXXDECL_END
#else /* ... */
/* Neither type-signedness detection (C) nor overloading (C++) is possible */
#error "No way to implement 'ckd_add', 'ckd_sub', 'ckd_mul'"
#endif /* !... */
#endif /* !__GNUC__... */

#endif /* !_STDCKDINT_H */