-use core::str::FromStr;
+use core::{cmp::Ordering, str::FromStr};
 
 use super::Byte;
 use crate::{ExceededBoundsError, ParseError, TryFromIntError};
@@ -202,3 +202,115 @@ impl FromStr for Byte {
         Byte::parse_str(s, false)
     }
 }
+
+impl PartialEq<u64> for Byte {
+    #[cfg(feature = "u128")]
+    #[inline]
+    fn eq(&self, other: &u64) -> bool {
+        self.0 == *other as u128
+    }
+
+    #[cfg(not(feature = "u128"))]
+    #[inline]
+    fn eq(&self, other: &u64) -> bool {
+        self.0 == *other
+    }
+}
+
+impl PartialEq<u128> for Byte {
+    #[cfg(feature = "u128")]
+    #[inline]
+    fn eq(&self, other: &u128) -> bool {
+        self.0 == *other
+    }
+
+    #[cfg(not(feature = "u128"))]
+    #[inline]
+    fn eq(&self, other: &u128) -> bool {
+        self.0 as u128 == *other
+    }
+}
+
+impl PartialEq<Byte> for u64 {
+    #[cfg(feature = "u128")]
+    #[inline]
+    fn eq(&self, other: &Byte) -> bool {
+        *self as u128 == other.0
+    }
+
+    #[cfg(not(feature = "u128"))]
+    #[inline]
+    fn eq(&self, other: &Byte) -> bool {
+        *self == other.0
+    }
+}
+
+impl PartialEq<Byte> for u128 {
+    #[cfg(feature = "u128")]
+    #[inline]
+    fn eq(&self, other: &Byte) -> bool {
+        *self == other.0
+    }
+
+    #[cfg(not(feature = "u128"))]
+    #[inline]
+    fn eq(&self, other: &Byte) -> bool {
+        *self == other.0 as u128
+    }
+}
+
+impl PartialOrd<u64> for Byte {
+    #[cfg(feature = "u128")]
+    #[inline]
+    fn partial_cmp(&self, other: &u64) -> Option<Ordering> {
+        self.0.partial_cmp(&(*other as u128))
+    }
+
+    #[cfg(not(feature = "u128"))]
+    #[inline]
+    fn partial_cmp(&self, other: &u64) -> Option<Ordering> {
+        self.0.partial_cmp(other)
+    }
+}
+
+impl PartialOrd<u128> for Byte {
+    #[cfg(feature = "u128")]
+    #[inline]
+    fn partial_cmp(&self, other: &u128) -> Option<Ordering> {
+        self.0.partial_cmp(other)
+    }
+
+    #[cfg(not(feature = "u128"))]
+    #[inline]
+    fn partial_cmp(&self, other: &u128) -> Option<Ordering> {
+        (self.0 as u128).partial_cmp(other)
+    }
+}
+
+impl PartialOrd<Byte> for u64 {
+    #[cfg(feature = "u128")]
+    #[inline]
+    fn partial_cmp(&self, other: &Byte) -> Option<Ordering> {
+        (*self as u128).partial_cmp(&other.0)
+    }
+
+    #[cfg(not(feature = "u128"))]
+    #[inline]
+    fn partial_cmp(&self, other: &Byte) -> Option<Ordering> {
+        self.partial_cmp(&other.0)
+    }
+}
+
+impl PartialOrd<Byte> for u128 {
+    #[cfg(feature = "u128")]
+    #[inline]
+    fn partial_cmp(&self, other: &Byte) -> Option<Ordering> {
+        self.partial_cmp(&other.0)
+    }
+
+    #[cfg(not(feature = "u128"))]
+    #[inline]
+    fn partial_cmp(&self, other: &Byte) -> Option<Ordering> {
+        self.partial_cmp(&(other.0 as u128))
+    }
+}
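
The added impls let a `Byte` be compared against plain `u64`/`u128` values from either side; each method body is split on `#[cfg(feature = "u128")]` so that whichever operand is narrower than the inner field is widened to `u128` before comparing, and no value is ever truncated. A minimal usage sketch, assuming the `byte_unit` crate name and its `Byte::from_u64` constructor (neither is part of this diff):

```rust
use byte_unit::Byte;

fn main() {
    // Hypothetical constructor; any way of obtaining a `Byte` works the same.
    let byte = Byte::from_u64(1024);

    // Equality against bare integers, in both directions (PartialEq impls).
    assert!(byte == 1024u64);
    assert!(1024u64 == byte);
    assert!(byte == 1024u128);

    // Ordering against bare integers, in both directions (PartialOrd impls).
    assert!(byte > 1000u64);
    assert!(500u64 < byte);
    assert!(byte <= 1024u128);
}
```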