Fix bug in hex parsing for negative numbers.
@@ -27,6 +27,11 @@ fn test_hex_literal() -> Result<(), Box<EvalAltResult>> {
     assert_eq!(engine.eval::<INT>("let x = 0Xf; x")?, 15);
     assert_eq!(engine.eval::<INT>("let x = 0xff; x")?, 255);
 
+    #[cfg(not(feature = "only_i32"))]
+    assert_eq!(engine.eval::<INT>("let x = 0xffffffffffffffff; x")?, -1);
+    #[cfg(feature = "only_i32")]
+    assert_eq!(engine.eval::<INT>("let x = 0xffffffff; x")?, -1);
+
     Ok(())
 }
 
@@ -51,6 +56,18 @@ fn test_binary_literal() -> Result<(), Box<EvalAltResult>> {
         engine.eval::<INT>("let x = 0b0011_1100_1010_0101; x")?,
         15525
     );
+    #[cfg(not(feature = "only_i32"))]
+    assert_eq!(
+        engine.eval::<INT>(
+            "let x = 0b11111111_11111111_11111111_11111111_11111111_11111111_11111111_11111111; x"
+        )?,
+        -1
+    );
+    #[cfg(feature = "only_i32")]
+    assert_eq!(
+        engine.eval::<INT>("let x = 0b11111111_11111111_11111111_11111111; x")?,
+        -1
+    );
 
     Ok(())
 }
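
The new tests rely on out-of-range hex and binary literals wrapping around to negative values: an all-ones bit pattern must evaluate to -1 rather than overflow. A minimal sketch of that technique, assuming the lexer hands the digits after the `0x`/`0b` prefix to a radix parser; `parse_radix_literal` and `INT` below are illustrative stand-ins, not Rhai's actual internals:

// Minimal sketch, assuming Rhai's default 64-bit INT;
// `parse_radix_literal` is a hypothetical helper, not Rhai's real API.
type INT = i64; // i32 when the "only_i32" feature is enabled

fn parse_radix_literal(digits: &str, radix: u32) -> Option<INT> {
    // Parsing directly with i64::from_str_radix overflows for
    // 0xffffffffffffffff. Parsing as u64 and casting with `as`
    // reinterprets the bits as two's-complement, so an all-ones
    // pattern becomes -1, matching the tests above.
    u64::from_str_radix(digits, radix).ok().map(|n| n as INT)
}

fn main() {
    assert_eq!(parse_radix_literal("f", 16), Some(15));
    assert_eq!(parse_radix_literal("ff", 16), Some(255));
    assert_eq!(parse_radix_literal("ffffffffffffffff", 16), Some(-1));
    assert_eq!(parse_radix_literal(&"1".repeat(64), 2), Some(-1));
}

Note that `u64::from_str_radix` rejects underscore separators, so the digit-group underscores seen in the test literals (e.g. `0b0011_1100_1010_0101`) would have to be stripped by the lexer before this step.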