Add to_json for maps.

Author: Stephen Chung
Date:   2022-04-21 12:15:21 +08:00
Parent: 4f2764d233
Commit: c3d013bddc

6 changed files with 205 additions and 129 deletions
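
Note: this hunk shows only the removal side of the change; `parse_json` moves out of this file (the hunk header below records 132 lines removed, 4 kept), while the new `to_json` for object maps lands in one of the other changed files, which is not shown here. A minimal sketch of the script-side usage the commit title suggests, assuming `to_json` is registered as a method on object maps (the exact output formatting is an assumption):

use rhai::Engine;

fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    let engine = Engine::new();

    // Hypothetical usage: serialize an object map to a JSON string.
    // Rhai object maps are ordered, so keys should come out in a stable order.
    let json = engine.eval::<String>(r#"#{ "a": 123, "b": true }.to_json()"#)?;
    println!("{json}"); // e.g. {"a":123,"b":true}

    Ok(())
}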

@@ -308,132 +308,4 @@ impl Engine {
            self.options.optimization_level,
        )
    }

    /// Parse a JSON string into an [object map][crate::Map].
    /// This is a light-weight alternative to using, say,
    /// [`serde_json`](https://crates.io/crates/serde_json) to deserialize the JSON.
    ///
    /// Not available under `no_object`.
    ///
    /// The JSON string must be an object hash. It cannot be a simple primitive value.
    ///
    /// Set `has_null` to `true` in order to map `null` values to `()`.
    /// Setting it to `false` causes a syntax error for any `null` value.
    ///
    /// JSON sub-objects are handled transparently.
    ///
    /// # Example
    ///
    /// ```
    /// # fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    /// use rhai::{Engine, Map};
    ///
    /// let engine = Engine::new();
    ///
    /// let map = engine.parse_json(r#"
    /// {
    ///     "a": 123,
    ///     "b": 42,
    ///     "c": {
    ///         "x": false,
    ///         "y": true,
    ///         "z": '$'
    ///     },
    ///     "d": null
    /// }"#, true)?;
    ///
    /// assert_eq!(map.len(), 4);
    /// assert_eq!(map["a"].as_int().expect("a should exist"), 123);
    /// assert_eq!(map["b"].as_int().expect("b should exist"), 42);
    /// assert_eq!(map["d"].as_unit().expect("d should exist"), ());
    ///
    /// let c = map["c"].read_lock::<Map>().expect("c should exist");
    /// assert_eq!(c["x"].as_bool().expect("x should be bool"), false);
    /// assert_eq!(c["y"].as_bool().expect("y should be bool"), true);
    /// assert_eq!(c["z"].as_char().expect("z should be char"), '$');
    /// # Ok(())
    /// # }
    /// ```
    #[cfg(not(feature = "no_object"))]
    #[inline(always)]
    pub fn parse_json(
        &self,
        json: impl AsRef<str>,
        has_null: bool,
    ) -> crate::RhaiResultOf<crate::Map> {
        use crate::{tokenizer::Token, LexError};

        let scripts = [json.as_ref()];

        let (stream, tokenizer_control) = self.lex_raw(
            &scripts,
            if has_null {
                Some(&|token, _, _| {
                    match token {
                        // `null` => `()`
                        Token::Reserved(s) if &*s == "null" => Token::Unit,
                        // `{` => `#{`
                        Token::LeftBrace => Token::MapStart,
                        // Disallowed syntax
                        t @ (Token::Unit | Token::MapStart) => Token::LexError(
                            LexError::ImproperSymbol(
                                t.literal_syntax().to_string(),
                                "".to_string(),
                            )
                            .into(),
                        ),
                        Token::InterpolatedString(..) => Token::LexError(
                            LexError::ImproperSymbol(
                                "interpolated string".to_string(),
                                "".to_string(),
                            )
                            .into(),
                        ),
                        // All others
                        _ => token,
                    }
                })
            } else {
                Some(&|token, _, _| {
                    match token {
                        // `null` => error
                        Token::Reserved(s) if &*s == "null" => Token::LexError(
                            LexError::ImproperSymbol("null".to_string(), "".to_string()).into(),
                        ),
                        // `{` => `#{`
                        Token::LeftBrace => Token::MapStart,
                        // Disallowed syntax
                        t @ (Token::Unit | Token::MapStart) => Token::LexError(
                            LexError::ImproperSymbol(
                                t.literal_syntax().to_string(),
                                "".to_string(),
                            )
                            .into(),
                        ),
                        Token::InterpolatedString(..) => Token::LexError(
                            LexError::ImproperSymbol(
                                "interpolated string".to_string(),
                                "".to_string(),
                            )
                            .into(),
                        ),
                        // All others
                        _ => token,
                    }
                })
            },
        );

        let mut state = ParseState::new(self, tokenizer_control);

        let ast = self.parse_global_expr(
            &mut stream.peekable(),
            &mut state,
            &Scope::new(),
            #[cfg(not(feature = "no_optimize"))]
            OptimizationLevel::None,
            #[cfg(feature = "no_optimize")]
            OptimizationLevel::default(),
        )?;

        self.eval_ast(&ast)
    }
}
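
For reference, a short sketch of how the two token-filter branches above behave at the call site. `parse_json`, the `has_null` flag, and the `null` => `()` mapping are all from the code above; treating the rejected `null` as a plain `Err` at the API surface is an assumption about how the `LexError` propagates:

use rhai::Engine;

fn main() {
    let engine = Engine::new();

    // `has_null == true`: the token filter rewrites `null` into the unit token `()`.
    let map = engine.parse_json(r#"{ "x": null }"#, true).unwrap();
    assert_eq!(map["x"].as_unit().expect("x should be ()"), ());

    // `has_null == false`: the same filter turns `null` into a lex error instead.
    assert!(engine.parse_json(r#"{ "x": null }"#, false).is_err());
}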