RflySimSDK v4.10
RflySimSDK Documentation
parser.hpp
1// __ _____ _____ _____
2// __| | __| | | | JSON for Modern C++
3// | | |__ | | | | | | version 3.12.0
4// |_____|_____|_____|_|___| https://github.com/nlohmann/json
5//
6// SPDX-FileCopyrightText: 2013-2025 Niels Lohmann <https://nlohmann.me>
7// SPDX-License-Identifier: MIT
8
9#pragma once
10
11#include <cmath> // isfinite
12#include <cstdint> // uint8_t
13#include <functional> // function
14#include <string> // string
15#include <utility> // move
16#include <vector> // vector
17
18#include <nlohmann/detail/exceptions.hpp>
19#include <nlohmann/detail/input/input_adapters.hpp>
20#include <nlohmann/detail/input/json_sax.hpp>
21#include <nlohmann/detail/input/lexer.hpp>
22#include <nlohmann/detail/macro_scope.hpp>
23#include <nlohmann/detail/meta/is_sax.hpp>
24#include <nlohmann/detail/string_concat.hpp>
25#include <nlohmann/detail/value_t.hpp>
26
27NLOHMANN_JSON_NAMESPACE_BEGIN
28namespace detail
29{
30////////////
31// parser //
32////////////
33
34enum class parse_event_t : std::uint8_t
35{
36 /// the parser read `{` and started to process a JSON object
37 object_start,
38 /// the parser read `}` and finished processing a JSON object
39 object_end,
40 /// the parser read `[` and started to process a JSON array
41 array_start,
42 /// the parser read `]` and finished processing a JSON array
43 array_end,
44 /// the parser read a key of a value in an object
45 key,
46 /// the parser finished reading a JSON value
47 value
48};
49
50template<typename BasicJsonType>
51using parser_callback_t =
52 std::function<bool(int /*depth*/, parse_event_t /*event*/, BasicJsonType& /*parsed*/)>;
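
// A minimal usage sketch of the callback type above, assuming the public
// nlohmann::json alias as BasicJsonType. Returning false from the callback
// discards the value that was just parsed; parse() below then replaces a
// discarded top-level value with null.
//
//     using json = nlohmann::json;
//
//     json::parser_callback_t cb = [](int /*depth*/, json::parse_event_t event, json& parsed)
//     {
//         // drop every object member whose key is "comment"
//         return !(event == json::parse_event_t::key && parsed == json("comment"));
//     };
//
//     json filtered = json::parse(R"({"value": 42, "comment": "drop me"})", cb);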
53
54/*!
55@brief syntax analysis
56
57This class implements a recursive descent parser.
58*/
59template<typename BasicJsonType, typename InputAdapterType>
60class parser
61{
62 using number_integer_t = typename BasicJsonType::number_integer_t;
63 using number_unsigned_t = typename BasicJsonType::number_unsigned_t;
64 using number_float_t = typename BasicJsonType::number_float_t;
65 using string_t = typename BasicJsonType::string_t;
66 using lexer_t = lexer<BasicJsonType, InputAdapterType>;
67 using token_type = typename lexer_t::token_type;
68
69 public:
70 /// a parser reading from an input adapter
71 explicit parser(InputAdapterType&& adapter,
72 parser_callback_t<BasicJsonType> cb = nullptr,
73 const bool allow_exceptions_ = true,
74 const bool ignore_comments = false,
75 const bool ignore_trailing_commas_ = false)
76 : callback(std::move(cb))
77 , m_lexer(std::move(adapter), ignore_comments)
78 , allow_exceptions(allow_exceptions_)
79 , ignore_trailing_commas(ignore_trailing_commas_)
80 {
81 // read first token
82 get_token();
83 }
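
// Users normally reach these flags through the public factory functions rather
// than by constructing detail::parser directly. A sketch, assuming the
// nlohmann::json alias (parameter names shown in comments for clarity only):
//
//     using json = nlohmann::json;
//
//     // tolerate // and /* */ comments in the input (ignore_comments = true)
//     json j = json::parse(R"({"x": 1 /* inline comment */})",
//                          /*cb*/ nullptr,
//                          /*allow_exceptions*/ true,
//                          /*ignore_comments*/ true);
//
//     // ignore_trailing_commas_ is surfaced the same way by the parse/accept
//     // overloads of releases that expose an ignore_trailing_commas argument.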
84
85 /*!
86 @brief public parser interface
87
88 @param[in] strict whether to expect the last token to be EOF
89 @param[in,out] result parsed JSON value
90
91 @throw parse_error.101 in case of an unexpected token
92 @throw parse_error.102 if to_unicode fails or surrogate error
93 @throw parse_error.103 if to_unicode fails
94 */
95 void parse(const bool strict, BasicJsonType& result)
96 {
97 if (callback)
98 {
99 json_sax_dom_callback_parser<BasicJsonType, InputAdapterType> sdp(result, callback, allow_exceptions, &m_lexer);
100 sax_parse_internal(&sdp);
101
102 // in strict mode, input must be completely read
103 if (strict && (get_token() != token_type::end_of_input))
104 {
105 sdp.parse_error(m_lexer.get_position(),
106 m_lexer.get_token_string(),
107 parse_error::create(101, m_lexer.get_position(),
108 exception_message(token_type::end_of_input, "value"), nullptr));
109 }
110
111 // in case of an error, return a discarded value
112 if (sdp.is_errored())
113 {
114 result = value_t::discarded;
115 return;
116 }
117
118 // set top-level value to null if it was discarded by the callback
119 // function
120 if (result.is_discarded())
121 {
122 result = nullptr;
123 }
124 }
125 else
126 {
127 json_sax_dom_parser<BasicJsonType, InputAdapterType> sdp(result, allow_exceptions, &m_lexer);
128 sax_parse_internal(&sdp);
129
130 // in strict mode, input must be completely read
131 if (strict && (get_token() != token_type::end_of_input))
132 {
133 sdp.parse_error(m_lexer.get_position(),
134 m_lexer.get_token_string(),
135 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_of_input, "value"), nullptr));
136 }
137
138 // in case of an error, return a discarded value
139 if (sdp.is_errored())
140 {
141 result = value_t::discarded;
142 return;
143 }
144 }
145
146 result.assert_invariant();
147 }
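
// The two error strategies above seen from the public API (assuming the
// nlohmann::json alias): with allow_exceptions = true a parse_error.101 is
// thrown on bad input; with allow_exceptions = false the result is a
// discarded value instead.
//
//     using json = nlohmann::json;
//
//     json ok = json::parse(R"({"valid": true})");             // throws on error
//
//     json bad = json::parse("{ invalid", nullptr, false);     // never throws
//     if (bad.is_discarded())
//     {
//         // handle the syntax error without exceptions
//     }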
148
149 /*!
150 @brief public accept interface
151
152 @param[in] strict whether to expect the last token to be EOF
153 @return whether the input is a proper JSON text
154 */
155 bool accept(const bool strict = true)
156 {
157 json_sax_acceptor<BasicJsonType> sax_acceptor;
158 return sax_parse(&sax_acceptor, strict);
159 }
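
// accept() only validates: no DOM is built and no exception is thrown. Through
// the public API (assuming the nlohmann::json alias) this is simply:
//
//     bool valid = nlohmann::json::accept(R"([1, 2, 3])");   // true
//     bool bad   = nlohmann::json::accept("[1, 2,");         // false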
160
161 template<typename SAX>
162 JSON_HEDLEY_NON_NULL(2)
163 bool sax_parse(SAX* sax, const bool strict = true)
164 {
165 (void)detail::is_sax_static_asserts<SAX, BasicJsonType> {};
166 const bool result = sax_parse_internal(sax);
167
168 // strict mode: next byte must be EOF
169 if (result && strict && (get_token() != token_type::end_of_input))
170 {
171 return sax->parse_error(m_lexer.get_position(),
172 m_lexer.get_token_string(),
173 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_of_input, "value"), nullptr));
174 }
175
176 return result;
177 }
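
// A sketch of a custom SAX consumer for sax_parse(), assuming the public
// nlohmann::json alias. The handler counts events instead of building a DOM;
// it provides every member the SAX interface expects and returns true to keep
// parsing (counting_sax and handler are illustrative names, not library API):
//
//     using json = nlohmann::json;
//
//     struct counting_sax
//     {
//         std::size_t events = 0;
//         bool null() { ++events; return true; }
//         bool boolean(bool) { ++events; return true; }
//         bool number_integer(json::number_integer_t) { ++events; return true; }
//         bool number_unsigned(json::number_unsigned_t) { ++events; return true; }
//         bool number_float(json::number_float_t, const json::string_t&) { ++events; return true; }
//         bool string(json::string_t&) { ++events; return true; }
//         bool binary(json::binary_t&) { ++events; return true; }
//         bool start_object(std::size_t) { ++events; return true; }
//         bool key(json::string_t&) { ++events; return true; }
//         bool end_object() { ++events; return true; }
//         bool start_array(std::size_t) { ++events; return true; }
//         bool end_array() { ++events; return true; }
//         bool parse_error(std::size_t, const std::string&, const json::exception&) { return false; }
//     };
//
//     counting_sax handler;
//     bool ok = json::sax_parse(R"([1, {"a": null}])", &handler);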
178
179 private:
180 template<typename SAX>
181 JSON_HEDLEY_NON_NULL(2)
182 bool sax_parse_internal(SAX* sax)
183 {
184 // stack to remember the hierarchy of structured values we are parsing
185 // true = array; false = object
186 std::vector<bool> states;
187 // value to avoid a goto (see comment where set to true)
188 bool skip_to_state_evaluation = false;
189
190 while (true)
191 {
192 if (!skip_to_state_evaluation)
193 {
194 // invariant: get_token() was called before each iteration
195 switch (last_token)
196 {
197 case token_type::begin_object:
198 {
199 if (JSON_HEDLEY_UNLIKELY(!sax->start_object(detail::unknown_size())))
200 {
201 return false;
202 }
203
204 // closing } -> we are done
205 if (get_token() == token_type::end_object)
206 {
207 if (JSON_HEDLEY_UNLIKELY(!sax->end_object()))
208 {
209 return false;
210 }
211 break;
212 }
213
214 // parse key
215 if (JSON_HEDLEY_UNLIKELY(last_token != token_type::value_string))
216 {
217 return sax->parse_error(m_lexer.get_position(),
218 m_lexer.get_token_string(),
219 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::value_string, "object key"), nullptr));
220 }
221 if (JSON_HEDLEY_UNLIKELY(!sax->key(m_lexer.get_string())))
222 {
223 return false;
224 }
225
226 // parse separator (:)
227 if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::name_separator))
228 {
229 return sax->parse_error(m_lexer.get_position(),
230 m_lexer.get_token_string(),
231 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator, "object separator"), nullptr));
232 }
233
234 // remember we are now inside an object
235 states.push_back(false);
236
237 // parse values
238 get_token();
239 continue;
240 }
241
242 case token_type::begin_array:
243 {
244 if (JSON_HEDLEY_UNLIKELY(!sax->start_array(detail::unknown_size())))
245 {
246 return false;
247 }
248
249 // closing ] -> we are done
250 if (get_token() == token_type::end_array)
251 {
252 if (JSON_HEDLEY_UNLIKELY(!sax->end_array()))
253 {
254 return false;
255 }
256 break;
257 }
258
259 // remember we are now inside an array
260 states.push_back(true);
261
262 // parse values (no need to call get_token)
263 continue;
264 }
265
266 case token_type::value_float:
267 {
268 const auto res = m_lexer.get_number_float();
269
270 if (JSON_HEDLEY_UNLIKELY(!std::isfinite(res)))
271 {
272 return sax->parse_error(m_lexer.get_position(),
273 m_lexer.get_token_string(),
274 out_of_range::create(406, concat("number overflow parsing '", m_lexer.get_token_string(), '\''), nullptr));
275 }
276
277 if (JSON_HEDLEY_UNLIKELY(!sax->number_float(res, m_lexer.get_string())))
278 {
279 return false;
280 }
281
282 break;
283 }
284
285 case token_type::literal_false:
286 {
287 if (JSON_HEDLEY_UNLIKELY(!sax->boolean(false)))
288 {
289 return false;
290 }
291 break;
292 }
293
294 case token_type::literal_null:
295 {
296 if (JSON_HEDLEY_UNLIKELY(!sax->null()))
297 {
298 return false;
299 }
300 break;
301 }
302
303 case token_type::literal_true:
304 {
305 if (JSON_HEDLEY_UNLIKELY(!sax->boolean(true)))
306 {
307 return false;
308 }
309 break;
310 }
311
312 case token_type::value_integer:
313 {
314 if (JSON_HEDLEY_UNLIKELY(!sax->number_integer(m_lexer.get_number_integer())))
315 {
316 return false;
317 }
318 break;
319 }
320
321 case token_type::value_string:
322 {
323 if (JSON_HEDLEY_UNLIKELY(!sax->string(m_lexer.get_string())))
324 {
325 return false;
326 }
327 break;
328 }
329
330 case token_type::value_unsigned:
331 {
332 if (JSON_HEDLEY_UNLIKELY(!sax->number_unsigned(m_lexer.get_number_unsigned())))
333 {
334 return false;
335 }
336 break;
337 }
338
339 case token_type::parse_error:
340 {
341 // using "uninitialized" to avoid an "expected" message
342 return sax->parse_error(m_lexer.get_position(),
343 m_lexer.get_token_string(),
344 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::uninitialized, "value"), nullptr));
345 }
346 case token_type::end_of_input:
347 {
348 if (JSON_HEDLEY_UNLIKELY(m_lexer.get_position().chars_read_total == 1))
349 {
350 return sax->parse_error(m_lexer.get_position(),
351 m_lexer.get_token_string(),
352 parse_error::create(101, m_lexer.get_position(),
353 "attempting to parse an empty input; check that your input string or stream contains the expected JSON", nullptr));
354 }
355
356 return sax->parse_error(m_lexer.get_position(),
357 m_lexer.get_token_string(),
358 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::literal_or_value, "value"), nullptr));
359 }
360 case token_type::uninitialized:
361 case token_type::end_array:
362 case token_type::end_object:
363 case token_type::name_separator:
364 case token_type::value_separator:
365 case token_type::literal_or_value:
366 default: // the last token was unexpected
367 {
368 return sax->parse_error(m_lexer.get_position(),
369 m_lexer.get_token_string(),
370 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::literal_or_value, "value"), nullptr));
371 }
372 }
373 }
374 else
375 {
376 skip_to_state_evaluation = false;
377 }
378
379 // we reached this line after we successfully parsed a value
380 if (states.empty())
381 {
382 // empty stack: we reached the end of the hierarchy: done
383 return true;
384 }
385
386 if (states.back()) // array
387 {
388 // comma -> next value
389 // or end of array (ignore_trailing_commas = true)
390 if (get_token() == token_type::value_separator)
391 {
392 // parse a new value
393 get_token();
394
395 // if ignore_trailing_commas and last_token is ], we can continue to "closing ]"
396 if (!(ignore_trailing_commas && last_token == token_type::end_array))
397 {
398 continue;
399 }
400 }
401
402 // closing ]
403 if (JSON_HEDLEY_LIKELY(last_token == token_type::end_array))
404 {
405 if (JSON_HEDLEY_UNLIKELY(!sax->end_array()))
406 {
407 return false;
408 }
409
410 // We are done with this array. Before we can parse a
411 // new value, we need to evaluate the new state first.
412 // Setting skip_to_state_evaluation to true makes the next
413 // iteration skip the token switch and re-evaluate the state.
414 JSON_ASSERT(!states.empty());
415 states.pop_back();
416 skip_to_state_evaluation = true;
417 continue;
418 }
419
420 return sax->parse_error(m_lexer.get_position(),
421 m_lexer.get_token_string(),
422 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_array, "array"), nullptr));
423 }
424
425 // states.back() is false -> object
426
427 // comma -> next value
428 // or end of object (ignore_trailing_commas = true)
429 if (get_token() == token_type::value_separator)
430 {
431 get_token();
432
433 // if ignore_trailing_commas and last_token is }, we can continue to "closing }"
434 if (!(ignore_trailing_commas && last_token == token_type::end_object))
435 {
436 // parse key
437 if (JSON_HEDLEY_UNLIKELY(last_token != token_type::value_string))
438 {
439 return sax->parse_error(m_lexer.get_position(),
440 m_lexer.get_token_string(),
441 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::value_string, "object key"), nullptr));
442 }
443
444 if (JSON_HEDLEY_UNLIKELY(!sax->key(m_lexer.get_string())))
445 {
446 return false;
447 }
448
449 // parse separator (:)
450 if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::name_separator))
451 {
452 return sax->parse_error(m_lexer.get_position(),
453 m_lexer.get_token_string(),
454 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator, "object separator"), nullptr));
455 }
456
457 // parse values
458 get_token();
459 continue;
460 }
461 }
462
463 // closing }
464 if (JSON_HEDLEY_LIKELY(last_token == token_type::end_object))
465 {
466 if (JSON_HEDLEY_UNLIKELY(!sax->end_object()))
467 {
468 return false;
469 }
470
471 // We are done with this object. Before we can parse a
472 // new value, we need to evaluate the new state first.
473 // Setting skip_to_state_evaluation to true makes the next
474 // iteration skip the token switch and re-evaluate the state.
475 JSON_ASSERT(!states.empty());
476 states.pop_back();
477 skip_to_state_evaluation = true;
478 continue;
479 }
480
481 return sax->parse_error(m_lexer.get_position(),
482 m_lexer.get_token_string(),
483 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_object, "object"), nullptr));
484 }
485 }
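
// For illustration, the loop above emits the following SAX event sequence for
// the input [1, {"a": true}]; nesting depth is tracked only by the `states`
// vector, so no recursion is involved:
//
//     start_array -> number_unsigned(1) -> start_object -> key("a")
//     -> boolean(true) -> end_object -> end_array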
486
487 /// get next token from lexer
488 token_type get_token()
489 {
490 return last_token = m_lexer.scan();
491 }
492
493 std::string exception_message(const token_type expected, const std::string& context)
494 {
495 std::string error_msg = "syntax error ";
496
497 if (!context.empty())
498 {
499 error_msg += concat("while parsing ", context, ' ');
500 }
501
502 error_msg += "- ";
503
504 if (last_token == token_type::parse_error)
505 {
506 error_msg += concat(m_lexer.get_error_message(), "; last read: '",
507 m_lexer.get_token_string(), '\'');
508 }
509 else
510 {
511 error_msg += concat("unexpected ", lexer_t::token_type_name(last_token));
512 }
513
514 if (expected != token_type::uninitialized)
515 {
516 error_msg += concat("; expected ", lexer_t::token_type_name(expected));
517 }
518
519 return error_msg;
520 }
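
// The returned text ends up inside the what() string of parse_error. For the
// input "{" the message reads roughly as follows (exact wording may differ
// between releases):
//
//     [json.exception.parse_error.101] parse error at line 1, column 2:
//     syntax error while parsing object key - unexpected end of input;
//     expected string literal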
521
522 private:
523 /// callback function
524 const parser_callback_t<BasicJsonType> callback = nullptr;
525 /// the type of the last read token
526 token_type last_token = token_type::uninitialized;
527 /// the lexer
528 lexer_t m_lexer;
529 /// whether to throw exceptions in case of errors
530 const bool allow_exceptions = true;
531 /// whether trailing commas in objects and arrays should be ignored (true) or signaled as errors (false)
532 const bool ignore_trailing_commas = false;
533};
534
535} // namespace detail
536NLOHMANN_JSON_NAMESPACE_END