Mirror of https://github.com/MadeOfJelly/MushMachine.git (synced 2025-06-20 11:46:36 +02:00)
Commit: update nlohmann:json to v3.12.0
@ -1,9 +1,9 @@
|
||||
// __ _____ _____ _____
|
||||
// __| | __| | | | JSON for Modern C++
|
||||
// | | |__ | | | | | | version 3.11.2
|
||||
// | | |__ | | | | | | version 3.12.0
|
||||
// |_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann <https://nlohmann.me>
|
||||
// SPDX-FileCopyrightText: 2013 - 2025 Niels Lohmann <https://nlohmann.me>
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
#pragma once
|
||||
@ -20,6 +20,9 @@
|
||||
#include <string> // char_traits, string
|
||||
#include <utility> // make_pair, move
|
||||
#include <vector> // vector
|
||||
#ifdef __cpp_lib_byteswap
|
||||
#include <bit> //byteswap
|
||||
#endif
|
||||
|
||||
#include <nlohmann/detail/exceptions.hpp>
|
||||
#include <nlohmann/detail/input/input_adapters.hpp>
|
||||
@ -55,7 +58,6 @@ static inline bool little_endianness(int num = 1) noexcept
|
||||
return *reinterpret_cast<char*>(&num) == 1;
|
||||
}
|
||||
|
||||
|
||||
///////////////////
|
||||
// binary reader //
|
||||
///////////////////
|
||||
@ -63,7 +65,7 @@ static inline bool little_endianness(int num = 1) noexcept
|
||||
/*!
|
||||
@brief deserialization of CBOR, MessagePack, and UBJSON values
|
||||
*/
|
||||
template<typename BasicJsonType, typename InputAdapterType, typename SAX = json_sax_dom_parser<BasicJsonType>>
|
||||
template<typename BasicJsonType, typename InputAdapterType, typename SAX = json_sax_dom_parser<BasicJsonType, InputAdapterType>>
|
||||
class binary_reader
|
||||
{
|
||||
using number_integer_t = typename BasicJsonType::number_integer_t;
|
||||
@ -73,7 +75,7 @@ class binary_reader
|
||||
using binary_t = typename BasicJsonType::binary_t;
|
||||
using json_sax_t = SAX;
|
||||
using char_type = typename InputAdapterType::char_type;
|
||||
using char_int_type = typename std::char_traits<char_type>::int_type;
|
||||
using char_int_type = typename char_traits<char_type>::int_type;
|
||||
|
||||
public:
|
||||
/*!
|
||||
@ -146,7 +148,7 @@ class binary_reader
|
||||
get();
|
||||
}
|
||||
|
||||
if (JSON_HEDLEY_UNLIKELY(current != std::char_traits<char_type>::eof()))
|
||||
if (JSON_HEDLEY_UNLIKELY(current != char_traits<char_type>::eof()))
|
||||
{
|
||||
return sax->parse_error(chars_read, get_token_string(), parse_error::create(110, chars_read,
|
||||
exception_message(input_format, concat("expected end of input; last byte: 0x", get_token_string()), "value"), nullptr));
|
||||
@ -170,7 +172,7 @@ class binary_reader
|
||||
std::int32_t document_size{};
|
||||
get_number<std::int32_t, true>(input_format_t::bson, document_size);
|
||||
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_object(static_cast<std::size_t>(-1))))
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_object(detail::unknown_size())))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@ -229,7 +231,7 @@ class binary_reader
|
||||
exception_message(input_format_t::bson, concat("string length must be at least 1, is ", std::to_string(len)), "string"), nullptr));
|
||||
}
|
||||
|
||||
return get_string(input_format_t::bson, len - static_cast<NumberType>(1), result) && get() != std::char_traits<char_type>::eof();
|
||||
return get_string(input_format_t::bson, len - static_cast<NumberType>(1), result) && get() != char_traits<char_type>::eof();
|
||||
}
|
||||
|
||||
/*!
|
||||
@ -326,11 +328,17 @@ class binary_reader
|
||||
return get_number<std::int64_t, true>(input_format_t::bson, value) && sax->number_integer(value);
|
||||
}
|
||||
|
||||
case 0x11: // uint64
|
||||
{
|
||||
std::uint64_t value{};
|
||||
return get_number<std::uint64_t, true>(input_format_t::bson, value) && sax->number_unsigned(value);
|
||||
}
|
||||
|
||||
default: // anything else not supported (yet)
|
||||
{
|
||||
std::array<char, 3> cr{{}};
|
||||
static_cast<void>((std::snprintf)(cr.data(), cr.size(), "%.2hhX", static_cast<unsigned char>(element_type))); // NOLINT(cppcoreguidelines-pro-type-vararg,hicpp-vararg)
|
||||
std::string cr_str{cr.data()};
|
||||
const std::string cr_str{cr.data()};
|
||||
return sax->parse_error(element_type_parse_position, cr_str,
|
||||
parse_error::create(114, element_type_parse_position, concat("Unsupported BSON record type 0x", cr_str), nullptr));
|
||||
}
|
||||
@ -392,7 +400,7 @@ class binary_reader
|
||||
std::int32_t document_size{};
|
||||
get_number<std::int32_t, true>(input_format_t::bson, document_size);
|
||||
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_array(static_cast<std::size_t>(-1))))
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_array(detail::unknown_size())))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@ -423,7 +431,7 @@ class binary_reader
|
||||
switch (get_char ? get() : current)
|
||||
{
|
||||
// EOF
|
||||
case std::char_traits<char_type>::eof():
|
||||
case char_traits<char_type>::eof():
|
||||
return unexpect_eof(input_format_t::cbor, "value");
|
||||
|
||||
// Integer 0x00..0x17 (0..23)
|
||||
@ -652,7 +660,7 @@ class binary_reader
|
||||
}
|
||||
|
||||
case 0x9F: // array (indefinite length)
|
||||
return get_cbor_array(static_cast<std::size_t>(-1), tag_handler);
|
||||
return get_cbor_array(detail::unknown_size(), tag_handler);
|
||||
|
||||
// map (0x00..0x17 pairs of data items follow)
|
||||
case 0xA0:
|
||||
@ -706,7 +714,7 @@ class binary_reader
|
||||
}
|
||||
|
||||
case 0xBF: // map (indefinite length)
|
||||
return get_cbor_object(static_cast<std::size_t>(-1), tag_handler);
|
||||
return get_cbor_object(detail::unknown_size(), tag_handler);
|
||||
|
||||
case 0xC6: // tagged item
|
||||
case 0xC7:
|
||||
@ -1094,7 +1102,7 @@ class binary_reader
|
||||
}
|
||||
|
||||
/*!
|
||||
@param[in] len the length of the array or static_cast<std::size_t>(-1) for an
|
||||
@param[in] len the length of the array or detail::unknown_size() for an
|
||||
array of indefinite size
|
||||
@param[in] tag_handler how CBOR tags should be treated
|
||||
@return whether array creation completed
|
||||
@ -1107,7 +1115,7 @@ class binary_reader
|
||||
return false;
|
||||
}
|
||||
|
||||
if (len != static_cast<std::size_t>(-1))
|
||||
if (len != detail::unknown_size())
|
||||
{
|
||||
for (std::size_t i = 0; i < len; ++i)
|
||||
{
|
||||
@ -1132,7 +1140,7 @@ class binary_reader
|
||||
}
|
||||
|
||||
/*!
|
||||
@param[in] len the length of the object or static_cast<std::size_t>(-1) for an
|
||||
@param[in] len the length of the object or detail::unknown_size() for an
|
||||
object of indefinite size
|
||||
@param[in] tag_handler how CBOR tags should be treated
|
||||
@return whether object creation completed
|
||||
@ -1148,7 +1156,7 @@ class binary_reader
|
||||
if (len != 0)
|
||||
{
|
||||
string_t key;
|
||||
if (len != static_cast<std::size_t>(-1))
|
||||
if (len != detail::unknown_size())
|
||||
{
|
||||
for (std::size_t i = 0; i < len; ++i)
|
||||
{
|
||||
@ -1198,7 +1206,7 @@ class binary_reader
|
||||
switch (get())
|
||||
{
|
||||
// EOF
|
||||
case std::char_traits<char_type>::eof():
|
||||
case char_traits<char_type>::eof():
|
||||
return unexpect_eof(input_format_t::msgpack, "value");
|
||||
|
||||
// positive fixint
|
||||
@ -2153,7 +2161,7 @@ class binary_reader
|
||||
}
|
||||
if (is_ndarray) // ndarray dimensional vector can only contain integers, and can not embed another array
|
||||
{
|
||||
return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, chars_read, exception_message(input_format, "ndarray dimentional vector is not allowed", "size"), nullptr));
|
||||
return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, chars_read, exception_message(input_format, "ndarray dimensional vector is not allowed", "size"), nullptr));
|
||||
}
|
||||
std::vector<size_t> dim;
|
||||
if (JSON_HEDLEY_UNLIKELY(!get_ubjson_ndarray_size(dim)))
|
||||
@ -2265,7 +2273,7 @@ class binary_reader
|
||||
exception_message(input_format, concat("expected '#' after type information; last byte: 0x", last_token), "size"), nullptr));
|
||||
}
|
||||
|
||||
bool is_error = get_ubjson_size_value(result.first, is_ndarray);
|
||||
const bool is_error = get_ubjson_size_value(result.first, is_ndarray);
|
||||
if (input_format == input_format_t::bjdata && is_ndarray)
|
||||
{
|
||||
if (inside_ndarray)
|
||||
@ -2280,7 +2288,7 @@ class binary_reader
|
||||
|
||||
if (current == '#')
|
||||
{
|
||||
bool is_error = get_ubjson_size_value(result.first, is_ndarray);
|
||||
const bool is_error = get_ubjson_size_value(result.first, is_ndarray);
|
||||
if (input_format == input_format_t::bjdata && is_ndarray)
|
||||
{
|
||||
return sax->parse_error(chars_read, get_token_string(), parse_error::create(112, chars_read,
|
||||
@ -2300,7 +2308,7 @@ class binary_reader
|
||||
{
|
||||
switch (prefix)
|
||||
{
|
||||
case std::char_traits<char_type>::eof(): // EOF
|
||||
case char_traits<char_type>::eof(): // EOF
|
||||
return unexpect_eof(input_format, "value");
|
||||
|
||||
case 'T': // true
|
||||
@ -2311,6 +2319,16 @@ class binary_reader
|
||||
case 'Z': // null
|
||||
return sax->null();
|
||||
|
||||
case 'B': // byte
|
||||
{
|
||||
if (input_format != input_format_t::bjdata)
|
||||
{
|
||||
break;
|
||||
}
|
||||
std::uint8_t number{};
|
||||
return get_number(input_format, number) && sax->number_unsigned(number);
|
||||
}
|
||||
|
||||
case 'U':
|
||||
{
|
||||
std::uint8_t number{};
|
||||
@ -2511,7 +2529,7 @@ class binary_reader
|
||||
return false;
|
||||
}
|
||||
|
||||
if (size_and_type.second == 'C')
|
||||
if (size_and_type.second == 'C' || size_and_type.second == 'B')
|
||||
{
|
||||
size_and_type.second = 'U';
|
||||
}
|
||||
@ -2533,6 +2551,13 @@ class binary_reader
|
||||
return (sax->end_array() && sax->end_object());
|
||||
}
|
||||
|
||||
// If BJData type marker is 'B' decode as binary
|
||||
if (input_format == input_format_t::bjdata && size_and_type.first != npos && size_and_type.second == 'B')
|
||||
{
|
||||
binary_t result;
|
||||
return get_binary(input_format, size_and_type.first, result) && sax->binary(result);
|
||||
}
|
||||
|
||||
if (size_and_type.first != npos)
|
||||
{
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_array(size_and_type.first)))
|
||||
@ -2566,7 +2591,7 @@ class binary_reader
|
||||
}
|
||||
else
|
||||
{
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_array(static_cast<std::size_t>(-1))))
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_array(detail::unknown_size())))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@ -2644,7 +2669,7 @@ class binary_reader
|
||||
}
|
||||
else
|
||||
{
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_object(static_cast<std::size_t>(-1))))
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_object(detail::unknown_size())))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@ -2745,7 +2770,7 @@ class binary_reader
|
||||
|
||||
This function provides the interface to the used input adapter. It does
|
||||
not throw in case the input reached EOF, but returns a -'ve valued
|
||||
`std::char_traits<char_type>::eof()` in that case.
|
||||
`char_traits<char_type>::eof()` in that case.
|
||||
|
||||
@return character read from the input
|
||||
*/
|
||||
@ -2755,6 +2780,29 @@ class binary_reader
|
||||
return current = ia.get_character();
|
||||
}
|
||||
|
||||
/*!
|
||||
@brief get_to read into a primitive type
|
||||
|
||||
This function provides the interface to the used input adapter. It does
|
||||
not throw in case the input reached EOF, but returns false instead
|
||||
|
||||
@return bool, whether the read was successful
|
||||
*/
|
||||
template<class T>
|
||||
bool get_to(T& dest, const input_format_t format, const char* context)
|
||||
{
|
||||
auto new_chars_read = ia.get_elements(&dest);
|
||||
chars_read += new_chars_read;
|
||||
if (JSON_HEDLEY_UNLIKELY(new_chars_read < sizeof(T)))
|
||||
{
|
||||
// in case of failure, advance position by 1 to report failing location
|
||||
++chars_read;
|
||||
sax->parse_error(chars_read, "<end of file>", parse_error::create(110, chars_read, exception_message(format, "unexpected end of input", context), nullptr));
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/*!
|
||||
@return character read from the input after ignoring all 'N' entries
|
||||
*/
|
||||
@ -2769,6 +2817,28 @@ class binary_reader
|
||||
return current;
|
||||
}
|
||||
|
||||
template<class NumberType>
|
||||
static void byte_swap(NumberType& number)
|
||||
{
|
||||
constexpr std::size_t sz = sizeof(number);
|
||||
#ifdef __cpp_lib_byteswap
|
||||
if constexpr (sz == 1)
|
||||
{
|
||||
return;
|
||||
}
|
||||
if constexpr(std::is_integral_v<NumberType>)
|
||||
{
|
||||
number = std::byteswap(number);
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
auto* ptr = reinterpret_cast<std::uint8_t*>(&number);
|
||||
for (std::size_t i = 0; i < sz / 2; ++i)
|
||||
{
|
||||
std::swap(ptr[i], ptr[sz - i - 1]);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
@brief read a number from the input
|
||||
|
||||
@ -2787,29 +2857,16 @@ class binary_reader
|
||||
template<typename NumberType, bool InputIsLittleEndian = false>
|
||||
bool get_number(const input_format_t format, NumberType& result)
|
||||
{
|
||||
// step 1: read input into array with system's byte order
|
||||
std::array<std::uint8_t, sizeof(NumberType)> vec{};
|
||||
for (std::size_t i = 0; i < sizeof(NumberType); ++i)
|
||||
// read in the original format
|
||||
|
||||
if (JSON_HEDLEY_UNLIKELY(!get_to(result, format, "number")))
|
||||
{
|
||||
get();
|
||||
if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(format, "number")))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// reverse byte order prior to conversion if necessary
|
||||
if (is_little_endian != (InputIsLittleEndian || format == input_format_t::bjdata))
|
||||
{
|
||||
vec[sizeof(NumberType) - i - 1] = static_cast<std::uint8_t>(current);
|
||||
}
|
||||
else
|
||||
{
|
||||
vec[i] = static_cast<std::uint8_t>(current); // LCOV_EXCL_LINE
|
||||
}
|
||||
return false;
|
||||
}
|
||||
if (is_little_endian != (InputIsLittleEndian || format == input_format_t::bjdata))
|
||||
{
|
||||
byte_swap(result);
|
||||
}
|
||||
|
||||
// step 2: convert array into number of type T and return
|
||||
std::memcpy(&result, vec.data(), sizeof(NumberType));
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -2887,7 +2944,7 @@ class binary_reader
|
||||
JSON_HEDLEY_NON_NULL(3)
|
||||
bool unexpect_eof(const input_format_t format, const char* context) const
|
||||
{
|
||||
if (JSON_HEDLEY_UNLIKELY(current == std::char_traits<char_type>::eof()))
|
||||
if (JSON_HEDLEY_UNLIKELY(current == char_traits<char_type>::eof()))
|
||||
{
|
||||
return sax->parse_error(chars_read, "<end of file>",
|
||||
parse_error::create(110, chars_read, exception_message(format, "unexpected end of input", context), nullptr));
|
||||
@ -2948,13 +3005,13 @@ class binary_reader
|
||||
}
|
||||
|
||||
private:
|
||||
static JSON_INLINE_VARIABLE constexpr std::size_t npos = static_cast<std::size_t>(-1);
|
||||
static JSON_INLINE_VARIABLE constexpr std::size_t npos = detail::unknown_size();
|
||||
|
||||
/// input adapter
|
||||
InputAdapterType ia;
|
||||
|
||||
/// the current character
|
||||
char_int_type current = std::char_traits<char_type>::eof();
|
||||
char_int_type current = char_traits<char_type>::eof();
|
||||
|
||||
/// the number of characters read
|
||||
std::size_t chars_read = 0;
|
||||
@ -2974,6 +3031,7 @@ class binary_reader
|
||||
|
||||
#define JSON_BINARY_READER_MAKE_BJD_TYPES_MAP_ \
|
||||
make_array<bjd_type>( \
|
||||
bjd_type{'B', "byte"}, \
|
||||
bjd_type{'C', "char"}, \
|
||||
bjd_type{'D', "double"}, \
|
||||
bjd_type{'I', "int16"}, \
|
||||
|
@ -1,9 +1,9 @@
|
||||
// __ _____ _____ _____
|
||||
// __| | __| | | | JSON for Modern C++
|
||||
// | | |__ | | | | | | version 3.11.2
|
||||
// | | |__ | | | | | | version 3.12.0
|
||||
// |_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann <https://nlohmann.me>
|
||||
// SPDX-FileCopyrightText: 2013 - 2025 Niels Lohmann <https://nlohmann.me>
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
#pragma once
|
||||
@ -23,8 +23,10 @@
|
||||
#include <istream> // istream
|
||||
#endif // JSON_NO_IO
|
||||
|
||||
#include <nlohmann/detail/exceptions.hpp>
|
||||
#include <nlohmann/detail/iterators/iterator_traits.hpp>
|
||||
#include <nlohmann/detail/macro_scope.hpp>
|
||||
#include <nlohmann/detail/meta/type_traits.hpp>
|
||||
|
||||
NLOHMANN_JSON_NAMESPACE_BEGIN
|
||||
namespace detail
|
||||
@ -66,12 +68,18 @@ class file_input_adapter
|
||||
return std::fgetc(m_file);
|
||||
}
|
||||
|
||||
// returns the number of characters successfully read
|
||||
template<class T>
|
||||
std::size_t get_elements(T* dest, std::size_t count = 1)
|
||||
{
|
||||
return fread(dest, 1, sizeof(T) * count, m_file);
|
||||
}
|
||||
|
||||
private:
|
||||
/// the file pointer to read from
|
||||
std::FILE* m_file;
|
||||
};
|
||||
|
||||
|
||||
/*!
|
||||
Input adapter for a (caching) istream. Ignores a UFT Byte Order Mark at
|
||||
beginning of input. Does not support changing the underlying std::streambuf
|
||||
@ -126,6 +134,17 @@ class input_stream_adapter
|
||||
return res;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
std::size_t get_elements(T* dest, std::size_t count = 1)
|
||||
{
|
||||
auto res = static_cast<std::size_t>(sb->sgetn(reinterpret_cast<char*>(dest), static_cast<std::streamsize>(count * sizeof(T))));
|
||||
if (JSON_HEDLEY_UNLIKELY(res < count * sizeof(T)))
|
||||
{
|
||||
is->clear(is->rdstate() | std::ios::eofbit);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
private:
|
||||
/// the associated input stream
|
||||
std::istream* is = nullptr;
|
||||
@ -145,16 +164,36 @@ class iterator_input_adapter
|
||||
: current(std::move(first)), end(std::move(last))
|
||||
{}
|
||||
|
||||
typename std::char_traits<char_type>::int_type get_character()
|
||||
typename char_traits<char_type>::int_type get_character()
|
||||
{
|
||||
if (JSON_HEDLEY_LIKELY(current != end))
|
||||
{
|
||||
auto result = std::char_traits<char_type>::to_int_type(*current);
|
||||
auto result = char_traits<char_type>::to_int_type(*current);
|
||||
std::advance(current, 1);
|
||||
return result;
|
||||
}
|
||||
|
||||
return std::char_traits<char_type>::eof();
|
||||
return char_traits<char_type>::eof();
|
||||
}
|
||||
|
||||
// for general iterators, we cannot really do something better than falling back to processing the range one-by-one
|
||||
template<class T>
|
||||
std::size_t get_elements(T* dest, std::size_t count = 1)
|
||||
{
|
||||
auto* ptr = reinterpret_cast<char*>(dest);
|
||||
for (std::size_t read_index = 0; read_index < count * sizeof(T); ++read_index)
|
||||
{
|
||||
if (JSON_HEDLEY_LIKELY(current != end))
|
||||
{
|
||||
ptr[read_index] = static_cast<char>(*current);
|
||||
std::advance(current, 1);
|
||||
}
|
||||
else
|
||||
{
|
||||
return read_index;
|
||||
}
|
||||
}
|
||||
return count * sizeof(T);
|
||||
}
|
||||
|
||||
private:
|
||||
@ -170,7 +209,6 @@ class iterator_input_adapter
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
template<typename BaseInputAdapter, size_t T>
|
||||
struct wide_string_input_helper;
|
||||
|
||||
@ -294,7 +332,7 @@ struct wide_string_input_helper<BaseInputAdapter, 2>
|
||||
}
|
||||
};
|
||||
|
||||
// Wraps another input apdater to convert wide character types into individual bytes.
|
||||
// Wraps another input adapter to convert wide character types into individual bytes.
|
||||
template<typename BaseInputAdapter, typename WideCharType>
|
||||
class wide_string_input_adapter
|
||||
{
|
||||
@ -321,6 +359,13 @@ class wide_string_input_adapter
|
||||
return utf8_bytes[utf8_bytes_index++];
|
||||
}
|
||||
|
||||
// parsing binary with wchar doesn't make sense, but since the parsing mode can be runtime, we need something here
|
||||
template<class T>
|
||||
std::size_t get_elements(T* /*dest*/, std::size_t /*count*/ = 1)
|
||||
{
|
||||
JSON_THROW(parse_error::create(112, 1, "wide string type cannot be interpreted as binary data", nullptr));
|
||||
}
|
||||
|
||||
private:
|
||||
BaseInputAdapter base_adapter;
|
||||
|
||||
@ -339,7 +384,6 @@ class wide_string_input_adapter
|
||||
std::size_t utf8_bytes_filled = 0;
|
||||
};
|
||||
|
||||
|
||||
template<typename IteratorType, typename Enable = void>
|
||||
struct iterator_input_adapter_factory
|
||||
{
|
||||
@ -418,10 +462,17 @@ typename container_input_adapter_factory_impl::container_input_adapter_factory<C
|
||||
return container_input_adapter_factory_impl::container_input_adapter_factory<ContainerType>::create(container);
|
||||
}
|
||||
|
||||
// specialization for std::string
|
||||
using string_input_adapter_type = decltype(input_adapter(std::declval<std::string>()));
|
||||
|
||||
#ifndef JSON_NO_IO
|
||||
// Special cases with fast paths
|
||||
inline file_input_adapter input_adapter(std::FILE* file)
|
||||
{
|
||||
if (file == nullptr)
|
||||
{
|
||||
JSON_THROW(parse_error::create(101, 0, "attempting to parse an empty input; check that your input string or stream contains the expected JSON", nullptr));
|
||||
}
|
||||
return file_input_adapter(file);
|
||||
}
|
||||
|
||||
@ -448,9 +499,13 @@ template < typename CharT,
|
||||
int >::type = 0 >
|
||||
contiguous_bytes_input_adapter input_adapter(CharT b)
|
||||
{
|
||||
if (b == nullptr)
|
||||
{
|
||||
JSON_THROW(parse_error::create(101, 0, "attempting to parse an empty input; check that your input string or stream contains the expected JSON", nullptr));
|
||||
}
|
||||
auto length = std::strlen(reinterpret_cast<const char*>(b));
|
||||
const auto* ptr = reinterpret_cast<const char*>(b);
|
||||
return input_adapter(ptr, ptr + length);
|
||||
return input_adapter(ptr, ptr + length); // cppcheck-suppress[nullPointerArithmeticRedundantCheck]
|
||||
}
|
||||
|
||||
template<typename T, std::size_t N>
|
||||
|
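Alongside get_character(), every input adapter above now offers a get_elements(T* dest, count) bulk read that returns the number of bytes actually delivered; binary_reader::get_to() treats a short count as end of input. A rough sketch of that contract using a toy in-memory adapter (buffer_adapter is a made-up name for illustration, not one of the library's adapters):

    #include <algorithm>
    #include <cstddef>
    #include <cstring>

    // Minimal adapter over a contiguous byte buffer, showing the get_elements()
    // contract: copy up to count * sizeof(T) bytes into dest and report how many
    // bytes were actually copied. A short return value signals end of input.
    struct buffer_adapter
    {
        const char* data = nullptr;
        std::size_t size = 0;
        std::size_t pos = 0;

        template<class T>
        std::size_t get_elements(T* dest, std::size_t count = 1)
        {
            const std::size_t wanted = count * sizeof(T);
            const std::size_t available = (std::min)(wanted, size - pos);
            std::memcpy(dest, data + pos, available);
            pos += available;
            return available;
        }
    };

The real adapters in the diff follow the same shape: file_input_adapter forwards to fread, input_stream_adapter to sgetn (setting eofbit on a short read), the iterator adapter copies byte by byte, and the wide-string adapter throws, since reinterpreting wide characters as raw binary data is not meaningful.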
@ -1,22 +1,23 @@
|
||||
// __ _____ _____ _____
|
||||
// __| | __| | | | JSON for Modern C++
|
||||
// | | |__ | | | | | | version 3.11.2
|
||||
// | | |__ | | | | | | version 3.12.0
|
||||
// |_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann <https://nlohmann.me>
|
||||
// SPDX-FileCopyrightText: 2013 - 2025 Niels Lohmann <https://nlohmann.me>
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <cstddef>
|
||||
#include <string> // string
|
||||
#include <type_traits> // enable_if_t
|
||||
#include <utility> // move
|
||||
#include <vector> // vector
|
||||
|
||||
#include <nlohmann/detail/exceptions.hpp>
|
||||
#include <nlohmann/detail/input/lexer.hpp>
|
||||
#include <nlohmann/detail/macro_scope.hpp>
|
||||
#include <nlohmann/detail/string_concat.hpp>
|
||||
|
||||
NLOHMANN_JSON_NAMESPACE_BEGIN
|
||||
|
||||
/*!
|
||||
@ -142,9 +143,13 @@ struct json_sax
|
||||
virtual ~json_sax() = default;
|
||||
};
|
||||
|
||||
|
||||
namespace detail
|
||||
{
|
||||
constexpr std::size_t unknown_size()
|
||||
{
|
||||
return (std::numeric_limits<std::size_t>::max)();
|
||||
}
|
||||
|
||||
/*!
|
||||
@brief SAX implementation to create a JSON value from SAX events
|
||||
|
||||
@ -158,7 +163,7 @@ constructor contains the parsed value.
|
||||
|
||||
@tparam BasicJsonType the JSON type
|
||||
*/
|
||||
template<typename BasicJsonType>
|
||||
template<typename BasicJsonType, typename InputAdapterType>
|
||||
class json_sax_dom_parser
|
||||
{
|
||||
public:
|
||||
@ -167,14 +172,15 @@ class json_sax_dom_parser
|
||||
using number_float_t = typename BasicJsonType::number_float_t;
|
||||
using string_t = typename BasicJsonType::string_t;
|
||||
using binary_t = typename BasicJsonType::binary_t;
|
||||
using lexer_t = lexer<BasicJsonType, InputAdapterType>;
|
||||
|
||||
/*!
|
||||
@param[in,out] r reference to a JSON value that is manipulated while
|
||||
parsing
|
||||
@param[in] allow_exceptions_ whether parse errors yield exceptions
|
||||
*/
|
||||
explicit json_sax_dom_parser(BasicJsonType& r, const bool allow_exceptions_ = true)
|
||||
: root(r), allow_exceptions(allow_exceptions_)
|
||||
explicit json_sax_dom_parser(BasicJsonType& r, const bool allow_exceptions_ = true, lexer_t* lexer_ = nullptr)
|
||||
: root(r), allow_exceptions(allow_exceptions_), m_lexer_ref(lexer_)
|
||||
{}
|
||||
|
||||
// make class move-only
|
||||
@ -230,7 +236,18 @@ class json_sax_dom_parser
|
||||
{
|
||||
ref_stack.push_back(handle_value(BasicJsonType::value_t::object));
|
||||
|
||||
if (JSON_HEDLEY_UNLIKELY(len != static_cast<std::size_t>(-1) && len > ref_stack.back()->max_size()))
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
// Manually set the start position of the object here.
|
||||
// Ensure this is after the call to handle_value to ensure correct start position.
|
||||
if (m_lexer_ref)
|
||||
{
|
||||
// Lexer has read the first character of the object, so
|
||||
// subtract 1 from the position to get the correct start position.
|
||||
ref_stack.back()->start_position = m_lexer_ref->get_position() - 1;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (JSON_HEDLEY_UNLIKELY(len != detail::unknown_size() && len > ref_stack.back()->max_size()))
|
||||
{
|
||||
JSON_THROW(out_of_range::create(408, concat("excessive object size: ", std::to_string(len)), ref_stack.back()));
|
||||
}
|
||||
@ -244,7 +261,7 @@ class json_sax_dom_parser
|
||||
JSON_ASSERT(ref_stack.back()->is_object());
|
||||
|
||||
// add null at given key and store the reference for later
|
||||
object_element = &(ref_stack.back()->m_value.object->operator[](val));
|
||||
object_element = &(ref_stack.back()->m_data.m_value.object->operator[](val));
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -253,6 +270,14 @@ class json_sax_dom_parser
|
||||
JSON_ASSERT(!ref_stack.empty());
|
||||
JSON_ASSERT(ref_stack.back()->is_object());
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
if (m_lexer_ref)
|
||||
{
|
||||
// Lexer's position is past the closing brace, so set that as the end position.
|
||||
ref_stack.back()->end_position = m_lexer_ref->get_position();
|
||||
}
|
||||
#endif
|
||||
|
||||
ref_stack.back()->set_parents();
|
||||
ref_stack.pop_back();
|
||||
return true;
|
||||
@ -262,7 +287,16 @@ class json_sax_dom_parser
|
||||
{
|
||||
ref_stack.push_back(handle_value(BasicJsonType::value_t::array));
|
||||
|
||||
if (JSON_HEDLEY_UNLIKELY(len != static_cast<std::size_t>(-1) && len > ref_stack.back()->max_size()))
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
// Manually set the start position of the array here.
|
||||
// Ensure this is after the call to handle_value to ensure correct start position.
|
||||
if (m_lexer_ref)
|
||||
{
|
||||
ref_stack.back()->start_position = m_lexer_ref->get_position() - 1;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (JSON_HEDLEY_UNLIKELY(len != detail::unknown_size() && len > ref_stack.back()->max_size()))
|
||||
{
|
||||
JSON_THROW(out_of_range::create(408, concat("excessive array size: ", std::to_string(len)), ref_stack.back()));
|
||||
}
|
||||
@ -275,6 +309,14 @@ class json_sax_dom_parser
|
||||
JSON_ASSERT(!ref_stack.empty());
|
||||
JSON_ASSERT(ref_stack.back()->is_array());
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
if (m_lexer_ref)
|
||||
{
|
||||
// Lexer's position is past the closing bracket, so set that as the end position.
|
||||
ref_stack.back()->end_position = m_lexer_ref->get_position();
|
||||
}
|
||||
#endif
|
||||
|
||||
ref_stack.back()->set_parents();
|
||||
ref_stack.pop_back();
|
||||
return true;
|
||||
@ -299,6 +341,75 @@ class json_sax_dom_parser
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
void handle_diagnostic_positions_for_json_value(BasicJsonType& v)
|
||||
{
|
||||
if (m_lexer_ref)
|
||||
{
|
||||
// Lexer has read past the current field value, so set the end position to the current position.
|
||||
// The start position will be set below based on the length of the string representation
|
||||
// of the value.
|
||||
v.end_position = m_lexer_ref->get_position();
|
||||
|
||||
switch (v.type())
|
||||
{
|
||||
case value_t::boolean:
|
||||
{
|
||||
// 4 and 5 are the string length of "true" and "false"
|
||||
v.start_position = v.end_position - (v.m_data.m_value.boolean ? 4 : 5);
|
||||
break;
|
||||
}
|
||||
|
||||
case value_t::null:
|
||||
{
|
||||
// 4 is the string length of "null"
|
||||
v.start_position = v.end_position - 4;
|
||||
break;
|
||||
}
|
||||
|
||||
case value_t::string:
|
||||
{
|
||||
// include the length of the quotes, which is 2
|
||||
v.start_position = v.end_position - v.m_data.m_value.string->size() - 2;
|
||||
break;
|
||||
}
|
||||
|
||||
// As we handle the start and end positions for values created during parsing,
|
||||
// we do not expect the following value type to be called. Regardless, set the positions
|
||||
// in case this is created manually or through a different constructor. Exclude from lcov
|
||||
// since the exact condition of this switch is esoteric.
|
||||
// LCOV_EXCL_START
|
||||
case value_t::discarded:
|
||||
{
|
||||
v.end_position = std::string::npos;
|
||||
v.start_position = v.end_position;
|
||||
break;
|
||||
}
|
||||
// LCOV_EXCL_STOP
|
||||
case value_t::binary:
|
||||
case value_t::number_integer:
|
||||
case value_t::number_unsigned:
|
||||
case value_t::number_float:
|
||||
{
|
||||
v.start_position = v.end_position - m_lexer_ref->get_string().size();
|
||||
break;
|
||||
}
|
||||
case value_t::object:
|
||||
case value_t::array:
|
||||
{
|
||||
// object and array are handled in start_object() and start_array() handlers
|
||||
// skip setting the values here.
|
||||
break;
|
||||
}
|
||||
default: // LCOV_EXCL_LINE
|
||||
// Handle all possible types discretely, default handler should never be reached.
|
||||
JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert,-warnings-as-errors) LCOV_EXCL_LINE
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
/*!
|
||||
@invariant If the ref stack is empty, then the passed value will be the new
|
||||
root.
|
||||
@ -312,6 +423,11 @@ class json_sax_dom_parser
|
||||
if (ref_stack.empty())
|
||||
{
|
||||
root = BasicJsonType(std::forward<Value>(v));
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
handle_diagnostic_positions_for_json_value(root);
|
||||
#endif
|
||||
|
||||
return &root;
|
||||
}
|
||||
|
||||
@ -319,13 +435,23 @@ class json_sax_dom_parser
|
||||
|
||||
if (ref_stack.back()->is_array())
|
||||
{
|
||||
ref_stack.back()->m_value.array->emplace_back(std::forward<Value>(v));
|
||||
return &(ref_stack.back()->m_value.array->back());
|
||||
ref_stack.back()->m_data.m_value.array->emplace_back(std::forward<Value>(v));
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
handle_diagnostic_positions_for_json_value(ref_stack.back()->m_data.m_value.array->back());
|
||||
#endif
|
||||
|
||||
return &(ref_stack.back()->m_data.m_value.array->back());
|
||||
}
|
||||
|
||||
JSON_ASSERT(ref_stack.back()->is_object());
|
||||
JSON_ASSERT(object_element);
|
||||
*object_element = BasicJsonType(std::forward<Value>(v));
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
handle_diagnostic_positions_for_json_value(*object_element);
|
||||
#endif
|
||||
|
||||
return object_element;
|
||||
}
|
||||
|
||||
@ -339,9 +465,11 @@ class json_sax_dom_parser
|
||||
bool errored = false;
|
||||
/// whether to throw exceptions in case of errors
|
||||
const bool allow_exceptions = true;
|
||||
/// the lexer reference to obtain the current position
|
||||
lexer_t* m_lexer_ref = nullptr;
|
||||
};
|
||||
|
||||
template<typename BasicJsonType>
|
||||
template<typename BasicJsonType, typename InputAdapterType>
|
||||
class json_sax_dom_callback_parser
|
||||
{
|
||||
public:
|
||||
@ -352,11 +480,13 @@ class json_sax_dom_callback_parser
|
||||
using binary_t = typename BasicJsonType::binary_t;
|
||||
using parser_callback_t = typename BasicJsonType::parser_callback_t;
|
||||
using parse_event_t = typename BasicJsonType::parse_event_t;
|
||||
using lexer_t = lexer<BasicJsonType, InputAdapterType>;
|
||||
|
||||
json_sax_dom_callback_parser(BasicJsonType& r,
|
||||
const parser_callback_t cb,
|
||||
const bool allow_exceptions_ = true)
|
||||
: root(r), callback(cb), allow_exceptions(allow_exceptions_)
|
||||
parser_callback_t cb,
|
||||
const bool allow_exceptions_ = true,
|
||||
lexer_t* lexer_ = nullptr)
|
||||
: root(r), callback(std::move(cb)), allow_exceptions(allow_exceptions_), m_lexer_ref(lexer_)
|
||||
{
|
||||
keep_stack.push_back(true);
|
||||
}
|
||||
@ -419,12 +549,26 @@ class json_sax_dom_callback_parser
|
||||
auto val = handle_value(BasicJsonType::value_t::object, true);
|
||||
ref_stack.push_back(val.second);
|
||||
|
||||
// check object limit
|
||||
if (ref_stack.back() && JSON_HEDLEY_UNLIKELY(len != static_cast<std::size_t>(-1) && len > ref_stack.back()->max_size()))
|
||||
if (ref_stack.back())
|
||||
{
|
||||
JSON_THROW(out_of_range::create(408, concat("excessive object size: ", std::to_string(len)), ref_stack.back()));
|
||||
}
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
// Manually set the start position of the object here.
|
||||
// Ensure this is after the call to handle_value to ensure correct start position.
|
||||
if (m_lexer_ref)
|
||||
{
|
||||
// Lexer has read the first character of the object, so
|
||||
// subtract 1 from the position to get the correct start position.
|
||||
ref_stack.back()->start_position = m_lexer_ref->get_position() - 1;
|
||||
}
|
||||
#endif
|
||||
|
||||
// check object limit
|
||||
if (JSON_HEDLEY_UNLIKELY(len != detail::unknown_size() && len > ref_stack.back()->max_size()))
|
||||
{
|
||||
JSON_THROW(out_of_range::create(408, concat("excessive object size: ", std::to_string(len)), ref_stack.back()));
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -439,7 +583,7 @@ class json_sax_dom_callback_parser
|
||||
// add discarded value at given key and store the reference for later
|
||||
if (keep && ref_stack.back())
|
||||
{
|
||||
object_element = &(ref_stack.back()->m_value.object->operator[](val) = discarded);
|
||||
object_element = &(ref_stack.back()->m_data.m_value.object->operator[](val) = discarded);
|
||||
}
|
||||
|
||||
return true;
|
||||
@ -453,9 +597,23 @@ class json_sax_dom_callback_parser
|
||||
{
|
||||
// discard object
|
||||
*ref_stack.back() = discarded;
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
// Set start/end positions for discarded object.
|
||||
handle_diagnostic_positions_for_json_value(*ref_stack.back());
|
||||
#endif
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
if (m_lexer_ref)
|
||||
{
|
||||
// Lexer's position is past the closing brace, so set that as the end position.
|
||||
ref_stack.back()->end_position = m_lexer_ref->get_position();
|
||||
}
|
||||
#endif
|
||||
|
||||
ref_stack.back()->set_parents();
|
||||
}
|
||||
}
|
||||
@ -489,10 +647,25 @@ class json_sax_dom_callback_parser
|
||||
auto val = handle_value(BasicJsonType::value_t::array, true);
|
||||
ref_stack.push_back(val.second);
|
||||
|
||||
// check array limit
|
||||
if (ref_stack.back() && JSON_HEDLEY_UNLIKELY(len != static_cast<std::size_t>(-1) && len > ref_stack.back()->max_size()))
|
||||
if (ref_stack.back())
|
||||
{
|
||||
JSON_THROW(out_of_range::create(408, concat("excessive array size: ", std::to_string(len)), ref_stack.back()));
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
// Manually set the start position of the array here.
|
||||
// Ensure this is after the call to handle_value to ensure correct start position.
|
||||
if (m_lexer_ref)
|
||||
{
|
||||
// Lexer has read the first character of the array, so
|
||||
// subtract 1 from the position to get the correct start position.
|
||||
ref_stack.back()->start_position = m_lexer_ref->get_position() - 1;
|
||||
}
|
||||
#endif
|
||||
|
||||
// check array limit
|
||||
if (JSON_HEDLEY_UNLIKELY(len != detail::unknown_size() && len > ref_stack.back()->max_size()))
|
||||
{
|
||||
JSON_THROW(out_of_range::create(408, concat("excessive array size: ", std::to_string(len)), ref_stack.back()));
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
@ -507,12 +680,26 @@ class json_sax_dom_callback_parser
|
||||
keep = callback(static_cast<int>(ref_stack.size()) - 1, parse_event_t::array_end, *ref_stack.back());
|
||||
if (keep)
|
||||
{
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
if (m_lexer_ref)
|
||||
{
|
||||
// Lexer's position is past the closing bracket, so set that as the end position.
|
||||
ref_stack.back()->end_position = m_lexer_ref->get_position();
|
||||
}
|
||||
#endif
|
||||
|
||||
ref_stack.back()->set_parents();
|
||||
}
|
||||
else
|
||||
{
|
||||
// discard array
|
||||
*ref_stack.back() = discarded;
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
// Set start/end positions for discarded array.
|
||||
handle_diagnostic_positions_for_json_value(*ref_stack.back());
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
@ -524,7 +711,7 @@ class json_sax_dom_callback_parser
|
||||
// remove discarded value
|
||||
if (!keep && !ref_stack.empty() && ref_stack.back()->is_array())
|
||||
{
|
||||
ref_stack.back()->m_value.array->pop_back();
|
||||
ref_stack.back()->m_data.m_value.array->pop_back();
|
||||
}
|
||||
|
||||
return true;
|
||||
@ -549,6 +736,71 @@ class json_sax_dom_callback_parser
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
void handle_diagnostic_positions_for_json_value(BasicJsonType& v)
|
||||
{
|
||||
if (m_lexer_ref)
|
||||
{
|
||||
// Lexer has read past the current field value, so set the end position to the current position.
|
||||
// The start position will be set below based on the length of the string representation
|
||||
// of the value.
|
||||
v.end_position = m_lexer_ref->get_position();
|
||||
|
||||
switch (v.type())
|
||||
{
|
||||
case value_t::boolean:
|
||||
{
|
||||
// 4 and 5 are the string length of "true" and "false"
|
||||
v.start_position = v.end_position - (v.m_data.m_value.boolean ? 4 : 5);
|
||||
break;
|
||||
}
|
||||
|
||||
case value_t::null:
|
||||
{
|
||||
// 4 is the string length of "null"
|
||||
v.start_position = v.end_position - 4;
|
||||
break;
|
||||
}
|
||||
|
||||
case value_t::string:
|
||||
{
|
||||
// include the length of the quotes, which is 2
|
||||
v.start_position = v.end_position - v.m_data.m_value.string->size() - 2;
|
||||
break;
|
||||
}
|
||||
|
||||
case value_t::discarded:
|
||||
{
|
||||
v.end_position = std::string::npos;
|
||||
v.start_position = v.end_position;
|
||||
break;
|
||||
}
|
||||
|
||||
case value_t::binary:
|
||||
case value_t::number_integer:
|
||||
case value_t::number_unsigned:
|
||||
case value_t::number_float:
|
||||
{
|
||||
v.start_position = v.end_position - m_lexer_ref->get_string().size();
|
||||
break;
|
||||
}
|
||||
|
||||
case value_t::object:
|
||||
case value_t::array:
|
||||
{
|
||||
// object and array are handled in start_object() and start_array() handlers
|
||||
// skip setting the values here.
|
||||
break;
|
||||
}
|
||||
default: // LCOV_EXCL_LINE
|
||||
// Handle all possible types discretely, default handler should never be reached.
|
||||
JSON_ASSERT(false); // NOLINT(cert-dcl03-c,hicpp-static-assert,misc-static-assert,-warnings-as-errors) LCOV_EXCL_LINE
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
/*!
|
||||
@param[in] v value to add to the JSON value we build during parsing
|
||||
@param[in] skip_callback whether we should skip calling the callback
|
||||
@ -579,6 +831,10 @@ class json_sax_dom_callback_parser
|
||||
// create value
|
||||
auto value = BasicJsonType(std::forward<Value>(v));
|
||||
|
||||
#if JSON_DIAGNOSTIC_POSITIONS
|
||||
handle_diagnostic_positions_for_json_value(value);
|
||||
#endif
|
||||
|
||||
// check callback
|
||||
const bool keep = skip_callback || callback(static_cast<int>(ref_stack.size()), parse_event_t::value, value);
|
||||
|
||||
@ -591,7 +847,7 @@ class json_sax_dom_callback_parser
|
||||
if (ref_stack.empty())
|
||||
{
|
||||
root = std::move(value);
|
||||
return {true, &root};
|
||||
return {true, & root};
|
||||
}
|
||||
|
||||
// skip this value if we already decided to skip the parent
|
||||
@ -607,8 +863,8 @@ class json_sax_dom_callback_parser
|
||||
// array
|
||||
if (ref_stack.back()->is_array())
|
||||
{
|
||||
ref_stack.back()->m_value.array->emplace_back(std::move(value));
|
||||
return {true, &(ref_stack.back()->m_value.array->back())};
|
||||
ref_stack.back()->m_data.m_value.array->emplace_back(std::move(value));
|
||||
return {true, & (ref_stack.back()->m_data.m_value.array->back())};
|
||||
}
|
||||
|
||||
// object
|
||||
@ -633,9 +889,9 @@ class json_sax_dom_callback_parser
|
||||
/// stack to model hierarchy of values
|
||||
std::vector<BasicJsonType*> ref_stack {};
|
||||
/// stack to manage which values to keep
|
||||
std::vector<bool> keep_stack {};
|
||||
std::vector<bool> keep_stack {}; // NOLINT(readability-redundant-member-init)
|
||||
/// stack to manage which object keys to keep
|
||||
std::vector<bool> key_keep_stack {};
|
||||
std::vector<bool> key_keep_stack {}; // NOLINT(readability-redundant-member-init)
|
||||
/// helper to hold the reference for the next object element
|
||||
BasicJsonType* object_element = nullptr;
|
||||
/// whether a syntax error occurred
|
||||
@ -646,6 +902,8 @@ class json_sax_dom_callback_parser
|
||||
const bool allow_exceptions = true;
|
||||
/// a discarded value for the callback
|
||||
BasicJsonType discarded = BasicJsonType::value_t::discarded;
|
||||
/// the lexer reference to obtain the current position
|
||||
lexer_t* m_lexer_ref = nullptr;
|
||||
};
|
||||
|
||||
template<typename BasicJsonType>
|
||||
@ -693,7 +951,7 @@ class json_sax_acceptor
|
||||
return true;
|
||||
}
|
||||
|
||||
bool start_object(std::size_t /*unused*/ = static_cast<std::size_t>(-1))
|
||||
bool start_object(std::size_t /*unused*/ = detail::unknown_size())
|
||||
{
|
||||
return true;
|
||||
}
|
||||
@ -708,7 +966,7 @@ class json_sax_acceptor
|
||||
return true;
|
||||
}
|
||||
|
||||
bool start_array(std::size_t /*unused*/ = static_cast<std::size_t>(-1))
|
||||
bool start_array(std::size_t /*unused*/ = detail::unknown_size())
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
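A recurring substitution throughout these files is detail::unknown_size() in place of the literal static_cast<std::size_t>(-1) sentinel that start_object()/start_array() accept for containers of unknown length. The two spellings produce the same value, so the change is purely about readability; a self-contained sketch of the new helper as introduced in the json_sax hunk above:

    #include <cstddef>
    #include <limits>

    // "Size not known yet" sentinel for the SAX start_object()/start_array() calls.
    // (std::numeric_limits<std::size_t>::max)() is parenthesized to sidestep a
    // possible max() macro (e.g. from <windows.h>).
    constexpr std::size_t unknown_size()
    {
        return (std::numeric_limits<std::size_t>::max)();
    }

    static_assert(unknown_size() == static_cast<std::size_t>(-1),
                  "the named sentinel is value-identical to the old cast");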
@ -1,9 +1,9 @@
|
||||
// __ _____ _____ _____
|
||||
// __| | __| | | | JSON for Modern C++
|
||||
// | | |__ | | | | | | version 3.11.2
|
||||
// | | |__ | | | | | | version 3.12.0
|
||||
// |_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann <https://nlohmann.me>
|
||||
// SPDX-FileCopyrightText: 2013 - 2025 Niels Lohmann <https://nlohmann.me>
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
#pragma once
|
||||
@ -21,6 +21,7 @@
|
||||
#include <nlohmann/detail/input/input_adapters.hpp>
|
||||
#include <nlohmann/detail/input/position_t.hpp>
|
||||
#include <nlohmann/detail/macro_scope.hpp>
|
||||
#include <nlohmann/detail/meta/type_traits.hpp>
|
||||
|
||||
NLOHMANN_JSON_NAMESPACE_BEGIN
|
||||
namespace detail
|
||||
@ -115,7 +116,7 @@ class lexer : public lexer_base<BasicJsonType>
|
||||
using number_float_t = typename BasicJsonType::number_float_t;
|
||||
using string_t = typename BasicJsonType::string_t;
|
||||
using char_type = typename InputAdapterType::char_type;
|
||||
using char_int_type = typename std::char_traits<char_type>::int_type;
|
||||
using char_int_type = typename char_traits<char_type>::int_type;
|
||||
|
||||
public:
|
||||
using token_type = typename lexer_base<BasicJsonType>::token_type;
|
||||
@ -222,7 +223,7 @@ class lexer : public lexer_base<BasicJsonType>
|
||||
for (auto range = ranges.begin(); range != ranges.end(); ++range)
|
||||
{
|
||||
get();
|
||||
if (JSON_HEDLEY_LIKELY(*range <= current && current <= *(++range)))
|
||||
if (JSON_HEDLEY_LIKELY(*range <= current && current <= *(++range))) // NOLINT(bugprone-inc-dec-in-conditions)
|
||||
{
|
||||
add(current);
|
||||
}
|
||||
@ -265,7 +266,7 @@ class lexer : public lexer_base<BasicJsonType>
|
||||
switch (get())
|
||||
{
|
||||
// end of file while parsing string
|
||||
case std::char_traits<char_type>::eof():
|
||||
case char_traits<char_type>::eof():
|
||||
{
|
||||
error_message = "invalid string: missing closing quote";
|
||||
return token_type::parse_error;
|
||||
@ -854,7 +855,7 @@ class lexer : public lexer_base<BasicJsonType>
|
||||
{
|
||||
case '\n':
|
||||
case '\r':
|
||||
case std::char_traits<char_type>::eof():
|
||||
case char_traits<char_type>::eof():
|
||||
case '\0':
|
||||
return true;
|
||||
|
||||
@ -871,7 +872,7 @@ class lexer : public lexer_base<BasicJsonType>
|
||||
{
|
||||
switch (get())
|
||||
{
|
||||
case std::char_traits<char_type>::eof():
|
||||
case char_traits<char_type>::eof():
|
||||
case '\0':
|
||||
{
|
||||
error_message = "invalid comment; missing closing '*/'";
|
||||
@ -966,7 +967,7 @@ class lexer : public lexer_base<BasicJsonType>
|
||||
locale's decimal point is used instead of `.` to work with the
|
||||
locale-dependent converters.
|
||||
*/
|
||||
token_type scan_number() // lgtm [cpp/use-of-goto]
|
||||
token_type scan_number() // lgtm [cpp/use-of-goto] `goto` is used in this function to implement the number-parsing state machine described above. By design, any finite input will eventually reach the "done" state or return token_type::parse_error. In each intermediate state, 1 byte of the input is appended to the token_buffer vector, and only the already initialized variables token_buffer, number_type, and error_message are manipulated.
|
||||
{
|
||||
// reset token_buffer to store the number's bytes
|
||||
reset();
|
||||
@ -1048,6 +1049,7 @@ scan_number_zero:
|
||||
case '.':
|
||||
{
|
||||
add(decimal_point_char);
|
||||
decimal_point_position = token_buffer.size() - 1;
|
||||
goto scan_number_decimal1;
|
||||
}
|
||||
|
||||
@ -1084,6 +1086,7 @@ scan_number_any1:
|
||||
case '.':
|
||||
{
|
||||
add(decimal_point_char);
|
||||
decimal_point_position = token_buffer.size() - 1;
|
||||
goto scan_number_decimal1;
|
||||
}
|
||||
|
||||
@ -1244,7 +1247,7 @@ scan_number_done:
|
||||
// we are done scanning a number)
|
||||
unget();
|
||||
|
||||
char* endptr = nullptr; // NOLINT(cppcoreguidelines-pro-type-vararg,hicpp-vararg)
|
||||
char* endptr = nullptr; // NOLINT(misc-const-correctness,cppcoreguidelines-pro-type-vararg,hicpp-vararg)
|
||||
errno = 0;
|
||||
|
||||
// try to parse integers first and fall back to floats
|
||||
@ -1255,7 +1258,7 @@ scan_number_done:
|
||||
// we checked the number format before
|
||||
JSON_ASSERT(endptr == token_buffer.data() + token_buffer.size());
|
||||
|
||||
if (errno == 0)
|
||||
if (errno != ERANGE)
|
||||
{
|
||||
value_unsigned = static_cast<number_unsigned_t>(x);
|
||||
if (value_unsigned == x)
|
||||
@ -1271,7 +1274,7 @@ scan_number_done:
|
||||
// we checked the number format before
|
||||
JSON_ASSERT(endptr == token_buffer.data() + token_buffer.size());
|
||||
|
||||
if (errno == 0)
|
||||
if (errno != ERANGE)
|
||||
{
|
||||
value_integer = static_cast<number_integer_t>(x);
|
||||
if (value_integer == x)
|
||||
@ -1300,10 +1303,10 @@ scan_number_done:
|
||||
token_type scan_literal(const char_type* literal_text, const std::size_t length,
|
||||
token_type return_type)
|
||||
{
|
||||
JSON_ASSERT(std::char_traits<char_type>::to_char_type(current) == literal_text[0]);
|
||||
JSON_ASSERT(char_traits<char_type>::to_char_type(current) == literal_text[0]);
|
||||
for (std::size_t i = 1; i < length; ++i)
|
||||
{
|
||||
if (JSON_HEDLEY_UNLIKELY(std::char_traits<char_type>::to_char_type(get()) != literal_text[i]))
|
||||
if (JSON_HEDLEY_UNLIKELY(char_traits<char_type>::to_char_type(get()) != literal_text[i]))
|
||||
{
|
||||
error_message = "invalid literal";
|
||||
return token_type::parse_error;
|
||||
@ -1321,7 +1324,8 @@ scan_number_done:
|
||||
{
|
||||
token_buffer.clear();
|
||||
token_string.clear();
|
||||
token_string.push_back(std::char_traits<char_type>::to_char_type(current));
|
||||
decimal_point_position = std::string::npos;
|
||||
token_string.push_back(char_traits<char_type>::to_char_type(current));
|
||||
}
|
||||
|
||||
/*
|
||||
@ -1329,7 +1333,7 @@ scan_number_done:
|
||||
|
||||
This function provides the interface to the used input adapter. It does
|
||||
not throw in case the input reached EOF, but returns a
|
||||
`std::char_traits<char>::eof()` in that case. Stores the scanned characters
|
||||
`char_traits<char>::eof()` in that case. Stores the scanned characters
|
||||
for use in error messages.
|
||||
|
||||
@return character read from the input
|
||||
@ -1349,9 +1353,9 @@ scan_number_done:
|
||||
current = ia.get_character();
|
||||
}
|
||||
|
||||
if (JSON_HEDLEY_LIKELY(current != std::char_traits<char_type>::eof()))
|
||||
if (JSON_HEDLEY_LIKELY(current != char_traits<char_type>::eof()))
|
||||
{
|
||||
token_string.push_back(std::char_traits<char_type>::to_char_type(current));
|
||||
token_string.push_back(char_traits<char_type>::to_char_type(current));
|
||||
}
|
||||
|
||||
if (current == '\n')
|
||||
@ -1390,7 +1394,7 @@ scan_number_done:
|
||||
--position.chars_read_current_line;
|
||||
}
|
||||
|
||||
if (JSON_HEDLEY_LIKELY(current != std::char_traits<char_type>::eof()))
|
||||
if (JSON_HEDLEY_LIKELY(current != char_traits<char_type>::eof()))
|
||||
{
|
||||
JSON_ASSERT(!token_string.empty());
|
||||
token_string.pop_back();
|
||||
@ -1429,6 +1433,11 @@ scan_number_done:
|
||||
/// return current string value (implicitly resets the token; useful only once)
|
||||
string_t& get_string()
|
||||
{
|
||||
// translate decimal points from locale back to '.' (#4084)
|
||||
if (decimal_point_char != '.' && decimal_point_position != std::string::npos)
|
||||
{
|
||||
token_buffer[decimal_point_position] = '.';
|
||||
}
|
||||
return token_buffer;
|
||||
}
|
||||
|
||||
@ -1584,7 +1593,7 @@ scan_number_done:
|
||||
// end of input (the null byte is needed when parsing from
|
||||
// string literals)
|
||||
case '\0':
|
||||
case std::char_traits<char_type>::eof():
|
||||
case char_traits<char_type>::eof():
|
||||
return token_type::end_of_input;
|
||||
|
||||
// error
|
||||
@ -1602,7 +1611,7 @@ scan_number_done:
|
||||
const bool ignore_comments = false;
|
||||
|
||||
/// the current character
|
||||
char_int_type current = std::char_traits<char_type>::eof();
|
||||
char_int_type current = char_traits<char_type>::eof();
|
||||
|
||||
/// whether the next get() call should just return current
|
||||
bool next_unget = false;
|
||||
@ -1626,6 +1635,8 @@ scan_number_done:
|
||||
|
||||
/// the decimal point
|
||||
const char_int_type decimal_point_char = '.';
|
||||
/// the position of the decimal point in the input
|
||||
std::size_t decimal_point_position = std::string::npos;
|
||||
};
|
||||
|
||||
} // namespace detail
|
||||
|
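The new decimal_point_position bookkeeping in the lexer addresses locales whose decimal separator is not '.': scan_number() buffers the locale's separator so the strto* conversions accept it and records where it sits, and get_string() patches that position back to '.' before the raw token is handed out (issue #4084 referenced in the hunk above). A minimal sketch of the restoration step, using a hypothetical helper name:

    #include <cstddef>
    #include <string>

    // Restore a plain '.' in the raw number token after a locale-aware parse.
    // decimal_point_char comes from the runtime locale (e.g. ',' under de_DE);
    // decimal_point_position is std::string::npos when the token had no fraction.
    std::string restore_decimal_point(std::string token_buffer,
                                      char decimal_point_char,
                                      std::size_t decimal_point_position)
    {
        if (decimal_point_char != '.' && decimal_point_position != std::string::npos)
        {
            token_buffer[decimal_point_position] = '.';
        }
        return token_buffer;
    }

    // e.g. restore_decimal_point("3,14", ',', 1) yields "3.14"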
@ -1,9 +1,9 @@
|
||||
// __ _____ _____ _____
|
||||
// __| | __| | | | JSON for Modern C++
|
||||
// | | |__ | | | | | | version 3.11.2
|
||||
// | | |__ | | | | | | version 3.12.0
|
||||
// |_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann <https://nlohmann.me>
|
||||
// SPDX-FileCopyrightText: 2013 - 2025 Niels Lohmann <https://nlohmann.me>
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
#pragma once
|
||||
@ -69,10 +69,10 @@ class parser
|
||||
public:
|
||||
/// a parser reading from an input adapter
|
||||
explicit parser(InputAdapterType&& adapter,
|
||||
const parser_callback_t<BasicJsonType> cb = nullptr,
|
||||
parser_callback_t<BasicJsonType> cb = nullptr,
|
||||
const bool allow_exceptions_ = true,
|
||||
const bool skip_comments = false)
|
||||
: callback(cb)
|
||||
: callback(std::move(cb))
|
||||
, m_lexer(std::move(adapter), skip_comments)
|
||||
, allow_exceptions(allow_exceptions_)
|
||||
{
|
||||
@ -94,7 +94,7 @@ class parser
|
||||
{
|
||||
if (callback)
|
||||
{
|
||||
json_sax_dom_callback_parser<BasicJsonType> sdp(result, callback, allow_exceptions);
|
||||
json_sax_dom_callback_parser<BasicJsonType, InputAdapterType> sdp(result, callback, allow_exceptions, &m_lexer);
|
||||
sax_parse_internal(&sdp);
|
||||
|
||||
// in strict mode, input must be completely read
|
||||
@ -122,7 +122,7 @@ class parser
|
||||
}
|
||||
else
|
||||
{
|
||||
json_sax_dom_parser<BasicJsonType> sdp(result, allow_exceptions);
|
||||
json_sax_dom_parser<BasicJsonType, InputAdapterType> sdp(result, allow_exceptions, &m_lexer);
|
||||
sax_parse_internal(&sdp);
|
||||
|
||||
// in strict mode, input must be completely read
|
||||
@ -194,7 +194,7 @@ class parser
|
||||
{
|
||||
case token_type::begin_object:
|
||||
{
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_object(static_cast<std::size_t>(-1))))
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_object(detail::unknown_size())))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@ -239,7 +239,7 @@ class parser
|
||||
|
||||
case token_type::begin_array:
|
||||
{
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_array(static_cast<std::size_t>(-1))))
|
||||
if (JSON_HEDLEY_UNLIKELY(!sax->start_array(detail::unknown_size())))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@ -341,13 +341,25 @@ class parser
|
||||
m_lexer.get_token_string(),
|
||||
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::uninitialized, "value"), nullptr));
|
||||
}
|
||||
case token_type::end_of_input:
|
||||
{
|
||||
if (JSON_HEDLEY_UNLIKELY(m_lexer.get_position().chars_read_total == 1))
|
||||
{
|
||||
return sax->parse_error(m_lexer.get_position(),
|
||||
m_lexer.get_token_string(),
|
||||
parse_error::create(101, m_lexer.get_position(),
|
||||
"attempting to parse an empty input; check that your input string or stream contains the expected JSON", nullptr));
|
||||
}
|
||||
|
||||
return sax->parse_error(m_lexer.get_position(),
|
||||
m_lexer.get_token_string(),
|
||||
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::literal_or_value, "value"), nullptr));
|
||||
}
|
||||
case token_type::uninitialized:
|
||||
case token_type::end_array:
|
||||
case token_type::end_object:
|
||||
case token_type::name_separator:
|
||||
case token_type::value_separator:
|
||||
case token_type::end_of_input:
|
||||
case token_type::literal_or_value:
|
||||
default: // the last token was unexpected
|
||||
{
|
||||
|
@ -1,9 +1,9 @@
|
||||
// __ _____ _____ _____
|
||||
// __| | __| | | | JSON for Modern C++
|
||||
// | | |__ | | | | | | version 3.11.2
|
||||
// | | |__ | | | | | | version 3.12.0
|
||||
// |_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann <https://nlohmann.me>
|
||||
// SPDX-FileCopyrightText: 2013 - 2025 Niels Lohmann <https://nlohmann.me>
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
#pragma once