undid changes that broke the build

Niels 2015-04-26 21:51:35 +02:00
parent f874b5f0f8
commit 81d3973151
2 changed files with 768 additions and 431 deletions

File diff suppressed because it is too large


@@ -416,12 +416,12 @@ class basic_json
         // the initializer list could describe an object
         bool is_object = true;

-        // check if each element is an array with two elements whose first
-        // element is a string
+        // check if each element is an array with two elements whose first element
+        // is a string
         for (const auto& element : init)
         {
-            if ((element.m_type != value_t::array or element.size() != 2
-                    or element[0].m_type != value_t::string))
+            if (element.m_type != value_t::array or element.size() != 2
+                    or element[0].m_type != value_t::string)
             {
                 // we found an element that makes it impossible to use the
                 // initializer list as object
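The check above is what decides whether a brace-init-list becomes a JSON object or a JSON array. A minimal usage sketch of that behaviour, assuming the single-header library is included as json.hpp and the usual nlohmann::json alias is available:

#include <iostream>
#include "json.hpp"   // assumed include path for the single-header library

using json = nlohmann::json;

int main()
{
    // every element is a two-element array whose first element is a string,
    // so the initializer list is interpreted as an object
    json object = {{"one", 1}, {"two", 2}};

    // the trailing 3 breaks that pattern, so the list stays an array
    json array = {{"one", 1}, {"two", 2}, 3};

    std::cout << object << '\n';   // prints {"one":1,"two":2}
    std::cout << array << '\n';    // prints [["one",1],["two",2],3]
}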
@@ -754,12 +754,6 @@ class basic_json
         return m_type;
     }

-    /// return the type of the object (implicit)
-    inline operator value_t() const noexcept
-    {
-        return m_type;
-    }
-
     // return whether value is null
     inline bool is_null() const noexcept
     {
@@ -796,6 +790,12 @@ class basic_json
         return m_type == value_t::string;
     }

+    /// return the type of the object (implicit)
+    inline operator value_t() const noexcept
+    {
+        return m_type;
+    }
+
   private:
     //////////////////////
     // value conversion //
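The two hunks above only move the implicit operator value_t() further down in the class; its behaviour is unchanged. A small sketch of what the conversion enables, assuming value_t is publicly reachable as json::value_t, as it is in this header:

#include <iostream>
#include "json.hpp"   // assumed include path for the single-header library

using json = nlohmann::json;

int main()
{
    json j = "hello";

    // the non-explicit operator value_t() lets the type tag be read through
    // an implicit conversion instead of an explicit call to type()
    json::value_t t = j;

    if (t == json::value_t::string)
    {
        std::cout << "j holds a string\n";
    }
}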
@@ -1079,7 +1079,6 @@ class basic_json
             throw std::runtime_error("cannot use [] with " + type_name());
         }

-        // fill gaps with null values
         for (size_t i = m_value.array->size(); i <= idx; ++i)
         {
             m_value.array->push_back(basic_json());
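The loop above implements the gap-filling behaviour of operator[] on arrays: writing past the current end first pads the array with null values. A short usage sketch under the same assumptions as the example above:

#include <iostream>
#include "json.hpp"   // assumed include path for the single-header library

using json = nlohmann::json;

int main()
{
    json j = {1};   // the array [1]
    j[3] = 42;      // indices 1 and 2 are filled with null before index 3 is written

    std::cout << j << '\n';   // prints [1,null,null,42]
}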
@@ -1517,7 +1516,6 @@ class basic_json
         {
             case (value_t::null):
             {
-                // null values are empty
                 return true;
             }
@@ -1549,7 +1547,6 @@ class basic_json
         {
             case (value_t::null):
             {
-                // null values are empty
                 return 0;
             }
@@ -1581,7 +1578,6 @@ class basic_json
         {
             case (value_t::null):
             {
-                // null values are empty
                 return 0;
             }
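The three hunks above drop the redundant "null values are empty" comments; the behaviour itself is unchanged: a null value reports empty() == true and size() == 0. A minimal check, under the same include and alias assumptions as the sketches above:

#include <cassert>
#include "json.hpp"   // assumed include path for the single-header library

using json = nlohmann::json;

int main()
{
    json j;                  // null value
    assert(j.empty());       // null values are empty ...
    assert(j.size() == 0);   // ... and have size 0
}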
@@ -3847,8 +3843,7 @@ class basic_json
     /// public parser interface
     inline basic_json parse()
     {
-        basic_json result;
-        parse_internal(result);
+        basic_json result = parse_internal();

         expect(lexer::token_type::end_of_input);
@@ -3857,17 +3852,14 @@ class basic_json
   private:
     /// the actual parser
-    inline void parse_internal(basic_json& pos)
+    inline basic_json parse_internal()
     {
         switch (last_token)
         {
             case (lexer::token_type::begin_object):
             {
                 // explicitly set result to object to cope with {}
-                pos.m_type = value_t::object;
-                AllocatorType<object_t> alloc;
-                pos.m_value.object = alloc.allocate(1);
-                alloc.construct(pos.m_value.object);
+                basic_json result(value_t::object);

                 // read next token
                 get_token();
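The remaining parser hunks all follow from the signature change above: instead of filling a caller-provided slot through a reference, parse_internal() builds and returns a basic_json value. A deliberately simplified, hypothetical sketch of the two recursion styles (a stand-in node type, not the library's real parser, just the shape of the change):

#include <string>
#include <vector>

// hypothetical stand-in for a parsed value; not part of the library
struct node
{
    std::string value;
    std::vector<node> children;
};

// style on the left-hand side of the diff: the caller allocates the slot
// and the parser fills it in place through a reference
void parse_into(node& pos)
{
    pos.value = "leaf";
}

// style on the right-hand side of the diff: the parser builds and returns
// the value, so the caller can emplace it directly into a container
node parse_value()
{
    return node{"leaf", {}};
}

int main()
{
    node root;

    // reference style: insert a placeholder first, then parse into it
    root.children.emplace_back();
    parse_into(root.children.back());

    // value style: parse first, then move the result into place
    root.children.emplace_back(parse_value());
}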
@@ -3876,7 +3868,7 @@ class basic_json
                 if (last_token == lexer::token_type::end_object)
                 {
                     get_token();
-                    return;
+                    return result;
                 }

                 // otherwise: parse key-value pairs
@@ -3898,25 +3890,21 @@ class basic_json
                     // parse and add value
                     get_token();
-                    auto it = pos.m_value.object->emplace(key, nullptr);
-                    parse_internal(it.first->second);
+                    result.m_value.object->emplace(key, parse_internal());
                 }
                 while (last_token == lexer::token_type::value_separator);

                 // closing }
                 expect(lexer::token_type::end_object);
                 get_token();
-                return;
+                return result;
             }

             case (lexer::token_type::begin_array):
             {
                 // explicitly set result to array to cope with []
-                pos.m_type = value_t::array;
-                AllocatorType<array_t> alloc;
-                pos.m_value.array = alloc.allocate(1);
-                alloc.construct(pos.m_value.array);
+                basic_json result(value_t::array);

                 // read next token
                 get_token();
@@ -3925,7 +3913,7 @@ class basic_json
                 if (last_token == lexer::token_type::end_array)
                 {
                     get_token();
-                    return;
+                    return result;
                 }

                 // otherwise: parse values
@@ -3938,51 +3926,40 @@ class basic_json
                     }

                     // parse and add value
-                    auto it = pos.m_value.array->insert(pos.m_value.array->end(), nullptr);
-                    parse_internal(*it);
+                    result.m_value.array->emplace_back(parse_internal());
                 }
                 while (last_token == lexer::token_type::value_separator);

                 // closing ]
                 expect(lexer::token_type::end_array);
                 get_token();
-                return;
+                return result;
             }

             case (lexer::token_type::literal_null):
             {
                 get_token();
-                return;
+                return basic_json(nullptr);
             }

             case (lexer::token_type::value_string):
             {
-                pos.m_type = value_t::string;
-                AllocatorType<string_t> alloc;
-                pos.m_value.string = alloc.allocate(1);
-                alloc.construct(pos.m_value.string, m_lexer.get_string());
+                const auto s = m_lexer.get_string();
                 get_token();
-                return;
+                return basic_json(s);
             }

             case (lexer::token_type::literal_true):
             {
-                pos.m_type = value_t::boolean;
-                pos.m_value.boolean = true;
                 get_token();
-                return;
+                return basic_json(true);
             }

             case (lexer::token_type::literal_false):
             {
-                pos.m_type = value_t::boolean;
-                pos.m_value.boolean = false;
                 get_token();
-                return;
+                return basic_json(false);
             }

             case (lexer::token_type::value_number):
@@ -3997,23 +3974,20 @@ class basic_json
                                             m_lexer.get_token() + " is not a number");
                 }

+                get_token();
+
                 // check if conversion loses precision
                 const auto int_val = static_cast<number_integer_t>(float_val);
                 if (approx(float_val, static_cast<number_float_t>(int_val)))
                 {
                     // we would not lose precision -> return int
-                    pos.m_type = value_t::number_integer;
-                    pos.m_value.number_integer = int_val;
+                    return basic_json(int_val);
                 }
                 else
                 {
                     // we would lose precision -> return float
-                    pos.m_type = value_t::number_float;
-                    pos.m_value.number_float = float_val;
+                    return basic_json(float_val);
                 }
-
-                get_token();
-                return;
             }

             default:
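The hunk above now returns directly from the integer and float branches; the decision itself is unchanged: every number is first read as a float and only stored as an integer when the round trip through number_integer_t loses no precision. A small sketch of that behaviour, assuming the static json::parse(...) interface and the type() accessor work as in current versions of the library:

#include <iostream>
#include "json.hpp"   // assumed include path for the single-header library

using json = nlohmann::json;

int main()
{
    // 42 survives the float -> integer round trip, 3.141 does not
    json i = json::parse("42");
    json f = json::parse("3.141");

    std::cout << std::boolalpha
              << (i.type() == json::value_t::number_integer) << '\n'   // true
              << (f.type() == json::value_t::number_float) << '\n';    // true
}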