author    | 2015-01-10 13:53:11 +0100
committer | 2015-01-10 13:53:11 +0100
commit    | 02d37b6915e453fbc40afc80830d9a2cbb459d1f (patch)
tree      | 87d7a4c5a93f9ecca1d955cfdac6fe25399a9b69 /3rdparty/jsoncpp
parent    | 950a428641808b5d083dcfddebbed06367f15a15 (diff)
Added integral version of jsoncpp source (nw)
Diffstat (limited to '3rdparty/jsoncpp')
228 files changed, 20301 insertions, 0 deletions
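JsonCpp, imported in full by this commit, is a C++ library for reading, manipulating and writing JSON values; its README.md and NEWS.txt (included in the diff below) describe the Json::Value type, the reader/writer classes and the numeric accessors. For orientation only, the sketch below shows a typical parse/modify/serialize round trip against the public JsonCpp 0.x API (Json::Reader, Json::Value, Json::StyledWriter). The snippet is not part of the commit, and the input document string is a made-up example.

    // Hedged illustration (not part of this commit): basic use of the vendored
    // JsonCpp API described in the README.md/NEWS.txt added below.
    #include <json/json.h>   // amalgamated or installed JsonCpp header
    #include <iostream>
    #include <string>

    int main() {
        // Hypothetical input document, used only for this example.
        const std::string doc =
            "{ \"driver\": \"example\", \"year\": 2015, \"tags\": [\"json\", \"3rdparty\"] }";

        Json::Value root;
        Json::Reader reader;
        if (!reader.parse(doc, root)) {                  // deserialize from a string
            std::cerr << reader.getFormattedErrorMessages();
            return 1;
        }

        std::cout << root["driver"].asString() << "\n";  // member lookup by name
        std::cout << root["year"].asInt() << "\n";       // numeric accessor (see NEWS.txt)
        std::cout << root["tags"][0].asString() << "\n"; // array index as int (see NEWS.txt)

        root["tags"].append("mame");                     // mutate the value tree
        Json::StyledWriter writer;
        std::cout << writer.write(root);                 // serialize back to styled text
        return 0;
    }

Projects that embed the amalgamated source include the header the same way; see "Using JsonCpp in your project" in the README.md below.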
diff --git a/3rdparty/jsoncpp/.clang-format b/3rdparty/jsoncpp/.clang-format
new file mode 100644
index 00000000000..dd51247d50a
--- /dev/null
+++ b/3rdparty/jsoncpp/.clang-format
@@ -0,0 +1,47 @@
+---
+# BasedOnStyle: LLVM
+AccessModifierOffset: -2
+ConstructorInitializerIndentWidth: 4
+AlignEscapedNewlinesLeft: false
+AlignTrailingComments: true
+AllowAllParametersOfDeclarationOnNextLine: true
+AllowShortIfStatementsOnASingleLine: false
+AllowShortLoopsOnASingleLine: false
+AlwaysBreakTemplateDeclarations: false
+AlwaysBreakBeforeMultilineStrings: false
+BreakBeforeBinaryOperators: false
+BreakBeforeTernaryOperators: true
+BreakConstructorInitializersBeforeComma: false
+BinPackParameters: false
+ColumnLimit: 80
+ConstructorInitializerAllOnOneLineOrOnePerLine: false
+DerivePointerBinding: false
+ExperimentalAutoDetectBinPacking: false
+IndentCaseLabels: false
+MaxEmptyLinesToKeep: 1
+NamespaceIndentation: None
+ObjCSpaceBeforeProtocolList: true
+PenaltyBreakBeforeFirstCallParameter: 19
+PenaltyBreakComment: 60
+PenaltyBreakString: 1000
+PenaltyBreakFirstLessLess: 120
+PenaltyExcessCharacter: 1000000
+PenaltyReturnTypeOnItsOwnLine: 60
+PointerBindsToType: true
+SpacesBeforeTrailingComments: 1
+Cpp11BracedListStyle: false
+Standard: Cpp03
+IndentWidth: 2
+TabWidth: 8
+UseTab: Never
+BreakBeforeBraces: Attach
+IndentFunctionDeclarationAfterType: false
+SpacesInParentheses: false
+SpacesInAngles: false
+SpaceInEmptyParentheses: false
+SpacesInCStyleCastParentheses: false
+SpaceAfterControlStatementKeyword: true
+SpaceBeforeAssignmentOperators: true
+ContinuationIndentWidth: 4
+...
+
diff --git a/3rdparty/jsoncpp/.gitignore b/3rdparty/jsoncpp/.gitignore
new file mode 100644
index 00000000000..60c4a0b9aee
--- /dev/null
+++ b/3rdparty/jsoncpp/.gitignore
@@ -0,0 +1,13 @@
+/build/
+*.pyc
+*.swp
+*.actual
+*.actual-rewrite
+*.process-output
+*.rewrite
+/bin/
+/buildscons/
+/libs/
+/doc/doxyfile
+/dist/
+/include/json/version.h
diff --git a/3rdparty/jsoncpp/.travis.yml b/3rdparty/jsoncpp/.travis.yml
new file mode 100644
index 00000000000..a913b095849
--- /dev/null
+++ b/3rdparty/jsoncpp/.travis.yml
@@ -0,0 +1,18 @@
+# Build matrix / environment variable are explained on:
+# http://about.travis-ci.org/docs/user/build-configuration/
+# This file can be validated on:
+# http://lint.travis-ci.org/
+before_install: sudo apt-get install cmake
+language: cpp
+compiler:
+  - gcc
+  - clang
+script: cmake -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIBRARY -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE .
&& make +env: + matrix: + - SHARED_LIBRARY=ON BUILD_TYPE=release VERBOSE_MAKE=false + - SHARED_LIBRARY=OFF BUILD_TYPE=release VERBOSE_MAKE=false + - SHARED_LIBRARY=OFF BUILD_TYPE=debug VERBOSE VERBOSE_MAKE=true +notifications: + email: + - aaronjjacobs@gmail.com diff --git a/3rdparty/jsoncpp/AUTHORS b/3rdparty/jsoncpp/AUTHORS new file mode 100644 index 00000000000..c0fbbeec111 --- /dev/null +++ b/3rdparty/jsoncpp/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur <blep@users.sourceforge.net> diff --git a/3rdparty/jsoncpp/CMakeLists.txt b/3rdparty/jsoncpp/CMakeLists.txt new file mode 100644 index 00000000000..3e8f96ecb90 --- /dev/null +++ b/3rdparty/jsoncpp/CMakeLists.txt @@ -0,0 +1,119 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 2.8.5) +PROJECT(jsoncpp) +ENABLE_TESTING() + +OPTION(JSONCPP_WITH_TESTS "Compile and run JsonCpp test executables" ON) +OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON) +OPTION(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occurs" OFF) +OPTION(JSONCPP_WITH_PKGCONFIG_SUPPORT "Generate and install .pc files" ON) +OPTION(JSONCPP_WITH_CMAKE_PACKAGE "Generate and install cmake package files" OFF) + +# Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix +IF(NOT WIN32) + IF(NOT CMAKE_BUILD_TYPE) + SET(CMAKE_BUILD_TYPE Release CACHE STRING + "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage." + FORCE) + ENDIF(NOT CMAKE_BUILD_TYPE) +ENDIF(NOT WIN32) + +SET(LIB_SUFFIX "" CACHE STRING "Optional arch-dependent suffix for the library installation directory") + +SET(RUNTIME_INSTALL_DIR bin + CACHE PATH "Install dir for executables and dlls") +SET(ARCHIVE_INSTALL_DIR lib${LIB_SUFFIX} + CACHE PATH "Install dir for static libraries") +SET(LIBRARY_INSTALL_DIR lib${LIB_SUFFIX} + CACHE PATH "Install dir for shared libraries") +SET(INCLUDE_INSTALL_DIR include + CACHE PATH "Install dir for headers") +SET(PACKAGE_INSTALL_DIR lib${LIB_SUFFIX}/cmake + CACHE PATH "Install dir for cmake package config files") +MARK_AS_ADVANCED( RUNTIME_INSTALL_DIR ARCHIVE_INSTALL_DIR INCLUDE_INSTALL_DIR PACKAGE_INSTALL_DIR ) + +# This ensures shared DLL are in the same dir as executable on Windows. +# Put all executables / libraries are in a project global directory. +SET(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib + CACHE PATH "Single directory for all static libraries.") +SET(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib + CACHE PATH "Single directory for all dynamic libraries on Unix.") +SET(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/bin + CACHE PATH "Single directory for all executable and dynamic libraries on Windows.") +MARK_AS_ADVANCED( CMAKE_RUNTIME_OUTPUT_DIRECTORY CMAKE_LIBRARY_OUTPUT_DIRECTORY CMAKE_ARCHIVE_OUTPUT_DIRECTORY ) + +# Set variable named ${VAR_NAME} to value ${VALUE} +FUNCTION(set_using_dynamic_name VAR_NAME VALUE) + SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE) +ENDFUNCTION(set_using_dynamic_name) + +# Extract major, minor, patch from version text +# Parse a version string "X.Y.Z" and outputs +# version parts in ${OUPUT_PREFIX}_MAJOR, _MINOR, _PATCH. +# If parse succeeds then ${OUPUT_PREFIX}_FOUND is TRUE. 
+MACRO(jsoncpp_parse_version VERSION_TEXT OUPUT_PREFIX) + SET(VERSION_REGEX "[0-9]+\\.[0-9]+\\.[0-9]+(-[a-zA-Z0-9_]+)?") + IF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} ) + STRING(REGEX MATCHALL "[0-9]+|-([A-Za-z0-9_]+)" VERSION_PARTS ${VERSION_TEXT}) + LIST(GET VERSION_PARTS 0 ${OUPUT_PREFIX}_MAJOR) + LIST(GET VERSION_PARTS 1 ${OUPUT_PREFIX}_MINOR) + LIST(GET VERSION_PARTS 2 ${OUPUT_PREFIX}_PATCH) + set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" TRUE ) + ELSE( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} ) + set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" FALSE ) + ENDIF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} ) +ENDMACRO(jsoncpp_parse_version) + +# Read out version from "version" file +FILE(STRINGS "version" JSONCPP_VERSION) + +jsoncpp_parse_version( ${JSONCPP_VERSION} JSONCPP_VERSION ) +IF(NOT JSONCPP_VERSION_FOUND) + MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z") +ENDIF(NOT JSONCPP_VERSION_FOUND) + +MESSAGE(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINOR}.${JSONCPP_VERSION_PATCH}") +# File version.h is only regenerated on CMake configure step +CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in" + "${PROJECT_SOURCE_DIR}/include/json/version.h" ) + +macro(UseCompilationWarningAsError) + if ( MSVC ) + # Only enabled in debug because some old versions of VS STL generate + # warnings when compiled in release configuration. + set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /WX ") + endif( MSVC ) +endmacro() + +# Include our configuration header +INCLUDE_DIRECTORIES( ${jsoncpp_SOURCE_DIR}/include ) + +if ( MSVC ) + # Only enabled in debug because some old versions of VS STL generate + # unreachable code warning when compiled in release configuration. + set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ") +endif( MSVC ) + +IF(JSONCPP_WITH_WARNING_AS_ERROR) + UseCompilationWarningAsError() +ENDIF(JSONCPP_WITH_WARNING_AS_ERROR) + +IF(JSONCPP_WITH_PKGCONFIG_SUPPORT) + CONFIGURE_FILE( + "pkg-config/jsoncpp.pc.in" + "pkg-config/jsoncpp.pc" + @ONLY) + INSTALL(FILES "${CMAKE_BINARY_DIR}/pkg-config/jsoncpp.pc" + DESTINATION "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}/pkgconfig") +ENDIF(JSONCPP_WITH_PKGCONFIG_SUPPORT) + +IF(JSONCPP_WITH_CMAKE_PACKAGE) + INSTALL(EXPORT jsoncpp + DESTINATION ${PACKAGE_INSTALL_DIR}/jsoncpp + FILE jsoncppConfig.cmake) +ENDIF(JSONCPP_WITH_CMAKE_PACKAGE) + +# Build the different applications +ADD_SUBDIRECTORY( src ) + +#install the includes +ADD_SUBDIRECTORY( include ) diff --git a/3rdparty/jsoncpp/LICENSE b/3rdparty/jsoncpp/LICENSE new file mode 100644 index 00000000000..ca2bfe1a03e --- /dev/null +++ b/3rdparty/jsoncpp/LICENSE @@ -0,0 +1,55 @@ +The JsonCpp library's source code, including accompanying documentation, +tests and demonstration applications, are licensed under the following +conditions... + +The author (Baptiste Lepilleur) explicitly disclaims copyright in all +jurisdictions which recognize such a disclaimer. In such jurisdictions, +this software is released into the Public Domain. + +In jurisdictions which do not recognize Public Domain property (e.g. Germany as of +2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is +released under the terms of the MIT License (see below). 
+ +In jurisdictions which recognize Public Domain property, the user of this +software may choose to accept it either as 1) Public Domain, 2) under the +conditions of the MIT License (see below), or 3) under the terms of dual +Public Domain/MIT License conditions described here, as they choose. + +The MIT License is about as close to Public Domain as a license can get, and is +described in clear, concise terms at: + + http://en.wikipedia.org/wiki/MIT_License + +The full text of the MIT License follows: + +======================================================================== +Copyright (c) 2007-2010 Baptiste Lepilleur + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +======================================================================== +(END LICENSE TEXT) + +The MIT license is compatible with both the GPL and commercial +software, affording one all of the rights of Public Domain with the +minor nuisance of being required to keep the above copyright notice +and license text in the source code. Note also that by accepting the +Public Domain "license" you can re-license your copy using whatever +license you like. diff --git a/3rdparty/jsoncpp/NEWS.txt b/3rdparty/jsoncpp/NEWS.txt new file mode 100644 index 00000000000..1be7b8ef894 --- /dev/null +++ b/3rdparty/jsoncpp/NEWS.txt @@ -0,0 +1,175 @@ +New in SVN +---------- + + * Updated the type system's behavior, in order to better support backwards + compatibility with code that was written before 64-bit integer support was + introduced. Here's how it works now: + + * isInt, isInt64, isUInt, and isUInt64 return true if and only if the + value can be exactly represented as that type. In particular, a value + constructed with a double like 17.0 will now return true for all of + these methods. + + * isDouble and isFloat now return true for all numeric values, since all + numeric values can be converted to a double or float without + truncation. Note however that the conversion may not be exact -- for + example, doubles cannot exactly represent all integers above 2^53 + 1. + + * isBool, isNull, isString, isArray, and isObject now return true if and + only if the value is of that type. + + * isConvertibleTo(fooValue) indicates that it is safe to call asFoo. + (For each type foo, isFoo always implies isConvertibleTo(fooValue).) + asFoo returns an approximate or exact representation as appropriate. + For example, a double value may be truncated when asInt is called. 
+ + * For backwards compatibility with old code, isConvertibleTo(intValue) + may return false even if type() == intValue. This is because the value + may have been constructed with a 64-bit integer larger than maxInt, + and calling asInt() would cause an exception. If you're writing new + code, use isInt64 to find out whether the value is exactly + representable using an Int64, or asDouble() combined with minInt64 and + maxInt64 to figure out whether it is approximately representable. + +* Value + - Patch #10: BOOST_FOREACH compatibility. Made Json::iterator more + standard compliant, added missing iterator_category and value_type + typedefs (contribued by Robert A. Iannucci). + +* Compilation + + - New CMake based build system. Based in part on contribution from + Igor Okulist and Damien Buhl (Patch #14). + + - New header json/version.h now contains version number macros + (JSONCPP_VERSION_MAJOR, JSONCPP_VERSION_MINOR, JSONCPP_VERSION_PATCH + and JSONCPP_VERSION_HEXA). + + - Patch #11: added missing JSON_API on some classes causing link issues + when building as a dynamic library on Windows + (contributed by Francis Bolduc). + + - Visual Studio DLL: suppressed warning "C4251: <data member>: <type> + needs to have dll-interface to be used by..." via pragma push/pop + in json-cpp headers. + + - Added Travis CI intregration: https://travis-ci.org/blep/jsoncpp-mirror + +* Bug fixes + - Patch #15: Copy constructor does not initialize allocated_ for stringValue + (contributed by rmongia). + + - Patch #16: Missing field copy in Json::Value::iterator causing infinite + loop when using experimental internal map (#define JSON_VALUE_USE_INTERNAL_MAP) + (contributed by Ming-Lin Kao). + + + New in JsonCpp 0.6.0: + --------------------- + +* Compilation + + - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now + propagated to the build environment as this is required for some + compiler installation. + + - Added support for Microsoft Visual Studio 2008 (bug #2930462): + The platform "msvc90" has been added. + + Notes: you need to setup the environment by running vcvars32.bat + (e.g. MSVC 2008 command prompt in start menu) before running scons. + + - Added support for amalgamated source and header generation (a la sqlite). + Refer to README.txt section "Generating amalgamated source and header" + for detail. + +* Value + + - Removed experimental ValueAllocator, it caused static + initialization/destruction order issues (bug #2934500). + The DefaultValueAllocator has been inlined in code. + + - Added support for 64 bits integer: + + Types Json::Int64 and Json::UInt64 have been added. They are aliased + to 64 bits integers on system that support them (based on __int64 on + Microsoft Visual Studio platform, and long long on other platforms). + + Types Json::LargestInt and Json::LargestUInt have been added. They are + aliased to the largest integer type supported: + either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. + + Json::Value::asInt() and Json::Value::asUInt() still returns plain + "int" based types, but asserts if an attempt is made to retrieve + a 64 bits value that can not represented as the return type. + + Json::Value::asInt64() and Json::Value::asUInt64() have been added + to obtain the 64 bits integer value. + + Json::Value::asLargestInt() and Json::Value::asLargestUInt() returns + the integer as a LargestInt/LargestUInt respectively. Those functions + functions are typically used when implementing writer. 
+ + The reader attempts to read number as 64 bits integer, and fall back + to reading a double if the number is not in the range of 64 bits + integer. + + Warning: Json::Value::asInt() and Json::Value::asUInt() now returns + long long. This changes break code that was passing the return value + to *printf() function. + + Support for 64 bits integer can be disabled by defining the macro + JSON_NO_INT64 (uncomment it in json/config.h for example), though + it should have no impact on existing usage. + + - The type Json::ArrayIndex is used for indexes of a JSON value array. It + is an unsigned int (typically 32 bits). + + - Array index can be passed as int to operator[], allowing use of literal: + Json::Value array; + array.append( 1234 ); + int value = array[0].asInt(); // did not compile previously + + - Added float Json::Value::asFloat() to obtain a floating point value as a + float (avoid lost of precision warning caused by used of asDouble() + to initialize a float). + +* Reader + + - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. + Bug #3023708 (Formatted has 2 't'). The old member function is deprecated + but still present for backward compatibility. + +* Tests + + - Added test to ensure that the escape sequence "\/" is corrected handled + by the parser. + +* Bug fixes + + - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now + correctly detected. + + - Bug #3139678: stack buffer overflow when parsing a double with a + length of 32 characters. + + - Fixed Value::operator <= implementation (had the semantic of operator >=). + Found when adding unit tests for comparison operators. + + - Value::compare() is now const and has an actual implementation with + unit tests. + + - Bug #2407932: strpbrk() can fail for NULL pointer. + + - Bug #3306345: Fixed minor typo in Path::resolve(). + + - Bug #3314841/#3306896: errors in amalgamate.py + + - Fixed some Coverity warnings and line-endings. + +* License + + - See file LICENSE for details. Basically JsonCpp is now licensed under + MIT license, or public domain if desired and recognized in your jurisdiction. + Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/]) who + helped figuring out the solution to the public domain issue. diff --git a/3rdparty/jsoncpp/README.md b/3rdparty/jsoncpp/README.md new file mode 100644 index 00000000000..099f17fa2fd --- /dev/null +++ b/3rdparty/jsoncpp/README.md @@ -0,0 +1,224 @@ +Introduction +------------ + +[JSON][json-org] is a lightweight data-interchange format. It can represent +numbers, strings, ordered sequences of values, and collections of name/value +pairs. + +[json-org]: http://json.org/ + +JsonCpp is a C++ library that allows manipulating JSON values, including +serialization and deserialization to and from strings. It can also preserve +existing comment in unserialization/serialization steps, making it a convenient +format to store user input files. + +## A note on backward-compatibility +Very soon, we are switching to C++11 only. For older compilers, try the `pre-C++11` branch. + +Using JsonCpp in your project +----------------------------- + +The recommended approach to integrating JsonCpp in your project is to build +the amalgamated source (a single `.cpp` file) with your own build system. This +ensures consistency of compilation flags and ABI compatibility. See the section +"Generating amalgamated source and header" for instructions. + +The `include/` should be added to your compiler include path. 
Jsoncpp headers +should be included as follow: + + #include <json/json.h> + +If JsonCpp was build as a dynamic library on Windows, then your project needs to +define the macro `JSON_DLL`. + + +Building and testing with new CMake +----------------------------------- + +[CMake][] is a C++ Makefiles/Solution generator. It is usually available on most +Linux system as package. On Ubuntu: + + sudo apt-get install cmake + +[CMake]: http://www.cmake.org + +Note that Python is also required to run the JSON reader/writer tests. If +missing, the build will skip running those tests. + +When running CMake, a few parameters are required: + +* a build directory where the makefiles/solution are generated. It is also used + to store objects, libraries and executables files. +* the generator to use: makefiles or Visual Studio solution? What version or + Visual Studio, 32 or 64 bits solution? + +Steps for generating solution/makefiles using `cmake-gui`: + +* Make "source code" point to the source directory. +* Make "where to build the binary" point to the directory to use for the build. +* Click on the "Grouped" check box. +* Review JsonCpp build options (tick `JSONCPP_LIB_BUILD_SHARED` to build as a + dynamic library). +* Click the configure button at the bottom, then the generate button. +* The generated solution/makefiles can be found in the binary directory. + +Alternatively, from the command-line on Unix in the source directory: + + mkdir -p build/debug + cd build/debug + cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=OFF -G "Unix Makefiles" ../.. + make + +Running `cmake -`" will display the list of available generators (passed using +the `-G` option). + +By default CMake hides compilation commands. This can be modified by specifying +`-DCMAKE_VERBOSE_MAKEFILE=true` when generating makefiles. + + +Building and testing with SCons +------------------------------- + +**Note:** The SCons-based build system is deprecated. Please use CMake; see the +section above. + +JsonCpp can use [Scons][] as a build system. Note that SCons requires Python to +be installed. + +[SCons]: http://www.scons.org/ + +Invoke SCons as follows: + + scons platform=$PLATFORM [TARGET] + +where `$PLATFORM` may be one of: + +* `suncc`: Sun C++ (Solaris) +* `vacpp`: Visual Age C++ (AIX) +* `mingw` +* `msvc6`: Microsoft Visual Studio 6 service pack 5-6 +* `msvc70`: Microsoft Visual Studio 2002 +* `msvc71`: Microsoft Visual Studio 2003 +* `msvc80`: Microsoft Visual Studio 2005 +* `msvc90`: Microsoft Visual Studio 2008 +* `linux-gcc`: Gnu C++ (linux, also reported to work for Mac OS X) + +If you are building with Microsoft Visual Studio 2008, you need to set up the +environment by running `vcvars32.bat` (e.g. MSVC 2008 command prompt) before +running SCons. + + +Running the tests manually +-------------------------- + +Note that test can be run using SCons using the `check` target: + + scons platform=$PLATFORM check + +You need to run tests manually only if you are troubleshooting an issue. + +In the instructions below, replace `path/to/jsontest` with the path of the +`jsontest` executable that was compiled on your platform. + + cd test + # This will run the Reader/Writer tests + python runjsontests.py path/to/jsontest + + # This will run the Reader/Writer tests, using JSONChecker test suite + # (http://www.json.org/JSON_checker/). + # Notes: not all tests pass: JsonCpp is too lenient (for example, + # it allows an integer to start with '0'). The goal is to improve + # strict mode parsing to get all tests to pass. 
+ python runjsontests.py --with-json-checker path/to/jsontest + + # This will run the unit tests (mostly Value) + python rununittests.py path/to/test_lib_json + + # You can run the tests using valgrind: + python rununittests.py --valgrind path/to/test_lib_json + + +Building the documentation +-------------------------- + +Run the Python script `doxybuild.py` from the top directory: + + python doxybuild.py --doxygen=$(which doxygen) --open --with-dot + +See `doxybuild.py --help` for options. + + +Generating amalgamated source and header +---------------------------------------- + +JsonCpp is provided with a script to generate a single header and a single +source file to ease inclusion into an existing project. The amalgamated source +can be generated at any time by running the following command from the +top-directory (this requires Python 2.6): + + python amalgamate.py + +It is possible to specify header name. See the `-h` option for detail. + +By default, the following files are generated: +* `dist/jsoncpp.cpp`: source file that needs to be added to your project. +* `dist/json/json.h`: corresponding header file for use in your project. It is + equivalent to including `json/json.h` in non-amalgamated source. This header + only depends on standard headers. +* `dist/json/json-forwards.h`: header that provides forward declaration of all + JsonCpp types. + +The amalgamated sources are generated by concatenating JsonCpp source in the +correct order and defining the macro `JSON_IS_AMALGAMATION` to prevent inclusion +of other headers. + + +Adding a reader/writer test +--------------------------- + +To add a test, you need to create two files in test/data: + +* a `TESTNAME.json` file, that contains the input document in JSON format. +* a `TESTNAME.expected` file, that contains a flatened representation of the + input document. + +The `TESTNAME.expected` file format is as follows: + +* each line represents a JSON element of the element tree represented by the + input document. +* each line has two parts: the path to access the element separated from the + element value by `=`. Array and object values are always empty (i.e. + represented by either `[]` or `{}`). +* element path: `.` represents the root element, and is used to separate object + members. `[N]` is used to specify the value of an array element at index `N`. + +See the examples `test_complex_01.json` and `test_complex_01.expected` to better +understand element paths. + + +Understanding reader/writer test output +--------------------------------------- + +When a test is run, output files are generated beside the input test files. +Below is a short description of the content of each file: + +* `test_complex_01.json`: input JSON document. +* `test_complex_01.expected`: flattened JSON element tree used to check if + parsing was corrected. +* `test_complex_01.actual`: flattened JSON element tree produced by `jsontest` + from reading `test_complex_01.json`. +* `test_complex_01.rewrite`: JSON document written by `jsontest` using the + `Json::Value` parsed from `test_complex_01.json` and serialized using + `Json::StyledWritter`. +* `test_complex_01.actual-rewrite`: flattened JSON element tree produced by + `jsontest` from reading `test_complex_01.rewrite`. +* `test_complex_01.process-output`: `jsontest` output, typically useful for + understanding parsing errors. + + +License +------- + +See the `LICENSE` file for details. In summary, JsonCpp is licensed under the +MIT license, or public domain if desired and recognized in your jurisdiction. 
+ diff --git a/3rdparty/jsoncpp/SConstruct b/3rdparty/jsoncpp/SConstruct new file mode 100644 index 00000000000..1c55bcd0986 --- /dev/null +++ b/3rdparty/jsoncpp/SConstruct @@ -0,0 +1,248 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + # LD_LIBRARY_PATH & co is required on some system for the compiler + vars = {} + for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'msvc90': + env['MSVS_VERSION']='9.0' + # Scons 1.2 fails to detect the correct location of the platform SDK. + # So we propagate those from the environment. This requires that the + # user run vcvars32.bat before compiling. + if 'INCLUDE' in os.environ: + env['ENV']['INCLUDE'] = os.environ['INCLUDE'] + if 'LIB' in os.environ: + env['ENV']['LIB'] = os.environ['LIB'] + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = os.environ.get("CXXFLAGS", "-Wall"), LINKFLAGS=os.environ.get("LDFLAGS", "") ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. 
+env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( 
os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/3rdparty/jsoncpp/amalgamate.py b/3rdparty/jsoncpp/amalgamate.py new file mode 100644 index 00000000000..550f6a67624 --- /dev/null +++ b/3rdparty/jsoncpp/amalgamate.py @@ -0,0 +1,150 @@ +"""Amalgate json-cpp library sources into a single source and header file. + +Requires Python 2.6 + +Example of invocation (must be invoked from json-cpp top directory): +python amalgate.py +""" +import os +import os.path +import sys + +class AmalgamationFile: + def __init__( self, top_dir ): + self.top_dir = top_dir + self.blocks = [] + + def add_text( self, text ): + if not text.endswith( "\n" ): + text += "\n" + self.blocks.append( text ) + + def add_file( self, relative_input_path, wrap_in_comment=False ): + def add_marker( prefix ): + self.add_text( "" ) + self.add_text( "// " + "/"*70 ) + self.add_text( "// %s of content of file: %s" % (prefix, relative_input_path.replace("\\","/")) ) + self.add_text( "// " + "/"*70 ) + self.add_text( "" ) + add_marker( "Beginning" ) + f = open( os.path.join( self.top_dir, relative_input_path ), "rt" ) + content = f.read() + if wrap_in_comment: + content = "/*\n" + content + "\n*/" + self.add_text( content ) + f.close() + add_marker( "End" ) + self.add_text( "\n\n\n\n" ) + + def get_value( self ): + return "".join( self.blocks ).replace("\r\n","\n") + + def write_to( self, output_path ): + output_dir = os.path.dirname( output_path ) + if output_dir and not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + f = open( output_path, "wb" ) + f.write( str.encode(self.get_value(), 'UTF-8') ) + f.close() + +def amalgamate_source( source_top_dir=None, + target_source_path=None, + header_include_path=None ): + """Produces amalgated source. + Parameters: + source_top_dir: top-directory + target_source_path: output .cpp path + header_include_path: generated header path relative to target_source_path. + """ + print("Amalgating header...") + header = AmalgamationFile( source_top_dir ) + header.add_text( "/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/)." ) + header.add_text( "/// It is intented to be used with #include <%s>" % header_include_path ) + header.add_file( "LICENSE", wrap_in_comment=True ) + header.add_text( "#ifndef JSON_AMALGATED_H_INCLUDED" ) + header.add_text( "# define JSON_AMALGATED_H_INCLUDED" ) + header.add_text( "/// If defined, indicates that the source file is amalgated" ) + header.add_text( "/// to prevent private header inclusion." 
) + header.add_text( "#define JSON_IS_AMALGAMATION" ) + header.add_file( "include/json/version.h" ) + header.add_file( "include/json/config.h" ) + header.add_file( "include/json/forwards.h" ) + header.add_file( "include/json/features.h" ) + header.add_file( "include/json/value.h" ) + header.add_file( "include/json/reader.h" ) + header.add_file( "include/json/writer.h" ) + header.add_file( "include/json/assertions.h" ) + header.add_text( "#endif //ifndef JSON_AMALGATED_H_INCLUDED" ) + + target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path ) + print("Writing amalgated header to %r" % target_header_path) + header.write_to( target_header_path ) + + base, ext = os.path.splitext( header_include_path ) + forward_header_include_path = base + "-forwards" + ext + print("Amalgating forward header...") + header = AmalgamationFile( source_top_dir ) + header.add_text( "/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/)." ) + header.add_text( "/// It is intented to be used with #include <%s>" % forward_header_include_path ) + header.add_text( "/// This header provides forward declaration for all JsonCpp types." ) + header.add_file( "LICENSE", wrap_in_comment=True ) + header.add_text( "#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED" ) + header.add_text( "# define JSON_FORWARD_AMALGATED_H_INCLUDED" ) + header.add_text( "/// If defined, indicates that the source file is amalgated" ) + header.add_text( "/// to prevent private header inclusion." ) + header.add_text( "#define JSON_IS_AMALGAMATION" ) + header.add_file( "include/json/config.h" ) + header.add_file( "include/json/forwards.h" ) + header.add_text( "#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED" ) + + target_forward_header_path = os.path.join( os.path.dirname(target_source_path), + forward_header_include_path ) + print("Writing amalgated forward header to %r" % target_forward_header_path) + header.write_to( target_forward_header_path ) + + print("Amalgating source...") + source = AmalgamationFile( source_top_dir ) + source.add_text( "/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/)." ) + source.add_text( "/// It is intented to be used with #include <%s>" % header_include_path ) + source.add_file( "LICENSE", wrap_in_comment=True ) + source.add_text( "" ) + source.add_text( "#include <%s>" % header_include_path ) + source.add_text( "" ) + lib_json = "src/lib_json" + source.add_file( os.path.join(lib_json, "json_tool.h") ) + source.add_file( os.path.join(lib_json, "json_reader.cpp") ) + source.add_file( os.path.join(lib_json, "json_batchallocator.h") ) + source.add_file( os.path.join(lib_json, "json_valueiterator.inl") ) + source.add_file( os.path.join(lib_json, "json_value.cpp") ) + source.add_file( os.path.join(lib_json, "json_writer.cpp") ) + + print("Writing amalgated source to %r" % target_source_path) + source.write_to( target_source_path ) + +def main(): + usage = """%prog [options] +Generate a single amalgated source and header file from the sources. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option("-s", "--source", dest="target_source_path", action="store", default="dist/jsoncpp.cpp", + help="""Output .cpp source path. [Default: %default]""") + parser.add_option("-i", "--include", dest="header_include_path", action="store", default="json/json.h", + help="""Header include path. Used to include the header from the amalgated source file. 
[Default: %default]""") + parser.add_option("-t", "--top-dir", dest="top_dir", action="store", default=os.getcwd(), + help="""Source top-directory. [Default: %default]""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + msg = amalgamate_source( source_top_dir=options.top_dir, + target_source_path=options.target_source_path, + header_include_path=options.header_include_path ) + if msg: + sys.stderr.write( msg + "\n" ) + sys.exit( 1 ) + else: + print("Source succesfully amalagated") + +if __name__ == "__main__": + main() diff --git a/3rdparty/jsoncpp/dev.makefile b/3rdparty/jsoncpp/dev.makefile new file mode 100644 index 00000000000..dd16bdd6ef9 --- /dev/null +++ b/3rdparty/jsoncpp/dev.makefile @@ -0,0 +1,14 @@ +all: build test-amalgamate + +build: + mkdir -p build/debug + cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=ON -G "Unix Makefiles" ../.. + make -C build/debug + +# Currently, this depends on include/json/version.h generated +# by cmake. +test-amalgamate: build + python2.7 amalgamate.py + python3.4 amalgamate.py + +.PHONY: build diff --git a/3rdparty/jsoncpp/devtools/__init__.py b/3rdparty/jsoncpp/devtools/__init__.py new file mode 100644 index 00000000000..c944e7cb0c0 --- /dev/null +++ b/3rdparty/jsoncpp/devtools/__init__.py @@ -0,0 +1 @@ +# module
\ No newline at end of file diff --git a/3rdparty/jsoncpp/devtools/agent_vmw7.json b/3rdparty/jsoncpp/devtools/agent_vmw7.json new file mode 100644 index 00000000000..a1db7db6f18 --- /dev/null +++ b/3rdparty/jsoncpp/devtools/agent_vmw7.json @@ -0,0 +1,33 @@ +{ + "cmake_variants" : [ + {"name": "generator", + "generators": [ + {"generator": [ + "Visual Studio 7 .NET 2003", + "Visual Studio 9 2008", + "Visual Studio 9 2008 Win64", + "Visual Studio 10", + "Visual Studio 10 Win64", + "Visual Studio 11", + "Visual Studio 11 Win64" + ] + }, + {"generator": ["MinGW Makefiles"], + "env_prepend": [{"path": "c:/wut/prg/MinGW/bin"}] + } + ] + }, + {"name": "shared_dll", + "variables": [ + ["JSONCPP_LIB_BUILD_SHARED=true"], + ["JSONCPP_LIB_BUILD_SHARED=false"] + ] + }, + {"name": "build_type", + "build_types": [ + "debug", + "release" + ] + } + ] +} diff --git a/3rdparty/jsoncpp/devtools/agent_vmxp.json b/3rdparty/jsoncpp/devtools/agent_vmxp.json new file mode 100644 index 00000000000..d34cf86addf --- /dev/null +++ b/3rdparty/jsoncpp/devtools/agent_vmxp.json @@ -0,0 +1,26 @@ +{ + "cmake_variants" : [ + {"name": "generator", + "generators": [ + {"generator": [ + "Visual Studio 6", + "Visual Studio 7", + "Visual Studio 8 2005" + ] + } + ] + }, + {"name": "shared_dll", + "variables": [ + ["JSONCPP_LIB_BUILD_SHARED=true"], + ["JSONCPP_LIB_BUILD_SHARED=false"] + ] + }, + {"name": "build_type", + "build_types": [ + "debug", + "release" + ] + } + ] +} diff --git a/3rdparty/jsoncpp/devtools/antglob.py b/3rdparty/jsoncpp/devtools/antglob.py new file mode 100644 index 00000000000..8b7b4ca297e --- /dev/null +++ b/3rdparty/jsoncpp/devtools/antglob.py @@ -0,0 +1,202 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from __future__ import print_function +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. 
+ Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print('ant_pattern:', ant_pattern, ' => ', rex.pattern) + for accepted_match in accepted_matches: + print('Accepted?:', accepted_match) + self.assertTrue( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print('Rejected?:', rejected_match) + self.assertTrue( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/3rdparty/jsoncpp/devtools/batchbuild.py b/3rdparty/jsoncpp/devtools/batchbuild.py new file mode 100644 index 00000000000..6f57945a7c3 --- /dev/null +++ b/3rdparty/jsoncpp/devtools/batchbuild.py @@ -0,0 +1,281 @@ +from __future__ import print_function +import collections +import itertools +import json +import os +import os.path +import re +import shutil +import string +import subprocess +import sys +import cgi + +class BuildDesc: + def __init__(self, prepend_envs=None, variables=None, build_type=None, generator=None): + self.prepend_envs = prepend_envs or [] # [ { "var": "value" } ] + self.variables = variables or [] + self.build_type = build_type + self.generator = generator + + def merged_with( self, build_desc ): + """Returns a new BuildDesc by merging field content. + Prefer build_desc fields to self fields for single valued field. 
+ """ + return BuildDesc( self.prepend_envs + build_desc.prepend_envs, + self.variables + build_desc.variables, + build_desc.build_type or self.build_type, + build_desc.generator or self.generator ) + + def env( self ): + environ = os.environ.copy() + for values_by_name in self.prepend_envs: + for var, value in list(values_by_name.items()): + var = var.upper() + if type(value) is unicode: + value = value.encode( sys.getdefaultencoding() ) + if var in environ: + environ[var] = value + os.pathsep + environ[var] + else: + environ[var] = value + return environ + + def cmake_args( self ): + args = ["-D%s" % var for var in self.variables] + # skip build type for Visual Studio solution as it cause warning + if self.build_type and 'Visual' not in self.generator: + args.append( "-DCMAKE_BUILD_TYPE=%s" % self.build_type ) + if self.generator: + args.extend( ['-G', self.generator] ) + return args + + def __repr__( self ): + return "BuildDesc( %s, build_type=%s )" % (" ".join( self.cmake_args()), self.build_type) + +class BuildData: + def __init__( self, desc, work_dir, source_dir ): + self.desc = desc + self.work_dir = work_dir + self.source_dir = source_dir + self.cmake_log_path = os.path.join( work_dir, 'batchbuild_cmake.log' ) + self.build_log_path = os.path.join( work_dir, 'batchbuild_build.log' ) + self.cmake_succeeded = False + self.build_succeeded = False + + def execute_build(self): + print('Build %s' % self.desc) + self._make_new_work_dir( ) + self.cmake_succeeded = self._generate_makefiles( ) + if self.cmake_succeeded: + self.build_succeeded = self._build_using_makefiles( ) + return self.build_succeeded + + def _generate_makefiles(self): + print(' Generating makefiles: ', end=' ') + cmd = ['cmake'] + self.desc.cmake_args( ) + [os.path.abspath( self.source_dir )] + succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.cmake_log_path ) + print('done' if succeeded else 'FAILED') + return succeeded + + def _build_using_makefiles(self): + print(' Building:', end=' ') + cmd = ['cmake', '--build', self.work_dir] + if self.desc.build_type: + cmd += ['--config', self.desc.build_type] + succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.build_log_path ) + print('done' if succeeded else 'FAILED') + return succeeded + + def _execute_build_subprocess(self, cmd, env, log_path): + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir, + env=env ) + stdout, _ = process.communicate( ) + succeeded = (process.returncode == 0) + with open( log_path, 'wb' ) as flog: + log = ' '.join( cmd ) + '\n' + stdout + '\nExit code: %r\n' % process.returncode + flog.write( fix_eol( log ) ) + return succeeded + + def _make_new_work_dir(self): + if os.path.isdir( self.work_dir ): + print(' Removing work directory', self.work_dir) + shutil.rmtree( self.work_dir, ignore_errors=True ) + if not os.path.isdir( self.work_dir ): + os.makedirs( self.work_dir ) + +def fix_eol( stdout ): + """Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n). 
+ """ + return re.sub( '\r*\n', os.linesep, stdout ) + +def load_build_variants_from_config( config_path ): + with open( config_path, 'rb' ) as fconfig: + data = json.load( fconfig ) + variants = data[ 'cmake_variants' ] + build_descs_by_axis = collections.defaultdict( list ) + for axis in variants: + axis_name = axis["name"] + build_descs = [] + if "generators" in axis: + for generator_data in axis["generators"]: + for generator in generator_data["generator"]: + build_desc = BuildDesc( generator=generator, + prepend_envs=generator_data.get("env_prepend") ) + build_descs.append( build_desc ) + elif "variables" in axis: + for variables in axis["variables"]: + build_desc = BuildDesc( variables=variables ) + build_descs.append( build_desc ) + elif "build_types" in axis: + for build_type in axis["build_types"]: + build_desc = BuildDesc( build_type=build_type ) + build_descs.append( build_desc ) + build_descs_by_axis[axis_name].extend( build_descs ) + return build_descs_by_axis + +def generate_build_variants( build_descs_by_axis ): + """Returns a list of BuildDesc generated for the partial BuildDesc for each axis.""" + axis_names = list(build_descs_by_axis.keys()) + build_descs = [] + for axis_name, axis_build_descs in list(build_descs_by_axis.items()): + if len(build_descs): + # for each existing build_desc and each axis build desc, create a new build_desc + new_build_descs = [] + for prototype_build_desc, axis_build_desc in itertools.product( build_descs, axis_build_descs): + new_build_descs.append( prototype_build_desc.merged_with( axis_build_desc ) ) + build_descs = new_build_descs + else: + build_descs = axis_build_descs + return build_descs + +HTML_TEMPLATE = string.Template('''<html> +<head> + <title>$title</title> + <style type="text/css"> + td.failed {background-color:#f08080;} + td.ok {background-color:#c0eec0;} + </style> +</head> +<body> +<table border="1"> +<thead> + <tr> + <th>Variables</th> + $th_vars + </tr> + <tr> + <th>Build type</th> + $th_build_types + </tr> +</thead> +<tbody> +$tr_builds +</tbody> +</table> +</body></html>''') + +def generate_html_report( html_report_path, builds ): + report_dir = os.path.dirname( html_report_path ) + # Vertical axis: generator + # Horizontal: variables, then build_type + builds_by_generator = collections.defaultdict( list ) + variables = set() + build_types_by_variable = collections.defaultdict( set ) + build_by_pos_key = {} # { (generator, var_key, build_type): build } + for build in builds: + builds_by_generator[build.desc.generator].append( build ) + var_key = tuple(sorted(build.desc.variables)) + variables.add( var_key ) + build_types_by_variable[var_key].add( build.desc.build_type ) + pos_key = (build.desc.generator, var_key, build.desc.build_type) + build_by_pos_key[pos_key] = build + variables = sorted( variables ) + th_vars = [] + th_build_types = [] + for variable in variables: + build_types = sorted( build_types_by_variable[variable] ) + nb_build_type = len(build_types_by_variable[variable]) + th_vars.append( '<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape( ' '.join( variable ) ) ) ) + for build_type in build_types: + th_build_types.append( '<th>%s</th>' % cgi.escape(build_type) ) + tr_builds = [] + for generator in sorted( builds_by_generator ): + tds = [ '<td>%s</td>\n' % cgi.escape( generator ) ] + for variable in variables: + build_types = sorted( build_types_by_variable[variable] ) + for build_type in build_types: + pos_key = (generator, variable, build_type) + build = build_by_pos_key.get(pos_key) + if build: + 
cmake_status = 'ok' if build.cmake_succeeded else 'FAILED'
+                    build_status = 'ok' if build.build_succeeded else 'FAILED'
+                    cmake_log_url = os.path.relpath( build.cmake_log_path, report_dir )
+                    build_log_url = os.path.relpath( build.build_log_path, report_dir )
+                    td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % (
+                        build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
+                    if build.cmake_succeeded:
+                        td += '<br><a href="%s" class="%s">Build: %s</a>' % (
+                            build_log_url, build_status.lower(), build_status)
+                    td += '</td>'
+                else:
+                    td = '<td></td>'
+                tds.append( td )
+            tr_builds.append( '<tr>%s</tr>' % '\n'.join( tds ) )
+    html = HTML_TEMPLATE.substitute(
+        title='Batch build report',
+        th_vars=' '.join(th_vars),
+        th_build_types=' '.join( th_build_types),
+        tr_builds='\n'.join( tr_builds ) )
+    with open( html_report_path, 'wt' ) as fhtml:
+        fhtml.write( html )
+    print('HTML report generated in:', html_report_path)
+
+def main():
+    usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH [CONFIG2_JSON_PATH...]
+Build a given CMake based project located in SOURCE_DIR with multiple generators/options
+as described in CONFIG_JSON_PATH, building in WORK_DIR.
+
+Example of call:
+python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.json
+"""
+    from optparse import OptionParser
+    parser = OptionParser(usage=usage)
+    parser.allow_interspersed_args = True
+#    parser.add_option('-v', '--verbose', dest="verbose", action='store_true',
+#        help="""Be verbose.""")
+    parser.enable_interspersed_args()
+    options, args = parser.parse_args()
+    if len(args) < 3:
+        parser.error( "Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH." )
+    work_dir = args[0]
+    source_dir = args[1].rstrip('/\\')
+    config_paths = args[2:]
+    for config_path in config_paths:
+        if not os.path.isfile( config_path ):
+            parser.error( "Cannot read: %r" % config_path )
+
+    # generate build variants
+    build_descs = []
+    for config_path in config_paths:
+        build_descs_by_axis = load_build_variants_from_config( config_path )
+        build_descs.extend( generate_build_variants( build_descs_by_axis ) )
+    print('Build variants (%d):' % len(build_descs))
+    # assign build directory for each variant
+    if not os.path.isdir( work_dir ):
+        os.makedirs( work_dir )
+    builds = []
+    with open( os.path.join( work_dir, 'matrix-dir-map.txt' ), 'wt' ) as fmatrixmap:
+        for index, build_desc in enumerate( build_descs ):
+            build_desc_work_dir = os.path.join( work_dir, '%03d' % (index+1) )
+            builds.append( BuildData( build_desc, build_desc_work_dir, source_dir ) )
+            fmatrixmap.write( '%s: %s\n' % (build_desc_work_dir, build_desc) )
+    for build in builds:
+        build.execute_build()
+    html_report_path = os.path.join( work_dir, 'batchbuild-report.html' )
+    generate_html_report( html_report_path, builds )
+    print('Done')
+
+
+if __name__ == '__main__':
+    main()
+
diff --git a/3rdparty/jsoncpp/devtools/fixeol.py b/3rdparty/jsoncpp/devtools/fixeol.py new file mode 100644 index 00000000000..53af7612bc7 --- /dev/null +++ b/3rdparty/jsoncpp/devtools/fixeol.py @@ -0,0 +1,64 @@
+from __future__ import print_function
+import os.path
+import sys
+
+def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
+    """Makes sure that all sources have the specified eol sequence (default: unix)."""
+    if not os.path.isfile( path ):
+        raise ValueError( 'Path "%s" is not a file' % path )
+    try:
+        f = open(path, 'rb')
+    except IOError as msg:
+        print("%s: I/O Error: %s" % (path, str(msg)), file=sys.stderr)
+        return False
+    try:
+
raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print('%s =>' % path, end=' ') + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print(is_dry_run and ' NEED FIX' or ' FIXED') + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes = antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/3rdparty/jsoncpp/devtools/licenseupdater.py b/3rdparty/jsoncpp/devtools/licenseupdater.py new file mode 100644 index 00000000000..8cb71d737b3 --- /dev/null +++ b/3rdparty/jsoncpp/devtools/licenseupdater.py @@ -0,0 +1,94 @@ +"""Updates the license text in source file. +""" +from __future__ import print_function + +# An existing license is found if the file starts with the string below, +# and ends with the first blank line. +LICENSE_BEGIN = "// Copyright " + +BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +""".replace('\r\n','\n') + +def update_license( path, dry_run, show_diff ): + """Update the license statement in the specified file. + Parameters: + path: path of the C++ source file to update. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + with open( path, 'rt' ) as fin: + original_text = fin.read().replace('\r\n','\n') + newline = fin.newlines and fin.newlines[0] or '\n' + if not original_text.startswith( LICENSE_BEGIN ): + # No existing license found => prepend it + new_text = BRIEF_LICENSE + original_text + else: + license_end_index = original_text.index( '\n\n' ) # search first blank line + new_text = BRIEF_LICENSE + original_text[license_end_index+2:] + if original_text != new_text: + if not dry_run: + with open( path, 'wb' ) as fout: + fout.write( new_text.replace('\n', newline ) ) + print('Updated', path) + if show_diff: + import difflib + print('\n'.join( difflib.unified_diff( original_text.split('\n'), + new_text.split('\n') ) )) + return True + return False + +def update_license_in_source_directories( source_dirs, dry_run, show_diff ): + """Updates license text in C++ source files found in directory source_dirs. + Parameters: + source_dirs: list of directory to scan for C++ sources. Directories are + scanned recursively. 
+ dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + from devtools import antglob + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + for source_dir in source_dirs: + cpp_sources = antglob.glob( source_dir, + includes = '''**/*.h **/*.cpp **/*.inl''', + prune_dirs = prune_dirs ) + for source in cpp_sources: + update_license( source, dry_run, show_diff ) + +def main(): + usage = """%prog DIR [DIR2...] +Updates license text in sources of the project in source files found +in the directory specified on the command-line. + +Example of call: +python devtools\licenseupdater.py include src -n --diff +=> Show change that would be made to the sources. + +python devtools\licenseupdater.py include src +=> Update license statement on all sources in directories include/ and src/. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, + help="""Only show what files are updated, do not update the files""") + parser.add_option('--diff', dest="show_diff", action='store_true', default=False, + help="""On update, show change made to the file.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + update_license_in_source_directories( args, options.dry_run, options.show_diff ) + print('Done') + +if __name__ == '__main__': + import sys + import os.path + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + main() + diff --git a/3rdparty/jsoncpp/devtools/tarball.py b/3rdparty/jsoncpp/devtools/tarball.py new file mode 100644 index 00000000000..ccbda394255 --- /dev/null +++ b/3rdparty/jsoncpp/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. 
+ """ + # !!! This class method is not documented in the online doc + # nor is bz2open! + tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/3rdparty/jsoncpp/doc/doxyfile.in b/3rdparty/jsoncpp/doc/doxyfile.in new file mode 100644 index 00000000000..a4161865cc7 --- /dev/null +++ b/3rdparty/jsoncpp/doc/doxyfile.in @@ -0,0 +1,2302 @@ +# Doxyfile 1.8.5 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all text +# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv +# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv +# for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = + +# With the PROJECT_LOGO tag one can specify an logo or icon that is included in +# the documentation. The maximum height of the logo should not exceed 55 pixels +# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo +# to the output directory. + +PROJECT_LOGO = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub- +# directories (in 2 levels) under the output directory of each output format and +# will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise causes +# performance problems for the file system. +# The default value is: NO. 
+ +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# Possible values are: Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese- +# Traditional, Croatian, Czech, Danish, Dutch, English, Esperanto, Farsi, +# Finnish, French, German, Greek, Hungarian, Italian, Japanese, Japanese-en, +# Korean, Korean-en, Latvian, Norwegian, Macedonian, Persian, Polish, +# Portuguese, Romanian, Russian, Serbian, Slovak, Slovene, Spanish, Swedish, +# Turkish, Ukrainian and Vietnamese. +# The default value is: English. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path +# before files name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. 
+ +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful is your file systems doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description. If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. +# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a +# new page for each member. If set to NO, the documentation of a member will be +# part of the file/class/namespace that contains it. +# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:\n" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". You can put \n's in the value part of an alias to insert +# newlines. 
+ +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=<a HREF='http://www.json.org/'>JSON (JavaScript Object Notation)</a>" + +# This tag can be used to specify a number of word-keyword mappings (TCL only). +# A mapping has the form "name=value". For example adding "class=itcl::class" +# will allow you to use the command class in the itcl::class meaning. + +TCL_SUBST = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. +# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for VHDL. +# The default value is: NO. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given +# extension. Doxygen has a built-in mapping, but you can override or extend it +# using this tag. The format is ext=language, where ext is a file extension, and +# language is one of the parsers supported by doxygen: IDL, Java, Javascript, +# C#, C, C++, D, PHP, Objective-C, Python, Fortran, VHDL. For instance to make +# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C +# (default is Fortran), use: inc=Fortran f=C. +# +# Note For files without extension you can use no_extension as a placeholder. +# +# Note that for custom extensions you also need to set FILE_PATTERNS otherwise +# the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See http://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibilities issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by by putting a % sign in front of the word +# or globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. std::string, std::vector, etc.) 
but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen to replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. + +INLINE_SIMPLE_STRUCTS = NO + +# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. 
This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. + +TYPEDEF_HIDES_STRUCT = NO + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. + +LOOKUP_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will +# be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file will be +# included in the documentation. +# The default value is: NO. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local methods, +# which are defined in the implementation section but not in the interface are +# included in the documentation. If set to NO only methods in the interface are +# included. +# The default value is: NO. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base name of +# the file that contains the anonymous namespace. By default anonymous namespace +# are hidden. +# The default value is: NO. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all +# undocumented members inside documented classes or files. If set to NO these +# members will be included in the various overviews, but no documentation +# section is generated. This option has no effect if EXTRACT_ALL is enabled. 
+# The default value is: NO. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO these classes will be included in the various overviews. This option has +# no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# (class|struct|union) declarations. If set to NO these declarations will be +# included in the documentation. +# The default value is: NO. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. If set to NO these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file +# names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. +# The default value is: system dependent. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with +# their full class and namespace scopes in the documentation. If set to YES the +# scope will be hidden. +# The default value is: NO. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of +# the files that are included by a file in the documentation of that file. +# The default value is: YES. + +SHOW_INCLUDE_FILES = YES + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include +# files with double quotes in the documentation rather than with sharp brackets. +# The default value is: NO. + +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO the members will appear in declaration order. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. 
+# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. + +SORT_BY_SCOPE_NAME = YES + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable ( YES) or disable ( NO) the +# todo list. This list is created by putting \todo commands in the +# documentation. +# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable ( YES) or disable ( NO) the +# test list. This list is created by putting \test commands in the +# documentation. +# The default value is: YES. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable ( YES) or disable ( NO) the bug +# list. This list is created by putting \bug commands in the documentation. +# The default value is: YES. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable ( YES) or disable ( NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if <section_label> ... \endif and \cond <section_label> +# ... \endcond blocks. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. +# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES the list +# will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. 
This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command command input-file, where command is the value of the +# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided +# by doxygen. Whatever the program writes to standard output is used as the file +# version. For an example see the documentation. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed +# by doxygen. The layout file controls the global structure of the generated +# output files in an output format independent way. To create the layout file +# that represents doxygen's defaults, run doxygen with the -l option. You can +# optionally specify a file name after the option, if omitted DoxygenLayout.xml +# will be used as the name of the layout file. +# +# Note that if you run doxygen from a directory containing a file called +# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE +# tag is left empty. + +LAYOUT_FILE = + +# The CITE_BIB_FILES tag can be used to specify one or more bib files containing +# the reference definitions. This must be a list of .bib files. The .bib +# extension is automatically appended if omitted. This requires the bibtex tool +# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info. +# For LaTeX the style of the bibliography can be controlled using +# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the +# search path. Do not use file names with spaces, bibtex cannot handle them. See +# also \cite for info how to create references. + +CITE_BIB_FILES = + +#--------------------------------------------------------------------------- +# Configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated to +# standard output by doxygen. If QUIET is set to YES this implies that the +# messages are off. +# The default value is: NO. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated to standard error ( stderr) by doxygen. If WARNINGS is set to YES +# this implies that the warnings are on. +# +# Tip: Turn warnings on while writing the documentation. +# The default value is: YES. + +WARNINGS = YES + +# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate +# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: YES. + +WARN_IF_UNDOCUMENTED = YES + +# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some parameters +# in a documented function, or documenting parameters that don't exist or using +# markup commands wrongly. +# The default value is: YES. 
+ +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that +# are documented, but have no documentation for their parameters or return +# value. If set to NO doxygen will only warn about wrong or incomplete parameter +# documentation, but not about the absence of documentation. +# The default value is: NO. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# Configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag is used to specify the files and/or directories that contain +# documented source files. You may enter file names like myfile.cpp or +# directories like /usr/src/myproject. Separate the files or directories with +# spaces. +# Note: If this tag is empty the current directory is searched. + +INPUT = ../include \ + ../src/lib_json \ + . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses +# libiconv (or the iconv built into libc) for the transcoding. See the libiconv +# documentation (see: http://www.gnu.org/software/libiconv) for the list of +# possible encodings. +# The default value is: UTF-8. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. If left blank the +# following patterns are tested:*.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii, +# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, +# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, +# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, +# *.qsf, *.as and *.js. + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. +# +# Note that relative paths are relative to the directory from which doxygen is +# run. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. 
+ +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories use the pattern */test/* + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or directories +# that contain example code fragments that are included (see the \include +# command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and +# *.h) to filter out the source-files in the directories. If left blank all +# files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude commands +# irrespective of the value of the RECURSIVE tag. +# The default value is: NO. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or directories +# that contain images that are to be included in the documentation (see the +# \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command: +# +# <filter> <input-file> +# +# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the +# name of an input file. Doxygen will then use the output that the filter +# program writes to standard output. If FILTER_PATTERNS is specified, this tag +# will be ignored. +# +# Note that the filter must not add or remove lines; it is applied before the +# code is scanned, but not when the output code is generated. If lines are added +# or removed, the anchors will not be placed correctly. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. The filters are a list of the form: pattern=filter +# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how +# filters are used. If the FILTER_PATTERNS tag is empty or if none of the +# patterns match the file name, INPUT_FILTER is applied. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER ) will also be used to filter the input files that are used for +# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). +# The default value is: NO. + +FILTER_SOURCE_FILES = NO + +# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file +# pattern. 
A pattern will override the setting for FILTER_PATTERN (if any) and +# it is also possible to disable source filtering for a specific pattern using +# *.ext= (so without naming a filter). +# This tag requires that the tag FILTER_SOURCE_FILES is set to YES. + +FILTER_SOURCE_PATTERNS = + +# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that +# is part of the input, its contents will be placed on the main page +# (index.html). This can be useful if you have a project on for instance GitHub +# and want to reuse the introduction page also for the doxygen output. + +USE_MDFILE_AS_MAINPAGE = + +#--------------------------------------------------------------------------- +# Configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will be +# generated. Documented entities will be cross-referenced with these sources. +# +# Note: To get rid of all source code in the generated output, make sure that +# also VERBATIM_HEADERS is set to NO. +# The default value is: NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body of functions, +# classes and enums directly into the documentation. +# The default value is: NO. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any +# special comment blocks from generated source code fragments. Normal C, C++ and +# Fortran comments will always remain visible. +# The default value is: YES. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES then for each documented +# function all documented functions referencing it will be listed. +# The default value is: NO. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES then for each documented function +# all documented entities called/used by that function will be listed. +# The default value is: NO. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES, then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will +# link to the documentation. +# The default value is: YES. + +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. The htags tool is part of GNU's global source tagging system +# (see http://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. +# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the config file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. 
in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = NO + +# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in +# which the alphabetical index list will be split. +# Minimum value: 1, maximum value: 20, default value: 5. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all classes will +# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag +# can be used to specify a prefix (or a list of prefixes) that should be ignored +# while generating the index headers. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output +# The default value is: YES. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). +# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. +# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. 
+# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. +# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. +# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user- +# defined cascading style sheet that is included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. +# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefor more robust against future updates. +# Doxygen will copy the style sheet file to the output directory. For an example +# see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. Note +# that these files will be copied to the base HTML output directory. Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the stylesheet and background images according to +# this color. Hue is specified as an angle on a colorwheel, see +# http://en.wikipedia.org/wiki/Hue for more information. For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. +# Minimum value: 0, maximum value: 359, default value: 220. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. For a value of 0 the output will use grayscales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. 
Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. Setting this +# to NO can help when comparing the output of multiple runs. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_TIMESTAMP = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_SECTIONS = YES + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). So setting the number of +# entries 1 will produce a full collapsed tree by default. 0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_INDEX_NUM_ENTRIES = 100 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: http://developer.apple.com/tools/xcode/), introduced with +# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a +# Makefile in the HTML output directory. Running make will produce the docset in +# that directory and running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html +# for more information. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. 
+# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on +# Windows. +# +# The HTML Help Workshop contains a compiler that can convert all HTML output +# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML +# files are now used as the Windows 98 help format, and will replace the old +# Windows help format (.hlp) on all Windows platforms in the future. Compressed +# HTML files also contain an index, a table of contents, and you can search for +# words in the documentation. The HTML workshop also contains a viewer for +# compressed HTML files. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_HTMLHELP = %HTML_HELP% + +# The CHM_FILE tag can be used to specify the file name of the resulting .chm +# file. You can add a path in front of the file if the result should not be +# written to the html output directory. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# The HHC_LOCATION tag can be used to specify the location (absolute path +# including file name) of the HTML help compiler ( hhc.exe). If non-empty +# doxygen will try to run the HTML help compiler on the generated index.hhp. +# The file has to be specified with full path. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# The GENERATE_CHI flag controls if a separate .chi index file is generated ( +# YES) or that it should be included in the master .chm file ( NO). +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +GENERATE_CHI = YES + +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index ( hhk), content ( hhc) +# and project file content. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_INDEX_ENCODING = + +# The BINARY_TOC flag controls whether a binary table of contents is generated ( +# YES) or a normal table of contents ( NO) in the .chm file. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members to +# the table of contents of the HTML help documentation and to the tree view. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that +# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help +# (.qch) of the generated HTML documentation. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify +# the file name of the resulting .qch file. The path specified is relative to +# the HTML output folder. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help +# Project output. For more information please see Qt Help Project / Namespace +# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt +# Help Project output. For more information please see Qt Help Project / Virtual +# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- +# folders). +# The default value is: doc. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_VIRTUAL_FOLDER = doc + +# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom +# filter to add. For more information please see Qt Help Project / Custom +# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the +# custom filter to add. For more information please see Qt Help Project / Custom +# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's filter section matches. Qt Help Project / Filter Attributes (see: +# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_SECT_FILTER_ATTRS = + +# The QHG_LOCATION tag can be used to specify the location of Qt's +# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the +# generated .qhp file. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be +# generated, together with the HTML files, they form an Eclipse help plugin. To +# install this plugin and make it available under the help contents menu in +# Eclipse, the contents of the directory containing the HTML and XML files needs +# to be copied into the plugins directory of eclipse. The name of the directory +# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. +# After copying Eclipse needs to be restarted before the help appears. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the Eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have this +# name. Each documentation set should have its own identifier. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. + +ECLIPSE_DOC_ID = org.doxygen.Project + +# If you want full control over the layout of the generated HTML pages it might +# be necessary to disable the index and replace it with your own. 
The +# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top +# of each HTML page. A value of NO enables the index and the value YES disables +# it. Since the tabs in the index contain the same information as the navigation +# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +DISABLE_INDEX = NO + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. If the tag +# value is set to YES, a side panel will be generated containing a tree-like +# index structure (just like the one that is generated for HTML Help). For this +# to work a browser that supports JavaScript, DHTML, CSS and frames is required +# (i.e. any modern browser). Windows users are probably better off using the +# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can +# further fine-tune the look of the index. As an example, the default style +# sheet generated by doxygen has an example that shows how to put an image at +# the root of the tree instead of the PROJECT_NAME. Since the tree basically has +# the same information as the tab index, you could consider setting +# DISABLE_INDEX to YES when enabling this option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_TREEVIEW = NO + +# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that +# doxygen will group on one line in the generated HTML documentation. +# +# Note that a value of 0 will completely suppress the enum values from appearing +# in the overview section. +# Minimum value: 0, maximum value: 20, default value: 4. +# This tag requires that the tag GENERATE_HTML is set to YES. + +ENUM_VALUES_PER_LINE = 4 + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used +# to set the initial width (in pixels) of the frame in which the tree is shown. +# Minimum value: 0, maximum value: 1500, default value: 250. +# This tag requires that the tag GENERATE_HTML is set to YES. + +TREEVIEW_WIDTH = 250 + +# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to +# external symbols imported via tag files in a separate window. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +EXT_LINKS_IN_WINDOW = NO + +# Use this tag to change the font size of LaTeX formulas included as images in +# the HTML documentation. When you change the font size after a successful +# doxygen run you need to manually remove any form_*.png images from the HTML +# output directory to force them to be regenerated. +# Minimum value: 8, maximum value: 50, default value: 10. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_FONTSIZE = 10 + +# Use the FORMULA_TRANPARENT tag to determine whether or not the images +# generated for formulas are transparent PNGs. Transparent PNGs are not +# supported properly for IE 6.0, but are supported on all modern browsers. +# +# Note that when changing this option you need to delete any form_*.png files in +# the HTML output directory before the changes have effect. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +FORMULA_TRANSPARENT = YES + +# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see +# http://www.mathjax.org) which uses client side Javascript for the rendering +# instead of using prerendered bitmaps. Use this if you do not have LaTeX +# installed or if you want to formulas look prettier in the HTML output. When +# enabled you may also need to install MathJax separately and configure the path +# to it using the MATHJAX_RELPATH option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +USE_MATHJAX = NO + +# When MathJax is enabled you can set the default output format to be used for +# the MathJax output. See the MathJax site (see: +# http://docs.mathjax.org/en/latest/output.html) for more details. +# Possible values are: HTML-CSS (which is slower, but has the best +# compatibility), NativeMML (i.e. MathML) and SVG. +# The default value is: HTML-CSS. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_FORMAT = HTML-CSS + +# When MathJax is enabled you need to specify the location relative to the HTML +# output directory using the MATHJAX_RELPATH option. The destination directory +# should contain the MathJax.js script. For instance, if the mathjax directory +# is located at the same level as the HTML output directory, then +# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax +# Content Delivery Network so you can quickly see the result without installing +# MathJax. However, it is strongly recommended to install a local copy of +# MathJax from http://www.mathjax.org before deployment. +# The default value is: http://cdn.mathjax.org/mathjax/latest. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest + +# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax +# extension names that should be enabled during MathJax rendering. For example +# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_EXTENSIONS = + +# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces +# of code that will be used on startup of the MathJax code. See the MathJax site +# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an +# example see the documentation. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_CODEFILE = + +# When the SEARCHENGINE tag is enabled doxygen will generate a search box for +# the HTML output. The underlying search engine uses javascript and DHTML and +# should work on any modern browser. Note that when using HTML help +# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) +# there is already a search function so this one should typically be disabled. +# For large projects the javascript based search engine can be slow, then +# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to +# search using the keyboard; to jump to the search box use <access key> + S +# (what the <access key> is depends on the OS and browser, but it is typically +# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down +# key> to jump into the search results window, the results can be navigated +# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel +# the search. The filter options can be selected when the cursor is inside the +# search box by pressing <Shift>+<cursor down>. 
Also here use the <cursor keys> +# to select a filter and <Enter> or <escape> to activate or cancel the filter +# option. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +SEARCHENGINE = NO + +# When the SERVER_BASED_SEARCH tag is enabled the search engine will be +# implemented using a web server instead of a web client using Javascript. There +# are two flavours of web server based searching depending on the +# EXTERNAL_SEARCH setting. When disabled, doxygen will generate a PHP script for +# searching and an index file used by the script. When EXTERNAL_SEARCH is +# enabled the indexing and searching needs to be provided by external tools. See +# the section "External Indexing and Searching" for details. +# The default value is: NO. +# This tag requires that the tag SEARCHENGINE is set to YES. + +SERVER_BASED_SEARCH = NO + +# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP +# script for searching. Instead the search results are written to an XML file +# which needs to be processed by an external indexer. Doxygen will invoke an +# external search engine pointed to by the SEARCHENGINE_URL option to obtain the +# search results. +# +# Doxygen ships with an example indexer ( doxyindexer) and search engine +# (doxysearch.cgi) which are based on the open source search engine library +# Xapian (see: http://xapian.org/). +# +# See the section "External Indexing and Searching" for details. +# The default value is: NO. +# This tag requires that the tag SEARCHENGINE is set to YES. + +EXTERNAL_SEARCH = NO + +# The SEARCHENGINE_URL should point to a search engine hosted by a web server +# which will return the search results when EXTERNAL_SEARCH is enabled. +# +# Doxygen ships with an example indexer ( doxyindexer) and search engine +# (doxysearch.cgi) which are based on the open source search engine library +# Xapian (see: http://xapian.org/). See the section "External Indexing and +# Searching" for details. +# This tag requires that the tag SEARCHENGINE is set to YES. + +SEARCHENGINE_URL = + +# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed +# search data is written to a file for indexing by an external tool. With the +# SEARCHDATA_FILE tag the name of this file can be specified. +# The default file is: searchdata.xml. +# This tag requires that the tag SEARCHENGINE is set to YES. + +SEARCHDATA_FILE = searchdata.xml + +# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the +# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is +# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple +# projects and redirect the results back to the right project. +# This tag requires that the tag SEARCHENGINE is set to YES. + +EXTERNAL_SEARCH_ID = + +# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen +# projects other than the one defined by this configuration file, but that are +# all added to the same external search index. Each project needs to have a +# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id of +# to a relative location where the documentation can be found. The format is: +# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ... +# This tag requires that the tag SEARCHENGINE is set to YES. 
+ +EXTRA_SEARCH_MAPPINGS = + +#--------------------------------------------------------------------------- +# Configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES doxygen will generate LaTeX output. +# The default value is: YES. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: latex. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. +# +# Note that when enabling USE_PDFLATEX this option is only used for generating +# bitmaps for formulas in the HTML output, but not in the Makefile that is +# written to the output directory. +# The default file is: latex. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate +# index for LaTeX. +# The default file is: makeindex. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES doxygen generates more compact LaTeX +# documents. This may be useful for small projects and may help to save some +# trees in general. +# The default value is: NO. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used by the +# printer. +# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x +# 14 inches) and executive (7.25 x 10.5 inches). +# The default value is: a4. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names +# that should be included in the LaTeX output. To get the times font for +# instance you can specify +# EXTRA_PACKAGES=times +# If left blank no extra packages will be included. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the +# generated LaTeX document. The header should contain everything until the first +# chapter. If it is left blank doxygen will generate a standard header. See +# section "Doxygen usage" for information on how to let doxygen write the +# default header to a separate file. +# +# Note: Only use a user-defined header if you know what you are doing! The +# following commands have a special meaning inside the header: $title, +# $datetime, $date, $doxygenversion, $projectname, $projectnumber. Doxygen will +# replace them by respectively the title of the page, the current date and time, +# only the current date, the version number of doxygen, the project name (see +# PROJECT_NAME), or the project number (see PROJECT_NUMBER). +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_HEADER = + +# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the +# generated LaTeX document. The footer should contain everything after the last +# chapter. If it is left blank doxygen will generate a standard footer. +# +# Note: Only use a user-defined footer if you know what you are doing! 
+# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_FOOTER = + +# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the LATEX_OUTPUT output +# directory. Note that the files will be copied as-is; there are no commands or +# markers available. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_EXTRA_FILES = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is +# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will +# contain links (just like the HTML output) instead of page references. This +# makes the output suitable for online browsing using a PDF viewer. +# The default value is: YES. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +PDF_HYPERLINKS = NO + +# If the LATEX_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate +# the PDF file directly from the LaTeX files. Set this option to YES to get a +# higher quality PDF documentation. +# The default value is: YES. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode +# command to the generated LaTeX files. This will instruct LaTeX to keep running +# if errors occur, instead of asking the user for help. This option is also used +# when generating formulas in HTML. +# The default value is: NO. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_BATCHMODE = NO + +# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the +# index chapters (such as File Index, Compound Index, etc.) in the output. +# The default value is: NO. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_HIDE_INDICES = NO + +# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source +# code with syntax highlighting in the LaTeX output. +# +# Note that which sources are shown also depends on other settings such as +# SOURCE_BROWSER. +# The default value is: NO. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_SOURCE_CODE = NO + +# The LATEX_BIB_STYLE tag can be used to specify the style to use for the +# bibliography, e.g. plainnat, or ieeetr. See +# http://en.wikipedia.org/wiki/BibTeX and \cite for more info. +# The default value is: plain. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_BIB_STYLE = plain + +#--------------------------------------------------------------------------- +# Configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES doxygen will generate RTF output. The +# RTF output is optimized for Word 97 and may not look too pretty with other RTF +# readers/editors. +# The default value is: NO. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: rtf. +# This tag requires that the tag GENERATE_RTF is set to YES. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES doxygen generates more compact RTF +# documents. This may be useful for small projects and may help to save some +# trees in general. +# The default value is: NO. +# This tag requires that the tag GENERATE_RTF is set to YES. 
+ +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will +# contain hyperlink fields. The RTF file will contain links (just like the HTML +# output) instead of page references. This makes the output suitable for online +# browsing using Word or some other Word compatible readers that support those +# fields. +# +# Note: WordPad (write) and others do not support links. +# The default value is: NO. +# This tag requires that the tag GENERATE_RTF is set to YES. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's config +# file, i.e. a series of assignments. You only have to provide replacements, +# missing definitions are set to their default value. +# +# See also section "Doxygen usage" for information on how to generate the +# default style sheet that doxygen normally uses. +# This tag requires that the tag GENERATE_RTF is set to YES. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an RTF document. Syntax is +# similar to doxygen's config file. A template extensions file can be generated +# using doxygen -e rtf extensionFile. +# This tag requires that the tag GENERATE_RTF is set to YES. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# Configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES doxygen will generate man pages for +# classes and files. +# The default value is: NO. + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. A directory man3 will be created inside the directory specified by +# MAN_OUTPUT. +# The default directory is: man. +# This tag requires that the tag GENERATE_MAN is set to YES. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to the generated +# man pages. In case the manual section does not start with a number, the number +# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is +# optional. +# The default value is: .3. +# This tag requires that the tag GENERATE_MAN is set to YES. + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it +# will generate one additional man file for each entity documented in the real +# man page(s). These additional files only source the real man page, but without +# them the man command would be unable to find the correct page. +# The default value is: NO. +# This tag requires that the tag GENERATE_MAN is set to YES. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# Configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES doxygen will generate an XML file that +# captures the structure of the code including all documentation. +# The default value is: NO. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: xml. +# This tag requires that the tag GENERATE_XML is set to YES. 
+ +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify a XML schema, which can be used by a +# validating XML parser to check the syntax of the XML files. +# This tag requires that the tag GENERATE_XML is set to YES. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify a XML DTD, which can be used by a +# validating XML parser to check the syntax of the XML files. +# This tag requires that the tag GENERATE_XML is set to YES. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES doxygen will dump the program +# listings (including syntax highlighting and cross-referencing information) to +# the XML output. Note that enabling this will significantly increase the size +# of the XML output. +# The default value is: YES. +# This tag requires that the tag GENERATE_XML is set to YES. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# Configuration options related to the DOCBOOK output +#--------------------------------------------------------------------------- + +# If the GENERATE_DOCBOOK tag is set to YES doxygen will generate Docbook files +# that can be used to generate PDF. +# The default value is: NO. + +GENERATE_DOCBOOK = NO + +# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in +# front of it. +# The default directory is: docbook. +# This tag requires that the tag GENERATE_DOCBOOK is set to YES. + +DOCBOOK_OUTPUT = docbook + +#--------------------------------------------------------------------------- +# Configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES doxygen will generate an AutoGen +# Definitions (see http://autogen.sf.net) file that captures the structure of +# the code including all documentation. Note that this feature is still +# experimental and incomplete at the moment. +# The default value is: NO. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# Configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES doxygen will generate a Perl module +# file that captures the structure of the code including all documentation. +# +# Note that this feature is still experimental and incomplete at the moment. +# The default value is: NO. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES doxygen will generate the necessary +# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI +# output from the Perl module output. +# The default value is: NO. +# This tag requires that the tag GENERATE_PERLMOD is set to YES. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be nicely +# formatted so it can be parsed by a human reader. This is useful if you want to +# understand what is going on. On the other hand, if this tag is set to NO the +# size of the Perl module output will be much smaller and Perl will parse it +# just the same. +# The default value is: YES. +# This tag requires that the tag GENERATE_PERLMOD is set to YES. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file are +# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. 
This is useful +# so different doxyrules.make files included by the same Makefile don't +# overwrite each other's variables. +# This tag requires that the tag GENERATE_PERLMOD is set to YES. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES doxygen will evaluate all +# C-preprocessor directives found in the sources and include files. +# The default value is: YES. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES doxygen will expand all macro names +# in the source code. If set to NO only conditional compilation will be +# performed. Macro expansion can be done in a controlled way by setting +# EXPAND_ONLY_PREDEF to YES. +# The default value is: NO. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then +# the macro expansion is limited to the macros specified with the PREDEFINED and +# EXPAND_AS_DEFINED tags. +# The default value is: NO. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES the includes files in the +# INCLUDE_PATH will be searched if a #include is found. +# The default value is: YES. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by the +# preprocessor. +# This tag requires that the tag SEARCH_INCLUDES is set to YES. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will be +# used. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that are +# defined before the preprocessor is started (similar to the -D option of e.g. +# gcc). The argument of the tag is a list of macros of the form: name or +# name=definition (no spaces). If the definition and the "=" are omitted, "=1" +# is assumed. To prevent a macro definition from being undefined via #undef or +# recursively expanded use the := operator instead of the = operator. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this +# tag can be used to specify a list of macro names that should be expanded. The +# macro definition that is found in the sources will be used. Use the PREDEFINED +# tag if you want to use a different macro definition that overrules the +# definition found in the source code. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will +# remove all refrences to function-like macros that are alone on a line, have an +# all uppercase name, and do not end with a semicolon. 
Such function macros are +# typically used for boiler-plate code, and will confuse the parser if not +# removed. +# The default value is: YES. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration options related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES tag can be used to specify one or more tag files. For each tag +# file the location of the external documentation should be added. The format of +# a tag file without this location is as follows: +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where loc1 and loc2 can be relative or absolute paths or URLs. See the +# section "Linking to external documentation" for more information about the use +# of tag files. +# Note: Each tag file must have an unique name (where the name does NOT include +# the path). If a tag file is not located in the directory in which doxygen is +# run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create a +# tag file that is based on the input files it reads. See section "Linking to +# external documentation" for more information about the usage of tag files. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external class will be listed in the +# class index. If set to NO only the inherited external classes will be listed. +# The default value is: NO. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed in +# the modules index. If set to NO, only the current project's groups will be +# listed. +# The default value is: YES. + +EXTERNAL_GROUPS = YES + +# If the EXTERNAL_PAGES tag is set to YES all external pages will be listed in +# the related pages index. If set to NO, only the current project's pages will +# be listed. +# The default value is: YES. + +EXTERNAL_PAGES = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of 'which perl'). +# The default file (with absolute path) is: /usr/bin/perl. + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES doxygen will generate a class diagram +# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to +# NO turns the diagrams off. Note that this option also works with HAVE_DOT +# disabled, but it is recommended to install and use dot, since it yields more +# powerful graphs. +# The default value is: YES. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see: +# http://www.mcternan.me.uk/mscgen/)) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide inheritance +# and usage relations if the target is undocumented or is not a class. 
+# The default value is: YES. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz (see: +# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent +# Bell Labs. The other options in this section have no effect if this option is +# set to NO +# The default value is: NO. + +HAVE_DOT = %HAVE_DOT% + +# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed +# to run in parallel. When set to 0 doxygen will base this on the number of +# processors available in the system. You can set it explicitly to a value +# larger than 0 to get control over the balance between CPU load and processing +# speed. +# Minimum value: 0, maximum value: 32, default value: 0. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_NUM_THREADS = 0 + +# When you want a differently looking font n the dot files that doxygen +# generates you can specify the font name using DOT_FONTNAME. You need to make +# sure dot is able to find the font, which can be done by putting it in a +# standard location or by setting the DOTFONTPATH environment variable or by +# setting DOT_FONTPATH to the directory containing the font. +# The default value is: Helvetica. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of +# dot graphs. +# Minimum value: 4, maximum value: 24, default value: 10. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the default font as specified with +# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set +# the path where dot can find it using this tag. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_FONTPATH = + +# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for +# each documented class showing the direct and indirect inheritance relations. +# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a +# graph for each documented class showing the direct and indirect implementation +# dependencies (inheritance, containment, and class references variables) of the +# class with other documented classes. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for +# groups, showing the direct groups dependencies. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +UML_LOOK = %UML_LOOK% + +# If the UML_LOOK tag is enabled, the fields and methods are shown inside the +# class node. If there are many fields or methods and many nodes the graph may +# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the +# number of items for each type to make the size more manageable. Set this to 0 +# for no limit. 
Note that the threshold may be exceeded by 50% before the limit +# is enforced. So when you set the threshold to 10, up to 15 fields may appear, +# but if the number exceeds 15, the total amount of fields shown is limited to +# 10. +# Minimum value: 0, maximum value: 100, default value: 10. +# This tag requires that the tag HAVE_DOT is set to YES. + +UML_LIMIT_NUM_FIELDS = 10 + +# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and +# collaboration graphs will show the relations between templates and their +# instances. +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +TEMPLATE_RELATIONS = YES + +# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to +# YES then doxygen will generate a graph for each documented file showing the +# direct and indirect include dependencies of the file with other documented +# files. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +INCLUDE_GRAPH = YES + +# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are +# set to YES then doxygen will generate a graph for each documented file showing +# the direct and indirect include dependencies of the file with other documented +# files. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH tag is set to YES then doxygen will generate a call +# dependency graph for every global function or class method. +# +# Note that enabling this option will significantly increase the time of a run. +# So in most cases it will be better to enable call graphs for selected +# functions only using the \callgraph command. +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller +# dependency graph for every global function or class method. +# +# Note that enabling this option will significantly increase the time of a run. +# So in most cases it will be better to enable caller graphs for selected +# functions only using the \callergraph command. +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will graphical +# hierarchy of all classes instead of a textual one. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the +# dependencies a directory has on other directories in a graphical way. The +# dependency relations are determined by the #include relations between the +# files in the directories. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. +# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order +# to make the SVG files visible in IE 9+ (other browsers do not have this +# requirement). +# Possible values are: png, jpg, gif and svg. +# The default value is: png. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_IMAGE_FORMAT = png + +# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to +# enable generation of interactive SVG images that allow zooming and panning. 
+# +# Note that this requires a modern browser other than Internet Explorer. Tested +# and working are Firefox, Chrome, Safari, and Opera. +# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make +# the SVG files visible. Older versions of IE do not have SVG support. +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +INTERACTIVE_SVG = NO + +# The DOT_PATH tag can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the \dotfile +# command). +# This tag requires that the tag HAVE_DOT is set to YES. + +DOTFILE_DIRS = + +# The MSCFILE_DIRS tag can be used to specify one or more directories that +# contain msc files that are included in the documentation (see the \mscfile +# command). + +MSCFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes +# that will be shown in the graph. If the number of nodes in a graph becomes +# larger than this value, doxygen will truncate the graph, which is visualized +# by representing a node as a red box. Note that doxygen if the number of direct +# children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note that +# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. +# Minimum value: 0, maximum value: 10000, default value: 50. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs +# generated by dot. A depth value of 3 means that only nodes reachable from the +# root by following a path via at most 3 edges will be shown. Nodes that lay +# further from the root node will be omitted. Note that setting this option to 1 +# or 2 may greatly reduce the computation time needed for large code bases. Also +# note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. +# Minimum value: 0, maximum value: 1000, default value: 0. +# This tag requires that the tag HAVE_DOT is set to YES. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not seem +# to support this out of the box. +# +# Warning: Depending on the platform used, enabling this option may lead to +# badly anti-aliased labels on the edges of a graph (i.e. they become hard to +# read). +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) support +# this, this feature is disabled by default. +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page +# explaining the meaning of the various boxes and arrows in the dot generated +# graphs. +# The default value is: YES. 
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GENERATE_LEGEND        = YES
+
+# If the DOT_CLEANUP tag is set to YES doxygen will remove the intermediate dot
+# files that are used to generate the various graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_CLEANUP            = YES
diff --git a/3rdparty/jsoncpp/doc/footer.html b/3rdparty/jsoncpp/doc/footer.html
new file mode 100644
index 00000000000..c83e5bcc0ea
--- /dev/null
+++ b/3rdparty/jsoncpp/doc/footer.html
@@ -0,0 +1,3 @@
+<hr>
+</body>
+</html>
diff --git a/3rdparty/jsoncpp/doc/header.html b/3rdparty/jsoncpp/doc/header.html
new file mode 100644
index 00000000000..6bd2ad9e32a
--- /dev/null
+++ b/3rdparty/jsoncpp/doc/header.html
@@ -0,0 +1,24 @@
+<html>
+<head>
+<title>
+JsonCpp - JSON data format manipulation library
+</title>
+<link href="doxygen.css" rel="stylesheet" type="text/css">
+<link href="tabs.css" rel="stylesheet" type="text/css">
+</head>
+
+<body bgcolor="#ffffff">
+<table width="100%">
+  <tr>
+    <td width="40%" align="left" valign="center">
+    <a href="https://github.com/open-source-parsers/jsoncpp">
+    JsonCpp project page
+    </a>
+    </td>
+    <td width="40%" align="right" valign="center">
+    <a href="https://github.com/open-source-parsers/jsoncpp">JsonCpp home page</a>
+    </td>
+  </tr>
+</table>
+
+<hr>
diff --git a/3rdparty/jsoncpp/doc/jsoncpp.dox b/3rdparty/jsoncpp/doc/jsoncpp.dox
new file mode 100644
index 00000000000..a9ed47ec4e9
--- /dev/null
+++ b/3rdparty/jsoncpp/doc/jsoncpp.dox
@@ -0,0 +1,119 @@
+/**
+\mainpage
+\section _intro Introduction
+
+<a HREF="http://www.json.org/">JSON (JavaScript Object Notation)</a>
+ is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, an ordered sequence of values, and
+a collection of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+    // Default encoding for text
+    "encoding" : "UTF-8",
+
+    // Plug-ins loaded at start-up
+    "plug-ins" : [
+        "python",
+        "c++",
+        "ruby"
+        ],
+
+    // Tab indent size
+    "indent" : { "length" : 3, "use_space": true }
+}
+\endverbatim
+<code>jsoncpp</code> supports comments as <i>meta-data</i>.
+
+\section _features Features
+- read and write JSON documents
+- attach C++-style comments to elements during parsing
+- rewrite JSON documents, preserving the original comments
+
+Note: comments used to be supported in JSON but were removed for
+portability (C-like comments are not supported in Python). Since
+comments are useful in configuration/input files, this feature was
+preserved.
+
+\section _example Code example
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+    // report to the user the failure and its location in the document.
+    std::cout  << "Failed to parse configuration\n"
+               << reader.getFormattedErrorMessages();
+    return;
+}
+
+// Get the value of the member of root named 'encoding'; return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins'; return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, to create the new configuration document:
+// Since Json::Value has an implicit constructor for all value types, it is not
+// necessary to explicitly construct the Json::Value object:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+<a HREF="https://github.com/open-source-parsers/jsoncpp/blob/master/README.md">README.md</a> in the top directory of the project.
+
+The latest version of the source is available in the project's GitHub repository:
+<a HREF="https://github.com/open-source-parsers/jsoncpp/">
+jsoncpp</a>
+
+\section _news What's New?
+The description of the latest changes can be found in
+<a HREF="https://github.com/open-source-parsers/jsoncpp/wiki/NEWS">
+  the NEWS wiki
+</a>.
+
+\section _rlinks Related links
+- <a HREF="http://www.json.org/">JSON</a> Specification and alternate language implementations.
+- <a HREF="http://www.yaml.org/">YAML</a> A data format designed for human readability.
+- <a HREF="http://www.cl.cam.ac.uk/~mgk25/unicode.html">UTF-8 and Unicode FAQ</a>.
+
+\section _plinks Old project links
+- <a href="https://sourceforge.net/projects/jsoncpp/">https://sourceforge.net/projects/jsoncpp/</a>
+- <a href="http://jsoncpp.sourceforge.net">http://jsoncpp.sourceforge.net</a>
+- <a href="http://sourceforge.net/projects/jsoncpp/files/">http://sourceforge.net/projects/jsoncpp/files/</a>
- <a href="http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/">http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/</a>
+- <a href="http://jsoncpp.sourceforge.net/old.html">http://jsoncpp.sourceforge.net/old.html</a>
+
+\section _license License
+See file <a href="https://github.com/open-source-parsers/jsoncpp/blob/master/LICENSE"><code>LICENSE</code></a> in the top directory of the project.
+
+JsonCpp is basically licensed under the MIT license, or public domain if desired
+and recognized in your jurisdiction.
+
+\author Baptiste Lepilleur <blep@users.sourceforge.net> (originator)
+*/
diff --git a/3rdparty/jsoncpp/doc/readme.txt b/3rdparty/jsoncpp/doc/readme.txt
new file mode 100644
index 00000000000..0e42cdfb4cf
--- /dev/null
+++ b/3rdparty/jsoncpp/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
diff --git a/3rdparty/jsoncpp/doc/roadmap.dox b/3rdparty/jsoncpp/doc/roadmap.dox
new file mode 100644
index 00000000000..12294bbbd29
--- /dev/null
+++ b/3rdparty/jsoncpp/doc/roadmap.dox
@@ -0,0 +1,3 @@
+/*! 
\page roadmap JsonCpp roadmap + Moved to: https://github.com/open-source-parsers/jsoncpp/wiki/Roadmap +*/ diff --git a/3rdparty/jsoncpp/doxybuild.py b/3rdparty/jsoncpp/doxybuild.py new file mode 100644 index 00000000000..0b61c397e43 --- /dev/null +++ b/3rdparty/jsoncpp/doxybuild.py @@ -0,0 +1,169 @@ +"""Script to generate doxygen documentation. +""" +from __future__ import print_function +from devtools import tarball +import re +import os +import os.path +import sys +import shutil + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print("Can't read source file %s"%sourcefile) + raise + for (k,v) in list(dict.items()): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print("Can't write target file %s"%targetfile) + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print('Running:', ' '.join( cmd )) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print('Documentation generation failed') + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print('Documentation generation failed:') + print(stdout) + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print('Deleting directory:', output_dir) + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print(open(warning_log_path, 'rb').read()) + index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html')) + print('Generated documentation can be found in:') + print(index_path) + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print('Generating doc tarball to', tarball_path) + tarball_sources = [ + output_dir, + 'README.txt', + 'LICENSE', + 'NEWS.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/3rdparty/jsoncpp/include/CMakeLists.txt b/3rdparty/jsoncpp/include/CMakeLists.txt new file mode 100644 index 00000000000..7dde10d6feb --- /dev/null +++ b/3rdparty/jsoncpp/include/CMakeLists.txt @@ -0,0 +1,2 @@ +FILE(GLOB INCLUDE_FILES "json/*.h") +INSTALL(FILES ${INCLUDE_FILES} DESTINATION ${INCLUDE_INSTALL_DIR}/json) diff --git a/3rdparty/jsoncpp/include/json/assertions.h b/3rdparty/jsoncpp/include/json/assertions.h new file mode 100644 index 00000000000..5ef7e7bb7d4 --- /dev/null +++ b/3rdparty/jsoncpp/include/json/assertions.h @@ -0,0 +1,41 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_ASSERTIONS_H_INCLUDED +#define CPPTL_JSON_ASSERTIONS_H_INCLUDED + +#include <stdlib.h> + +#if !defined(JSON_IS_AMALGAMATION) +#include "config.h" +#endif // if !defined(JSON_IS_AMALGAMATION) + +#if JSON_USE_EXCEPTION +#include <stdexcept> +#define JSON_ASSERT(condition) \ + assert(condition); // @todo <= change this into an exception throw +#define JSON_FAIL_MESSAGE(message) throw std::runtime_error(message); +#else // JSON_USE_EXCEPTION +#define JSON_ASSERT(condition) assert(condition); + +// The call to assert() will show the failure message in debug builds. In +// release bugs we write to invalid memory in order to crash hard, so that a +// debugger or crash reporter gets the chance to take over. We still call exit() +// afterward in order to tell the compiler that this macro doesn't return. +#define JSON_FAIL_MESSAGE(message) \ + { \ + assert(false&& message); \ + strcpy(reinterpret_cast<char*>(666), message); \ + exit(123); \ + } + +#endif + +#define JSON_ASSERT_MESSAGE(condition, message) \ + if (!(condition)) { \ + JSON_FAIL_MESSAGE(message) \ + } + +#endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED diff --git a/3rdparty/jsoncpp/include/json/autolink.h b/3rdparty/jsoncpp/include/json/autolink.h new file mode 100644 index 00000000000..6fcc8afac57 --- /dev/null +++ b/3rdparty/jsoncpp/include/json/autolink.h @@ -0,0 +1,25 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
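// [Editorial illustration: not part of the upstream jsoncpp sources.]
// A minimal sketch of how the assertion macros declared in assertions.h above
// are intended to be used inside the library; the condition and message here
// are made up:
//
//   JSON_ASSERT_MESSAGE(value.isArray(), "expected an array value");
//
// With JSON_USE_EXCEPTION non-zero this throws std::runtime_error when the
// condition is false; otherwise it asserts and then deliberately crashes so a
// debugger or crash reporter can take over, as the comment above
// JSON_FAIL_MESSAGE explains.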
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_AUTOLINK_H_INCLUDED +#define JSON_AUTOLINK_H_INCLUDED + +#include "config.h" + +#ifdef JSON_IN_CPPTL +#include <cpptl/cpptl_autolink.h> +#endif + +#if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && \ + !defined(JSON_IN_CPPTL) +#define CPPTL_AUTOLINK_NAME "json" +#undef CPPTL_AUTOLINK_DLL +#ifdef JSON_DLL +#define CPPTL_AUTOLINK_DLL +#endif +#include "autolink.h" +#endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/3rdparty/jsoncpp/include/json/config.h b/3rdparty/jsoncpp/include/json/config.h new file mode 100644 index 00000000000..afd3a45607c --- /dev/null +++ b/3rdparty/jsoncpp/include/json/config.h @@ -0,0 +1,112 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_CONFIG_H_INCLUDED +#define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of +/// std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool +/// based allocator. +/// The memory pools allocator used optimization (initializing Value and +/// ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +// If non-zero, the library uses exceptions to report bad input instead of C +// assertion macros. The default is to use exceptions. +#ifndef JSON_USE_EXCEPTION +#define JSON_USE_EXCEPTION 1 +#endif + +/// If defined, indicates that the source file is amalgated +/// to prevent private header inclusion. +/// Remarks: it is automatically defined in the generated amalgated header. +// #define JSON_IS_AMALGAMATION + +#ifdef JSON_IN_CPPTL +#include <cpptl/config.h> +#ifndef JSON_USE_CPPTL +#define JSON_USE_CPPTL 1 +#endif +#endif + +#ifdef JSON_IN_CPPTL +#define JSON_API CPPTL_API +#elif defined(JSON_DLL_BUILD) +#if defined(_MSC_VER) +#define JSON_API __declspec(dllexport) +#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING +#endif // if defined(_MSC_VER) +#elif defined(JSON_DLL) +#if defined(_MSC_VER) +#define JSON_API __declspec(dllimport) +#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING +#endif // if defined(_MSC_VER) +#endif // ifdef JSON_IN_CPPTL +#if !defined(JSON_API) +#define JSON_API +#endif + +// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for +// integer +// Storages, and 64 bits integer support is disabled. +// #define JSON_NO_INT64 1 + +#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 +// Microsoft Visual Studio 6 only support conversion from __int64 to double +// (no conversion from unsigned __int64). 
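// [Editorial note: not part of the upstream jsoncpp sources.] The
// JSON_USE_EXCEPTION switch defined earlier in this header is guarded by
// #ifndef, so a client build that prefers hard aborts over throws can define
// it to 0 before including any jsoncpp header (and consistently when building
// the library itself), e.g.
//
//   #define JSON_USE_EXCEPTION 0
//   #include <json/json.h>
//
// after which JSON_FAIL_MESSAGE in assertions.h aborts instead of throwing.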
+#define JSON_USE_INT64_DOUBLE_CONVERSION 1 +// Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255' +// characters in the debug information) +// All projects I've ever seen with VS6 were using this globally (not bothering +// with pragma push/pop). +#pragma warning(disable : 4786) +#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 + +#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 +/// Indicates that the following function is deprecated. +#define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) +#endif + +#if !defined(JSONCPP_DEPRECATED) +#define JSONCPP_DEPRECATED(message) +#endif // if !defined(JSONCPP_DEPRECATED) + +namespace Json { +typedef int Int; +typedef unsigned int UInt; +#if defined(JSON_NO_INT64) +typedef int LargestInt; +typedef unsigned int LargestUInt; +#undef JSON_HAS_INT64 +#else // if defined(JSON_NO_INT64) +// For Microsoft Visual use specific types as long long is not supported +#if defined(_MSC_VER) // Microsoft Visual Studio +typedef __int64 Int64; +typedef unsigned __int64 UInt64; +#else // if defined(_MSC_VER) // Other platforms, use long long +typedef long long int Int64; +typedef unsigned long long int UInt64; +#endif // if defined(_MSC_VER) +typedef Int64 LargestInt; +typedef UInt64 LargestUInt; +#define JSON_HAS_INT64 +#endif // if defined(JSON_NO_INT64) +} // end namespace Json + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/3rdparty/jsoncpp/include/json/features.h b/3rdparty/jsoncpp/include/json/features.h new file mode 100644 index 00000000000..1bb7bb61486 --- /dev/null +++ b/3rdparty/jsoncpp/include/json/features.h @@ -0,0 +1,57 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +#define CPPTL_JSON_FEATURES_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +#include "forwards.h" +#endif // if !defined(JSON_IS_AMALGAMATION) + +namespace Json { + +/** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ +class JSON_API Features { +public: + /** \brief A configuration that allows all features and assumes all strings + * are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON + * specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c + /// false. + bool strictRoot_; + + /// \c true if dropped null placeholders are allowed. Default: \c false. + bool allowDroppedNullPlaceholders_; + + /// \c true if numeric object key are allowed. Default: \c false. 
+ bool allowNumericKeys_; +}; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/3rdparty/jsoncpp/include/json/forwards.h b/3rdparty/jsoncpp/include/json/forwards.h new file mode 100644 index 00000000000..84a26cd2f72 --- /dev/null +++ b/3rdparty/jsoncpp/include/json/forwards.h @@ -0,0 +1,43 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_FORWARDS_H_INCLUDED +#define JSON_FORWARDS_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +#include "config.h" +#endif // if !defined(JSON_IS_AMALGAMATION) + +namespace Json { + +// writer.h +class FastWriter; +class StyledWriter; + +// reader.h +class Reader; + +// features.h +class Features; + +// value.h +typedef unsigned int ArrayIndex; +class StaticString; +class Path; +class PathArgument; +class Value; +class ValueIteratorBase; +class ValueIterator; +class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP +class ValueMapAllocator; +class ValueInternalLink; +class ValueInternalArray; +class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/3rdparty/jsoncpp/include/json/json.h b/3rdparty/jsoncpp/include/json/json.h new file mode 100644 index 00000000000..8f10ac2bf72 --- /dev/null +++ b/3rdparty/jsoncpp/include/json/json.h @@ -0,0 +1,15 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_JSON_H_INCLUDED +#define JSON_JSON_H_INCLUDED + +#include "autolink.h" +#include "value.h" +#include "reader.h" +#include "writer.h" +#include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/3rdparty/jsoncpp/include/json/reader.h b/3rdparty/jsoncpp/include/json/reader.h new file mode 100644 index 00000000000..98814d50e29 --- /dev/null +++ b/3rdparty/jsoncpp/include/json/reader.h @@ -0,0 +1,276 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_READER_H_INCLUDED +#define CPPTL_JSON_READER_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +#include "features.h" +#include "value.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +#include <deque> +#include <iosfwd> +#include <stack> +#include <string> + +// Disable warning C4251: <data member>: <type> needs to have dll-interface to +// be used by... +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +#pragma warning(push) +#pragma warning(disable : 4251) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + +namespace Json { + +/** \brief Unserialize a <a HREF="http://www.json.org">JSON</a> document into a + *Value. + * + */ +class JSON_API Reader { +public: + typedef char Char; + typedef const Char* Location; + + /** \brief An error tagged with where in the JSON text it was encountered. + * + * The offsets give the [start, limit) range of bytes within the text. Note + * that this is bytes, not codepoints. + * + */ + struct StructuredError { + size_t offset_start; + size_t offset_limit; + std::string message; + }; + + /** \brief Constructs a Reader allowing all features + * for parsing. 
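 *
 * [Editorial illustration: not part of the upstream header.] A minimal sketch
 * combining Reader with Json::Features from features.h; the document literal
 * is made up:
 * \code
 * Json::Features f = Json::Features::strictMode();
 * f.allowComments_ = true;              // relax a single strict-mode rule
 * Json::Reader reader(f);
 * Json::Value root;
 * bool ok = reader.parse("{ \"k\": [1, 2, 3] }", root);
 * \endcode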
+ */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader(const Features& features); + + /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a> + * document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them + * back during + * serialization, \c false to discard comments. + * This parameter is ignored if + * Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an + * error occurred. + */ + bool + parse(const std::string& document, Value& root, bool collectComments = true); + + /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a> + document. + * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the + document to read. + * \param endDoc Pointer on the end of the UTF-8 encoded string of the + document to read. + \ Must be >= beginDoc. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them + back during + * serialization, \c false to discard comments. + * This parameter is ignored if + Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an + error occurred. + */ + bool parse(const char* beginDoc, + const char* endDoc, + Value& root, + bool collectComments = true); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse(std::istream& is, Value& root, bool collectComments = true); + + /** \brief Returns a user friendly string that list errors in the parsed + * document. + * \return Formatted error message with the list of errors with their location + * in + * the parsed document. An empty string is returned if no error + * occurred + * during parsing. + * \deprecated Use getFormattedErrorMessages() instead (typo fix). + */ + JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") + std::string getFormatedErrorMessages() const; + + /** \brief Returns a user friendly string that list errors in the parsed + * document. + * \return Formatted error message with the list of errors with their location + * in + * the parsed document. An empty string is returned if no error + * occurred + * during parsing. + */ + std::string getFormattedErrorMessages() const; + + /** \brief Returns a vector of structured erros encounted while parsing. + * \return A (possibly empty) vector of StructuredError objects. Currently + * only one error can be returned, but the caller should tolerate + * multiple + * errors. This can occur if the parser recovers from a non-fatal + * parse error and then encounters additional errors. + */ + std::vector<StructuredError> getStructuredErrors() const; + + /** \brief Add a semantic error message. + * \param value JSON Value location associated with the error + * \param message The error message. + * \return \c true if the error was successfully added, \c false if the + * Value offset exceeds the document size. + */ + bool pushError(const Value& value, const std::string& message); + + /** \brief Add a semantic error message with extra context. + * \param value JSON Value location associated with the error + * \param message The error message. 
+ * \param extra Additional JSON Value location to contextualize the error + * \return \c true if the error was successfully added, \c false if either + * Value offset exceeds the document size. + */ + bool pushError(const Value& value, const std::string& message, const Value& extra); + + /** \brief Return whether there are any errors. + * \return \c true if there are no errors to report \c false if + * errors have occurred. + */ + bool good() const; + +private: + enum TokenType { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque<ErrorInfo> Errors; + + bool expectToken(TokenType type, Token& token, const char* message); + bool readToken(Token& token); + void skipSpaces(); + bool match(Location pattern, int patternLength); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject(Token& token); + bool readArray(Token& token); + bool decodeNumber(Token& token); + bool decodeNumber(Token& token, Value& decoded); + bool decodeString(Token& token); + bool decodeString(Token& token, std::string& decoded); + bool decodeDouble(Token& token); + bool decodeDouble(Token& token, Value& decoded); + bool decodeUnicodeCodePoint(Token& token, + Location& current, + Location end, + unsigned int& unicode); + bool decodeUnicodeEscapeSequence(Token& token, + Location& current, + Location end, + unsigned int& unicode); + bool addError(const std::string& message, Token& token, Location extra = 0); + bool recoverFromError(TokenType skipUntilToken); + bool addErrorAndRecover(const std::string& message, + Token& token, + TokenType skipUntilToken); + void skipUntilSpace(); + Value& currentValue(); + Char getNextChar(); + void + getLocationLineAndColumn(Location location, int& line, int& column) const; + std::string getLocationLineAndColumn(Location location) const; + void addComment(Location begin, Location end, CommentPlacement placement); + void skipCommentTokens(Token& token); + + typedef std::stack<Value*> Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value* lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; +}; + +/** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. 
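 *
 * [Editorial illustration: not part of the upstream header.] Unlike this
 * operator, Json::Reader::parse() reports failure by returning \c false rather
 * than throwing; a sketch of inspecting the StructuredError list declared
 * earlier in this header, where 'text' is a made-up input string:
 * \code
 * Json::Reader reader;
 * Json::Value root;
 * if (!reader.parse(text, root)) {
 *   std::vector<Json::Reader::StructuredError> errs = reader.getStructuredErrors();
 *   for (size_t i = 0; i < errs.size(); ++i)
 *     std::cerr << "bytes [" << errs[i].offset_start << ", " << errs[i].offset_limit
 *               << "): " << errs[i].message << std::endl;
 * }
 * \endcode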
+ \see Json::operator<<() +*/ +JSON_API std::istream& operator>>(std::istream&, Value&); + +} // namespace Json + +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +#pragma warning(pop) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/3rdparty/jsoncpp/include/json/value.h b/3rdparty/jsoncpp/include/json/value.h new file mode 100644 index 00000000000..197a85614df --- /dev/null +++ b/3rdparty/jsoncpp/include/json/value.h @@ -0,0 +1,1088 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_H_INCLUDED +#define CPPTL_JSON_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +#include "forwards.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +#include <string> +#include <vector> + +#ifndef JSON_USE_CPPTL_SMALLMAP +#include <map> +#else +#include <cpptl/smallmap.h> +#endif +#ifdef JSON_USE_CPPTL +#include <cpptl/forwards.h> +#endif + +// Disable warning C4251: <data member>: <type> needs to have dll-interface to +// be used by... +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +#pragma warning(push) +#pragma warning(disable : 4251) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + +/** \brief Type of the value held by a Value object. + */ +enum ValueType { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). +}; + +enum CommentPlacement { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for + /// root value) + numberOfCommentPlacement +}; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator<const char *> EnumMemberNames; +// typedef CppTL::AnyEnumerator<const Value &> EnumValues; +//# endif + +/** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ +class JSON_API StaticString { +public: + explicit StaticString(const char* czstring) : str_(czstring) {} + + operator const char*() const { return str_; } + + const char* c_str() const { return str_; } + +private: + const char* str_; +}; + +/** \brief Represents a <a HREF="http://www.json.org">JSON</a> value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). 
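 *
 * [Editorial illustration: not part of the upstream header.] A short sketch of
 * the discriminated-union behaviour described here; the values are made up:
 * \code
 * Json::Value v;                 // nullValue
 * v = 42;                        // now intValue
 * bool isInt = v.isIntegral();   // true
 * int n = v.asInt();             // 42
 * v = "text";                    // now stringValue
 * bool isStr = v.isString();     // true
 * \endcode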
+ * + * values of an #objectValue or #arrayValue can be accessed using operator[]() + *methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the + *required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. + */ +class JSON_API Value { + friend class ValueIteratorBase; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +#endif +public: + typedef std::vector<std::string> Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; +#if defined(JSON_HAS_INT64) + typedef Json::UInt64 UInt64; + typedef Json::Int64 Int64; +#endif // defined(JSON_HAS_INT64) + typedef Json::LargestInt LargestInt; + typedef Json::LargestUInt LargestUInt; + typedef Json::ArrayIndex ArrayIndex; + + static const Value& null; + /// Minimum signed integer value that can be stored in a Json::Value. + static const LargestInt minLargestInt; + /// Maximum signed integer value that can be stored in a Json::Value. + static const LargestInt maxLargestInt; + /// Maximum unsigned integer value that can be stored in a Json::Value. + static const LargestUInt maxLargestUInt; + + /// Minimum signed int value that can be stored in a Json::Value. + static const Int minInt; + /// Maximum signed int value that can be stored in a Json::Value. + static const Int maxInt; + /// Maximum unsigned int value that can be stored in a Json::Value. + static const UInt maxUInt; + +#if defined(JSON_HAS_INT64) + /// Minimum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 minInt64; + /// Maximum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 maxInt64; + /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. + static const UInt64 maxUInt64; +#endif // defined(JSON_HAS_INT64) + +private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +#ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString { + public: + enum DuplicationPolicy { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString(ArrayIndex index); + CZString(const char* cstr, DuplicationPolicy allocate); + CZString(const CZString& other); + ~CZString(); + CZString& operator=(CZString other); + bool operator<(const CZString& other) const; + bool operator==(const CZString& other) const; + ArrayIndex index() const; + const char* c_str() const; + bool isStaticString() const; + + private: + void swap(CZString& other); + const char* cstr_; + ArrayIndex index_; + }; + +public: +#ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map<CZString, Value> ObjectValues; +#else + typedef CppTL::SmallMap<CZString, Value> ObjectValues; +#endif // ifndef JSON_USE_CPPTL_SMALLMAP +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. +This is useful since clear() and resize() will not alter types. 
+ + Examples: +\code +Json::Value null_value; // null +Json::Value arr_value(Json::arrayValue); // [] +Json::Value obj_value(Json::objectValue); // {} +\endcode + */ + Value(ValueType type = nullValue); + Value(Int value); + Value(UInt value); +#if defined(JSON_HAS_INT64) + Value(Int64 value); + Value(UInt64 value); +#endif // if defined(JSON_HAS_INT64) + Value(double value); + Value(const char* value); + Value(const char* beginValue, const char* endValue); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value(const StaticString& value); + Value(const std::string& value); +#ifdef JSON_USE_CPPTL + Value(const CppTL::ConstString& value); +#endif + Value(bool value); + Value(const Value& other); + ~Value(); + + Value& operator=(Value other); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. + void swap(Value& other); + + ValueType type() const; + + bool operator<(const Value& other) const; + bool operator<=(const Value& other) const; + bool operator>=(const Value& other) const; + bool operator>(const Value& other) const; + + bool operator==(const Value& other) const; + bool operator!=(const Value& other) const; + + int compare(const Value& other) const; + + const char* asCString() const; + std::string asString() const; +#ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +#endif + Int asInt() const; + UInt asUInt() const; +#if defined(JSON_HAS_INT64) + Int64 asInt64() const; + UInt64 asUInt64() const; +#endif // if defined(JSON_HAS_INT64) + LargestInt asLargestInt() const; + LargestUInt asLargestUInt() const; + float asFloat() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isInt64() const; + bool isUInt() const; + bool isUInt64() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo(ValueType other) const; + + /// Number of values in array or object + ArrayIndex size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize(ArrayIndex size); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are + /// inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value& operator[](ArrayIndex index); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are + /// inserted + /// in the array so that its size is index+1. 
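// [Editorial illustration: not part of the upstream header.] The automatic
// growth described above, as a small sketch:
//
//   Json::Value arr(Json::arrayValue);
//   arr[2u] = "c";          // arr is now [null, null, "c"], size() == 3
//   arr.append(4);          // arr is now [null, null, "c", 4]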
+ /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value& operator[](int index); + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value& operator[](ArrayIndex index) const; + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value& operator[](int index) const; + + /// If the array contains at least index+1 elements, returns the element + /// value, + /// otherwise returns defaultValue. + Value get(ArrayIndex index, const Value& defaultValue) const; + /// Return true if index < size(). + bool isValidIndex(ArrayIndex index) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value& append(const Value& value); + + /// Access an object value by name, create a null member if it does not exist. + Value& operator[](const char* key); + /// Access an object value by name, returns null if there is no member with + /// that name. + const Value& operator[](const char* key) const; + /// Access an object value by name, create a null member if it does not exist. + Value& operator[](const std::string& key); + /// Access an object value by name, returns null if there is no member with + /// that name. + const Value& operator[](const std::string& key) const; + /** \brief Access an object value by name, create a null member if it does not + exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. + * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value& operator[](const StaticString& key); +#ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value& operator[](const CppTL::ConstString& key); + /// Access an object value by name, returns null if there is no member with + /// that name. + const Value& operator[](const CppTL::ConstString& key) const; +#endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get(const char* key, const Value& defaultValue) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get(const std::string& key, const Value& defaultValue) const; +#ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get(const CppTL::ConstString& key, const Value& defaultValue) const; +#endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember(const char* key); + /// Same as removeMember(const char*) + Value removeMember(const std::string& key); + + /// Return true if the object has a member named key. + bool isMember(const char* key) const; + /// Return true if the object has a member named key. + bool isMember(const std::string& key) const; +#ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember(const CppTL::ConstString& key) const; +#endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. 
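// [Editorial illustration: not part of the upstream header.] Typical object
// access with the members declared above; the key names are made up:
//
//   Json::Value obj(Json::objectValue);
//   obj["name"] = "example";
//   std::string name = obj.get("name", "unknown").asString();
//   if (obj.isMember("name"))
//     obj.removeMember("name");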
+ /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + + //# ifdef JSON_USE_CPPTL + // EnumMemberNames enumMemberNames() const; + // EnumValues enumValues() const; + //# endif + + /// Comments must be //... or /* ... */ + void setComment(const char* comment, CommentPlacement placement); + /// Comments must be //... or /* ... */ + void setComment(const std::string& comment, CommentPlacement placement); + bool hasComment(CommentPlacement placement) const; + /// Include delimiters and embedded newlines. + std::string getComment(CommentPlacement placement) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + // Accessors for the [start, limit) range of bytes within the JSON text from + // which this value was parsed, if any. + void setOffsetStart(size_t start); + void setOffsetLimit(size_t limit); + size_t getOffsetStart() const; + size_t getOffsetLimit() const; + +private: + void initBasic(ValueType type, bool allocated = false); + + Value& resolveReference(const char* key, bool isStatic); + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const { return itemIsUsed_ == 0; } + + inline void setItemUsed(bool isUsed = true) { itemIsUsed_ = isUsed ? 1 : 0; } + + inline bool isMemberNameStatic() const { return memberNameIsStatic_ == 0; } + + inline void setMemberNameIsStatic(bool isStatic) { + memberNameIsStatic_ = isStatic ? 1 : 0; + } +#endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + +private: + struct CommentInfo { + CommentInfo(); + ~CommentInfo(); + + void setComment(const char* text); + + char* comment_; + }; + + // struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder { + LargestInt int_; + LargestUInt uint_; + double real_; + bool bool_; + char* string_; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray* array_; + ValueInternalMap* map_; +#else + ObjectValues* map_; +#endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +#ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +#endif + CommentInfo* comments_; + + // [start, limit) byte offsets in the source JSON text from which this Value + // was extracted. + size_t start_; + size_t limit_; +}; + +/** \brief Experimental and untested: represents an element of the "path" to + * access a node. + */ +class JSON_API PathArgument { +public: + friend class Path; + + PathArgument(); + PathArgument(ArrayIndex index); + PathArgument(const char* key); + PathArgument(const std::string& key); + +private: + enum Kind { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + ArrayIndex index_; + Kind kind_; +}; + +/** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." 
=> root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ +class JSON_API Path { +public: + Path(const std::string& path, + const PathArgument& a1 = PathArgument(), + const PathArgument& a2 = PathArgument(), + const PathArgument& a3 = PathArgument(), + const PathArgument& a4 = PathArgument(), + const PathArgument& a5 = PathArgument()); + + const Value& resolve(const Value& root) const; + Value resolve(const Value& root, const Value& defaultValue) const; + /// Creates the "path" to access the specified node and returns a reference on + /// the node. + Value& make(Value& root) const; + +private: + typedef std::vector<const PathArgument*> InArgs; + typedef std::vector<PathArgument> Args; + + void makePath(const std::string& path, const InArgs& in); + void addPathInArg(const std::string& path, + const InArgs& in, + InArgs::const_iterator& itInArg, + PathArgument::Kind kind); + void invalidPath(const std::string& path, int location); + + Args args_; +}; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP +/** \brief Allocator to customize Value internal map. + * Below is an example of a simple implementation (default implementation + actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ +class JSON_API ValueMapAllocator { +public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap* newMap() = 0; + virtual ValueInternalMap* newMapCopy(const ValueInternalMap& other) = 0; + virtual void destructMap(ValueInternalMap* map) = 0; + virtual ValueInternalLink* allocateMapBuckets(unsigned int size) = 0; + virtual void releaseMapBuckets(ValueInternalLink* links) = 0; + virtual ValueInternalLink* allocateMapLink() = 0; + virtual void releaseMapLink(ValueInternalLink* link) = 0; +}; + +/** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ +class JSON_API ValueInternalLink { +public: + enum { + itemPerLink = 6 + }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char* keys_[itemPerLink]; + ValueInternalLink* previous_; + ValueInternalLink* next_; +}; + +/** \brief A linked page based hash-table implementation used internally by + *Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a + *linked + * list in each bucket to handle collision. 
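 *
 * [Editorial illustration: not part of the upstream header, and concerning the
 * experimental Json::Path class declared above rather than this map.] A sketch
 * of the "%" / "[%]" placeholder syntax; the key name is made up and 'root' is
 * assumed to be an already-parsed value:
 * \code
 * Json::Path path(".plugins[%]", Json::PathArgument(0u));
 * const Json::Value& first = path.resolve(root);   // root["plugins"][0]
 * \endcode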
There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following + *bucket. + * The last link of the last bucket is stored in tailLink_ as it has no + *following bucket. + * Only the last link of a bucket may contains 'available' item. The last link + *always + * contains at least one element unless is it the bucket one very first link. + */ +class JSON_API ValueInternalMap { + friend class ValueIteratorBase; + friend class Value; + +public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState { + IteratorState() : map_(0), link_(0), itemIndex_(0), bucketIndex_(0) {} + ValueInternalMap* map_; + ValueInternalLink* link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap(const ValueInternalMap& other); + ValueInternalMap& operator=(ValueInternalMap other); + ~ValueInternalMap(); + + void swap(ValueInternalMap& other); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta(BucketIndex growth); + + bool reserve(BucketIndex newItemCount); + + const Value* find(const char* key) const; + + Value* find(const char* key); + + Value& resolveReference(const char* key, bool isStatic); + + void remove(const char* key); + + void doActualRemove(ValueInternalLink* link, + BucketIndex index, + BucketIndex bucketIndex); + + ValueInternalLink*& getLastLinkInBucket(BucketIndex bucketIndex); + + Value& setNewItem(const char* key, + bool isStatic, + ValueInternalLink* link, + BucketIndex index); + + Value& unsafeAdd(const char* key, bool isStatic, HashKey hashedKey); + + HashKey hash(const char* key) const; + + int compare(const ValueInternalMap& other) const; + +private: + void makeBeginIterator(IteratorState& it) const; + void makeEndIterator(IteratorState& it) const; + static bool equals(const IteratorState& x, const IteratorState& other); + static void increment(IteratorState& iterator); + static void incrementBucket(IteratorState& iterator); + static void decrement(IteratorState& iterator); + static const char* key(const IteratorState& iterator); + static const char* key(const IteratorState& iterator, bool& isStatic); + static Value& value(const IteratorState& iterator); + static int distance(const IteratorState& x, const IteratorState& y); + +private: + ValueInternalLink* buckets_; + ValueInternalLink* tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; +}; + +/** \brief A simplified deque implementation used internally by Value. +* \internal +* It is based on a list of fixed "page", each page contains a fixed number of +*items. +* Instead of using a linked-list, a array of pointer is used for fast item +*look-up. +* Look-up for an element is as follow: +* - compute page index: pageIndex = itemIndex / itemsPerPage +* - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] +* +* Insertion is amortized constant time (only the array containing the index of +*pointers +* need to be reallocated when items are appended). 
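*
* [Editorial note: not part of the upstream header.] A worked example of the
* look-up arithmetic above: with itemsPerPage == 8, item 11 has
* pageIndex == 11 / 8 == 1 and is found at pages_[1][11 % 8], i.e. pages_[1][3].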
+*/ +class JSON_API ValueInternalArray { + friend class Value; + friend class ValueIteratorBase; + +public: + enum { + itemsPerPage = 8 + }; // should be a power of 2 for fast divide and modulo. + typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() : array_(0), currentPageIndex_(0), currentItemIndex_(0) {} + ValueInternalArray* array_; + Value** currentPageIndex_; + unsigned int currentItemIndex_; + }; +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray(const ValueInternalArray& other); + ValueInternalArray& operator=(ValueInternalArray other); + ~ValueInternalArray(); + void swap(ValueInternalArray& other); + + void clear(); + void resize(ArrayIndex newSize); + + Value& resolveReference(ArrayIndex index); + + Value* find(ArrayIndex index) const; + + ArrayIndex size() const; + + int compare(const ValueInternalArray& other) const; + +private: + static bool equals(const IteratorState& x, const IteratorState& other); + static void increment(IteratorState& iterator); + static void decrement(IteratorState& iterator); + static Value& dereference(const IteratorState& iterator); + static Value& unsafeDereference(const IteratorState& iterator); + static int distance(const IteratorState& x, const IteratorState& y); + static ArrayIndex indexOf(const IteratorState& iterator); + void makeBeginIterator(IteratorState& it) const; + void makeEndIterator(IteratorState& it) const; + void makeIterator(IteratorState& it, ArrayIndex index) const; + + void makeIndexValid(ArrayIndex index); + + Value** pages_; + ArrayIndex size_; + PageIndex pageCount_; +}; + +/** \brief Experimental: do not use. Allocator to customize Value internal +array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). 
+ \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator +virtual ~DefaultValueArrayAllocator() +{ +} + +virtual ValueInternalArray *newArray() +{ + return new ValueInternalArray(); +} + +virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) +{ + return new ValueInternalArray( other ); +} + +virtual void destruct( ValueInternalArray *array ) +{ + delete array; +} + +virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex +&indexCount, + ValueInternalArray::PageIndex +minNewIndexCount ) +{ + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast<Value **>( newIndexes ); +} +virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) +{ + if ( indexes ) + free( indexes ); +} + +virtual Value *allocateArrayPage() +{ + return static_cast<Value *>( malloc( sizeof(Value) * +ValueInternalArray::itemsPerPage ) ); +} + +virtual void releaseArrayPage( Value *value ) +{ + if ( value ) + free( value ); +} +}; + \endcode + */ +class JSON_API ValueArrayAllocator { +public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray* newArray() = 0; + virtual ValueInternalArray* newArrayCopy(const ValueInternalArray& other) = 0; + virtual void destructArray(ValueInternalArray* array) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able + * to + * handle. + */ + virtual void + reallocateArrayPageIndex(Value**& indexes, + ValueInternalArray::PageIndex& indexCount, + ValueInternalArray::PageIndex minNewIndexCount) = 0; + virtual void + releaseArrayPageIndex(Value** indexes, + ValueInternalArray::PageIndex indexCount) = 0; + virtual Value* allocateArrayPage() = 0; + virtual void releaseArrayPage(Value* value) = 0; +}; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +/** \brief base class for Value iterators. + * + */ +class JSON_API ValueIteratorBase { +public: + typedef std::bidirectional_iterator_tag iterator_category; + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase(const Value::ObjectValues::iterator& current); +#else + ValueIteratorBase(const ValueInternalArray::IteratorState& state); + ValueIteratorBase(const ValueInternalMap::IteratorState& state); +#endif + + bool operator==(const SelfType& other) const { return isEqual(other); } + + bool operator!=(const SelfType& other) const { return !isEqual(other); } + + difference_type operator-(const SelfType& other) const { + return computeDistance(other); + } + + /// Return either the index or the member name of the referenced value as a + /// Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. 
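// [Editorial illustration: not part of the upstream header.] How this base
// class is typically used through Json::Value::const_iterator; 'obj' is a
// made-up object value holding string members:
//
//   for (Json::Value::const_iterator it = obj.begin(); it != obj.end(); ++it)
//     std::cout << it.key().asString() << " = " << it->asString() << std::endl;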
+ UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an + /// objectValue. + const char* memberName() const; + +protected: + Value& deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance(const SelfType& other) const; + + bool isEqual(const SelfType& other) const; + + void copy(const SelfType& other); + +private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif +}; + +/** \brief const iterator for object and array value. + * + */ +class JSON_API ValueConstIterator : public ValueIteratorBase { + friend class Value; + +public: + typedef const Value value_type; + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value& reference; + typedef const Value* pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + +private: +/*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator(const Value::ObjectValues::iterator& current); +#else + ValueConstIterator(const ValueInternalArray::IteratorState& state); + ValueConstIterator(const ValueInternalMap::IteratorState& state); +#endif +public: + SelfType& operator=(const ValueIteratorBase& other); + + SelfType operator++(int) { + SelfType temp(*this); + ++*this; + return temp; + } + + SelfType operator--(int) { + SelfType temp(*this); + --*this; + return temp; + } + + SelfType& operator--() { + decrement(); + return *this; + } + + SelfType& operator++() { + increment(); + return *this; + } + + reference operator*() const { return deref(); } + + pointer operator->() const { return &deref(); } +}; + +/** \brief Iterator for object and array value. + */ +class JSON_API ValueIterator : public ValueIteratorBase { + friend class Value; + +public: + typedef Value value_type; + typedef unsigned int size_t; + typedef int difference_type; + typedef Value& reference; + typedef Value* pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator(const ValueConstIterator& other); + ValueIterator(const ValueIterator& other); + +private: +/*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator(const Value::ObjectValues::iterator& current); +#else + ValueIterator(const ValueInternalArray::IteratorState& state); + ValueIterator(const ValueInternalMap::IteratorState& state); +#endif +public: + SelfType& operator=(const SelfType& other); + + SelfType operator++(int) { + SelfType temp(*this); + ++*this; + return temp; + } + + SelfType operator--(int) { + SelfType temp(*this); + --*this; + return temp; + } + + SelfType& operator--() { + decrement(); + return *this; + } + + SelfType& operator++() { + increment(); + return *this; + } + + reference operator*() const { return deref(); } + + pointer operator->() const { return &deref(); } +}; + +} // namespace Json + +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +#pragma warning(pop) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/3rdparty/jsoncpp/include/json/version.h b/3rdparty/jsoncpp/include/json/version.h new file mode 100644 index 00000000000..58fca8a9820 --- /dev/null +++ b/3rdparty/jsoncpp/include/json/version.h @@ -0,0 +1,14 @@ +// DO NOT EDIT. 
This file is generated by CMake from "version" +// and "version.h.in" files. +// Run CMake configure step to update it. +#ifndef JSON_VERSION_H_INCLUDED +# define JSON_VERSION_H_INCLUDED + +# define JSONCPP_VERSION_STRING "1.1.0" +# define JSONCPP_VERSION_MAJOR 1 +# define JSONCPP_VERSION_MINOR 1 +# define JSONCPP_VERSION_PATCH 0 +# define JSONCPP_VERSION_QUALIFIER +# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8)) + +#endif // JSON_VERSION_H_INCLUDED diff --git a/3rdparty/jsoncpp/include/json/writer.h b/3rdparty/jsoncpp/include/json/writer.h new file mode 100644 index 00000000000..dc9e46f4bd6 --- /dev/null +++ b/3rdparty/jsoncpp/include/json/writer.h @@ -0,0 +1,213 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_WRITER_H_INCLUDED +#define JSON_WRITER_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +#include "value.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +#include <vector> +#include <string> + +// Disable warning C4251: <data member>: <type> needs to have dll-interface to +// be used by... +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +#pragma warning(push) +#pragma warning(disable : 4251) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + +namespace Json { + +class Value; + +/** \brief Abstract class for writers. + */ +class JSON_API Writer { +public: + virtual ~Writer(); + + virtual std::string write(const Value& root) = 0; +}; + +/** \brief Outputs a Value in <a HREF="http://www.json.org">JSON</a> format + *without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' + *consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ +class JSON_API FastWriter : public Writer { +public: + FastWriter(); + virtual ~FastWriter() {} + + void enableYAMLCompatibility(); + + /** \brief Drop the "null" string from the writer's output for nullValues. + * Strictly speaking, this is not valid JSON. But when the output is being + * fed to a browser's Javascript, it makes for smaller output and the + * browser can handle the output just fine. + */ + void dropNullPlaceholders(); + + void omitEndingLineFeed(); + +public: // overridden from Writer + virtual std::string write(const Value& root); + +private: + void writeValue(const Value& value); + + std::string document_; + bool yamlCompatiblityEnabled_; + bool dropNullPlaceholders_; + bool omitEndingLineFeed_; +}; + +/** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a + *human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per + *line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value + *types, + * and all the values fit on one lines, then print the array on a single + *line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. 
+ * + * If the Value have comments then they are outputed according to their + *#CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ +class JSON_API StyledWriter : public Writer { +public: + StyledWriter(); + virtual ~StyledWriter() {} + +public: // overridden from Writer + /** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write(const Value& root); + +private: + void writeValue(const Value& value); + void writeArrayValue(const Value& value); + bool isMultineArray(const Value& value); + void pushValue(const std::string& value); + void writeIndent(); + void writeWithIndent(const std::string& value); + void indent(); + void unindent(); + void writeCommentBeforeValue(const Value& root); + void writeCommentAfterValueOnSameLine(const Value& root); + bool hasCommentForValue(const Value& value); + static std::string normalizeEOL(const std::string& text); + + typedef std::vector<std::string> ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; +}; + +/** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a + human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per + line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value + types, + * and all the values fit on one lines, then print the array on a single + line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their + #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ +class JSON_API StyledStreamWriter { +public: + StyledStreamWriter(std::string indentation = "\t"); + ~StyledStreamWriter() {} + +public: + /** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not + * return a value. 
+ */ + void write(std::ostream& out, const Value& root); + +private: + void writeValue(const Value& value); + void writeArrayValue(const Value& value); + bool isMultineArray(const Value& value); + void pushValue(const std::string& value); + void writeIndent(); + void writeWithIndent(const std::string& value); + void indent(); + void unindent(); + void writeCommentBeforeValue(const Value& root); + void writeCommentAfterValueOnSameLine(const Value& root); + bool hasCommentForValue(const Value& value); + static std::string normalizeEOL(const std::string& text); + + typedef std::vector<std::string> ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; +}; + +#if defined(JSON_HAS_INT64) +std::string JSON_API valueToString(Int value); +std::string JSON_API valueToString(UInt value); +#endif // if defined(JSON_HAS_INT64) +std::string JSON_API valueToString(LargestInt value); +std::string JSON_API valueToString(LargestUInt value); +std::string JSON_API valueToString(double value); +std::string JSON_API valueToString(bool value); +std::string JSON_API valueToQuotedString(const char* value); + +/// \brief Output using the StyledStreamWriter. +/// \see Json::operator>>() +JSON_API std::ostream& operator<<(std::ostream&, const Value& root); + +} // namespace Json + +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +#pragma warning(pop) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + +#endif // JSON_WRITER_H_INCLUDED diff --git a/3rdparty/jsoncpp/makefiles/msvc2010/jsoncpp.sln b/3rdparty/jsoncpp/makefiles/msvc2010/jsoncpp.sln new file mode 100644 index 00000000000..c4ecb907072 --- /dev/null +++ b/3rdparty/jsoncpp/makefiles/msvc2010/jsoncpp.sln @@ -0,0 +1,42 @@ + +Microsoft Visual Studio Solution File, Format Version 11.00 +# Visual Studio 2010 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcxproj", "{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}" +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcxproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcxproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Win32 = Debug|Win32 + Debug|x64 = Debug|x64 + Release|Win32 = Release|Win32 + Release|x64 = Release|x64 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Debug|Win32.ActiveCfg = Debug|Win32 + {1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Debug|Win32.Build.0 = Debug|Win32 + {1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Debug|x64.ActiveCfg = Debug|x64 + {1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Debug|x64.Build.0 = Debug|x64 + {1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Release|Win32.ActiveCfg = Release|Win32 + {1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Release|Win32.Build.0 = Release|Win32 + {1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Release|x64.ActiveCfg = Release|x64 + {1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Release|x64.Build.0 = Release|x64 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug|Win32.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug|Win32.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug|x64.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release|Win32.ActiveCfg = Release|Win32 + 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release|Win32.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release|x64.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug|Win32.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug|Win32.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug|x64.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release|Win32.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release|Win32.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release|x64.ActiveCfg = Release|Win32 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/3rdparty/jsoncpp/makefiles/msvc2010/jsontest.vcxproj b/3rdparty/jsoncpp/makefiles/msvc2010/jsontest.vcxproj new file mode 100644 index 00000000000..939d440ddcd --- /dev/null +++ b/3rdparty/jsoncpp/makefiles/msvc2010/jsontest.vcxproj @@ -0,0 +1,96 @@ +<?xml version="1.0" encoding="utf-8"?> +<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> + <ItemGroup Label="ProjectConfigurations"> + <ProjectConfiguration Include="Debug|Win32"> + <Configuration>Debug</Configuration> + <Platform>Win32</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="Release|Win32"> + <Configuration>Release</Configuration> + <Platform>Win32</Platform> + </ProjectConfiguration> + </ItemGroup> + <PropertyGroup Label="Globals"> + <ProjectGuid>{25AF2DD2-D396-4668-B188-488C33B8E620}</ProjectGuid> + <Keyword>Win32Proj</Keyword> + </PropertyGroup> + <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" /> + <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration"> + <ConfigurationType>Application</ConfigurationType> + <CharacterSet>MultiByte</CharacterSet> + </PropertyGroup> + <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration"> + <ConfigurationType>Application</ConfigurationType> + <CharacterSet>MultiByte</CharacterSet> + </PropertyGroup> + <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" /> + <ImportGroup Label="ExtensionSettings"> + </ImportGroup> + <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets"> + <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> + </ImportGroup> + <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets"> + <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> + </ImportGroup> + <PropertyGroup Label="UserMacros" /> + <PropertyGroup> + <_ProjectFileVersion>10.0.40219.1</_ProjectFileVersion> + <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">../../build/vs71/debug/jsontest\</OutDir> + <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">../../build/vs71/debug/jsontest\</IntDir> + <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</LinkIncremental> + <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">../../build/vs71/release/jsontest\</OutDir> + <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">../../build/vs71/release/jsontest\</IntDir> + <LinkIncremental 
Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</LinkIncremental> + </PropertyGroup> + <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'"> + <ClCompile> + <Optimization>Disabled</Optimization> + <AdditionalIncludeDirectories>../../include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> + <PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions> + <MinimalRebuild>true</MinimalRebuild> + <BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks> + <RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary> + <PrecompiledHeader> + </PrecompiledHeader> + <WarningLevel>Level3</WarningLevel> + <DebugInformationFormat>EditAndContinue</DebugInformationFormat> + </ClCompile> + <Link> + <OutputFile>$(OutDir)jsontest.exe</OutputFile> + <GenerateDebugInformation>true</GenerateDebugInformation> + <ProgramDatabaseFile>$(OutDir)jsontest.pdb</ProgramDatabaseFile> + <SubSystem>Console</SubSystem> + <TargetMachine>MachineX86</TargetMachine> + </Link> + </ItemDefinitionGroup> + <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'"> + <ClCompile> + <AdditionalIncludeDirectories>../../include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> + <PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions> + <RuntimeLibrary>MultiThreaded</RuntimeLibrary> + <PrecompiledHeader> + </PrecompiledHeader> + <WarningLevel>Level3</WarningLevel> + <DebugInformationFormat>ProgramDatabase</DebugInformationFormat> + </ClCompile> + <Link> + <OutputFile>$(OutDir)jsontest.exe</OutputFile> + <GenerateDebugInformation>true</GenerateDebugInformation> + <SubSystem>Console</SubSystem> + <OptimizeReferences>true</OptimizeReferences> + <EnableCOMDATFolding>true</EnableCOMDATFolding> + <TargetMachine>MachineX86</TargetMachine> + </Link> + </ItemDefinitionGroup> + <ItemGroup> + <ClCompile Include="..\..\src\jsontestrunner\main.cpp" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="lib_json.vcxproj"> + <Project>{1e6c2c1c-6453-4129-ae3f-0ee8e6599c89}</Project> + </ProjectReference> + </ItemGroup> + <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" /> + <ImportGroup Label="ExtensionTargets"> + </ImportGroup> +</Project>
\ No newline at end of file diff --git a/3rdparty/jsoncpp/makefiles/msvc2010/jsontest.vcxproj.filters b/3rdparty/jsoncpp/makefiles/msvc2010/jsontest.vcxproj.filters new file mode 100644 index 00000000000..610b540ea52 --- /dev/null +++ b/3rdparty/jsoncpp/makefiles/msvc2010/jsontest.vcxproj.filters @@ -0,0 +1,13 @@ +<?xml version="1.0" encoding="utf-8"?> +<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> + <ItemGroup> + <Filter Include="Source Files"> + <UniqueIdentifier>{903591b3-ade3-4ce4-b1f9-1e175e62b014}</UniqueIdentifier> + </Filter> + </ItemGroup> + <ItemGroup> + <ClCompile Include="..\..\src\jsontestrunner\main.cpp"> + <Filter>Source Files</Filter> + </ClCompile> + </ItemGroup> +</Project>
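The macros in version.h above give client code a compile-time handle on the library version; a minimal sketch of a version gate, assuming the 1.1.0 values generated here:

    #include <json/version.h>

    // JSONCPP_VERSION_HEXA packs major/minor/patch into one comparable number.
    // Require at least 1.1.0, the version this header was generated for.
    #if JSONCPP_VERSION_HEXA < ((1 << 24) | (1 << 16) | (0 << 8))
    #error "jsoncpp 1.1.0 or newer is required"
    #endif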
\ No newline at end of file diff --git a/3rdparty/jsoncpp/makefiles/msvc2010/lib_json.vcxproj b/3rdparty/jsoncpp/makefiles/msvc2010/lib_json.vcxproj new file mode 100644 index 00000000000..3cfd0f9362d --- /dev/null +++ b/3rdparty/jsoncpp/makefiles/msvc2010/lib_json.vcxproj @@ -0,0 +1,143 @@ +<?xml version="1.0" encoding="utf-8"?> +<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> + <ItemGroup Label="ProjectConfigurations"> + <ProjectConfiguration Include="Debug|Win32"> + <Configuration>Debug</Configuration> + <Platform>Win32</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="Debug|x64"> + <Configuration>Debug</Configuration> + <Platform>x64</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="Release|Win32"> + <Configuration>Release</Configuration> + <Platform>Win32</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="Release|x64"> + <Configuration>Release</Configuration> + <Platform>x64</Platform> + </ProjectConfiguration> + </ItemGroup> + <ItemGroup> + <ClCompile Include="..\..\src\lib_json\json_reader.cpp" /> + <ClCompile Include="..\..\src\lib_json\json_value.cpp" /> + <ClCompile Include="..\..\src\lib_json\json_writer.cpp" /> + </ItemGroup> + <ItemGroup> + <ClInclude Include="..\..\include\json\reader.h" /> + <ClInclude Include="..\..\include\json\value.h" /> + <ClInclude Include="..\..\include\json\writer.h" /> + </ItemGroup> + <PropertyGroup Label="Globals"> + <ProjectGuid>{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}</ProjectGuid> + <Keyword>Win32Proj</Keyword> + <RootNamespace>jsoncpp</RootNamespace> + </PropertyGroup> + <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" /> + <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration"> + <ConfigurationType>StaticLibrary</ConfigurationType> + <UseDebugLibraries>true</UseDebugLibraries> + <CharacterSet>Unicode</CharacterSet> + </PropertyGroup> + <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration"> + <ConfigurationType>StaticLibrary</ConfigurationType> + <UseDebugLibraries>true</UseDebugLibraries> + <CharacterSet>Unicode</CharacterSet> + </PropertyGroup> + <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration"> + <ConfigurationType>StaticLibrary</ConfigurationType> + <UseDebugLibraries>false</UseDebugLibraries> + <WholeProgramOptimization>true</WholeProgramOptimization> + <CharacterSet>Unicode</CharacterSet> + </PropertyGroup> + <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration"> + <ConfigurationType>StaticLibrary</ConfigurationType> + <UseDebugLibraries>false</UseDebugLibraries> + <WholeProgramOptimization>true</WholeProgramOptimization> + <CharacterSet>Unicode</CharacterSet> + </PropertyGroup> + <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" /> + <ImportGroup Label="ExtensionSettings"> + </ImportGroup> + <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'"> + <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> + </ImportGroup> + <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets"> + <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" 
Label="LocalAppDataPlatform" /> + </ImportGroup> + <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'"> + <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> + </ImportGroup> + <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets"> + <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> + </ImportGroup> + <PropertyGroup Label="UserMacros" /> + <PropertyGroup /> + <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'"> + <ClCompile> + <PrecompiledHeader>NotUsing</PrecompiledHeader> + <WarningLevel>Level3</WarningLevel> + <Optimization>Disabled</Optimization> + <PreprocessorDefinitions>WIN32;_DEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions> + <AdditionalIncludeDirectories>../../include</AdditionalIncludeDirectories> + <RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary> + </ClCompile> + <Link> + <SubSystem>Windows</SubSystem> + <GenerateDebugInformation>true</GenerateDebugInformation> + </Link> + </ItemDefinitionGroup> + <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'"> + <ClCompile> + <PrecompiledHeader>NotUsing</PrecompiledHeader> + <WarningLevel>Level3</WarningLevel> + <Optimization>Disabled</Optimization> + <PreprocessorDefinitions>WIN32;_DEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions> + <AdditionalIncludeDirectories>../../include</AdditionalIncludeDirectories> + <RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary> + </ClCompile> + <Link> + <SubSystem>Windows</SubSystem> + <GenerateDebugInformation>true</GenerateDebugInformation> + </Link> + </ItemDefinitionGroup> + <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'"> + <ClCompile> + <WarningLevel>Level3</WarningLevel> + <PrecompiledHeader>NotUsing</PrecompiledHeader> + <Optimization>MaxSpeed</Optimization> + <FunctionLevelLinking>true</FunctionLevelLinking> + <IntrinsicFunctions>true</IntrinsicFunctions> + <PreprocessorDefinitions>WIN32;NDEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions> + <AdditionalIncludeDirectories>../../include</AdditionalIncludeDirectories> + <RuntimeLibrary>MultiThreaded</RuntimeLibrary> + </ClCompile> + <Link> + <SubSystem>Windows</SubSystem> + <GenerateDebugInformation>true</GenerateDebugInformation> + <EnableCOMDATFolding>true</EnableCOMDATFolding> + <OptimizeReferences>true</OptimizeReferences> + </Link> + </ItemDefinitionGroup> + <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'"> + <ClCompile> + <WarningLevel>Level3</WarningLevel> + <PrecompiledHeader>NotUsing</PrecompiledHeader> + <Optimization>MaxSpeed</Optimization> + <FunctionLevelLinking>true</FunctionLevelLinking> + <IntrinsicFunctions>true</IntrinsicFunctions> + <PreprocessorDefinitions>WIN32;NDEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions> + <AdditionalIncludeDirectories>../../include</AdditionalIncludeDirectories> + <RuntimeLibrary>MultiThreaded</RuntimeLibrary> + </ClCompile> + <Link> + <SubSystem>Windows</SubSystem> + <GenerateDebugInformation>true</GenerateDebugInformation> + <EnableCOMDATFolding>true</EnableCOMDATFolding> + <OptimizeReferences>true</OptimizeReferences> + </Link> + </ItemDefinitionGroup> + <Import 
Project="$(VCTargetsPath)\Microsoft.Cpp.targets" /> + <ImportGroup Label="ExtensionTargets"> + </ImportGroup> +</Project>
\ No newline at end of file diff --git a/3rdparty/jsoncpp/makefiles/msvc2010/lib_json.vcxproj.filters b/3rdparty/jsoncpp/makefiles/msvc2010/lib_json.vcxproj.filters new file mode 100644 index 00000000000..63c740331e7 --- /dev/null +++ b/3rdparty/jsoncpp/makefiles/msvc2010/lib_json.vcxproj.filters @@ -0,0 +1,33 @@ +<?xml version="1.0" encoding="utf-8"?> +<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> + <ItemGroup> + <Filter Include="Header Files"> + <UniqueIdentifier>{c110bc57-c46e-476c-97ea-84d8014f431c}</UniqueIdentifier> + </Filter> + <Filter Include="Source Files"> + <UniqueIdentifier>{ed718592-5acf-47b5-8f2b-b8224590da6a}</UniqueIdentifier> + </Filter> + </ItemGroup> + <ItemGroup> + <ClCompile Include="..\..\src\lib_json\json_reader.cpp"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="..\..\src\lib_json\json_value.cpp"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="..\..\src\lib_json\json_writer.cpp"> + <Filter>Source Files</Filter> + </ClCompile> + </ItemGroup> + <ItemGroup> + <ClInclude Include="..\..\include\json\reader.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="..\..\include\json\value.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="..\..\include\json\writer.h"> + <Filter>Header Files</Filter> + </ClInclude> + </ItemGroup> +</Project>
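For the iterator classes declared in value.h earlier in this commit, a small sketch of typical use, assuming the Value::begin()/end() accessors from the same header; the dump format is made up, while memberName(), index() and the streaming operator are the members declared above:

    #include <json/json.h>
    #include <iostream>

    void dump(const Json::Value& root) {
      // Json::Value::const_iterator is ValueConstIterator; it walks both
      // objectValue and arrayValue containers.
      for (Json::Value::const_iterator it = root.begin(); it != root.end(); ++it) {
        if (root.isObject())
          std::cout << it.memberName() << " = ";  // "" when not an objectValue
        else
          std::cout << "[" << it.index() << "] = ";
        std::cout << *it << "\n";                 // uses the StyledStreamWriter-based operator<<
      }
    }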
\ No newline at end of file diff --git a/3rdparty/jsoncpp/makefiles/msvc2010/test_lib_json.vcxproj b/3rdparty/jsoncpp/makefiles/msvc2010/test_lib_json.vcxproj new file mode 100644 index 00000000000..068af613e4f --- /dev/null +++ b/3rdparty/jsoncpp/makefiles/msvc2010/test_lib_json.vcxproj @@ -0,0 +1,109 @@ +<?xml version="1.0" encoding="utf-8"?> +<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> + <ItemGroup Label="ProjectConfigurations"> + <ProjectConfiguration Include="Debug|Win32"> + <Configuration>Debug</Configuration> + <Platform>Win32</Platform> + </ProjectConfiguration> + <ProjectConfiguration Include="Release|Win32"> + <Configuration>Release</Configuration> + <Platform>Win32</Platform> + </ProjectConfiguration> + </ItemGroup> + <PropertyGroup Label="Globals"> + <ProjectGuid>{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}</ProjectGuid> + <RootNamespace>test_lib_json</RootNamespace> + <Keyword>Win32Proj</Keyword> + </PropertyGroup> + <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" /> + <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration"> + <ConfigurationType>Application</ConfigurationType> + <CharacterSet>MultiByte</CharacterSet> + </PropertyGroup> + <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration"> + <ConfigurationType>Application</ConfigurationType> + <CharacterSet>MultiByte</CharacterSet> + </PropertyGroup> + <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" /> + <ImportGroup Label="ExtensionSettings"> + </ImportGroup> + <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets"> + <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> + </ImportGroup> + <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets"> + <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> + </ImportGroup> + <PropertyGroup Label="UserMacros" /> + <PropertyGroup> + <_ProjectFileVersion>10.0.40219.1</_ProjectFileVersion> + <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">../../build/vs71/debug/test_lib_json\</OutDir> + <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">../../build/vs71/debug/test_lib_json\</IntDir> + <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</LinkIncremental> + <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">../../build/vs71/release/test_lib_json\</OutDir> + <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">../../build/vs71/release/test_lib_json\</IntDir> + <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</LinkIncremental> + </PropertyGroup> + <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'"> + <ClCompile> + <Optimization>Disabled</Optimization> + <AdditionalIncludeDirectories>../../include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> + <PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions> + <MinimalRebuild>true</MinimalRebuild> + <BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks> + <RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary> + <PrecompiledHeader> + </PrecompiledHeader> + 
<WarningLevel>Level3</WarningLevel> + <DebugInformationFormat>EditAndContinue</DebugInformationFormat> + </ClCompile> + <Link> + <OutputFile>$(OutDir)test_lib_json.exe</OutputFile> + <GenerateDebugInformation>true</GenerateDebugInformation> + <ProgramDatabaseFile>$(OutDir)test_lib_json.pdb</ProgramDatabaseFile> + <SubSystem>Console</SubSystem> + <TargetMachine>MachineX86</TargetMachine> + </Link> + <PostBuildEvent> + <Message>Running all unit tests</Message> + <Command>$(TargetPath)</Command> + </PostBuildEvent> + </ItemDefinitionGroup> + <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'"> + <ClCompile> + <AdditionalIncludeDirectories>../../include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> + <PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions> + <RuntimeLibrary>MultiThreaded</RuntimeLibrary> + <PrecompiledHeader> + </PrecompiledHeader> + <WarningLevel>Level3</WarningLevel> + <DebugInformationFormat>ProgramDatabase</DebugInformationFormat> + </ClCompile> + <Link> + <OutputFile>$(OutDir)test_lib_json.exe</OutputFile> + <GenerateDebugInformation>true</GenerateDebugInformation> + <SubSystem>Console</SubSystem> + <OptimizeReferences>true</OptimizeReferences> + <EnableCOMDATFolding>true</EnableCOMDATFolding> + <TargetMachine>MachineX86</TargetMachine> + </Link> + <PostBuildEvent> + <Message>Running all unit tests</Message> + <Command>$(TargetPath)</Command> + </PostBuildEvent> + </ItemDefinitionGroup> + <ItemGroup> + <ClCompile Include="..\..\src\test_lib_json\jsontest.cpp" /> + <ClCompile Include="..\..\src\test_lib_json\main.cpp" /> + </ItemGroup> + <ItemGroup> + <ClInclude Include="..\..\src\test_lib_json\jsontest.h" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="lib_json.vcxproj"> + <Project>{1e6c2c1c-6453-4129-ae3f-0ee8e6599c89}</Project> + </ProjectReference> + </ItemGroup> + <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" /> + <ImportGroup Label="ExtensionTargets"> + </ImportGroup> +</Project>
\ No newline at end of file diff --git a/3rdparty/jsoncpp/makefiles/msvc2010/test_lib_json.vcxproj.filters b/3rdparty/jsoncpp/makefiles/msvc2010/test_lib_json.vcxproj.filters new file mode 100644 index 00000000000..8f0a17b995c --- /dev/null +++ b/3rdparty/jsoncpp/makefiles/msvc2010/test_lib_json.vcxproj.filters @@ -0,0 +1,24 @@ +<?xml version="1.0" encoding="utf-8"?> +<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> + <ItemGroup> + <ClCompile Include="..\..\src\test_lib_json\jsontest.cpp"> + <Filter>Source Filter</Filter> + </ClCompile> + <ClCompile Include="..\..\src\test_lib_json\main.cpp"> + <Filter>Source Filter</Filter> + </ClCompile> + </ItemGroup> + <ItemGroup> + <Filter Include="Source Filter"> + <UniqueIdentifier>{bf40cbfc-8e98-40b4-b9f3-7e8d579cbae2}</UniqueIdentifier> + </Filter> + <Filter Include="Header Files"> + <UniqueIdentifier>{5fd39074-89e6-4939-aa3f-694fefd296b1}</UniqueIdentifier> + </Filter> + </ItemGroup> + <ItemGroup> + <ClInclude Include="..\..\src\test_lib_json\jsontest.h"> + <Filter>Header Files</Filter> + </ClInclude> + </ItemGroup> +</Project>
\ No newline at end of file diff --git a/3rdparty/jsoncpp/makefiles/vs71/jsoncpp.sln b/3rdparty/jsoncpp/makefiles/vs71/jsoncpp.sln new file mode 100644 index 00000000000..dd2f91b4420 --- /dev/null +++ b/3rdparty/jsoncpp/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/3rdparty/jsoncpp/makefiles/vs71/jsontest.vcproj b/3rdparty/jsoncpp/makefiles/vs71/jsontest.vcproj new file mode 100644 index 00000000000..562c71f61ad --- /dev/null +++ b/3rdparty/jsoncpp/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ +<?xml version="1.0" encoding="Windows-1252"?> +<VisualStudioProject + ProjectType="Visual C++" + Version="7.10" + Name="jsontest" + ProjectGUID="{25AF2DD2-D396-4668-B188-488C33B8E620}" + Keyword="Win32Proj"> + <Platforms> + <Platform + Name="Win32"/> + </Platforms> + <Configurations> + <Configuration + Name="Debug|Win32" + OutputDirectory="../../build/vs71/debug/jsontest" + IntermediateDirectory="../../build/vs71/debug/jsontest" + ConfigurationType="1" + CharacterSet="2"> + <Tool + Name="VCCLCompilerTool" + Optimization="0" + 
AdditionalIncludeDirectories="../../include" + PreprocessorDefinitions="WIN32;_DEBUG;_CONSOLE" + MinimalRebuild="TRUE" + BasicRuntimeChecks="3" + RuntimeLibrary="1" + UsePrecompiledHeader="0" + WarningLevel="3" + Detect64BitPortabilityProblems="TRUE" + DebugInformationFormat="4"/> + <Tool + Name="VCCustomBuildTool"/> + <Tool + Name="VCLinkerTool" + OutputFile="$(OutDir)/jsontest.exe" + LinkIncremental="2" + GenerateDebugInformation="TRUE" + ProgramDatabaseFile="$(OutDir)/jsontest.pdb" + SubSystem="1" + TargetMachine="1"/> + <Tool + Name="VCMIDLTool"/> + <Tool + Name="VCPostBuildEventTool"/> + <Tool + Name="VCPreBuildEventTool"/> + <Tool + Name="VCPreLinkEventTool"/> + <Tool + Name="VCResourceCompilerTool"/> + <Tool + Name="VCWebServiceProxyGeneratorTool"/> + <Tool + Name="VCXMLDataGeneratorTool"/> + <Tool + Name="VCWebDeploymentTool"/> + <Tool + Name="VCManagedWrapperGeneratorTool"/> + <Tool + Name="VCAuxiliaryManagedWrapperGeneratorTool"/> + </Configuration> + <Configuration + Name="Release|Win32" + OutputDirectory="../../build/vs71/release/jsontest" + IntermediateDirectory="../../build/vs71/release/jsontest" + ConfigurationType="1" + CharacterSet="2"> + <Tool + Name="VCCLCompilerTool" + AdditionalIncludeDirectories="../../include" + PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE" + RuntimeLibrary="0" + UsePrecompiledHeader="0" + WarningLevel="3" + Detect64BitPortabilityProblems="TRUE" + DebugInformationFormat="3"/> + <Tool + Name="VCCustomBuildTool"/> + <Tool + Name="VCLinkerTool" + OutputFile="$(OutDir)/jsontest.exe" + LinkIncremental="1" + GenerateDebugInformation="TRUE" + SubSystem="1" + OptimizeReferences="2" + EnableCOMDATFolding="2" + TargetMachine="1"/> + <Tool + Name="VCMIDLTool"/> + <Tool + Name="VCPostBuildEventTool"/> + <Tool + Name="VCPreBuildEventTool"/> + <Tool + Name="VCPreLinkEventTool"/> + <Tool + Name="VCResourceCompilerTool"/> + <Tool + Name="VCWebServiceProxyGeneratorTool"/> + <Tool + Name="VCXMLDataGeneratorTool"/> + <Tool + Name="VCWebDeploymentTool"/> + <Tool + Name="VCManagedWrapperGeneratorTool"/> + <Tool + Name="VCAuxiliaryManagedWrapperGeneratorTool"/> + </Configuration> + </Configurations> + <References> + </References> + <Files> + <File + RelativePath="..\..\src\jsontestrunner\main.cpp"> + </File> + </Files> + <Globals> + </Globals> +</VisualStudioProject> diff --git a/3rdparty/jsoncpp/makefiles/vs71/lib_json.vcproj b/3rdparty/jsoncpp/makefiles/vs71/lib_json.vcproj new file mode 100644 index 00000000000..1aa5978a1fe --- /dev/null +++ b/3rdparty/jsoncpp/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ +<?xml version="1.0" encoding="Windows-1252"?> +<VisualStudioProject + ProjectType="Visual C++" + Version="7.10" + Name="lib_json" + ProjectGUID="{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + Keyword="Win32Proj"> + <Platforms> + <Platform + Name="Win32"/> + </Platforms> + <Configurations> + <Configuration + Name="Debug|Win32" + OutputDirectory="../../build/vs71/debug/lib_json" + IntermediateDirectory="../../build/vs71/debug/lib_json" + ConfigurationType="4" + CharacterSet="2"> + <Tool + Name="VCCLCompilerTool" + Optimization="0" + AdditionalIncludeDirectories="../../include" + PreprocessorDefinitions="WIN32;_DEBUG;_LIB" + StringPooling="TRUE" + MinimalRebuild="TRUE" + BasicRuntimeChecks="3" + RuntimeLibrary="1" + EnableFunctionLevelLinking="TRUE" + DisableLanguageExtensions="TRUE" + ForceConformanceInForLoopScope="FALSE" + RuntimeTypeInfo="TRUE" + UsePrecompiledHeader="0" + WarningLevel="3" + Detect64BitPortabilityProblems="TRUE" + DebugInformationFormat="4"/> + 
<Tool + Name="VCCustomBuildTool"/> + <Tool + Name="VCLibrarianTool" + OutputFile="$(OutDir)/json_vc71_libmtd.lib"/> + <Tool + Name="VCMIDLTool"/> + <Tool + Name="VCPostBuildEventTool"/> + <Tool + Name="VCPreBuildEventTool"/> + <Tool + Name="VCPreLinkEventTool"/> + <Tool + Name="VCResourceCompilerTool"/> + <Tool + Name="VCWebServiceProxyGeneratorTool"/> + <Tool + Name="VCXMLDataGeneratorTool"/> + <Tool + Name="VCManagedWrapperGeneratorTool"/> + <Tool + Name="VCAuxiliaryManagedWrapperGeneratorTool"/> + </Configuration> + <Configuration + Name="Release|Win32" + OutputDirectory="../../build/vs71/release/lib_json" + IntermediateDirectory="../../build/vs71/release/lib_json" + ConfigurationType="4" + CharacterSet="2" + WholeProgramOptimization="TRUE"> + <Tool + Name="VCCLCompilerTool" + GlobalOptimizations="TRUE" + EnableIntrinsicFunctions="TRUE" + AdditionalIncludeDirectories="../../include" + PreprocessorDefinitions="WIN32;NDEBUG;_LIB" + StringPooling="TRUE" + RuntimeLibrary="0" + EnableFunctionLevelLinking="TRUE" + DisableLanguageExtensions="TRUE" + ForceConformanceInForLoopScope="FALSE" + RuntimeTypeInfo="TRUE" + UsePrecompiledHeader="0" + AssemblerOutput="4" + WarningLevel="3" + Detect64BitPortabilityProblems="TRUE" + DebugInformationFormat="3"/> + <Tool + Name="VCCustomBuildTool"/> + <Tool + Name="VCLibrarianTool" + OutputFile="$(OutDir)/json_vc71_libmt.lib"/> + <Tool + Name="VCMIDLTool"/> + <Tool + Name="VCPostBuildEventTool"/> + <Tool + Name="VCPreBuildEventTool"/> + <Tool + Name="VCPreLinkEventTool"/> + <Tool + Name="VCResourceCompilerTool"/> + <Tool + Name="VCWebServiceProxyGeneratorTool"/> + <Tool + Name="VCXMLDataGeneratorTool"/> + <Tool + Name="VCManagedWrapperGeneratorTool"/> + <Tool + Name="VCAuxiliaryManagedWrapperGeneratorTool"/> + </Configuration> + <Configuration + Name="dummy|Win32" + OutputDirectory="$(ConfigurationName)" + IntermediateDirectory="$(ConfigurationName)" + ConfigurationType="2" + CharacterSet="2" + WholeProgramOptimization="TRUE"> + <Tool + Name="VCCLCompilerTool" + GlobalOptimizations="TRUE" + EnableIntrinsicFunctions="TRUE" + AdditionalIncludeDirectories="../../include" + PreprocessorDefinitions="WIN32;NDEBUG;_LIB" + StringPooling="TRUE" + RuntimeLibrary="4" + EnableFunctionLevelLinking="TRUE" + DisableLanguageExtensions="TRUE" + ForceConformanceInForLoopScope="FALSE" + RuntimeTypeInfo="TRUE" + UsePrecompiledHeader="0" + AssemblerOutput="4" + WarningLevel="3" + Detect64BitPortabilityProblems="TRUE" + DebugInformationFormat="3"/> + <Tool + Name="VCCustomBuildTool"/> + <Tool + Name="VCLinkerTool" + GenerateDebugInformation="TRUE" + SubSystem="2" + OptimizeReferences="2" + EnableCOMDATFolding="2" + TargetMachine="1"/> + <Tool + Name="VCMIDLTool"/> + <Tool + Name="VCPostBuildEventTool"/> + <Tool + Name="VCPreBuildEventTool"/> + <Tool + Name="VCPreLinkEventTool"/> + <Tool + Name="VCResourceCompilerTool"/> + <Tool + Name="VCWebServiceProxyGeneratorTool"/> + <Tool + Name="VCXMLDataGeneratorTool"/> + <Tool + Name="VCWebDeploymentTool"/> + <Tool + Name="VCManagedWrapperGeneratorTool"/> + <Tool + Name="VCAuxiliaryManagedWrapperGeneratorTool"/> + </Configuration> + </Configurations> + <References> + </References> + <Files> + <File + RelativePath="..\..\include\json\autolink.h"> + </File> + <File + RelativePath="..\..\include\json\config.h"> + </File> + <File + RelativePath="..\..\include\json\features.h"> + </File> + <File + RelativePath="..\..\include\json\forwards.h"> + </File> + <File + RelativePath="..\..\include\json\json.h"> + </File> + <File + 
RelativePath="..\..\src\lib_json\json_batchallocator.h"> + </File> + <File + RelativePath="..\..\src\lib_json\json_internalarray.inl"> + </File> + <File + RelativePath="..\..\src\lib_json\json_internalmap.inl"> + </File> + <File + RelativePath="..\..\src\lib_json\json_reader.cpp"> + </File> + <File + RelativePath="..\..\src\lib_json\json_value.cpp"> + </File> + <File + RelativePath="..\..\src\lib_json\json_valueiterator.inl"> + </File> + <File + RelativePath="..\..\src\lib_json\json_writer.cpp"> + </File> + <File + RelativePath="..\..\include\json\reader.h"> + </File> + <File + RelativePath="..\..\include\json\value.h"> + </File> + <File + RelativePath="..\..\include\json\writer.h"> + </File> + </Files> + <Globals> + </Globals> +</VisualStudioProject> diff --git a/3rdparty/jsoncpp/makefiles/vs71/test_lib_json.vcproj b/3rdparty/jsoncpp/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 00000000000..9ebb986a665 --- /dev/null +++ b/3rdparty/jsoncpp/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ +<?xml version="1.0" encoding="Windows-1252"?> +<VisualStudioProject + ProjectType="Visual C++" + Version="7.10" + Name="test_lib_json" + ProjectGUID="{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + RootNamespace="test_lib_json" + Keyword="Win32Proj"> + <Platforms> + <Platform + Name="Win32"/> + </Platforms> + <Configurations> + <Configuration + Name="Debug|Win32" + OutputDirectory="../../build/vs71/debug/test_lib_json" + IntermediateDirectory="../../build/vs71/debug/test_lib_json" + ConfigurationType="1" + CharacterSet="2"> + <Tool + Name="VCCLCompilerTool" + Optimization="0" + AdditionalIncludeDirectories="../../include" + PreprocessorDefinitions="WIN32;_DEBUG;_CONSOLE" + MinimalRebuild="TRUE" + BasicRuntimeChecks="3" + RuntimeLibrary="1" + UsePrecompiledHeader="0" + WarningLevel="3" + Detect64BitPortabilityProblems="TRUE" + DebugInformationFormat="4"/> + <Tool + Name="VCCustomBuildTool"/> + <Tool + Name="VCLinkerTool" + OutputFile="$(OutDir)/test_lib_json.exe" + LinkIncremental="2" + GenerateDebugInformation="TRUE" + ProgramDatabaseFile="$(OutDir)/test_lib_json.pdb" + SubSystem="1" + TargetMachine="1"/> + <Tool + Name="VCMIDLTool"/> + <Tool + Name="VCPostBuildEventTool" + Description="Running all unit tests" + CommandLine="$(TargetPath)"/> + <Tool + Name="VCPreBuildEventTool"/> + <Tool + Name="VCPreLinkEventTool"/> + <Tool + Name="VCResourceCompilerTool"/> + <Tool + Name="VCWebServiceProxyGeneratorTool"/> + <Tool + Name="VCXMLDataGeneratorTool"/> + <Tool + Name="VCWebDeploymentTool"/> + <Tool + Name="VCManagedWrapperGeneratorTool"/> + <Tool + Name="VCAuxiliaryManagedWrapperGeneratorTool"/> + </Configuration> + <Configuration + Name="Release|Win32" + OutputDirectory="../../build/vs71/release/test_lib_json" + IntermediateDirectory="../../build/vs71/release/test_lib_json" + ConfigurationType="1" + CharacterSet="2"> + <Tool + Name="VCCLCompilerTool" + AdditionalIncludeDirectories="../../include" + PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE" + RuntimeLibrary="0" + UsePrecompiledHeader="0" + WarningLevel="3" + Detect64BitPortabilityProblems="TRUE" + DebugInformationFormat="3"/> + <Tool + Name="VCCustomBuildTool"/> + <Tool + Name="VCLinkerTool" + OutputFile="$(OutDir)/test_lib_json.exe" + LinkIncremental="1" + GenerateDebugInformation="TRUE" + SubSystem="1" + OptimizeReferences="2" + EnableCOMDATFolding="2" + TargetMachine="1"/> + <Tool + Name="VCMIDLTool"/> + <Tool + Name="VCPostBuildEventTool" + Description="Running all unit tests" + CommandLine="$(TargetPath)"/> + <Tool + 
Name="VCPreBuildEventTool"/> + <Tool + Name="VCPreLinkEventTool"/> + <Tool + Name="VCResourceCompilerTool"/> + <Tool + Name="VCWebServiceProxyGeneratorTool"/> + <Tool + Name="VCXMLDataGeneratorTool"/> + <Tool + Name="VCWebDeploymentTool"/> + <Tool + Name="VCManagedWrapperGeneratorTool"/> + <Tool + Name="VCAuxiliaryManagedWrapperGeneratorTool"/> + </Configuration> + </Configurations> + <References> + </References> + <Files> + <File + RelativePath="..\..\src\test_lib_json\jsontest.cpp"> + </File> + <File + RelativePath="..\..\src\test_lib_json\jsontest.h"> + </File> + <File + RelativePath="..\..\src\test_lib_json\main.cpp"> + </File> + </Files> + <Globals> + </Globals> +</VisualStudioProject> diff --git a/3rdparty/jsoncpp/makerelease.py b/3rdparty/jsoncpp/makerelease.py new file mode 100644 index 00000000000..90276d120bb --- /dev/null +++ b/3rdparty/jsoncpp/makerelease.py @@ -0,0 +1,384 @@ +"""Tag the sandbox for release, make source and doc tarballs. + +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev + +When testing this script: +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev + +Note: This was for Subversion. Now that we are in GitHub, we do not +need to build versioned tarballs anymore, so makerelease.py is defunct. +""" +from __future__ import print_function +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball +import amalgamate + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print('Running:', ' '.join( cmd )) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned' and path != 'version': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. 
+ Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError as e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print('Preparing exported source file EOL for distribution...') + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. 
+ """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print('Running:', ' '.join( cmd )) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in range(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print(heading, ' '.join( cmd )) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. + """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print('Removing the following file from web:') + print('\n'.join( paths_to_remove )) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print('Uploading %d files:' % len(upload_paths)) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in range(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. 
+Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. [Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 2: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + next_version = args[1] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print('Setting version to', release_version) + set_version( release_version ) + svn_commit( 'Release ' + release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print('Aborting, tag %s already exist. Use --retag to overwrite it!' 
% tag_url) + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print('Generated doxygen document...') +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print('Generating source tarball to', source_tarball_path) + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir + print('Generating amalgamation source tarball to', amalgamation_tarball_path) + amalgamation_dir = 'dist/amalgamation' + amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' ) + amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version + tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir], + amalgamation_dir, prefix_dir=amalgamation_source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print('Decompressing source tarball to', distcheck_dir) + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print('Downloading scons-local to', scons_local_path) + download( SCONS_LOCAL_URL, scons_local_path ) + print('Decompressing scons-local to', distcheck_top_dir) + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print('Compiling decompressed tarball') + all_build_status = True + for platform in options.platforms.split(','): + print('Testing platform:', platform) + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print('see build log:', log_path) + print(build_status and '=> ok' or '=> FAILED') + all_build_status = all_build_status and build_status + if not build_status: + print('Testing failed on at least one platform, aborting...') + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print('Uploading documentation using user', options.user) + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print('Completed documentation upload') + print('Uploading source and documentation tarballs for release using user', options.user) + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print('Source and doc release tarballs uploaded') + else: + print('No upload user specified. 
Web site and download tarbal were not uploaded.') + print('Tarball can be found at:', doc_tarball_path) + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/3rdparty/jsoncpp/pkg-config/jsoncpp.pc.in b/3rdparty/jsoncpp/pkg-config/jsoncpp.pc.in new file mode 100644 index 00000000000..9613181b2d1 --- /dev/null +++ b/3rdparty/jsoncpp/pkg-config/jsoncpp.pc.in @@ -0,0 +1,11 @@ +prefix=@CMAKE_INSTALL_PREFIX@ +exec_prefix=${prefix} +libdir=${exec_prefix}/@LIBRARY_INSTALL_DIR@ +includedir=${prefix}/@INCLUDE_INSTALL_DIR@ + +Name: jsoncpp +Description: A C++ library for interacting with JSON +Version: @JSONCPP_VERSION@ +URL: https://github.com/open-source-parsers/jsoncpp +Libs: -L${libdir} -ljsoncpp +Cflags: -I${includedir} diff --git a/3rdparty/jsoncpp/scons-tools/globtool.py b/3rdparty/jsoncpp/scons-tools/globtool.py new file mode 100644 index 00000000000..811140e8aab --- /dev/null +++ b/3rdparty/jsoncpp/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/3rdparty/jsoncpp/scons-tools/srcdist.py b/3rdparty/jsoncpp/scons-tools/srcdist.py new file mode 100644 index 00000000000..864ff408158 --- /dev/null +++ b/3rdparty/jsoncpp/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
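+##
+##    Illustrative shape of the return value (hypothetical Doxyfile settings,
+##    shown only as an example of what this parser would produce):
+##        {'PROJECT_NAME': 'jsoncpp',
+##         'INPUT': ['include', 'src/lib_json'],
+##         'RECURSIVE': 'YES'}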
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/3rdparty/jsoncpp/scons-tools/substinfile.py b/3rdparty/jsoncpp/scons-tools/substinfile.py new file mode 100644 index 00000000000..ef18b4edbcb --- /dev/null +++ b/3rdparty/jsoncpp/scons-tools/substinfile.py @@ -0,0 +1,80 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript +import collections + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError("Can't read source file %s"%sourcefile) + for (k,v) in list(dict.items()): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError("Can't write target file %s"%targetfile) + return 0 # success + + def subst_in_file(target, source, env): + if 'SUBST_DICT' not in env: + raise SCons.Errors.UserError("SubstInFile requires SUBST_DICT to be set.") + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in list(d.items()): + if isinstance(v, collections.Callable): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError("SubstInFile: key %s: %s must be a string or callable"%(k, repr(v))) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
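+
+        The dependency added on SCons.Node.Python.Value(d) makes the target
+        rebuild whenever the expanded substitution dictionary changes, not
+        only when the source template file changes.
+
+        Typical builder usage (a sketch with hypothetical file names and keys,
+        not necessarily how this project's build scripts invoke it):
+            env.SubstInFile('version.h', 'version.h.in',
+                            SUBST_DICT={'%JSONCPP_VERSION%': '1.0.0'})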
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in list(d.items()): + if isinstance(v, collections.Callable): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/3rdparty/jsoncpp/scons-tools/targz.py b/3rdparty/jsoncpp/scons-tools/targz.py new file mode 100644 index 00000000000..f5432003df7 --- /dev/null +++ b/3rdparty/jsoncpp/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/3rdparty/jsoncpp/src/CMakeLists.txt b/3rdparty/jsoncpp/src/CMakeLists.txt new file mode 100644 index 00000000000..608d3f7b67d --- /dev/null +++ b/3rdparty/jsoncpp/src/CMakeLists.txt @@ -0,0 +1,5 @@ +ADD_SUBDIRECTORY(lib_json) +IF(JSONCPP_WITH_TESTS) + ADD_SUBDIRECTORY(jsontestrunner) + ADD_SUBDIRECTORY(test_lib_json) +ENDIF(JSONCPP_WITH_TESTS) diff --git a/3rdparty/jsoncpp/src/jsontestrunner/CMakeLists.txt b/3rdparty/jsoncpp/src/jsontestrunner/CMakeLists.txt new file mode 100644 index 00000000000..dd8e2175f0f --- /dev/null +++ b/3rdparty/jsoncpp/src/jsontestrunner/CMakeLists.txt @@ -0,0 +1,22 @@ +FIND_PACKAGE(PythonInterp 2.6 REQUIRED) + +IF(JSONCPP_LIB_BUILD_SHARED) + ADD_DEFINITIONS( -DJSON_DLL ) +ENDIF(JSONCPP_LIB_BUILD_SHARED) + +ADD_EXECUTABLE(jsontestrunner_exe + main.cpp + ) +TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib) +SET_TARGET_PROPERTIES(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe) + +IF(PYTHONINTERP_FOUND) + # Run end to end parser/writer tests + SET(TEST_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../test) + SET(RUNJSONTESTS_PATH ${TEST_DIR}/runjsontests.py) + ADD_CUSTOM_TARGET(jsoncpp_readerwriter_tests ALL + "${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data" + DEPENDS jsontestrunner_exe jsoncpp_test + ) + ADD_CUSTOM_TARGET(jsoncpp_check DEPENDS jsoncpp_readerwriter_tests) +ENDIF(PYTHONINTERP_FOUND) diff --git a/3rdparty/jsoncpp/src/jsontestrunner/main.cpp b/3rdparty/jsoncpp/src/jsontestrunner/main.cpp new file mode 100644 index 00000000000..ba985877db4 --- /dev/null +++ b/3rdparty/jsoncpp/src/jsontestrunner/main.cpp @@ -0,0 +1,277 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +/* This executable is used for testing parser/writer using real JSON files. + */ + +#include <json/json.h> +#include <algorithm> // sort +#include <stdio.h> + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +#pragma warning(disable : 4996) // disable fopen deprecation warning +#endif + +static std::string normalizeFloatingPointStr(double value) { + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) + sprintf_s(buffer, sizeof(buffer), "%.16g", value); +#else + snprintf(buffer, sizeof(buffer), "%.16g", value); +#endif + buffer[sizeof(buffer) - 1] = 0; + std::string s(buffer); + std::string::size_type index = s.find_last_of("eE"); + if (index != std::string::npos) { + std::string::size_type hasSign = + (s[index + 1] == '+' || s[index + 1] == '-') ? 
1 : 0; + std::string::size_type exponentStartIndex = index + 1 + hasSign; + std::string normalized = s.substr(0, exponentStartIndex); + std::string::size_type indexDigit = + s.find_first_not_of('0', exponentStartIndex); + std::string exponent = "0"; + if (indexDigit != + std::string::npos) // There is an exponent different from 0 + { + exponent = s.substr(indexDigit); + } + return normalized + exponent; + } + return s; +} + +static std::string readInputTestFile(const char* path) { + FILE* file = fopen(path, "rb"); + if (!file) + return std::string(""); + fseek(file, 0, SEEK_END); + long size = ftell(file); + fseek(file, 0, SEEK_SET); + std::string text; + char* buffer = new char[size + 1]; + buffer[size] = 0; + if (fread(buffer, 1, size, file) == (unsigned long)size) + text = buffer; + fclose(file); + delete[] buffer; + return text; +} + +static void +printValueTree(FILE* fout, Json::Value& value, const std::string& path = ".") { + if (value.hasComment(Json::commentBefore)) { + fprintf(fout, "%s\n", value.getComment(Json::commentBefore).c_str()); + } + switch (value.type()) { + case Json::nullValue: + fprintf(fout, "%s=null\n", path.c_str()); + break; + case Json::intValue: + fprintf(fout, + "%s=%s\n", + path.c_str(), + Json::valueToString(value.asLargestInt()).c_str()); + break; + case Json::uintValue: + fprintf(fout, + "%s=%s\n", + path.c_str(), + Json::valueToString(value.asLargestUInt()).c_str()); + break; + case Json::realValue: + fprintf(fout, + "%s=%s\n", + path.c_str(), + normalizeFloatingPointStr(value.asDouble()).c_str()); + break; + case Json::stringValue: + fprintf(fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str()); + break; + case Json::booleanValue: + fprintf(fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false"); + break; + case Json::arrayValue: { + fprintf(fout, "%s=[]\n", path.c_str()); + int size = value.size(); + for (int index = 0; index < size; ++index) { + static char buffer[16]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) + sprintf_s(buffer, sizeof(buffer), "[%d]", index); +#else + snprintf(buffer, sizeof(buffer), "[%d]", index); +#endif + printValueTree(fout, value[index], path + buffer); + } + } break; + case Json::objectValue: { + fprintf(fout, "%s={}\n", path.c_str()); + Json::Value::Members members(value.getMemberNames()); + std::sort(members.begin(), members.end()); + std::string suffix = *(path.end() - 1) == '.' ? 
"" : "."; + for (Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it) { + const std::string& name = *it; + printValueTree(fout, value[name], path + suffix + name); + } + } break; + default: + break; + } + + if (value.hasComment(Json::commentAfter)) { + fprintf(fout, "%s\n", value.getComment(Json::commentAfter).c_str()); + } +} + +static int parseAndSaveValueTree(const std::string& input, + const std::string& actual, + const std::string& kind, + Json::Value& root, + const Json::Features& features, + bool parseOnly) { + Json::Reader reader(features); + bool parsingSuccessful = reader.parse(input, root); + if (!parsingSuccessful) { + printf("Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormattedErrorMessages().c_str()); + return 1; + } + + if (!parseOnly) { + FILE* factual = fopen(actual.c_str(), "wt"); + if (!factual) { + printf("Failed to create %s actual file.\n", kind.c_str()); + return 2; + } + printValueTree(factual, root); + fclose(factual); + } + return 0; +} + +static int rewriteValueTree(const std::string& rewritePath, + const Json::Value& root, + std::string& rewrite) { + // Json::FastWriter writer; + // writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write(root); + FILE* fout = fopen(rewritePath.c_str(), "wt"); + if (!fout) { + printf("Failed to create rewrite file: %s\n", rewritePath.c_str()); + return 2; + } + fprintf(fout, "%s\n", rewrite.c_str()); + fclose(fout); + return 0; +} + +static std::string removeSuffix(const std::string& path, + const std::string& extension) { + if (extension.length() >= path.length()) + return std::string(""); + std::string suffix = path.substr(path.length() - extension.length()); + if (suffix != extension) + return std::string(""); + return path.substr(0, path.length() - extension.length()); +} + +static void printConfig() { +// Print the configuration used to compile JsonCpp +#if defined(JSON_NO_INT64) + printf("JSON_NO_INT64=1\n"); +#else + printf("JSON_NO_INT64=0\n"); +#endif +} + +static int printUsage(const char* argv[]) { + printf("Usage: %s [--strict] input-json-file", argv[0]); + return 3; +} + +int parseCommandLine(int argc, + const char* argv[], + Json::Features& features, + std::string& path, + bool& parseOnly) { + parseOnly = false; + if (argc < 2) { + return printUsage(argv); + } + + int index = 1; + if (std::string(argv[1]) == "--json-checker") { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if (std::string(argv[1]) == "--json-config") { + printConfig(); + return 3; + } + + if (index == argc || index + 1 < argc) { + return printUsage(argv); + } + + path = argv[index]; + return 0; +} + +int main(int argc, const char* argv[]) { + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine(argc, argv, features, path, parseOnly); + if (exitCode != 0) { + return exitCode; + } + + try { + std::string input = readInputTestFile(path.c_str()); + if (input.empty()) { + printf("Failed to read input or empty input: %s\n", path.c_str()); + return 3; + } + + std::string basePath = removeSuffix(argv[1], ".json"); + if (!parseOnly && basePath.empty()) { + printf("Bad input path. 
Path does not end with '.expected':\n%s\n", + path.c_str()); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( + input, actualPath, "input", root, features, parseOnly); + if (exitCode == 0 && !parseOnly) { + std::string rewrite; + exitCode = rewriteValueTree(rewritePath, root, rewrite); + if (exitCode == 0) { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree(rewrite, + rewriteActualPath, + "rewrite", + rewriteRoot, + features, + parseOnly); + } + } + } + catch (const std::exception& e) { + printf("Unhandled exception:\n%s\n", e.what()); + exitCode = 1; + } + + return exitCode; +} diff --git a/3rdparty/jsoncpp/src/jsontestrunner/sconscript b/3rdparty/jsoncpp/src/jsontestrunner/sconscript new file mode 100644 index 00000000000..6e68e31533e --- /dev/null +++ b/3rdparty/jsoncpp/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/3rdparty/jsoncpp/src/lib_json/CMakeLists.txt b/3rdparty/jsoncpp/src/lib_json/CMakeLists.txt new file mode 100644 index 00000000000..d0f6a5ea5ab --- /dev/null +++ b/3rdparty/jsoncpp/src/lib_json/CMakeLists.txt @@ -0,0 +1,70 @@ +OPTION(JSONCPP_LIB_BUILD_SHARED "Build jsoncpp_lib as a shared library." OFF) +IF(BUILD_SHARED_LIBS) + SET(JSONCPP_LIB_BUILD_SHARED ON) +ENDIF(BUILD_SHARED_LIBS) + +IF(JSONCPP_LIB_BUILD_SHARED) + SET(JSONCPP_LIB_TYPE SHARED) + ADD_DEFINITIONS( -DJSON_DLL_BUILD ) +ELSE(JSONCPP_LIB_BUILD_SHARED) + SET(JSONCPP_LIB_TYPE STATIC) +ENDIF(JSONCPP_LIB_BUILD_SHARED) + +if( CMAKE_COMPILER_IS_GNUCXX ) + #Get compiler version. 
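+  # Newer CMake releases (roughly 2.8.10 and later) expose the same value
+  # directly as ${CMAKE_CXX_COMPILER_VERSION}; invoking the compiler with
+  # -dumpversion, as done below, also works with older CMake versions.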
+ execute_process( COMMAND ${CMAKE_CXX_COMPILER} -dumpversion + OUTPUT_VARIABLE GNUCXX_VERSION ) + + #-Werror=* was introduced -after- GCC 4.1.2 + if( GNUCXX_VERSION VERSION_GREATER 4.1.2 ) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing") + endif() +endif( CMAKE_COMPILER_IS_GNUCXX ) + +SET( JSONCPP_INCLUDE_DIR ../../include ) + +SET( PUBLIC_HEADERS + ${JSONCPP_INCLUDE_DIR}/json/config.h + ${JSONCPP_INCLUDE_DIR}/json/forwards.h + ${JSONCPP_INCLUDE_DIR}/json/features.h + ${JSONCPP_INCLUDE_DIR}/json/value.h + ${JSONCPP_INCLUDE_DIR}/json/reader.h + ${JSONCPP_INCLUDE_DIR}/json/writer.h + ${JSONCPP_INCLUDE_DIR}/json/assertions.h + ${JSONCPP_INCLUDE_DIR}/json/version.h + ) + +SOURCE_GROUP( "Public API" FILES ${PUBLIC_HEADERS} ) + +ADD_LIBRARY( jsoncpp_lib ${JSONCPP_LIB_TYPE} + ${PUBLIC_HEADERS} + json_tool.h + json_reader.cpp + json_batchallocator.h + json_valueiterator.inl + json_value.cpp + json_writer.cpp + version.h.in + ) +SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp ) +SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR} ) + +IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11) + TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC + $<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}> + $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}> + ) +ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11) + +# Install instructions for this target +IF(JSONCPP_WITH_CMAKE_PACKAGE) + SET(INSTALL_EXPORT EXPORT jsoncpp) +ELSE(JSONCPP_WITH_CMAKE_PACKAGE) + SET(INSTALL_EXPORT) +ENDIF(JSONCPP_WITH_CMAKE_PACKAGE) + +INSTALL( TARGETS jsoncpp_lib ${INSTALL_EXPORT} + RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR} + LIBRARY DESTINATION ${LIBRARY_INSTALL_DIR} + ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR} +) diff --git a/3rdparty/jsoncpp/src/lib_json/json_batchallocator.h b/3rdparty/jsoncpp/src/lib_json/json_batchallocator.h new file mode 100644 index 00000000000..2fbef7a8601 --- /dev/null +++ b/3rdparty/jsoncpp/src/lib_json/json_batchallocator.h @@ -0,0 +1,121 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +#define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +#include <stdlib.h> +#include <assert.h> + +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated + * objects can be destroyed at once. The memory can be either released or reused + * for future allocation. + * + * The in-place new operator must be used to construct the object using the + * pointer returned by allocate. + */ +template <typename AllocatedType, const unsigned int objectPerAllocation> +class BatchAllocator { +public: + BatchAllocator(unsigned int objectsPerPage = 255) + : freeHead_(0), objectsPerPage_(objectsPerPage) { + // printf( "Size: %d => %s\n", sizeof(AllocatedType), + // typeid(AllocatedType).name() ); + assert(sizeof(AllocatedType) * objectPerAllocation >= + sizeof(AllocatedType*)); // We must be able to store a slist in the + // object free space. 
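+    // Usage sketch for this allocator (an illustration, not code taken from
+    // this library): construct with placement new in the returned slot,
+    // destroy manually, then hand the slot back for reuse:
+    //   BatchAllocator<Value, 1> allocator;
+    //   Value* v = new (allocator.allocate()) Value(42);
+    //   v->~Value();
+    //   allocator.release(v);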
+ assert(objectsPerPage >= 16); + batches_ = allocateBatch(0); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() { + for (BatchInfo* batch = batches_; batch;) { + BatchInfo* nextBatch = batch->next_; + free(batch); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects + /// constructors. + AllocatedType* allocate() { + if (freeHead_) // returns node from free list. + { + AllocatedType* object = freeHead_; + freeHead_ = *(AllocatedType**)object; + return object; + } + if (currentBatch_->used_ == currentBatch_->end_) { + currentBatch_ = currentBatch_->next_; + while (currentBatch_ && currentBatch_->used_ == currentBatch_->end_) + currentBatch_ = currentBatch_->next_; + + if (!currentBatch_) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch(objectsPerPage_); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType* allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the + /// object. + void release(AllocatedType* object) { + assert(object != 0); + *(AllocatedType**)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo { + BatchInfo* next_; + AllocatedType* used_; + AllocatedType* end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator(const BatchAllocator&); + void operator=(const BatchAllocator&); + + static BatchInfo* allocateBatch(unsigned int objectsPerPage) { + const unsigned int mallocSize = + sizeof(BatchInfo) - sizeof(AllocatedType) * objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo* batch = static_cast<BatchInfo*>(malloc(mallocSize)); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo* batches_; + BatchInfo* currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType* freeHead_; + unsigned int objectsPerPage_; +}; + +} // namespace Json + +#endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED diff --git a/3rdparty/jsoncpp/src/lib_json/json_internalarray.inl b/3rdparty/jsoncpp/src/lib_json/json_internalarray.inl new file mode 100644 index 00000000000..9ee15e9dbf2 --- /dev/null +++ b/3rdparty/jsoncpp/src/lib_json/json_internalarray.inl @@ -0,0 +1,360 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() {} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator { +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() {} + + virtual ValueInternalArray* newArray() { return new ValueInternalArray(); } + + virtual ValueInternalArray* newArrayCopy(const ValueInternalArray& other) { + return new ValueInternalArray(other); + } + + virtual void destructArray(ValueInternalArray* array) { delete array; } + + virtual void + reallocateArrayPageIndex(Value**& indexes, + ValueInternalArray::PageIndex& indexCount, + ValueInternalArray::PageIndex minNewIndexCount) { + ValueInternalArray::PageIndex newIndexCount = (indexCount * 3) / 2 + 1; + if (minNewIndexCount > newIndexCount) + newIndexCount = minNewIndexCount; + void* newIndexes = realloc(indexes, sizeof(Value*) * newIndexCount); + JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc."); + indexCount = newIndexCount; + indexes = static_cast<Value**>(newIndexes); + } + virtual void releaseArrayPageIndex(Value** indexes, + ValueInternalArray::PageIndex indexCount) { + if (indexes) + free(indexes); + } + + virtual Value* allocateArrayPage() { + return static_cast<Value*>( + malloc(sizeof(Value) * ValueInternalArray::itemsPerPage)); + } + + virtual void releaseArrayPage(Value* value) { + if (value) + free(value); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator { +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() {} + + virtual ValueInternalArray* newArray() { + ValueInternalArray* array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray* newArrayCopy(const ValueInternalArray& other) { + ValueInternalArray* array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(other); // placement new + return array; + } + + virtual void destructArray(ValueInternalArray* array) { + if (array) { + array->~ValueInternalArray(); + arraysAllocator_.release(array); + } + } + + virtual void + reallocateArrayPageIndex(Value**& indexes, + ValueInternalArray::PageIndex& indexCount, + ValueInternalArray::PageIndex minNewIndexCount) { + ValueInternalArray::PageIndex newIndexCount = (indexCount * 3) / 2 + 1; + if (minNewIndexCount > newIndexCount) + newIndexCount = minNewIndexCount; + void* newIndexes = realloc(indexes, sizeof(Value*) * newIndexCount); + JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc."); + indexCount = newIndexCount; + indexes = static_cast<Value**>(newIndexes); + } + virtual void 
releaseArrayPageIndex(Value** indexes, + ValueInternalArray::PageIndex indexCount) { + if (indexes) + free(indexes); + } + + virtual Value* allocateArrayPage() { + return static_cast<Value*>(pagesAllocator_.allocate()); + } + + virtual void releaseArrayPage(Value* value) { + if (value) + pagesAllocator_.release(value); + } + +private: + BatchAllocator<ValueInternalArray, 1> arraysAllocator_; + BatchAllocator<Value, ValueInternalArray::itemsPerPage> pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator*& arrayAllocator() { + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator* arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before + // main(). + } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool ValueInternalArray::equals(const IteratorState& x, + const IteratorState& other) { + return x.array_ == other.array_ && + x.currentItemIndex_ == other.currentItemIndex_ && + x.currentPageIndex_ == other.currentPageIndex_; +} + +void ValueInternalArray::increment(IteratorState& it) { + JSON_ASSERT_MESSAGE( + it.array_ && (it.currentPageIndex_ - it.array_->pages_) * itemsPerPage + + it.currentItemIndex_ != + it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end"); + ++(it.currentItemIndex_); + if (it.currentItemIndex_ == itemsPerPage) { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + +void ValueInternalArray::decrement(IteratorState& it) { + JSON_ASSERT_MESSAGE( + it.array_ && it.currentPageIndex_ == it.array_->pages_ && + it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end"); + if (it.currentItemIndex_ == 0) { + it.currentItemIndex_ = itemsPerPage - 1; + --(it.currentPageIndex_); + } else { + --(it.currentItemIndex_); + } +} + +Value& ValueInternalArray::unsafeDereference(const IteratorState& it) { + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + +Value& ValueInternalArray::dereference(const IteratorState& it) { + JSON_ASSERT_MESSAGE( + it.array_ && (it.currentPageIndex_ - it.array_->pages_) * itemsPerPage + + it.currentItemIndex_ < + it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator"); + return unsafeDereference(it); +} + +void ValueInternalArray::makeBeginIterator(IteratorState& it) const { + it.array_ = const_cast<ValueInternalArray*>(this); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + +void ValueInternalArray::makeIterator(IteratorState& it, + ArrayIndex index) const { + it.array_ = const_cast<ValueInternalArray*>(this); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + +void ValueInternalArray::makeEndIterator(IteratorState& it) const { + makeIterator(it, size_); +} + +ValueInternalArray::ValueInternalArray() : pages_(0), size_(0), pageCount_(0) {} + +ValueInternalArray::ValueInternalArray(const ValueInternalArray& other) + : pages_(0), size_(other.size_), pageCount_(0) { + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex(pages_, pageCount_, minNewPages); + JSON_ASSERT_MESSAGE(pageCount_ >= minNewPages, + 
"ValueInternalArray::reserve(): bad reallocation"); + IteratorState itOther; + other.makeBeginIterator(itOther); + Value* value; + for (ArrayIndex index = 0; index < size_; ++index, increment(itOther)) { + if (index % itemsPerPage == 0) { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value(dereference(itOther)); + } +} + +ValueInternalArray& ValueInternalArray::operator=(ValueInternalArray other) { + swap(other); + return *this; +} + +ValueInternalArray::~ValueInternalArray() { + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator(it); + makeEndIterator(itEnd); + for (; !equals(it, itEnd); increment(it)) { + Value* value = &dereference(it); + value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for (PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex) + arrayAllocator()->releaseArrayPage(pages_[pageIndex]); + // release pages index + arrayAllocator()->releaseArrayPageIndex(pages_, pageCount_); +} + +void ValueInternalArray::swap(ValueInternalArray& other) { + Value** tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void ValueInternalArray::clear() { + ValueInternalArray dummy; + swap(dummy); +} + +void ValueInternalArray::resize(ArrayIndex newSize) { + if (newSize == 0) + clear(); + else if (newSize < size_) { + IteratorState it; + IteratorState itEnd; + makeIterator(it, newSize); + makeIterator(itEnd, size_); + for (; !equals(it, itEnd); increment(it)) { + Value* value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for (; pageIndex < lastPageIndex; ++pageIndex) + arrayAllocator()->releaseArrayPage(pages_[pageIndex]); + size_ = newSize; + } else if (newSize > size_) + resolveReference(newSize); +} + +void ValueInternalArray::makeIndexValid(ArrayIndex index) { + // Need to enlarge page index ? + if (index >= pageCount_ * itemsPerPage) { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex(pages_, pageCount_, minNewPages); + JSON_ASSERT_MESSAGE(pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation"); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = (size_ % itemsPerPage) != 0 + ? 
size_ - (size_ % itemsPerPage) + itemsPerPage + : size_; + if (nextPageIndex <= index) { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for (; pageToAllocate-- > 0; ++pageIndex) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator(it, size_); + size_ = index + 1; + makeIterator(itEnd, size_); + for (; !equals(it, itEnd); increment(it)) { + Value* value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value& ValueInternalArray::resolveReference(ArrayIndex index) { + if (index >= size_) + makeIndexValid(index); + return pages_[index / itemsPerPage][index % itemsPerPage]; +} + +Value* ValueInternalArray::find(ArrayIndex index) const { + if (index >= size_) + return 0; + return &(pages_[index / itemsPerPage][index % itemsPerPage]); +} + +ValueInternalArray::ArrayIndex ValueInternalArray::size() const { + return size_; +} + +int ValueInternalArray::distance(const IteratorState& x, + const IteratorState& y) { + return indexOf(y) - indexOf(x); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf(const IteratorState& iterator) { + if (!iterator.array_) + return ArrayIndex(-1); + return ArrayIndex((iterator.currentPageIndex_ - iterator.array_->pages_) * + itemsPerPage + + iterator.currentItemIndex_); +} + +int ValueInternalArray::compare(const ValueInternalArray& other) const { + int sizeDiff(size_ - other.size_); + if (sizeDiff != 0) + return sizeDiff; + + for (ArrayIndex index = 0; index < size_; ++index) { + int diff = pages_[index / itemsPerPage][index % itemsPerPage].compare( + other.pages_[index / itemsPerPage][index % itemsPerPage]); + if (diff != 0) + return diff; + } + return 0; +} + +} // namespace Json diff --git a/3rdparty/jsoncpp/src/lib_json/json_internalmap.inl b/3rdparty/jsoncpp/src/lib_json/json_internalmap.inl new file mode 100644 index 00000000000..ef3f3302dc9 --- /dev/null +++ b/3rdparty/jsoncpp/src/lib_json/json_internalmap.inl @@ -0,0 +1,473 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, + * sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
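+ * (Illustrative cross-reference, stated as an assumption rather than upstream
+ * documentation: DefaultValueMapAllocator::allocateMapLink() below does
+ * exactly this, zeroing the freshly allocated link with memset so that every
+ * key/item slot starts out in the "available" state.)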
+ */ +ValueInternalLink::ValueInternalLink() : previous_(0), next_(0) {} + +ValueInternalLink::~ValueInternalLink() { + for (int index = 0; index < itemPerLink; ++index) { + if (!items_[index].isItemAvailable()) { + if (!items_[index].isMemberNameStatic()) + free(keys_[index]); + } else + break; + } +} + +ValueMapAllocator::~ValueMapAllocator() {} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator { +public: // overridden from ValueMapAllocator + virtual ValueInternalMap* newMap() { return new ValueInternalMap(); } + + virtual ValueInternalMap* newMapCopy(const ValueInternalMap& other) { + return new ValueInternalMap(other); + } + + virtual void destructMap(ValueInternalMap* map) { delete map; } + + virtual ValueInternalLink* allocateMapBuckets(unsigned int size) { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets(ValueInternalLink* links) { delete[] links; } + + virtual ValueInternalLink* allocateMapLink() { + return new ValueInternalLink(); + } + + virtual void releaseMapLink(ValueInternalLink* link) { delete link; } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator { +public: // overridden from ValueMapAllocator + virtual ValueInternalMap* newMap() { + ValueInternalMap* map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap* newMapCopy(const ValueInternalMap& other) { + ValueInternalMap* map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(other); // placement new + return map; + } + + virtual void destructMap(ValueInternalMap* map) { + if (map) { + map->~ValueInternalMap(); + mapsAllocator_.release(map); + } + } + + virtual ValueInternalLink* allocateMapBuckets(unsigned int size) { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets(ValueInternalLink* links) { delete[] links; } + + virtual ValueInternalLink* allocateMapLink() { + ValueInternalLink* link = linksAllocator_.allocate(); + memset(link, 0, sizeof(ValueInternalLink)); + return link; + } + + virtual void releaseMapLink(ValueInternalLink* link) { + link->~ValueInternalLink(); + linksAllocator_.release(link); + } + +private: + BatchAllocator<ValueInternalMap, 1> mapsAllocator_; + BatchAllocator<ValueInternalLink, 1> linksAllocator_; +}; +#endif + +static ValueMapAllocator*& mapAllocator() { + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator* mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() { + mapAllocator(); // ensure mapAllocator() statics are initialized before + // main(). + } +} dummyMapAllocatorInitializer; + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + +ValueInternalMap::ValueInternalMap() + : buckets_(0), tailLink_(0), bucketsSize_(0), itemCount_(0) {} + +ValueInternalMap::ValueInternalMap(const ValueInternalMap& other) + : buckets_(0), tailLink_(0), bucketsSize_(0), itemCount_(0) { + reserve(other.itemCount_); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator(it); + other.makeEndIterator(itEnd); + for (; !equals(it, itEnd); increment(it)) { + bool isStatic; + const char* memberName = key(it, isStatic); + const Value& aValue = value(it); + resolveReference(memberName, isStatic) = aValue; + } +} + +ValueInternalMap& ValueInternalMap::operator=(ValueInternalMap other) { + swap(other); + return *this; +} + +ValueInternalMap::~ValueInternalMap() { + if (buckets_) { + for (BucketIndex bucketIndex = 0; bucketIndex < bucketsSize_; + ++bucketIndex) { + ValueInternalLink* link = buckets_[bucketIndex].next_; + while (link) { + ValueInternalLink* linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink(linkToRelease); + } + } + mapAllocator()->releaseMapBuckets(buckets_); + } +} + +void ValueInternalMap::swap(ValueInternalMap& other) { + ValueInternalLink* tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink* tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + +void ValueInternalMap::clear() { + ValueInternalMap dummy; + swap(dummy); +} + +ValueInternalMap::BucketIndex ValueInternalMap::size() const { + return itemCount_; +} + +bool ValueInternalMap::reserveDelta(BucketIndex growth) { + return reserve(itemCount_ + growth); +} + +bool ValueInternalMap::reserve(BucketIndex newItemCount) { + if (!buckets_ && newItemCount > 0) { + buckets_ = mapAllocator()->allocateMapBuckets(1); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } + // BucketIndex idealBucketCount = (newItemCount + + // ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + +const Value* ValueInternalMap::find(const char* key) const { + if (!bucketsSize_) + return 0; + HashKey hashedKey = hash(key); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for (const ValueInternalLink* current = &buckets_[bucketIndex]; current != 0; + current = current->next_) { + for (BucketIndex index = 0; index < ValueInternalLink::itemPerLink; + ++index) { + if (current->items_[index].isItemAvailable()) + return 0; + if (strcmp(key, current->keys_[index]) == 0) + return ¤t->items_[index]; + } + } + return 0; +} + +Value* ValueInternalMap::find(const char* key) { + const ValueInternalMap* constThis = this; + return const_cast<Value*>(constThis->find(key)); +} + +Value& ValueInternalMap::resolveReference(const char* key, bool isStatic) { + HashKey hashedKey = hash(key); + if (bucketsSize_) { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink** previous = 0; + BucketIndex index; + for (ValueInternalLink* current = &buckets_[bucketIndex]; current != 0; + previous = ¤t->next_, current = current->next_) { + for (index = 0; index < ValueInternalLink::itemPerLink; ++index) { + if (current->items_[index].isItemAvailable()) + return setNewItem(key, isStatic, current, index); + if (strcmp(key, 
current->keys_[index]) == 0) + return current->items_[index]; + } + } + } + + reserveDelta(1); + return unsafeAdd(key, isStatic, hashedKey); +} + +void ValueInternalMap::remove(const char* key) { + HashKey hashedKey = hash(key); + if (!bucketsSize_) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for (ValueInternalLink* link = &buckets_[bucketIndex]; link != 0; + link = link->next_) { + BucketIndex index; + for (index = 0; index < ValueInternalLink::itemPerLink; ++index) { + if (link->items_[index].isItemAvailable()) + return; + if (strcmp(key, link->keys_[index]) == 0) { + doActualRemove(link, index, bucketIndex); + return; + } + } + } +} + +void ValueInternalMap::doActualRemove(ValueInternalLink* link, + BucketIndex index, + BucketIndex bucketIndex) { + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's + // empty) + ValueInternalLink*& lastLink = getLastLinkInBucket(index); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for (; lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex) // may be optimized with dicotomic search + { + if (lastLink->items_[lastItemIndex].isItemAvailable()) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value* valueToDelete = &link->items_[index]; + Value* valueToPreserve = &lastLink->items_[lastUsedIndex]; + if (valueToDelete != valueToPreserve) + valueToDelete->swap(*valueToPreserve); + if (lastUsedIndex == 0) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink* linkPreviousToLast = lastLink->previous_; + if (linkPreviousToLast != 0) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink(lastLink); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } else { + Value dummy; + valueToPreserve->swap(dummy); // restore deleted to default Value. + valueToPreserve->setItemUsed(false); + } + --itemCount_; +} + +ValueInternalLink*& +ValueInternalMap::getLastLinkInBucket(BucketIndex bucketIndex) { + if (bucketIndex == bucketsSize_ - 1) + return tailLink_; + ValueInternalLink*& previous = buckets_[bucketIndex + 1].previous_; + if (!previous) + previous = &buckets_[bucketIndex]; + return previous; +} + +Value& ValueInternalMap::setNewItem(const char* key, + bool isStatic, + ValueInternalLink* link, + BucketIndex index) { + char* duplicatedKey = makeMemberName(key); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic(isStatic); + return link->items_[index]; // items already default constructed. 
+} + +Value& +ValueInternalMap::unsafeAdd(const char* key, bool isStatic, HashKey hashedKey) { + JSON_ASSERT_MESSAGE(bucketsSize_ > 0, + "ValueInternalMap::unsafeAdd(): internal logic error."); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink*& previousLink = getLastLinkInBucket(bucketIndex); + ValueInternalLink* link = previousLink; + BucketIndex index; + for (index = 0; index < ValueInternalLink::itemPerLink; ++index) { + if (link->items_[index].isItemAvailable()) + break; + } + if (index == ValueInternalLink::itemPerLink) // need to add a new page + { + ValueInternalLink* newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem(key, isStatic, link, index); +} + +ValueInternalMap::HashKey ValueInternalMap::hash(const char* key) const { + HashKey hash = 0; + while (*key) + hash += *key++ * 37; + return hash; +} + +int ValueInternalMap::compare(const ValueInternalMap& other) const { + int sizeDiff(itemCount_ - other.itemCount_); + if (sizeDiff != 0) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare + // values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator(it); + makeEndIterator(itEnd); + for (; !equals(it, itEnd); increment(it)) { + if (!other.find(key(it))) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator(it); + for (; !equals(it, itEnd); increment(it)) { + const Value* otherValue = other.find(key(it)); + int valueDiff = value(it).compare(*otherValue); + if (valueDiff != 0) + return valueDiff; + } + return 0; +} + +void ValueInternalMap::makeBeginIterator(IteratorState& it) const { + it.map_ = const_cast<ValueInternalMap*>(this); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + +void ValueInternalMap::makeEndIterator(IteratorState& it) const { + it.map_ = const_cast<ValueInternalMap*>(this); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + +bool ValueInternalMap::equals(const IteratorState& x, + const IteratorState& other) { + return x.map_ == other.map_ && x.bucketIndex_ == other.bucketIndex_ && + x.link_ == other.link_ && x.itemIndex_ == other.itemIndex_; +} + +void ValueInternalMap::incrementBucket(IteratorState& iterator) { + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( + iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end."); + if (iterator.bucketIndex_ == iterator.map_->bucketsSize_) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + +void ValueInternalMap::increment(IteratorState& iterator) { + JSON_ASSERT_MESSAGE(iterator.map_, + "Attempting to iterator using invalid iterator."); + ++iterator.itemIndex_; + if (iterator.itemIndex_ == ValueInternalLink::itemPerLink) { + JSON_ASSERT_MESSAGE( + iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end."); + iterator.link_ = iterator.link_->next_; + if (iterator.link_ == 0) + incrementBucket(iterator); + } else if (iterator.link_->items_[iterator.itemIndex_].isItemAvailable()) { + incrementBucket(iterator); + } +} + +void ValueInternalMap::decrement(IteratorState& iterator) { + if (iterator.itemIndex_ == 0) { + JSON_ASSERT_MESSAGE(iterator.map_, + "Attempting to iterate using invalid iterator."); + if (iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_]) { + 
JSON_ASSERT_MESSAGE(iterator.bucketIndex_ > 0, + "Attempting to iterate beyond beginning."); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + +const char* ValueInternalMap::key(const IteratorState& iterator) { + JSON_ASSERT_MESSAGE(iterator.link_, + "Attempting to iterate using invalid iterator."); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char* ValueInternalMap::key(const IteratorState& iterator, + bool& isStatic) { + JSON_ASSERT_MESSAGE(iterator.link_, + "Attempting to iterate using invalid iterator."); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +Value& ValueInternalMap::value(const IteratorState& iterator) { + JSON_ASSERT_MESSAGE(iterator.link_, + "Attempting to iterate using invalid iterator."); + return iterator.link_->items_[iterator.itemIndex_]; +} + +int ValueInternalMap::distance(const IteratorState& x, const IteratorState& y) { + int offset = 0; + IteratorState it = x; + while (!equals(it, y)) + increment(it); + return offset; +} + +} // namespace Json diff --git a/3rdparty/jsoncpp/src/lib_json/json_reader.cpp b/3rdparty/jsoncpp/src/lib_json/json_reader.cpp new file mode 100644 index 00000000000..c5111f8d708 --- /dev/null +++ b/3rdparty/jsoncpp/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +// Copyright 2007-2011 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGAMATION) +#include <json/assertions.h> +#include <json/reader.h> +#include <json/value.h> +#include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +#include <utility> +#include <cstdio> +#include <cassert> +#include <cstring> +#include <istream> + +#if defined(_MSC_VER) && _MSC_VER < 1500 // VC++ 8.0 and below +#define snprintf _snprintf +#endif + +#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0 +// Disable warning about strdup being deprecated. 
+#pragma warning(disable : 4996) +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_(true), strictRoot_(false), + allowDroppedNullPlaceholders_(false), allowNumericKeys_(false) {} + +Features Features::all() { return Features(); } + +Features Features::strictMode() { + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + features.allowDroppedNullPlaceholders_ = false; + features.allowNumericKeys_ = false; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + +static inline bool in(Reader::Char c, + Reader::Char c1, + Reader::Char c2, + Reader::Char c3, + Reader::Char c4) { + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool in(Reader::Char c, + Reader::Char c1, + Reader::Char c2, + Reader::Char c3, + Reader::Char c4, + Reader::Char c5) { + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + +static bool containsNewLine(Reader::Location begin, Reader::Location end) { + for (; begin < end; ++begin) + if (*begin == '\n' || *begin == '\r') + return true; + return false; +} + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() + : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(), + lastValue_(), commentsBefore_(), features_(Features::all()), + collectComments_() {} + +Reader::Reader(const Features& features) + : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(), + lastValue_(), commentsBefore_(), features_(features), collectComments_() { +} + +bool +Reader::parse(const std::string& document, Value& root, bool collectComments) { + document_ = document; + const char* begin = document_.c_str(); + const char* end = begin + document_.length(); + return parse(begin, end, root, collectComments); +} + +bool Reader::parse(std::istream& sin, Value& root, bool collectComments) { + // std::istream_iterator<char> begin(sin); + // std::istream_iterator<char> end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. 
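Tying the parse() overloads in this file together, a minimal caller-side sketch (editorial example, assuming only the public headers <json/reader.h> and <json/value.h> are on the include path):

    #include <json/reader.h>
    #include <json/value.h>
    #include <iostream>
    #include <sstream>

    int main() {
      // Features::all() (the default) accepts comments; strictMode() rejects
      // them and also requires the root to be an array or an object.
      Json::Reader reader(Json::Features::strictMode());
      Json::Value root;
      std::istringstream doc("{ \"name\": \"demo\", \"count\": 3 }");
      if (!reader.parse(doc, root, /* collectComments = */ false)) {
        std::cerr << reader.getFormattedErrorMessages();
        return 1;
      }
      std::cout << root["name"].asString() << " " << root["count"].asInt() << "\n";
      return 0;
    }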
+ std::string doc; + std::getline(sin, doc, (char)EOF); + return parse(doc, root, collectComments); +} + +bool Reader::parse(const char* beginDoc, + const char* endDoc, + Value& root, + bool collectComments) { + if (!features_.allowComments_) { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while (!nodes_.empty()) + nodes_.pop(); + nodes_.push(&root); + + bool successful = readValue(); + Token token; + skipCommentTokens(token); + if (collectComments_ && !commentsBefore_.empty()) + root.setComment(commentsBefore_, commentAfter); + if (features_.strictRoot_) { + if (!root.isArray() && !root.isObject()) { + // Set error location to start of doc, ideally should be first token found + // in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( + "A valid JSON document must be either an array or an object value.", + token); + return false; + } + } + return successful; +} + +bool Reader::readValue() { + Token token; + skipCommentTokens(token); + bool successful = true; + + if (collectComments_ && !commentsBefore_.empty()) { + // Remove newline characters at the end of the comments + size_t lastNonNewline = commentsBefore_.find_last_not_of("\r\n"); + if (lastNonNewline != std::string::npos) { + commentsBefore_.erase(lastNonNewline + 1); + } else { + commentsBefore_.clear(); + } + + currentValue().setComment(commentsBefore_, commentBefore); + commentsBefore_ = ""; + } + + switch (token.type_) { + case tokenObjectBegin: + successful = readObject(token); + currentValue().setOffsetLimit(current_ - begin_); + break; + case tokenArrayBegin: + successful = readArray(token); + currentValue().setOffsetLimit(current_ - begin_); + break; + case tokenNumber: + successful = decodeNumber(token); + break; + case tokenString: + successful = decodeString(token); + break; + case tokenTrue: + currentValue() = true; + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + break; + case tokenFalse: + currentValue() = false; + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + break; + case tokenNull: + currentValue() = Value(); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + break; + case tokenArraySeparator: + if (features_.allowDroppedNullPlaceholders_) { + // "Un-read" the current token and mark the current value as a null + // token. + current_--; + currentValue() = Value(); + currentValue().setOffsetStart(current_ - begin_ - 1); + currentValue().setOffsetLimit(current_ - begin_); + break; + } + // Else, fall through... 
+ default:
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ return addError("Syntax error: value, object or array expected.", token);
+ }
+
+ if (collectComments_) {
+ lastValueEnd_ = current_;
+ lastValue_ = &currentValue();
+ }
+
+ return successful;
+}
+
+void Reader::skipCommentTokens(Token& token) {
+ if (features_.allowComments_) {
+ do {
+ readToken(token);
+ } while (token.type_ == tokenComment);
+ } else {
+ readToken(token);
+ }
+}
+
+bool Reader::expectToken(TokenType type, Token& token, const char* message) {
+ readToken(token);
+ if (token.type_ != type)
+ return addError(message, token);
+ return true;
+}
+
+bool Reader::readToken(Token& token) {
+ skipSpaces();
+ token.start_ = current_;
+ Char c = getNextChar();
+ bool ok = true;
+ switch (c) {
+ case '{':
+ token.type_ = tokenObjectBegin;
+ break;
+ case '}':
+ token.type_ = tokenObjectEnd;
+ break;
+ case '[':
+ token.type_ = tokenArrayBegin;
+ break;
+ case ']':
+ token.type_ = tokenArrayEnd;
+ break;
+ case '"':
+ token.type_ = tokenString;
+ ok = readString();
+ break;
+ case '/':
+ token.type_ = tokenComment;
+ ok = readComment();
+ break;
+ case '0':
+ case '1':
+ case '2':
+ case '3':
+ case '4':
+ case '5':
+ case '6':
+ case '7':
+ case '8':
+ case '9':
+ case '-':
+ token.type_ = tokenNumber;
+ readNumber();
+ break;
+ case 't':
+ token.type_ = tokenTrue;
+ ok = match("rue", 3);
+ break;
+ case 'f':
+ token.type_ = tokenFalse;
+ ok = match("alse", 4);
+ break;
+ case 'n':
+ token.type_ = tokenNull;
+ ok = match("ull", 3);
+ break;
+ case ',':
+ token.type_ = tokenArraySeparator;
+ break;
+ case ':':
+ token.type_ = tokenMemberSeparator;
+ break;
+ case 0:
+ token.type_ = tokenEndOfStream;
+ break;
+ default:
+ ok = false;
+ break;
+ }
+ if (!ok)
+ token.type_ = tokenError;
+ token.end_ = current_;
+ return true;
+}
+
+void Reader::skipSpaces() {
+ while (current_ != end_) {
+ Char c = *current_;
+ if (c == ' ' || c == '\t' || c == '\r' || c == '\n')
+ ++current_;
+ else
+ break;
+ }
+}
+
+bool Reader::match(Location pattern, int patternLength) {
+ if (end_ - current_ < patternLength)
+ return false;
+ int index = patternLength;
+ while (index--)
+ if (current_[index] != pattern[index])
+ return false;
+ current_ += patternLength;
+ return true;
+}
+
+bool Reader::readComment() {
+ Location commentBegin = current_ - 1;
+ Char c = getNextChar();
+ bool successful = false;
+ if (c == '*')
+ successful = readCStyleComment();
+ else if (c == '/')
+ successful = readCppStyleComment();
+ if (!successful)
+ return false;
+
+ if (collectComments_) {
+ CommentPlacement placement = commentBefore;
+ if (lastValueEnd_ && !containsNewLine(lastValueEnd_, commentBegin)) {
+ if (c != '*' || !containsNewLine(commentBegin, current_))
+ placement = commentAfterOnSameLine;
+ }
+
+ addComment(commentBegin, current_, placement);
+ }
+ return true;
+}
+
+void
+Reader::addComment(Location begin, Location end, CommentPlacement placement) {
+ assert(collectComments_);
+ if (placement == commentAfterOnSameLine) {
+ assert(lastValue_ != 0);
+ lastValue_->setComment(std::string(begin, end), placement);
+ } else {
+ commentsBefore_ += std::string(begin, end);
+ }
+}
+
+bool Reader::readCStyleComment() {
+ while (current_ != end_) {
+ Char c = getNextChar();
+ if (c == '*' && *current_ == '/')
+ break;
+ }
+ return getNextChar() == '/';
+}
+
+bool Reader::readCppStyleComment() {
+ while (current_ != end_) {
+ Char c = getNextChar();
+ if (c == '\r' || c == '\n')
+ break;
+ }
+ return 
true; +} + +void Reader::readNumber() { + while (current_ != end_) { + if (!(*current_ >= '0' && *current_ <= '9') && + !in(*current_, '.', 'e', 'E', '+', '-')) + break; + ++current_; + } +} + +bool Reader::readString() { + Char c = 0; + while (current_ != end_) { + c = getNextChar(); + if (c == '\\') + getNextChar(); + else if (c == '"') + break; + } + return c == '"'; +} + +bool Reader::readObject(Token& tokenStart) { + Token tokenName; + std::string name; + currentValue() = Value(objectValue); + currentValue().setOffsetStart(tokenStart.start_ - begin_); + while (readToken(tokenName)) { + bool initialTokenOk = true; + while (tokenName.type_ == tokenComment && initialTokenOk) + initialTokenOk = readToken(tokenName); + if (!initialTokenOk) + break; + if (tokenName.type_ == tokenObjectEnd && name.empty()) // empty object + return true; + name = ""; + if (tokenName.type_ == tokenString) { + if (!decodeString(tokenName, name)) + return recoverFromError(tokenObjectEnd); + } else if (tokenName.type_ == tokenNumber && features_.allowNumericKeys_) { + Value numberName; + if (!decodeNumber(tokenName, numberName)) + return recoverFromError(tokenObjectEnd); + name = numberName.asString(); + } else { + break; + } + + Token colon; + if (!readToken(colon) || colon.type_ != tokenMemberSeparator) { + return addErrorAndRecover( + "Missing ':' after object member name", colon, tokenObjectEnd); + } + Value& value = currentValue()[name]; + nodes_.push(&value); + bool ok = readValue(); + nodes_.pop(); + if (!ok) // error already set + return recoverFromError(tokenObjectEnd); + + Token comma; + if (!readToken(comma) || + (comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment)) { + return addErrorAndRecover( + "Missing ',' or '}' in object declaration", comma, tokenObjectEnd); + } + bool finalizeTokenOk = true; + while (comma.type_ == tokenComment && finalizeTokenOk) + finalizeTokenOk = readToken(comma); + if (comma.type_ == tokenObjectEnd) + return true; + } + return addErrorAndRecover( + "Missing '}' or object member name", tokenName, tokenObjectEnd); +} + +bool Reader::readArray(Token& tokenStart) { + currentValue() = Value(arrayValue); + currentValue().setOffsetStart(tokenStart.start_ - begin_); + skipSpaces(); + if (*current_ == ']') // empty array + { + Token endArray; + readToken(endArray); + return true; + } + int index = 0; + for (;;) { + Value& value = currentValue()[index++]; + nodes_.push(&value); + bool ok = readValue(); + nodes_.pop(); + if (!ok) // error already set + return recoverFromError(tokenArrayEnd); + + Token token; + // Accept Comment after last item in the array. 
+ ok = readToken(token); + while (token.type_ == tokenComment && ok) { + ok = readToken(token); + } + bool badTokenType = + (token.type_ != tokenArraySeparator && token.type_ != tokenArrayEnd); + if (!ok || badTokenType) { + return addErrorAndRecover( + "Missing ',' or ']' in array declaration", token, tokenArrayEnd); + } + if (token.type_ == tokenArrayEnd) + break; + } + return true; +} + +bool Reader::decodeNumber(Token& token) { + Value decoded; + if (!decodeNumber(token, decoded)) + return false; + currentValue() = decoded; + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + return true; +} + +bool Reader::decodeNumber(Token& token, Value& decoded) { + bool isDouble = false; + for (Location inspect = token.start_; inspect != token.end_; ++inspect) { + isDouble = isDouble || in(*inspect, '.', 'e', 'E', '+') || + (*inspect == '-' && inspect != token.start_); + } + if (isDouble) + return decodeDouble(token, decoded); + // Attempts to parse the number as an integer. If the number is + // larger than the maximum supported value of an integer then + // we decode the number as a double. + Location current = token.start_; + bool isNegative = *current == '-'; + if (isNegative) + ++current; + Value::LargestUInt maxIntegerValue = + isNegative ? Value::LargestUInt(-Value::minLargestInt) + : Value::maxLargestUInt; + Value::LargestUInt threshold = maxIntegerValue / 10; + Value::LargestUInt value = 0; + while (current < token.end_) { + Char c = *current++; + if (c < '0' || c > '9') + return addError("'" + std::string(token.start_, token.end_) + + "' is not a number.", + token); + Value::UInt digit(c - '0'); + if (value >= threshold) { + // We've hit or exceeded the max value divided by 10 (rounded down). If + // a) we've only just touched the limit, b) this is the last digit, and + // c) it's small enough to fit in that rounding delta, we're okay. + // Otherwise treat this number as a double to avoid overflow. + if (value > threshold || current != token.end_ || + digit > maxIntegerValue % 10) { + return decodeDouble(token, decoded); + } + } + value = value * 10 + digit; + } + if (isNegative) + decoded = -Value::LargestInt(value); + else if (value <= Value::LargestUInt(Value::maxInt)) + decoded = Value::LargestInt(value); + else + decoded = value; + return true; +} + +bool Reader::decodeDouble(Token& token) { + Value decoded; + if (!decodeDouble(token, decoded)) + return false; + currentValue() = decoded; + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + return true; +} + +bool Reader::decodeDouble(Token& token, Value& decoded) { + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + + // Sanity check to avoid buffer overflow exploits. + if (length < 0) { + return addError("Unable to parse token length", token); + } + + // Avoid using a string constant for the format control string given to + // sscanf, as this can cause hard to debug crashes on OS X. 
See here for more + // info: + // + // http://developer.apple.com/library/mac/#DOCUMENTATION/DeveloperTools/gcc-4.0.1/gcc/Incompatibilities.html + char format[] = "%lf"; + + if (length <= bufferSize) { + Char buffer[bufferSize + 1]; + memcpy(buffer, token.start_, length); + buffer[length] = 0; + count = sscanf(buffer, format, &value); + } else { + std::string buffer(token.start_, token.end_); + count = sscanf(buffer.c_str(), format, &value); + } + + if (count != 1) + return addError("'" + std::string(token.start_, token.end_) + + "' is not a number.", + token); + decoded = value; + return true; +} + +bool Reader::decodeString(Token& token) { + std::string decoded; + if (!decodeString(token, decoded)) + return false; + currentValue() = decoded; + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + return true; +} + +bool Reader::decodeString(Token& token, std::string& decoded) { + decoded.reserve(token.end_ - token.start_ - 2); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while (current != end) { + Char c = *current++; + if (c == '"') + break; + else if (c == '\\') { + if (current == end) + return addError("Empty escape sequence in string", token, current); + Char escape = *current++; + switch (escape) { + case '"': + decoded += '"'; + break; + case '/': + decoded += '/'; + break; + case '\\': + decoded += '\\'; + break; + case 'b': + decoded += '\b'; + break; + case 'f': + decoded += '\f'; + break; + case 'n': + decoded += '\n'; + break; + case 'r': + decoded += '\r'; + break; + case 't': + decoded += '\t'; + break; + case 'u': { + unsigned int unicode; + if (!decodeUnicodeCodePoint(token, current, end, unicode)) + return false; + decoded += codePointToUTF8(unicode); + } break; + default: + return addError("Bad escape sequence in string", token, current); + } + } else { + decoded += c; + } + } + return true; +} + +bool Reader::decodeUnicodeCodePoint(Token& token, + Location& current, + Location end, + unsigned int& unicode) { + + if (!decodeUnicodeEscapeSequence(token, current, end, unicode)) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) { + // surrogate pairs + if (end - current < 6) + return addError( + "additional six characters expected to parse unicode surrogate pair.", + token, + current); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++) == 'u') { + if (decodeUnicodeEscapeSequence(token, current, end, surrogatePair)) { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } else + return false; + } else + return addError("expecting another \\u token to begin the second half of " + "a unicode surrogate pair", + token, + current); + } + return true; +} + +bool Reader::decodeUnicodeEscapeSequence(Token& token, + Location& current, + Location end, + unsigned int& unicode) { + if (end - current < 4) + return addError( + "Bad unicode escape sequence in string: four digits expected.", + token, + current); + unicode = 0; + for (int index = 0; index < 4; ++index) { + Char c = *current++; + unicode *= 16; + if (c >= '0' && c <= '9') + unicode += c - '0'; + else if (c >= 'a' && c <= 'f') + unicode += c - 'a' + 10; + else if (c >= 'A' && c <= 'F') + unicode += c - 'A' + 10; + else + return addError( + "Bad unicode escape sequence in string: hexadecimal digit expected.", + token, + current); + } + return true; +} + +bool +Reader::addError(const std::string& message, Token& token, Location extra) { + ErrorInfo info; 
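The surrogate-pair combination performed in decodeUnicodeCodePoint() above can be sanity-checked with a small standalone sketch (editorial example; \uD834 \uDD1E is the usual UTF-16 test pair for U+1D11E):

    #include <cassert>

    int main() {
      unsigned int hi = 0xD834, lo = 0xDD1E;                       // high and low surrogates
      unsigned int cp = 0x10000 + ((hi & 0x3FF) << 10) + (lo & 0x3FF);
      assert(cp == 0x1D11E);   // 0x10000 + (0x034 << 10) + 0x11E
      return 0;
    }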
+ info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back(info); + return false; +} + +bool Reader::recoverFromError(TokenType skipUntilToken) { + int errorCount = int(errors_.size()); + Token skip; + for (;;) { + if (!readToken(skip)) + errors_.resize(errorCount); // discard errors caused by recovery + if (skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream) + break; + } + errors_.resize(errorCount); + return false; +} + +bool Reader::addErrorAndRecover(const std::string& message, + Token& token, + TokenType skipUntilToken) { + addError(message, token); + return recoverFromError(skipUntilToken); +} + +Value& Reader::currentValue() { return *(nodes_.top()); } + +Reader::Char Reader::getNextChar() { + if (current_ == end_) + return 0; + return *current_++; +} + +void Reader::getLocationLineAndColumn(Location location, + int& line, + int& column) const { + Location current = begin_; + Location lastLineStart = current; + line = 0; + while (current < location && current != end_) { + Char c = *current++; + if (c == '\r') { + if (*current == '\n') + ++current; + lastLineStart = current; + ++line; + } else if (c == '\n') { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + +std::string Reader::getLocationLineAndColumn(Location location) const { + int line, column; + getLocationLineAndColumn(location, line, column); + char buffer[18 + 16 + 16 + 1]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) +#if defined(WINCE) + _snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column); +#else + sprintf_s(buffer, sizeof(buffer), "Line %d, Column %d", line, column); +#endif +#else + snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column); +#endif + return buffer; +} + +// Deprecated. 
Preserved for backward compatibility +std::string Reader::getFormatedErrorMessages() const { + return getFormattedErrorMessages(); +} + +std::string Reader::getFormattedErrorMessages() const { + std::string formattedMessage; + for (Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError) { + const ErrorInfo& error = *itError; + formattedMessage += + "* " + getLocationLineAndColumn(error.token_.start_) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if (error.extra_) + formattedMessage += + "See " + getLocationLineAndColumn(error.extra_) + " for detail.\n"; + } + return formattedMessage; +} + +std::vector<Reader::StructuredError> Reader::getStructuredErrors() const { + std::vector<Reader::StructuredError> allErrors; + for (Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError) { + const ErrorInfo& error = *itError; + Reader::StructuredError structured; + structured.offset_start = error.token_.start_ - begin_; + structured.offset_limit = error.token_.end_ - begin_; + structured.message = error.message_; + allErrors.push_back(structured); + } + return allErrors; +} + +bool Reader::pushError(const Value& value, const std::string& message) { + size_t length = end_ - begin_; + if(value.getOffsetStart() > length + || value.getOffsetLimit() > length) + return false; + Token token; + token.type_ = tokenError; + token.start_ = begin_ + value.getOffsetStart(); + token.end_ = end_ + value.getOffsetLimit(); + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = 0; + errors_.push_back(info); + return true; +} + +bool Reader::pushError(const Value& value, const std::string& message, const Value& extra) { + size_t length = end_ - begin_; + if(value.getOffsetStart() > length + || value.getOffsetLimit() > length + || extra.getOffsetLimit() > length) + return false; + Token token; + token.type_ = tokenError; + token.start_ = begin_ + value.getOffsetStart(); + token.end_ = begin_ + value.getOffsetLimit(); + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = begin_ + extra.getOffsetStart(); + errors_.push_back(info); + return true; +} + +bool Reader::good() const { + return !errors_.size(); +} + +std::istream& operator>>(std::istream& sin, Value& root) { + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + if (!ok) { + fprintf(stderr, + "Error from reader: %s", + reader.getFormattedErrorMessages().c_str()); + + JSON_FAIL_MESSAGE("reader error"); + } + return sin; +} + +} // namespace Json diff --git a/3rdparty/jsoncpp/src/lib_json/json_tool.h b/3rdparty/jsoncpp/src/lib_json/json_tool.h new file mode 100644 index 00000000000..f9b61c38c70 --- /dev/null +++ b/3rdparty/jsoncpp/src/lib_json/json_tool.h @@ -0,0 +1,87 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED +#define LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +/* This header provides common string manipulation support, such as UTF-8, + * portable conversion from/to string... + * + * It is an internal header that must not be exposed. + */ + +namespace Json { + +/// Converts a unicode code-point to UTF-8. 
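As a quick reference for the conversion defined just below (a summary of the standard UTF-8 byte layout, not additional library behaviour; sample bytes are editorial):

    //   U+0000..U+007F     -> 1 byte : 0xxxxxxx
    //   U+0080..U+07FF     -> 2 bytes: 110xxxxx 10xxxxxx                      e.g. U+00E9 -> C3 A9
    //   U+0800..U+FFFF     -> 3 bytes: 1110xxxx 10xxxxxx 10xxxxxx             e.g. U+20AC -> E2 82 AC
    //   U+10000..U+10FFFF  -> 4 bytes: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx    e.g. U+1D11E -> F0 9D 84 9E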
+static inline std::string codePointToUTF8(unsigned int cp) { + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) { + result.resize(1); + result[0] = static_cast<char>(cp); + } else if (cp <= 0x7FF) { + result.resize(2); + result[1] = static_cast<char>(0x80 | (0x3f & cp)); + result[0] = static_cast<char>(0xC0 | (0x1f & (cp >> 6))); + } else if (cp <= 0xFFFF) { + result.resize(3); + result[2] = static_cast<char>(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12))); + } else if (cp <= 0x10FFFF) { + result.resize(4); + result[3] = static_cast<char>(0x80 | (0x3f & cp)); + result[2] = static_cast<char>(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast<char>(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + +/// Returns true if ch is a control character (in range [0,32[). +static inline bool isControlCharacter(char ch) { return ch > 0 && ch <= 0x1F; } + +enum { + /// Constant that specify the size of the buffer that must be passed to + /// uintToString. + uintToStringBufferSize = 3 * sizeof(LargestUInt) + 1 +}; + +// Defines a char buffer for use with uintToString(). +typedef char UIntToStringBuffer[uintToStringBufferSize]; + +/** Converts an unsigned integer to string. + * @param value Unsigned interger to convert to string + * @param current Input/Output string buffer. + * Must have at least uintToStringBufferSize chars free. + */ +static inline void uintToString(LargestUInt value, char*& current) { + *--current = 0; + do { + *--current = char(value % 10) + '0'; + value /= 10; + } while (value != 0); +} + +/** Change ',' to '.' everywhere in buffer. + * + * We had a sophisticated way, but it did not work in WinCE. + * @see https://github.com/open-source-parsers/jsoncpp/pull/9 + */ +static inline void fixNumericLocale(char* begin, char* end) { + while (begin < end) { + if (*begin == ',') { + *begin = '.'; + } + ++begin; + } +} + +} // namespace Json { + +#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/3rdparty/jsoncpp/src/lib_json/json_value.cpp b/3rdparty/jsoncpp/src/lib_json/json_value.cpp new file mode 100644 index 00000000000..b73deac1725 --- /dev/null +++ b/3rdparty/jsoncpp/src/lib_json/json_value.cpp @@ -0,0 +1,1478 @@ +// Copyright 2011 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGAMATION) +#include <json/assertions.h> +#include <json/value.h> +#include <json/writer.h> +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +#include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +#endif // if !defined(JSON_IS_AMALGAMATION) +#include <math.h> +#include <sstream> +#include <utility> +#include <cstring> +#include <cassert> +#ifdef JSON_USE_CPPTL +#include <cpptl/conststring.h> +#endif +#include <cstddef> // size_t + +#define JSON_ASSERT_UNREACHABLE assert(false) + +namespace Json { + +// This is a walkaround to avoid the static initialization of Value::null. +// kNull must be word-aligned to avoid crashing on ARM. We use an alignment of +// 8 (instead of 4) as a bit of future-proofing. 
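Returning briefly to json_tool.h above: uintToString() fills its buffer backwards, so the caller passes a pointer one past the end of the buffer and afterwards reads from wherever the pointer was left. A minimal standalone sketch of that contract (editorial; the typedefs are stand-ins for the library's own):

    #include <cstdio>

    typedef unsigned long long LargestUInt;              // stand-in for Json::LargestUInt
    enum { uintToStringBufferSize = 3 * sizeof(LargestUInt) + 1 };
    typedef char UIntToStringBuffer[uintToStringBufferSize];

    static void uintToString(LargestUInt value, char*& current) {
      *--current = 0;                                     // terminating NUL goes in first
      do {
        *--current = char(value % 10) + '0';              // digits, least significant first
        value /= 10;
      } while (value != 0);
    }

    int main() {
      UIntToStringBuffer buffer;
      char* current = buffer + sizeof(buffer);            // one past the end
      uintToString(10240203u, current);                   // current now points at "10240203"
      std::printf("%s\n", current);
      return 0;
    }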
+#if defined(__ARMEL__) +#define ALIGNAS(byte_alignment) __attribute__((aligned(byte_alignment))) +#else +#define ALIGNAS(byte_alignment) +#endif +static const unsigned char ALIGNAS(8) kNull[sizeof(Value)] = { 0 }; +const unsigned char& kNullRef = kNull[0]; +const Value& Value::null = reinterpret_cast<const Value&>(kNullRef); + +const Int Value::minInt = Int(~(UInt(-1) / 2)); +const Int Value::maxInt = Int(UInt(-1) / 2); +const UInt Value::maxUInt = UInt(-1); +#if defined(JSON_HAS_INT64) +const Int64 Value::minInt64 = Int64(~(UInt64(-1) / 2)); +const Int64 Value::maxInt64 = Int64(UInt64(-1) / 2); +const UInt64 Value::maxUInt64 = UInt64(-1); +// The constant is hard-coded because some compiler have trouble +// converting Value::maxUInt64 to a double correctly (AIX/xlC). +// Assumes that UInt64 is a 64 bits integer. +static const double maxUInt64AsDouble = 18446744073709551615.0; +#endif // defined(JSON_HAS_INT64) +const LargestInt Value::minLargestInt = LargestInt(~(LargestUInt(-1) / 2)); +const LargestInt Value::maxLargestInt = LargestInt(LargestUInt(-1) / 2); +const LargestUInt Value::maxLargestUInt = LargestUInt(-1); + +/// Unknown size marker +static const unsigned int unknown = (unsigned)-1; + +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) +template <typename T, typename U> +static inline bool InRange(double d, T min, U max) { + return d >= min && d <= max; +} +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) +static inline double integerToDouble(Json::UInt64 value) { + return static_cast<double>(Int64(value / 2)) * 2.0 + Int64(value & 1); +} + +template <typename T> static inline double integerToDouble(T value) { + return static_cast<double>(value); +} + +template <typename T, typename U> +static inline bool InRange(double d, T min, U max) { + return d >= integerToDouble(min) && d <= integerToDouble(max); +} +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + +/** Duplicates the specified string value. + * @param value Pointer to the string to duplicate. Must be zero-terminated if + * length is "unknown". + * @param length Length of the value. if equals to unknown, then it will be + * computed using strlen(value). + * @return Pointer on the duplicate instance of string. + */ +static inline char* duplicateStringValue(const char* value, + unsigned int length = unknown) { + if (length == unknown) + length = (unsigned int)strlen(value); + + // Avoid an integer overflow in the call to malloc below by limiting length + // to a sane value. + if (length >= (unsigned)Value::maxInt) + length = Value::maxInt - 1; + + char* newString = static_cast<char*>(malloc(length + 1)); + JSON_ASSERT_MESSAGE(newString != 0, + "in Json::Value::duplicateStringValue(): " + "Failed to allocate string value buffer"); + memcpy(newString, value, length); + newString[length] = 0; + return newString; +} + +/** Free the string duplicated by duplicateStringValue(). + */ +static inline void releaseStringValue(char* value) { free(value); } + +} // namespace Json + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#if !defined(JSON_IS_AMALGAMATION) +#ifdef JSON_VALUE_USE_INTERNAL_MAP +#include "json_internalarray.inl" +#include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +#include "json_valueiterator.inl" +#endif // if !defined(JSON_IS_AMALGAMATION) + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +Value::CommentInfo::CommentInfo() : comment_(0) {} + +Value::CommentInfo::~CommentInfo() { + if (comment_) + releaseStringValue(comment_); +} + +void Value::CommentInfo::setComment(const char* text) { + if (comment_) + releaseStringValue(comment_); + JSON_ASSERT(text != 0); + JSON_ASSERT_MESSAGE( + text[0] == '\0' || text[0] == '/', + "in Json::Value::setComment(): Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = duplicateStringValue(text); +} + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString(ArrayIndex index) : cstr_(0), index_(index) {} + +Value::CZString::CZString(const char* cstr, DuplicationPolicy allocate) + : cstr_(allocate == duplicate ? duplicateStringValue(cstr) : cstr), + index_(allocate) {} + +Value::CZString::CZString(const CZString& other) + : cstr_(other.index_ != noDuplication && other.cstr_ != 0 + ? duplicateStringValue(other.cstr_) + : other.cstr_), + index_(other.cstr_ + ? static_cast<ArrayIndex>(other.index_ == noDuplication + ? 
noDuplication : duplicate) + : other.index_) {} + +Value::CZString::~CZString() { + if (cstr_ && index_ == duplicate) + releaseStringValue(const_cast<char*>(cstr_)); +} + +void Value::CZString::swap(CZString& other) { + std::swap(cstr_, other.cstr_); + std::swap(index_, other.index_); +} + +Value::CZString& Value::CZString::operator=(CZString other) { + swap(other); + return *this; +} + +bool Value::CZString::operator<(const CZString& other) const { + if (cstr_) + return strcmp(cstr_, other.cstr_) < 0; + return index_ < other.index_; +} + +bool Value::CZString::operator==(const CZString& other) const { + if (cstr_) + return strcmp(cstr_, other.cstr_) == 0; + return index_ == other.index_; +} + +ArrayIndex Value::CZString::index() const { return index_; } + +const char* Value::CZString::c_str() const { return cstr_; } + +bool Value::CZString::isStaticString() const { return index_ == noDuplication; } + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value(ValueType type) { + initBasic(type); + switch (type) { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + +Value::Value(Int value) { + initBasic(intValue); + value_.int_ = value; +} + +Value::Value(UInt value) { + initBasic(uintValue); + value_.uint_ = value; +} +#if defined(JSON_HAS_INT64) +Value::Value(Int64 value) { + initBasic(intValue); + value_.int_ = value; +} +Value::Value(UInt64 value) { + initBasic(uintValue); + value_.uint_ = value; +} +#endif // defined(JSON_HAS_INT64) + +Value::Value(double value) { + initBasic(realValue); + value_.real_ = value; +} + +Value::Value(const char* value) { + initBasic(stringValue, true); + value_.string_ = duplicateStringValue(value); +} + +Value::Value(const char* beginValue, const char* endValue) { + initBasic(stringValue, true); + value_.string_ = + duplicateStringValue(beginValue, (unsigned int)(endValue - beginValue)); +} + +Value::Value(const std::string& value) { + initBasic(stringValue, true); + value_.string_ = + duplicateStringValue(value.c_str(), (unsigned int)value.length()); +} + +Value::Value(const StaticString& value) { + initBasic(stringValue); + value_.string_ = const_cast<char*>(value.c_str()); +} + +#ifdef JSON_USE_CPPTL +Value::Value(const CppTL::ConstString& value) { + initBasic(stringValue, true); + value_.string_ = duplicateStringValue(value, value.length()); +} +#endif + +Value::Value(bool value) { + initBasic(booleanValue); + 
value_.bool_ = value; +} + +Value::Value(const Value& other) + : type_(other.type_), allocated_(false) +#ifdef JSON_VALUE_USE_INTERNAL_MAP + , + itemIsUsed_(0) +#endif + , + comments_(0), start_(other.start_), limit_(other.limit_) { + switch (type_) { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if (other.value_.string_) { + value_.string_ = duplicateStringValue(other.value_.string_); + allocated_ = true; + } else { + value_.string_ = 0; + allocated_ = false; + } + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(*other.value_.map_); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy(*other.value_.array_); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy(*other.value_.map_); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if (other.comments_) { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for (int comment = 0; comment < numberOfCommentPlacement; ++comment) { + const CommentInfo& otherComment = other.comments_[comment]; + if (otherComment.comment_) + comments_[comment].setComment(otherComment.comment_); + } + } +} + +Value::~Value() { + switch (type_) { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if (allocated_) + releaseStringValue(value_.string_); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray(value_.array_); + break; + case objectValue: + mapAllocator()->destructMap(value_.map_); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if (comments_) + delete[] comments_; +} + +Value& Value::operator=(Value other) { + swap(other); + return *this; +} + +void Value::swap(Value& other) { + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap(value_, other.value_); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; + std::swap(start_, other.start_); + std::swap(limit_, other.limit_); +} + +ValueType Value::type() const { return type_; } + +int Value::compare(const Value& other) const { + if (*this < other) + return -1; + if (*this > other) + return 1; + return 0; +} + +bool Value::operator<(const Value& other) const { + int typeDelta = type_ - other.type_; + if (typeDelta) + return typeDelta < 0 ? 
true : false; + switch (type_) { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return (value_.string_ == 0 && other.value_.string_) || + (other.value_.string_ && value_.string_ && + strcmp(value_.string_, other.value_.string_) < 0); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: { + int delta = int(value_.map_->size() - other.value_.map_->size()); + if (delta) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare(*(other.value_.array_)) < 0; + case objectValue: + return value_.map_->compare(*(other.value_.map_)) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable +} + +bool Value::operator<=(const Value& other) const { return !(other < *this); } + +bool Value::operator>=(const Value& other) const { return !(*this < other); } + +bool Value::operator>(const Value& other) const { return other < *this; } + +bool Value::operator==(const Value& other) const { + // if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if (type_ != temp) + return false; + switch (type_) { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return (value_.string_ == other.value_.string_) || + (other.value_.string_ && value_.string_ && + strcmp(value_.string_, other.value_.string_) == 0); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() && + (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare(*(other.value_.array_)) == 0; + case objectValue: + return value_.map_->compare(*(other.value_.map_)) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable +} + +bool Value::operator!=(const Value& other) const { return !(*this == other); } + +const char* Value::asCString() const { + JSON_ASSERT_MESSAGE(type_ == stringValue, + "in Json::Value::asCString(): requires stringValue"); + return value_.string_; +} + +std::string Value::asString() const { + switch (type_) { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + return valueToString(value_.int_); + case uintValue: + return valueToString(value_.uint_); + case realValue: + return valueToString(value_.real_); + default: + JSON_FAIL_MESSAGE("Type is not convertible to string"); + } +} + +#ifdef JSON_USE_CPPTL +CppTL::ConstString Value::asConstString() const { + return CppTL::ConstString(asString().c_str()); +} +#endif + +Value::Int Value::asInt() const { + switch (type_) { + case intValue: + JSON_ASSERT_MESSAGE(isInt(), "LargestInt out of Int range"); + return Int(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE(isInt(), "LargestUInt out of Int range"); + return Int(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE(InRange(value_.real_, minInt, maxInt), + "double out of Int range"); + return Int(value_.real_); + case nullValue: + return 0; + case booleanValue: + return value_.bool_ ? 1 : 0; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to Int."); +} + +Value::UInt Value::asUInt() const { + switch (type_) { + case intValue: + JSON_ASSERT_MESSAGE(isUInt(), "LargestInt out of UInt range"); + return UInt(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE(isUInt(), "LargestUInt out of UInt range"); + return UInt(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE(InRange(value_.real_, 0, maxUInt), + "double out of UInt range"); + return UInt(value_.real_); + case nullValue: + return 0; + case booleanValue: + return value_.bool_ ? 1 : 0; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to UInt."); +} + +#if defined(JSON_HAS_INT64) + +Value::Int64 Value::asInt64() const { + switch (type_) { + case intValue: + return Int64(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE(isInt64(), "LargestUInt out of Int64 range"); + return Int64(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE(InRange(value_.real_, minInt64, maxInt64), + "double out of Int64 range"); + return Int64(value_.real_); + case nullValue: + return 0; + case booleanValue: + return value_.bool_ ? 1 : 0; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to Int64."); +} + +Value::UInt64 Value::asUInt64() const { + switch (type_) { + case intValue: + JSON_ASSERT_MESSAGE(isUInt64(), "LargestInt out of UInt64 range"); + return UInt64(value_.int_); + case uintValue: + return UInt64(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE(InRange(value_.real_, 0, maxUInt64), + "double out of UInt64 range"); + return UInt64(value_.real_); + case nullValue: + return 0; + case booleanValue: + return value_.bool_ ? 1 : 0; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to UInt64."); +} +#endif // if defined(JSON_HAS_INT64) + +LargestInt Value::asLargestInt() const { +#if defined(JSON_NO_INT64) + return asInt(); +#else + return asInt64(); +#endif +} + +LargestUInt Value::asLargestUInt() const { +#if defined(JSON_NO_INT64) + return asUInt(); +#else + return asUInt64(); +#endif +} + +double Value::asDouble() const { + switch (type_) { + case intValue: + return static_cast<double>(value_.int_); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast<double>(value_.uint_); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return integerToDouble(value_.uint_); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return value_.real_; + case nullValue: + return 0.0; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to double."); +} + +float Value::asFloat() const { + switch (type_) { + case intValue: + return static_cast<float>(value_.int_); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast<float>(value_.uint_); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return integerToDouble(value_.uint_); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return static_cast<float>(value_.real_); + case nullValue: + return 0.0; + case booleanValue: + return value_.bool_ ? 1.0f : 0.0f; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to float."); +} + +bool Value::asBool() const { + switch (type_) { + case booleanValue: + return value_.bool_; + case nullValue: + return false; + case intValue: + return value_.int_ ? true : false; + case uintValue: + return value_.uint_ ? true : false; + case realValue: + return value_.real_ ? true : false; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to bool."); +} + +bool Value::isConvertibleTo(ValueType other) const { + switch (other) { + case nullValue: + return (isNumeric() && asDouble() == 0.0) || + (type_ == booleanValue && value_.bool_ == false) || + (type_ == stringValue && asString() == "") || + (type_ == arrayValue && value_.map_->size() == 0) || + (type_ == objectValue && value_.map_->size() == 0) || + type_ == nullValue; + case intValue: + return isInt() || + (type_ == realValue && InRange(value_.real_, minInt, maxInt)) || + type_ == booleanValue || type_ == nullValue; + case uintValue: + return isUInt() || + (type_ == realValue && InRange(value_.real_, 0, maxUInt)) || + type_ == booleanValue || type_ == nullValue; + case realValue: + return isNumeric() || type_ == booleanValue || type_ == nullValue; + case booleanValue: + return isNumeric() || type_ == booleanValue || type_ == nullValue; + case stringValue: + return isNumeric() || type_ == booleanValue || type_ == stringValue || + type_ == nullValue; + case arrayValue: + return type_ == arrayValue || type_ == nullValue; + case objectValue: + return type_ == objectValue || type_ == nullValue; + } + JSON_ASSERT_UNREACHABLE; + return false; +} + +/// Number of values in array or object +ArrayIndex Value::size() const { + switch (type_) { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if (!value_.map_->empty()) { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index() + 1; + } + return 0; + case objectValue: + return ArrayIndex(value_.map_->size()); +#else + case arrayValue: + return Int(value_.array_->size()); + case objectValue: + return Int(value_.map_->size()); +#endif + } + JSON_ASSERT_UNREACHABLE; + return 0; // unreachable; +} + +bool Value::empty() const { + if (isNull() || isArray() || isObject()) + return size() == 0u; + else + return false; +} + +bool Value::operator!() const { return isNull(); } + +void Value::clear() { + JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == arrayValue || + type_ == objectValue, + "in Json::Value::clear(): requires complex value"); + start_ = 0; + limit_ = 0; + switch (type_) { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case 
objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void Value::resize(ArrayIndex newSize) { + JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == arrayValue, + "in Json::Value::resize(): requires arrayValue"); + if (type_ == nullValue) + *this = Value(arrayValue); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ArrayIndex oldSize = size(); + if (newSize == 0) + clear(); + else if (newSize > oldSize) + (*this)[newSize - 1]; + else { + for (ArrayIndex index = newSize; index < oldSize; ++index) { + value_.map_->erase(index); + } + assert(size() == newSize); + } +#else + value_.array_->resize(newSize); +#endif +} + +Value& Value::operator[](ArrayIndex index) { + JSON_ASSERT_MESSAGE( + type_ == nullValue || type_ == arrayValue, + "in Json::Value::operator[](ArrayIndex): requires arrayValue"); + if (type_ == nullValue) + *this = Value(arrayValue); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key(index); + ObjectValues::iterator it = value_.map_->lower_bound(key); + if (it != value_.map_->end() && (*it).first == key) + return (*it).second; + + ObjectValues::value_type defaultValue(key, null); + it = value_.map_->insert(it, defaultValue); + return (*it).second; +#else + return value_.array_->resolveReference(index); +#endif +} + +Value& Value::operator[](int index) { + JSON_ASSERT_MESSAGE( + index >= 0, + "in Json::Value::operator[](int index): index cannot be negative"); + return (*this)[ArrayIndex(index)]; +} + +const Value& Value::operator[](ArrayIndex index) const { + JSON_ASSERT_MESSAGE( + type_ == nullValue || type_ == arrayValue, + "in Json::Value::operator[](ArrayIndex)const: requires arrayValue"); + if (type_ == nullValue) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key(index); + ObjectValues::const_iterator it = value_.map_->find(key); + if (it == value_.map_->end()) + return null; + return (*it).second; +#else + Value* value = value_.array_->find(index); + return value ? *value : null; +#endif +} + +const Value& Value::operator[](int index) const { + JSON_ASSERT_MESSAGE( + index >= 0, + "in Json::Value::operator[](int index) const: index cannot be negative"); + return (*this)[ArrayIndex(index)]; +} + +Value& Value::operator[](const char* key) { + return resolveReference(key, false); +} + +void Value::initBasic(ValueType type, bool allocated) { + type_ = type; + allocated_ = allocated; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + itemIsUsed_ = 0; +#endif + comments_ = 0; + start_ = 0; + limit_ = 0; +} + +Value& Value::resolveReference(const char* key, bool isStatic) { + JSON_ASSERT_MESSAGE( + type_ == nullValue || type_ == objectValue, + "in Json::Value::resolveReference(): requires objectValue"); + if (type_ == nullValue) + *this = Value(objectValue); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( + key, isStatic ? CZString::noDuplication : CZString::duplicateOnCopy); + ObjectValues::iterator it = value_.map_->lower_bound(actualKey); + if (it != value_.map_->end() && (*it).first == actualKey) + return (*it).second; + + ObjectValues::value_type defaultValue(actualKey, null); + it = value_.map_->insert(it, defaultValue); + Value& value = (*it).second; + return value; +#else + return value_.map_->resolveReference(key, isStatic); +#endif +} + +Value Value::get(ArrayIndex index, const Value& defaultValue) const { + const Value* value = &((*this)[index]); + return value == &null ? 
defaultValue : *value; +} + +bool Value::isValidIndex(ArrayIndex index) const { return index < size(); } + +const Value& Value::operator[](const char* key) const { + JSON_ASSERT_MESSAGE( + type_ == nullValue || type_ == objectValue, + "in Json::Value::operator[](char const*)const: requires objectValue"); + if (type_ == nullValue) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey(key, CZString::noDuplication); + ObjectValues::const_iterator it = value_.map_->find(actualKey); + if (it == value_.map_->end()) + return null; + return (*it).second; +#else + const Value* value = value_.map_->find(key); + return value ? *value : null; +#endif +} + +Value& Value::operator[](const std::string& key) { + return (*this)[key.c_str()]; +} + +const Value& Value::operator[](const std::string& key) const { + return (*this)[key.c_str()]; +} + +Value& Value::operator[](const StaticString& key) { + return resolveReference(key, true); +} + +#ifdef JSON_USE_CPPTL +Value& Value::operator[](const CppTL::ConstString& key) { + return (*this)[key.c_str()]; +} + +const Value& Value::operator[](const CppTL::ConstString& key) const { + return (*this)[key.c_str()]; +} +#endif + +Value& Value::append(const Value& value) { return (*this)[size()] = value; } + +Value Value::get(const char* key, const Value& defaultValue) const { + const Value* value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + +Value Value::get(const std::string& key, const Value& defaultValue) const { + return get(key.c_str(), defaultValue); +} + +Value Value::removeMember(const char* key) { + JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == objectValue, + "in Json::Value::removeMember(): requires objectValue"); + if (type_ == nullValue) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey(key, CZString::noDuplication); + ObjectValues::iterator it = value_.map_->find(actualKey); + if (it == value_.map_->end()) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value* value = value_.map_->find(key); + if (value) { + Value old(*value); + value_.map_.remove(key); + return old; + } else { + return null; + } +#endif +} + +Value Value::removeMember(const std::string& key) { + return removeMember(key.c_str()); +} + +#ifdef JSON_USE_CPPTL +Value Value::get(const CppTL::ConstString& key, + const Value& defaultValue) const { + return get(key.c_str(), defaultValue); +} +#endif + +bool Value::isMember(const char* key) const { + const Value* value = &((*this)[key]); + return value != &null; +} + +bool Value::isMember(const std::string& key) const { + return isMember(key.c_str()); +} + +#ifdef JSON_USE_CPPTL +bool Value::isMember(const CppTL::ConstString& key) const { + return isMember(key.c_str()); +} +#endif + +Value::Members Value::getMemberNames() const { + JSON_ASSERT_MESSAGE( + type_ == nullValue || type_ == objectValue, + "in Json::Value::getMemberNames(), value must be objectValue"); + if (type_ == nullValue) + return Value::Members(); + Members members; + members.reserve(value_.map_->size()); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for (; it != itEnd; ++it) + members.push_back(std::string((*it).first.c_str())); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator(it); + value_.map_->makeEndIterator(itEnd); + for (; !ValueInternalMap::equals(it, itEnd); 
ValueInternalMap::increment(it)) + members.push_back(std::string(ValueInternalMap::key(it))); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +// EnumMemberNames +// Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type<const CZString &>() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +// EnumValues +// Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type<const Value &>() ); +// return EnumValues(); +//} +// +//# endif + +static bool IsIntegral(double d) { + double integral_part; + return modf(d, &integral_part) == 0.0; +} + +bool Value::isNull() const { return type_ == nullValue; } + +bool Value::isBool() const { return type_ == booleanValue; } + +bool Value::isInt() const { + switch (type_) { + case intValue: + return value_.int_ >= minInt && value_.int_ <= maxInt; + case uintValue: + return value_.uint_ <= UInt(maxInt); + case realValue: + return value_.real_ >= minInt && value_.real_ <= maxInt && + IsIntegral(value_.real_); + default: + break; + } + return false; +} + +bool Value::isUInt() const { + switch (type_) { + case intValue: + return value_.int_ >= 0 && LargestUInt(value_.int_) <= LargestUInt(maxUInt); + case uintValue: + return value_.uint_ <= maxUInt; + case realValue: + return value_.real_ >= 0 && value_.real_ <= maxUInt && + IsIntegral(value_.real_); + default: + break; + } + return false; +} + +bool Value::isInt64() const { +#if defined(JSON_HAS_INT64) + switch (type_) { + case intValue: + return true; + case uintValue: + return value_.uint_ <= UInt64(maxInt64); + case realValue: + // Note that maxInt64 (= 2^63 - 1) is not exactly representable as a + // double, so double(maxInt64) will be rounded up to 2^63. Therefore we + // require the value to be strictly less than the limit. + return value_.real_ >= double(minInt64) && + value_.real_ < double(maxInt64) && IsIntegral(value_.real_); + default: + break; + } +#endif // JSON_HAS_INT64 + return false; +} + +bool Value::isUInt64() const { +#if defined(JSON_HAS_INT64) + switch (type_) { + case intValue: + return value_.int_ >= 0; + case uintValue: + return true; + case realValue: + // Note that maxUInt64 (= 2^64 - 1) is not exactly representable as a + // double, so double(maxUInt64) will be rounded up to 2^64. Therefore we + // require the value to be strictly less than the limit. 
+ return value_.real_ >= 0 && value_.real_ < maxUInt64AsDouble && + IsIntegral(value_.real_); + default: + break; + } +#endif // JSON_HAS_INT64 + return false; +} + +bool Value::isIntegral() const { +#if defined(JSON_HAS_INT64) + return isInt64() || isUInt64(); +#else + return isInt() || isUInt(); +#endif +} + +bool Value::isDouble() const { return type_ == realValue || isIntegral(); } + +bool Value::isNumeric() const { return isIntegral() || isDouble(); } + +bool Value::isString() const { return type_ == stringValue; } + +bool Value::isArray() const { return type_ == arrayValue; } + +bool Value::isObject() const { return type_ == objectValue; } + +void Value::setComment(const char* comment, CommentPlacement placement) { + if (!comments_) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment(comment); +} + +void Value::setComment(const std::string& comment, CommentPlacement placement) { + setComment(comment.c_str(), placement); +} + +bool Value::hasComment(CommentPlacement placement) const { + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string Value::getComment(CommentPlacement placement) const { + if (hasComment(placement)) + return comments_[placement].comment_; + return ""; +} + +void Value::setOffsetStart(size_t start) { start_ = start; } + +void Value::setOffsetLimit(size_t limit) { limit_ = limit; } + +size_t Value::getOffsetStart() const { return start_; } + +size_t Value::getOffsetLimit() const { return limit_; } + +std::string Value::toStyledString() const { + StyledWriter writer; + return writer.write(*this); +} + +Value::const_iterator Value::begin() const { + switch (type_) { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if (value_.array_) { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator(it); + return const_iterator(it); + } + break; + case objectValue: + if (value_.map_) { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator(it); + return const_iterator(it); + } + break; +#else + case arrayValue: + case objectValue: + if (value_.map_) + return const_iterator(value_.map_->begin()); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator Value::end() const { + switch (type_) { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if (value_.array_) { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator(it); + return const_iterator(it); + } + break; + case objectValue: + if (value_.map_) { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator(it); + return const_iterator(it); + } + break; +#else + case arrayValue: + case objectValue: + if (value_.map_) + return const_iterator(value_.map_->end()); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::iterator Value::begin() { + switch (type_) { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if (value_.array_) { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator(it); + return iterator(it); + } + break; + case objectValue: + if (value_.map_) { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator(it); + return iterator(it); + } + break; +#else + case arrayValue: + case objectValue: + if (value_.map_) + return iterator(value_.map_->begin()); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator Value::end() { + switch (type_) { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if (value_.array_) { + 
ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator(it); + return iterator(it); + } + break; + case objectValue: + if (value_.map_) { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator(it); + return iterator(it); + } + break; +#else + case arrayValue: + case objectValue: + if (value_.map_) + return iterator(value_.map_->end()); + break; +#endif + default: + break; + } + return iterator(); +} + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() : key_(), index_(), kind_(kindNone) {} + +PathArgument::PathArgument(ArrayIndex index) + : key_(), index_(index), kind_(kindIndex) {} + +PathArgument::PathArgument(const char* key) + : key_(key), index_(), kind_(kindKey) {} + +PathArgument::PathArgument(const std::string& key) + : key_(key.c_str()), index_(), kind_(kindKey) {} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path(const std::string& path, + const PathArgument& a1, + const PathArgument& a2, + const PathArgument& a3, + const PathArgument& a4, + const PathArgument& a5) { + InArgs in; + in.push_back(&a1); + in.push_back(&a2); + in.push_back(&a3); + in.push_back(&a4); + in.push_back(&a5); + makePath(path, in); +} + +void Path::makePath(const std::string& path, const InArgs& in) { + const char* current = path.c_str(); + const char* end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while (current != end) { + if (*current == '[') { + ++current; + if (*current == '%') + addPathInArg(path, in, itInArg, PathArgument::kindIndex); + else { + ArrayIndex index = 0; + for (; current != end && *current >= '0' && *current <= '9'; ++current) + index = index * 10 + ArrayIndex(*current - '0'); + args_.push_back(index); + } + if (current == end || *current++ != ']') + invalidPath(path, int(current - path.c_str())); + } else if (*current == '%') { + addPathInArg(path, in, itInArg, PathArgument::kindKey); + ++current; + } else if (*current == '.') { + ++current; + } else { + const char* beginName = current; + while (current != end && !strchr("[.", *current)) + ++current; + args_.push_back(std::string(beginName, current)); + } + } +} + +void Path::addPathInArg(const std::string& /*path*/, + const InArgs& in, + InArgs::const_iterator& itInArg, + PathArgument::Kind kind) { + if (itInArg == in.end()) { + // Error: missing argument %d + } else if ((*itInArg)->kind_ != kind) { + // Error: bad argument type + } else { + args_.push_back(**itInArg); + } +} + +void Path::invalidPath(const std::string& /*path*/, int /*location*/) { + // Error: invalid path. +} + +const Value& Path::resolve(const Value& root) const { + const Value* node = &root; + for (Args::const_iterator it = args_.begin(); it != args_.end(); ++it) { + const PathArgument& arg = *it; + if (arg.kind_ == PathArgument::kindIndex) { + if (!node->isArray() || !node->isValidIndex(arg.index_)) { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } else if (arg.kind_ == PathArgument::kindKey) { + if (!node->isObject()) { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if (node == &Value::null) { + // Error: unable to resolve path (object has no member named '' at + // position...) 
+ } + } + } + return *node; +} + +Value Path::resolve(const Value& root, const Value& defaultValue) const { + const Value* node = &root; + for (Args::const_iterator it = args_.begin(); it != args_.end(); ++it) { + const PathArgument& arg = *it; + if (arg.kind_ == PathArgument::kindIndex) { + if (!node->isArray() || !node->isValidIndex(arg.index_)) + return defaultValue; + node = &((*node)[arg.index_]); + } else if (arg.kind_ == PathArgument::kindKey) { + if (!node->isObject()) + return defaultValue; + node = &((*node)[arg.key_]); + if (node == &Value::null) + return defaultValue; + } + } + return *node; +} + +Value& Path::make(Value& root) const { + Value* node = &root; + for (Args::const_iterator it = args_.begin(); it != args_.end(); ++it) { + const PathArgument& arg = *it; + if (arg.kind_ == PathArgument::kindIndex) { + if (!node->isArray()) { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } else if (arg.kind_ == PathArgument::kindKey) { + if (!node->isObject()) { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + +} // namespace Json diff --git a/3rdparty/jsoncpp/src/lib_json/json_valueiterator.inl b/3rdparty/jsoncpp/src/lib_json/json_valueiterator.inl new file mode 100644 index 00000000000..a9f7df63a7c --- /dev/null +++ b/3rdparty/jsoncpp/src/lib_json/json_valueiterator.inl @@ -0,0 +1,241 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_(), isNull_(true) { +} +#else + : isArray_(true), isNull_(true) { + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( + const Value::ObjectValues::iterator& current) + : current_(current), isNull_(false) {} +#else +ValueIteratorBase::ValueIteratorBase( + const ValueInternalArray::IteratorState& state) + : isArray_(true) { + iterator_.array_ = state; +} + +ValueIteratorBase::ValueIteratorBase( + const ValueInternalMap::IteratorState& state) + : isArray_(false) { + iterator_.map_ = state; +} +#endif + +Value& ValueIteratorBase::deref() const { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if (isArray_) + return ValueInternalArray::dereference(iterator_.array_); + return ValueInternalMap::value(iterator_.map_); +#endif +} + +void ValueIteratorBase::increment() { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if (isArray_) + ValueInternalArray::increment(iterator_.array_); + ValueInternalMap::increment(iterator_.map_); +#endif +} + +void ValueIteratorBase::decrement() { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if (isArray_) + ValueInternalArray::decrement(iterator_.array_); + ValueInternalMap::decrement(iterator_.map_); +#endif +} + 
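
The deref/increment/decrement primitives above are what back Value::begin() and Value::end(). A minimal sketch of how calling code typically walks a Json::Value through this iterator interface, using only the public jsoncpp headers (the helper name dumpMembers is illustrative, not part of the library):

#include <json/json.h>
#include <iostream>

// Print every member of an object (or every element of an array) together
// with the key reported by the iterator: the member name for objects, the
// numeric index for arrays.
static void dumpMembers(const Json::Value& root) {
  for (Json::Value::const_iterator it = root.begin(); it != root.end(); ++it) {
    // toStyledString() already appends a trailing newline.
    std::cout << it.key().toStyledString() << " -> " << (*it).toStyledString();
  }
}

The isNull_ flag maintained by the constructors above is what makes such a loop safe even on a default-constructed (null) Value: begin() and end() both hand back default-constructed iterators, and the equality test treats two null iterators as equal.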
+ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance(const SelfType& other) const { +#ifndef JSON_VALUE_USE_INTERNAL_MAP +#ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +#else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if (isNull_ && other.isNull_) { + return 0; + } + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 + // RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for (Value::ObjectValues::iterator it = current_; it != other.current_; + ++it) { + ++myDistance; + } + return myDistance; +#endif +#else + if (isArray_) + return ValueInternalArray::distance(iterator_.array_, + other.iterator_.array_); + return ValueInternalMap::distance(iterator_.map_, other.iterator_.map_); +#endif +} + +bool ValueIteratorBase::isEqual(const SelfType& other) const { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if (isNull_) { + return other.isNull_; + } + return current_ == other.current_; +#else + if (isArray_) + return ValueInternalArray::equals(iterator_.array_, other.iterator_.array_); + return ValueInternalMap::equals(iterator_.map_, other.iterator_.map_); +#endif +} + +void ValueIteratorBase::copy(const SelfType& other) { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; + isNull_ = other.isNull_; +#else + if (isArray_) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + +Value ValueIteratorBase::key() const { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if (czstring.c_str()) { + if (czstring.isStaticString()) + return Value(StaticString(czstring.c_str())); + return Value(czstring.c_str()); + } + return Value(czstring.index()); +#else + if (isArray_) + return Value(ValueInternalArray::indexOf(iterator_.array_)); + bool isStatic; + const char* memberName = ValueInternalMap::key(iterator_.map_, isStatic); + if (isStatic) + return Value(StaticString(memberName)); + return Value(memberName); +#endif +} + +UInt ValueIteratorBase::index() const { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if (!czstring.c_str()) + return czstring.index(); + return Value::UInt(-1); +#else + if (isArray_) + return Value::UInt(ValueInternalArray::indexOf(iterator_.array_)); + return Value::UInt(-1); +#endif +} + +const char* ValueIteratorBase::memberName() const { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char* name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if (!isArray_) + return ValueInternalMap::key(iterator_.map_); + return ""; +#endif +} + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() {} + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( + const Value::ObjectValues::iterator& current) + : ValueIteratorBase(current) {} +#else +ValueConstIterator::ValueConstIterator( + const ValueInternalArray::IteratorState& state) + : ValueIteratorBase(state) {} + +ValueConstIterator::ValueConstIterator( + const ValueInternalMap::IteratorState& state) + : ValueIteratorBase(state) {} +#endif + +ValueConstIterator& ValueConstIterator:: +operator=(const ValueIteratorBase& other) { + copy(other); + return *this; +} + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() {} + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator(const Value::ObjectValues::iterator& current) + : ValueIteratorBase(current) {} +#else +ValueIterator::ValueIterator(const ValueInternalArray::IteratorState& state) + : ValueIteratorBase(state) {} + +ValueIterator::ValueIterator(const ValueInternalMap::IteratorState& state) + : ValueIteratorBase(state) {} +#endif + +ValueIterator::ValueIterator(const ValueConstIterator& other) + : ValueIteratorBase(other) {} + +ValueIterator::ValueIterator(const ValueIterator& other) + : ValueIteratorBase(other) {} + +ValueIterator& ValueIterator::operator=(const SelfType& other) { + copy(other); + return *this; +} + +} // namespace Json diff --git a/3rdparty/jsoncpp/src/lib_json/json_writer.cpp b/3rdparty/jsoncpp/src/lib_json/json_writer.cpp new file mode 100644 index 00000000000..5113c38be1d --- /dev/null +++ b/3rdparty/jsoncpp/src/lib_json/json_writer.cpp @@ -0,0 +1,695 @@ +// Copyright 2011 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGAMATION) +#include <json/writer.h> +#include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +#include <utility> +#include <assert.h> +#include <stdio.h> +#include <string.h> +#include <sstream> +#include <iomanip> +#include <math.h> + +#if defined(_MSC_VER) && _MSC_VER < 1500 // VC++ 8.0 and below +#include <float.h> +#define isfinite _finite +#define snprintf _snprintf +#endif + +#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0 +// Disable warning about strdup being deprecated. 
+#pragma warning(disable : 4996) +#endif + +#if defined(__sun) && defined(__SVR4) //Solaris +#include <ieeefp.h> +#define isfinite finite +#endif + +namespace Json { + +static bool containsControlCharacter(const char* str) { + while (*str) { + if (isControlCharacter(*(str++))) + return true; + } + return false; +} + +std::string valueToString(LargestInt value) { + UIntToStringBuffer buffer; + char* current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if (isNegative) + value = -value; + uintToString(LargestUInt(value), current); + if (isNegative) + *--current = '-'; + assert(current >= buffer); + return current; +} + +std::string valueToString(LargestUInt value) { + UIntToStringBuffer buffer; + char* current = buffer + sizeof(buffer); + uintToString(value, current); + assert(current >= buffer); + return current; +} + +#if defined(JSON_HAS_INT64) + +std::string valueToString(Int value) { + return valueToString(LargestInt(value)); +} + +std::string valueToString(UInt value) { + return valueToString(LargestUInt(value)); +} + +#endif // # if defined(JSON_HAS_INT64) + +std::string valueToString(double value) { + // Allocate a buffer that is more than large enough to store the 16 digits of + // precision requested below. + char buffer[32]; + int len = -1; + +// Print into the buffer. We need not request the alternative representation +// that always has a decimal point because JSON doesn't distingish the +// concepts of reals and integers. +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with + // visual studio 2005 to + // avoid warning. +#if defined(WINCE) + len = _snprintf(buffer, sizeof(buffer), "%.17g", value); +#else + len = sprintf_s(buffer, sizeof(buffer), "%.17g", value); +#endif +#else + if (isfinite(value)) { + len = snprintf(buffer, sizeof(buffer), "%.17g", value); + } else { + // IEEE standard states that NaN values will not compare to themselves + if (value != value) { + len = snprintf(buffer, sizeof(buffer), "null"); + } else if (value < 0) { + len = snprintf(buffer, sizeof(buffer), "-1e+9999"); + } else { + len = snprintf(buffer, sizeof(buffer), "1e+9999"); + } + // For those, we do not need to call fixNumLoc, but it is fast. + } +#endif + assert(len >= 0); + fixNumericLocale(buffer, buffer + len); + return buffer; +} + +std::string valueToString(bool value) { return value ? "true" : "false"; } + +std::string valueToQuotedString(const char* value) { + if (value == NULL) + return ""; + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && + !containsControlCharacter(value)) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + std::string::size_type maxsize = + strlen(value) * 2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c = value; *c != 0; ++c) { + switch (*c) { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + // case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. 
+ // (I hope I am not misunderstanding something. + // blep notes: actually escaping \/ may be useful in javascript to avoid </ + // sequence. + // Should add a flag to allow this compatibility mode and prevent this + // sequence from occurring. + default: + if (isControlCharacter(*c)) { + std::ostringstream oss; + oss << "\\u" << std::hex << std::uppercase << std::setfill('0') + << std::setw(4) << static_cast<int>(*c); + result += oss.str(); + } else { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() {} + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_(false), dropNullPlaceholders_(false), + omitEndingLineFeed_(false) {} + +void FastWriter::enableYAMLCompatibility() { yamlCompatiblityEnabled_ = true; } + +void FastWriter::dropNullPlaceholders() { dropNullPlaceholders_ = true; } + +void FastWriter::omitEndingLineFeed() { omitEndingLineFeed_ = true; } + +std::string FastWriter::write(const Value& root) { + document_ = ""; + writeValue(root); + if (!omitEndingLineFeed_) + document_ += "\n"; + return document_; +} + +void FastWriter::writeValue(const Value& value) { + switch (value.type()) { + case nullValue: + if (!dropNullPlaceholders_) + document_ += "null"; + break; + case intValue: + document_ += valueToString(value.asLargestInt()); + break; + case uintValue: + document_ += valueToString(value.asLargestUInt()); + break; + case realValue: + document_ += valueToString(value.asDouble()); + break; + case stringValue: + document_ += valueToQuotedString(value.asCString()); + break; + case booleanValue: + document_ += valueToString(value.asBool()); + break; + case arrayValue: { + document_ += '['; + int size = value.size(); + for (int index = 0; index < size; ++index) { + if (index > 0) + document_ += ','; + writeValue(value[index]); + } + document_ += ']'; + } break; + case objectValue: { + Value::Members members(value.getMemberNames()); + document_ += '{'; + for (Value::Members::iterator it = members.begin(); it != members.end(); + ++it) { + const std::string& name = *it; + if (it != members.begin()) + document_ += ','; + document_ += valueToQuotedString(name.c_str()); + document_ += yamlCompatiblityEnabled_ ? 
": " : ":"; + writeValue(value[name]); + } + document_ += '}'; + } break; + } +} + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_(74), indentSize_(3), addChildValues_() {} + +std::string StyledWriter::write(const Value& root) { + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue(root); + writeValue(root); + writeCommentAfterValueOnSameLine(root); + document_ += "\n"; + return document_; +} + +void StyledWriter::writeValue(const Value& value) { + switch (value.type()) { + case nullValue: + pushValue("null"); + break; + case intValue: + pushValue(valueToString(value.asLargestInt())); + break; + case uintValue: + pushValue(valueToString(value.asLargestUInt())); + break; + case realValue: + pushValue(valueToString(value.asDouble())); + break; + case stringValue: + pushValue(valueToQuotedString(value.asCString())); + break; + case booleanValue: + pushValue(valueToString(value.asBool())); + break; + case arrayValue: + writeArrayValue(value); + break; + case objectValue: { + Value::Members members(value.getMemberNames()); + if (members.empty()) + pushValue("{}"); + else { + writeWithIndent("{"); + indent(); + Value::Members::iterator it = members.begin(); + for (;;) { + const std::string& name = *it; + const Value& childValue = value[name]; + writeCommentBeforeValue(childValue); + writeWithIndent(valueToQuotedString(name.c_str())); + document_ += " : "; + writeValue(childValue); + if (++it == members.end()) { + writeCommentAfterValueOnSameLine(childValue); + break; + } + document_ += ','; + writeCommentAfterValueOnSameLine(childValue); + } + unindent(); + writeWithIndent("}"); + } + } break; + } +} + +void StyledWriter::writeArrayValue(const Value& value) { + unsigned size = value.size(); + if (size == 0) + pushValue("[]"); + else { + bool isArrayMultiLine = isMultineArray(value); + if (isArrayMultiLine) { + writeWithIndent("["); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index = 0; + for (;;) { + const Value& childValue = value[index]; + writeCommentBeforeValue(childValue); + if (hasChildValue) + writeWithIndent(childValues_[index]); + else { + writeIndent(); + writeValue(childValue); + } + if (++index == size) { + writeCommentAfterValueOnSameLine(childValue); + break; + } + document_ += ','; + writeCommentAfterValueOnSameLine(childValue); + } + unindent(); + writeWithIndent("]"); + } else // output on a single line + { + assert(childValues_.size() == size); + document_ += "[ "; + for (unsigned index = 0; index < size; ++index) { + if (index > 0) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + +bool StyledWriter::isMultineArray(const Value& value) { + int size = value.size(); + bool isMultiLine = size * 3 >= rightMargin_; + childValues_.clear(); + for (int index = 0; index < size && !isMultiLine; ++index) { + const Value& childValue = value[index]; + isMultiLine = + isMultiLine || ((childValue.isArray() || childValue.isObject()) && + childValue.size() > 0); + } + if (!isMultiLine) // check if line length > max line length + { + childValues_.reserve(size); + addChildValues_ = true; + int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]' + for (int index = 0; index < size; ++index) { + writeValue(value[index]); + lineLength += int(childValues_[index].length()); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + 
+void StyledWriter::pushValue(const std::string& value) { + if (addChildValues_) + childValues_.push_back(value); + else + document_ += value; +} + +void StyledWriter::writeIndent() { + if (!document_.empty()) { + char last = document_[document_.length() - 1]; + if (last == ' ') // already indented + return; + if (last != '\n') // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + +void StyledWriter::writeWithIndent(const std::string& value) { + writeIndent(); + document_ += value; +} + +void StyledWriter::indent() { indentString_ += std::string(indentSize_, ' '); } + +void StyledWriter::unindent() { + assert(int(indentString_.size()) >= indentSize_); + indentString_.resize(indentString_.size() - indentSize_); +} + +void StyledWriter::writeCommentBeforeValue(const Value& root) { + if (!root.hasComment(commentBefore)) + return; + + document_ += "\n"; + writeIndent(); + std::string normalizedComment = normalizeEOL(root.getComment(commentBefore)); + std::string::const_iterator iter = normalizedComment.begin(); + while (iter != normalizedComment.end()) { + document_ += *iter; + if (*iter == '\n' && *(iter + 1) == '/') + writeIndent(); + ++iter; + } + + // Comments are stripped of newlines, so add one here + document_ += "\n"; +} + +void StyledWriter::writeCommentAfterValueOnSameLine(const Value& root) { + if (root.hasComment(commentAfterOnSameLine)) + document_ += " " + normalizeEOL(root.getComment(commentAfterOnSameLine)); + + if (root.hasComment(commentAfter)) { + document_ += "\n"; + document_ += normalizeEOL(root.getComment(commentAfter)); + document_ += "\n"; + } +} + +bool StyledWriter::hasCommentForValue(const Value& value) { + return value.hasComment(commentBefore) || + value.hasComment(commentAfterOnSameLine) || + value.hasComment(commentAfter); +} + +std::string StyledWriter::normalizeEOL(const std::string& text) { + std::string normalized; + normalized.reserve(text.length()); + const char* begin = text.c_str(); + const char* end = begin + text.length(); + const char* current = begin; + while (current != end) { + char c = *current++; + if (c == '\r') // mac or dos EOL + { + if (*current == '\n') // convert dos EOL + ++current; + normalized += '\n'; + } else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter(std::string indentation) + : document_(NULL), rightMargin_(74), indentation_(indentation), + addChildValues_() {} + +void StyledStreamWriter::write(std::ostream& out, const Value& root) { + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue(root); + writeValue(root); + writeCommentAfterValueOnSameLine(root); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + +void StyledStreamWriter::writeValue(const Value& value) { + switch (value.type()) { + case nullValue: + pushValue("null"); + break; + case intValue: + pushValue(valueToString(value.asLargestInt())); + break; + case uintValue: + pushValue(valueToString(value.asLargestUInt())); + break; + case realValue: + pushValue(valueToString(value.asDouble())); + break; + case stringValue: + pushValue(valueToQuotedString(value.asCString())); + break; + case booleanValue: + pushValue(valueToString(value.asBool())); + break; + case arrayValue: + writeArrayValue(value); + break; + case objectValue: { + Value::Members members(value.getMemberNames()); + if (members.empty()) + pushValue("{}"); + else { + writeWithIndent("{"); + indent(); + Value::Members::iterator it = members.begin(); + for (;;) { + const std::string& name = *it; + const Value& childValue = value[name]; + writeCommentBeforeValue(childValue); + writeWithIndent(valueToQuotedString(name.c_str())); + *document_ << " : "; + writeValue(childValue); + if (++it == members.end()) { + writeCommentAfterValueOnSameLine(childValue); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine(childValue); + } + unindent(); + writeWithIndent("}"); + } + } break; + } +} + +void StyledStreamWriter::writeArrayValue(const Value& value) { + unsigned size = value.size(); + if (size == 0) + pushValue("[]"); + else { + bool isArrayMultiLine = isMultineArray(value); + if (isArrayMultiLine) { + writeWithIndent("["); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index = 0; + for (;;) { + const Value& childValue = value[index]; + writeCommentBeforeValue(childValue); + if (hasChildValue) + writeWithIndent(childValues_[index]); + else { + writeIndent(); + writeValue(childValue); + } + if (++index == size) { + writeCommentAfterValueOnSameLine(childValue); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine(childValue); + } + unindent(); + writeWithIndent("]"); + } else // output on a single line + { + assert(childValues_.size() == size); + *document_ << "[ "; + for (unsigned index = 0; index < size; ++index) { + if (index > 0) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + +bool StyledStreamWriter::isMultineArray(const Value& value) { + int size = value.size(); + bool isMultiLine = size * 3 >= rightMargin_; + childValues_.clear(); + for (int index = 0; index < size && !isMultiLine; ++index) { + const Value& childValue = value[index]; + isMultiLine = + isMultiLine || ((childValue.isArray() || childValue.isObject()) && + childValue.size() > 0); + } + if (!isMultiLine) // check if line length > max line length + { + childValues_.reserve(size); + addChildValues_ = true; + int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]' + for (int index = 0; index < size; ++index) { + writeValue(value[index]); + lineLength += int(childValues_[index].length()); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + +void StyledStreamWriter::pushValue(const std::string& value) { + if (addChildValues_) + childValues_.push_back(value); + else + *document_ << value; +} + +void StyledStreamWriter::writeIndent() { + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + +void StyledStreamWriter::writeWithIndent(const std::string& value) { + writeIndent(); + *document_ << value; +} + +void StyledStreamWriter::indent() { indentString_ += indentation_; } + +void StyledStreamWriter::unindent() { + assert(indentString_.size() >= indentation_.size()); + indentString_.resize(indentString_.size() - indentation_.size()); +} + +void StyledStreamWriter::writeCommentBeforeValue(const Value& root) { + if (!root.hasComment(commentBefore)) + return; + *document_ << normalizeEOL(root.getComment(commentBefore)); + *document_ << "\n"; +} + +void StyledStreamWriter::writeCommentAfterValueOnSameLine(const Value& root) { + if (root.hasComment(commentAfterOnSameLine)) + *document_ << " " + normalizeEOL(root.getComment(commentAfterOnSameLine)); + + if (root.hasComment(commentAfter)) { + *document_ << "\n"; + *document_ << normalizeEOL(root.getComment(commentAfter)); + *document_ << "\n"; + } +} + +bool StyledStreamWriter::hasCommentForValue(const Value& value) { + return value.hasComment(commentBefore) || + value.hasComment(commentAfterOnSameLine) || + value.hasComment(commentAfter); +} + +std::string StyledStreamWriter::normalizeEOL(const std::string& text) { + std::string normalized; + normalized.reserve(text.length()); + const char* begin = text.c_str(); + const char* end = begin + text.length(); + const char* current = begin; + while (current != end) { + char c = *current++; + if (c == '\r') // mac or dos EOL + { + if (*current == '\n') // convert dos EOL + ++current; + normalized += '\n'; + } else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + +std::ostream& operator<<(std::ostream& sout, const Value& root) { + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + +} // namespace Json diff --git a/3rdparty/jsoncpp/src/lib_json/sconscript b/3rdparty/jsoncpp/src/lib_json/sconscript new file mode 100644 index 00000000000..6e7c6c8a076 --- /dev/null +++ b/3rdparty/jsoncpp/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/3rdparty/jsoncpp/src/lib_json/version.h.in b/3rdparty/jsoncpp/src/lib_json/version.h.in new file mode 100644 index 00000000000..761ca3a7b92 --- /dev/null +++ b/3rdparty/jsoncpp/src/lib_json/version.h.in @@ -0,0 +1,14 @@ +// DO NOT EDIT. This file is generated by CMake from "version" +// and "version.h.in" files. +// Run CMake configure step to update it. 
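
Once CMake's configure step has expanded the template below (conventionally into a json/version.h header; the exact install path is an assumption here), client code can check the library version at compile time through the packed JSONCPP_VERSION_HEXA value. A minimal sketch:

#include <json/version.h>  // generated from version.h.in at configure time
#include <cstdio>

int main() {
  // JSONCPP_VERSION_HEXA packs major/minor/patch into one integer
  // ((major << 24) | (minor << 16) | (patch << 8)), so a single numeric
  // comparison is enough for feature gating.
  std::printf("built against jsoncpp %s (0x%08x)\n",
              JSONCPP_VERSION_STRING,
              static_cast<unsigned>(JSONCPP_VERSION_HEXA));
#if JSONCPP_VERSION_HEXA >= ((1 << 24) | (0 << 16))
  // Code that relies on a 1.0.x-or-newer jsoncpp would go here.
#endif
  return 0;
}

Packing the three version components into one integer keeps such checks to a single preprocessor comparison instead of three.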
+#ifndef JSON_VERSION_H_INCLUDED +# define JSON_VERSION_H_INCLUDED + +# define JSONCPP_VERSION_STRING "@JSONCPP_VERSION@" +# define JSONCPP_VERSION_MAJOR @JSONCPP_VERSION_MAJOR@ +# define JSONCPP_VERSION_MINOR @JSONCPP_VERSION_MINOR@ +# define JSONCPP_VERSION_PATCH @JSONCPP_VERSION_PATCH@ +# define JSONCPP_VERSION_QUALIFIER +# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8)) + +#endif // JSON_VERSION_H_INCLUDED diff --git a/3rdparty/jsoncpp/src/test_lib_json/CMakeLists.txt b/3rdparty/jsoncpp/src/test_lib_json/CMakeLists.txt new file mode 100644 index 00000000000..420d659968c --- /dev/null +++ b/3rdparty/jsoncpp/src/test_lib_json/CMakeLists.txt @@ -0,0 +1,22 @@ + +IF(JSONCPP_LIB_BUILD_SHARED) + ADD_DEFINITIONS( -DJSON_DLL ) +ENDIF(JSONCPP_LIB_BUILD_SHARED) + +ADD_EXECUTABLE( jsoncpp_test + jsontest.cpp + jsontest.h + main.cpp + ) + +TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib) + +# Run unit tests in post-build +# (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?) +IF(JSONCPP_WITH_POST_BUILD_UNITTEST) + ADD_CUSTOM_COMMAND( TARGET jsoncpp_test + POST_BUILD + COMMAND $<TARGET_FILE:jsoncpp_test>) +ENDIF(JSONCPP_WITH_POST_BUILD_UNITTEST) + +SET_TARGET_PROPERTIES(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test) diff --git a/3rdparty/jsoncpp/src/test_lib_json/jsontest.cpp b/3rdparty/jsoncpp/src/test_lib_json/jsontest.cpp new file mode 100644 index 00000000000..ef9c543fa47 --- /dev/null +++ b/3rdparty/jsoncpp/src/test_lib_json/jsontest.cpp @@ -0,0 +1,443 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include <stdio.h> +#include <string> + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +#include <crtdbg.h> +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +#define WIN32_LEAN_AND_MEAN +#define NOSERVICE +#define NOMCX +#define NOIME +#define NOSOUND +#define NOCOMM +#define NORPC +#define NOGDI +#define NOUSER +#define NODRIVERS +#define NOLOGERROR +#define NOPROFILER +#define NOMEMMGR +#define NOLFILEIO +#define NOOPENFILE +#define NORESOURCE +#define NOATOM +#define NOLANGUAGE +#define NOLSTRING +#define NODBCS +#define NOKEYBOARDINFO +#define NOGDICAPMASKS +#define NOCOLOR +#define NOGDIOBJ +#define NODRAWTEXT +#define NOTEXTMETRIC +#define NOSCALABLEFONT +#define NOBITMAP +#define NORASTEROPS +#define NOMETAFILE +#define NOSYSMETRICS +#define NOSYSTEMPARAMSINFO +#define NOMSG +#define NOWINSTYLES +#define NOWINOFFSETS +#define NOSHOWWINDOW +#define NODEFERWINDOWPOS +#define NOVIRTUALKEYCODES +#define NOKEYSTATES +#define NOWH +#define NOMENUS +#define NOSCROLL +#define NOCLIPBOARD +#define NOICONS +#define NOMB +#define NOSYSCOMMANDS +#define NOMDI +#define NOCTLMGR +#define NOWINMESSAGES +#include <windows.h> +#endif // if defined(_WIN32) + +namespace JsonTest { + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_(1), lastUsedPredicateId_(0), messageTarget_(0) { + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + +void TestResult::setTestName(const std::string& name) { name_ = name; } + +TestResult& +TestResult::addFailure(const char* file, unsigned int line, const char* expr) { + /// Walks the PredicateContext stack adding them to failures_ if not already + /// added. + unsigned int nestingLevel = 0; + PredicateContext* lastNode = rootPredicateNode_.next_; + for (; lastNode != 0; lastNode = lastNode->next_) { + if (lastNode->id_ > lastUsedPredicateId_) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( + lastNode->file_, lastNode->line_, lastNode->expr_, nestingLevel); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &(failures_.back()); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo(file, line, expr, nestingLevel); + messageTarget_ = &(failures_.back()); + return *this; +} + +void TestResult::addFailureInfo(const char* file, + unsigned int line, + const char* expr, + unsigned int nestingLevel) { + Failure failure; + failure.file_ = file; + failure.line_ = line; + if (expr) { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back(failure); +} + +TestResult& TestResult::popPredicateContext() { + PredicateContext* lastNode = &rootPredicateNode_; + while (lastNode->next_ != 0 && lastNode->next_->next_ != 0) { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext* tail = lastNode->next_; + if (tail != 0 && tail->failure_ != 0) { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + +bool TestResult::failed() const { return !failures_.empty(); } + +unsigned int TestResult::getAssertionNestingLevel() const { + unsigned int level = 0; + const PredicateContext* lastNode = &rootPredicateNode_; + while (lastNode->next_ != 0) { + lastNode = lastNode->next_; + ++level; + } + return level; +} + +void TestResult::printFailure(bool printTestName) const { + if (failures_.empty()) { + return; + } + + if (printTestName) { + printf("* Detail of %s test failure:\n", name_.c_str()); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for (Failures::const_iterator it = failures_.begin(); it != itEnd; ++it) { + const Failure& failure = *it; + std::string indent(failure.nestingLevel_ * 2, ' '); + if (failure.file_) { + printf("%s%s(%d): ", indent.c_str(), failure.file_, failure.line_); + } + if (!failure.expr_.empty()) { + printf("%s\n", failure.expr_.c_str()); + } else if (failure.file_) { + printf("\n"); + } + if (!failure.message_.empty()) { + std::string reindented = indentText(failure.message_, indent + " "); + printf("%s\n", reindented.c_str()); + } + } +} + +std::string TestResult::indentText(const std::string& text, + const std::string& indent) { + std::string reindented; + std::string::size_type lastIndex = 0; + while (lastIndex < text.size()) { + std::string::size_type nextIndex = text.find('\n', lastIndex); + if (nextIndex == std::string::npos) { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr(lastIndex, nextIndex - lastIndex + 1); + lastIndex = nextIndex + 1; + } + return reindented; +} + +TestResult& TestResult::addToLastFailure(const std::string& message) { + if (messageTarget_ != 0) { + messageTarget_->message_ += message; + } + return *this; +} + +TestResult& TestResult::operator<<(Json::Int64 value) { + return addToLastFailure(Json::valueToString(value)); +} + +TestResult& TestResult::operator<<(Json::UInt64 value) { + return addToLastFailure(Json::valueToString(value)); +} + +TestResult& TestResult::operator<<(bool value) { + return addToLastFailure(value ? 
"true" : "false"); +} + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() : result_(0) {} + +TestCase::~TestCase() {} + +void TestCase::run(TestResult& result) { + result_ = &result; + runTestCase(); +} + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() {} + +Runner& Runner::add(TestCaseFactory factory) { + tests_.push_back(factory); + return *this; +} + +unsigned int Runner::testCount() const { + return static_cast<unsigned int>(tests_.size()); +} + +std::string Runner::testNameAt(unsigned int index) const { + TestCase* test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + +void Runner::runTestAt(unsigned int index, TestResult& result) const { + TestCase* test = tests_[index](); + result.setTestName(test->testName()); + printf("Testing %s: ", test->testName()); + fflush(stdout); +#if JSON_USE_EXCEPTION + try { +#endif // if JSON_USE_EXCEPTION + test->run(result); +#if JSON_USE_EXCEPTION + } + catch (const std::exception& e) { + result.addFailure(__FILE__, __LINE__, "Unexpected exception caught:") + << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char* status = result.failed() ? "FAILED" : "OK"; + printf("%s\n", status); + fflush(stdout); +} + +bool Runner::runAllTest(bool printSummary) const { + unsigned int count = testCount(); + std::deque<TestResult> failures; + for (unsigned int index = 0; index < count; ++index) { + TestResult result; + runTestAt(index, result); + if (result.failed()) { + failures.push_back(result); + } + } + + if (failures.empty()) { + if (printSummary) { + printf("All %d tests passed\n", count); + } + return true; + } else { + for (unsigned int index = 0; index < failures.size(); ++index) { + TestResult& result = failures[index]; + result.printFailure(count > 1); + } + + if (printSummary) { + unsigned int failedCount = static_cast<unsigned int>(failures.size()); + unsigned int passedCount = count - failedCount; + printf("%d/%d tests passed (%d failure(s))\n", + passedCount, + count, + failedCount); + } + return false; + } +} + +bool Runner::testIndex(const std::string& testName, + unsigned int& indexOut) const { + unsigned int count = testCount(); + for (unsigned int index = 0; index < count; ++index) { + if (testNameAt(index) == testName) { + indexOut = index; + return true; + } + } + return false; +} + +void Runner::listTests() const { + unsigned int count = testCount(); + for (unsigned int index = 0; index < count; ++index) { + printf("%s\n", testNameAt(index).c_str()); + } +} + +int Runner::runCommandLine(int argc, const char* argv[]) const { + typedef std::deque<std::string> TestNames; + Runner subrunner; + for (int index = 1; index < argc; ++index) { + std::string opt = argv[index]; + if (opt == "--list-tests") { + listTests(); + return 0; + } else if (opt == "--test-auto") { + preventDialogOnCrash(); + } else if (opt == "--test") { + ++index; + if (index < argc) { + unsigned int testNameIndex; + if (testIndex(argv[index], testNameIndex)) { + subrunner.add(tests_[testNameIndex]); + } else { + fprintf(stderr, "Test '%s' does not exist!\n", argv[index]); + return 2; + } + } else { + printUsage(argv[0]); + return 2; + } + } else { + printUsage(argv[0]); + return 2; + } + } + bool succeeded; + if (subrunner.testCount() > 0) { + succeeded = subrunner.runAllTest(subrunner.testCount() > 1); + } else { + succeeded = runAllTest(true); + } + return succeeded ? 
0 : 1; +} + +#if defined(_MSC_VER) && defined(_DEBUG) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook(int reportType, char* message, int* /*returnValue*/) { + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if (reportType == _CRT_ERROR || reportType == _CRT_ASSERT) { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if (isAborting) { + return TRUE; + } + isAborting = true; + + fprintf(stderr, "CRT Error/Assert:\n%s\n", message); + fflush(stderr); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + +void Runner::preventDialogOnCrash() { +#if defined(_MSC_VER) && defined(_DEBUG) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog + // This function a NO-OP in release configuration + // (which cause warning since msvcrtSilentReportHook is not referenced) + _CrtSetReportHook(&msvcrtSilentReportHook); +#endif // if defined(_MSC_VER) + +// @todo investiguate this handler (for buffer overflow) +// _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX | + SEM_NOOPENFILEERRORBOX); +#endif // if defined(_WIN32) +} + +void Runner::printUsage(const char* appName) { + printf("Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n", + appName); +} + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult& checkStringEqual(TestResult& result, + const std::string& expected, + const std::string& actual, + const char* file, + unsigned int line, + const char* expr) { + if (expected != actual) { + result.addFailure(file, line, expr); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + +} // namespace JsonTest diff --git a/3rdparty/jsoncpp/src/test_lib_json/jsontest.h b/3rdparty/jsoncpp/src/test_lib_json/jsontest.h new file mode 100644 index 00000000000..5c56a40b0ff --- /dev/null +++ b/3rdparty/jsoncpp/src/test_lib_json/jsontest.h @@ -0,0 +1,280 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONTEST_H_INCLUDED +#define JSONTEST_H_INCLUDED + +#include <json/config.h> +#include <json/value.h> +#include <json/writer.h> +#include <stdio.h> +#include <deque> +#include <sstream> +#include <string> + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + +class Failure { +public: + const char* file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; +}; + +/// Context used to create the assertion callstack on failure. +/// Must be a POD to allow inline initialisation without stepping +/// into the debugger. +struct PredicateContext { + typedef unsigned int Id; + Id id_; + const char* file_; + unsigned int line_; + const char* expr_; + PredicateContext* next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure* failure_; +}; + +class TestResult { +public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext* predicateStackTail_; + + void setTestName(const std::string& name); + + /// Adds an assertion failure. + TestResult& + addFailure(const char* file, unsigned int line, const char* expr = 0); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult& popPredicateContext(); + + bool failed() const; + + void printFailure(bool printTestName) const; + + // Generic operator that will work with anything ostream can deal with. + template <typename T> TestResult& operator<<(const T& value) { + std::ostringstream oss; + oss.precision(16); + oss.setf(std::ios_base::floatfield); + oss << value; + return addToLastFailure(oss.str()); + } + + // Specialized versions. 
+ TestResult& operator<<(bool value); + // std:ostream does not support 64bits integers on all STL implementation + TestResult& operator<<(Json::Int64 value); + TestResult& operator<<(Json::UInt64 value); + +private: + TestResult& addToLastFailure(const std::string& message); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo(const char* file, + unsigned int line, + const char* expr, + unsigned int nestingLevel); + static std::string indentText(const std::string& text, + const std::string& indent); + + typedef std::deque<Failure> Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure* messageTarget_; +}; + +class TestCase { +public: + TestCase(); + + virtual ~TestCase(); + + void run(TestResult& result); + + virtual const char* testName() const = 0; + +protected: + TestResult* result_; + +private: + virtual void runTestCase() = 0; +}; + +/// Function pointer type for TestCase factory +typedef TestCase* (*TestCaseFactory)(); + +class Runner { +public: + Runner(); + + /// Adds a test to the suite + Runner& add(TestCaseFactory factory); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test <testname> is provided, then run test testname. + int runCommandLine(int argc, const char* argv[]) const; + + /// Runs all the test cases + bool runAllTest(bool printSummary) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt(unsigned int index) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt(unsigned int index, TestResult& result) const; + + static void printUsage(const char* appName); + +private: // prevents copy construction and assignment + Runner(const Runner& other); + Runner& operator=(const Runner& other); + +private: + void listTests() const; + bool testIndex(const std::string& testName, unsigned int& index) const; + static void preventDialogOnCrash(); + +private: + typedef std::deque<TestCaseFactory> Factories; + Factories tests_; +}; + +template <typename T, typename U> +TestResult& checkEqual(TestResult& result, + const T& expected, + const U& actual, + const char* file, + unsigned int line, + const char* expr) { + if (static_cast<U>(expected) != actual) { + result.addFailure(file, line, expr); + result << "Expected: " << static_cast<U>(expected) << "\n"; + result << "Actual : " << actual; + } + return result; +} + +TestResult& checkStringEqual(TestResult& result, + const std::string& expected, + const std::string& actual, + const char* file, + unsigned int line, + const char* expr); + +} // namespace JsonTest + +/// \brief Asserts that the given expression is true. +/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT(expr) \ + if (expr) { \ + } else \ + result_->addFailure(__FILE__, __LINE__, #expr) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the +/// fixture. 
+#define JSONTEST_ASSERT_PRED(expr) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr \ + }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL(expected, actual) \ + JsonTest::checkEqual(*result_, \ + expected, \ + actual, \ + __FILE__, \ + __LINE__, \ + #expected " == " #actual) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL(expected, actual) \ + JsonTest::checkStringEqual(*result_, \ + std::string(expected), \ + std::string(actual), \ + __FILE__, \ + __LINE__, \ + #expected " == " #actual) + +/// \brief Asserts that a given expression throws an exception +#define JSONTEST_ASSERT_THROWS(expr) \ + { \ + bool _threw = false; \ + try { \ + expr; \ + } \ + catch (...) { \ + _threw = true; \ + } \ + if (!_threw) \ + result_->addFailure( \ + __FILE__, __LINE__, "expected exception thrown: " #expr); \ + } + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE(FixtureType, name) \ + class Test##FixtureType##name : public FixtureType { \ + public: \ + static JsonTest::TestCase* factory() { \ + return new Test##FixtureType##name(); \ + } \ + \ + public: /* overidden from TestCase */ \ + virtual const char* testName() const { return #FixtureType "/" #name; } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY(FixtureType, name) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE(runner, FixtureType, name) \ + (runner).add(JSONTEST_FIXTURE_FACTORY(FixtureType, name)) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/3rdparty/jsoncpp/src/test_lib_json/main.cpp b/3rdparty/jsoncpp/src/test_lib_json/main.cpp new file mode 100644 index 00000000000..51c5e7442b1 --- /dev/null +++ b/3rdparty/jsoncpp/src/test_lib_json/main.cpp @@ -0,0 +1,1637 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#include "jsontest.h" +#include <json/config.h> +#include <json/json.h> +#include <stdexcept> + +// Make numeric limits more convenient to talk about. +// Assumes int type in 32 bits. 
+#define kint32max Json::Value::maxInt +#define kint32min Json::Value::minInt +#define kuint32max Json::Value::maxUInt +#define kint64max Json::Value::maxInt64 +#define kint64min Json::Value::minInt64 +#define kuint64max Json::Value::maxUInt64 + +static const double kdint64max = double(kint64max); +static const float kfint64max = float(kint64max); +static const float kfint32max = float(kint32max); +static const float kfuint32max = float(kuint32max); + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) +static inline double uint64ToDouble(Json::UInt64 value) { + return static_cast<double>(value); +} +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) +static inline double uint64ToDouble(Json::UInt64 value) { + return static_cast<double>(Json::Int64(value / 2)) * 2.0 + + Json::Int64(value & 1); +} +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + +struct ValueTest : JsonTest::TestCase { + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value float_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_(Json::arrayValue), emptyObject_(Json::objectValue), + integer_(123456789), unsignedInteger_(34567890u), + smallUnsignedInteger_(Json::Value::UInt(Json::Value::maxInt)), + real_(1234.56789), float_(0.00390625f), emptyString_(""), string1_("a"), + string_("sometext with space"), true_(true), false_(false) { + array1_.append(1234); + object1_["id"] = 1234; + } + + struct IsCheck { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isString_; + bool isNull_; + + bool isInt_; + bool isInt64_; + bool isUInt_; + bool isUInt64_; + bool isIntegral_; + bool isDouble_; + bool isNumeric_; + }; + + void checkConstMemberCount(const Json::Value& value, + unsigned int expectedCount); + + void checkMemberCount(Json::Value& value, unsigned int expectedCount); + + void checkIs(const Json::Value& value, const IsCheck& check); + + void checkIsLess(const Json::Value& x, const Json::Value& y); + + void checkIsEqual(const Json::Value& x, const Json::Value& y); + + /// Normalize the representation of floating-point number by stripped leading + /// 0 in exponent. + static std::string normalizeFloatingPointStr(const std::string& s); +}; + +std::string ValueTest::normalizeFloatingPointStr(const std::string& s) { + std::string::size_type index = s.find_last_of("eE"); + if (index != std::string::npos) { + std::string::size_type hasSign = + (s[index + 1] == '+' || s[index + 1] == '-') ? 
1 : 0; + std::string::size_type exponentStartIndex = index + 1 + hasSign; + std::string normalized = s.substr(0, exponentStartIndex); + std::string::size_type indexDigit = + s.find_first_not_of('0', exponentStartIndex); + std::string exponent = "0"; + if (indexDigit != + std::string::npos) // There is an exponent different from 0 + { + exponent = s.substr(indexDigit); + } + return normalized + exponent; + } + return s; +} + +JSONTEST_FIXTURE(ValueTest, checkNormalizeFloatingPointStr) { + JSONTEST_ASSERT_STRING_EQUAL("0.0", normalizeFloatingPointStr("0.0")); + JSONTEST_ASSERT_STRING_EQUAL("0e0", normalizeFloatingPointStr("0e0")); + JSONTEST_ASSERT_STRING_EQUAL("1234.0", normalizeFloatingPointStr("1234.0")); + JSONTEST_ASSERT_STRING_EQUAL("1234.0e0", + normalizeFloatingPointStr("1234.0e0")); + JSONTEST_ASSERT_STRING_EQUAL("1234.0e+0", + normalizeFloatingPointStr("1234.0e+0")); + JSONTEST_ASSERT_STRING_EQUAL("1234e-1", normalizeFloatingPointStr("1234e-1")); + JSONTEST_ASSERT_STRING_EQUAL("1234e10", normalizeFloatingPointStr("1234e10")); + JSONTEST_ASSERT_STRING_EQUAL("1234e10", + normalizeFloatingPointStr("1234e010")); + JSONTEST_ASSERT_STRING_EQUAL("1234e+10", + normalizeFloatingPointStr("1234e+010")); + JSONTEST_ASSERT_STRING_EQUAL("1234e-10", + normalizeFloatingPointStr("1234e-010")); + JSONTEST_ASSERT_STRING_EQUAL("1234e+100", + normalizeFloatingPointStr("1234e+100")); + JSONTEST_ASSERT_STRING_EQUAL("1234e-100", + normalizeFloatingPointStr("1234e-100")); + JSONTEST_ASSERT_STRING_EQUAL("1234e+1", + normalizeFloatingPointStr("1234e+001")); +} + +JSONTEST_FIXTURE(ValueTest, memberCount) { + JSONTEST_ASSERT_PRED(checkMemberCount(emptyArray_, 0)); + JSONTEST_ASSERT_PRED(checkMemberCount(emptyObject_, 0)); + JSONTEST_ASSERT_PRED(checkMemberCount(array1_, 1)); + JSONTEST_ASSERT_PRED(checkMemberCount(object1_, 1)); + JSONTEST_ASSERT_PRED(checkMemberCount(null_, 0)); + JSONTEST_ASSERT_PRED(checkMemberCount(integer_, 0)); + JSONTEST_ASSERT_PRED(checkMemberCount(unsignedInteger_, 0)); + JSONTEST_ASSERT_PRED(checkMemberCount(smallUnsignedInteger_, 0)); + JSONTEST_ASSERT_PRED(checkMemberCount(real_, 0)); + JSONTEST_ASSERT_PRED(checkMemberCount(emptyString_, 0)); + JSONTEST_ASSERT_PRED(checkMemberCount(string_, 0)); + JSONTEST_ASSERT_PRED(checkMemberCount(true_, 0)); +} + +JSONTEST_FIXTURE(ValueTest, objects) { + // Types + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED(checkIs(emptyObject_, checks)); + JSONTEST_ASSERT_PRED(checkIs(object1_, checks)); + + JSONTEST_ASSERT_EQUAL(Json::objectValue, emptyObject_.type()); + + // Empty object okay + JSONTEST_ASSERT(emptyObject_.isConvertibleTo(Json::nullValue)); + + // Non-empty object not okay + JSONTEST_ASSERT(!object1_.isConvertibleTo(Json::nullValue)); + + // Always okay + JSONTEST_ASSERT(emptyObject_.isConvertibleTo(Json::objectValue)); + + // Never okay + JSONTEST_ASSERT(!emptyObject_.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!emptyObject_.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!emptyObject_.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(!emptyObject_.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(!emptyObject_.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(!emptyObject_.isConvertibleTo(Json::stringValue)); + + // Access through const reference + const Json::Value& constObject = object1_; + + JSONTEST_ASSERT_EQUAL(Json::Value(1234), constObject["id"]); + JSONTEST_ASSERT_EQUAL(Json::Value(), constObject["unknown id"]); + + // Access through non-const reference + 
JSONTEST_ASSERT_EQUAL(Json::Value(1234), object1_["id"]); + JSONTEST_ASSERT_EQUAL(Json::Value(), object1_["unknown id"]); + + object1_["some other id"] = "foo"; + JSONTEST_ASSERT_EQUAL(Json::Value("foo"), object1_["some other id"]); +} + +JSONTEST_FIXTURE(ValueTest, arrays) { + const unsigned int index0 = 0; + + // Types + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED(checkIs(emptyArray_, checks)); + JSONTEST_ASSERT_PRED(checkIs(array1_, checks)); + + JSONTEST_ASSERT_EQUAL(Json::arrayValue, array1_.type()); + + // Empty array okay + JSONTEST_ASSERT(emptyArray_.isConvertibleTo(Json::nullValue)); + + // Non-empty array not okay + JSONTEST_ASSERT(!array1_.isConvertibleTo(Json::nullValue)); + + // Always okay + JSONTEST_ASSERT(emptyArray_.isConvertibleTo(Json::arrayValue)); + + // Never okay + JSONTEST_ASSERT(!emptyArray_.isConvertibleTo(Json::objectValue)); + JSONTEST_ASSERT(!emptyArray_.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!emptyArray_.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(!emptyArray_.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(!emptyArray_.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(!emptyArray_.isConvertibleTo(Json::stringValue)); + + // Access through const reference + const Json::Value& constArray = array1_; + JSONTEST_ASSERT_EQUAL(Json::Value(1234), constArray[index0]); + JSONTEST_ASSERT_EQUAL(Json::Value(1234), constArray[0]); + + // Access through non-const reference + JSONTEST_ASSERT_EQUAL(Json::Value(1234), array1_[index0]); + JSONTEST_ASSERT_EQUAL(Json::Value(1234), array1_[0]); + + array1_[2] = Json::Value(17); + JSONTEST_ASSERT_EQUAL(Json::Value(), array1_[1]); + JSONTEST_ASSERT_EQUAL(Json::Value(17), array1_[2]); +} + +JSONTEST_FIXTURE(ValueTest, null) { + JSONTEST_ASSERT_EQUAL(Json::nullValue, null_.type()); + + IsCheck checks; + checks.isNull_ = true; + JSONTEST_ASSERT_PRED(checkIs(null_, checks)); + + JSONTEST_ASSERT(null_.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT_EQUAL(Json::Int(0), null_.asInt()); + JSONTEST_ASSERT_EQUAL(Json::LargestInt(0), null_.asLargestInt()); + JSONTEST_ASSERT_EQUAL(Json::UInt(0), null_.asUInt()); + JSONTEST_ASSERT_EQUAL(Json::LargestUInt(0), null_.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, null_.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, null_.asFloat()); + JSONTEST_ASSERT_STRING_EQUAL("", null_.asString()); +} + +JSONTEST_FIXTURE(ValueTest, strings) { + JSONTEST_ASSERT_EQUAL(Json::stringValue, string1_.type()); + + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED(checkIs(emptyString_, checks)); + JSONTEST_ASSERT_PRED(checkIs(string_, checks)); + JSONTEST_ASSERT_PRED(checkIs(string1_, checks)); + + // Empty string okay + JSONTEST_ASSERT(emptyString_.isConvertibleTo(Json::nullValue)); + + // Non-empty string not okay + JSONTEST_ASSERT(!string1_.isConvertibleTo(Json::nullValue)); + + // Always okay + JSONTEST_ASSERT(string1_.isConvertibleTo(Json::stringValue)); + + // Never okay + JSONTEST_ASSERT(!string1_.isConvertibleTo(Json::objectValue)); + JSONTEST_ASSERT(!string1_.isConvertibleTo(Json::arrayValue)); + 
JSONTEST_ASSERT(!string1_.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!string1_.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(!string1_.isConvertibleTo(Json::realValue)); + + JSONTEST_ASSERT_STRING_EQUAL("a", string1_.asString()); + JSONTEST_ASSERT_STRING_EQUAL("a", string1_.asCString()); +} + +JSONTEST_FIXTURE(ValueTest, bools) { + JSONTEST_ASSERT_EQUAL(Json::booleanValue, false_.type()); + + IsCheck checks; + checks.isBool_ = true; + JSONTEST_ASSERT_PRED(checkIs(false_, checks)); + JSONTEST_ASSERT_PRED(checkIs(true_, checks)); + + // False okay + JSONTEST_ASSERT(false_.isConvertibleTo(Json::nullValue)); + + // True not okay + JSONTEST_ASSERT(!true_.isConvertibleTo(Json::nullValue)); + + // Always okay + JSONTEST_ASSERT(true_.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(true_.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(true_.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(true_.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(true_.isConvertibleTo(Json::stringValue)); + + // Never okay + JSONTEST_ASSERT(!true_.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!true_.isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT_EQUAL(true, true_.asBool()); + JSONTEST_ASSERT_EQUAL(1, true_.asInt()); + JSONTEST_ASSERT_EQUAL(1, true_.asLargestInt()); + JSONTEST_ASSERT_EQUAL(1, true_.asUInt()); + JSONTEST_ASSERT_EQUAL(1, true_.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(1.0, true_.asDouble()); + JSONTEST_ASSERT_EQUAL(1.0, true_.asFloat()); + + JSONTEST_ASSERT_EQUAL(false, false_.asBool()); + JSONTEST_ASSERT_EQUAL(0, false_.asInt()); + JSONTEST_ASSERT_EQUAL(0, false_.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, false_.asUInt()); + JSONTEST_ASSERT_EQUAL(0, false_.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, false_.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, false_.asFloat()); +} + +JSONTEST_FIXTURE(ValueTest, integers) { + IsCheck checks; + Json::Value val; + + // Conversions that don't depend on the value. 
+ JSONTEST_ASSERT(Json::Value(17).isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(Json::Value(17).isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(Json::Value(17).isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(!Json::Value(17).isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!Json::Value(17).isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT(Json::Value(17U).isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(Json::Value(17U).isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(Json::Value(17U).isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(!Json::Value(17U).isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!Json::Value(17U).isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT(Json::Value(17.0).isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(Json::Value(17.0).isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(Json::Value(17.0).isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(!Json::Value(17.0).isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!Json::Value(17.0).isConvertibleTo(Json::objectValue)); + + // Default int + val = Json::Value(Json::intValue); + + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(0, val.asInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, val.asUInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(false, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); + + // Default uint + val = Json::Value(Json::uintValue); + + JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(0, val.asInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, val.asUInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(false, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); + + // Default real + val = Json::Value(Json::realValue); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT_EQUAL(0, val.asInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); + 
JSONTEST_ASSERT_EQUAL(0, val.asUInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(false, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); + + // Zero (signed constructor arg) + val = Json::Value(0); + + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(0, val.asInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, val.asUInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(false, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); + + // Zero (unsigned constructor arg) + val = Json::Value(0u); + + JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(0, val.asInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, val.asUInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(false, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); + + // Zero (floating-point constructor arg) + val = Json::Value(0.0); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(0, val.asInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, val.asUInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(false, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); + + // 2^20 (signed constructor arg) + val = Json::Value(1 << 20); + + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + 
JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL((1 << 20), val.asInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asUInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asDouble()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1048576", val.asString()); + + // 2^20 (unsigned constructor arg) + val = Json::Value(Json::UInt(1 << 20)); + + JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL((1 << 20), val.asInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asUInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asDouble()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1048576", val.asString()); + + // 2^20 (floating-point constructor arg) + val = Json::Value((1 << 20) / 1.0); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL((1 << 20), val.asInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asUInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asDouble()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1048576", + normalizeFloatingPointStr(val.asString())); + + // -2^20 + val = Json::Value(-(1 << 20)); + + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(-(1 << 20), val.asInt()); + JSONTEST_ASSERT_EQUAL(-(1 << 20), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(-(1 << 20), val.asDouble()); + JSONTEST_ASSERT_EQUAL(-(1 << 20), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("-1048576", val.asString()); + + // int32 max + val = Json::Value(kint32max); + + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + 
checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(kint32max, val.asInt()); + JSONTEST_ASSERT_EQUAL(kint32max, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(kint32max, val.asUInt()); + JSONTEST_ASSERT_EQUAL(kint32max, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(kint32max, val.asDouble()); + JSONTEST_ASSERT_EQUAL(kfint32max, val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("2147483647", val.asString()); + + // int32 min + val = Json::Value(kint32min); + + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(kint32min, val.asInt()); + JSONTEST_ASSERT_EQUAL(kint32min, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(kint32min, val.asDouble()); + JSONTEST_ASSERT_EQUAL(kint32min, val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("-2147483648", val.asString()); + + // uint32 max + val = Json::Value(kuint32max); + + JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); + + checks = IsCheck(); + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + +#ifndef JSON_NO_INT64 + JSONTEST_ASSERT_EQUAL(kuint32max, val.asLargestInt()); +#endif + JSONTEST_ASSERT_EQUAL(kuint32max, val.asUInt()); + JSONTEST_ASSERT_EQUAL(kuint32max, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(kuint32max, val.asDouble()); + JSONTEST_ASSERT_EQUAL(kfuint32max, val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("4294967295", val.asString()); + +#ifdef JSON_NO_INT64 + // int64 max + val = Json::Value(double(kint64max)); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(double(kint64max), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(kint64max), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("9.22337e+18", val.asString()); + + // int64 min + val = Json::Value(double(kint64min)); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(double(kint64min), 
val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(kint64min), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("-9.22337e+18", val.asString()); + + // uint64 max + val = Json::Value(double(kuint64max)); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(double(kuint64max), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(kuint64max), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1.84467e+19", val.asString()); +#else // ifdef JSON_NO_INT64 + // 2^40 (signed constructor arg) + val = Json::Value(Json::Int64(1) << 40); + + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); + + checks = IsCheck(); + checks.isInt64_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asInt64()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asUInt64()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asDouble()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1099511627776", val.asString()); + + // 2^40 (unsigned constructor arg) + val = Json::Value(Json::UInt64(1) << 40); + + JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); + + checks = IsCheck(); + checks.isInt64_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asInt64()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asUInt64()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asDouble()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1099511627776", val.asString()); + + // 2^40 (floating-point constructor arg) + val = Json::Value((Json::Int64(1) << 40) / 1.0); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isInt64_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asInt64()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) 
<< 40), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asUInt64()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asDouble()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1099511627776", + normalizeFloatingPointStr(val.asString())); + + // -2^40 + val = Json::Value(-(Json::Int64(1) << 40)); + + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); + + checks = IsCheck(); + checks.isInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(-(Json::Int64(1) << 40), val.asInt64()); + JSONTEST_ASSERT_EQUAL(-(Json::Int64(1) << 40), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(-(Json::Int64(1) << 40), val.asDouble()); + JSONTEST_ASSERT_EQUAL(-(Json::Int64(1) << 40), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("-1099511627776", val.asString()); + + // int64 max + val = Json::Value(Json::Int64(kint64max)); + + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); + + checks = IsCheck(); + checks.isInt64_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(kint64max, val.asInt64()); + JSONTEST_ASSERT_EQUAL(kint64max, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(kint64max, val.asUInt64()); + JSONTEST_ASSERT_EQUAL(kint64max, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(double(kint64max), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(kint64max), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("9223372036854775807", val.asString()); + + // int64 max (floating point constructor). Note that kint64max is not exactly + // representable as a double, and will be rounded up to be higher. 
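+  // (An IEEE-754 double carries only a 53-bit significand, so the 63
+  // significant bits of kint64max cannot be stored exactly; the nearest
+  // double is 2^63, which is why the asUInt64()/asLargestUInt() assertions
+  // below expect Json::UInt64(1) << 63.)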
+ val = Json::Value(double(kint64max)); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(Json::UInt64(1) << 63, val.asUInt64()); + JSONTEST_ASSERT_EQUAL(Json::UInt64(1) << 63, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(uint64ToDouble(Json::UInt64(1) << 63), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(uint64ToDouble(Json::UInt64(1) << 63)), + val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("9.2233720368547758e+18", + normalizeFloatingPointStr(val.asString())); + + // int64 min + val = Json::Value(Json::Int64(kint64min)); + + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); + + checks = IsCheck(); + checks.isInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(kint64min, val.asInt64()); + JSONTEST_ASSERT_EQUAL(kint64min, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(double(kint64min), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(kint64min), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("-9223372036854775808", val.asString()); + + // int64 min (floating point constructor). Note that kint64min *is* exactly + // representable as a double. 
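+  // (kint64min is -(2^63), an exact power of two, so it fits in a double's
+  // exponent with a single leading significand bit and no rounding occurs.)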
+ val = Json::Value(double(kint64min)); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(kint64min, val.asInt64()); + JSONTEST_ASSERT_EQUAL(kint64min, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("-9.2233720368547758e+18", + normalizeFloatingPointStr(val.asString())); + + // 10^19 + const Json::UInt64 ten_to_19 = static_cast<Json::UInt64>(1e19); + val = Json::Value(Json::UInt64(ten_to_19)); + + JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); + + checks = IsCheck(); + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(ten_to_19, val.asUInt64()); + JSONTEST_ASSERT_EQUAL(ten_to_19, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(uint64ToDouble(ten_to_19), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(uint64ToDouble(ten_to_19)), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("10000000000000000000", val.asString()); + + // 10^19 (double constructor). Note that 10^19 is not exactly representable + // as a double. + val = Json::Value(uint64ToDouble(ten_to_19)); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(1e19, val.asDouble()); + JSONTEST_ASSERT_EQUAL(1e19, val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1e+19", + normalizeFloatingPointStr(val.asString())); + + // uint64 max + val = Json::Value(Json::UInt64(kuint64max)); + + JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); + + checks = IsCheck(); + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(kuint64max, val.asUInt64()); + JSONTEST_ASSERT_EQUAL(kuint64max, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(uint64ToDouble(kuint64max), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(uint64ToDouble(kuint64max)), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("18446744073709551615", val.asString()); + + // uint64 max (floating point constructor). Note that kuint64max is not + // exactly representable as a double, and will be rounded up to be higher. 
+ val = Json::Value(uint64ToDouble(kuint64max)); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1.8446744073709552e+19", + normalizeFloatingPointStr(val.asString())); +#endif +} + +JSONTEST_FIXTURE(ValueTest, nonIntegers) { + IsCheck checks; + Json::Value val; + + // Small positive number + val = Json::Value(1.5); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT_EQUAL(1.5, val.asDouble()); + JSONTEST_ASSERT_EQUAL(1.5, val.asFloat()); + JSONTEST_ASSERT_EQUAL(1, val.asInt()); + JSONTEST_ASSERT_EQUAL(1, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(1, val.asUInt()); + JSONTEST_ASSERT_EQUAL(1, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_EQUAL("1.5", val.asString()); + + // Small negative number + val = Json::Value(-1.5); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT_EQUAL(-1.5, val.asDouble()); + JSONTEST_ASSERT_EQUAL(-1.5, val.asFloat()); + JSONTEST_ASSERT_EQUAL(-1, val.asInt()); + JSONTEST_ASSERT_EQUAL(-1, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_EQUAL("-1.5", val.asString()); + + // A bit over int32 max + val = Json::Value(kint32max + 0.5); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::objectValue)); + + 
JSONTEST_ASSERT_EQUAL(2147483647.5, val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(2147483647.5), val.asFloat()); + JSONTEST_ASSERT_EQUAL(2147483647U, val.asUInt()); +#ifdef JSON_HAS_INT64 + JSONTEST_ASSERT_EQUAL(2147483647L, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(2147483647U, val.asLargestUInt()); +#endif + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_EQUAL("2147483647.5", + normalizeFloatingPointStr(val.asString())); + + // A bit under int32 min + val = Json::Value(kint32min - 0.5); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT_EQUAL(-2147483648.5, val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(-2147483648.5), val.asFloat()); +#ifdef JSON_HAS_INT64 + JSONTEST_ASSERT_EQUAL(-Json::Int64(1) << 31, val.asLargestInt()); +#endif + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_EQUAL("-2147483648.5", + normalizeFloatingPointStr(val.asString())); + + // A bit over uint32 max + val = Json::Value(kuint32max + 0.5); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED(checkIs(val, checks)); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT_EQUAL(4294967295.5, val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(4294967295.5), val.asFloat()); +#ifdef JSON_HAS_INT64 + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 32) - 1, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((Json::UInt64(1) << 32) - Json::UInt64(1), + val.asLargestUInt()); +#endif + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_EQUAL("4294967295.5", + normalizeFloatingPointStr(val.asString())); + + val = Json::Value(1.2345678901234); + JSONTEST_ASSERT_STRING_EQUAL("1.2345678901234001", + normalizeFloatingPointStr(val.asString())); + + // A 16-digit floating point number. + val = Json::Value(2199023255552000.0f); + JSONTEST_ASSERT_EQUAL(float(2199023255552000), val.asFloat()); + JSONTEST_ASSERT_STRING_EQUAL("2199023255552000", + normalizeFloatingPointStr(val.asString())); + + // A very large floating point number. + val = Json::Value(3.402823466385289e38); + JSONTEST_ASSERT_EQUAL(float(3.402823466385289e38), val.asFloat()); + JSONTEST_ASSERT_STRING_EQUAL("3.402823466385289e+38", + normalizeFloatingPointStr(val.asString())); + + // An even larger floating point number. 
+ val = Json::Value(1.2345678e300); + JSONTEST_ASSERT_EQUAL(double(1.2345678e300), val.asDouble()); + JSONTEST_ASSERT_STRING_EQUAL("1.2345678e+300", + normalizeFloatingPointStr(val.asString())); +} + +void ValueTest::checkConstMemberCount(const Json::Value& value, + unsigned int expectedCount) { + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for (Json::Value::const_iterator it = value.begin(); it != itEnd; ++it) { + ++count; + } + JSONTEST_ASSERT_EQUAL(expectedCount, count) << "Json::Value::const_iterator"; +} + +void ValueTest::checkMemberCount(Json::Value& value, + unsigned int expectedCount) { + JSONTEST_ASSERT_EQUAL(expectedCount, value.size()); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for (Json::Value::iterator it = value.begin(); it != itEnd; ++it) { + ++count; + } + JSONTEST_ASSERT_EQUAL(expectedCount, count) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED(checkConstMemberCount(value, expectedCount)); +} + +ValueTest::IsCheck::IsCheck() + : isObject_(false), isArray_(false), isBool_(false), isString_(false), + isNull_(false), isInt_(false), isInt64_(false), isUInt_(false), + isUInt64_(false), isIntegral_(false), isDouble_(false), + isNumeric_(false) {} + +void ValueTest::checkIs(const Json::Value& value, const IsCheck& check) { + JSONTEST_ASSERT_EQUAL(check.isObject_, value.isObject()); + JSONTEST_ASSERT_EQUAL(check.isArray_, value.isArray()); + JSONTEST_ASSERT_EQUAL(check.isBool_, value.isBool()); + JSONTEST_ASSERT_EQUAL(check.isDouble_, value.isDouble()); + JSONTEST_ASSERT_EQUAL(check.isInt_, value.isInt()); + JSONTEST_ASSERT_EQUAL(check.isUInt_, value.isUInt()); + JSONTEST_ASSERT_EQUAL(check.isIntegral_, value.isIntegral()); + JSONTEST_ASSERT_EQUAL(check.isNumeric_, value.isNumeric()); + JSONTEST_ASSERT_EQUAL(check.isString_, value.isString()); + JSONTEST_ASSERT_EQUAL(check.isNull_, value.isNull()); + +#ifdef JSON_HAS_INT64 + JSONTEST_ASSERT_EQUAL(check.isInt64_, value.isInt64()); + JSONTEST_ASSERT_EQUAL(check.isUInt64_, value.isUInt64()); +#else + JSONTEST_ASSERT_EQUAL(false, value.isInt64()); + JSONTEST_ASSERT_EQUAL(false, value.isUInt64()); +#endif +} + +JSONTEST_FIXTURE(ValueTest, compareNull) { + JSONTEST_ASSERT_PRED(checkIsEqual(Json::Value(), Json::Value())); +} + +JSONTEST_FIXTURE(ValueTest, compareInt) { + JSONTEST_ASSERT_PRED(checkIsLess(0, 10)); + JSONTEST_ASSERT_PRED(checkIsEqual(10, 10)); + JSONTEST_ASSERT_PRED(checkIsEqual(-10, -10)); + JSONTEST_ASSERT_PRED(checkIsLess(-10, 0)); +} + +JSONTEST_FIXTURE(ValueTest, compareUInt) { + JSONTEST_ASSERT_PRED(checkIsLess(0u, 10u)); + JSONTEST_ASSERT_PRED(checkIsLess(0u, Json::Value::maxUInt)); + JSONTEST_ASSERT_PRED(checkIsEqual(10u, 10u)); +} + +JSONTEST_FIXTURE(ValueTest, compareDouble) { + JSONTEST_ASSERT_PRED(checkIsLess(0.0, 10.0)); + JSONTEST_ASSERT_PRED(checkIsEqual(10.0, 10.0)); + JSONTEST_ASSERT_PRED(checkIsEqual(-10.0, -10.0)); + JSONTEST_ASSERT_PRED(checkIsLess(-10.0, 0.0)); +} + +JSONTEST_FIXTURE(ValueTest, compareString) { + JSONTEST_ASSERT_PRED(checkIsLess("", " ")); + JSONTEST_ASSERT_PRED(checkIsLess("", "a")); + JSONTEST_ASSERT_PRED(checkIsLess("abcd", "zyui")); + JSONTEST_ASSERT_PRED(checkIsLess("abc", "abcd")); + JSONTEST_ASSERT_PRED(checkIsEqual("abcd", "abcd")); + JSONTEST_ASSERT_PRED(checkIsEqual(" ", " ")); + JSONTEST_ASSERT_PRED(checkIsLess("ABCD", "abcd")); + JSONTEST_ASSERT_PRED(checkIsEqual("ABCD", "ABCD")); +} + +JSONTEST_FIXTURE(ValueTest, compareBoolean) { + JSONTEST_ASSERT_PRED(checkIsLess(false, true)); + 
JSONTEST_ASSERT_PRED(checkIsEqual(false, false)); + JSONTEST_ASSERT_PRED(checkIsEqual(true, true)); +} + +JSONTEST_FIXTURE(ValueTest, compareArray) { + // array compare size then content + Json::Value emptyArray(Json::arrayValue); + Json::Value l1aArray; + l1aArray.append(0); + Json::Value l1bArray; + l1bArray.append(10); + Json::Value l2aArray; + l2aArray.append(0); + l2aArray.append(0); + Json::Value l2bArray; + l2bArray.append(0); + l2bArray.append(10); + JSONTEST_ASSERT_PRED(checkIsLess(emptyArray, l1aArray)); + JSONTEST_ASSERT_PRED(checkIsLess(emptyArray, l2aArray)); + JSONTEST_ASSERT_PRED(checkIsLess(l1aArray, l2aArray)); + JSONTEST_ASSERT_PRED(checkIsLess(l2aArray, l2bArray)); + JSONTEST_ASSERT_PRED(checkIsEqual(emptyArray, Json::Value(emptyArray))); + JSONTEST_ASSERT_PRED(checkIsEqual(l1aArray, Json::Value(l1aArray))); + JSONTEST_ASSERT_PRED(checkIsEqual(l2bArray, Json::Value(l2bArray))); +} + +JSONTEST_FIXTURE(ValueTest, compareObject) { + // object compare size then content + Json::Value emptyObject(Json::objectValue); + Json::Value l1aObject; + l1aObject["key1"] = 0; + Json::Value l1bObject; + l1aObject["key1"] = 10; + Json::Value l2aObject; + l2aObject["key1"] = 0; + l2aObject["key2"] = 0; + JSONTEST_ASSERT_PRED(checkIsLess(emptyObject, l1aObject)); + JSONTEST_ASSERT_PRED(checkIsLess(emptyObject, l2aObject)); + JSONTEST_ASSERT_PRED(checkIsLess(l1aObject, l2aObject)); + JSONTEST_ASSERT_PRED(checkIsEqual(emptyObject, Json::Value(emptyObject))); + JSONTEST_ASSERT_PRED(checkIsEqual(l1aObject, Json::Value(l1aObject))); + JSONTEST_ASSERT_PRED(checkIsEqual(l2aObject, Json::Value(l2aObject))); +} + +JSONTEST_FIXTURE(ValueTest, compareType) { + // object of different type are ordered according to their type + JSONTEST_ASSERT_PRED(checkIsLess(Json::Value(), Json::Value(1))); + JSONTEST_ASSERT_PRED(checkIsLess(Json::Value(1), Json::Value(1u))); + JSONTEST_ASSERT_PRED(checkIsLess(Json::Value(1u), Json::Value(1.0))); + JSONTEST_ASSERT_PRED(checkIsLess(Json::Value(1.0), Json::Value("a"))); + JSONTEST_ASSERT_PRED(checkIsLess(Json::Value("a"), Json::Value(true))); + JSONTEST_ASSERT_PRED( + checkIsLess(Json::Value(true), Json::Value(Json::arrayValue))); + JSONTEST_ASSERT_PRED(checkIsLess(Json::Value(Json::arrayValue), + Json::Value(Json::objectValue))); +} + +void ValueTest::checkIsLess(const Json::Value& x, const Json::Value& y) { + JSONTEST_ASSERT(x < y); + JSONTEST_ASSERT(y > x); + JSONTEST_ASSERT(x <= y); + JSONTEST_ASSERT(y >= x); + JSONTEST_ASSERT(!(x == y)); + JSONTEST_ASSERT(!(y == x)); + JSONTEST_ASSERT(!(x >= y)); + JSONTEST_ASSERT(!(y <= x)); + JSONTEST_ASSERT(!(x > y)); + JSONTEST_ASSERT(!(y < x)); + JSONTEST_ASSERT(x.compare(y) < 0); + JSONTEST_ASSERT(y.compare(x) >= 0); +} + +void ValueTest::checkIsEqual(const Json::Value& x, const Json::Value& y) { + JSONTEST_ASSERT(x == y); + JSONTEST_ASSERT(y == x); + JSONTEST_ASSERT(x <= y); + JSONTEST_ASSERT(y <= x); + JSONTEST_ASSERT(x >= y); + JSONTEST_ASSERT(y >= x); + JSONTEST_ASSERT(!(x < y)); + JSONTEST_ASSERT(!(y < x)); + JSONTEST_ASSERT(!(x > y)); + JSONTEST_ASSERT(!(y > x)); + JSONTEST_ASSERT(x.compare(y) == 0); + JSONTEST_ASSERT(y.compare(x) == 0); +} + +JSONTEST_FIXTURE(ValueTest, typeChecksThrowExceptions) { +#if JSON_USE_EXCEPTION + + Json::Value intVal(1); + Json::Value strVal("Test"); + Json::Value objVal(Json::objectValue); + Json::Value arrVal(Json::arrayValue); + + JSONTEST_ASSERT_THROWS(intVal["test"]); + JSONTEST_ASSERT_THROWS(strVal["test"]); + JSONTEST_ASSERT_THROWS(arrVal["test"]); + + 
JSONTEST_ASSERT_THROWS(intVal.removeMember("test")); + JSONTEST_ASSERT_THROWS(strVal.removeMember("test")); + JSONTEST_ASSERT_THROWS(arrVal.removeMember("test")); + + JSONTEST_ASSERT_THROWS(intVal.getMemberNames()); + JSONTEST_ASSERT_THROWS(strVal.getMemberNames()); + JSONTEST_ASSERT_THROWS(arrVal.getMemberNames()); + + JSONTEST_ASSERT_THROWS(intVal[0]); + JSONTEST_ASSERT_THROWS(objVal[0]); + JSONTEST_ASSERT_THROWS(strVal[0]); + + JSONTEST_ASSERT_THROWS(intVal.clear()); + + JSONTEST_ASSERT_THROWS(intVal.resize(1)); + JSONTEST_ASSERT_THROWS(strVal.resize(1)); + JSONTEST_ASSERT_THROWS(objVal.resize(1)); + + JSONTEST_ASSERT_THROWS(intVal.asCString()); + + JSONTEST_ASSERT_THROWS(objVal.asString()); + JSONTEST_ASSERT_THROWS(arrVal.asString()); + + JSONTEST_ASSERT_THROWS(strVal.asInt()); + JSONTEST_ASSERT_THROWS(objVal.asInt()); + JSONTEST_ASSERT_THROWS(arrVal.asInt()); + + JSONTEST_ASSERT_THROWS(strVal.asUInt()); + JSONTEST_ASSERT_THROWS(objVal.asUInt()); + JSONTEST_ASSERT_THROWS(arrVal.asUInt()); + + JSONTEST_ASSERT_THROWS(strVal.asInt64()); + JSONTEST_ASSERT_THROWS(objVal.asInt64()); + JSONTEST_ASSERT_THROWS(arrVal.asInt64()); + + JSONTEST_ASSERT_THROWS(strVal.asUInt64()); + JSONTEST_ASSERT_THROWS(objVal.asUInt64()); + JSONTEST_ASSERT_THROWS(arrVal.asUInt64()); + + JSONTEST_ASSERT_THROWS(strVal.asDouble()); + JSONTEST_ASSERT_THROWS(objVal.asDouble()); + JSONTEST_ASSERT_THROWS(arrVal.asDouble()); + + JSONTEST_ASSERT_THROWS(strVal.asFloat()); + JSONTEST_ASSERT_THROWS(objVal.asFloat()); + JSONTEST_ASSERT_THROWS(arrVal.asFloat()); + + JSONTEST_ASSERT_THROWS(strVal.asBool()); + JSONTEST_ASSERT_THROWS(objVal.asBool()); + JSONTEST_ASSERT_THROWS(arrVal.asBool()); + +#endif +} + +JSONTEST_FIXTURE(ValueTest, offsetAccessors) { + Json::Value x; + JSONTEST_ASSERT(x.getOffsetStart() == 0); + JSONTEST_ASSERT(x.getOffsetLimit() == 0); + x.setOffsetStart(10); + x.setOffsetLimit(20); + JSONTEST_ASSERT(x.getOffsetStart() == 10); + JSONTEST_ASSERT(x.getOffsetLimit() == 20); + Json::Value y(x); + JSONTEST_ASSERT(y.getOffsetStart() == 10); + JSONTEST_ASSERT(y.getOffsetLimit() == 20); + Json::Value z; + z.swap(y); + JSONTEST_ASSERT(z.getOffsetStart() == 10); + JSONTEST_ASSERT(z.getOffsetLimit() == 20); + JSONTEST_ASSERT(y.getOffsetStart() == 0); + JSONTEST_ASSERT(y.getOffsetLimit() == 0); +} + +struct WriterTest : JsonTest::TestCase {}; + +JSONTEST_FIXTURE(WriterTest, dropNullPlaceholders) { + Json::FastWriter writer; + Json::Value nullValue; + JSONTEST_ASSERT(writer.write(nullValue) == "null\n"); + + writer.dropNullPlaceholders(); + JSONTEST_ASSERT(writer.write(nullValue) == "\n"); +} + +struct ReaderTest : JsonTest::TestCase {}; + +JSONTEST_FIXTURE(ReaderTest, parseWithNoErrors) { + Json::Reader reader; + Json::Value root; + bool ok = reader.parse("{ \"property\" : \"value\" }", root); + JSONTEST_ASSERT(ok); + JSONTEST_ASSERT(reader.getFormattedErrorMessages().size() == 0); + JSONTEST_ASSERT(reader.getStructuredErrors().size() == 0); +} + +JSONTEST_FIXTURE(ReaderTest, parseWithNoErrorsTestingOffsets) { + Json::Reader reader; + Json::Value root; + bool ok = reader.parse("{ \"property\" : [\"value\", \"value2\"], \"obj\" : " + "{ \"nested\" : 123, \"bool\" : true}, \"null\" : " + "null, \"false\" : false }", + root); + JSONTEST_ASSERT(ok); + JSONTEST_ASSERT(reader.getFormattedErrorMessages().size() == 0); + JSONTEST_ASSERT(reader.getStructuredErrors().size() == 0); + JSONTEST_ASSERT(root["property"].getOffsetStart() == 15); + JSONTEST_ASSERT(root["property"].getOffsetLimit() == 34); + 
JSONTEST_ASSERT(root["property"][0].getOffsetStart() == 16); + JSONTEST_ASSERT(root["property"][0].getOffsetLimit() == 23); + JSONTEST_ASSERT(root["property"][1].getOffsetStart() == 25); + JSONTEST_ASSERT(root["property"][1].getOffsetLimit() == 33); + JSONTEST_ASSERT(root["obj"].getOffsetStart() == 44); + JSONTEST_ASSERT(root["obj"].getOffsetLimit() == 76); + JSONTEST_ASSERT(root["obj"]["nested"].getOffsetStart() == 57); + JSONTEST_ASSERT(root["obj"]["nested"].getOffsetLimit() == 60); + JSONTEST_ASSERT(root["obj"]["bool"].getOffsetStart() == 71); + JSONTEST_ASSERT(root["obj"]["bool"].getOffsetLimit() == 75); + JSONTEST_ASSERT(root["null"].getOffsetStart() == 87); + JSONTEST_ASSERT(root["null"].getOffsetLimit() == 91); + JSONTEST_ASSERT(root["false"].getOffsetStart() == 103); + JSONTEST_ASSERT(root["false"].getOffsetLimit() == 108); + JSONTEST_ASSERT(root.getOffsetStart() == 0); + JSONTEST_ASSERT(root.getOffsetLimit() == 110); +} + +JSONTEST_FIXTURE(ReaderTest, parseWithOneError) { + Json::Reader reader; + Json::Value root; + bool ok = reader.parse("{ \"property\" :: \"value\" }", root); + JSONTEST_ASSERT(!ok); + JSONTEST_ASSERT(reader.getFormattedErrorMessages() == + "* Line 1, Column 15\n Syntax error: value, object or array " + "expected.\n"); + std::vector<Json::Reader::StructuredError> errors = + reader.getStructuredErrors(); + JSONTEST_ASSERT(errors.size() == 1); + JSONTEST_ASSERT(errors.at(0).offset_start == 14); + JSONTEST_ASSERT(errors.at(0).offset_limit == 15); + JSONTEST_ASSERT(errors.at(0).message == + "Syntax error: value, object or array expected."); +} + +JSONTEST_FIXTURE(ReaderTest, parseChineseWithOneError) { + Json::Reader reader; + Json::Value root; + bool ok = reader.parse("{ \"pr佐藤erty\" :: \"value\" }", root); + JSONTEST_ASSERT(!ok); + JSONTEST_ASSERT(reader.getFormattedErrorMessages() == + "* Line 1, Column 19\n Syntax error: value, object or array " + "expected.\n"); + std::vector<Json::Reader::StructuredError> errors = + reader.getStructuredErrors(); + JSONTEST_ASSERT(errors.size() == 1); + JSONTEST_ASSERT(errors.at(0).offset_start == 18); + JSONTEST_ASSERT(errors.at(0).offset_limit == 19); + JSONTEST_ASSERT(errors.at(0).message == + "Syntax error: value, object or array expected."); +} + +JSONTEST_FIXTURE(ReaderTest, parseWithDetailError) { + Json::Reader reader; + Json::Value root; + bool ok = reader.parse("{ \"property\" : \"v\\alue\" }", root); + JSONTEST_ASSERT(!ok); + JSONTEST_ASSERT(reader.getFormattedErrorMessages() == + "* Line 1, Column 16\n Bad escape sequence in string\nSee " + "Line 1, Column 20 for detail.\n"); + std::vector<Json::Reader::StructuredError> errors = + reader.getStructuredErrors(); + JSONTEST_ASSERT(errors.size() == 1); + JSONTEST_ASSERT(errors.at(0).offset_start == 15); + JSONTEST_ASSERT(errors.at(0).offset_limit == 23); + JSONTEST_ASSERT(errors.at(0).message == "Bad escape sequence in string"); +} + +int main(int argc, const char* argv[]) { + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, checkNormalizeFloatingPointStr); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, memberCount); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, objects); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, arrays); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, null); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, strings); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, bools); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, integers); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, nonIntegers); + JSONTEST_REGISTER_FIXTURE(runner, 
ValueTest, compareNull); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, compareInt); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, compareUInt); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, compareDouble); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, compareString); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, compareBoolean); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, compareArray); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, compareObject); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, compareType); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, offsetAccessors); + JSONTEST_REGISTER_FIXTURE(runner, ValueTest, typeChecksThrowExceptions); + + JSONTEST_REGISTER_FIXTURE(runner, ReaderTest, parseWithNoErrors); + JSONTEST_REGISTER_FIXTURE( + runner, ReaderTest, parseWithNoErrorsTestingOffsets); + JSONTEST_REGISTER_FIXTURE(runner, ReaderTest, parseWithOneError); + JSONTEST_REGISTER_FIXTURE(runner, ReaderTest, parseChineseWithOneError); + JSONTEST_REGISTER_FIXTURE(runner, ReaderTest, parseWithDetailError); + + JSONTEST_REGISTER_FIXTURE(runner, WriterTest, dropNullPlaceholders); + + return runner.runCommandLine(argc, argv); +} diff --git a/3rdparty/jsoncpp/src/test_lib_json/sconscript b/3rdparty/jsoncpp/src/test_lib_json/sconscript new file mode 100644 index 00000000000..915fd01c0a3 --- /dev/null +++ b/3rdparty/jsoncpp/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('test_lib_json', '#libs') diff --git a/3rdparty/jsoncpp/test/cleantests.py b/3rdparty/jsoncpp/test/cleantests.py new file mode 100644 index 00000000000..c38fd8ffdd1 --- /dev/null +++ b/3rdparty/jsoncpp/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/3rdparty/jsoncpp/test/data/fail_test_array_01.json b/3rdparty/jsoncpp/test/data/fail_test_array_01.json new file mode 100644 index 00000000000..f72a6d07749 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/fail_test_array_01.json @@ -0,0 +1 @@ +[ 1 2 3] diff --git a/3rdparty/jsoncpp/test/data/test_array_01.expected b/3rdparty/jsoncpp/test/data/test_array_01.expected new file mode 100644 index 00000000000..a341ff753cb --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/3rdparty/jsoncpp/test/data/test_array_01.json b/3rdparty/jsoncpp/test/data/test_array_01.json new file mode 100644 index 00000000000..fe51488c706 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/3rdparty/jsoncpp/test/data/test_array_02.expected b/3rdparty/jsoncpp/test/data/test_array_02.expected new file mode 100644 index 00000000000..ef1f2623de0 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/3rdparty/jsoncpp/test/data/test_array_02.json b/3rdparty/jsoncpp/test/data/test_array_02.json new file mode 100644 index 00000000000..7660873d103 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/3rdparty/jsoncpp/test/data/test_array_03.expected b/3rdparty/jsoncpp/test/data/test_array_03.expected new file mode 100644 index 00000000000..3d8dc18eb1e --- /dev/null 
+++ b/3rdparty/jsoncpp/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/3rdparty/jsoncpp/test/data/test_array_03.json b/3rdparty/jsoncpp/test/data/test_array_03.json new file mode 100644 index 00000000000..9b3f9247559 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/3rdparty/jsoncpp/test/data/test_array_04.expected b/3rdparty/jsoncpp/test/data/test_array_04.expected new file mode 100644 index 00000000000..ad4add97946 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/3rdparty/jsoncpp/test/data/test_array_04.json b/3rdparty/jsoncpp/test/data/test_array_04.json new file mode 100644 index 00000000000..ecca546b2c6 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/3rdparty/jsoncpp/test/data/test_array_05.expected b/3rdparty/jsoncpp/test/data/test_array_05.expected new file mode 100644 index 00000000000..76cff87c2ab --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 +.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/3rdparty/jsoncpp/test/data/test_array_05.json b/3rdparty/jsoncpp/test/data/test_array_05.json new file mode 100644 index 00000000000..7809d6c9a1a --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/data/test_array_06.expected b/3rdparty/jsoncpp/test/data/test_array_06.expected new file mode 100644 index 00000000000..5c9f48eaacd --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/3rdparty/jsoncpp/test/data/test_array_06.json b/3rdparty/jsoncpp/test/data/test_array_06.json new file mode 100644 index 00000000000..7f6c516afde --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/data/test_array_07.expected b/3rdparty/jsoncpp/test/data/test_array_07.expected new file mode 100644 index 00000000000..ee2fafc010e --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_07.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 
+.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 
+.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 
+.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 
+.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 
+.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 
+.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 
+.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 
+.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 
+.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/3rdparty/jsoncpp/test/data/test_array_07.json b/3rdparty/jsoncpp/test/data/test_array_07.json new file mode 100644 index 00000000000..e4ab4cd4330 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_array_07.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/data/test_basic_01.expected b/3rdparty/jsoncpp/test/data/test_basic_01.expected new file mode 100644 index 00000000000..d761fce1cc0 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/3rdparty/jsoncpp/test/data/test_basic_01.json b/3rdparty/jsoncpp/test/data/test_basic_01.json new file mode 100644 index 00000000000..11f11f9be3b --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/3rdparty/jsoncpp/test/data/test_basic_02.expected b/3rdparty/jsoncpp/test/data/test_basic_02.expected new file mode 100644 index 00000000000..650e37cbaaf --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/3rdparty/jsoncpp/test/data/test_basic_02.json b/3rdparty/jsoncpp/test/data/test_basic_02.json new file mode 100644 index 00000000000..bf11bceac74 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/3rdparty/jsoncpp/test/data/test_basic_03.expected b/3rdparty/jsoncpp/test/data/test_basic_03.expected new file mode 100644 index 00000000000..1da2d395b8e --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/3rdparty/jsoncpp/test/data/test_basic_03.json b/3rdparty/jsoncpp/test/data/test_basic_03.json new file mode 100644 index 00000000000..a92b6bd28b5 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/3rdparty/jsoncpp/test/data/test_basic_04.expected b/3rdparty/jsoncpp/test/data/test_basic_04.expected new file mode 100644 index 00000000000..013f424f932 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/3rdparty/jsoncpp/test/data/test_basic_04.json b/3rdparty/jsoncpp/test/data/test_basic_04.json new file mode 100644 index 00000000000..17eeb99e0b0 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/3rdparty/jsoncpp/test/data/test_basic_05.expected b/3rdparty/jsoncpp/test/data/test_basic_05.expected new file mode 100644 index 00000000000..c8db822e584 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/3rdparty/jsoncpp/test/data/test_basic_05.json b/3rdparty/jsoncpp/test/data/test_basic_05.json new file mode 100644 index 00000000000..d0aaea2126b --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/3rdparty/jsoncpp/test/data/test_basic_06.expected b/3rdparty/jsoncpp/test/data/test_basic_06.expected new file mode 100644 index 00000000000..49be55a9654 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/3rdparty/jsoncpp/test/data/test_basic_06.json b/3rdparty/jsoncpp/test/data/test_basic_06.json new file mode 100644 index 00000000000..7eead1ee75f --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/3rdparty/jsoncpp/test/data/test_basic_07.expected b/3rdparty/jsoncpp/test/data/test_basic_07.expected new file mode 100644 index 00000000000..fe55a6a8546 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/3rdparty/jsoncpp/test/data/test_basic_07.json b/3rdparty/jsoncpp/test/data/test_basic_07.json new file mode 100644 index 00000000000..a864bc487d4 
--- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/3rdparty/jsoncpp/test/data/test_basic_08.expected b/3rdparty/jsoncpp/test/data/test_basic_08.expected new file mode 100644 index 00000000000..caf5352a113 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_08.expected @@ -0,0 +1,3 @@ +// C++ style comment +.=null + diff --git a/3rdparty/jsoncpp/test/data/test_basic_08.json b/3rdparty/jsoncpp/test/data/test_basic_08.json new file mode 100644 index 00000000000..fd78837c0c8 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/3rdparty/jsoncpp/test/data/test_basic_09.expected b/3rdparty/jsoncpp/test/data/test_basic_09.expected new file mode 100644 index 00000000000..8b129da42e4 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_09.expected @@ -0,0 +1,4 @@ +/* C style comment + */ +.=null + diff --git a/3rdparty/jsoncpp/test/data/test_basic_09.json b/3rdparty/jsoncpp/test/data/test_basic_09.json new file mode 100644 index 00000000000..fc95f0f514e --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/3rdparty/jsoncpp/test/data/test_comment_01.expected b/3rdparty/jsoncpp/test/data/test_comment_01.expected new file mode 100644 index 00000000000..2a7f00c450b --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/3rdparty/jsoncpp/test/data/test_comment_01.json b/3rdparty/jsoncpp/test/data/test_comment_01.json new file mode 100644 index 00000000000..7363490a91d --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + "test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/3rdparty/jsoncpp/test/data/test_comment_02.expected b/3rdparty/jsoncpp/test/data/test_comment_02.expected new file mode 100644 index 00000000000..88d2bd0dc1b --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_comment_02.expected @@ -0,0 +1,14 @@ +.={} +/* C-style comment + + C-style-2 comment */ +.c-test={} +.c-test.a=1 +/* Internal comment c-style */ +.c-test.b=2 +// C++-style comment +.cpp-test={} +// Multiline comment cpp-style +// Second line +.cpp-test.c=3 +.cpp-test.d=4 diff --git a/3rdparty/jsoncpp/test/data/test_comment_02.json b/3rdparty/jsoncpp/test/data/test_comment_02.json new file mode 100644 index 00000000000..297d889036a --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_comment_02.json @@ -0,0 +1,17 @@ +{ + /* C-style comment + + C-style-2 comment */ + "c-test" : { + "a" : 1, + /* Internal comment c-style */ + "b" : 2 + }, + // C++-style comment + "cpp-test" : { + // Multiline comment cpp-style + // Second line + "c" : 3, + "d" : 4 + } +} diff --git a/3rdparty/jsoncpp/test/data/test_complex_01.expected b/3rdparty/jsoncpp/test/data/test_complex_01.expected new file mode 100644 index 00000000000..7573c8812d9 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." 
+.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/3rdparty/jsoncpp/test/data/test_complex_01.json b/3rdparty/jsoncpp/test/data/test_complex_01.json new file mode 100644 index 00000000000..cc0f30f5c34 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/3rdparty/jsoncpp/test/data/test_integer_01.expected b/3rdparty/jsoncpp/test/data/test_integer_01.expected new file mode 100644 index 00000000000..463e149de87 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_01.expected @@ -0,0 +1,2 @@ +// Max signed integer +.=2147483647 diff --git a/3rdparty/jsoncpp/test/data/test_integer_01.json b/3rdparty/jsoncpp/test/data/test_integer_01.json new file mode 100644 index 00000000000..5ab12ffee41 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/3rdparty/jsoncpp/test/data/test_integer_02.expected b/3rdparty/jsoncpp/test/data/test_integer_02.expected new file mode 100644 index 00000000000..0773e0851a4 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_02.expected @@ -0,0 +1,2 @@ +// Min signed integer +.=-2147483648 diff --git a/3rdparty/jsoncpp/test/data/test_integer_02.json b/3rdparty/jsoncpp/test/data/test_integer_02.json new file mode 100644 index 00000000000..056c8500657 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/3rdparty/jsoncpp/test/data/test_integer_03.expected b/3rdparty/jsoncpp/test/data/test_integer_03.expected new file mode 100644 index 00000000000..c7efff799eb --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_03.expected @@ -0,0 +1,2 @@ +// Max unsigned integer +.=4294967295 diff --git a/3rdparty/jsoncpp/test/data/test_integer_03.json b/3rdparty/jsoncpp/test/data/test_integer_03.json new file mode 100644 index 00000000000..12ef3fbb894 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/3rdparty/jsoncpp/test/data/test_integer_04.expected b/3rdparty/jsoncpp/test/data/test_integer_04.expected new file mode 100644 index 00000000000..39f8567040f --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_04.expected @@ -0,0 +1,3 @@ +// Min unsigned integer +.=0 + diff --git a/3rdparty/jsoncpp/test/data/test_integer_04.json b/3rdparty/jsoncpp/test/data/test_integer_04.json new file mode 100644 index 00000000000..bf814994533 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/3rdparty/jsoncpp/test/data/test_integer_05.expected b/3rdparty/jsoncpp/test/data/test_integer_05.expected new file mode 100644 index 00000000000..0caea9d3fd3 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/3rdparty/jsoncpp/test/data/test_integer_05.json b/3rdparty/jsoncpp/test/data/test_integer_05.json new file mode 100644 index 00000000000..d474e1b4d62 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/3rdparty/jsoncpp/test/data/test_integer_06_64bits.expected 
b/3rdparty/jsoncpp/test/data/test_integer_06_64bits.expected new file mode 100644 index 00000000000..131b085ff56 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_06_64bits.expected @@ -0,0 +1 @@ +.=9223372036854775808 diff --git a/3rdparty/jsoncpp/test/data/test_integer_06_64bits.json b/3rdparty/jsoncpp/test/data/test_integer_06_64bits.json new file mode 100644 index 00000000000..cfedfe5e809 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_06_64bits.json @@ -0,0 +1,2 @@ +9223372036854775808 + diff --git a/3rdparty/jsoncpp/test/data/test_integer_07_64bits.expected b/3rdparty/jsoncpp/test/data/test_integer_07_64bits.expected new file mode 100644 index 00000000000..c8524a32c7a --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_07_64bits.expected @@ -0,0 +1 @@ +.=-9223372036854775808 diff --git a/3rdparty/jsoncpp/test/data/test_integer_07_64bits.json b/3rdparty/jsoncpp/test/data/test_integer_07_64bits.json new file mode 100644 index 00000000000..a964ad2ed25 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_07_64bits.json @@ -0,0 +1,2 @@ +-9223372036854775808 + diff --git a/3rdparty/jsoncpp/test/data/test_integer_08_64bits.expected b/3rdparty/jsoncpp/test/data/test_integer_08_64bits.expected new file mode 100644 index 00000000000..321bba5a738 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_08_64bits.expected @@ -0,0 +1 @@ +.=18446744073709551615 diff --git a/3rdparty/jsoncpp/test/data/test_integer_08_64bits.json b/3rdparty/jsoncpp/test/data/test_integer_08_64bits.json new file mode 100644 index 00000000000..4c15a01b4a9 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_integer_08_64bits.json @@ -0,0 +1,2 @@ +18446744073709551615 + diff --git a/3rdparty/jsoncpp/test/data/test_large_01.expected b/3rdparty/jsoncpp/test/data/test_large_01.expected new file mode 100644 index 00000000000..ee2fafc010e --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 
+.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 
+.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 
+.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 
+.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 
+.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 
+.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 
+.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 
+.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 
+.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 
+.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/3rdparty/jsoncpp/test/data/test_large_01.json b/3rdparty/jsoncpp/test/data/test_large_01.json new file mode 100644 index 00000000000..e4ab4cd4330 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_large_01.json @@ -0,0 +1,2 @@ +[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,7
86,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,914,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,
1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,1642,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/data/test_object_01.expected b/3rdparty/jsoncpp/test/data/test_object_01.expected new file mode 100644 index 00000000000..67444e5794e --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/3rdparty/jsoncpp/test/data/test_object_01.json b/3rdparty/jsoncpp/test/data/test_object_01.json new file mode 100644 index 00000000000..0967ef424bc --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/3rdparty/jsoncpp/test/data/test_object_02.expected b/3rdparty/jsoncpp/test/data/test_object_02.expected new file mode 100644 index 00000000000..79391c2a6cb --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/3rdparty/jsoncpp/test/data/test_object_02.json b/3rdparty/jsoncpp/test/data/test_object_02.json new file mode 100644 index 00000000000..d0f2facac36 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/3rdparty/jsoncpp/test/data/test_object_03.expected b/3rdparty/jsoncpp/test/data/test_object_03.expected new file mode 100644 index 00000000000..5e96113792d --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/3rdparty/jsoncpp/test/data/test_object_03.json b/3rdparty/jsoncpp/test/data/test_object_03.json new file mode 100644 index 00000000000..4fcd4d821d4 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/3rdparty/jsoncpp/test/data/test_object_04.expected b/3rdparty/jsoncpp/test/data/test_object_04.expected new file mode 100644 index 00000000000..812965b0caa --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/3rdparty/jsoncpp/test/data/test_object_04.json b/3rdparty/jsoncpp/test/data/test_object_04.json new file mode 100644 index 00000000000..450762d71e5 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/3rdparty/jsoncpp/test/data/test_preserve_comment_01.expected b/3rdparty/jsoncpp/test/data/test_preserve_comment_01.expected new file mode 100644 index 00000000000..2797aa7d6be --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_preserve_comment_01.expected @@ -0,0 +1,11 @@ +/* A comment + at the beginning of the file. + */ +.={} +.first=1 +/* Comment before 'second' + */ +.second=2 +/* A comment at + the end of the file. + */ diff --git a/3rdparty/jsoncpp/test/data/test_preserve_comment_01.json b/3rdparty/jsoncpp/test/data/test_preserve_comment_01.json new file mode 100644 index 00000000000..fabd55dd966 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/3rdparty/jsoncpp/test/data/test_real_01.expected b/3rdparty/jsoncpp/test/data/test_real_01.expected new file mode 100644 index 00000000000..951482708a0 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_01.expected @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +.=8589934592 + diff --git a/3rdparty/jsoncpp/test/data/test_real_01.json b/3rdparty/jsoncpp/test/data/test_real_01.json new file mode 100644 index 00000000000..358452d3878 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/3rdparty/jsoncpp/test/data/test_real_02.expected b/3rdparty/jsoncpp/test/data/test_real_02.expected new file mode 100644 index 00000000000..b80c0048119 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_02.expected @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +.=-4294967295 + diff --git a/3rdparty/jsoncpp/test/data/test_real_02.json b/3rdparty/jsoncpp/test/data/test_real_02.json new file mode 100644 index 00000000000..936c706b68e --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/3rdparty/jsoncpp/test/data/test_real_03.expected b/3rdparty/jsoncpp/test/data/test_real_03.expected new file mode 100644 index 00000000000..b80c0048119 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_03.expected @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +.=-4294967295 + diff --git a/3rdparty/jsoncpp/test/data/test_real_03.json b/3rdparty/jsoncpp/test/data/test_real_03.json new file mode 100644 index 00000000000..936c706b68e --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/3rdparty/jsoncpp/test/data/test_real_04.expected b/3rdparty/jsoncpp/test/data/test_real_04.expected new file mode 100644 index 00000000000..ff71a23e141 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_04.expected @@ -0,0 +1,3 @@ +// 1.2345678 +.=1.2345678 + diff --git a/3rdparty/jsoncpp/test/data/test_real_04.json b/3rdparty/jsoncpp/test/data/test_real_04.json new file mode 100644 index 00000000000..a8eb6d0c92c --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/3rdparty/jsoncpp/test/data/test_real_05.expected b/3rdparty/jsoncpp/test/data/test_real_05.expected new file mode 100644 index 00000000000..7a460932d7b --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_05.expected @@ -0,0 +1,4 @@ +// 1234567.8 +.=1234567.8 + + diff --git a/3rdparty/jsoncpp/test/data/test_real_05.json b/3rdparty/jsoncpp/test/data/test_real_05.json new file mode 100644 index 00000000000..f7923bab898 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/3rdparty/jsoncpp/test/data/test_real_06.expected b/3rdparty/jsoncpp/test/data/test_real_06.expected new file mode 100644 index 00000000000..a4a004deb14 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_06.expected @@ -0,0 +1,4 @@ +// -1.2345678 +.=-1.2345678 + + diff --git a/3rdparty/jsoncpp/test/data/test_real_06.json b/3rdparty/jsoncpp/test/data/test_real_06.json new file mode 100644 index 00000000000..485419a276c --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff 
--git a/3rdparty/jsoncpp/test/data/test_real_07.expected b/3rdparty/jsoncpp/test/data/test_real_07.expected new file mode 100644 index 00000000000..dc02a895831 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_07.expected @@ -0,0 +1,4 @@ +// -1234567.8 +.=-1234567.8 + + diff --git a/3rdparty/jsoncpp/test/data/test_real_07.json b/3rdparty/jsoncpp/test/data/test_real_07.json new file mode 100644 index 00000000000..8013eb5c9a4 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/3rdparty/jsoncpp/test/data/test_real_08.expected b/3rdparty/jsoncpp/test/data/test_real_08.expected new file mode 100644 index 00000000000..b1deef919e9 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_08.expected @@ -0,0 +1,4 @@ +// Out of 32-bit integer range, switch to double in 32-bit mode. Length the +// same as UINT_MAX in base 10 and digit less than UINT_MAX's last digit in +// order to catch a bug in the parsing code. +.=4300000001 diff --git a/3rdparty/jsoncpp/test/data/test_real_08.json b/3rdparty/jsoncpp/test/data/test_real_08.json new file mode 100644 index 00000000000..cca950d939e --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_08.json @@ -0,0 +1,4 @@ +// Out of 32-bit integer range, switch to double in 32-bit mode. Length the +// same as UINT_MAX in base 10 and digit less than UINT_MAX's last digit in +// order to catch a bug in the parsing code. +4300000001 diff --git a/3rdparty/jsoncpp/test/data/test_real_09.expected b/3rdparty/jsoncpp/test/data/test_real_09.expected new file mode 100644 index 00000000000..aa2dbb2849d --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_09.expected @@ -0,0 +1,4 @@ +// Out of 64-bit integer range, switch to double in all modes. Length the same +// as ULONG_MAX in base 10 and digit less than ULONG_MAX's last digit in order +// to catch a bug in the parsing code. +.=1.9e+19 diff --git a/3rdparty/jsoncpp/test/data/test_real_09.json b/3rdparty/jsoncpp/test/data/test_real_09.json new file mode 100644 index 00000000000..e65d50c97c6 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_09.json @@ -0,0 +1,4 @@ +// Out of 64-bit integer range, switch to double in all modes. Length the same +// as ULONG_MAX in base 10 and digit less than ULONG_MAX's last digit in order +// to catch a bug in the parsing code. +19000000000000000001 diff --git a/3rdparty/jsoncpp/test/data/test_real_10.expected b/3rdparty/jsoncpp/test/data/test_real_10.expected new file mode 100644 index 00000000000..d28a430eab4 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_10.expected @@ -0,0 +1,4 @@ +// Out of 32-bit signed integer range, switch to double in all modes. Length +// the same as INT_MIN in base 10 and digit less than INT_MIN's last digit in +// order to catch a bug in the parsing code. +.=-2200000001 diff --git a/3rdparty/jsoncpp/test/data/test_real_10.json b/3rdparty/jsoncpp/test/data/test_real_10.json new file mode 100644 index 00000000000..a6a8bcef50f --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_10.json @@ -0,0 +1,4 @@ +// Out of 32-bit signed integer range, switch to double in all modes. Length +// the same as INT_MIN in base 10 and digit less than INT_MIN's last digit in +// order to catch a bug in the parsing code. 
+-2200000001 diff --git a/3rdparty/jsoncpp/test/data/test_real_11.expected b/3rdparty/jsoncpp/test/data/test_real_11.expected new file mode 100644 index 00000000000..2551946f52f --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_11.expected @@ -0,0 +1,4 @@ +// Out of 64-bit signed integer range, switch to double in all modes. Length +// the same as LONG_MIN in base 10 and digit less than LONG_MIN's last digit in +// order to catch a bug in the parsing code. +.=-9.3e+18 diff --git a/3rdparty/jsoncpp/test/data/test_real_11.json b/3rdparty/jsoncpp/test/data/test_real_11.json new file mode 100644 index 00000000000..63cdb36f4ec --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_11.json @@ -0,0 +1,4 @@ +// Out of 64-bit signed integer range, switch to double in all modes. Length +// the same as LONG_MIN in base 10 and digit less than LONG_MIN's last digit in +// order to catch a bug in the parsing code. +-9300000000000000001 diff --git a/3rdparty/jsoncpp/test/data/test_real_12.expected b/3rdparty/jsoncpp/test/data/test_real_12.expected new file mode 100644 index 00000000000..93e2417d342 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_12.expected @@ -0,0 +1,2 @@ +// 2^64 -> switch to double. +.=1.844674407370955e+19 diff --git a/3rdparty/jsoncpp/test/data/test_real_12.json b/3rdparty/jsoncpp/test/data/test_real_12.json new file mode 100644 index 00000000000..0a13eed2200 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_real_12.json @@ -0,0 +1,2 @@ +// 2^64 -> switch to double. +18446744073709551616 diff --git a/3rdparty/jsoncpp/test/data/test_string_01.expected b/3rdparty/jsoncpp/test/data/test_string_01.expected new file mode 100644 index 00000000000..8fd37b1e0e7 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~"
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/data/test_string_01.json b/3rdparty/jsoncpp/test/data/test_string_01.json new file mode 100644 index 00000000000..6cd0db44dfc --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/3rdparty/jsoncpp/test/data/test_string_02.expected b/3rdparty/jsoncpp/test/data/test_string_02.expected new file mode 100644 index 00000000000..0443bc3649d --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~"
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/data/test_string_02.json b/3rdparty/jsoncpp/test/data/test_string_02.json new file mode 100644 index 00000000000..9a7e5dcad43 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_02.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/3rdparty/jsoncpp/test/data/test_string_03.expected b/3rdparty/jsoncpp/test/data/test_string_03.expected new file mode 100644 index 00000000000..6ed627a8503 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_03.expected @@ -0,0 +1 @@ +.="http://jsoncpp.sourceforge.net/"
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/data/test_string_03.json b/3rdparty/jsoncpp/test/data/test_string_03.json new file mode 100644 index 00000000000..2d38180aea1 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_03.json @@ -0,0 +1 @@ +"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/3rdparty/jsoncpp/test/data/test_string_04.expected b/3rdparty/jsoncpp/test/data/test_string_04.expected new file mode 100644 index 00000000000..f57d5256c49 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_04.expected @@ -0,0 +1,2 @@ +.=""abc\def"" + diff --git a/3rdparty/jsoncpp/test/data/test_string_04.json b/3rdparty/jsoncpp/test/data/test_string_04.json new file mode 100644 index 00000000000..01fe7524ceb --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_04.json @@ -0,0 +1,2 @@ +"\"abc\\def\"" + diff --git a/3rdparty/jsoncpp/test/data/test_string_05.expected b/3rdparty/jsoncpp/test/data/test_string_05.expected new file mode 100644 index 00000000000..9794dddb4d7 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_05.expected @@ -0,0 +1,2 @@ +.="\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\" + diff --git a/3rdparty/jsoncpp/test/data/test_string_05.json b/3rdparty/jsoncpp/test/data/test_string_05.json new file mode 100644 index 00000000000..e156024d5ae --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_05.json @@ -0,0 +1,2 @@ +"\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\" + diff --git a/3rdparty/jsoncpp/test/data/test_string_unicode_01.expected b/3rdparty/jsoncpp/test/data/test_string_unicode_01.expected new file mode 100644 index 00000000000..1f3be7fb687 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/3rdparty/jsoncpp/test/data/test_string_unicode_01.json b/3rdparty/jsoncpp/test/data/test_string_unicode_01.json new file mode 100644 index 00000000000..024114bc095 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061"
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/data/test_string_unicode_02.expected b/3rdparty/jsoncpp/test/data/test_string_unicode_02.expected new file mode 100644 index 00000000000..1388f539432 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/3rdparty/jsoncpp/test/data/test_string_unicode_02.json b/3rdparty/jsoncpp/test/data/test_string_unicode_02.json new file mode 100644 index 00000000000..4961024fab4 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2"
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/data/test_string_unicode_03.expected b/3rdparty/jsoncpp/test/data/test_string_unicode_03.expected new file mode 100644 index 00000000000..9b80b2719f1 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/3rdparty/jsoncpp/test/data/test_string_unicode_03.json b/3rdparty/jsoncpp/test/data/test_string_unicode_03.json new file mode 100644 index 00000000000..e7e1a9e1388 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC"
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/data/test_string_unicode_04.expected b/3rdparty/jsoncpp/test/data/test_string_unicode_04.expected new file mode 100644 index 00000000000..b9e7fe3b7d5 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/3rdparty/jsoncpp/test/data/test_string_unicode_04.json b/3rdparty/jsoncpp/test/data/test_string_unicode_04.json new file mode 100644 index 00000000000..dae65c51554 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E"
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/data/test_string_unicode_05.expected b/3rdparty/jsoncpp/test/data/test_string_unicode_05.expected new file mode 100644 index 00000000000..c2e67f99819 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/3rdparty/jsoncpp/test/data/test_string_unicode_05.json b/3rdparty/jsoncpp/test/data/test_string_unicode_05.json new file mode 100644 index 00000000000..87704109462 --- /dev/null +++ b/3rdparty/jsoncpp/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń"
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/generate_expected.py b/3rdparty/jsoncpp/test/generate_expected.py new file mode 100644 index 00000000000..f668da23861 --- /dev/null +++ b/3rdparty/jsoncpp/test/generate_expected.py @@ -0,0 +1,12 @@ +from __future__ import print_function +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print('skipping:', target) + else: + print('creating:', target) + file(target,'wt').write(text) + diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail1.json b/3rdparty/jsoncpp/test/jsonchecker/fail1.json new file mode 100644 index 00000000000..6216b865f10 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string."
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail10.json b/3rdparty/jsoncpp/test/jsonchecker/fail10.json new file mode 100644 index 00000000000..5d8c0047bd5 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value"
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail11.json b/3rdparty/jsoncpp/test/jsonchecker/fail11.json new file mode 100644 index 00000000000..76eb95b4583 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2}
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail12.json b/3rdparty/jsoncpp/test/jsonchecker/fail12.json new file mode 100644 index 00000000000..77580a4522d --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()}
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail13.json b/3rdparty/jsoncpp/test/jsonchecker/fail13.json new file mode 100644 index 00000000000..379406b59bd --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013}
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail14.json b/3rdparty/jsoncpp/test/jsonchecker/fail14.json new file mode 100644 index 00000000000..0ed366b38a3 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14}
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail15.json b/3rdparty/jsoncpp/test/jsonchecker/fail15.json new file mode 100644 index 00000000000..fc8376b605d --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail16.json b/3rdparty/jsoncpp/test/jsonchecker/fail16.json new file mode 100644 index 00000000000..3fe21d4b532 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail17.json b/3rdparty/jsoncpp/test/jsonchecker/fail17.json new file mode 100644 index 00000000000..62b9214aeda --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail18.json b/3rdparty/jsoncpp/test/jsonchecker/fail18.json new file mode 100644 index 00000000000..edac92716f1 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail19.json b/3rdparty/jsoncpp/test/jsonchecker/fail19.json new file mode 100644 index 00000000000..3b9c46fa9a2 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null}
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail2.json b/3rdparty/jsoncpp/test/jsonchecker/fail2.json new file mode 100644 index 00000000000..6b7c11e5a56 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array"
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail20.json b/3rdparty/jsoncpp/test/jsonchecker/fail20.json new file mode 100644 index 00000000000..27c1af3e72e --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null}
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail21.json b/3rdparty/jsoncpp/test/jsonchecker/fail21.json new file mode 100644 index 00000000000..62474573b21 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null}
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail22.json b/3rdparty/jsoncpp/test/jsonchecker/fail22.json new file mode 100644 index 00000000000..a7752581bcf --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail23.json b/3rdparty/jsoncpp/test/jsonchecker/fail23.json new file mode 100644 index 00000000000..494add1ca19 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail24.json b/3rdparty/jsoncpp/test/jsonchecker/fail24.json new file mode 100644 index 00000000000..caff239bfc3 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote']
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail25.json b/3rdparty/jsoncpp/test/jsonchecker/fail25.json new file mode 100644 index 00000000000..8b7ad23e010 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail26.json b/3rdparty/jsoncpp/test/jsonchecker/fail26.json new file mode 100644 index 00000000000..845d26a6a54 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail27.json b/3rdparty/jsoncpp/test/jsonchecker/fail27.json new file mode 100644 index 00000000000..6b01a2ca4a9 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail28.json b/3rdparty/jsoncpp/test/jsonchecker/fail28.json new file mode 100644 index 00000000000..621a0101c66 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail29.json b/3rdparty/jsoncpp/test/jsonchecker/fail29.json new file mode 100644 index 00000000000..47ec421bb62 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail3.json b/3rdparty/jsoncpp/test/jsonchecker/fail3.json new file mode 100644 index 00000000000..168c81eb785 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"}
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail30.json b/3rdparty/jsoncpp/test/jsonchecker/fail30.json new file mode 100644 index 00000000000..8ab0bc4b8b2 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail31.json b/3rdparty/jsoncpp/test/jsonchecker/fail31.json new file mode 100644 index 00000000000..1cce602b518 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail32.json b/3rdparty/jsoncpp/test/jsonchecker/fail32.json new file mode 100644 index 00000000000..45cba7396ff --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true,
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail33.json b/3rdparty/jsoncpp/test/jsonchecker/fail33.json new file mode 100644 index 00000000000..ca5eb19dc97 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"}
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail4.json b/3rdparty/jsoncpp/test/jsonchecker/fail4.json new file mode 100644 index 00000000000..9de168bf34e --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail5.json b/3rdparty/jsoncpp/test/jsonchecker/fail5.json new file mode 100644 index 00000000000..ddf3ce3d240 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail6.json b/3rdparty/jsoncpp/test/jsonchecker/fail6.json new file mode 100644 index 00000000000..ed91580e1b1 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail7.json b/3rdparty/jsoncpp/test/jsonchecker/fail7.json new file mode 100644 index 00000000000..8a96af3e4ee --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"],
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail8.json b/3rdparty/jsoncpp/test/jsonchecker/fail8.json new file mode 100644 index 00000000000..b28479c6ecb --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/fail9.json b/3rdparty/jsoncpp/test/jsonchecker/fail9.json new file mode 100644 index 00000000000..5815574f363 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,}
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/pass1.json b/3rdparty/jsoncpp/test/jsonchecker/pass1.json new file mode 100644 index 00000000000..70e26854369 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.</>?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* <!-- --", + "# -- --> */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/pass2.json b/3rdparty/jsoncpp/test/jsonchecker/pass2.json new file mode 100644 index 00000000000..d3c63c7ad84 --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]]
\ No newline at end of file diff --git a/3rdparty/jsoncpp/test/jsonchecker/pass3.json b/3rdparty/jsoncpp/test/jsonchecker/pass3.json new file mode 100644 index 00000000000..4528d51f1ac --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/3rdparty/jsoncpp/test/jsonchecker/readme.txt b/3rdparty/jsoncpp/test/jsonchecker/readme.txt new file mode 100644 index 00000000000..321d89d998e --- /dev/null +++ b/3rdparty/jsoncpp/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/3rdparty/jsoncpp/test/pyjsontestrunner.py b/3rdparty/jsoncpp/test/pyjsontestrunner.py new file mode 100644 index 00000000000..3f08a8a7325 --- /dev/null +++ b/3rdparty/jsoncpp/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. +from __future__ import print_function +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print("Usage: %s input-json-file", sys.argv[0]) + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/3rdparty/jsoncpp/test/runjsontests.py b/3rdparty/jsoncpp/test/runjsontests.py new file mode 100644 index 00000000000..9422d57d9dc --- /dev/null +++ b/3rdparty/jsoncpp/test/runjsontests.py @@ -0,0 +1,135 @@ +from __future__ import print_function +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in range(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError as e: + return '<File "%s" is missing: %s>' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + is_json_checker_test = (input_path in test_jsonchecker) or expect_failure + print('TESTING:', input_path, end=' ') + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( '%s%s %s "%s"' % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + if expect_failure: + if status is None: + print('FAILED') + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print('OK') + else: + if status is not None: + print('FAILED') + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print('OK') + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print('parsing failed') + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, 
actual_rewrite_output, 'rewrite' ) ) + if detail: + print('FAILED') + failed_tests.append( (input_path, detail) ) + else: + print('OK') + + if failed_tests: + print() + print('Failure details:') + for failed_test in failed_tests: + print('* Test', failed_test[0]) + print(failed_test[1]) + print() + print('Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) )) + return 1 + else: + print('All %d tests passed.' % len(tests)) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] <path to jsontestrunner.exe> [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/3rdparty/jsoncpp/test/rununittests.py b/3rdparty/jsoncpp/test/rununittests.py new file mode 100644 index 00000000000..6279f80e3c6 --- /dev/null +++ b/3rdparty/jsoncpp/test/rununittests.py @@ -0,0 +1,74 @@ +from __future__ import print_function +from glob import glob +import sys +import os +import os.path +import subprocess +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print("Failed to obtain unit tests list:\n" + test_names, file=sys.stderr) + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print('TESTING %s:' % name, end=' ') + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print('OK') + else: + failures.append( (name, result) ) + print('FAILED') + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print() + for name, result in failures: + print(result) + print('%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count)) + return 1 + else: + print('All %d tests passed' % len(test_names)) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( 
usage="%prog [options] <path to test_lib_json.exe>" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/3rdparty/jsoncpp/version b/3rdparty/jsoncpp/version new file mode 100644 index 00000000000..6314daca8b0 --- /dev/null +++ b/3rdparty/jsoncpp/version @@ -0,0 +1 @@ +1.1.0
\ No newline at end of file
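The pyjsontestrunner.py file added above is Python 2 only (it relies on file(), types.DictType and xrange), and the *.expected files it is checked against use the flattened path=value notation visible throughout the test data (.={}, .count=1234, and so on). As a rough orientation only, the following is a minimal Python 3 re-sketch of that flattening step under the same output conventions; flatten_value_tree is an illustrative name, not part of JsonCpp or its test suite.

# Hypothetical Python 3 sketch of the path=value flattening that the
# *.expected files encode; mirrors valueTreeToString from pyjsontestrunner.py.
import json
import sys

def flatten_value_tree(out, value, path='.'):
    if isinstance(value, dict):
        out.write('%s={}\n' % path)
        suffix = '' if path.endswith('.') else '.'
        for name in sorted(value):
            flatten_value_tree(out, value[name], path + suffix + name)
    elif isinstance(value, list):
        out.write('%s=[]\n' % path)
        for index, child in enumerate(value):
            flatten_value_tree(out, child, '%s[%d]' % (path, index))
    elif isinstance(value, bool):
        # bool must be tested before int, since bool is an int subclass in Python
        out.write('%s=%s\n' % (path, 'true' if value else 'false'))
    elif isinstance(value, str):
        out.write('%s="%s"\n' % (path, value))
    elif isinstance(value, int):
        out.write('%s=%d\n' % (path, value))
    elif isinstance(value, float):
        out.write('%s=%.16g\n' % (path, value))
    elif value is None:
        out.write('%s=null\n' % path)
    else:
        raise TypeError('Unexpected value type: %r' % type(value))

if __name__ == '__main__':
    # Example: reproduces the content of test_object_02.expected.
    flatten_value_tree(sys.stdout, json.loads('{ "count" : 1234 }'))

Run against test_object_02.json this prints ".={}" followed by ".count=1234", matching test_object_02.expected.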
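The test_real_08 through test_real_12 cases above probe how the reader falls back from integral types to double near the 32-bit and 64-bit limits. The sketch below is only a rough, 64-bit-only illustration of that classification, not JsonCpp's actual logic; classify_number is a hypothetical helper and the limits assume ordinary two's-complement ranges.

# Hypothetical sketch of the int/uint/double fallback the test_real_* cases
# exercise; real JsonCpp also has 32-bit modes with lower thresholds.
INT64_MIN, INT64_MAX = -2**63, 2**63 - 1
UINT64_MAX = 2**64 - 1

def classify_number(text):
    """Return ('int'|'uint'|'double', value) for a JSON number literal."""
    try:
        value = int(text)
    except ValueError:
        return 'double', float(text)      # literal contains '.', 'e' or 'E'
    if INT64_MIN <= value <= INT64_MAX:
        return 'int', value
    if 0 <= value <= UINT64_MAX:
        return 'uint', value
    return 'double', float(value)         # out of 64-bit range, e.g. 2^64

if __name__ == '__main__':
    samples = [
        '4300000001',             # out of 32-bit range; still an int in this 64-bit sketch (test_real_08)
        '18446744073709551616',   # 2^64, switches to double (test_real_12)
        '-9300000000000000001',   # out of 64-bit signed range, switches to double (test_real_11)
    ]
    for literal in samples:
        print(literal, '->', classify_number(literal))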