diff --git a/3rdparty/jsoncpp/.clang-format b/3rdparty/jsoncpp/.clang-format deleted file mode 100644 index dd51247d50a..00000000000 --- a/3rdparty/jsoncpp/.clang-format +++ /dev/null @@ -1,47 +0,0 @@ ---- -# BasedOnStyle: LLVM -AccessModifierOffset: -2 -ConstructorInitializerIndentWidth: 4 -AlignEscapedNewlinesLeft: false -AlignTrailingComments: true -AllowAllParametersOfDeclarationOnNextLine: true -AllowShortIfStatementsOnASingleLine: false -AllowShortLoopsOnASingleLine: false -AlwaysBreakTemplateDeclarations: false -AlwaysBreakBeforeMultilineStrings: false -BreakBeforeBinaryOperators: false -BreakBeforeTernaryOperators: true -BreakConstructorInitializersBeforeComma: false -BinPackParameters: false -ColumnLimit: 80 -ConstructorInitializerAllOnOneLineOrOnePerLine: false -DerivePointerBinding: false -ExperimentalAutoDetectBinPacking: false -IndentCaseLabels: false -MaxEmptyLinesToKeep: 1 -NamespaceIndentation: None -ObjCSpaceBeforeProtocolList: true -PenaltyBreakBeforeFirstCallParameter: 19 -PenaltyBreakComment: 60 -PenaltyBreakString: 1000 -PenaltyBreakFirstLessLess: 120 -PenaltyExcessCharacter: 1000000 -PenaltyReturnTypeOnItsOwnLine: 60 -PointerBindsToType: true -SpacesBeforeTrailingComments: 1 -Cpp11BracedListStyle: false -Standard: Cpp03 -IndentWidth: 2 -TabWidth: 8 -UseTab: Never -BreakBeforeBraces: Attach -IndentFunctionDeclarationAfterType: false -SpacesInParentheses: false -SpacesInAngles: false -SpaceInEmptyParentheses: false -SpacesInCStyleCastParentheses: false -SpaceAfterControlStatementKeyword: true -SpaceBeforeAssignmentOperators: true -ContinuationIndentWidth: 4 -... - diff --git a/3rdparty/jsoncpp/.gitignore b/3rdparty/jsoncpp/.gitignore deleted file mode 100644 index ef226a8875a..00000000000 --- a/3rdparty/jsoncpp/.gitignore +++ /dev/null @@ -1,36 +0,0 @@ -/build/ -*.pyc -*.swp -*.actual -*.actual-rewrite -*.process-output -*.rewrite -/bin/ -/buildscons/ -/libs/ -/doc/doxyfile -/dist/ -#/version -#/include/json/version.h - -# MSVC project files: -*.sln -*.vcxproj -*.filters -*.user -*.sdf -*.opensdf -*.suo - -# MSVC build files: -*.lib -*.obj -*.tlog/ -*.pdb - -# CMake-generated files: -CMakeFiles/ -CTestTestFile.cmake -cmake_install.cmake -pkg-config/jsoncpp.pc -jsoncpp_lib_static.dir/ diff --git a/3rdparty/jsoncpp/.travis.yml b/3rdparty/jsoncpp/.travis.yml deleted file mode 100644 index 17e52dcf89b..00000000000 --- a/3rdparty/jsoncpp/.travis.yml +++ /dev/null @@ -1,25 +0,0 @@ -# Build matrix / environment variable are explained on: -# http://about.travis-ci.org/docs/user/build-configuration/ -# This file can be validated on: -# http://lint.travis-ci.org/ - -#before_install: sudo apt-get install -y cmake -# cmake is pre-installed in Travis for both linux and osx - -before_install: - - sudo apt-get update -qq - - sudo apt-get install -qq valgrind -os: - - linux -language: cpp -compiler: - - gcc - - clang -script: ./travis.sh -env: - matrix: - - SHARED_LIB=ON STATIC_LIB=ON CMAKE_PKG=ON BUILD_TYPE=release VERBOSE_MAKE=false - - SHARED_LIB=OFF STATIC_LIB=ON CMAKE_PKG=OFF BUILD_TYPE=debug VERBOSE_MAKE=true VERBOSE -notifications: - email: - - aaronjjacobs@gmail.com diff --git a/3rdparty/jsoncpp/AUTHORS b/3rdparty/jsoncpp/AUTHORS deleted file mode 100644 index c0fbbeec111..00000000000 --- a/3rdparty/jsoncpp/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/3rdparty/jsoncpp/CMakeLists.txt b/3rdparty/jsoncpp/CMakeLists.txt deleted file mode 100644 index 90eb14e30d0..00000000000 --- a/3rdparty/jsoncpp/CMakeLists.txt +++ /dev/null @@ -1,129 +0,0 @@ -# vim: et 
ts=4 sts=4 sw=4 tw=0 - -CMAKE_MINIMUM_REQUIRED(VERSION 2.8.5) -PROJECT(jsoncpp) -ENABLE_TESTING() - -OPTION(JSONCPP_WITH_TESTS "Compile and (for jsoncpp_check) run JsonCpp test executables" ON) -OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON) -OPTION(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occurs" OFF) -OPTION(JSONCPP_WITH_PKGCONFIG_SUPPORT "Generate and install .pc files" ON) -OPTION(JSONCPP_WITH_CMAKE_PACKAGE "Generate and install cmake package files" OFF) -OPTION(BUILD_SHARED_LIBS "Build jsoncpp_lib as a shared library." OFF) -OPTION(BUILD_STATIC_LIBS "Build jsoncpp_lib static library." ON) - -# Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix -IF(NOT WIN32) - IF(NOT CMAKE_BUILD_TYPE) - SET(CMAKE_BUILD_TYPE Release CACHE STRING - "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage." - FORCE) - ENDIF(NOT CMAKE_BUILD_TYPE) -ENDIF(NOT WIN32) - -SET(DEBUG_LIBNAME_SUFFIX "" CACHE STRING "Optional suffix to append to the library name for a debug build") -SET(LIB_SUFFIX "" CACHE STRING "Optional arch-dependent suffix for the library installation directory") - -SET(RUNTIME_INSTALL_DIR bin - CACHE PATH "Install dir for executables and dlls") -SET(ARCHIVE_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX} - CACHE PATH "Install dir for static libraries") -SET(LIBRARY_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX} - CACHE PATH "Install dir for shared libraries") -SET(INCLUDE_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/include - CACHE PATH "Install dir for headers") -SET(PACKAGE_INSTALL_DIR lib${LIB_SUFFIX}/cmake - CACHE PATH "Install dir for cmake package config files") -MARK_AS_ADVANCED( RUNTIME_INSTALL_DIR ARCHIVE_INSTALL_DIR INCLUDE_INSTALL_DIR PACKAGE_INSTALL_DIR ) - -# Set variable named ${VAR_NAME} to value ${VALUE} -FUNCTION(set_using_dynamic_name VAR_NAME VALUE) - SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE) -ENDFUNCTION(set_using_dynamic_name) - -# Extract major, minor, patch from version text -# Parse a version string "X.Y.Z" and outputs -# version parts in ${OUPUT_PREFIX}_MAJOR, _MINOR, _PATCH. -# If parse succeeds then ${OUPUT_PREFIX}_FOUND is TRUE. -MACRO(jsoncpp_parse_version VERSION_TEXT OUPUT_PREFIX) - SET(VERSION_REGEX "[0-9]+\\.[0-9]+\\.[0-9]+(-[a-zA-Z0-9_]+)?") - IF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} ) - STRING(REGEX MATCHALL "[0-9]+|-([A-Za-z0-9_]+)" VERSION_PARTS ${VERSION_TEXT}) - LIST(GET VERSION_PARTS 0 ${OUPUT_PREFIX}_MAJOR) - LIST(GET VERSION_PARTS 1 ${OUPUT_PREFIX}_MINOR) - LIST(GET VERSION_PARTS 2 ${OUPUT_PREFIX}_PATCH) - set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" TRUE ) - ELSE( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} ) - set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" FALSE ) - ENDIF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} ) -ENDMACRO(jsoncpp_parse_version) - -# Read out version from "version" file -#FILE(STRINGS "version" JSONCPP_VERSION) -#SET( JSONCPP_VERSION_MAJOR X ) -#SET( JSONCPP_VERSION_MINOR Y ) -#SET( JSONCPP_VERSION_PATCH Z ) -SET( JSONCPP_VERSION 1.6.2 ) -jsoncpp_parse_version( ${JSONCPP_VERSION} JSONCPP_VERSION ) -#IF(NOT JSONCPP_VERSION_FOUND) -# MESSAGE(FATAL_ERROR "Failed to parse version string properly. 
Expect X.Y.Z") -#ENDIF(NOT JSONCPP_VERSION_FOUND) - -MESSAGE(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINOR}.${JSONCPP_VERSION_PATCH}") -# File version.h is only regenerated on CMake configure step -CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in" - "${PROJECT_SOURCE_DIR}/include/json/version.h" - NEWLINE_STYLE UNIX ) -CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/version.in" - "${PROJECT_SOURCE_DIR}/version" - NEWLINE_STYLE UNIX ) - -macro(UseCompilationWarningAsError) - if ( MSVC ) - # Only enabled in debug because some old versions of VS STL generate - # warnings when compiled in release configuration. - set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /WX ") - endif( MSVC ) -endmacro() - -# Include our configuration header -INCLUDE_DIRECTORIES( ${jsoncpp_SOURCE_DIR}/include ) - -if ( MSVC ) - # Only enabled in debug because some old versions of VS STL generate - # unreachable code warning when compiled in release configuration. - set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ") -endif( MSVC ) - -if (CMAKE_CXX_COMPILER_ID MATCHES "Clang") - # using regular Clang or AppleClang - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wshorten-64-to-32") -elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") - # using GCC - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x -Wall -Wextra -pedantic") -endif() - -IF(JSONCPP_WITH_WARNING_AS_ERROR) - UseCompilationWarningAsError() -ENDIF(JSONCPP_WITH_WARNING_AS_ERROR) - -IF(JSONCPP_WITH_PKGCONFIG_SUPPORT) - CONFIGURE_FILE( - "pkg-config/jsoncpp.pc.in" - "pkg-config/jsoncpp.pc" - @ONLY) - INSTALL(FILES "${CMAKE_BINARY_DIR}/pkg-config/jsoncpp.pc" - DESTINATION "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}/pkgconfig") -ENDIF(JSONCPP_WITH_PKGCONFIG_SUPPORT) - -IF(JSONCPP_WITH_CMAKE_PACKAGE) - INSTALL(EXPORT jsoncpp - DESTINATION ${PACKAGE_INSTALL_DIR}/jsoncpp - FILE jsoncppConfig.cmake) -ENDIF(JSONCPP_WITH_CMAKE_PACKAGE) - -# Build the different applications -ADD_SUBDIRECTORY( src ) - -#install the includes -ADD_SUBDIRECTORY( include ) diff --git a/3rdparty/jsoncpp/LICENSE b/3rdparty/jsoncpp/LICENSE deleted file mode 100644 index ca2bfe1a03e..00000000000 --- a/3rdparty/jsoncpp/LICENSE +++ /dev/null @@ -1,55 +0,0 @@ -The JsonCpp library's source code, including accompanying documentation, -tests and demonstration applications, are licensed under the following -conditions... - -The author (Baptiste Lepilleur) explicitly disclaims copyright in all -jurisdictions which recognize such a disclaimer. In such jurisdictions, -this software is released into the Public Domain. - -In jurisdictions which do not recognize Public Domain property (e.g. Germany as of -2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is -released under the terms of the MIT License (see below). - -In jurisdictions which recognize Public Domain property, the user of this -software may choose to accept it either as 1) Public Domain, 2) under the -conditions of the MIT License (see below), or 3) under the terms of dual -Public Domain/MIT License conditions described here, as they choose. 
- -The MIT License is about as close to Public Domain as a license can get, and is -described in clear, concise terms at: - - http://en.wikipedia.org/wiki/MIT_License - -The full text of the MIT License follows: - -======================================================================== -Copyright (c) 2007-2010 Baptiste Lepilleur - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, copy, -modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -======================================================================== -(END LICENSE TEXT) - -The MIT license is compatible with both the GPL and commercial -software, affording one all of the rights of Public Domain with the -minor nuisance of being required to keep the above copyright notice -and license text in the source code. Note also that by accepting the -Public Domain "license" you can re-license your copy using whatever -license you like. diff --git a/3rdparty/jsoncpp/NEWS.txt b/3rdparty/jsoncpp/NEWS.txt deleted file mode 100644 index 5733fcd5ef4..00000000000 --- a/3rdparty/jsoncpp/NEWS.txt +++ /dev/null @@ -1,175 +0,0 @@ -New in SVN ----------- - - * Updated the type system's behavior, in order to better support backwards - compatibility with code that was written before 64-bit integer support was - introduced. Here's how it works now: - - * isInt, isInt64, isUInt, and isUInt64 return true if and only if the - value can be exactly represented as that type. In particular, a value - constructed with a double like 17.0 will now return true for all of - these methods. - - * isDouble and isFloat now return true for all numeric values, since all - numeric values can be converted to a double or float without - truncation. Note however that the conversion may not be exact -- for - example, doubles cannot exactly represent all integers above 2^53 + 1. - - * isBool, isNull, isString, isArray, and isObject now return true if and - only if the value is of that type. - - * isConvertibleTo(fooValue) indicates that it is safe to call asFoo. - (For each type foo, isFoo always implies isConvertibleTo(fooValue).) - asFoo returns an approximate or exact representation as appropriate. - For example, a double value may be truncated when asInt is called. - - * For backwards compatibility with old code, isConvertibleTo(intValue) - may return false even if type() == intValue. This is because the value - may have been constructed with a 64-bit integer larger than maxInt, - and calling asInt() would cause an exception. 
If you're writing new - code, use isInt64 to find out whether the value is exactly - representable using an Int64, or asDouble() combined with minInt64 and - maxInt64 to figure out whether it is approximately representable. - -* Value - - Patch #10: BOOST_FOREACH compatibility. Made Json::iterator more - standard compliant, added missing iterator_category and value_type - typedefs (contribued by Robert A. Iannucci). - -* Compilation - - - New CMake based build system. Based in part on contribution from - Igor Okulist and Damien Buhl (Patch #14). - - - New header json/version.h now contains version number macros - (JSONCPP_VERSION_MAJOR, JSONCPP_VERSION_MINOR, JSONCPP_VERSION_PATCH - and JSONCPP_VERSION_HEXA). - - - Patch #11: added missing JSON_API on some classes causing link issues - when building as a dynamic library on Windows - (contributed by Francis Bolduc). - - - Visual Studio DLL: suppressed warning "C4251: : - needs to have dll-interface to be used by..." via pragma push/pop - in json-cpp headers. - - - Added Travis CI intregration: https://travis-ci.org/blep/jsoncpp-mirror - -* Bug fixes - - Patch #15: Copy constructor does not initialize allocated_ for stringValue - (contributed by rmongia). - - - Patch #16: Missing field copy in Json::Value::iterator causing infinite - loop when using experimental internal map (#define JSON_VALUE_USE_INTERNAL_MAP) - (contributed by Ming-Lin Kao). - - - New in JsonCpp 0.6.0: - --------------------- - -* Compilation - - - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now - propagated to the build environment as this is required for some - compiler installation. - - - Added support for Microsoft Visual Studio 2008 (bug #2930462): - The platform "msvc90" has been added. - - Notes: you need to setup the environment by running vcvars32.bat - (e.g. MSVC 2008 command prompt in start menu) before running scons. - - - Added support for amalgamated source and header generation (a la sqlite). - Refer to README.md section "Generating amalgamated source and header" - for detail. - -* Value - - - Removed experimental ValueAllocator, it caused static - initialization/destruction order issues (bug #2934500). - The DefaultValueAllocator has been inlined in code. - - - Added support for 64 bits integer: - - Types Json::Int64 and Json::UInt64 have been added. They are aliased - to 64 bits integers on system that support them (based on __int64 on - Microsoft Visual Studio platform, and long long on other platforms). - - Types Json::LargestInt and Json::LargestUInt have been added. They are - aliased to the largest integer type supported: - either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. - - Json::Value::asInt() and Json::Value::asUInt() still returns plain - "int" based types, but asserts if an attempt is made to retrieve - a 64 bits value that can not represented as the return type. - - Json::Value::asInt64() and Json::Value::asUInt64() have been added - to obtain the 64 bits integer value. - - Json::Value::asLargestInt() and Json::Value::asLargestUInt() returns - the integer as a LargestInt/LargestUInt respectively. Those functions - functions are typically used when implementing writer. - - The reader attempts to read number as 64 bits integer, and fall back - to reading a double if the number is not in the range of 64 bits - integer. - - Warning: Json::Value::asInt() and Json::Value::asUInt() now returns - long long. This changes break code that was passing the return value - to *printf() function. 
- - Support for 64 bits integer can be disabled by defining the macro - JSON_NO_INT64 (uncomment it in json/config.h for example), though - it should have no impact on existing usage. - - - The type Json::ArrayIndex is used for indexes of a JSON value array. It - is an unsigned int (typically 32 bits). - - - Array index can be passed as int to operator[], allowing use of literal: - Json::Value array; - array.append( 1234 ); - int value = array[0].asInt(); // did not compile previously - - - Added float Json::Value::asFloat() to obtain a floating point value as a - float (avoid lost of precision warning caused by used of asDouble() - to initialize a float). - -* Reader - - - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. - Bug #3023708 (Formatted has 2 't'). The old member function is deprecated - but still present for backward compatibility. - -* Tests - - - Added test to ensure that the escape sequence "\/" is corrected handled - by the parser. - -* Bug fixes - - - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now - correctly detected. - - - Bug #3139678: stack buffer overflow when parsing a double with a - length of 32 characters. - - - Fixed Value::operator <= implementation (had the semantic of operator >=). - Found when adding unit tests for comparison operators. - - - Value::compare() is now const and has an actual implementation with - unit tests. - - - Bug #2407932: strpbrk() can fail for NULL pointer. - - - Bug #3306345: Fixed minor typo in Path::resolve(). - - - Bug #3314841/#3306896: errors in amalgamate.py - - - Fixed some Coverity warnings and line-endings. - -* License - - - See file LICENSE for details. Basically JsonCpp is now licensed under - MIT license, or public domain if desired and recognized in your jurisdiction. - Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/]) who - helped figuring out the solution to the public domain issue. diff --git a/3rdparty/jsoncpp/README.md b/3rdparty/jsoncpp/README.md deleted file mode 100644 index 93c8d1f593a..00000000000 --- a/3rdparty/jsoncpp/README.md +++ /dev/null @@ -1,214 +0,0 @@ -Introduction ------------- - -[JSON][json-org] is a lightweight data-interchange format. It can represent -numbers, strings, ordered sequences of values, and collections of name/value -pairs. - -[json-org]: http://json.org/ - -[JsonCpp][] is a C++ library that allows manipulating JSON values, including -serialization and deserialization to and from strings. It can also preserve -existing comment in unserialization/serialization steps, making it a convenient -format to store user input files. - -[JsonCpp]: http://open-source-parsers.github.io/jsoncpp-docs/doxygen/index.html - -## A note on backward-compatibility -* `1.y.z` is built with C++11. -* `0.y.z` can be used with older compilers. -* Major versions maintain binary-compatibility. - -# Using JsonCpp in your project ------------------------------ -The recommended approach to integrating JsonCpp in your project is to include -the [amalgamated source](#generating-amalgamated-source-and-header) (a single -`.cpp` file and two `.h` files) in your project, and compile and build as you -would any other source file. This ensures consistency of compilation flags and -ABI compatibility, issues which arise when building shared or static -libraries. See the next section for instructions. - -The `include/` should be added to your compiler include path. 
Jsoncpp headers -should be included as follow: - - #include - -If JsonCpp was built as a dynamic library on Windows, then your project needs to -define the macro `JSON_DLL`. - -Generating amalgamated source and header ----------------------------------------- -JsonCpp is provided with a script to generate a single header and a single -source file to ease inclusion into an existing project. The amalgamated source -can be generated at any time by running the following command from the -top-directory (this requires Python 2.6): - - python amalgamate.py - -It is possible to specify header name. See the `-h` option for detail. - -By default, the following files are generated: -* `dist/jsoncpp.cpp`: source file that needs to be added to your project. -* `dist/json/json.h`: corresponding header file for use in your project. It is - equivalent to including `json/json.h` in non-amalgamated source. This header - only depends on standard headers. -* `dist/json/json-forwards.h`: header that provides forward declaration of all - JsonCpp types. - -The amalgamated sources are generated by concatenating JsonCpp source in the -correct order and defining the macro `JSON_IS_AMALGAMATION` to prevent inclusion -of other headers. - -# Contributing to JsonCpp - -Building and testing with CMake -------------------------------- -[CMake][] is a C++ Makefiles/Solution generator. It is usually available on most -Linux system as package. On Ubuntu: - - sudo apt-get install cmake - -[CMake]: http://www.cmake.org - -Note that Python is also required to run the JSON reader/writer tests. If -missing, the build will skip running those tests. - -When running CMake, a few parameters are required: - -* a build directory where the makefiles/solution are generated. It is also used - to store objects, libraries and executables files. -* the generator to use: makefiles or Visual Studio solution? What version or - Visual Studio, 32 or 64 bits solution? - -Steps for generating solution/makefiles using `cmake-gui`: - -* Make "source code" point to the source directory. -* Make "where to build the binary" point to the directory to use for the build. -* Click on the "Grouped" check box. -* Review JsonCpp build options (tick `BUILD_SHARED_LIBS` to build as a - dynamic library). -* Click the configure button at the bottom, then the generate button. -* The generated solution/makefiles can be found in the binary directory. - -Alternatively, from the command-line on Unix in the source directory: - - mkdir -p build/debug - cd build/debug - cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_STATIC_LIBS=ON -DBUILD_SHARED_LIBS=OFF -DARCHIVE_INSTALL_DIR=. -G "Unix Makefiles" ../.. - make - -Running `cmake -h` will display the list of available generators (passed using -the `-G` option). - -By default CMake hides compilation commands. This can be modified by specifying -`-DCMAKE_VERBOSE_MAKEFILE=true` when generating makefiles. - -Building and testing with SCons -------------------------------- -**Note:** The SCons-based build system is deprecated. Please use CMake; see the -section above. - -JsonCpp can use [Scons][] as a build system. Note that SCons requires Python to -be installed. 
- -[SCons]: http://www.scons.org/ - -Invoke SCons as follows: - - scons platform=$PLATFORM [TARGET] - -where `$PLATFORM` may be one of: - -* `suncc`: Sun C++ (Solaris) -* `vacpp`: Visual Age C++ (AIX) -* `mingw` -* `msvc6`: Microsoft Visual Studio 6 service pack 5-6 -* `msvc70`: Microsoft Visual Studio 2002 -* `msvc71`: Microsoft Visual Studio 2003 -* `msvc80`: Microsoft Visual Studio 2005 -* `msvc90`: Microsoft Visual Studio 2008 -* `linux-gcc`: Gnu C++ (linux, also reported to work for Mac OS X) - -If you are building with Microsoft Visual Studio 2008, you need to set up the -environment by running `vcvars32.bat` (e.g. MSVC 2008 command prompt) before -running SCons. - -## Running the tests manually -You need to run tests manually only if you are troubleshooting an issue. - -In the instructions below, replace `path/to/jsontest` with the path of the -`jsontest` executable that was compiled on your platform. - - cd test - # This will run the Reader/Writer tests - python runjsontests.py path/to/jsontest - - # This will run the Reader/Writer tests, using JSONChecker test suite - # (http://www.json.org/JSON_checker/). - # Notes: not all tests pass: JsonCpp is too lenient (for example, - # it allows an integer to start with '0'). The goal is to improve - # strict mode parsing to get all tests to pass. - python runjsontests.py --with-json-checker path/to/jsontest - - # This will run the unit tests (mostly Value) - python rununittests.py path/to/test_lib_json - - # You can run the tests using valgrind: - python rununittests.py --valgrind path/to/test_lib_json - -## Running the tests using scons -Note that tests can be run using SCons using the `check` target: - - scons platform=$PLATFORM check - -Building the documentation --------------------------- -Run the Python script `doxybuild.py` from the top directory: - - python doxybuild.py --doxygen=$(which doxygen) --open --with-dot - -See `doxybuild.py --help` for options. - -Adding a reader/writer test ---------------------------- -To add a test, you need to create two files in test/data: - -* a `TESTNAME.json` file, that contains the input document in JSON format. -* a `TESTNAME.expected` file, that contains a flatened representation of the - input document. - -The `TESTNAME.expected` file format is as follows: - -* each line represents a JSON element of the element tree represented by the - input document. -* each line has two parts: the path to access the element separated from the - element value by `=`. Array and object values are always empty (i.e. - represented by either `[]` or `{}`). -* element path: `.` represents the root element, and is used to separate object - members. `[N]` is used to specify the value of an array element at index `N`. - -See the examples `test_complex_01.json` and `test_complex_01.expected` to better -understand element paths. - -Understanding reader/writer test output ---------------------------------------- -When a test is run, output files are generated beside the input test files. -Below is a short description of the content of each file: - -* `test_complex_01.json`: input JSON document. -* `test_complex_01.expected`: flattened JSON element tree used to check if - parsing was corrected. -* `test_complex_01.actual`: flattened JSON element tree produced by `jsontest` - from reading `test_complex_01.json`. -* `test_complex_01.rewrite`: JSON document written by `jsontest` using the - `Json::Value` parsed from `test_complex_01.json` and serialized using - `Json::StyledWritter`. 
-* `test_complex_01.actual-rewrite`: flattened JSON element tree produced by - `jsontest` from reading `test_complex_01.rewrite`. -* `test_complex_01.process-output`: `jsontest` output, typically useful for - understanding parsing errors. - -License -------- -See the `LICENSE` file for details. In summary, JsonCpp is licensed under the -MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/3rdparty/jsoncpp/SConstruct b/3rdparty/jsoncpp/SConstruct deleted file mode 100644 index f3a73f773bf..00000000000 --- a/3rdparty/jsoncpp/SConstruct +++ /dev/null @@ -1,248 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - # LD_LIBRARY_PATH & co is required on some system for the compiler - vars = {} - for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
- # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'msvc90': - env['MSVS_VERSION']='9.0' - # Scons 1.2 fails to detect the correct location of the platform SDK. - # So we propagate those from the environment. This requires that the - # user run vcvars32.bat before compiling. - if 'INCLUDE' in os.environ: - env['ENV']['INCLUDE'] = os.environ['INCLUDE'] - if 'LIB' in os.environ: - env['ENV']['LIB'] = os.environ['LIB'] - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = os.environ.get("CXXFLAGS", "-Wall"), LINKFLAGS=os.environ.get("LDFLAGS", "") ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. 
-env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( 
os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.md SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/3rdparty/jsoncpp/amalgamate.py b/3rdparty/jsoncpp/amalgamate.py deleted file mode 100644 index 1916bb0d5e7..00000000000 --- a/3rdparty/jsoncpp/amalgamate.py +++ /dev/null @@ -1,154 +0,0 @@ -"""Amalgate json-cpp library sources into a single source and header file. - -Works with python2.6+ and python3.4+. - -Example of invocation (must be invoked from json-cpp top directory): -python amalgate.py -""" -import os -import os.path -import sys - -class AmalgamationFile: - def __init__(self, top_dir): - self.top_dir = top_dir - self.blocks = [] - - def add_text(self, text): - if not text.endswith("\n"): - text += "\n" - self.blocks.append(text) - - def add_file(self, relative_input_path, wrap_in_comment=False): - def add_marker(prefix): - self.add_text("") - self.add_text("// " + "/"*70) - self.add_text("// %s of content of file: %s" % (prefix, relative_input_path.replace("\\","/"))) - self.add_text("// " + "/"*70) - self.add_text("") - add_marker("Beginning") - f = open(os.path.join(self.top_dir, relative_input_path), "rt") - content = f.read() - if wrap_in_comment: - content = "/*\n" + content + "\n*/" - self.add_text(content) - f.close() - add_marker("End") - self.add_text("\n\n\n\n") - - def get_value(self): - return "".join(self.blocks).replace("\r\n","\n") - - def write_to(self, output_path): - output_dir = os.path.dirname(output_path) - if output_dir and not os.path.isdir(output_dir): - os.makedirs(output_dir) - f = open(output_path, "wb") - f.write(str.encode(self.get_value(), 'UTF-8')) - f.close() - -def amalgamate_source(source_top_dir=None, - target_source_path=None, - header_include_path=None): - """Produces amalgated source. - Parameters: - source_top_dir: top-directory - target_source_path: output .cpp path - header_include_path: generated header path relative to target_source_path. 
- """ - print("Amalgating header...") - header = AmalgamationFile(source_top_dir) - header.add_text("/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).") - header.add_text('/// It is intended to be used with #include "%s"' % header_include_path) - header.add_file("LICENSE", wrap_in_comment=True) - header.add_text("#ifndef JSON_AMALGATED_H_INCLUDED") - header.add_text("# define JSON_AMALGATED_H_INCLUDED") - header.add_text("/// If defined, indicates that the source file is amalgated") - header.add_text("/// to prevent private header inclusion.") - header.add_text("#define JSON_IS_AMALGAMATION") - header.add_file("include/json/version.h") - header.add_file("include/json/config.h") - header.add_file("include/json/forwards.h") - header.add_file("include/json/features.h") - header.add_file("include/json/value.h") - header.add_file("include/json/reader.h") - header.add_file("include/json/writer.h") - header.add_file("include/json/assertions.h") - header.add_text("#endif //ifndef JSON_AMALGATED_H_INCLUDED") - - target_header_path = os.path.join(os.path.dirname(target_source_path), header_include_path) - print("Writing amalgated header to %r" % target_header_path) - header.write_to(target_header_path) - - base, ext = os.path.splitext(header_include_path) - forward_header_include_path = base + "-forwards" + ext - print("Amalgating forward header...") - header = AmalgamationFile(source_top_dir) - header.add_text("/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).") - header.add_text('/// It is intended to be used with #include "%s"' % forward_header_include_path) - header.add_text("/// This header provides forward declaration for all JsonCpp types.") - header.add_file("LICENSE", wrap_in_comment=True) - header.add_text("#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED") - header.add_text("# define JSON_FORWARD_AMALGATED_H_INCLUDED") - header.add_text("/// If defined, indicates that the source file is amalgated") - header.add_text("/// to prevent private header inclusion.") - header.add_text("#define JSON_IS_AMALGAMATION") - header.add_file("include/json/config.h") - header.add_file("include/json/forwards.h") - header.add_text("#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED") - - target_forward_header_path = os.path.join(os.path.dirname(target_source_path), - forward_header_include_path) - print("Writing amalgated forward header to %r" % target_forward_header_path) - header.write_to(target_forward_header_path) - - print("Amalgating source...") - source = AmalgamationFile(source_top_dir) - source.add_text("/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).") - source.add_text('/// It is intended to be used with #include "%s"' % header_include_path) - source.add_file("LICENSE", wrap_in_comment=True) - source.add_text("") - source.add_text('#include "%s"' % header_include_path) - source.add_text(""" -#ifndef JSON_IS_AMALGAMATION -#error "Compile with -I PATH_TO_JSON_DIRECTORY" -#endif -""") - source.add_text("") - lib_json = "src/lib_json" - source.add_file(os.path.join(lib_json, "json_tool.h")) - source.add_file(os.path.join(lib_json, "json_reader.cpp")) - source.add_file(os.path.join(lib_json, "json_valueiterator.inl")) - source.add_file(os.path.join(lib_json, "json_value.cpp")) - source.add_file(os.path.join(lib_json, "json_writer.cpp")) - - print("Writing amalgated source to %r" % target_source_path) - source.write_to(target_source_path) - -def main(): - usage = """%prog [options] -Generate a single amalgated source and header file from the sources. 
-""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option("-s", "--source", dest="target_source_path", action="store", default="dist/jsoncpp.cpp", - help="""Output .cpp source path. [Default: %default]""") - parser.add_option("-i", "--include", dest="header_include_path", action="store", default="json/json.h", - help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""") - parser.add_option("-t", "--top-dir", dest="top_dir", action="store", default=os.getcwd(), - help="""Source top-directory. [Default: %default]""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - msg = amalgamate_source(source_top_dir=options.top_dir, - target_source_path=options.target_source_path, - header_include_path=options.header_include_path) - if msg: - sys.stderr.write(msg + "\n") - sys.exit(1) - else: - print("Source succesfully amalagated") - -if __name__ == "__main__": - main() diff --git a/3rdparty/jsoncpp/appveyor.yml b/3rdparty/jsoncpp/appveyor.yml deleted file mode 100644 index 546cb7ed13e..00000000000 --- a/3rdparty/jsoncpp/appveyor.yml +++ /dev/null @@ -1,34 +0,0 @@ -# This is a comment. - -version: build.{build} - -os: Windows Server 2012 R2 - -clone_folder: c:\projects\jsoncpp - -platform: - - Win32 - - x64 - -configuration: - - Debug - - Release - -# scripts to run before build -before_build: - - echo "Running cmake..." - - cd c:\projects\jsoncpp - - cmake --version - - if %PLATFORM% == Win32 cmake . - - if %PLATFORM% == x64 cmake -G "Visual Studio 12 2013 Win64" . - -build: - project: jsoncpp.sln # path to Visual Studio solution or project - -deploy: - provider: GitHub - auth_token: - secure: K2Tp1q8pIZ7rs0Ot24ZMWuwr12Ev6Tc6QkhMjGQxoQG3ng1pXtgPasiJ45IDXGdg - on: - branch: master - appveyor_repo_tag: true diff --git a/3rdparty/jsoncpp/dev.makefile b/3rdparty/jsoncpp/dev.makefile deleted file mode 100644 index d288b166586..00000000000 --- a/3rdparty/jsoncpp/dev.makefile +++ /dev/null @@ -1,35 +0,0 @@ -# This is only for jsoncpp developers/contributors. -# We use this to sign releases, generate documentation, etc. -VER?=$(shell cat version) - -default: - @echo "VER=${VER}" -sign: jsoncpp-${VER}.tar.gz - gpg --armor --detach-sign $< - gpg --verify $<.asc - # Then upload .asc to the release. -jsoncpp-%.tar.gz: - curl https://github.com/open-source-parsers/jsoncpp/archive/$*.tar.gz -o $@ -dox: - python doxybuild.py --doxygen=$$(which doxygen) --in doc/web_doxyfile.in - rsync -va --delete dist/doxygen/jsoncpp-api-html-${VER}/ ../jsoncpp-docs/doxygen/ - # Then 'git add -A' and 'git push' in jsoncpp-docs. -build: - mkdir -p build/debug - cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_SHARED_LIBS=ON -G "Unix Makefiles" ../.. - make -C build/debug - -# Currently, this depends on include/json/version.h generated -# by cmake. -test-amalgamate: - python2.7 amalgamate.py - python3.4 amalgamate.py - cd dist; gcc -I. -c jsoncpp.cpp - -valgrind: - valgrind --error-exitcode=42 --leak-check=full ./build/debug/src/test_lib_json/jsoncpp_test - -clean: - \rm -rf *.gz *.asc dist/ - -.PHONY: build diff --git a/3rdparty/jsoncpp/devtools/__init__.py b/3rdparty/jsoncpp/devtools/__init__.py deleted file mode 100644 index d18a5216853..00000000000 --- a/3rdparty/jsoncpp/devtools/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright 2010 Baptiste Lepilleur -# Distributed under MIT license, or public domain if desired and -# recognized in your jurisdiction. 
-# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -# module diff --git a/3rdparty/jsoncpp/devtools/agent_vmw7.json b/3rdparty/jsoncpp/devtools/agent_vmw7.json deleted file mode 100644 index 0810a99544e..00000000000 --- a/3rdparty/jsoncpp/devtools/agent_vmw7.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "cmake_variants" : [ - {"name": "generator", - "generators": [ - {"generator": [ - "Visual Studio 7 .NET 2003", - "Visual Studio 9 2008", - "Visual Studio 9 2008 Win64", - "Visual Studio 10", - "Visual Studio 10 Win64", - "Visual Studio 11", - "Visual Studio 11 Win64" - ] - }, - {"generator": ["MinGW Makefiles"], - "env_prepend": [{"path": "c:/wut/prg/MinGW/bin"}] - } - ] - }, - {"name": "shared_dll", - "variables": [ - ["BUILD_SHARED_LIBS=true"], - ["BUILD_SHARED_LIBS=false"] - ] - }, - {"name": "build_type", - "build_types": [ - "debug", - "release" - ] - } - ] -} diff --git a/3rdparty/jsoncpp/devtools/agent_vmxp.json b/3rdparty/jsoncpp/devtools/agent_vmxp.json deleted file mode 100644 index b627a7221a7..00000000000 --- a/3rdparty/jsoncpp/devtools/agent_vmxp.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "cmake_variants" : [ - {"name": "generator", - "generators": [ - {"generator": [ - "Visual Studio 6", - "Visual Studio 7", - "Visual Studio 8 2005" - ] - } - ] - }, - {"name": "shared_dll", - "variables": [ - ["BUILD_SHARED_LIBS=true"], - ["BUILD_SHARED_LIBS=false"] - ] - }, - {"name": "build_type", - "build_types": [ - "debug", - "release" - ] - } - ] -} diff --git a/3rdparty/jsoncpp/devtools/antglob.py b/3rdparty/jsoncpp/devtools/antglob.py deleted file mode 100644 index c272f66343f..00000000000 --- a/3rdparty/jsoncpp/devtools/antglob.py +++ /dev/null @@ -1,205 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Copyright 2009 Baptiste Lepilleur -# Distributed under MIT license, or public domain if desired and -# recognized in your jurisdiction. -# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -from __future__ import print_function -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. -##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile(r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)') - -def ant_pattern_to_re(ant_pattern): - """Generates a regular expression from the ant pattern. 
- Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape(os.path.sep) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer(ant_pattern): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError("Invalid ant pattern") - if match.group(1): # /**/ - rex.append(sep_rex + '(?:.*%s)?' % sep_rex) - elif match.group(2): # **/ - rex.append('(?:.*%s)?' % sep_rex) - elif match.group(3): # /** - rex.append(sep_rex + '.*') - elif match.group(4): # * - rex.append('[^/%s]*' % re.escape(os.path.sep)) - elif match.group(5): # / - rex.append(sep_rex) - else: # somepath - rex.append(re.escape(match.group(6))) - next_pos = match.end() - rex.append('$') - return re.compile(''.join(rex)) - -def _as_list(l): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir(dir_name): - for pattern in prune_dirs: - if fnmatch.fnmatch(dir_name, pattern): - return True - return False - - def apply_filter(full_path, filter_rexs): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match(full_path): - return True - return False - - def glob_impl(root_dir_path): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir(dir_path): - full_path = os.path.join(dir_path, entry) -## print 'Testing:', full_path, - is_dir = os.path.isdir(full_path) - if is_dir and not is_pruned_dir(entry): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append(full_path) - included = apply_filter(full_path, include_filter) - rejected = apply_filter(full_path, exclude_filter) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink(full_path) - is_file = os.path.isfile(full_path) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join(dir_path, entry) -## else: -## print ' => TYPE REJECTED' - return list(glob_impl(dir_path)) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion(self): -## self.assertEqual('^somepath$', ant_pattern_to_re('somepath').pattern) - - def test_matching(self): - test_cases = [ ('path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path']), - ('*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c']), - ('**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath']), - ('path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a']), - ('/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath']), - ('a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c']), - ('**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b']), - ('src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py']), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path(paths): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append((ant_pattern, local_path(accepted_matches), local_path(rejected_matches))) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re(ant_pattern) - print('ant_pattern:', ant_pattern, ' => ', rex.pattern) - for accepted_match in accepted_matches: - print('Accepted?:', accepted_match) - self.assertTrue(rex.match(accepted_match) is not None) - for rejected_match in rejected_matches: - print('Rejected?:', rejected_match) - self.assertTrue(rex.match(rejected_match) is None) - - unittest.main() diff --git a/3rdparty/jsoncpp/devtools/batchbuild.py b/3rdparty/jsoncpp/devtools/batchbuild.py deleted file mode 100644 index 0eb0690e8c6..00000000000 --- a/3rdparty/jsoncpp/devtools/batchbuild.py +++ /dev/null @@ -1,278 +0,0 @@ -from __future__ import print_function -import collections -import itertools -import json -import os -import os.path -import re -import shutil -import string -import subprocess -import sys -import cgi - -class BuildDesc: - def __init__(self, prepend_envs=None, variables=None, build_type=None, generator=None): - self.prepend_envs = prepend_envs or [] # [ { "var": "value" } ] - self.variables = variables or [] - self.build_type = build_type - self.generator = generator - - def merged_with(self, build_desc): - """Returns a new BuildDesc by merging field content. - Prefer build_desc fields to self fields for single valued field. 
- """ - return BuildDesc(self.prepend_envs + build_desc.prepend_envs, - self.variables + build_desc.variables, - build_desc.build_type or self.build_type, - build_desc.generator or self.generator) - - def env(self): - environ = os.environ.copy() - for values_by_name in self.prepend_envs: - for var, value in list(values_by_name.items()): - var = var.upper() - if type(value) is unicode: - value = value.encode(sys.getdefaultencoding()) - if var in environ: - environ[var] = value + os.pathsep + environ[var] - else: - environ[var] = value - return environ - - def cmake_args(self): - args = ["-D%s" % var for var in self.variables] - # skip build type for Visual Studio solution as it cause warning - if self.build_type and 'Visual' not in self.generator: - args.append("-DCMAKE_BUILD_TYPE=%s" % self.build_type) - if self.generator: - args.extend(['-G', self.generator]) - return args - - def __repr__(self): - return "BuildDesc(%s, build_type=%s)" % (" ".join(self.cmake_args()), self.build_type) - -class BuildData: - def __init__(self, desc, work_dir, source_dir): - self.desc = desc - self.work_dir = work_dir - self.source_dir = source_dir - self.cmake_log_path = os.path.join(work_dir, 'batchbuild_cmake.log') - self.build_log_path = os.path.join(work_dir, 'batchbuild_build.log') - self.cmake_succeeded = False - self.build_succeeded = False - - def execute_build(self): - print('Build %s' % self.desc) - self._make_new_work_dir() - self.cmake_succeeded = self._generate_makefiles() - if self.cmake_succeeded: - self.build_succeeded = self._build_using_makefiles() - return self.build_succeeded - - def _generate_makefiles(self): - print(' Generating makefiles: ', end=' ') - cmd = ['cmake'] + self.desc.cmake_args() + [os.path.abspath(self.source_dir)] - succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.cmake_log_path) - print('done' if succeeded else 'FAILED') - return succeeded - - def _build_using_makefiles(self): - print(' Building:', end=' ') - cmd = ['cmake', '--build', self.work_dir] - if self.desc.build_type: - cmd += ['--config', self.desc.build_type] - succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.build_log_path) - print('done' if succeeded else 'FAILED') - return succeeded - - def _execute_build_subprocess(self, cmd, env, log_path): - process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir, - env=env) - stdout, _ = process.communicate() - succeeded = (process.returncode == 0) - with open(log_path, 'wb') as flog: - log = ' '.join(cmd) + '\n' + stdout + '\nExit code: %r\n' % process.returncode - flog.write(fix_eol(log)) - return succeeded - - def _make_new_work_dir(self): - if os.path.isdir(self.work_dir): - print(' Removing work directory', self.work_dir) - shutil.rmtree(self.work_dir, ignore_errors=True) - if not os.path.isdir(self.work_dir): - os.makedirs(self.work_dir) - -def fix_eol(stdout): - """Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n). 
- """ - return re.sub('\r*\n', os.linesep, stdout) - -def load_build_variants_from_config(config_path): - with open(config_path, 'rb') as fconfig: - data = json.load(fconfig) - variants = data[ 'cmake_variants' ] - build_descs_by_axis = collections.defaultdict(list) - for axis in variants: - axis_name = axis["name"] - build_descs = [] - if "generators" in axis: - for generator_data in axis["generators"]: - for generator in generator_data["generator"]: - build_desc = BuildDesc(generator=generator, - prepend_envs=generator_data.get("env_prepend")) - build_descs.append(build_desc) - elif "variables" in axis: - for variables in axis["variables"]: - build_desc = BuildDesc(variables=variables) - build_descs.append(build_desc) - elif "build_types" in axis: - for build_type in axis["build_types"]: - build_desc = BuildDesc(build_type=build_type) - build_descs.append(build_desc) - build_descs_by_axis[axis_name].extend(build_descs) - return build_descs_by_axis - -def generate_build_variants(build_descs_by_axis): - """Returns a list of BuildDesc generated for the partial BuildDesc for each axis.""" - axis_names = list(build_descs_by_axis.keys()) - build_descs = [] - for axis_name, axis_build_descs in list(build_descs_by_axis.items()): - if len(build_descs): - # for each existing build_desc and each axis build desc, create a new build_desc - new_build_descs = [] - for prototype_build_desc, axis_build_desc in itertools.product(build_descs, axis_build_descs): - new_build_descs.append(prototype_build_desc.merged_with(axis_build_desc)) - build_descs = new_build_descs - else: - build_descs = axis_build_descs - return build_descs - -HTML_TEMPLATE = string.Template(''' - - $title - - - - - - - - $th_vars - - - - $th_build_types - - - -$tr_builds - -
Variables
Build type
-''') - -def generate_html_report(html_report_path, builds): - report_dir = os.path.dirname(html_report_path) - # Vertical axis: generator - # Horizontal: variables, then build_type - builds_by_generator = collections.defaultdict(list) - variables = set() - build_types_by_variable = collections.defaultdict(set) - build_by_pos_key = {} # { (generator, var_key, build_type): build } - for build in builds: - builds_by_generator[build.desc.generator].append(build) - var_key = tuple(sorted(build.desc.variables)) - variables.add(var_key) - build_types_by_variable[var_key].add(build.desc.build_type) - pos_key = (build.desc.generator, var_key, build.desc.build_type) - build_by_pos_key[pos_key] = build - variables = sorted(variables) - th_vars = [] - th_build_types = [] - for variable in variables: - build_types = sorted(build_types_by_variable[variable]) - nb_build_type = len(build_types_by_variable[variable]) - th_vars.append('%s' % (nb_build_type, cgi.escape(' '.join(variable)))) - for build_type in build_types: - th_build_types.append('%s' % cgi.escape(build_type)) - tr_builds = [] - for generator in sorted(builds_by_generator): - tds = [ '%s\n' % cgi.escape(generator) ] - for variable in variables: - build_types = sorted(build_types_by_variable[variable]) - for build_type in build_types: - pos_key = (generator, variable, build_type) - build = build_by_pos_key.get(pos_key) - if build: - cmake_status = 'ok' if build.cmake_succeeded else 'FAILED' - build_status = 'ok' if build.build_succeeded else 'FAILED' - cmake_log_url = os.path.relpath(build.cmake_log_path, report_dir) - build_log_url = os.path.relpath(build.build_log_path, report_dir) - td = 'CMake: %s' % ( build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status) - if build.cmake_succeeded: - td += '
Build: %s' % ( build_log_url, build_status.lower(), build_status) - td += '' - else: - td = '' - tds.append(td) - tr_builds.append('%s' % '\n'.join(tds)) - html = HTML_TEMPLATE.substitute( title='Batch build report', - th_vars=' '.join(th_vars), - th_build_types=' '.join(th_build_types), - tr_builds='\n'.join(tr_builds)) - with open(html_report_path, 'wt') as fhtml: - fhtml.write(html) - print('HTML report generated in:', html_report_path) - -def main(): - usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH [CONFIG2_JSON_PATH...] -Build a given CMake based project located in SOURCE_DIR with multiple generators/options.dry_run -as described in CONFIG_JSON_PATH building in WORK_DIR. - -Example of call: -python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.json -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = True -# parser.add_option('-v', '--verbose', dest="verbose", action='store_true', -# help="""Be verbose.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - if len(args) < 3: - parser.error("Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH.") - work_dir = args[0] - source_dir = args[1].rstrip('/\\') - config_paths = args[2:] - for config_path in config_paths: - if not os.path.isfile(config_path): - parser.error("Can not read: %r" % config_path) - - # generate build variants - build_descs = [] - for config_path in config_paths: - build_descs_by_axis = load_build_variants_from_config(config_path) - build_descs.extend(generate_build_variants(build_descs_by_axis)) - print('Build variants (%d):' % len(build_descs)) - # assign build directory for each variant - if not os.path.isdir(work_dir): - os.makedirs(work_dir) - builds = [] - with open(os.path.join(work_dir, 'matrix-dir-map.txt'), 'wt') as fmatrixmap: - for index, build_desc in enumerate(build_descs): - build_desc_work_dir = os.path.join(work_dir, '%03d' % (index+1)) - builds.append(BuildData(build_desc, build_desc_work_dir, source_dir)) - fmatrixmap.write('%s: %s\n' % (build_desc_work_dir, build_desc)) - for build in builds: - build.execute_build() - html_report_path = os.path.join(work_dir, 'batchbuild-report.html') - generate_html_report(html_report_path, builds) - print('Done') - - -if __name__ == '__main__': - main() - diff --git a/3rdparty/jsoncpp/devtools/fixeol.py b/3rdparty/jsoncpp/devtools/fixeol.py deleted file mode 100644 index 054eb9b227b..00000000000 --- a/3rdparty/jsoncpp/devtools/fixeol.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2010 Baptiste Lepilleur -# Distributed under MIT license, or public domain if desired and -# recognized in your jurisdiction. 
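main() above expands one or more JSON files into a build matrix before driving the builds and the HTML report. A hedged sketch of the configuration shape, using only the field names read by load_build_variants_from_config (the concrete generator, variable and build-type values are illustrative, not taken from devtools/agent_vmw7.json):

    EXAMPLE_CONFIG = {
        "cmake_variants": [
            {"name": "generator",
             "generators": [{"generator": ["Unix Makefiles"]}]},
            {"name": "shared_libs",
             "variables": [["BUILD_SHARED_LIBS=ON"], ["BUILD_SHARED_LIBS=OFF"]]},
            {"name": "build_type",
             "build_types": ["debug", "release"]},
        ]
    }
    # generate_build_variants() takes the cartesian product of the axes,
    # so this example yields 1 x 2 x 2 = 4 BuildDesc variants.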
-# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -from __future__ import print_function -import os.path - -def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile(path): - raise ValueError('Path "%s" is not a file' % path) - try: - f = open(path, 'rb') - except IOError as msg: - print("%s: I/O Error: %s" % (file, str(msg)), file=sys.stderr) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print('%s =>' % path, end=' ') - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print(is_dry_run and ' NEED FIX' or ' FIXED') - return True -## -## -## -##def _do_fix(is_dry_run = True): -## from waftools import antglob -## python_sources = antglob.glob('.', -## includes = '**/*.py **/wscript **/wscript_build', -## excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build') -## for path in python_sources: -## _fix_python_source(path, is_dry_run) -## -## cpp_sources = antglob.glob('.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build') -## for path in cpp_sources: -## _fix_source_eol(path, is_dry_run) -## -## -##def dry_fix(context): -## _do_fix(is_dry_run = True) -## -##def fix(context): -## _do_fix(is_dry_run = False) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/3rdparty/jsoncpp/devtools/licenseupdater.py b/3rdparty/jsoncpp/devtools/licenseupdater.py deleted file mode 100644 index 6f823618fb9..00000000000 --- a/3rdparty/jsoncpp/devtools/licenseupdater.py +++ /dev/null @@ -1,94 +0,0 @@ -"""Updates the license text in source file. -""" -from __future__ import print_function - -# An existing license is found if the file starts with the string below, -# and ends with the first blank line. -LICENSE_BEGIN = "// Copyright " - -BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -""".replace('\r\n','\n') - -def update_license(path, dry_run, show_diff): - """Update the license statement in the specified file. - Parameters: - path: path of the C++ source file to update. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. 
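fix_source_eol() above normalizes every line of a file to the requested EOL sequence and reports whether a change was (or would be) made. A minimal usage sketch; the path is a placeholder:

    # Report what would change without touching the file ...
    fix_source_eol('some/file.cpp', is_dry_run=True)
    # ... then rewrite it in place with Unix line endings.
    fix_source_eol('some/file.cpp', is_dry_run=False, eol='\n')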
- """ - with open(path, 'rt') as fin: - original_text = fin.read().replace('\r\n','\n') - newline = fin.newlines and fin.newlines[0] or '\n' - if not original_text.startswith(LICENSE_BEGIN): - # No existing license found => prepend it - new_text = BRIEF_LICENSE + original_text - else: - license_end_index = original_text.index('\n\n') # search first blank line - new_text = BRIEF_LICENSE + original_text[license_end_index+2:] - if original_text != new_text: - if not dry_run: - with open(path, 'wb') as fout: - fout.write(new_text.replace('\n', newline)) - print('Updated', path) - if show_diff: - import difflib - print('\n'.join(difflib.unified_diff(original_text.split('\n'), - new_text.split('\n')))) - return True - return False - -def update_license_in_source_directories(source_dirs, dry_run, show_diff): - """Updates license text in C++ source files found in directory source_dirs. - Parameters: - source_dirs: list of directory to scan for C++ sources. Directories are - scanned recursively. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. - """ - from devtools import antglob - prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - for source_dir in source_dirs: - cpp_sources = antglob.glob(source_dir, - includes = '''**/*.h **/*.cpp **/*.inl''', - prune_dirs = prune_dirs) - for source in cpp_sources: - update_license(source, dry_run, show_diff) - -def main(): - usage = """%prog DIR [DIR2...] -Updates license text in sources of the project in source files found -in the directory specified on the command-line. - -Example of call: -python devtools\licenseupdater.py include src -n --diff -=> Show change that would be made to the sources. - -python devtools\licenseupdater.py include src -=> Update license statement on all sources in directories include/ and src/. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, - help="""Only show what files are updated, do not update the files""") - parser.add_option('--diff', dest="show_diff", action='store_true', default=False, - help="""On update, show change made to the file.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - update_license_in_source_directories(args, options.dry_run, options.show_diff) - print('Done') - -if __name__ == '__main__': - import sys - import os.path - sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - main() - diff --git a/3rdparty/jsoncpp/devtools/tarball.py b/3rdparty/jsoncpp/devtools/tarball.py deleted file mode 100644 index 2e72717aba4..00000000000 --- a/3rdparty/jsoncpp/devtools/tarball.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2010 Baptiste Lepilleur -# Distributed under MIT license, or public domain if desired and -# recognized in your jurisdiction. 
-# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -from contextlib import closing -import os -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath(os.path.abspath(base_dir)) - def archive_name(path): - """Makes path relative to base_dir.""" - path = os.path.normpath(os.path.abspath(path)) - common_path = os.path.commonprefix((base_dir, path)) - archive_name = path[len(common_path):] - if os.path.isabs(archive_name): - archive_name = archive_name[1:] - return os.path.join(prefix_dir, archive_name) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - with closing(tarfile.TarFile.open(tarball_path, 'w:gz', - compresslevel=compression)) as tar: - for source in sources: - source_path = source - if os.path.isdir(source): - for dirpath, dirnames, filenames in os.walk(source_path): - visit(tar, dirpath, filenames) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar) # filename, arcname - -def decompress(tarball_path, base_dir): - """Decompress the gzipped tarball into directory base_dir. - """ - with closing(tarfile.TarFile.open(tarball_path)) as tar: - tar.extractall(base_dir) diff --git a/3rdparty/jsoncpp/doc/doxyfile.in b/3rdparty/jsoncpp/doc/doxyfile.in deleted file mode 100644 index 57c61c27e40..00000000000 --- a/3rdparty/jsoncpp/doc/doxyfile.in +++ /dev/null @@ -1,2301 +0,0 @@ -# Doxyfile 1.8.5 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project. -# -# All text after a double hash (##) is considered a comment and is placed in -# front of the TAG it is preceding. -# -# All text after a single hash (#) is considered a comment and will be ignored. -# The format is: -# TAG = value [value, ...] -# For lists, items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (\" \"). - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all text -# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv -# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv -# for the list of possible encodings. -# The default value is: UTF-8. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by -# double-quotes, unless you are using Doxywizard) that should identify the -# project for which the documentation is generated. This name is used in the -# title of most generated pages and in a few other places. -# The default value is: My Project. 
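make_tarball() in devtools/tarball.py above collects files or whole directory trees into a gzip-compressed archive, optionally re-rooted under prefix_dir, and decompress() unpacks such an archive. A minimal usage sketch; all paths are placeholders:

    # Package two directories under a 'jsoncpp-src/' prefix inside the archive.
    make_tarball('dist/jsoncpp-src.tar.gz', ['include', 'src'],
                 base_dir='.', prefix_dir='jsoncpp-src')
    # Unpack it again into a scratch directory.
    decompress('dist/jsoncpp-src.tar.gz', 'unpacked')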
- -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. This -# could be handy for archiving the generated documentation or if some version -# control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# Using the PROJECT_BRIEF tag one can provide an optional one line description -# for a project that appears at the top of each page and should give viewer a -# quick idea about the purpose of the project. Keep the description short. - -PROJECT_BRIEF = - -# With the PROJECT_LOGO tag one can specify an logo or icon that is included in -# the documentation. The maximum height of the logo should not exceed 55 pixels -# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo -# to the output directory. - -PROJECT_LOGO = - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path -# into which the generated documentation will be written. If a relative path is -# entered, it will be relative to the location where doxygen was started. If -# left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub- -# directories (in 2 levels) under the output directory of each output format and -# will distribute the generated files over these directories. Enabling this -# option can be useful when feeding doxygen a huge amount of source files, where -# putting all generated files in the same directory would otherwise causes -# performance problems for the file system. -# The default value is: NO. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# Possible values are: Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese- -# Traditional, Croatian, Czech, Danish, Dutch, English, Esperanto, Farsi, -# Finnish, French, German, Greek, Hungarian, Italian, Japanese, Japanese-en, -# Korean, Korean-en, Latvian, Norwegian, Macedonian, Persian, Polish, -# Portuguese, Romanian, Russian, Serbian, Slovak, Slovene, Spanish, Swedish, -# Turkish, Ukrainian and Vietnamese. -# The default value is: English. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member -# descriptions after the members that are listed in the file and class -# documentation (similar to Javadoc). Set to NO to disable this. -# The default value is: YES. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief -# description of a member or function before the detailed description -# -# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. -# The default value is: YES. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator that is -# used to form the text in various listings. Each string in this list, if found -# as the leading text of the brief description, will be stripped from the text -# and the result, after processing the whole list, is used as the annotated -# text. Otherwise, the brief description is used as-is. If left blank, the -# following values are used ($name is automatically replaced with the name of -# the entity):The $name class, The $name widget, The $name file, is, provides, -# specifies, contains, represents, a, an and the. 
- -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# doxygen will generate a detailed section even if there is only a brief -# description. -# The default value is: NO. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. -# The default value is: NO. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path -# before files name in the file list and in the header files. If set to NO the -# shortest path that makes the file name unique will be used -# The default value is: YES. - -FULL_PATH_NAMES = YES - -# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. -# Stripping is only done if one of the specified strings matches the left-hand -# part of the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the path to -# strip. -# -# Note that you can specify absolute paths here, but also relative paths, which -# will be relative from the directory where doxygen is started. -# This tag requires that the tag FULL_PATH_NAMES is set to YES. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the -# path mentioned in the documentation of a class, which tells the reader which -# header file to include in order to use a class. If left blank only the name of -# the header file containing the class definition is used. Otherwise one should -# specify the list of include paths that are normally passed to the compiler -# using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but -# less readable) file names. This can be useful is your file systems doesn't -# support long names like on DOS, Mac, or CD-ROM. -# The default value is: NO. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the -# first line (until the first dot) of a Javadoc-style comment as the brief -# description. If set to NO, the Javadoc-style will behave just like regular Qt- -# style comments (thus requiring an explicit @brief command for a brief -# description.) -# The default value is: NO. - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first -# line (until the first dot) of a Qt-style comment as the brief description. If -# set to NO, the Qt-style will behave just like regular Qt-style comments (thus -# requiring an explicit \brief command for a brief description.) -# The default value is: NO. - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a -# multi-line C++ special comment block (i.e. a block of //! or /// comments) as -# a brief description. This used to be the default behavior. The new default is -# to treat a multi-line C++ comment block as a detailed description. Set this -# tag to YES if you prefer the old behavior instead. -# -# Note that setting this tag to YES also means that rational rose comments are -# not recognized any more. 
-# The default value is: NO. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the -# documentation from any documented member that it re-implements. -# The default value is: YES. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a -# new page for each member. If set to NO, the documentation of a member will be -# part of the file/class/namespace that contains it. -# The default value is: NO. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen -# uses this value to replace tabs by spaces in code fragments. -# Minimum value: 1, maximum value: 16, default value: 4. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that act as commands in -# the documentation. An alias has the form: -# name=value -# For example adding -# "sideeffect=@par Side Effects:\n" -# will allow you to put the command \sideeffect (or @sideeffect) in the -# documentation, which will result in a user-defined paragraph with heading -# "Side Effects:". You can put \n's in the value part of an alias to insert -# newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# This tag can be used to specify a number of word-keyword mappings (TCL only). -# A mapping has the form "name=value". For example adding "class=itcl::class" -# will allow you to use the command class in the itcl::class meaning. - -TCL_SUBST = - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources -# only. Doxygen will then generate output that is more tailored for C. For -# instance, some of the names that are used will be different. The list of all -# members will be omitted, etc. -# The default value is: NO. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or -# Python sources only. Doxygen will then generate output that is more tailored -# for that language. For instance, namespaces will be presented as packages, -# qualified scopes will look different, etc. -# The default value is: NO. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources. Doxygen will then generate output that is tailored for Fortran. -# The default value is: NO. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for VHDL. -# The default value is: NO. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it -# parses. With this tag you can assign which parser to use for a given -# extension. Doxygen has a built-in mapping, but you can override or extend it -# using this tag. The format is ext=language, where ext is a file extension, and -# language is one of the parsers supported by doxygen: IDL, Java, Javascript, -# C#, C, C++, D, PHP, Objective-C, Python, Fortran, VHDL. For instance to make -# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C -# (default is Fortran), use: inc=Fortran f=C. -# -# Note For files without extension you can use no_extension as a placeholder. 
-# -# Note that for custom extensions you also need to set FILE_PATTERNS otherwise -# the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments -# according to the Markdown format, which allows for more readable -# documentation. See http://daringfireball.net/projects/markdown/ for details. -# The output of markdown processing is further processed by doxygen, so you can -# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in -# case of backward compatibilities issues. -# The default value is: YES. - -MARKDOWN_SUPPORT = YES - -# When enabled doxygen tries to link words that correspond to documented -# classes, or namespaces to their corresponding documentation. Such a link can -# be prevented in individual cases by by putting a % sign in front of the word -# or globally by setting AUTOLINK_SUPPORT to NO. -# The default value is: YES. - -AUTOLINK_SUPPORT = YES - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should set this -# tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); -# versus func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. -# The default value is: NO. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. -# The default value is: NO. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: -# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen -# will parse them like normal C++ but will assume all classes use public instead -# of private inheritance when no explicit protection keyword is present. -# The default value is: NO. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate -# getter and setter methods for a property. Setting this option to YES will make -# doxygen to replace the get and set methods by a property in the documentation. -# This will only work if the methods are indeed getting or setting a simple -# type. If this is not the case, or you want to show the methods anyway, you -# should set this option to NO. -# The default value is: YES. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. -# The default value is: NO. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES to allow class member groups of the same type -# (for instance a group of public functions) to be put as a subgroup of that -# type (e.g. under the Public Functions section). Set it to NO to prevent -# subgrouping. Alternatively, this can be done per class using the -# \nosubgrouping command. -# The default value is: YES. - -SUBGROUPING = YES - -# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions -# are shown inside the group in which they are included (e.g. using \ingroup) -# instead of on a separate page (for HTML and Man pages) or section (for LaTeX -# and RTF). 
-# -# Note that this feature does not work in combination with -# SEPARATE_MEMBER_PAGES. -# The default value is: NO. - -INLINE_GROUPED_CLASSES = NO - -# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions -# with only public data fields or simple typedef fields will be shown inline in -# the documentation of the scope in which they are defined (i.e. file, -# namespace, or group documentation), provided this scope is documented. If set -# to NO, structs, classes, and unions are shown on a separate page (for HTML and -# Man pages) or section (for LaTeX and RTF). -# The default value is: NO. - -INLINE_SIMPLE_STRUCTS = NO - -# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or -# enum is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically be -# useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. -# The default value is: NO. - -TYPEDEF_HIDES_STRUCT = NO - -# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This -# cache is used to resolve symbols given their name and scope. Since this can be -# an expensive process and often the same symbol appears multiple times in the -# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small -# doxygen will become slower. If the cache is too large, memory is wasted. The -# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range -# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 -# symbols. At the end of a run doxygen will report the cache usage and suggest -# the optimal cache size from a speed point of view. -# Minimum value: 0, maximum value: 9, default value: 0. - -LOOKUP_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. Private -# class members and static file members will be hidden unless the -# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. -# Note: This will also disable the warnings about undocumented members that are -# normally produced when WARNINGS is set to YES. -# The default value is: NO. - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will -# be included in the documentation. -# The default value is: NO. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal -# scope will be included in the documentation. -# The default value is: NO. - -EXTRACT_PACKAGE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file will be -# included in the documentation. -# The default value is: NO. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined -# locally in source files will be included in the documentation. If set to NO -# only classes defined in header files are included. Does not have any effect -# for Java sources. -# The default value is: YES. 
- -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local methods, -# which are defined in the implementation section but not in the interface are -# included in the documentation. If set to NO only methods in the interface are -# included. -# The default value is: NO. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base name of -# the file that contains the anonymous namespace. By default anonymous namespace -# are hidden. -# The default value is: NO. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all -# undocumented members inside documented classes or files. If set to NO these -# members will be included in the various overviews, but no documentation -# section is generated. This option has no effect if EXTRACT_ALL is enabled. -# The default value is: NO. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. If set -# to NO these classes will be included in the various overviews. This option has -# no effect if EXTRACT_ALL is enabled. -# The default value is: NO. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend -# (class|struct|union) declarations. If set to NO these declarations will be -# included in the documentation. -# The default value is: NO. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any -# documentation blocks found inside the body of a function. If set to NO these -# blocks will be appended to the function's detailed documentation block. -# The default value is: NO. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation that is typed after a -# \internal command is included. If the tag is set to NO then the documentation -# will be excluded. Set it to YES to include the internal documentation. -# The default value is: NO. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file -# names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. -# The default value is: system dependent. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with -# their full class and namespace scopes in the documentation. If set to YES the -# scope will be hidden. -# The default value is: NO. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of -# the files that are included by a file in the documentation of that file. -# The default value is: YES. - -SHOW_INCLUDE_FILES = YES - -# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include -# files with double quotes in the documentation rather than with sharp brackets. -# The default value is: NO. - -FORCE_LOCAL_INCLUDES = NO - -# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the -# documentation for inline members. -# The default value is: YES. 
- -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the -# (detailed) documentation of file and class members alphabetically by member -# name. If set to NO the members will appear in declaration order. -# The default value is: YES. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief -# descriptions of file, namespace and class members alphabetically by member -# name. If set to NO the members will appear in declaration order. -# The default value is: NO. - -SORT_BRIEF_DOCS = NO - -# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the -# (brief and detailed) documentation of class members so that constructors and -# destructors are listed first. If set to NO the constructors will appear in the -# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. -# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief -# member documentation. -# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting -# detailed member documentation. -# The default value is: NO. - -SORT_MEMBERS_CTORS_1ST = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy -# of group names into alphabetical order. If set to NO the group names will -# appear in their defined order. -# The default value is: NO. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by -# fully-qualified names, including namespaces. If set to NO, the class list will -# be sorted only by class name, not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the alphabetical -# list. -# The default value is: NO. - -SORT_BY_SCOPE_NAME = YES - -# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper -# type resolution of all parameters of a function it will reject a match between -# the prototype and the implementation of a member function even if there is -# only one candidate or it is obvious which candidate to choose by doing a -# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still -# accept a match between prototype and implementation in such cases. -# The default value is: NO. - -STRICT_PROTO_MATCHING = NO - -# The GENERATE_TODOLIST tag can be used to enable ( YES) or disable ( NO) the -# todo list. This list is created by putting \todo commands in the -# documentation. -# The default value is: YES. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable ( YES) or disable ( NO) the -# test list. This list is created by putting \test commands in the -# documentation. -# The default value is: YES. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable ( YES) or disable ( NO) the bug -# list. This list is created by putting \bug commands in the documentation. -# The default value is: YES. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable ( YES) or disable ( NO) -# the deprecated list. This list is created by putting \deprecated commands in -# the documentation. -# The default value is: YES. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional documentation -# sections, marked by \if ... \endif and \cond -# ... \endcond blocks. 
- -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the -# initial value of a variable or macro / define can have for it to appear in the -# documentation. If the initializer consists of more lines than specified here -# it will be hidden. Use a value of 0 to hide initializers completely. The -# appearance of the value of individual variables and macros / defines can be -# controlled using \showinitializer or \hideinitializer command in the -# documentation regardless of this setting. -# Minimum value: 0, maximum value: 10000, default value: 30. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at -# the bottom of the documentation of classes and structs. If set to YES the list -# will mention the files that were used to generate the documentation. -# The default value is: YES. - -SHOW_USED_FILES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This -# will remove the Files entry from the Quick Index and from the Folder Tree View -# (if specified). -# The default value is: YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces -# page. This will remove the Namespaces entry from the Quick Index and from the -# Folder Tree View (if specified). -# The default value is: YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command command input-file, where command is the value of the -# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided -# by doxygen. Whatever the program writes to standard output is used as the file -# version. For an example see the documentation. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed -# by doxygen. The layout file controls the global structure of the generated -# output files in an output format independent way. To create the layout file -# that represents doxygen's defaults, run doxygen with the -l option. You can -# optionally specify a file name after the option, if omitted DoxygenLayout.xml -# will be used as the name of the layout file. -# -# Note that if you run doxygen from a directory containing a file called -# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE -# tag is left empty. - -LAYOUT_FILE = - -# The CITE_BIB_FILES tag can be used to specify one or more bib files containing -# the reference definitions. This must be a list of .bib files. The .bib -# extension is automatically appended if omitted. This requires the bibtex tool -# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info. -# For LaTeX the style of the bibliography can be controlled using -# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the -# search path. Do not use file names with spaces, bibtex cannot handle them. See -# also \cite for info how to create references. 
- -CITE_BIB_FILES = - -#--------------------------------------------------------------------------- -# Configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated to -# standard output by doxygen. If QUIET is set to YES this implies that the -# messages are off. -# The default value is: NO. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated to standard error ( stderr) by doxygen. If WARNINGS is set to YES -# this implies that the warnings are on. -# -# Tip: Turn warnings on while writing the documentation. -# The default value is: YES. - -WARNINGS = YES - -# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate -# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag -# will automatically be disabled. -# The default value is: YES. - -WARN_IF_UNDOCUMENTED = YES - -# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some parameters -# in a documented function, or documenting parameters that don't exist or using -# markup commands wrongly. -# The default value is: YES. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that -# are documented, but have no documentation for their parameters or return -# value. If set to NO doxygen will only warn about wrong or incomplete parameter -# documentation, but not about the absence of documentation. -# The default value is: NO. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that doxygen -# can produce. The string should contain the $file, $line, and $text tags, which -# will be replaced by the file and line number from which the warning originated -# and the warning text. Optionally the format may contain $version, which will -# be replaced by the version of the file (if it could be obtained via -# FILE_VERSION_FILTER) -# The default value is: $file:$line: $text. - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning and error -# messages should be written. If left blank the output is written to standard -# error (stderr). - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# Configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag is used to specify the files and/or directories that contain -# documented source files. You may enter file names like myfile.cpp or -# directories like /usr/src/myproject. Separate the files or directories with -# spaces. -# Note: If this tag is empty the current directory is searched. - -INPUT = ../include \ - ../src/lib_json \ - . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses -# libiconv (or the iconv built into libc) for the transcoding. See the libiconv -# documentation (see: http://www.gnu.org/software/libiconv) for the list of -# possible encodings. -# The default value is: UTF-8. 
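The %NAME% markers in this template (such as %TOPDIR% and %WARNING_LOG_PATH% above, and %HTML_OUTPUT% further down) are meant to be replaced before Doxygen runs; the script that performs the substitution is not part of this excerpt. A hedged sketch of that kind of expansion, with made-up values:

    def expand_doxyfile(template_text, values):
        # Replace each %NAME% marker with the corresponding value.
        for name, value in values.items():
            template_text = template_text.replace('%' + name + '%', value)
        return template_text

    # expand_doxyfile(open('doxyfile.in').read(),
    #                 {'JSONCPP_VERSION': '1.0.0', 'TOPDIR': '..',
    #                  'DOC_TOPDIR': 'dist/doxygen', 'HTML_OUTPUT': 'html',
    #                  'WARNING_LOG_PATH': 'doxygen-warnings.log'})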
- -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and -# *.h) to filter out the source-files in the directories. If left blank the -# following patterns are tested:*.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii, -# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, -# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, -# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, -# *.qsf, *.as and *.js. - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to specify whether or not subdirectories should -# be searched for input files as well. -# The default value is: NO. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should be -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. -# -# Note that relative paths are relative to the directory from which doxygen is -# run. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or -# directories that are symbolic links (a Unix file system feature) are excluded -# from the input. -# The default value is: NO. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. -# -# Note that the wildcards are matched against the file with absolute path, so to -# exclude all test directories for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test -# -# Note that the wildcards are matched against the file with absolute path, so to -# exclude all test directories use the pattern */test/* - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or directories -# that contain example code fragments that are included (see the \include -# command). - -EXAMPLE_PATH = .. - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and -# *.h) to filter out the source-files in the directories. If left blank all -# files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude commands -# irrespective of the value of the RECURSIVE tag. -# The default value is: NO. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or directories -# that contain images that are to be included in the documentation (see the -# \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command: -# -# -# -# where is the value of the INPUT_FILTER tag, and is the -# name of an input file. 
Doxygen will then use the output that the filter -# program writes to standard output. If FILTER_PATTERNS is specified, this tag -# will be ignored. -# -# Note that the filter must not add or remove lines; it is applied before the -# code is scanned, but not when the output code is generated. If lines are added -# or removed, the anchors will not be placed correctly. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. The filters are a list of the form: pattern=filter -# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how -# filters are used. If the FILTER_PATTERNS tag is empty or if none of the -# patterns match the file name, INPUT_FILTER is applied. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER ) will also be used to filter the input files that are used for -# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). -# The default value is: NO. - -FILTER_SOURCE_FILES = NO - -# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file -# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and -# it is also possible to disable source filtering for a specific pattern using -# *.ext= (so without naming a filter). -# This tag requires that the tag FILTER_SOURCE_FILES is set to YES. - -FILTER_SOURCE_PATTERNS = - -# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that -# is part of the input, its contents will be placed on the main page -# (index.html). This can be useful if you have a project on for instance GitHub -# and want to reuse the introduction page also for the doxygen output. - -USE_MDFILE_AS_MAINPAGE = - -#--------------------------------------------------------------------------- -# Configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will be -# generated. Documented entities will be cross-referenced with these sources. -# -# Note: To get rid of all source code in the generated output, make sure that -# also VERBATIM_HEADERS is set to NO. -# The default value is: NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body of functions, -# classes and enums directly into the documentation. -# The default value is: NO. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any -# special comment blocks from generated source code fragments. Normal C, C++ and -# Fortran comments will always remain visible. -# The default value is: YES. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES then for each documented -# function all documented functions referencing it will be listed. -# The default value is: NO. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES then for each documented function -# all documented entities called/used by that function will be listed. -# The default value is: NO. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set -# to YES, then the hyperlinks from functions in REFERENCES_RELATION and -# REFERENCED_BY_RELATION lists will link to the source code. 
Otherwise they will -# link to the documentation. -# The default value is: YES. - -REFERENCES_LINK_SOURCE = YES - -# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the -# source code will show a tooltip with additional information such as prototype, -# brief description and links to the definition and documentation. Since this -# will make the HTML file larger and loading of large files a bit slower, you -# can opt to disable this feature. -# The default value is: YES. -# This tag requires that the tag SOURCE_BROWSER is set to YES. - -SOURCE_TOOLTIPS = YES - -# If the USE_HTAGS tag is set to YES then the references to source code will -# point to the HTML generated by the htags(1) tool instead of doxygen built-in -# source browser. The htags tool is part of GNU's global source tagging system -# (see http://www.gnu.org/software/global/global.html). You will need version -# 4.8.6 or higher. -# -# To use it do the following: -# - Install the latest version of global -# - Enable SOURCE_BROWSER and USE_HTAGS in the config file -# - Make sure the INPUT points to the root of the source tree -# - Run doxygen as normal -# -# Doxygen will invoke htags (and that will in turn invoke gtags), so these -# tools must be available from the command line (i.e. in the search path). -# -# The result: instead of the source browser generated by doxygen, the links to -# source code will now point to the output of htags. -# The default value is: NO. -# This tag requires that the tag SOURCE_BROWSER is set to YES. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a -# verbatim copy of the header file for each class for which an include is -# specified. Set to NO to disable this. -# See also: Section \class. -# The default value is: YES. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# Configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all -# compounds will be generated. Enable this if the project contains a lot of -# classes, structs, unions or interfaces. -# The default value is: YES. - -ALPHABETICAL_INDEX = NO - -# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in -# which the alphabetical index list will be split. -# Minimum value: 1, maximum value: 20, default value: 5. -# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all classes will -# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag -# can be used to specify a prefix (or a list of prefixes) that should be ignored -# while generating the index headers. -# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output -# The default value is: YES. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a -# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of -# it. -# The default directory is: html. 
-# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each -# generated HTML page (for example: .htm, .php, .asp). -# The default value is: .html. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a user-defined HTML header file for -# each generated HTML page. If the tag is left blank doxygen will generate a -# standard header. -# -# To get valid HTML, the header file must include any scripts and style sheets -# that doxygen needs, which depend on the configuration options used (e.g. -# the setting GENERATE_TREEVIEW). It is highly recommended to start with a -# default header using -# doxygen -w html new_header.html new_footer.html new_stylesheet.css -# YourConfigFile -# and then modify the file new_header.html. See also section "Doxygen usage" -# for information on how to generate the default header that doxygen normally -# uses. -# Note: The header is subject to change so you typically have to regenerate the -# default header when upgrading to a newer version of doxygen. For a description -# of the possible markers and block names see the documentation. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each -# generated HTML page. If the tag is left blank doxygen will generate a standard -# footer. See HTML_HEADER for more information on how to generate a default -# footer and what special commands can be used inside the footer. See also -# section "Doxygen usage" for information on how to generate the default footer -# that doxygen normally uses. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style -# sheet that is used by each HTML page. It can be used to fine-tune the look of -# the HTML output. If left blank doxygen will generate a default style sheet. -# See also section "Doxygen usage" for information on how to generate the style -# sheet that doxygen normally uses. -# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as -# it is more robust and this tag (HTML_STYLESHEET) will in the future become -# obsolete. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_STYLESHEET = - -# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user- -# defined cascading style sheet that is included after the standard style sheets -# created by doxygen. Using this option one can overrule certain style aspects. -# This is preferred over using HTML_STYLESHEET since it does not replace the -# standard style sheet and is therefore more robust against future updates. -# Doxygen will copy the style sheet file to the output directory. For an example -# see the documentation. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_EXTRA_STYLESHEET = - -# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or -# other source files which should be copied to the HTML output directory. Note -# that these files will be copied to the base HTML output directory. Use the -# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these -# files. In the HTML_STYLESHEET file, use the file name only.
Also note that the -# files will be copied as-is; there are no commands or markers available. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_EXTRA_FILES = - -# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen -# will adjust the colors in the stylesheet and background images according to -# this color. Hue is specified as an angle on a colorwheel, see -# http://en.wikipedia.org/wiki/Hue for more information. For instance the value -# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 -# purple, and 360 is red again. -# Minimum value: 0, maximum value: 359, default value: 220. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_COLORSTYLE_HUE = 220 - -# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors -# in the HTML output. For a value of 0 the output will use grayscales only. A -# value of 255 will produce the most vivid colors. -# Minimum value: 0, maximum value: 255, default value: 100. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_COLORSTYLE_SAT = 100 - -# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the -# luminance component of the colors in the HTML output. Values below 100 -# gradually make the output lighter, whereas values above 100 make the output -# darker. The value divided by 100 is the actual gamma applied, so 80 represents -# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not -# change the gamma. -# Minimum value: 40, maximum value: 240, default value: 80. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_COLORSTYLE_GAMMA = 80 - -# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML -# page will contain the date and time when the page was generated. Setting this -# to NO can help when comparing the output of multiple runs. -# The default value is: YES. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_TIMESTAMP = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_DYNAMIC_SECTIONS = YES - -# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries -# shown in the various tree structured indices initially; the user can expand -# and collapse entries dynamically later on. Doxygen will expand the tree to -# such a level that at most the specified number of entries are visible (unless -# a fully collapsed tree already exceeds this amount). So setting the number of -# entries 1 will produce a full collapsed tree by default. 0 is a special value -# representing an infinite number of entries and will result in a full expanded -# tree by default. -# Minimum value: 0, maximum value: 9999, default value: 100. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_INDEX_NUM_ENTRIES = 100 - -# If the GENERATE_DOCSET tag is set to YES, additional index files will be -# generated that can be used as input for Apple's Xcode 3 integrated development -# environment (see: http://developer.apple.com/tools/xcode/), introduced with -# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a -# Makefile in the HTML output directory. 
Running make will produce the docset in -# that directory and running make install will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at -# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html -# for more information. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -GENERATE_DOCSET = NO - -# This tag determines the name of the docset feed. A documentation feed provides -# an umbrella under which multiple documentation sets from a single provider -# (such as a company or product suite) can be grouped. -# The default value is: Doxygen generated docs. -# This tag requires that the tag GENERATE_DOCSET is set to YES. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# This tag specifies a string that should uniquely identify the documentation -# set bundle. This should be a reverse domain-name style string, e.g. -# com.mycompany.MyDocSet. Doxygen will append .docset to the name. -# The default value is: org.doxygen.Project. -# This tag requires that the tag GENERATE_DOCSET is set to YES. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify -# the documentation publisher. This should be a reverse domain-name style -# string, e.g. com.mycompany.MyDocSet.documentation. -# The default value is: org.doxygen.Publisher. -# This tag requires that the tag GENERATE_DOCSET is set to YES. - -DOCSET_PUBLISHER_ID = org.doxygen.Publisher - -# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. -# The default value is: Publisher. -# This tag requires that the tag GENERATE_DOCSET is set to YES. - -DOCSET_PUBLISHER_NAME = Publisher - -# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three -# additional HTML index files: index.hhp, index.hhc, and index.hhk. The -# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop -# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on -# Windows. -# -# The HTML Help Workshop contains a compiler that can convert all HTML output -# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML -# files are now used as the Windows 98 help format, and will replace the old -# Windows help format (.hlp) on all Windows platforms in the future. Compressed -# HTML files also contain an index, a table of contents, and you can search for -# words in the documentation. The HTML workshop also contains a viewer for -# compressed HTML files. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -GENERATE_HTMLHELP = %HTML_HELP% - -# The CHM_FILE tag can be used to specify the file name of the resulting .chm -# file. You can add a path in front of the file if the result should not be -# written to the html output directory. -# This tag requires that the tag GENERATE_HTMLHELP is set to YES. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# The HHC_LOCATION tag can be used to specify the location (absolute path -# including file name) of the HTML help compiler ( hhc.exe). If non-empty -# doxygen will try to run the HTML help compiler on the generated index.hhp. -# The file has to be specified with full path. -# This tag requires that the tag GENERATE_HTMLHELP is set to YES. 
- -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# The GENERATE_CHI flag controls if a separate .chi index file is generated ( -# YES) or that it should be included in the master .chm file ( NO). -# The default value is: NO. -# This tag requires that the tag GENERATE_HTMLHELP is set to YES. - -GENERATE_CHI = YES - -# The CHM_INDEX_ENCODING is used to encode HtmlHelp index ( hhk), content ( hhc) -# and project file content. -# This tag requires that the tag GENERATE_HTMLHELP is set to YES. - -CHM_INDEX_ENCODING = - -# The BINARY_TOC flag controls whether a binary table of contents is generated ( -# YES) or a normal table of contents ( NO) in the .chm file. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTMLHELP is set to YES. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members to -# the table of contents of the HTML help documentation and to the tree view. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTMLHELP is set to YES. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and -# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that -# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help -# (.qch) of the generated HTML documentation. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify -# the file name of the resulting .qch file. The path specified is relative to -# the HTML output folder. -# This tag requires that the tag GENERATE_QHP is set to YES. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help -# Project output. For more information please see Qt Help Project / Namespace -# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). -# The default value is: org.doxygen.Project. -# This tag requires that the tag GENERATE_QHP is set to YES. - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt -# Help Project output. For more information please see Qt Help Project / Virtual -# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- -# folders). -# The default value is: doc. -# This tag requires that the tag GENERATE_QHP is set to YES. - -QHP_VIRTUAL_FOLDER = doc - -# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom -# filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- -# filters). -# This tag requires that the tag GENERATE_QHP is set to YES. - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the -# custom filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- -# filters). -# This tag requires that the tag GENERATE_QHP is set to YES. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this -# project's filter section matches. Qt Help Project / Filter Attributes (see: -# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). -# This tag requires that the tag GENERATE_QHP is set to YES. - -QHP_SECT_FILTER_ATTRS = - -# The QHG_LOCATION tag can be used to specify the location of Qt's -# qhelpgenerator. 
If non-empty doxygen will try to run qhelpgenerator on the -# generated .qhp file. -# This tag requires that the tag GENERATE_QHP is set to YES. - -QHG_LOCATION = - -# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be -# generated, together with the HTML files, they form an Eclipse help plugin. To -# install this plugin and make it available under the help contents menu in -# Eclipse, the contents of the directory containing the HTML and XML files needs -# to be copied into the plugins directory of eclipse. The name of the directory -# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. -# After copying Eclipse needs to be restarted before the help appears. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -GENERATE_ECLIPSEHELP = NO - -# A unique identifier for the Eclipse help plugin. When installing the plugin -# the directory name containing the HTML and XML files should also have this -# name. Each documentation set should have its own identifier. -# The default value is: org.doxygen.Project. -# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. - -ECLIPSE_DOC_ID = org.doxygen.Project - -# If you want full control over the layout of the generated HTML pages it might -# be necessary to disable the index and replace it with your own. The -# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top -# of each HTML page. A value of NO enables the index and the value YES disables -# it. Since the tabs in the index contain the same information as the navigation -# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -DISABLE_INDEX = NO - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. If the tag -# value is set to YES, a side panel will be generated containing a tree-like -# index structure (just like the one that is generated for HTML Help). For this -# to work a browser that supports JavaScript, DHTML, CSS and frames is required -# (i.e. any modern browser). Windows users are probably better off using the -# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can -# further fine-tune the look of the index. As an example, the default style -# sheet generated by doxygen has an example that shows how to put an image at -# the root of the tree instead of the PROJECT_NAME. Since the tree basically has -# the same information as the tab index, you could consider setting -# DISABLE_INDEX to YES when enabling this option. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -GENERATE_TREEVIEW = NO - -# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that -# doxygen will group on one line in the generated HTML documentation. -# -# Note that a value of 0 will completely suppress the enum values from appearing -# in the overview section. -# Minimum value: 0, maximum value: 20, default value: 4. -# This tag requires that the tag GENERATE_HTML is set to YES. - -ENUM_VALUES_PER_LINE = 4 - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used -# to set the initial width (in pixels) of the frame in which the tree is shown. -# Minimum value: 0, maximum value: 1500, default value: 250. -# This tag requires that the tag GENERATE_HTML is set to YES. 
- -TREEVIEW_WIDTH = 250 - -# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to -# external symbols imported via tag files in a separate window. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -EXT_LINKS_IN_WINDOW = NO - -# Use this tag to change the font size of LaTeX formulas included as images in -# the HTML documentation. When you change the font size after a successful -# doxygen run you need to manually remove any form_*.png images from the HTML -# output directory to force them to be regenerated. -# Minimum value: 8, maximum value: 50, default value: 10. -# This tag requires that the tag GENERATE_HTML is set to YES. - -FORMULA_FONTSIZE = 10 - -# Use the FORMULA_TRANSPARENT tag to determine whether or not the images -# generated for formulas are transparent PNGs. Transparent PNGs are not -# supported properly for IE 6.0, but are supported on all modern browsers. -# -# Note that when changing this option you need to delete any form_*.png files in -# the HTML output directory before the changes have effect. -# The default value is: YES. -# This tag requires that the tag GENERATE_HTML is set to YES. - -FORMULA_TRANSPARENT = YES - -# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see -# http://www.mathjax.org) which uses client side Javascript for the rendering -# instead of using prerendered bitmaps. Use this if you do not have LaTeX -# installed or if you want the formulas to look prettier in the HTML output. When -# enabled you may also need to install MathJax separately and configure the path -# to it using the MATHJAX_RELPATH option. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -USE_MATHJAX = NO - -# When MathJax is enabled you can set the default output format to be used for -# the MathJax output. See the MathJax site (see: -# http://docs.mathjax.org/en/latest/output.html) for more details. -# Possible values are: HTML-CSS (which is slower, but has the best -# compatibility), NativeMML (i.e. MathML) and SVG. -# The default value is: HTML-CSS. -# This tag requires that the tag USE_MATHJAX is set to YES. - -MATHJAX_FORMAT = HTML-CSS - -# When MathJax is enabled you need to specify the location relative to the HTML -# output directory using the MATHJAX_RELPATH option. The destination directory -# should contain the MathJax.js script. For instance, if the mathjax directory -# is located at the same level as the HTML output directory, then -# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax -# Content Delivery Network so you can quickly see the result without installing -# MathJax. However, it is strongly recommended to install a local copy of -# MathJax from http://www.mathjax.org before deployment. -# The default value is: http://cdn.mathjax.org/mathjax/latest. -# This tag requires that the tag USE_MATHJAX is set to YES. - -MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest - -# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax -# extension names that should be enabled during MathJax rendering. For example -# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols -# This tag requires that the tag USE_MATHJAX is set to YES. - -MATHJAX_EXTENSIONS = - -# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces -# of code that will be used on startup of the MathJax code. See the MathJax site -# (see: http://docs.mathjax.org/en/latest/output.html) for more details.
For an -# example see the documentation. -# This tag requires that the tag USE_MATHJAX is set to YES. - -MATHJAX_CODEFILE = - -# When the SEARCHENGINE tag is enabled doxygen will generate a search box for -# the HTML output. The underlying search engine uses javascript and DHTML and -# should work on any modern browser. Note that when using HTML help -# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) -# there is already a search function so this one should typically be disabled. -# For large projects the javascript based search engine can be slow, then -# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to -# search using the keyboard; to jump to the search box use <access key> + S -# (what the <access key> is depends on the OS and browser, but it is typically -# <CTRL>, <ALT>/