removed jsoncpp (nw)

Miodrag Milanovic 2016-01-09 21:11:04 +01:00
parent 50a72771ff
commit 2382be9ea8
233 changed files with 0 additions and 24371 deletions

@@ -1,47 +0,0 @@
---
# BasedOnStyle: LLVM
AccessModifierOffset: -2
ConstructorInitializerIndentWidth: 4
AlignEscapedNewlinesLeft: false
AlignTrailingComments: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowShortIfStatementsOnASingleLine: false
AllowShortLoopsOnASingleLine: false
AlwaysBreakTemplateDeclarations: false
AlwaysBreakBeforeMultilineStrings: false
BreakBeforeBinaryOperators: false
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: false
BinPackParameters: false
ColumnLimit: 80
ConstructorInitializerAllOnOneLineOrOnePerLine: false
DerivePointerBinding: false
ExperimentalAutoDetectBinPacking: false
IndentCaseLabels: false
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ObjCSpaceBeforeProtocolList: true
PenaltyBreakBeforeFirstCallParameter: 19
PenaltyBreakComment: 60
PenaltyBreakString: 1000
PenaltyBreakFirstLessLess: 120
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 60
PointerBindsToType: true
SpacesBeforeTrailingComments: 1
Cpp11BracedListStyle: false
Standard: Cpp03
IndentWidth: 2
TabWidth: 8
UseTab: Never
BreakBeforeBraces: Attach
IndentFunctionDeclarationAfterType: false
SpacesInParentheses: false
SpacesInAngles: false
SpaceInEmptyParentheses: false
SpacesInCStyleCastParentheses: false
SpaceAfterControlStatementKeyword: true
SpaceBeforeAssignmentOperators: true
ContinuationIndentWidth: 4
...

@@ -1,36 +0,0 @@
/build/
*.pyc
*.swp
*.actual
*.actual-rewrite
*.process-output
*.rewrite
/bin/
/buildscons/
/libs/
/doc/doxyfile
/dist/
#/version
#/include/json/version.h
# MSVC project files:
*.sln
*.vcxproj
*.filters
*.user
*.sdf
*.opensdf
*.suo
# MSVC build files:
*.lib
*.obj
*.tlog/
*.pdb
# CMake-generated files:
CMakeFiles/
CTestTestFile.cmake
cmake_install.cmake
pkg-config/jsoncpp.pc
jsoncpp_lib_static.dir/

@@ -1,25 +0,0 @@
# Build matrix / environment variable are explained on:
# http://about.travis-ci.org/docs/user/build-configuration/
# This file can be validated on:
# http://lint.travis-ci.org/
#before_install: sudo apt-get install -y cmake
# cmake is pre-installed in Travis for both linux and osx
before_install:
- sudo apt-get update -qq
- sudo apt-get install -qq valgrind
os:
- linux
language: cpp
compiler:
- gcc
- clang
script: ./travis.sh
env:
matrix:
- SHARED_LIB=ON STATIC_LIB=ON CMAKE_PKG=ON BUILD_TYPE=release VERBOSE_MAKE=false
- SHARED_LIB=OFF STATIC_LIB=ON CMAKE_PKG=OFF BUILD_TYPE=debug VERBOSE_MAKE=true VERBOSE
notifications:
email:
- aaronjjacobs@gmail.com

@@ -1 +0,0 @@
Baptiste Lepilleur <blep@users.sourceforge.net>

@@ -1,129 +0,0 @@
# vim: et ts=4 sts=4 sw=4 tw=0
CMAKE_MINIMUM_REQUIRED(VERSION 2.8.5)
PROJECT(jsoncpp)
ENABLE_TESTING()
OPTION(JSONCPP_WITH_TESTS "Compile and (for jsoncpp_check) run JsonCpp test executables" ON)
OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON)
OPTION(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occurs" OFF)
OPTION(JSONCPP_WITH_PKGCONFIG_SUPPORT "Generate and install .pc files" ON)
OPTION(JSONCPP_WITH_CMAKE_PACKAGE "Generate and install cmake package files" OFF)
OPTION(BUILD_SHARED_LIBS "Build jsoncpp_lib as a shared library." OFF)
OPTION(BUILD_STATIC_LIBS "Build jsoncpp_lib static library." ON)
# Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix
IF(NOT WIN32)
IF(NOT CMAKE_BUILD_TYPE)
SET(CMAKE_BUILD_TYPE Release CACHE STRING
"Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage."
FORCE)
ENDIF(NOT CMAKE_BUILD_TYPE)
ENDIF(NOT WIN32)
SET(DEBUG_LIBNAME_SUFFIX "" CACHE STRING "Optional suffix to append to the library name for a debug build")
SET(LIB_SUFFIX "" CACHE STRING "Optional arch-dependent suffix for the library installation directory")
SET(RUNTIME_INSTALL_DIR bin
CACHE PATH "Install dir for executables and dlls")
SET(ARCHIVE_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}
CACHE PATH "Install dir for static libraries")
SET(LIBRARY_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}
CACHE PATH "Install dir for shared libraries")
SET(INCLUDE_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/include
CACHE PATH "Install dir for headers")
SET(PACKAGE_INSTALL_DIR lib${LIB_SUFFIX}/cmake
CACHE PATH "Install dir for cmake package config files")
MARK_AS_ADVANCED( RUNTIME_INSTALL_DIR ARCHIVE_INSTALL_DIR INCLUDE_INSTALL_DIR PACKAGE_INSTALL_DIR )
# Set variable named ${VAR_NAME} to value ${VALUE}
FUNCTION(set_using_dynamic_name VAR_NAME VALUE)
SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE)
ENDFUNCTION(set_using_dynamic_name)
# Extract major, minor, patch from version text
# Parse a version string "X.Y.Z" and outputs
# version parts in ${OUPUT_PREFIX}_MAJOR, _MINOR, _PATCH.
# If parse succeeds then ${OUPUT_PREFIX}_FOUND is TRUE.
MACRO(jsoncpp_parse_version VERSION_TEXT OUPUT_PREFIX)
SET(VERSION_REGEX "[0-9]+\\.[0-9]+\\.[0-9]+(-[a-zA-Z0-9_]+)?")
IF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
STRING(REGEX MATCHALL "[0-9]+|-([A-Za-z0-9_]+)" VERSION_PARTS ${VERSION_TEXT})
LIST(GET VERSION_PARTS 0 ${OUPUT_PREFIX}_MAJOR)
LIST(GET VERSION_PARTS 1 ${OUPUT_PREFIX}_MINOR)
LIST(GET VERSION_PARTS 2 ${OUPUT_PREFIX}_PATCH)
set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" TRUE )
ELSE( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" FALSE )
ENDIF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
ENDMACRO(jsoncpp_parse_version)
# Read out version from "version" file
#FILE(STRINGS "version" JSONCPP_VERSION)
#SET( JSONCPP_VERSION_MAJOR X )
#SET( JSONCPP_VERSION_MINOR Y )
#SET( JSONCPP_VERSION_PATCH Z )
SET( JSONCPP_VERSION 1.6.2 )
jsoncpp_parse_version( ${JSONCPP_VERSION} JSONCPP_VERSION )
#IF(NOT JSONCPP_VERSION_FOUND)
# MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z")
#ENDIF(NOT JSONCPP_VERSION_FOUND)
MESSAGE(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINOR}.${JSONCPP_VERSION_PATCH}")
# File version.h is only regenerated on CMake configure step
CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in"
"${PROJECT_SOURCE_DIR}/include/json/version.h"
NEWLINE_STYLE UNIX )
CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/version.in"
"${PROJECT_SOURCE_DIR}/version"
NEWLINE_STYLE UNIX )
macro(UseCompilationWarningAsError)
if ( MSVC )
# Only enabled in debug because some old versions of VS STL generate
# warnings when compiled in release configuration.
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /WX ")
endif( MSVC )
endmacro()
# Include our configuration header
INCLUDE_DIRECTORIES( ${jsoncpp_SOURCE_DIR}/include )
if ( MSVC )
# Only enabled in debug because some old versions of VS STL generate
# unreachable code warning when compiled in release configuration.
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ")
endif( MSVC )
if (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
# using regular Clang or AppleClang
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wshorten-64-to-32")
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
# using GCC
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x -Wall -Wextra -pedantic")
endif()
IF(JSONCPP_WITH_WARNING_AS_ERROR)
UseCompilationWarningAsError()
ENDIF(JSONCPP_WITH_WARNING_AS_ERROR)
IF(JSONCPP_WITH_PKGCONFIG_SUPPORT)
CONFIGURE_FILE(
"pkg-config/jsoncpp.pc.in"
"pkg-config/jsoncpp.pc"
@ONLY)
INSTALL(FILES "${CMAKE_BINARY_DIR}/pkg-config/jsoncpp.pc"
DESTINATION "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}/pkgconfig")
ENDIF(JSONCPP_WITH_PKGCONFIG_SUPPORT)
IF(JSONCPP_WITH_CMAKE_PACKAGE)
INSTALL(EXPORT jsoncpp
DESTINATION ${PACKAGE_INSTALL_DIR}/jsoncpp
FILE jsoncppConfig.cmake)
ENDIF(JSONCPP_WITH_CMAKE_PACKAGE)
# Build the different applications
ADD_SUBDIRECTORY( src )
#install the includes
ADD_SUBDIRECTORY( include )

@@ -1,55 +0,0 @@
The JsonCpp library's source code, including accompanying documentation,
tests and demonstration applications, are licensed under the following
conditions...
The author (Baptiste Lepilleur) explicitly disclaims copyright in all
jurisdictions which recognize such a disclaimer. In such jurisdictions,
this software is released into the Public Domain.
In jurisdictions which do not recognize Public Domain property (e.g. Germany as of
2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is
released under the terms of the MIT License (see below).
In jurisdictions which recognize Public Domain property, the user of this
software may choose to accept it either as 1) Public Domain, 2) under the
conditions of the MIT License (see below), or 3) under the terms of dual
Public Domain/MIT License conditions described here, as they choose.
The MIT License is about as close to Public Domain as a license can get, and is
described in clear, concise terms at:
http://en.wikipedia.org/wiki/MIT_License
The full text of the MIT License follows:
========================================================================
Copyright (c) 2007-2010 Baptiste Lepilleur
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
========================================================================
(END LICENSE TEXT)
The MIT license is compatible with both the GPL and commercial
software, affording one all of the rights of Public Domain with the
minor nuisance of being required to keep the above copyright notice
and license text in the source code. Note also that by accepting the
Public Domain "license" you can re-license your copy using whatever
license you like.

@@ -1,175 +0,0 @@
New in SVN
----------
* Updated the type system's behavior, in order to better support backwards
compatibility with code that was written before 64-bit integer support was
introduced. Here's how it works now:
* isInt, isInt64, isUInt, and isUInt64 return true if and only if the
value can be exactly represented as that type. In particular, a value
constructed with a double like 17.0 will now return true for all of
these methods.
* isDouble and isFloat now return true for all numeric values, since all
numeric values can be converted to a double or float without
truncation. Note however that the conversion may not be exact -- for
example, doubles cannot exactly represent all integers above 2^53 + 1.
* isBool, isNull, isString, isArray, and isObject now return true if and
only if the value is of that type.
* isConvertibleTo(fooValue) indicates that it is safe to call asFoo.
(For each type foo, isFoo always implies isConvertibleTo(fooValue).)
asFoo returns an approximate or exact representation as appropriate.
For example, a double value may be truncated when asInt is called.
* For backwards compatibility with old code, isConvertibleTo(intValue)
may return false even if type() == intValue. This is because the value
may have been constructed with a 64-bit integer larger than maxInt,
and calling asInt() would cause an exception. If you're writing new
code, use isInt64 to find out whether the value is exactly
representable using an Int64, or asDouble() combined with minInt64 and
maxInt64 to figure out whether it is approximately representable.
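As a rough illustration of the rules above (a sketch only, relying solely on the
semantics described in this list):

#include <json/json.h>
#include <cassert>

int main() {
  Json::Value exact(17.0);            // numeric and exactly an integer
  assert(exact.isDouble() && exact.isInt() && exact.isInt64());
  Json::Value fractional(17.5);       // numeric, but not an integer
  assert(fractional.isDouble() && !fractional.isInt());
  int truncated = fractional.asInt(); // asFoo may approximate: yields 17
  (void)truncated;
  return 0;
}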
* Value
- Patch #10: BOOST_FOREACH compatibility. Made Json::iterator more
standard compliant, added missing iterator_category and value_type
typedefs (contributed by Robert A. Iannucci).
* Compilation
- New CMake based build system. Based in part on contribution from
Igor Okulist and Damien Buhl (Patch #14).
- New header json/version.h now contains version number macros
(JSONCPP_VERSION_MAJOR, JSONCPP_VERSION_MINOR, JSONCPP_VERSION_PATCH
and JSONCPP_VERSION_HEXA).
- Patch #11: added missing JSON_API on some classes causing link issues
when building as a dynamic library on Windows
(contributed by Francis Bolduc).
- Visual Studio DLL: suppressed warning "C4251: <data member>: <type>
needs to have dll-interface to be used by..." via pragma push/pop
in json-cpp headers.
- Added Travis CI integration: https://travis-ci.org/blep/jsoncpp-mirror
* Bug fixes
- Patch #15: Copy constructor does not initialize allocated_ for stringValue
(contributed by rmongia).
- Patch #16: Missing field copy in Json::Value::iterator causing infinite
loop when using experimental internal map (#define JSON_VALUE_USE_INTERNAL_MAP)
(contributed by Ming-Lin Kao).
New in JsonCpp 0.6.0:
---------------------
* Compilation
- LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now
propagated to the build environment as this is required for some
compiler installations.
- Added support for Microsoft Visual Studio 2008 (bug #2930462):
The platform "msvc90" has been added.
Notes: you need to setup the environment by running vcvars32.bat
(e.g. MSVC 2008 command prompt in start menu) before running scons.
- Added support for amalgamated source and header generation (a la sqlite).
Refer to README.md section "Generating amalgamated source and header"
for detail.
* Value
- Removed experimental ValueAllocator, it caused static
initialization/destruction order issues (bug #2934500).
The DefaultValueAllocator has been inlined in code.
- Added support for 64-bit integers:
Types Json::Int64 and Json::UInt64 have been added. They are aliased
to 64-bit integers on systems that support them (based on __int64 on
the Microsoft Visual Studio platform, and long long on other platforms).
Types Json::LargestInt and Json::LargestUInt have been added. They are
aliased to the largest integer type supported:
either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively.
Json::Value::asInt() and Json::Value::asUInt() still return plain
"int"-based types, but assert if an attempt is made to retrieve
a 64-bit value that cannot be represented as the return type.
Json::Value::asInt64() and Json::Value::asUInt64() have been added
to obtain the 64-bit integer value.
Json::Value::asLargestInt() and Json::Value::asLargestUInt() return
the integer as a LargestInt/LargestUInt respectively. Those
functions are typically used when implementing a writer.
The reader attempts to read numbers as 64-bit integers, and falls back
to reading a double if the number is not in the range of a 64-bit
integer.
Warning: Json::Value::asInt() and Json::Value::asUInt() now return
long long. This change breaks code that was passing the return value
to a *printf() function.
Support for 64-bit integers can be disabled by defining the macro
JSON_NO_INT64 (uncomment it in json/config.h for example), though
it should have no impact on existing usage.
- The type Json::ArrayIndex is used for indexes of a JSON value array. It
is an unsigned int (typically 32 bits).
- An array index can be passed as an int to operator[], allowing use of literals:
Json::Value array;
array.append( 1234 );
int value = array[0].asInt(); // did not compile previously
- Added float Json::Value::asFloat() to obtain a floating point value as a
float (avoids the loss-of-precision warning caused by using asDouble()
to initialize a float).
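For illustration, a short sketch using the new 64-bit accessors (the value used
here is arbitrary):

#include <json/json.h>
#include <iostream>

int main() {
  Json::Value big(Json::Int64(1) << 40);  // too large for a 32-bit int
  // asInt() would assert here; the 64-bit accessors are safe:
  Json::Int64 i = big.asInt64();
  Json::LargestInt l = big.asLargestInt();
  float f = Json::Value(3.25).asFloat();  // no loss-of-precision warning
  std::cout << i << " " << l << " " << f << std::endl;
  return 0;
}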
* Reader
- Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages.
Bug #3023708 (Formatted has 2 't'). The old member function is deprecated
but still present for backward compatibility.
* Tests
- Added a test to ensure that the escape sequence "\/" is correctly handled
by the parser.
* Bug fixes
- Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now
correctly detected.
- Bug #3139678: stack buffer overflow when parsing a double with a
length of 32 characters.
- Fixed Value::operator <= implementation (it had the semantics of operator >=).
This was found when adding unit tests for the comparison operators.
- Value::compare() is now const and has an actual implementation with
unit tests.
- Bug #2407932: strpbrk() can fail for NULL pointer.
- Bug #3306345: Fixed minor typo in Path::resolve().
- Bug #3314841/#3306896: errors in amalgamate.py
- Fixed some Coverity warnings and line-endings.
* License
- See file LICENSE for details. Basically JsonCpp is now licensed under
MIT license, or public domain if desired and recognized in your jurisdiction.
Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/] who
helped figure out the solution to the public domain issue.

@@ -1,214 +0,0 @@
Introduction
------------
[JSON][json-org] is a lightweight data-interchange format. It can represent
numbers, strings, ordered sequences of values, and collections of name/value
pairs.
[json-org]: http://json.org/
[JsonCpp][] is a C++ library that allows manipulating JSON values, including
serialization and deserialization to and from strings. It can also preserve
existing comments during deserialization/serialization steps, making it a convenient
format to store user input files.
[JsonCpp]: http://open-source-parsers.github.io/jsoncpp-docs/doxygen/index.html
## A note on backward-compatibility
* `1.y.z` is built with C++11.
* `0.y.z` can be used with older compilers.
* Major versions maintain binary-compatibility.
# Using JsonCpp in your project
-----------------------------
The recommended approach to integrating JsonCpp in your project is to include
the [amalgamated source](#generating-amalgamated-source-and-header) (a single
`.cpp` file and two `.h` files) in your project, and compile and build as you
would any other source file. This ensures consistency of compilation flags and
ABI compatibility, issues which arise when building shared or static
libraries. See the next section for instructions.
The `include/` directory should be added to your compiler include path. JsonCpp headers
should be included as follows:
#include <json/json.h>
If JsonCpp was built as a dynamic library on Windows, then your project needs to
define the macro `JSON_DLL`.
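For illustration, a minimal program (a sketch only; the document content and field
names below are made up) could parse and re-serialize a JSON string as follows:

#include <json/json.h>
#include <iostream>
#include <string>

int main() {
    const std::string doc = "{ \"name\": \"example\", \"count\": 3 }";
    Json::Value root;
    Json::Reader reader;
    if (!reader.parse(doc, root)) {             // parse from a string
        std::cerr << reader.getFormattedErrorMessages();
        return 1;
    }
    std::cout << root["count"].asInt() << "\n"; // access a member
    Json::StyledWriter writer;
    std::cout << writer.write(root);            // serialize back to text
    return 0;
}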
Generating amalgamated source and header
----------------------------------------
JsonCpp is provided with a script to generate a single header and a single
source file to ease inclusion into an existing project. The amalgamated source
can be generated at any time by running the following command from the
top-directory (this requires Python 2.6):
python amalgamate.py
It is possible to specify the header name. See the `-h` option for details.
By default, the following files are generated:
* `dist/jsoncpp.cpp`: source file that needs to be added to your project.
* `dist/json/json.h`: corresponding header file for use in your project. It is
equivalent to including `json/json.h` in non-amalgamated source. This header
only depends on standard headers.
* `dist/json/json-forwards.h`: header that provides forward declaration of all
JsonCpp types.
The amalgamated sources are generated by concatenating JsonCpp source in the
correct order and defining the macro `JSON_IS_AMALGAMATION` to prevent inclusion
of other headers.
# Contributing to JsonCpp
Building and testing with CMake
-------------------------------
[CMake][] is a C++ Makefiles/Solution generator. It is usually available on most
Linux systems as a package. On Ubuntu:
sudo apt-get install cmake
[CMake]: http://www.cmake.org
Note that Python is also required to run the JSON reader/writer tests. If
missing, the build will skip running those tests.
When running CMake, a few parameters are required:
* a build directory where the makefiles/solution are generated. It is also used
to store object, library and executable files.
* the generator to use: makefiles or Visual Studio solution? Which version of
Visual Studio, 32- or 64-bit solution?
Steps for generating solution/makefiles using `cmake-gui`:
* Make "source code" point to the source directory.
* Make "where to build the binary" point to the directory to use for the build.
* Click on the "Grouped" check box.
* Review JsonCpp build options (tick `BUILD_SHARED_LIBS` to build as a
dynamic library).
* Click the configure button at the bottom, then the generate button.
* The generated solution/makefiles can be found in the binary directory.
Alternatively, from the command-line on Unix in the source directory:
mkdir -p build/debug
cd build/debug
cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_STATIC_LIBS=ON -DBUILD_SHARED_LIBS=OFF -DARCHIVE_INSTALL_DIR=. -G "Unix Makefiles" ../..
make
Running `cmake -h` will display the list of available generators (passed using
the `-G` option).
By default CMake hides compilation commands. This can be modified by specifying
`-DCMAKE_VERBOSE_MAKEFILE=true` when generating makefiles.
Building and testing with SCons
-------------------------------
**Note:** The SCons-based build system is deprecated. Please use CMake; see the
section above.
JsonCpp can use [Scons][] as a build system. Note that SCons requires Python to
be installed.
[SCons]: http://www.scons.org/
Invoke SCons as follows:
scons platform=$PLATFORM [TARGET]
where `$PLATFORM` may be one of:
* `suncc`: Sun C++ (Solaris)
* `vacpp`: Visual Age C++ (AIX)
* `mingw`
* `msvc6`: Microsoft Visual Studio 6 service pack 5-6
* `msvc70`: Microsoft Visual Studio 2002
* `msvc71`: Microsoft Visual Studio 2003
* `msvc80`: Microsoft Visual Studio 2005
* `msvc90`: Microsoft Visual Studio 2008
* `linux-gcc`: Gnu C++ (linux, also reported to work for Mac OS X)
If you are building with Microsoft Visual Studio 2008, you need to set up the
environment by running `vcvars32.bat` (e.g. MSVC 2008 command prompt) before
running SCons.
## Running the tests manually
You need to run tests manually only if you are troubleshooting an issue.
In the instructions below, replace `path/to/jsontest` with the path of the
`jsontest` executable that was compiled on your platform.
cd test
# This will run the Reader/Writer tests
python runjsontests.py path/to/jsontest
# This will run the Reader/Writer tests, using JSONChecker test suite
# (http://www.json.org/JSON_checker/).
# Notes: not all tests pass: JsonCpp is too lenient (for example,
# it allows an integer to start with '0'). The goal is to improve
# strict mode parsing to get all tests to pass.
python runjsontests.py --with-json-checker path/to/jsontest
# This will run the unit tests (mostly Value)
python rununittests.py path/to/test_lib_json
# You can run the tests using valgrind:
python rununittests.py --valgrind path/to/test_lib_json
## Running the tests using scons
Note that tests can be run using SCons using the `check` target:
scons platform=$PLATFORM check
Building the documentation
--------------------------
Run the Python script `doxybuild.py` from the top directory:
python doxybuild.py --doxygen=$(which doxygen) --open --with-dot
See `doxybuild.py --help` for options.
Adding a reader/writer test
---------------------------
To add a test, you need to create two files in test/data:
* a `TESTNAME.json` file, that contains the input document in JSON format.
* a `TESTNAME.expected` file, that contains a flattened representation of the
input document.
The `TESTNAME.expected` file format is as follows:
* each line represents a JSON element of the element tree represented by the
input document.
* each line has two parts: the path to access the element separated from the
element value by `=`. Array and object values are always empty (i.e.
represented by either `[]` or `{}`).
* element path: `.` represents the root element, and is used to separate object
members. `[N]` is used to specify the value of an array element at index `N`.
See the examples `test_complex_01.json` and `test_complex_01.expected` to better
understand element paths.
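For instance, a hypothetical input document { "count": 3, "items": [10, 20] } would
be described by an `.expected` file along these lines (derived from the rules above):

.={}
.count=3
.items=[]
.items[0]=10
.items[1]=20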
Understanding reader/writer test output
---------------------------------------
When a test is run, output files are generated beside the input test files.
Below is a short description of the content of each file:
* `test_complex_01.json`: input JSON document.
* `test_complex_01.expected`: flattened JSON element tree used to check if
parsing was correct.
* `test_complex_01.actual`: flattened JSON element tree produced by `jsontest`
from reading `test_complex_01.json`.
* `test_complex_01.rewrite`: JSON document written by `jsontest` using the
`Json::Value` parsed from `test_complex_01.json` and serialized using
`Json::StyledWriter`.
* `test_complex_01.actual-rewrite`: flattened JSON element tree produced by
`jsontest` from reading `test_complex_01.rewrite`.
* `test_complex_01.process-output`: `jsontest` output, typically useful for
understanding parsing errors.
License
-------
See the `LICENSE` file for details. In summary, JsonCpp is licensed under the
MIT license, or public domain if desired and recognized in your jurisdiction.

@@ -1,248 +0,0 @@
"""
Notes:
- shared library support is buggy: it assumes that a static and dynamic library can be built from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time.
To add a platform:
- add its name in options allowed_values below
- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example.
"""
import os
import os.path
import sys
JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip()
DIST_DIR = '#dist'
options = Variables()
options.Add( EnumVariable('platform',
'Platform (compiler/stl) used to build the project',
'msvc71',
allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(),
ignorecase=2) )
try:
platform = ARGUMENTS['platform']
if platform == 'linux-gcc':
CXX = 'g++' # not quite right, but env is not yet available.
import commands
version = commands.getoutput('%s -dumpversion' %CXX)
platform = 'linux-gcc-%s' %version
print "Using platform '%s'" %platform
LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '')
LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform)
os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH
print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH
except KeyError:
print 'You must specify a "platform"'
sys.exit(2)
print "Building using PLATFORM =", platform
rootbuild_dir = Dir('#buildscons')
build_dir = os.path.join( '#buildscons', platform )
bin_dir = os.path.join( '#bin', platform )
lib_dir = os.path.join( '#libs', platform )
sconsign_dir_path = Dir(build_dir).abspath
sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' )
# Ensure build directory exist (SConsignFile fail otherwise!)
if not os.path.exists( sconsign_dir_path ):
os.makedirs( sconsign_dir_path )
# Store all dependencies signature in a database
SConsignFile( sconsign_path )
def make_environ_vars():
"""Returns a dictionnary with environment variable to use when compiling."""
# PATH is required to find the compiler
# TEMP is required for at least mingw
# LD_LIBRARY_PATH & co is required on some system for the compiler
vars = {}
for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'):
if name in os.environ:
vars[name] = os.environ[name]
return vars
env = Environment( ENV = make_environ_vars(),
toolpath = ['scons-tools'],
tools=[] ) #, tools=['default'] )
if platform == 'suncc':
env.Tool( 'sunc++' )
env.Tool( 'sunlink' )
env.Tool( 'sunar' )
env.Append( CCFLAGS = ['-mt'] )
elif platform == 'vacpp':
env.Tool( 'default' )
env.Tool( 'aixcc' )
env['CXX'] = 'xlC_r' #scons does not pick-up the correct one !
# using xlC_r ensure multi-threading is enabled:
# http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm
env.Append( CCFLAGS = '-qrtti=all',
LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning
elif platform == 'msvc6':
env['MSVS_VERSION']='6.0'
for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
env.Tool( tool )
env['CXXFLAGS']='-GR -GX /nologo /MT'
elif platform == 'msvc70':
env['MSVS_VERSION']='7.0'
for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
env.Tool( tool )
env['CXXFLAGS']='-GR -GX /nologo /MT'
elif platform == 'msvc71':
env['MSVS_VERSION']='7.1'
for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
env.Tool( tool )
env['CXXFLAGS']='-GR -GX /nologo /MT'
elif platform == 'msvc80':
env['MSVS_VERSION']='8.0'
for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
env.Tool( tool )
env['CXXFLAGS']='-GR -EHsc /nologo /MT'
elif platform == 'msvc90':
env['MSVS_VERSION']='9.0'
# Scons 1.2 fails to detect the correct location of the platform SDK.
# So we propagate those from the environment. This requires that the
# user run vcvars32.bat before compiling.
if 'INCLUDE' in os.environ:
env['ENV']['INCLUDE'] = os.environ['INCLUDE']
if 'LIB' in os.environ:
env['ENV']['LIB'] = os.environ['LIB']
for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
env.Tool( tool )
env['CXXFLAGS']='-GR -EHsc /nologo /MT'
elif platform == 'mingw':
env.Tool( 'mingw' )
env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] )
elif platform.startswith('linux-gcc'):
env.Tool( 'default' )
env.Append( LIBS = ['pthread'], CCFLAGS = os.environ.get("CXXFLAGS", "-Wall"), LINKFLAGS=os.environ.get("LDFLAGS", "") )
env['SHARED_LIB_ENABLED'] = True
else:
print "UNSUPPORTED PLATFORM."
env.Exit(1)
env.Tool('targz')
env.Tool('srcdist')
env.Tool('globtool')
env.Append( CPPPATH = ['#include'],
LIBPATH = lib_dir )
short_platform = platform
if short_platform.startswith('msvc'):
short_platform = short_platform[2:]
# Notes: on Windows you need to rebuild the source for each variant
# Build script does not support that yet so we only build static libraries.
# This also fails on AIX because both dynamic and static library ends with
# extension .a.
env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False)
env['LIB_PLATFORM'] = short_platform
env['LIB_LINK_TYPE'] = 'lib' # static
env['LIB_CRUNTIME'] = 'mt'
env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention
env['JSONCPP_VERSION'] = JSONCPP_VERSION
env['BUILD_DIR'] = env.Dir(build_dir)
env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir)
env['DIST_DIR'] = DIST_DIR
if 'TarGz' in env['BUILDERS']:
class SrcDistAdder:
def __init__( self, env ):
self.env = env
def __call__( self, *args, **kw ):
apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw )
env['SRCDIST_BUILDER'] = env.TarGz
else: # If tarfile module is missing
class SrcDistAdder:
def __init__( self, env ):
pass
def __call__( self, *args, **kw ):
pass
env['SRCDIST_ADD'] = SrcDistAdder( env )
env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] )
env_testing = env.Clone( )
env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] )
def buildJSONExample( env, target_sources, target_name ):
env = env.Clone()
env.Append( CPPPATH = ['#'] )
exe = env.Program( target=target_name,
source=target_sources )
env['SRCDIST_ADD']( source=[target_sources] )
global bin_dir
return env.Install( bin_dir, exe )
def buildJSONTests( env, target_sources, target_name ):
jsontests_node = buildJSONExample( env, target_sources, target_name )
check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) )
env.AlwaysBuild( check_alias_target )
def buildUnitTests( env, target_sources, target_name ):
jsontests_node = buildJSONExample( env, target_sources, target_name )
check_alias_target = env.Alias( 'check', jsontests_node,
RunUnitTests( jsontests_node, jsontests_node ) )
env.AlwaysBuild( check_alias_target )
def buildLibrary( env, target_sources, target_name ):
static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}',
source=target_sources )
global lib_dir
env.Install( lib_dir, static_lib )
if env['SHARED_LIB_ENABLED']:
shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}',
source=target_sources )
env.Install( lib_dir, shared_lib )
env['SRCDIST_ADD']( source=[target_sources] )
Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' )
def buildProjectInDirectory( target_directory ):
global build_dir
target_build_dir = os.path.join( build_dir, target_directory )
target = os.path.join( target_directory, 'sconscript' )
SConscript( target, build_dir=target_build_dir, duplicate=0 )
env['SRCDIST_ADD']( source=[target] )
def runJSONTests_action( target, source = None, env = None ):
# Add test scripts to python path
jsontest_path = Dir( '#test' ).abspath
sys.path.insert( 0, jsontest_path )
data_path = os.path.join( jsontest_path, 'data' )
import runjsontests
return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path )
def runJSONTests_string( target, source = None, env = None ):
return 'RunJSONTests("%s")' % source[0]
import SCons.Action
ActionFactory = SCons.Action.ActionFactory
RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string )
def runUnitTests_action( target, source = None, env = None ):
# Add test scripts to python path
jsontest_path = Dir( '#test' ).abspath
sys.path.insert( 0, jsontest_path )
import rununittests
return rununittests.runAllTests( os.path.abspath(source[0].path) )
def runUnitTests_string( target, source = None, env = None ):
return 'RunUnitTests("%s")' % source[0]
RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string )
env.Alias( 'check' )
srcdist_cmd = env['SRCDIST_ADD']( source = """
AUTHORS README.md SConstruct
""".split() )
env.Alias( 'src-dist', srcdist_cmd )
buildProjectInDirectory( 'src/jsontestrunner' )
buildProjectInDirectory( 'src/lib_json' )
buildProjectInDirectory( 'src/test_lib_json' )
#print env.Dump()

@@ -1,154 +0,0 @@
"""Amalgate json-cpp library sources into a single source and header file.
Works with python2.6+ and python3.4+.
Example of invocation (must be invoked from json-cpp top directory):
python amalgate.py
"""
import os
import os.path
import sys
class AmalgamationFile:
def __init__(self, top_dir):
self.top_dir = top_dir
self.blocks = []
def add_text(self, text):
if not text.endswith("\n"):
text += "\n"
self.blocks.append(text)
def add_file(self, relative_input_path, wrap_in_comment=False):
def add_marker(prefix):
self.add_text("")
self.add_text("// " + "/"*70)
self.add_text("// %s of content of file: %s" % (prefix, relative_input_path.replace("\\","/")))
self.add_text("// " + "/"*70)
self.add_text("")
add_marker("Beginning")
f = open(os.path.join(self.top_dir, relative_input_path), "rt")
content = f.read()
if wrap_in_comment:
content = "/*\n" + content + "\n*/"
self.add_text(content)
f.close()
add_marker("End")
self.add_text("\n\n\n\n")
def get_value(self):
return "".join(self.blocks).replace("\r\n","\n")
def write_to(self, output_path):
output_dir = os.path.dirname(output_path)
if output_dir and not os.path.isdir(output_dir):
os.makedirs(output_dir)
f = open(output_path, "wb")
f.write(str.encode(self.get_value(), 'UTF-8'))
f.close()
def amalgamate_source(source_top_dir=None,
target_source_path=None,
header_include_path=None):
"""Produces amalgated source.
Parameters:
source_top_dir: top-directory
target_source_path: output .cpp path
header_include_path: generated header path relative to target_source_path.
"""
print("Amalgating header...")
header = AmalgamationFile(source_top_dir)
header.add_text("/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).")
header.add_text('/// It is intended to be used with #include "%s"' % header_include_path)
header.add_file("LICENSE", wrap_in_comment=True)
header.add_text("#ifndef JSON_AMALGATED_H_INCLUDED")
header.add_text("# define JSON_AMALGATED_H_INCLUDED")
header.add_text("/// If defined, indicates that the source file is amalgated")
header.add_text("/// to prevent private header inclusion.")
header.add_text("#define JSON_IS_AMALGAMATION")
header.add_file("include/json/version.h")
header.add_file("include/json/config.h")
header.add_file("include/json/forwards.h")
header.add_file("include/json/features.h")
header.add_file("include/json/value.h")
header.add_file("include/json/reader.h")
header.add_file("include/json/writer.h")
header.add_file("include/json/assertions.h")
header.add_text("#endif //ifndef JSON_AMALGATED_H_INCLUDED")
target_header_path = os.path.join(os.path.dirname(target_source_path), header_include_path)
print("Writing amalgated header to %r" % target_header_path)
header.write_to(target_header_path)
base, ext = os.path.splitext(header_include_path)
forward_header_include_path = base + "-forwards" + ext
print("Amalgating forward header...")
header = AmalgamationFile(source_top_dir)
header.add_text("/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).")
header.add_text('/// It is intended to be used with #include "%s"' % forward_header_include_path)
header.add_text("/// This header provides forward declaration for all JsonCpp types.")
header.add_file("LICENSE", wrap_in_comment=True)
header.add_text("#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED")
header.add_text("# define JSON_FORWARD_AMALGATED_H_INCLUDED")
header.add_text("/// If defined, indicates that the source file is amalgated")
header.add_text("/// to prevent private header inclusion.")
header.add_text("#define JSON_IS_AMALGAMATION")
header.add_file("include/json/config.h")
header.add_file("include/json/forwards.h")
header.add_text("#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED")
target_forward_header_path = os.path.join(os.path.dirname(target_source_path),
forward_header_include_path)
print("Writing amalgated forward header to %r" % target_forward_header_path)
header.write_to(target_forward_header_path)
print("Amalgating source...")
source = AmalgamationFile(source_top_dir)
source.add_text("/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).")
source.add_text('/// It is intended to be used with #include "%s"' % header_include_path)
source.add_file("LICENSE", wrap_in_comment=True)
source.add_text("")
source.add_text('#include "%s"' % header_include_path)
source.add_text("""
#ifndef JSON_IS_AMALGAMATION
#error "Compile with -I PATH_TO_JSON_DIRECTORY"
#endif
""")
source.add_text("")
lib_json = "src/lib_json"
source.add_file(os.path.join(lib_json, "json_tool.h"))
source.add_file(os.path.join(lib_json, "json_reader.cpp"))
source.add_file(os.path.join(lib_json, "json_valueiterator.inl"))
source.add_file(os.path.join(lib_json, "json_value.cpp"))
source.add_file(os.path.join(lib_json, "json_writer.cpp"))
print("Writing amalgated source to %r" % target_source_path)
source.write_to(target_source_path)
def main():
usage = """%prog [options]
Generate a single amalgated source and header file from the sources.
"""
from optparse import OptionParser
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option("-s", "--source", dest="target_source_path", action="store", default="dist/jsoncpp.cpp",
help="""Output .cpp source path. [Default: %default]""")
parser.add_option("-i", "--include", dest="header_include_path", action="store", default="json/json.h",
help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""")
parser.add_option("-t", "--top-dir", dest="top_dir", action="store", default=os.getcwd(),
help="""Source top-directory. [Default: %default]""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
msg = amalgamate_source(source_top_dir=options.top_dir,
target_source_path=options.target_source_path,
header_include_path=options.header_include_path)
if msg:
sys.stderr.write(msg + "\n")
sys.exit(1)
else:
print("Source succesfully amalagated")
if __name__ == "__main__":
main()

@@ -1,34 +0,0 @@
# This is a comment.
version: build.{build}
os: Windows Server 2012 R2
clone_folder: c:\projects\jsoncpp
platform:
- Win32
- x64
configuration:
- Debug
- Release
# scripts to run before build
before_build:
- echo "Running cmake..."
- cd c:\projects\jsoncpp
- cmake --version
- if %PLATFORM% == Win32 cmake .
- if %PLATFORM% == x64 cmake -G "Visual Studio 12 2013 Win64" .
build:
project: jsoncpp.sln # path to Visual Studio solution or project
deploy:
provider: GitHub
auth_token:
secure: K2Tp1q8pIZ7rs0Ot24ZMWuwr12Ev6Tc6QkhMjGQxoQG3ng1pXtgPasiJ45IDXGdg
on:
branch: master
appveyor_repo_tag: true

@@ -1,35 +0,0 @@
# This is only for jsoncpp developers/contributors.
# We use this to sign releases, generate documentation, etc.
VER?=$(shell cat version)
default:
@echo "VER=${VER}"
sign: jsoncpp-${VER}.tar.gz
gpg --armor --detach-sign $<
gpg --verify $<.asc
# Then upload .asc to the release.
jsoncpp-%.tar.gz:
curl https://github.com/open-source-parsers/jsoncpp/archive/$*.tar.gz -o $@
dox:
python doxybuild.py --doxygen=$$(which doxygen) --in doc/web_doxyfile.in
rsync -va --delete dist/doxygen/jsoncpp-api-html-${VER}/ ../jsoncpp-docs/doxygen/
# Then 'git add -A' and 'git push' in jsoncpp-docs.
build:
mkdir -p build/debug
cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_SHARED_LIBS=ON -G "Unix Makefiles" ../..
make -C build/debug
# Currently, this depends on include/json/version.h generated
# by cmake.
test-amalgamate:
python2.7 amalgamate.py
python3.4 amalgamate.py
cd dist; gcc -I. -c jsoncpp.cpp
valgrind:
valgrind --error-exitcode=42 --leak-check=full ./build/debug/src/test_lib_json/jsoncpp_test
clean:
\rm -rf *.gz *.asc dist/
.PHONY: build

@@ -1,6 +0,0 @@
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
# module

@@ -1,33 +0,0 @@
{
"cmake_variants" : [
{"name": "generator",
"generators": [
{"generator": [
"Visual Studio 7 .NET 2003",
"Visual Studio 9 2008",
"Visual Studio 9 2008 Win64",
"Visual Studio 10",
"Visual Studio 10 Win64",
"Visual Studio 11",
"Visual Studio 11 Win64"
]
},
{"generator": ["MinGW Makefiles"],
"env_prepend": [{"path": "c:/wut/prg/MinGW/bin"}]
}
]
},
{"name": "shared_dll",
"variables": [
["BUILD_SHARED_LIBS=true"],
["BUILD_SHARED_LIBS=false"]
]
},
{"name": "build_type",
"build_types": [
"debug",
"release"
]
}
]
}

@@ -1,26 +0,0 @@
{
"cmake_variants" : [
{"name": "generator",
"generators": [
{"generator": [
"Visual Studio 6",
"Visual Studio 7",
"Visual Studio 8 2005"
]
}
]
},
{"name": "shared_dll",
"variables": [
["BUILD_SHARED_LIBS=true"],
["BUILD_SHARED_LIBS=false"]
]
},
{"name": "build_type",
"build_types": [
"debug",
"release"
]
}
]
}

@@ -1,205 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Copyright 2009 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
from __future__ import print_function
from dircache import listdir
import re
import fnmatch
import os.path
# These fnmatch expressions are used by default to prune the directory tree
# while doing the recursive traversal in the glob_impl method of glob function.
prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS '
# These fnmatch expressions are used by default to exclude files and dirs
# while doing the recursive traversal in the glob_impl method of glob function.
##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree
# while doing the recursive traversal in the glob_impl method of glob function.
default_excludes = '''
**/*~
**/#*#
**/.#*
**/%*%
**/._*
**/CVS
**/CVS/**
**/.cvsignore
**/SCCS
**/SCCS/**
**/vssver.scc
**/.svn
**/.svn/**
**/.git
**/.git/**
**/.gitignore
**/.bzr
**/.bzr/**
**/.hg
**/.hg/**
**/_MTN
**/_MTN/**
**/_darcs
**/_darcs/**
**/.DS_Store '''
DIR = 1
FILE = 2
DIR_LINK = 4
FILE_LINK = 8
LINKS = DIR_LINK | FILE_LINK
ALL_NO_LINK = DIR | FILE
ALL = DIR | FILE | LINKS
_ANT_RE = re.compile(r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)')
def ant_pattern_to_re(ant_pattern):
"""Generates a regular expression from the ant pattern.
Matching convention:
**/a: match 'a', 'dir/a', 'dir1/dir2/a'
a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b'
*.py: match 'script.py' but not 'a/script.py'
"""
rex = ['^']
next_pos = 0
sep_rex = r'(?:/|%s)' % re.escape(os.path.sep)
## print 'Converting', ant_pattern
for match in _ANT_RE.finditer(ant_pattern):
## print 'Matched', match.group()
## print match.start(0), next_pos
if match.start(0) != next_pos:
raise ValueError("Invalid ant pattern")
if match.group(1): # /**/
rex.append(sep_rex + '(?:.*%s)?' % sep_rex)
elif match.group(2): # **/
rex.append('(?:.*%s)?' % sep_rex)
elif match.group(3): # /**
rex.append(sep_rex + '.*')
elif match.group(4): # *
rex.append('[^/%s]*' % re.escape(os.path.sep))
elif match.group(5): # /
rex.append(sep_rex)
else: # somepath
rex.append(re.escape(match.group(6)))
next_pos = match.end()
rex.append('$')
return re.compile(''.join(rex))
def _as_list(l):
if isinstance(l, basestring):
return l.split()
return l
def glob(dir_path,
includes = '**/*',
excludes = default_excludes,
entry_type = FILE,
prune_dirs = prune_dirs,
max_depth = 25):
include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)]
exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)]
prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)]
dir_path = dir_path.replace('/',os.path.sep)
entry_type_filter = entry_type
def is_pruned_dir(dir_name):
for pattern in prune_dirs:
if fnmatch.fnmatch(dir_name, pattern):
return True
return False
def apply_filter(full_path, filter_rexs):
"""Return True if at least one of the filter regular expression match full_path."""
for rex in filter_rexs:
if rex.match(full_path):
return True
return False
def glob_impl(root_dir_path):
child_dirs = [root_dir_path]
while child_dirs:
dir_path = child_dirs.pop()
for entry in listdir(dir_path):
full_path = os.path.join(dir_path, entry)
## print 'Testing:', full_path,
is_dir = os.path.isdir(full_path)
if is_dir and not is_pruned_dir(entry): # explore child directory ?
## print '===> marked for recursion',
child_dirs.append(full_path)
included = apply_filter(full_path, include_filter)
rejected = apply_filter(full_path, exclude_filter)
if not included or rejected: # do not include entry ?
## print '=> not included or rejected'
continue
link = os.path.islink(full_path)
is_file = os.path.isfile(full_path)
if not is_file and not is_dir:
## print '=> unknown entry type'
continue
if link:
entry_type = is_file and FILE_LINK or DIR_LINK
else:
entry_type = is_file and FILE or DIR
## print '=> type: %d' % entry_type,
if (entry_type & entry_type_filter) != 0:
## print ' => KEEP'
yield os.path.join(dir_path, entry)
## else:
## print ' => TYPE REJECTED'
return list(glob_impl(dir_path))
if __name__ == "__main__":
import unittest
class AntPatternToRETest(unittest.TestCase):
## def test_conversion(self):
## self.assertEqual('^somepath$', ant_pattern_to_re('somepath').pattern)
def test_matching(self):
test_cases = [ ('path',
['path'],
['somepath', 'pathsuffix', '/path', '/path']),
('*.py',
['source.py', 'source.ext.py', '.py'],
['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c']),
('**/path',
['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath']),
('path/**',
['path/a', 'path/path/a', 'path//'],
['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a']),
('/**/path',
['/path', '/a/path', '/a/b/path/path', '/path/path'],
['path', 'path/', 'a/path', '/pathsuffix', '/somepath']),
('a/b',
['a/b'],
['somea/b', 'a/bsuffix', 'a/b/c']),
('**/*.py',
['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
['script.pyc', 'script.pyo', 'a.py/b']),
('src/**/*.py',
['src/a.py', 'src/dir/a.py'],
['a/src/a.py', '/src/a.py']),
]
for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
def local_path(paths):
return [ p.replace('/',os.path.sep) for p in paths ]
test_cases.append((ant_pattern, local_path(accepted_matches), local_path(rejected_matches)))
for ant_pattern, accepted_matches, rejected_matches in test_cases:
rex = ant_pattern_to_re(ant_pattern)
print('ant_pattern:', ant_pattern, ' => ', rex.pattern)
for accepted_match in accepted_matches:
print('Accepted?:', accepted_match)
self.assertTrue(rex.match(accepted_match) is not None)
for rejected_match in rejected_matches:
print('Rejected?:', rejected_match)
self.assertTrue(rex.match(rejected_match) is None)
unittest.main()

@@ -1,278 +0,0 @@
from __future__ import print_function
import collections
import itertools
import json
import os
import os.path
import re
import shutil
import string
import subprocess
import sys
import cgi
class BuildDesc:
def __init__(self, prepend_envs=None, variables=None, build_type=None, generator=None):
self.prepend_envs = prepend_envs or [] # [ { "var": "value" } ]
self.variables = variables or []
self.build_type = build_type
self.generator = generator
def merged_with(self, build_desc):
"""Returns a new BuildDesc by merging field content.
Prefer build_desc fields to self fields for single valued field.
"""
return BuildDesc(self.prepend_envs + build_desc.prepend_envs,
self.variables + build_desc.variables,
build_desc.build_type or self.build_type,
build_desc.generator or self.generator)
def env(self):
environ = os.environ.copy()
for values_by_name in self.prepend_envs:
for var, value in list(values_by_name.items()):
var = var.upper()
if type(value) is unicode:
value = value.encode(sys.getdefaultencoding())
if var in environ:
environ[var] = value + os.pathsep + environ[var]
else:
environ[var] = value
return environ
def cmake_args(self):
args = ["-D%s" % var for var in self.variables]
# skip build type for Visual Studio solutions as it causes a warning
if self.build_type and 'Visual' not in self.generator:
args.append("-DCMAKE_BUILD_TYPE=%s" % self.build_type)
if self.generator:
args.extend(['-G', self.generator])
return args
def __repr__(self):
return "BuildDesc(%s, build_type=%s)" % (" ".join(self.cmake_args()), self.build_type)
class BuildData:
def __init__(self, desc, work_dir, source_dir):
self.desc = desc
self.work_dir = work_dir
self.source_dir = source_dir
self.cmake_log_path = os.path.join(work_dir, 'batchbuild_cmake.log')
self.build_log_path = os.path.join(work_dir, 'batchbuild_build.log')
self.cmake_succeeded = False
self.build_succeeded = False
def execute_build(self):
print('Build %s' % self.desc)
self._make_new_work_dir()
self.cmake_succeeded = self._generate_makefiles()
if self.cmake_succeeded:
self.build_succeeded = self._build_using_makefiles()
return self.build_succeeded
def _generate_makefiles(self):
print(' Generating makefiles: ', end=' ')
cmd = ['cmake'] + self.desc.cmake_args() + [os.path.abspath(self.source_dir)]
succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.cmake_log_path)
print('done' if succeeded else 'FAILED')
return succeeded
def _build_using_makefiles(self):
print(' Building:', end=' ')
cmd = ['cmake', '--build', self.work_dir]
if self.desc.build_type:
cmd += ['--config', self.desc.build_type]
succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.build_log_path)
print('done' if succeeded else 'FAILED')
return succeeded
def _execute_build_subprocess(self, cmd, env, log_path):
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
env=env)
stdout, _ = process.communicate()
succeeded = (process.returncode == 0)
with open(log_path, 'wb') as flog:
log = ' '.join(cmd) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
flog.write(fix_eol(log))
return succeeded
def _make_new_work_dir(self):
if os.path.isdir(self.work_dir):
print(' Removing work directory', self.work_dir)
shutil.rmtree(self.work_dir, ignore_errors=True)
if not os.path.isdir(self.work_dir):
os.makedirs(self.work_dir)
def fix_eol(stdout):
"""Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n).
"""
return re.sub('\r*\n', os.linesep, stdout)
def load_build_variants_from_config(config_path):
with open(config_path, 'rb') as fconfig:
data = json.load(fconfig)
variants = data[ 'cmake_variants' ]
build_descs_by_axis = collections.defaultdict(list)
for axis in variants:
axis_name = axis["name"]
build_descs = []
if "generators" in axis:
for generator_data in axis["generators"]:
for generator in generator_data["generator"]:
build_desc = BuildDesc(generator=generator,
prepend_envs=generator_data.get("env_prepend"))
build_descs.append(build_desc)
elif "variables" in axis:
for variables in axis["variables"]:
build_desc = BuildDesc(variables=variables)
build_descs.append(build_desc)
elif "build_types" in axis:
for build_type in axis["build_types"]:
build_desc = BuildDesc(build_type=build_type)
build_descs.append(build_desc)
build_descs_by_axis[axis_name].extend(build_descs)
return build_descs_by_axis
def generate_build_variants(build_descs_by_axis):
"""Returns a list of BuildDesc generated for the partial BuildDesc for each axis."""
axis_names = list(build_descs_by_axis.keys())
build_descs = []
for axis_name, axis_build_descs in list(build_descs_by_axis.items()):
if len(build_descs):
# for each existing build_desc and each axis build desc, create a new build_desc
new_build_descs = []
for prototype_build_desc, axis_build_desc in itertools.product(build_descs, axis_build_descs):
new_build_descs.append(prototype_build_desc.merged_with(axis_build_desc))
build_descs = new_build_descs
else:
build_descs = axis_build_descs
return build_descs
HTML_TEMPLATE = string.Template('''<html>
<head>
<title>$title</title>
<style type="text/css">
td.failed {background-color:#f08080;}
td.ok {background-color:#c0eec0;}
</style>
</head>
<body>
<table border="1">
<thead>
<tr>
<th>Variables</th>
$th_vars
</tr>
<tr>
<th>Build type</th>
$th_build_types
</tr>
</thead>
<tbody>
$tr_builds
</tbody>
</table>
</body></html>''')
def generate_html_report(html_report_path, builds):
report_dir = os.path.dirname(html_report_path)
# Vertical axis: generator
# Horizontal: variables, then build_type
builds_by_generator = collections.defaultdict(list)
variables = set()
build_types_by_variable = collections.defaultdict(set)
build_by_pos_key = {} # { (generator, var_key, build_type): build }
for build in builds:
builds_by_generator[build.desc.generator].append(build)
var_key = tuple(sorted(build.desc.variables))
variables.add(var_key)
build_types_by_variable[var_key].add(build.desc.build_type)
pos_key = (build.desc.generator, var_key, build.desc.build_type)
build_by_pos_key[pos_key] = build
variables = sorted(variables)
th_vars = []
th_build_types = []
for variable in variables:
build_types = sorted(build_types_by_variable[variable])
nb_build_type = len(build_types_by_variable[variable])
th_vars.append('<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape(' '.join(variable))))
for build_type in build_types:
th_build_types.append('<th>%s</th>' % cgi.escape(build_type))
tr_builds = []
for generator in sorted(builds_by_generator):
tds = [ '<td>%s</td>\n' % cgi.escape(generator) ]
for variable in variables:
build_types = sorted(build_types_by_variable[variable])
for build_type in build_types:
pos_key = (generator, variable, build_type)
build = build_by_pos_key.get(pos_key)
if build:
cmake_status = 'ok' if build.cmake_succeeded else 'FAILED'
build_status = 'ok' if build.build_succeeded else 'FAILED'
cmake_log_url = os.path.relpath(build.cmake_log_path, report_dir)
build_log_url = os.path.relpath(build.build_log_path, report_dir)
td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % ( build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
if build.cmake_succeeded:
td += '<br><a href="%s" class="%s">Build: %s</a>' % ( build_log_url, build_status.lower(), build_status)
td += '</td>'
else:
td = '<td></td>'
tds.append(td)
tr_builds.append('<tr>%s</tr>' % '\n'.join(tds))
html = HTML_TEMPLATE.substitute( title='Batch build report',
th_vars=' '.join(th_vars),
th_build_types=' '.join(th_build_types),
tr_builds='\n'.join(tr_builds))
with open(html_report_path, 'wt') as fhtml:
fhtml.write(html)
print('HTML report generated in:', html_report_path)
def main():
usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH [CONFIG2_JSON_PATH...]
Build a given CMake-based project located in SOURCE_DIR with multiple generators/options,
as described in CONFIG_JSON_PATH, building in WORK_DIR.
Example of call:
python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.json
"""
from optparse import OptionParser
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = True
# parser.add_option('-v', '--verbose', dest="verbose", action='store_true',
# help="""Be verbose.""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
if len(args) < 3:
parser.error("Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH.")
work_dir = args[0]
source_dir = args[1].rstrip('/\\')
config_paths = args[2:]
for config_path in config_paths:
if not os.path.isfile(config_path):
parser.error("Can not read: %r" % config_path)
# generate build variants
build_descs = []
for config_path in config_paths:
build_descs_by_axis = load_build_variants_from_config(config_path)
build_descs.extend(generate_build_variants(build_descs_by_axis))
print('Build variants (%d):' % len(build_descs))
# assign build directory for each variant
if not os.path.isdir(work_dir):
os.makedirs(work_dir)
builds = []
with open(os.path.join(work_dir, 'matrix-dir-map.txt'), 'wt') as fmatrixmap:
for index, build_desc in enumerate(build_descs):
build_desc_work_dir = os.path.join(work_dir, '%03d' % (index+1))
builds.append(BuildData(build_desc, build_desc_work_dir, source_dir))
fmatrixmap.write('%s: %s\n' % (build_desc_work_dir, build_desc))
for build in builds:
build.execute_build()
html_report_path = os.path.join(work_dir, 'batchbuild-report.html')
generate_html_report(html_report_path, builds)
print('Done')
if __name__ == '__main__':
main()

View File

@ -1,69 +0,0 @@
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
from __future__ import print_function
import os.path
import sys
def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
"""Makes sure that all sources have the specified eol sequence (default: unix)."""
if not os.path.isfile(path):
raise ValueError('Path "%s" is not a file' % path)
try:
f = open(path, 'rb')
except IOError as msg:
print("%s: I/O Error: %s" % (file, str(msg)), file=sys.stderr)
return False
try:
raw_lines = f.readlines()
finally:
f.close()
fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
if raw_lines != fixed_lines:
print('%s =>' % path, end=' ')
if not is_dry_run:
f = open(path, "wb")
try:
f.writelines(fixed_lines)
finally:
f.close()
if verbose:
print(is_dry_run and ' NEED FIX' or ' FIXED')
return True
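# Illustrative dry-run on a single file (hypothetical path):
#   fix_source_eol('src/lib_json/json_reader.cpp', is_dry_run=True)
# prints the path (followed by ' NEED FIX') when the file's line endings would
# change, and returns False only if the file could not be opened.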
##
##
##
##def _do_fix(is_dry_run = True):
## from waftools import antglob
## python_sources = antglob.glob('.',
## includes = '**/*.py **/wscript **/wscript_build',
## excludes = antglob.default_excludes + './waf.py',
## prune_dirs = antglob.prune_dirs + 'waf-* ./build')
## for path in python_sources:
## _fix_python_source(path, is_dry_run)
##
## cpp_sources = antglob.glob('.',
## includes = '**/*.cpp **/*.h **/*.inl',
## prune_dirs = antglob.prune_dirs + 'waf-* ./build')
## for path in cpp_sources:
## _fix_source_eol(path, is_dry_run)
##
##
##def dry_fix(context):
## _do_fix(is_dry_run = True)
##
##def fix(context):
## _do_fix(is_dry_run = False)
##
##def shutdown():
## pass
##
##def check(context):
## # Unit tests are run when "check" target is used
## ut = UnitTest.unit_test()
## ut.change_to_testfile_dir = True
## ut.want_to_see_test_output = True
## ut.want_to_see_test_error = True
## ut.run()
## ut.print_results()

View File

@ -1,94 +0,0 @@
"""Updates the license text in source file.
"""
from __future__ import print_function
# An existing license is found if the file starts with the string below,
# and ends with the first blank line.
LICENSE_BEGIN = "// Copyright "
BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
""".replace('\r\n','\n')
def update_license(path, dry_run, show_diff):
"""Update the license statement in the specified file.
Parameters:
path: path of the C++ source file to update.
dry_run: if True, just print the path of the file that would be updated,
but don't change it.
show_diff: if True, print the path of the file that would be modified,
as well as the change made to the file.
"""
with open(path, 'rt') as fin:
original_text = fin.read().replace('\r\n','\n')
newline = fin.newlines and fin.newlines[0] or '\n'
if not original_text.startswith(LICENSE_BEGIN):
# No existing license found => prepend it
new_text = BRIEF_LICENSE + original_text
else:
license_end_index = original_text.index('\n\n') # search first blank line
new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
if original_text != new_text:
if not dry_run:
with open(path, 'wb') as fout:
fout.write(new_text.replace('\n', newline))
print('Updated', path)
if show_diff:
import difflib
print('\n'.join(difflib.unified_diff(original_text.split('\n'),
new_text.split('\n'))))
return True
return False
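# Illustrative dry-run on a single file (hypothetical path):
#   update_license('src/lib_json/json_value.cpp', dry_run=True, show_diff=False)
# returns True when the license header at the top of the file would be rewritten.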
def update_license_in_source_directories(source_dirs, dry_run, show_diff):
"""Updates license text in C++ source files found in directory source_dirs.
Parameters:
source_dirs: list of directory to scan for C++ sources. Directories are
scanned recursively.
dry_run: if True, just print the path of the file that would be updated,
but don't change it.
show_diff: if True, print the path of the file that would be modified,
as well as the change made to the file.
"""
from devtools import antglob
prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
for source_dir in source_dirs:
cpp_sources = antglob.glob(source_dir,
includes = '''**/*.h **/*.cpp **/*.inl''',
prune_dirs = prune_dirs)
for source in cpp_sources:
update_license(source, dry_run, show_diff)
def main():
usage = """%prog DIR [DIR2...]
Updates the license text in the project's source files found
in the directories specified on the command-line.
Example of call:
python devtools\licenseupdater.py include src -n --diff
=> Show change that would be made to the sources.
python devtools\licenseupdater.py include src
=> Update license statement on all sources in directories include/ and src/.
"""
from optparse import OptionParser
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False,
help="""Only show what files are updated, do not update the files""")
parser.add_option('--diff', dest="show_diff", action='store_true', default=False,
help="""On update, show change made to the file.""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
update_license_in_source_directories(args, options.dry_run, options.show_diff)
print('Done')
if __name__ == '__main__':
import sys
import os.path
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
main()

View File

@ -1,52 +0,0 @@
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
from contextlib import closing
import os
import tarfile
TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
"""Parameters:
tarball_path: output path of the .tar.gz file
sources: list of sources to include in the tarball, relative to the current directory
base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
from its path in the tarball.
prefix_dir: all files stored in the tarball are placed under prefix_dir. Set to ''
to place them at the root of the archive.
"""
base_dir = os.path.normpath(os.path.abspath(base_dir))
def archive_name(path):
"""Makes path relative to base_dir."""
path = os.path.normpath(os.path.abspath(path))
common_path = os.path.commonprefix((base_dir, path))
archive_name = path[len(common_path):]
if os.path.isabs(archive_name):
archive_name = archive_name[1:]
return os.path.join(prefix_dir, archive_name)
def visit(tar, dirname, names):
for name in names:
path = os.path.join(dirname, name)
if os.path.isfile(path):
path_in_tar = archive_name(path)
tar.add(path, path_in_tar)
compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
with closing(tarfile.TarFile.open(tarball_path, 'w:gz',
compresslevel=compression)) as tar:
for source in sources:
source_path = source
if os.path.isdir(source):
for dirpath, dirnames, filenames in os.walk(source_path):
visit(tar, dirpath, filenames)
else:
path_in_tar = archive_name(source_path)
tar.add(source_path, path_in_tar) # filename, arcname
def decompress(tarball_path, base_dir):
"""Decompress the gzipped tarball into directory base_dir.
"""
with closing(tarfile.TarFile.open(tarball_path)) as tar:
tar.extractall(base_dir)

File diff suppressed because it is too large

View File

@ -1,3 +0,0 @@
<hr>
</body>
</html>

View File

@ -1,24 +0,0 @@
<html>
<head>
<title>
JsonCpp - JSON data format manipulation library
</title>
<link href="doxygen.css" rel="stylesheet" type="text/css">
<link href="tabs.css" rel="stylesheet" type="text/css">
</head>
<body bgcolor="#ffffff">
<table width="100%">
<tr>
<td width="40%" align="left" valign="center">
<a href="https://github.com/open-source-parsers/jsoncpp">
JsonCpp project page
</a>
</td>
<td width="40%" align="right" valign="center">
<a href="http://open-source-parsers.github.io/jsoncpp-docs/doxygen/">JsonCpp home page</a>
</td>
</tr>
</table>
<hr>

View File

@ -1,164 +0,0 @@
/**
\mainpage
\section _intro Introduction
<a HREF="http://www.json.org/">JSON (JavaScript Object Notation)</a>
is a lightweight data-interchange format.
Here is an example of JSON data:
\verbatim
{
"encoding" : "UTF-8",
"plug-ins" : [
"python",
"c++",
"ruby"
],
"indent" : { "length" : 3, "use_space": true }
}
\endverbatim
<b>JsonCpp</b> supports comments as <i>meta-data</i>:
\code
// Configuration options
{
// Default encoding for text
"encoding" : "UTF-8",
// Plug-ins loaded at start-up
"plug-ins" : [
"python",
"c++", // trailing comment
"ruby"
],
// Tab indent size
// (multi-line comment)
"indent" : { /*embedded comment*/ "length" : 3, "use_space": true }
}
\endcode
\section _features Features
- read and write JSON documents
- attach C++-style comments to elements during parsing
- rewrite JSON documents preserving the original comments
Notes: Comments used to be supported in JSON but were removed for
portability (C-like comments are not supported in Python, for example). Since
comments are useful in configuration/input files, this feature was
preserved.
\section _example Code example
\code
Json::Value root; // 'root' will contain the root value after parsing.
std::cin >> root;
// You can also read into a particular sub-value.
std::cin >> root["subtree"];
// Get the value of the member of root named 'encoding',
// and return 'UTF-8' if there is no such member.
std::string encoding = root.get("encoding", "UTF-8" ).asString();
// Get the value of the member of root named 'plug-ins'; return a 'null' value if
// there is no such member.
const Json::Value plugins = root["plug-ins"];
// Iterate over the sequence elements.
for ( int index = 0; index < plugins.size(); ++index )
loadPlugIn( plugins[index].asString() );
// Try other datatypes. Some are auto-convertible to others.
foo::setIndentLength( root["indent"].get("length", 3).asInt() );
foo::setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
// Since Json::Value has an implicit constructor for all value types, it is not
// necessary to explicitly construct the Json::Value object.
root["encoding"] = foo::getCurrentEncoding();
root["indent"]["length"] = foo::getCurrentIndentLength();
root["indent"]["use_space"] = foo::getCurrentIndentUseSpace();
// If you like the defaults, you can insert directly into a stream.
std::cout << root;
// Of course, you can write to `std::ostringstream` if you prefer.
// If desired, remember to add a linefeed and flush.
std::cout << std::endl;
\endcode
\section _advanced Advanced usage
Configure *builders* to create *readers* and *writers*. For
configuration, we use our own `Json::Value` (rather than
standard setters/getters) so that we can add
features without losing binary-compatibility.
\code
// For convenience, use `writeString()` with a specialized builder.
Json::StreamWriterBuilder wbuilder;
wbuilder["indentation"] = "\t";
std::string document = Json::writeString(wbuilder, root);
// Here, using a specialized Builder, we discard comments and
// record errors as we parse.
Json::CharReaderBuilder rbuilder;
rbuilder["collectComments"] = false;
std::string errs;
bool ok = Json::parseFromStream(rbuilder, std::cin, &root, &errs);
\endcode
Yes, compile-time configuration-checking would be helpful,
but `Json::Value` lets you
write and read the builder configuration, which is better! In other words,
you can configure your JSON parser using JSON.
CharReaders and StreamWriters are not thread-safe, but they are re-usable.
\code
Json::CharReaderBuilder rbuilder;
cfg >> rbuilder.settings_;
std::unique_ptr<Json::CharReader> const reader(rbuilder.newCharReader());
reader->parse(start, stop, &value1, &errs);
// ...
reader->parse(start, stop, &value2, &errs);
// etc.
\endcode
\section _pbuild Build instructions
The build instructions are located in the file
<a HREF="https://github.com/open-source-parsers/jsoncpp/blob/master/README.md">README.md</a> in the top-directory of the project.
The latest version of the source is available in the project's GitHub repository:
<a HREF="https://github.com/open-source-parsers/jsoncpp/">
jsoncpp</a>
\section _news What's New?
The description of latest changes can be found in
<a HREF="https://github.com/open-source-parsers/jsoncpp/wiki/NEWS">
the NEWS wiki
</a>.
\section _rlinks Related links
- <a HREF="http://www.json.org/">JSON</a> Specification and alternate language implementations.
- <a HREF="http://www.yaml.org/">YAML</a> A data format designed for human readability.
- <a HREF="http://www.cl.cam.ac.uk/~mgk25/unicode.html">UTF-8 and Unicode FAQ</a>.
\section _plinks Old project links
- <a href="https://sourceforge.net/projects/jsoncpp/">https://sourceforge.net/projects/jsoncpp/</a>
- <a href="http://jsoncpp.sourceforge.net">http://jsoncpp.sourceforge.net</a>
- <a href="http://sourceforge.net/projects/jsoncpp/files/">http://sourceforge.net/projects/jsoncpp/files/</a>
- <a href="http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/">http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/</a>
- <a href="http://jsoncpp.sourceforge.net/old.html">http://jsoncpp.sourceforge.net/old.html</a>
\section _license License
See file <a href="https://github.com/open-source-parsers/jsoncpp/blob/master/LICENSE"><code>LICENSE</code></a> in the top-directory of the project.
Basically JsonCpp is licensed under MIT license, or public domain if desired
and recognized in your jurisdiction.
\author Baptiste Lepilleur <blep@users.sourceforge.net> (originator)
\author Christopher Dunn <cdunn2001@gmail.com> (primary maintainer)
\version \include version
We make strong guarantees about binary-compatibility, consistent with
<a href="http://apr.apache.org/versioning.html">the Apache versioning scheme</a>.
\sa version.h
*/

View File

@ -1 +0,0 @@
The documentation is generated using doxygen (http://www.doxygen.org).

View File

@ -1,3 +0,0 @@
/*! \page roadmap JsonCpp roadmap
Moved to: https://github.com/open-source-parsers/jsoncpp/wiki/Roadmap
*/

File diff suppressed because it is too large

View File

@ -1,189 +0,0 @@
"""Script to generate doxygen documentation.
"""
from __future__ import print_function
from __future__ import unicode_literals
from devtools import tarball
from contextlib import contextmanager
import subprocess
import traceback
import re
import os
import sys
import shutil
@contextmanager
def cd(newdir):
"""
http://stackoverflow.com/questions/431684/how-do-i-cd-in-python
"""
prevdir = os.getcwd()
os.chdir(newdir)
try:
yield
finally:
os.chdir(prevdir)
def find_program(*filenames):
"""find a program in folders path_lst, and sets env[var]
@param filenames: a list of possible names of the program to search for
@return: the full path of the filename if found, or '' if filename could not be found
"""
paths = os.environ.get('PATH', '').split(os.pathsep)
suffixes = ('win32' in sys.platform) and '.exe .com .bat .cmd' or ''
for filename in filenames:
for name in [filename+ext for ext in suffixes.split(' ')]:
for directory in paths:
full_path = os.path.join(directory, name)
if os.path.isfile(full_path):
return full_path
return ''
def do_subst_in_file(targetfile, sourcefile, dict):
"""Replace all instances of the keys of dict with their values.
For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
"""
with open(sourcefile, 'r') as f:
contents = f.read()
for (k,v) in list(dict.items()):
v = v.replace('\\','\\\\')
contents = re.sub(k, v, contents)
with open(targetfile, 'w') as f:
f.write(contents)
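# Illustrative call (the version value is made up), matching how build_doc() uses it:
#   do_subst_in_file('doc/doxyfile', 'doc/doxyfile.in', {'%JSONCPP_VERSION%': '1.6.5'})
# Note that the keys are passed to re.sub() as regular expressions, so literal
# regex metacharacters in a key would need to be escaped.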
def getstatusoutput(cmd):
"""cmd is a list.
"""
try:
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output, _ = process.communicate()
status = process.returncode
except:
status = -1
output = traceback.format_exc()
return status, output
def run_cmd(cmd, silent=False):
"""Raise exception on failure.
"""
info = 'Running: %r in %r' %(' '.join(cmd), os.getcwd())
print(info)
sys.stdout.flush()
if silent:
status, output = getstatusoutput(cmd)
else:
status, output = subprocess.call(cmd), ''
if status:
msg = 'Error while %s ...\n\terror=%d, output="""%s"""' %(info, status, output)
raise Exception(msg)
def assert_is_exe(path):
if not path:
raise Exception('path is empty.')
if not os.path.isfile(path):
raise Exception('%r is not a file.' %path)
if not os.access(path, os.X_OK):
raise Exception('%r is not executable by this user.' %path)
def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
assert_is_exe(doxygen_path)
config_file = os.path.abspath(config_file)
with cd(working_dir):
cmd = [doxygen_path, config_file]
run_cmd(cmd, is_silent)
def build_doc(options, make_release=False):
if make_release:
options.make_tarball = True
options.with_dot = True
options.with_html_help = True
options.with_uml_look = True
options.open = False
options.silent = True
version = open('version', 'rt').read().strip()
output_dir = 'dist/doxygen' # relative to doc/doxyfile location.
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
top_dir = os.path.abspath('.')
html_output_dirname = 'jsoncpp-api-html-' + version
tarball_path = os.path.join('dist', html_output_dirname + '.tar.gz')
warning_log_path = os.path.join(output_dir, '../jsoncpp-doxygen-warning.log')
html_output_path = os.path.join(output_dir, html_output_dirname)
def yesno(bool):
return bool and 'YES' or 'NO'
subst_keys = {
'%JSONCPP_VERSION%': version,
'%DOC_TOPDIR%': '',
'%TOPDIR%': top_dir,
'%HTML_OUTPUT%': os.path.join('..', output_dir, html_output_dirname),
'%HAVE_DOT%': yesno(options.with_dot),
'%DOT_PATH%': os.path.split(options.dot_path)[0],
'%HTML_HELP%': yesno(options.with_html_help),
'%UML_LOOK%': yesno(options.with_uml_look),
'%WARNING_LOG_PATH%': os.path.join('..', warning_log_path)
}
if os.path.isdir(output_dir):
print('Deleting directory:', output_dir)
shutil.rmtree(output_dir)
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
do_subst_in_file('doc/doxyfile', options.doxyfile_input_path, subst_keys)
run_doxygen(options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent)
if not options.silent:
print(open(warning_log_path, 'r').read())
index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html'))
print('Generated documentation can be found in:')
print(index_path)
if options.open:
import webbrowser
webbrowser.open('file://' + index_path)
if options.make_tarball:
print('Generating doc tarball to', tarball_path)
tarball_sources = [
output_dir,
'README.md',
'LICENSE',
'NEWS.txt',
'version'
]
tarball_basedir = os.path.join(output_dir, html_output_dirname)
tarball.make_tarball(tarball_path, tarball_sources, tarball_basedir, html_output_dirname)
return tarball_path, html_output_dirname
def main():
usage = """%prog
Generates doxygen documentation in dist/doxygen.
Optionally makes a tarball of the documentation to dist/.
Must be started in the project top directory.
"""
from optparse import OptionParser
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False,
help="""Enable usage of DOT to generate collaboration diagram""")
parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'),
help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'),
help="""Path to Doxygen tool. [Default: %default]""")
parser.add_option('--in', dest="doxyfile_input_path", action='store', default='doc/doxyfile.in',
help="""Path to doxygen inputs. [Default: %default]""")
parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False,
help="""Enable generation of Microsoft HTML HELP""")
parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True,
help="""Generates DOT graph without UML look [Default: False]""")
parser.add_option('--open', dest="open", action='store_true', default=False,
help="""Open the HTML index in the web browser after generation""")
parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False,
help="""Generates a tarball of the documentation in dist/ directory""")
parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False,
help="""Hides doxygen output""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
build_doc(options)
if __name__ == '__main__':
main()

View File

@ -1,2 +0,0 @@
FILE(GLOB INCLUDE_FILES "json/*.h")
INSTALL(FILES ${INCLUDE_FILES} DESTINATION ${INCLUDE_INSTALL_DIR}/json)

View File

@ -1,54 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef CPPTL_JSON_ASSERTIONS_H_INCLUDED
#define CPPTL_JSON_ASSERTIONS_H_INCLUDED
#include <stdlib.h>
#include <assert.h> // for the non-exception JSON_ASSERT fallback below
#include <sstream>
#if !defined(JSON_IS_AMALGAMATION)
#include "config.h"
#endif // if !defined(JSON_IS_AMALGAMATION)
/** It should not be possible for a maliciously designed file to
* cause an abort() or seg-fault, so these macros are used only
* for pre-condition violations and internal logic errors.
*/
#if JSON_USE_EXCEPTION
// @todo <= add detail about condition in exception
# define JSON_ASSERT(condition) \
{if (!(condition)) {Json::throwLogicError( "assert json failed" );}}
# define JSON_FAIL_MESSAGE(message) \
{ \
std::ostringstream oss; oss << message; \
Json::throwLogicError(oss.str()); \
abort(); \
}
#else // JSON_USE_EXCEPTION
# define JSON_ASSERT(condition) assert(condition)
// The call to assert() will show the failure message in debug builds. In
// release builds we abort, for a core-dump or debugger.
# define JSON_FAIL_MESSAGE(message) \
{ \
std::ostringstream oss; oss << message; \
assert(false && oss.str().c_str()); \
abort(); \
}
#endif
#define JSON_ASSERT_MESSAGE(condition, message) \
if (!(condition)) { \
JSON_FAIL_MESSAGE(message); \
}
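// Illustrative use of the macro above (condition and message are placeholders):
//   JSON_ASSERT_MESSAGE(index < size(), "index out of range");
// With JSON_USE_EXCEPTION enabled this throws a Json::LogicError via
// Json::throwLogicError(); otherwise it falls back to assert()/abort().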
#endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED

View File

@ -1,25 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef JSON_AUTOLINK_H_INCLUDED
#define JSON_AUTOLINK_H_INCLUDED
#include "config.h"
#ifdef JSON_IN_CPPTL
#include <cpptl/cpptl_autolink.h>
#endif
#if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && \
!defined(JSON_IN_CPPTL)
#define CPPTL_AUTOLINK_NAME "json"
#undef CPPTL_AUTOLINK_DLL
#ifdef JSON_DLL
#define CPPTL_AUTOLINK_DLL
#endif
#include "autolink.h"
#endif
#endif // JSON_AUTOLINK_H_INCLUDED

View File

@ -1,109 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef JSON_CONFIG_H_INCLUDED
#define JSON_CONFIG_H_INCLUDED
/// If defined, indicates that json library is embedded in CppTL library.
//# define JSON_IN_CPPTL 1
/// If defined, indicates that json may leverage CppTL library
//# define JSON_USE_CPPTL 1
/// If defined, indicates that cpptl vector based map should be used instead of
/// std::map
/// as Value container.
//# define JSON_USE_CPPTL_SMALLMAP 1
// If non-zero, the library uses exceptions to report bad input instead of C
// assertion macros. The default is to use exceptions.
#ifndef JSON_USE_EXCEPTION
#define JSON_USE_EXCEPTION 1
#endif
/// If defined, indicates that the source file is amalgamated
/// to prevent private header inclusion.
/// Remarks: it is automatically defined in the generated amalgamated header.
// #define JSON_IS_AMALGAMATION
#ifdef JSON_IN_CPPTL
#include <cpptl/config.h>
#ifndef JSON_USE_CPPTL
#define JSON_USE_CPPTL 1
#endif
#endif
#ifdef JSON_IN_CPPTL
#define JSON_API CPPTL_API
#elif defined(JSON_DLL_BUILD)
#if defined(_MSC_VER)
#define JSON_API __declspec(dllexport)
#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
#endif // if defined(_MSC_VER)
#elif defined(JSON_DLL)
#if defined(_MSC_VER)
#define JSON_API __declspec(dllimport)
#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
#endif // if defined(_MSC_VER)
#endif // ifdef JSON_IN_CPPTL
#if !defined(JSON_API)
#define JSON_API
#endif
// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for
// integer
// Storages, and 64 bits integer support is disabled.
// #define JSON_NO_INT64 1
#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6
// Microsoft Visual Studio 6 only supports conversion from __int64 to double
// (no conversion from unsigned __int64).
#define JSON_USE_INT64_DOUBLE_CONVERSION 1
// Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255'
// characters in the debug information)
// All projects I've ever seen with VS6 were using this globally (not bothering
// with pragma push/pop).
#pragma warning(disable : 4786)
#endif // if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6
#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008
/// Indicates that the following function is deprecated.
#define JSONCPP_DEPRECATED(message) __declspec(deprecated(message))
#elif defined(__clang__) && defined(__has_feature)
#if __has_feature(attribute_deprecated_with_message)
#define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message)))
#endif
#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5))
#define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message)))
#elif defined(__GNUC__) && (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
#define JSONCPP_DEPRECATED(message) __attribute__((__deprecated__))
#endif
#if !defined(JSONCPP_DEPRECATED)
#define JSONCPP_DEPRECATED(message)
#endif // if !defined(JSONCPP_DEPRECATED)
namespace Json {
typedef int Int;
typedef unsigned int UInt;
#if defined(JSON_NO_INT64)
typedef int LargestInt;
typedef unsigned int LargestUInt;
#undef JSON_HAS_INT64
#else // if defined(JSON_NO_INT64)
// For Microsoft Visual Studio, use specific types, as long long is not supported
#if defined(_MSC_VER) // Microsoft Visual Studio
typedef __int64 Int64;
typedef unsigned __int64 UInt64;
#else // if defined(_MSC_VER) // Other platforms, use long long
typedef long long int Int64;
typedef unsigned long long int UInt64;
#endif // if defined(_MSC_VER)
typedef Int64 LargestInt;
typedef UInt64 LargestUInt;
#define JSON_HAS_INT64
#endif // if defined(JSON_NO_INT64)
} // end namespace Json
#endif // JSON_CONFIG_H_INCLUDED

View File

@ -1,57 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef CPPTL_JSON_FEATURES_H_INCLUDED
#define CPPTL_JSON_FEATURES_H_INCLUDED
#if !defined(JSON_IS_AMALGAMATION)
#include "forwards.h"
#endif // if !defined(JSON_IS_AMALGAMATION)
namespace Json {
/** \brief Configuration passed to reader and writer.
* This configuration object can be used to force the Reader or Writer
* to behave in a standard conforming way.
*/
class JSON_API Features {
public:
/** \brief A configuration that allows all features and assumes all strings
* are UTF-8.
* - C & C++ comments are allowed
* - Root object can be any JSON value
* - Assumes Value strings are encoded in UTF-8
*/
static Features all();
/** \brief A configuration that is strictly compatible with the JSON
* specification.
* - Comments are forbidden.
* - Root object must be either an array or an object value.
* - Assumes Value strings are encoded in UTF-8
*/
static Features strictMode();
/** \brief Initialize the configuration like Features::all().
*/
Features();
/// \c true if comments are allowed. Default: \c true.
bool allowComments_;
/// \c true if root must be either an array or an object value. Default: \c
/// false.
bool strictRoot_;
/// \c true if dropped null placeholders are allowed. Default: \c false.
bool allowDroppedNullPlaceholders_;
/// \c true if numeric object keys are allowed. Default: \c false.
bool allowNumericKeys_;
};
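// Illustrative sketch (variable names are placeholders): parsing with the
// strict feature set via the Reader API declared in reader.h.
//   Json::Reader reader(Json::Features::strictMode());
//   Json::Value root;
//   bool ok = reader.parse(document, root, /*collectComments=*/ false);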
} // namespace Json
#endif // CPPTL_JSON_FEATURES_H_INCLUDED

View File

@ -1,37 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef JSON_FORWARDS_H_INCLUDED
#define JSON_FORWARDS_H_INCLUDED
#if !defined(JSON_IS_AMALGAMATION)
#include "config.h"
#endif // if !defined(JSON_IS_AMALGAMATION)
namespace Json {
// writer.h
class FastWriter;
class StyledWriter;
// reader.h
class Reader;
// features.h
class Features;
// value.h
typedef unsigned int ArrayIndex;
class StaticString;
class Path;
class PathArgument;
class Value;
class ValueIteratorBase;
class ValueIterator;
class ValueConstIterator;
} // namespace Json
#endif // JSON_FORWARDS_H_INCLUDED

View File

@ -1,15 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef JSON_JSON_H_INCLUDED
#define JSON_JSON_H_INCLUDED
#include "autolink.h"
#include "value.h"
#include "reader.h"
#include "writer.h"
#include "features.h"
#endif // JSON_JSON_H_INCLUDED

View File

@ -1,401 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef CPPTL_JSON_READER_H_INCLUDED
#define CPPTL_JSON_READER_H_INCLUDED
#if !defined(JSON_IS_AMALGAMATION)
#include "features.h"
#include "value.h"
#endif // if !defined(JSON_IS_AMALGAMATION)
#include <deque>
#include <iosfwd>
#include <stack>
#include <string>
#include <istream>
// Disable warning C4251: <data member>: <type> needs to have dll-interface to
// be used by...
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma warning(push)
#pragma warning(disable : 4251)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
namespace Json {
/** \brief Unserialize a <a HREF="http://www.json.org">JSON</a> document into a
*Value.
*
* \deprecated Use CharReader and CharReaderBuilder.
*/
class JSON_API Reader {
public:
typedef char Char;
typedef const Char* Location;
/** \brief An error tagged with where in the JSON text it was encountered.
*
* The offsets give the [start, limit) range of bytes within the text. Note
* that this is bytes, not codepoints.
*
*/
struct StructuredError {
size_t offset_start;
size_t offset_limit;
std::string message;
};
/** \brief Constructs a Reader allowing all features
* for parsing.
*/
Reader();
/** \brief Constructs a Reader allowing the specified feature set
* for parsing.
*/
Reader(const Features& features);
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
* document.
* \param document UTF-8 encoded string containing the document to read.
* \param root [out] Contains the root value of the document if it was
* successfully parsed.
* \param collectComments \c true to collect comments and allow writing them
* back during
* serialization, \c false to discard comments.
* This parameter is ignored if
* Features::allowComments_
* is \c false.
* \return \c true if the document was successfully parsed, \c false if an
* error occurred.
*/
bool
parse(const std::string& document, Value& root, bool collectComments = true);
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
document.
* \param beginDoc Pointer to the beginning of the UTF-8 encoded string of the
document to read.
* \param endDoc Pointer to the end of the UTF-8 encoded string of the
document to read.
* Must be >= beginDoc.
* \param root [out] Contains the root value of the document if it was
* successfully parsed.
* \param collectComments \c true to collect comments and allow writing them
back during
* serialization, \c false to discard comments.
* This parameter is ignored if
Features::allowComments_
* is \c false.
* \return \c true if the document was successfully parsed, \c false if an
error occurred.
*/
bool parse(const char* beginDoc,
const char* endDoc,
Value& root,
bool collectComments = true);
/// \brief Parse from input stream.
/// \see Json::operator>>(std::istream&, Json::Value&).
bool parse(std::istream& is, Value& root, bool collectComments = true);
/** \brief Returns a user-friendly string that lists errors in the parsed
* document.
* \return Formatted error message with the list of errors with their location
* in
* the parsed document. An empty string is returned if no error
* occurred
* during parsing.
* \deprecated Use getFormattedErrorMessages() instead (typo fix).
*/
JSONCPP_DEPRECATED("Use getFormattedErrorMessages() instead.")
std::string getFormatedErrorMessages() const;
/** \brief Returns a user-friendly string that lists errors in the parsed
* document.
* \return Formatted error message with the list of errors with their location
* in
* the parsed document. An empty string is returned if no error
* occurred
* during parsing.
*/
std::string getFormattedErrorMessages() const;
/** \brief Returns a vector of structured errors encountered while parsing.
* \return A (possibly empty) vector of StructuredError objects. Currently
* only one error can be returned, but the caller should tolerate
* multiple
* errors. This can occur if the parser recovers from a non-fatal
* parse error and then encounters additional errors.
*/
std::vector<StructuredError> getStructuredErrors() const;
/** \brief Add a semantic error message.
* \param value JSON Value location associated with the error
* \param message The error message.
* \return \c true if the error was successfully added, \c false if the
* Value offset exceeds the document size.
*/
bool pushError(const Value& value, const std::string& message);
/** \brief Add a semantic error message with extra context.
* \param value JSON Value location associated with the error
* \param message The error message.
* \param extra Additional JSON Value location to contextualize the error
* \return \c true if the error was successfully added, \c false if either
* Value offset exceeds the document size.
*/
bool pushError(const Value& value, const std::string& message, const Value& extra);
/** \brief Return whether there are any errors.
* \return \c true if there are no errors to report \c false if
* errors have occurred.
*/
bool good() const;
private:
enum TokenType {
tokenEndOfStream = 0,
tokenObjectBegin,
tokenObjectEnd,
tokenArrayBegin,
tokenArrayEnd,
tokenString,
tokenNumber,
tokenTrue,
tokenFalse,
tokenNull,
tokenArraySeparator,
tokenMemberSeparator,
tokenComment,
tokenError
};
class Token {
public:
TokenType type_;
Location start_;
Location end_;
};
class ErrorInfo {
public:
Token token_;
std::string message_;
Location extra_;
};
typedef std::deque<ErrorInfo> Errors;
bool readToken(Token& token);
void skipSpaces();
bool match(Location pattern, int patternLength);
bool readComment();
bool readCStyleComment();
bool readCppStyleComment();
bool readString();
void readNumber();
bool readValue();
bool readObject(Token& token);
bool readArray(Token& token);
bool decodeNumber(Token& token);
bool decodeNumber(Token& token, Value& decoded);
bool decodeString(Token& token);
bool decodeString(Token& token, std::string& decoded);
bool decodeDouble(Token& token);
bool decodeDouble(Token& token, Value& decoded);
bool decodeUnicodeCodePoint(Token& token,
Location& current,
Location end,
unsigned int& unicode);
bool decodeUnicodeEscapeSequence(Token& token,
Location& current,
Location end,
unsigned int& unicode);
bool addError(const std::string& message, Token& token, Location extra = 0);
bool recoverFromError(TokenType skipUntilToken);
bool addErrorAndRecover(const std::string& message,
Token& token,
TokenType skipUntilToken);
void skipUntilSpace();
Value& currentValue();
Char getNextChar();
void
getLocationLineAndColumn(Location location, int& line, int& column) const;
std::string getLocationLineAndColumn(Location location) const;
void addComment(Location begin, Location end, CommentPlacement placement);
void skipCommentTokens(Token& token);
typedef std::stack<Value*> Nodes;
Nodes nodes_;
Errors errors_;
std::string document_;
Location begin_;
Location end_;
Location current_;
Location lastValueEnd_;
Value* lastValue_;
std::string commentsBefore_;
Features features_;
bool collectComments_;
}; // Reader
/** Interface for reading JSON from a char array.
*/
class JSON_API CharReader {
public:
virtual ~CharReader() {}
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
document.
* The document must be a UTF-8 encoded string containing the document to read.
*
* \param beginDoc Pointer to the beginning of the UTF-8 encoded string of the
document to read.
* \param endDoc Pointer to the end of the UTF-8 encoded string of the
document to read.
* Must be >= beginDoc.
* \param root [out] Contains the root value of the document if it was
* successfully parsed.
* \param errs [out] Formatted error messages (if not NULL)
* a user friendly string that lists errors in the parsed
* document.
* \return \c true if the document was successfully parsed, \c false if an
error occurred.
*/
virtual bool parse(
char const* beginDoc, char const* endDoc,
Value* root, std::string* errs) = 0;
class Factory {
public:
virtual ~Factory() {}
/** \brief Allocate a CharReader via operator new().
* \throw std::exception if something goes wrong (e.g. invalid settings)
*/
virtual CharReader* newCharReader() const = 0;
}; // Factory
}; // CharReader
/** \brief Build a CharReader implementation.
Usage:
\code
using namespace Json;
CharReaderBuilder builder;
builder["collectComments"] = false;
Value value;
std::string errs;
bool ok = parseFromStream(builder, std::cin, &value, &errs);
\endcode
*/
class JSON_API CharReaderBuilder : public CharReader::Factory {
public:
// Note: We use a Json::Value so that we can add data-members to this class
// without a major version bump.
/** Configuration of this builder.
These are case-sensitive.
Available settings (case-sensitive):
- `"collectComments": false or true`
- true to collect comments and allow writing them
back during serialization, false to discard comments.
This parameter is ignored if allowComments is false.
- `"allowComments": false or true`
- true if comments are allowed.
- `"strictRoot": false or true`
- true if root must be either an array or an object value
- `"allowDroppedNullPlaceholders": false or true`
- true if dropped null placeholders are allowed. (See StreamWriterBuilder.)
- `"allowNumericKeys": false or true`
- true if numeric object keys are allowed.
- `"allowSingleQuotes": false or true`
- true if single-quoted strings ('...') are allowed (both keys and values)
- `"stackLimit": integer`
- Exceeding stackLimit (recursive depth of `readValue()`) will
cause an exception.
- This is a security issue (seg-faults caused by deeply nested JSON),
so the default is low.
- `"failIfExtra": false or true`
- If true, `parse()` returns false when extra non-whitespace trails
the JSON value in the input string.
- `"rejectDupKeys": false or true`
- If true, `parse()` returns false when a key is duplicated within an object.
You can examine `settings_` yourself
to see the defaults. You can also write and read them just like any
JSON Value.
\sa setDefaults()
*/
Json::Value settings_;
CharReaderBuilder();
virtual ~CharReaderBuilder();
virtual CharReader* newCharReader() const;
/** \return true if 'settings' are legal and consistent;
* otherwise, indicate bad settings via 'invalid'.
*/
bool validate(Json::Value* invalid) const;
/** A simple way to update a specific setting.
*/
Value& operator[](std::string key);
/** Called by ctor, but you can use this to reset settings_.
* \pre 'settings' != NULL (but Json::null is fine)
* \remark Defaults:
* \snippet src/lib_json/json_reader.cpp CharReaderBuilderDefaults
*/
static void setDefaults(Json::Value* settings);
/** Same as old Features::strictMode().
* \pre 'settings' != NULL (but Json::null is fine)
* \remark Defaults:
* \snippet src/lib_json/json_reader.cpp CharReaderBuilderStrictMode
*/
static void strictMode(Json::Value* settings);
};
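// Illustrative sketch (the 'doc' string is a placeholder): using the builder
// without a stream, via the CharReader::Factory interface declared above.
//   Json::CharReaderBuilder builder;
//   builder["collectComments"] = false;
//   std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
//   Json::Value root;
//   std::string errs;
//   bool ok = reader->parse(doc.data(), doc.data() + doc.size(), &root, &errs);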
/** Consume entire stream and use its begin/end.
* Someday we might have a real StreamReader, but for now this
* is convenient.
*/
bool JSON_API parseFromStream(
CharReader::Factory const&,
std::istream&,
Value* root, std::string* errs);
/** \brief Read from 'sin' into 'root'.
Always keep comments from the input JSON.
This can be used to read a file into a particular sub-object.
For example:
\code
Json::Value root;
cin >> root["dir"]["file"];
cout << root;
\endcode
Result:
\verbatim
{
"dir": {
"file": {
// The input stream JSON would be nested here.
}
}
}
\endverbatim
\throw std::exception on parse error.
\see Json::operator<<()
*/
JSON_API std::istream& operator>>(std::istream&, Value&);
} // namespace Json
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma warning(pop)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#endif // CPPTL_JSON_READER_H_INCLUDED

View File

@ -1,826 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef CPPTL_JSON_H_INCLUDED
#define CPPTL_JSON_H_INCLUDED
#if !defined(JSON_IS_AMALGAMATION)
#include "forwards.h"
#endif // if !defined(JSON_IS_AMALGAMATION)
#include <string>
#include <vector>
#include <exception>
#ifndef JSON_USE_CPPTL_SMALLMAP
#include <map>
#else
#include <cpptl/smallmap.h>
#endif
#ifdef JSON_USE_CPPTL
#include <cpptl/forwards.h>
#endif
// Disable warning C4251: <data member>: <type> needs to have dll-interface to
// be used by...
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma warning(push)
#pragma warning(disable : 4251)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
/** \brief JSON (JavaScript Object Notation).
*/
namespace Json {
/** Base class for all exceptions we throw.
*
* We use nothing but these internally. Of course, STL can throw others.
*/
class JSON_API Exception;
/** Exceptions which the user cannot easily avoid.
*
* E.g. out-of-memory (when we use malloc), stack-overflow, malicious input
*
* \remark derived from Json::Exception
*/
class JSON_API RuntimeError;
/** Exceptions thrown by JSON_ASSERT/JSON_FAIL macros.
*
* These are precondition-violations (user bugs) and internal errors (our bugs).
*
* \remark derived from Json::Exception
*/
class JSON_API LogicError;
/// used internally
void throwRuntimeError(std::string const& msg);
/// used internally
void throwLogicError(std::string const& msg);
/** \brief Type of the value held by a Value object.
*/
enum ValueType {
nullValue = 0, ///< 'null' value
intValue, ///< signed integer value
uintValue, ///< unsigned integer value
realValue, ///< double value
stringValue, ///< UTF-8 string value
booleanValue, ///< bool value
arrayValue, ///< array value (ordered list)
objectValue ///< object value (collection of name/value pairs).
};
enum CommentPlacement {
commentBefore = 0, ///< a comment placed on the line before a value
commentAfterOnSameLine, ///< a comment just after a value on the same line
commentAfter, ///< a comment on the line after a value (only makes sense for
/// root value)
numberOfCommentPlacement
};
//# ifdef JSON_USE_CPPTL
// typedef CppTL::AnyEnumerator<const char *> EnumMemberNames;
// typedef CppTL::AnyEnumerator<const Value &> EnumValues;
//# endif
/** \brief Lightweight wrapper to tag static string.
*
* The Value constructor and objectValue member assignment take advantage of the
* StaticString and avoid the cost of string duplication when storing the
* string or the member name.
*
* Example of usage:
* \code
* Json::Value aValue( StaticString("some text") );
* Json::Value object;
* static const StaticString code("code");
* object[code] = 1234;
* \endcode
*/
class JSON_API StaticString {
public:
explicit StaticString(const char* czstring) : c_str_(czstring) {}
operator const char*() const { return c_str_; }
const char* c_str() const { return c_str_; }
private:
const char* c_str_;
};
/** \brief Represents a <a HREF="http://www.json.org">JSON</a> value.
*
* This class is a discriminated union wrapper that can represent a:
* - signed integer [range: Value::minInt - Value::maxInt]
* - unsigned integer (range: 0 - Value::maxUInt)
* - double
* - UTF-8 string
* - boolean
* - 'null'
* - an ordered list of Value
* - collection of name/value pairs (javascript object)
*
* The type of the held value is represented by a #ValueType and
* can be obtained using type().
*
* Values of an #objectValue or #arrayValue can be accessed using operator[]()
* methods.
* Non-const methods will automatically create a #nullValue element
* if it does not exist.
* The sequence of an #arrayValue will be automatically resized and initialized
* with #nullValue. resize() can be used to enlarge or truncate an #arrayValue.
*
* The get() methods can be used to obtain a default value when the
* required element does not exist.
*
* It is possible to iterate over the member names of an #objectValue using
* the getMemberNames() method.
*
* \note #Value string-lengths fit in size_t, but keys must be < 2^30.
* (The reason is an implementation detail.) A #CharReader will raise an
* exception if a bound is exceeded to avoid security holes in your app,
* but the Value API does *not* check bounds. That is the responsibility
* of the caller.
*/
class JSON_API Value {
friend class ValueIteratorBase;
public:
typedef std::vector<std::string> Members;
typedef ValueIterator iterator;
typedef ValueConstIterator const_iterator;
typedef Json::UInt UInt;
typedef Json::Int Int;
#if defined(JSON_HAS_INT64)
typedef Json::UInt64 UInt64;
typedef Json::Int64 Int64;
#endif // defined(JSON_HAS_INT64)
typedef Json::LargestInt LargestInt;
typedef Json::LargestUInt LargestUInt;
typedef Json::ArrayIndex ArrayIndex;
static const Value& null; ///< We regret this reference to a global instance; prefer the simpler Value().
static const Value& nullRef; ///< just a kludge for binary-compatibility; same as null
/// Minimum signed integer value that can be stored in a Json::Value.
static const LargestInt minLargestInt;
/// Maximum signed integer value that can be stored in a Json::Value.
static const LargestInt maxLargestInt;
/// Maximum unsigned integer value that can be stored in a Json::Value.
static const LargestUInt maxLargestUInt;
/// Minimum signed int value that can be stored in a Json::Value.
static const Int minInt;
/// Maximum signed int value that can be stored in a Json::Value.
static const Int maxInt;
/// Maximum unsigned int value that can be stored in a Json::Value.
static const UInt maxUInt;
#if defined(JSON_HAS_INT64)
/// Minimum signed 64 bits int value that can be stored in a Json::Value.
static const Int64 minInt64;
/// Maximum signed 64 bits int value that can be stored in a Json::Value.
static const Int64 maxInt64;
/// Maximum unsigned 64 bits int value that can be stored in a Json::Value.
static const UInt64 maxUInt64;
#endif // defined(JSON_HAS_INT64)
private:
#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
class CZString {
public:
enum DuplicationPolicy {
noDuplication = 0,
duplicate,
duplicateOnCopy
};
CZString(ArrayIndex index);
CZString(char const* str, unsigned length, DuplicationPolicy allocate);
CZString(CZString const& other);
~CZString();
CZString& operator=(CZString other);
bool operator<(CZString const& other) const;
bool operator==(CZString const& other) const;
ArrayIndex index() const;
//const char* c_str() const; ///< \deprecated
char const* data() const;
unsigned length() const;
bool isStaticString() const;
private:
void swap(CZString& other);
struct StringStorage {
unsigned policy_: 2;
unsigned length_: 30; // 1GB max
};
char const* cstr_; // actually, a prefixed string, unless policy is noDup
union {
ArrayIndex index_;
StringStorage storage_;
};
};
public:
#ifndef JSON_USE_CPPTL_SMALLMAP
typedef std::map<CZString, Value> ObjectValues;
#else
typedef CppTL::SmallMap<CZString, Value> ObjectValues;
#endif // ifndef JSON_USE_CPPTL_SMALLMAP
#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
public:
/** \brief Create a default Value of the given type.
This is a very useful constructor.
To create an empty array, pass arrayValue.
To create an empty object, pass objectValue.
Another Value can then be set to this one by assignment.
This is useful since clear() and resize() will not alter types.
Examples:
\code
Json::Value null_value; // null
Json::Value arr_value(Json::arrayValue); // []
Json::Value obj_value(Json::objectValue); // {}
\endcode
*/
Value(ValueType type = nullValue);
Value(Int value);
Value(UInt value);
#if defined(JSON_HAS_INT64)
Value(Int64 value);
Value(UInt64 value);
#endif // if defined(JSON_HAS_INT64)
Value(double value);
Value(const char* value); ///< Copy til first 0. (NULL causes a seg-fault.)
Value(const char* beginValue, const char* endValue); ///< Copy all, incl zeroes.
/** \brief Constructs a value from a static string.
* Like the other string constructors, but does not duplicate the string for
* internal storage. The given string must remain alive after the call to this
* constructor.
* \note This works only for null-terminated strings. (We cannot change the
* size of this class, so we have nowhere to store the length,
* which might be computed later for various operations.)
*
* Example of usage:
* \code
* static StaticString foo("some text");
* Json::Value aValue(foo);
* \endcode
*/
Value(const StaticString& value);
Value(const std::string& value); ///< Copy data() til size(). Embedded zeroes too.
#ifdef JSON_USE_CPPTL
Value(const CppTL::ConstString& value);
#endif
Value(bool value);
/// Deep copy.
Value(const Value& other);
~Value();
/// Deep copy, then swap(other).
/// \note Over-write existing comments. To preserve comments, use #swapPayload().
Value& operator=(Value other);
/// Swap everything.
void swap(Value& other);
/// Swap values but leave comments and source offsets in place.
void swapPayload(Value& other);
ValueType type() const;
/// Compare payload only, not comments etc.
bool operator<(const Value& other) const;
bool operator<=(const Value& other) const;
bool operator>=(const Value& other) const;
bool operator>(const Value& other) const;
bool operator==(const Value& other) const;
bool operator!=(const Value& other) const;
int compare(const Value& other) const;
const char* asCString() const; ///< Embedded zeroes could cause you trouble!
std::string asString() const; ///< Embedded zeroes are possible.
/** Get raw char* of string-value.
* \return false if !string. (Seg-fault if str or end are NULL.)
*/
bool getString(
char const** str, char const** end) const;
#ifdef JSON_USE_CPPTL
CppTL::ConstString asConstString() const;
#endif
Int asInt() const;
UInt asUInt() const;
#if defined(JSON_HAS_INT64)
Int64 asInt64() const;
UInt64 asUInt64() const;
#endif // if defined(JSON_HAS_INT64)
LargestInt asLargestInt() const;
LargestUInt asLargestUInt() const;
float asFloat() const;
double asDouble() const;
bool asBool() const;
bool isNull() const;
bool isBool() const;
bool isInt() const;
bool isInt64() const;
bool isUInt() const;
bool isUInt64() const;
bool isIntegral() const;
bool isDouble() const;
bool isNumeric() const;
bool isString() const;
bool isArray() const;
bool isObject() const;
bool isConvertibleTo(ValueType other) const;
/// Number of values in array or object
ArrayIndex size() const;
/// \brief Return true if empty array, empty object, or null;
/// otherwise, false.
bool empty() const;
/// Return isNull()
bool operator!() const;
/// Remove all object members and array elements.
/// \pre type() is arrayValue, objectValue, or nullValue
/// \post type() is unchanged
void clear();
/// Resize the array to size elements.
/// New elements are initialized to null.
/// May only be called on nullValue or arrayValue.
/// \pre type() is arrayValue or nullValue
/// \post type() is arrayValue
void resize(ArrayIndex size);
/// Access an array element (zero-based index).
/// If the array contains fewer than index+1 elements, null values are
/// inserted in the array so that its size becomes index+1.
/// (You may need to say 'value[0u]' to get your compiler to distinguish
/// this from the operator[] which takes a string.)
Value& operator[](ArrayIndex index);
/// Access an array element (zero-based index).
/// If the array contains fewer than index+1 elements, null values are
/// inserted in the array so that its size becomes index+1.
/// (You may need to say 'value[0u]' to get your compiler to distinguish
/// this from the operator[] which takes a string.)
Value& operator[](int index);
/// Access an array element (zero-based index)
/// (You may need to say 'value[0u]' to get your compiler to distinguish
/// this from the operator[] which takes a string.)
const Value& operator[](ArrayIndex index) const;
/// Access an array element (zero-based index)
/// (You may need to say 'value[0u]' to get your compiler to distinguish
/// this from the operator[] which takes a string.)
const Value& operator[](int index) const;
/// If the array contains at least index+1 elements, returns the element value,
/// otherwise returns defaultValue.
Value get(ArrayIndex index, const Value& defaultValue) const;
/// Return true if index < size().
bool isValidIndex(ArrayIndex index) const;
/// \brief Append value to array at the end.
///
/// Equivalent to jsonvalue[jsonvalue.size()] = value;
Value& append(const Value& value);
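// Illustrative sketch (values are placeholders): building an array incrementally.
//   Json::Value arr(Json::arrayValue);
//   arr.append("first"); // index 0
//   arr.append(2); // index 1
//   arr[2] = true; // operator[](ArrayIndex) grows the array to index+1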
/// Access an object value by name, create a null member if it does not exist.
/// \note Because of our implementation, keys are limited to 2^30 -1 chars.
/// Exceeding that will cause an exception.
Value& operator[](const char* key);
/// Access an object value by name, returns null if there is no member with
/// that name.
const Value& operator[](const char* key) const;
/// Access an object value by name, create a null member if it does not exist.
/// \param key may contain embedded nulls.
Value& operator[](const std::string& key);
/// Access an object value by name, returns null if there is no member with
/// that name.
/// \param key may contain embedded nulls.
const Value& operator[](const std::string& key) const;
/** \brief Access an object value by name, create a null member if it does not
exist.
* If the object has no entry for that name, then the member name used to store
* the new entry is not duplicated.
* Example of use:
* \code
* Json::Value object;
* static const StaticString code("code");
* object[code] = 1234;
* \endcode
*/
Value& operator[](const StaticString& key);
#ifdef JSON_USE_CPPTL
/// Access an object value by name, create a null member if it does not exist.
Value& operator[](const CppTL::ConstString& key);
/// Access an object value by name, returns null if there is no member with
/// that name.
const Value& operator[](const CppTL::ConstString& key) const;
#endif
/// Return the member named key if it exists, defaultValue otherwise.
/// \note deep copy
Value get(const char* key, const Value& defaultValue) const;
/// Return the member named key if it exists, defaultValue otherwise.
/// \note deep copy
/// \param key may contain embedded nulls.
Value get(const char* key, const char* end, const Value& defaultValue) const;
/// Return the member named key if it exists, defaultValue otherwise.
/// \note deep copy
/// \param key may contain embedded nulls.
Value get(const std::string& key, const Value& defaultValue) const;
#ifdef JSON_USE_CPPTL
/// Return the member named key if it exists, defaultValue otherwise.
/// \note deep copy
Value get(const CppTL::ConstString& key, const Value& defaultValue) const;
#endif
/// Most general and efficient version of isMember()const, get()const,
/// and operator[]const
/// \note As stated elsewhere, behavior is undefined if (end-key) >= 2^30
Value const* find(char const* key, char const* end) const;
/// Most general and efficient version of object-mutators.
/// \note As stated elsewhere, behavior is undefined if (end-key) >= 2^30
/// \return non-zero, but JSON_ASSERT if this is neither object nor nullValue.
Value const* demand(char const* key, char const* end);
/// \brief Remove and return the named member.
///
/// Does nothing if the member did not exist.
/// \return the removed Value, or null.
/// \pre type() is objectValue or nullValue
/// \post type() is unchanged
/// \deprecated
Value removeMember(const char* key);
/// Same as removeMember(const char*)
/// \param key may contain embedded nulls.
/// \deprecated
Value removeMember(const std::string& key);
/// Same as removeMember(const char* key, const char* end, Value* removed),
/// but 'key' is null-terminated.
bool removeMember(const char* key, Value* removed);
/** \brief Remove the named map member.
Update 'removed' iff removed.
\param key may contain embedded nulls.
\return true iff removed (no exceptions)
*/
bool removeMember(std::string const& key, Value* removed);
/// Same as removeMember(std::string const& key, Value* removed)
bool removeMember(const char* key, const char* end, Value* removed);
/** \brief Remove the indexed array element.
This is an expensive O(n) operation.
Update 'removed' iff removed.
\return true iff removed (no exceptions)
*/
bool removeIndex(ArrayIndex i, Value* removed);
/// Return true if the object has a member named key.
/// \note 'key' must be null-terminated.
bool isMember(const char* key) const;
/// Return true if the object has a member named key.
/// \param key may contain embedded nulls.
bool isMember(const std::string& key) const;
/// Same as isMember(std::string const& key)const
bool isMember(const char* key, const char* end) const;
#ifdef JSON_USE_CPPTL
/// Return true if the object has a member named key.
bool isMember(const CppTL::ConstString& key) const;
#endif
/// \brief Return a list of the member names.
///
/// If null, return an empty list.
/// \pre type() is objectValue or nullValue
/// \post if type() was nullValue, it remains nullValue
Members getMemberNames() const;
//# ifdef JSON_USE_CPPTL
// EnumMemberNames enumMemberNames() const;
// EnumValues enumValues() const;
//# endif
/// \deprecated Always pass len.
JSONCPP_DEPRECATED("Use setComment(std::string const&) instead.")
void setComment(const char* comment, CommentPlacement placement);
/// Comments must be //... or /* ... */
void setComment(const char* comment, size_t len, CommentPlacement placement);
/// Comments must be //... or /* ... */
void setComment(const std::string& comment, CommentPlacement placement);
bool hasComment(CommentPlacement placement) const;
/// Include delimiters and embedded newlines.
std::string getComment(CommentPlacement placement) const;
std::string toStyledString() const;
const_iterator begin() const;
const_iterator end() const;
iterator begin();
iterator end();
// Accessors for the [start, limit) range of bytes within the JSON text from
// which this value was parsed, if any.
void setOffsetStart(size_t start);
void setOffsetLimit(size_t limit);
size_t getOffsetStart() const;
size_t getOffsetLimit() const;
private:
void initBasic(ValueType type, bool allocated = false);
Value& resolveReference(const char* key);
Value& resolveReference(const char* key, const char* end);
struct CommentInfo {
CommentInfo();
~CommentInfo();
void setComment(const char* text, size_t len);
char* comment_;
};
// struct MemberNamesTransform
//{
// typedef const char *result_type;
// const char *operator()( const CZString &name ) const
// {
// return name.c_str();
// }
//};
union ValueHolder {
LargestInt int_;
LargestUInt uint_;
double real_;
bool bool_;
char* string_; // actually ptr to unsigned, followed by str, unless !allocated_
ObjectValues* map_;
} value_;
ValueType type_ : 8;
unsigned int allocated_ : 1; // Notes: if declared as bool, bitfield is useless.
// If not allocated_, string_ must be null-terminated.
CommentInfo* comments_;
// [start, limit) byte offsets in the source JSON text from which this Value
// was extracted.
size_t start_;
size_t limit_;
};
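/** Illustrative usage sketch for the accessors declared above. This is only a
 * hedged example, not part of the interface documentation; the int and string
 * constructors and asString() it relies on are declared earlier in this header.
 * \code
 * Json::Value root;                         // starts as nullValue
 * root["name"] = "jsoncpp";                 // becomes objectValue
 * root["list"].append(1);                   // "list" becomes arrayValue
 * root["list"].append(2);
 * Json::Value item = root["list"].get(5u, Json::Value()); // out of range -> default
 * bool hasName = root.isMember("name");                   // true
 * Json::Value old = root.removeMember("name");            // deprecated overload
 * \endcode
 */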
/** \brief Experimental and untested: represents an element of the "path" to
* access a node.
*/
class JSON_API PathArgument {
public:
friend class Path;
PathArgument();
PathArgument(ArrayIndex index);
PathArgument(const char* key);
PathArgument(const std::string& key);
private:
enum Kind {
kindNone = 0,
kindIndex,
kindKey
};
std::string key_;
ArrayIndex index_;
Kind kind_;
};
/** \brief Experimental and untested: represents a "path" to access a node.
*
* Syntax:
* - "." => root node
* - ".[n]" => elements at index 'n' of root node (an array value)
* - ".name" => member named 'name' of root node (an object value)
* - ".name1.name2.name3"
* - ".[0][1][2].name1[3]"
* - ".%" => member name is provided as parameter
* - ".[%]" => index is provied as parameter
*/
class JSON_API Path {
public:
Path(const std::string& path,
const PathArgument& a1 = PathArgument(),
const PathArgument& a2 = PathArgument(),
const PathArgument& a3 = PathArgument(),
const PathArgument& a4 = PathArgument(),
const PathArgument& a5 = PathArgument());
const Value& resolve(const Value& root) const;
Value resolve(const Value& root, const Value& defaultValue) const;
/// Creates the "path" to access the specified node and returns a reference on
/// the node.
Value& make(Value& root) const;
private:
typedef std::vector<const PathArgument*> InArgs;
typedef std::vector<PathArgument> Args;
void makePath(const std::string& path, const InArgs& in);
void addPathInArg(const std::string& path,
const InArgs& in,
InArgs::const_iterator& itInArg,
PathArgument::Kind kind);
void invalidPath(const std::string& path, int location);
Args args_;
};
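/** Illustrative sketch of Path usage (experimental and untested, per the note
 * above); a hedged example only, using the syntax documented for Path:
 * \code
 * Json::Value root;
 * root["settings"]["size"] = 5;
 * Json::Path sizePath(".settings.size");
 * const Json::Value& size = sizePath.resolve(root);                        // 5
 * Json::Value color = Json::Path(".settings.color").resolve(root, "none"); // default
 * Json::Path(".settings.depth").make(root) = 3;                            // creates the node
 * \endcode
 */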
/** \brief Base class for Value iterators.
*
*/
class JSON_API ValueIteratorBase {
public:
typedef std::bidirectional_iterator_tag iterator_category;
typedef unsigned int size_t;
typedef int difference_type;
typedef ValueIteratorBase SelfType;
bool operator==(const SelfType& other) const { return isEqual(other); }
bool operator!=(const SelfType& other) const { return !isEqual(other); }
difference_type operator-(const SelfType& other) const {
return other.computeDistance(*this);
}
/// Return either the index or the member name of the referenced value as a
/// Value.
Value key() const;
/// Return the index of the referenced Value, or -1 if it is not an arrayValue.
UInt index() const;
/// Return the member name of the referenced Value, or "" if it is not an
/// objectValue.
/// \note Avoid `c_str()` on result, as embedded zeroes are possible.
std::string name() const;
/// Return the member name of the referenced Value. "" if it is not an
/// objectValue.
/// \deprecated This cannot be used for UTF-8 strings, since there can be embedded nulls.
JSONCPP_DEPRECATED("Use `key = name();` instead.")
char const* memberName() const;
/// Return the member name of the referenced Value, or NULL if it is not an
/// objectValue.
/// \note Better version than memberName(). Allows embedded nulls.
char const* memberName(char const** end) const;
protected:
Value& deref() const;
void increment();
void decrement();
difference_type computeDistance(const SelfType& other) const;
bool isEqual(const SelfType& other) const;
void copy(const SelfType& other);
private:
Value::ObjectValues::iterator current_;
// Indicates that iterator is for a null value.
bool isNull_;
public:
// For some reason, BORLAND needs these at the end, rather
// than earlier. No idea why.
ValueIteratorBase();
explicit ValueIteratorBase(const Value::ObjectValues::iterator& current);
};
/** \brief const iterator for object and array value.
*
*/
class JSON_API ValueConstIterator : public ValueIteratorBase {
friend class Value;
public:
typedef const Value value_type;
//typedef unsigned int size_t;
//typedef int difference_type;
typedef const Value& reference;
typedef const Value* pointer;
typedef ValueConstIterator SelfType;
ValueConstIterator();
private:
/*! \internal Used by Value to create an iterator.
*/
explicit ValueConstIterator(const Value::ObjectValues::iterator& current);
public:
SelfType& operator=(const ValueIteratorBase& other);
SelfType operator++(int) {
SelfType temp(*this);
++*this;
return temp;
}
SelfType operator--(int) {
SelfType temp(*this);
--*this;
return temp;
}
SelfType& operator--() {
decrement();
return *this;
}
SelfType& operator++() {
increment();
return *this;
}
reference operator*() const { return deref(); }
pointer operator->() const { return &deref(); }
};
/** \brief Iterator for object and array value.
*/
class JSON_API ValueIterator : public ValueIteratorBase {
friend class Value;
public:
typedef Value value_type;
typedef unsigned int size_t;
typedef int difference_type;
typedef Value& reference;
typedef Value* pointer;
typedef ValueIterator SelfType;
ValueIterator();
ValueIterator(const ValueConstIterator& other);
ValueIterator(const ValueIterator& other);
private:
/*! \internal Used by Value to create an iterator.
*/
explicit ValueIterator(const Value::ObjectValues::iterator& current);
public:
SelfType& operator=(const SelfType& other);
SelfType operator++(int) {
SelfType temp(*this);
++*this;
return temp;
}
SelfType operator--(int) {
SelfType temp(*this);
--*this;
return temp;
}
SelfType& operator--() {
decrement();
return *this;
}
SelfType& operator++() {
increment();
return *this;
}
reference operator*() const { return deref(); }
pointer operator->() const { return &deref(); }
};
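/** Illustrative iteration sketch using the iterators declared above; a hedged
 * example assuming the Value::const_iterator typedef declared earlier in this
 * header:
 * \code
 * Json::Value obj;
 * obj["a"] = 1;
 * obj["b"] = 2;
 * const Json::Value& cobj = obj;
 * for (Json::Value::const_iterator it = cobj.begin(); it != cobj.end(); ++it) {
 *   std::string member = it.key().asString();  // member name (or index for arrays)
 *   const Json::Value& value = *it;            // member value
 * }
 * \endcode
 */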
} // namespace Json
namespace std {
/// Specialize std::swap() for Json::Value.
template<>
inline void swap(Json::Value& a, Json::Value& b) { a.swap(b); }
}
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma warning(pop)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#endif // CPPTL_JSON_H_INCLUDED

View File

@ -1,14 +0,0 @@
// DO NOT EDIT. This file is generated by CMake from "version"
// and "version.h.in" files.
// Run CMake configure step to update it.
#ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED
# define JSONCPP_VERSION_STRING "1.6.2"
# define JSONCPP_VERSION_MAJOR 1
# define JSONCPP_VERSION_MINOR 6
# define JSONCPP_VERSION_PATCH 2
# define JSONCPP_VERSION_QUALIFIER
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))
#endif // JSON_VERSION_H_INCLUDED
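// Illustrative sketch only: client code can test for a minimum jsoncpp version
// at compile time using the packed (major << 24) | (minor << 16) | (patch << 8)
// encoding defined above, e.g.:
//
//   #if JSONCPP_VERSION_HEXA >= ((1 << 24) | (6 << 16))  // require >= 1.6.0
//   // ... code that relies on jsoncpp 1.6 features ...
//   #endif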

View File

@ -1,327 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef JSON_WRITER_H_INCLUDED
#define JSON_WRITER_H_INCLUDED
#if !defined(JSON_IS_AMALGAMATION)
#include "value.h"
#endif // if !defined(JSON_IS_AMALGAMATION)
#include <vector>
#include <string>
#include <ostream>
// Disable warning C4251: <data member>: <type> needs to have dll-interface to
// be used by...
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma warning(push)
#pragma warning(disable : 4251)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
namespace Json {
class Value;
/**
Usage:
\code
using namespace Json;
void writeToStdout(StreamWriter::Factory const& factory, Value const& value) {
std::unique_ptr<StreamWriter> const writer(
factory.newStreamWriter());
writer->write(value, &std::cout);
std::cout << std::endl; // add lf and flush
}
\endcode
*/
class JSON_API StreamWriter {
protected:
std::ostream* sout_; // not owned; will not delete
public:
StreamWriter();
virtual ~StreamWriter();
/** Write Value into document as configured in sub-class.
Do not take ownership of sout, but maintain a reference during function.
\pre sout != NULL
\return zero on success (For now, we always return zero, so check the stream instead.)
\throw std::exception possibly, depending on configuration
*/
virtual int write(Value const& root, std::ostream* sout) = 0;
/** \brief A simple abstract factory.
*/
class JSON_API Factory {
public:
virtual ~Factory();
/** \brief Allocate a StreamWriter via operator new().
* \throw std::exception if something goes wrong (e.g. invalid settings)
*/
virtual StreamWriter* newStreamWriter() const = 0;
}; // Factory
}; // StreamWriter
/** \brief Write into stringstream, then return string, for convenience.
* A StreamWriter will be created from the factory, used, and then deleted.
*/
std::string JSON_API writeString(StreamWriter::Factory const& factory, Value const& root);
/** \brief Build a StreamWriter implementation.
Usage:
\code
using namespace Json;
Value value = ...;
StreamWriterBuilder builder;
builder["commentStyle"] = "None";
builder["indentation"] = " "; // or whatever you like
std::unique_ptr<Json::StreamWriter> writer(
builder.newStreamWriter());
writer->write(value, &std::cout);
std::cout << std::endl; // add lf and flush
\endcode
*/
class JSON_API StreamWriterBuilder : public StreamWriter::Factory {
public:
// Note: We use a Json::Value so that we can add data-members to this class
// without a major version bump.
/** Configuration of this builder.
Available settings (case-sensitive):
- "commentStyle": "None" or "All"
- "indentation": "<anything>"
- "enableYAMLCompatibility": false or true
- slightly change the whitespace around colons
- "dropNullPlaceholders": false or true
- Drop the "null" string from the writer's output for nullValues.
Strictly speaking, this is not valid JSON. But when the output is being
fed to a browser's Javascript, it makes for smaller output and the
browser can handle the output just fine.
You can examine `settings_` yourself
to see the defaults. You can also write and read them just like any
JSON Value.
\sa setDefaults()
*/
Json::Value settings_;
StreamWriterBuilder();
virtual ~StreamWriterBuilder();
/**
* \throw std::exception if something goes wrong (e.g. invalid settings)
*/
virtual StreamWriter* newStreamWriter() const;
/** \return true if 'settings' are legal and consistent;
* otherwise, indicate bad settings via 'invalid'.
*/
bool validate(Json::Value* invalid) const;
/** A simple way to update a specific setting.
*/
Value& operator[](std::string key);
/** Called by ctor, but you can use this to reset settings_.
* \pre 'settings' != NULL (but Json::null is fine)
* \remark Defaults:
* \snippet src/lib_json/json_writer.cpp StreamWriterBuilderDefaults
*/
static void setDefaults(Json::Value* settings);
};
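/** Illustrative sketch combining StreamWriterBuilder, validate() and
 * writeString() as declared above; a hedged example only, using the settings
 * listed in the builder documentation:
 * \code
 * Json::Value value;
 * value["status"] = "ok";
 * Json::StreamWriterBuilder builder;
 * builder["commentStyle"] = "None";
 * builder["indentation"] = "  ";
 * Json::Value invalid;
 * if (builder.validate(&invalid)) {
 *   std::string document = Json::writeString(builder, value);
 * }
 * \endcode
 */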
/** \brief Abstract class for writers.
* \deprecated Use StreamWriter. (And really, this is an implementation detail.)
*/
class JSON_API Writer {
public:
virtual ~Writer();
virtual std::string write(const Value& root) = 0;
};
/** \brief Outputs a Value in <a HREF="http://www.json.org">JSON</a> format
 * without formatting (not human friendly).
 *
 * The JSON document is written on a single line. It is not intended for
 * 'human' consumption, but may be useful to support features such as RPC
 * where bandwidth is limited.
 * \sa Reader, Value
 * \deprecated Use StreamWriterBuilder.
*/
class JSON_API FastWriter : public Writer {
public:
FastWriter();
virtual ~FastWriter() {}
void enableYAMLCompatibility();
/** \brief Drop the "null" string from the writer's output for nullValues.
* Strictly speaking, this is not valid JSON. But when the output is being
* fed to a browser's Javascript, it makes for smaller output and the
* browser can handle the output just fine.
*/
void dropNullPlaceholders();
void omitEndingLineFeed();
public: // overridden from Writer
virtual std::string write(const Value& root);
private:
void writeValue(const Value& value);
std::string document_;
bool yamlCompatiblityEnabled_;
bool dropNullPlaceholders_;
bool omitEndingLineFeed_;
};
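/** Illustrative sketch of the deprecated FastWriter interface declared above
 * (a hedged example; prefer StreamWriterBuilder in new code):
 * \code
 * Json::Value value;
 * value["compact"] = true;
 * value["missing"] = Json::Value();            // nullValue
 * Json::FastWriter writer;
 * writer.dropNullPlaceholders();               // omit "null" for nullValue members
 * writer.omitEndingLineFeed();
 * std::string document = writer.write(value);  // single-line JSON
 * \endcode
 */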
/** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a
 * human friendly way.
 *
 * The rules for line breaks and indentation are as follows:
 * - Object value:
 *   - if empty, print {} without indent or line break;
 *   - if not empty, print '{', line break and indent, print one value per
 *     line, then unindent, line break and print '}'.
 * - Array value:
 *   - if empty, print [] without indent or line break;
 *   - if the array contains no object value, no non-empty array, and all the
 *     values fit on one line, then print the array on a single line;
 *   - otherwise, if the values do not fit on one line, or the array contains
 *     an object or a non-empty array, then print one value per line.
 *
 * If the Value has comments, they are output according to their
 * #CommentPlacement.
 *
 * \sa Reader, Value, Value::setComment()
 * \deprecated Use StreamWriterBuilder.
*/
class JSON_API StyledWriter : public Writer {
public:
StyledWriter();
virtual ~StyledWriter() {}
public: // overridden from Writer
/** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
* \param root Value to serialize.
* \return String containing the JSON document that represents the root value.
*/
virtual std::string write(const Value& root);
private:
void writeValue(const Value& value);
void writeArrayValue(const Value& value);
bool isMultineArray(const Value& value);
void pushValue(const std::string& value);
void writeIndent();
void writeWithIndent(const std::string& value);
void indent();
void unindent();
void writeCommentBeforeValue(const Value& root);
void writeCommentAfterValueOnSameLine(const Value& root);
bool hasCommentForValue(const Value& value);
static std::string normalizeEOL(const std::string& text);
typedef std::vector<std::string> ChildValues;
ChildValues childValues_;
std::string document_;
std::string indentString_;
int rightMargin_;
int indentSize_;
bool addChildValues_;
};
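/** Illustrative sketch of the deprecated StyledWriter interface declared above
 * (a hedged example; prefer StreamWriterBuilder in new code).
 * Value::toStyledString(), declared earlier, produces similarly styled output.
 * \code
 * Json::Value value;
 * value["name"] = "styled";
 * value["list"].append(1);
 * Json::StyledWriter writer;
 * std::string document = writer.write(value);  // indented, human friendly
 * \endcode
 */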
/** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a
 * human friendly way, to a stream rather than to a string.
 *
 * The rules for line breaks and indentation are as follows:
 * - Object value:
 *   - if empty, print {} without indent or line break;
 *   - if not empty, print '{', line break and indent, print one value per
 *     line, then unindent, line break and print '}'.
 * - Array value:
 *   - if empty, print [] without indent or line break;
 *   - if the array contains no object value, no non-empty array, and all the
 *     values fit on one line, then print the array on a single line;
 *   - otherwise, if the values do not fit on one line, or the array contains
 *     an object or a non-empty array, then print one value per line.
 *
 * If the Value has comments, they are output according to their
 * #CommentPlacement.
 *
 * \param indentation Each level will be indented by this amount extra.
 * \sa Reader, Value, Value::setComment()
 * \deprecated Use StreamWriterBuilder.
*/
class JSON_API StyledStreamWriter {
public:
StyledStreamWriter(std::string indentation = "\t");
~StyledStreamWriter() {}
public:
/** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
* \param out Stream to write to. (Can be ostringstream, e.g.)
* \param root Value to serialize.
* \note There is no point in deriving from Writer, since write() should not
* return a value.
*/
void write(std::ostream& out, const Value& root);
private:
void writeValue(const Value& value);
void writeArrayValue(const Value& value);
bool isMultineArray(const Value& value);
void pushValue(const std::string& value);
void writeIndent();
void writeWithIndent(const std::string& value);
void indent();
void unindent();
void writeCommentBeforeValue(const Value& root);
void writeCommentAfterValueOnSameLine(const Value& root);
bool hasCommentForValue(const Value& value);
static std::string normalizeEOL(const std::string& text);
typedef std::vector<std::string> ChildValues;
ChildValues childValues_;
std::ostream* document_;
std::string indentString_;
int rightMargin_;
std::string indentation_;
bool addChildValues_ : 1;
bool indented_ : 1;
};
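/** Illustrative sketch of the deprecated StyledStreamWriter interface declared
 * above, writing to an output stream; a hedged example that assumes <fstream>
 * is included and uses "out.json" only as a placeholder file name:
 * \code
 * Json::Value value;
 * value["name"] = "streamed";
 * std::ofstream out("out.json");
 * Json::StyledStreamWriter writer("  ");  // two-space indentation
 * writer.write(out, value);
 * \endcode
 */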
#if defined(JSON_HAS_INT64)
std::string JSON_API valueToString(Int value);
std::string JSON_API valueToString(UInt value);
#endif // if defined(JSON_HAS_INT64)
std::string JSON_API valueToString(LargestInt value);
std::string JSON_API valueToString(LargestUInt value);
std::string JSON_API valueToString(double value);
std::string JSON_API valueToString(bool value);
std::string JSON_API valueToQuotedString(const char* value);
/// \brief Output using the StyledStreamWriter.
/// \see Json::operator>>()
JSON_API std::ostream& operator<<(std::ostream&, const Value& root);
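/** Illustrative sketch of the free helper functions and operator<< declared
 * above; a hedged example that assumes <iostream> is included (exact number
 * formatting may vary between versions):
 * \code
 * std::string n = Json::valueToString(3.1416);              // "3.1416"
 * std::string q = Json::valueToQuotedString("say \"hi\"");  // quoted and escaped
 * Json::Value root;
 * root["pi"] = 3.14;
 * std::cout << root;  // styled output via StyledStreamWriter
 * \endcode
 */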
} // namespace Json
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma warning(pop)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#endif // JSON_WRITER_H_INCLUDED

View File

@ -1,42 +0,0 @@

Microsoft Visual Studio Solution File, Format Version 11.00
# Visual Studio 2010
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcxproj", "{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcxproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcxproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Win32 = Debug|Win32
Debug|x64 = Debug|x64
Release|Win32 = Release|Win32
Release|x64 = Release|x64
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Debug|Win32.ActiveCfg = Debug|Win32
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Debug|Win32.Build.0 = Debug|Win32
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Debug|x64.ActiveCfg = Debug|x64
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Debug|x64.Build.0 = Debug|x64
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Release|Win32.ActiveCfg = Release|Win32
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Release|Win32.Build.0 = Release|Win32
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Release|x64.ActiveCfg = Release|x64
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Release|x64.Build.0 = Release|x64
{25AF2DD2-D396-4668-B188-488C33B8E620}.Debug|Win32.ActiveCfg = Debug|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Debug|Win32.Build.0 = Debug|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Debug|x64.ActiveCfg = Debug|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release|Win32.ActiveCfg = Release|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release|Win32.Build.0 = Release|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release|x64.ActiveCfg = Release|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug|Win32.ActiveCfg = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug|Win32.Build.0 = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug|x64.ActiveCfg = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release|Win32.ActiveCfg = Release|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release|Win32.Build.0 = Release|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release|x64.ActiveCfg = Release|Win32
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal

View File

@ -1,96 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{25AF2DD2-D396-4668-B188-488C33B8E620}</ProjectGuid>
<Keyword>Win32Proj</Keyword>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup>
<_ProjectFileVersion>10.0.40219.1</_ProjectFileVersion>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">../../build/vs71/debug/jsontest\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">../../build/vs71/debug/jsontest\</IntDir>
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</LinkIncremental>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">../../build/vs71/release/jsontest\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">../../build/vs71/release/jsontest\</IntDir>
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</LinkIncremental>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<Optimization>Disabled</Optimization>
<AdditionalIncludeDirectories>../../include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<MinimalRebuild>true</MinimalRebuild>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
<PrecompiledHeader>
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<DebugInformationFormat>EditAndContinue</DebugInformationFormat>
</ClCompile>
<Link>
<OutputFile>$(OutDir)jsontest.exe</OutputFile>
<GenerateDebugInformation>true</GenerateDebugInformation>
<ProgramDatabaseFile>$(OutDir)jsontest.pdb</ProgramDatabaseFile>
<SubSystem>Console</SubSystem>
<TargetMachine>MachineX86</TargetMachine>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<AdditionalIncludeDirectories>../../include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
<PrecompiledHeader>
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
</ClCompile>
<Link>
<OutputFile>$(OutDir)jsontest.exe</OutputFile>
<GenerateDebugInformation>true</GenerateDebugInformation>
<SubSystem>Console</SubSystem>
<OptimizeReferences>true</OptimizeReferences>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<TargetMachine>MachineX86</TargetMachine>
</Link>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="..\..\src\jsontestrunner\main.cpp" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="lib_json.vcxproj">
<Project>{1e6c2c1c-6453-4129-ae3f-0ee8e6599c89}</Project>
</ProjectReference>
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

View File

@ -1,13 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Filter Include="Source Files">
<UniqueIdentifier>{903591b3-ade3-4ce4-b1f9-1e175e62b014}</UniqueIdentifier>
</Filter>
</ItemGroup>
<ItemGroup>
<ClCompile Include="..\..\src\jsontestrunner\main.cpp">
<Filter>Source Files</Filter>
</ClCompile>
</ItemGroup>
</Project>

View File

@ -1,143 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Debug|x64">
<Configuration>Debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|x64">
<Configuration>Release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<ItemGroup>
<ClCompile Include="..\..\src\lib_json\json_reader.cpp" />
<ClCompile Include="..\..\src\lib_json\json_value.cpp" />
<ClCompile Include="..\..\src\lib_json\json_writer.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="..\..\include\json\reader.h" />
<ClInclude Include="..\..\include\json\value.h" />
<ClInclude Include="..\..\include\json\writer.h" />
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}</ProjectGuid>
<Keyword>Win32Proj</Keyword>
<RootNamespace>jsoncpp</RootNamespace>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup />
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>WIN32;_DEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>../../include</AdditionalIncludeDirectories>
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>WIN32;_DEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>../../include</AdditionalIncludeDirectories>
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
<PreprocessorDefinitions>WIN32;NDEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>../../include</AdditionalIncludeDirectories>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
<PreprocessorDefinitions>WIN32;NDEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>../../include</AdditionalIncludeDirectories>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
</Link>
</ItemDefinitionGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

View File

@ -1,33 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Filter Include="Header Files">
<UniqueIdentifier>{c110bc57-c46e-476c-97ea-84d8014f431c}</UniqueIdentifier>
</Filter>
<Filter Include="Source Files">
<UniqueIdentifier>{ed718592-5acf-47b5-8f2b-b8224590da6a}</UniqueIdentifier>
</Filter>
</ItemGroup>
<ItemGroup>
<ClCompile Include="..\..\src\lib_json\json_reader.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\..\src\lib_json\json_value.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\..\src\lib_json\json_writer.cpp">
<Filter>Source Files</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<ClInclude Include="..\..\include\json\reader.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="..\..\include\json\value.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="..\..\include\json\writer.h">
<Filter>Header Files</Filter>
</ClInclude>
</ItemGroup>
</Project>

View File

@ -1,109 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}</ProjectGuid>
<RootNamespace>test_lib_json</RootNamespace>
<Keyword>Win32Proj</Keyword>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup>
<_ProjectFileVersion>10.0.40219.1</_ProjectFileVersion>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">../../build/vs71/debug/test_lib_json\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">../../build/vs71/debug/test_lib_json\</IntDir>
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</LinkIncremental>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">../../build/vs71/release/test_lib_json\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">../../build/vs71/release/test_lib_json\</IntDir>
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</LinkIncremental>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<Optimization>Disabled</Optimization>
<AdditionalIncludeDirectories>../../include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<MinimalRebuild>true</MinimalRebuild>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
<PrecompiledHeader>
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<DebugInformationFormat>EditAndContinue</DebugInformationFormat>
</ClCompile>
<Link>
<OutputFile>$(OutDir)test_lib_json.exe</OutputFile>
<GenerateDebugInformation>true</GenerateDebugInformation>
<ProgramDatabaseFile>$(OutDir)test_lib_json.pdb</ProgramDatabaseFile>
<SubSystem>Console</SubSystem>
<TargetMachine>MachineX86</TargetMachine>
</Link>
<PostBuildEvent>
<Message>Running all unit tests</Message>
<Command>$(TargetPath)</Command>
</PostBuildEvent>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<AdditionalIncludeDirectories>../../include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
<PrecompiledHeader>
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
</ClCompile>
<Link>
<OutputFile>$(OutDir)test_lib_json.exe</OutputFile>
<GenerateDebugInformation>true</GenerateDebugInformation>
<SubSystem>Console</SubSystem>
<OptimizeReferences>true</OptimizeReferences>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<TargetMachine>MachineX86</TargetMachine>
</Link>
<PostBuildEvent>
<Message>Running all unit tests</Message>
<Command>$(TargetPath)</Command>
</PostBuildEvent>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="..\..\src\test_lib_json\jsontest.cpp" />
<ClCompile Include="..\..\src\test_lib_json\main.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="..\..\src\test_lib_json\jsontest.h" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="lib_json.vcxproj">
<Project>{1e6c2c1c-6453-4129-ae3f-0ee8e6599c89}</Project>
</ProjectReference>
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

View File

@ -1,24 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<ClCompile Include="..\..\src\test_lib_json\jsontest.cpp">
<Filter>Source Filter</Filter>
</ClCompile>
<ClCompile Include="..\..\src\test_lib_json\main.cpp">
<Filter>Source Filter</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<Filter Include="Source Filter">
<UniqueIdentifier>{bf40cbfc-8e98-40b4-b9f3-7e8d579cbae2}</UniqueIdentifier>
</Filter>
<Filter Include="Header Files">
<UniqueIdentifier>{5fd39074-89e6-4939-aa3f-694fefd296b1}</UniqueIdentifier>
</Filter>
</ItemGroup>
<ItemGroup>
<ClInclude Include="..\..\src\test_lib_json\jsontest.h">
<Filter>Header Files</Filter>
</ClInclude>
</ItemGroup>
</Project>

View File

@ -1,46 +0,0 @@
Microsoft Visual Studio Solution File, Format Version 8.00
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}"
ProjectSection(ProjectDependencies) = postProject
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}"
ProjectSection(ProjectDependencies) = postProject
{B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}"
ProjectSection(ProjectDependencies) = postProject
{B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B}
EndProjectSection
EndProject
Global
GlobalSection(SolutionConfiguration) = preSolution
Debug = Debug
dummy = dummy
Release = Release
EndGlobalSection
GlobalSection(ProjectConfiguration) = postSolution
{B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32
{B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32
{B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32
{B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32
{B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32
{B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
EndGlobalSection
GlobalSection(ExtensibilityAddIns) = postSolution
EndGlobalSection
EndGlobal

View File

@ -1,119 +0,0 @@
<?xml version="1.0" encoding="Windows-1252"?>
<VisualStudioProject
ProjectType="Visual C++"
Version="7.10"
Name="jsontest"
ProjectGUID="{25AF2DD2-D396-4668-B188-488C33B8E620}"
Keyword="Win32Proj">
<Platforms>
<Platform
Name="Win32"/>
</Platforms>
<Configurations>
<Configuration
Name="Debug|Win32"
OutputDirectory="../../build/vs71/debug/jsontest"
IntermediateDirectory="../../build/vs71/debug/jsontest"
ConfigurationType="1"
CharacterSet="2">
<Tool
Name="VCCLCompilerTool"
Optimization="0"
AdditionalIncludeDirectories="../../include"
PreprocessorDefinitions="WIN32;_DEBUG;_CONSOLE"
MinimalRebuild="TRUE"
BasicRuntimeChecks="3"
RuntimeLibrary="1"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="TRUE"
DebugInformationFormat="4"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCLinkerTool"
OutputFile="$(OutDir)/jsontest.exe"
LinkIncremental="2"
GenerateDebugInformation="TRUE"
ProgramDatabaseFile="$(OutDir)/jsontest.pdb"
SubSystem="1"
TargetMachine="1"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCPostBuildEventTool"/>
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebDeploymentTool"/>
<Tool
Name="VCManagedWrapperGeneratorTool"/>
<Tool
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
</Configuration>
<Configuration
Name="Release|Win32"
OutputDirectory="../../build/vs71/release/jsontest"
IntermediateDirectory="../../build/vs71/release/jsontest"
ConfigurationType="1"
CharacterSet="2">
<Tool
Name="VCCLCompilerTool"
AdditionalIncludeDirectories="../../include"
PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE"
RuntimeLibrary="0"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="TRUE"
DebugInformationFormat="3"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCLinkerTool"
OutputFile="$(OutDir)/jsontest.exe"
LinkIncremental="1"
GenerateDebugInformation="TRUE"
SubSystem="1"
OptimizeReferences="2"
EnableCOMDATFolding="2"
TargetMachine="1"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCPostBuildEventTool"/>
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebDeploymentTool"/>
<Tool
Name="VCManagedWrapperGeneratorTool"/>
<Tool
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
</Configuration>
</Configurations>
<References>
</References>
<Files>
<File
RelativePath="..\..\src\jsontestrunner\main.cpp">
</File>
</Files>
<Globals>
</Globals>
</VisualStudioProject>

View File

@ -1,205 +0,0 @@
<?xml version="1.0" encoding="Windows-1252"?>
<VisualStudioProject
ProjectType="Visual C++"
Version="7.10"
Name="lib_json"
ProjectGUID="{B84F7231-16CE-41D8-8C08-7B523FF4225B}"
Keyword="Win32Proj">
<Platforms>
<Platform
Name="Win32"/>
</Platforms>
<Configurations>
<Configuration
Name="Debug|Win32"
OutputDirectory="../../build/vs71/debug/lib_json"
IntermediateDirectory="../../build/vs71/debug/lib_json"
ConfigurationType="4"
CharacterSet="2">
<Tool
Name="VCCLCompilerTool"
Optimization="0"
AdditionalIncludeDirectories="../../include"
PreprocessorDefinitions="WIN32;_DEBUG;_LIB"
StringPooling="TRUE"
MinimalRebuild="TRUE"
BasicRuntimeChecks="3"
RuntimeLibrary="1"
EnableFunctionLevelLinking="TRUE"
DisableLanguageExtensions="TRUE"
ForceConformanceInForLoopScope="FALSE"
RuntimeTypeInfo="TRUE"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="TRUE"
DebugInformationFormat="4"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCLibrarianTool"
OutputFile="$(OutDir)/json_vc71_libmtd.lib"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCPostBuildEventTool"/>
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCManagedWrapperGeneratorTool"/>
<Tool
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
</Configuration>
<Configuration
Name="Release|Win32"
OutputDirectory="../../build/vs71/release/lib_json"
IntermediateDirectory="../../build/vs71/release/lib_json"
ConfigurationType="4"
CharacterSet="2"
WholeProgramOptimization="TRUE">
<Tool
Name="VCCLCompilerTool"
GlobalOptimizations="TRUE"
EnableIntrinsicFunctions="TRUE"
AdditionalIncludeDirectories="../../include"
PreprocessorDefinitions="WIN32;NDEBUG;_LIB"
StringPooling="TRUE"
RuntimeLibrary="0"
EnableFunctionLevelLinking="TRUE"
DisableLanguageExtensions="TRUE"
ForceConformanceInForLoopScope="FALSE"
RuntimeTypeInfo="TRUE"
UsePrecompiledHeader="0"
AssemblerOutput="4"
WarningLevel="3"
Detect64BitPortabilityProblems="TRUE"
DebugInformationFormat="3"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCLibrarianTool"
OutputFile="$(OutDir)/json_vc71_libmt.lib"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCPostBuildEventTool"/>
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCManagedWrapperGeneratorTool"/>
<Tool
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
</Configuration>
<Configuration
Name="dummy|Win32"
OutputDirectory="$(ConfigurationName)"
IntermediateDirectory="$(ConfigurationName)"
ConfigurationType="2"
CharacterSet="2"
WholeProgramOptimization="TRUE">
<Tool
Name="VCCLCompilerTool"
GlobalOptimizations="TRUE"
EnableIntrinsicFunctions="TRUE"
AdditionalIncludeDirectories="../../include"
PreprocessorDefinitions="WIN32;NDEBUG;_LIB"
StringPooling="TRUE"
RuntimeLibrary="4"
EnableFunctionLevelLinking="TRUE"
DisableLanguageExtensions="TRUE"
ForceConformanceInForLoopScope="FALSE"
RuntimeTypeInfo="TRUE"
UsePrecompiledHeader="0"
AssemblerOutput="4"
WarningLevel="3"
Detect64BitPortabilityProblems="TRUE"
DebugInformationFormat="3"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCLinkerTool"
GenerateDebugInformation="TRUE"
SubSystem="2"
OptimizeReferences="2"
EnableCOMDATFolding="2"
TargetMachine="1"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCPostBuildEventTool"/>
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebDeploymentTool"/>
<Tool
Name="VCManagedWrapperGeneratorTool"/>
<Tool
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
</Configuration>
</Configurations>
<References>
</References>
<Files>
<File
RelativePath="..\..\include\json\autolink.h">
</File>
<File
RelativePath="..\..\include\json\config.h">
</File>
<File
RelativePath="..\..\include\json\features.h">
</File>
<File
RelativePath="..\..\include\json\forwards.h">
</File>
<File
RelativePath="..\..\include\json\json.h">
</File>
<File
RelativePath="..\..\src\lib_json\json_reader.cpp">
</File>
<File
RelativePath="..\..\src\lib_json\json_value.cpp">
</File>
<File
RelativePath="..\..\src\lib_json\json_valueiterator.inl">
</File>
<File
RelativePath="..\..\src\lib_json\json_writer.cpp">
</File>
<File
RelativePath="..\..\include\json\reader.h">
</File>
<File
RelativePath="..\..\include\json\value.h">
</File>
<File
RelativePath="..\..\include\json\writer.h">
</File>
</Files>
<Globals>
</Globals>
</VisualStudioProject>

View File

@ -1,130 +0,0 @@
<?xml version="1.0" encoding="Windows-1252"?>
<VisualStudioProject
ProjectType="Visual C++"
Version="7.10"
Name="test_lib_json"
ProjectGUID="{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}"
RootNamespace="test_lib_json"
Keyword="Win32Proj">
<Platforms>
<Platform
Name="Win32"/>
</Platforms>
<Configurations>
<Configuration
Name="Debug|Win32"
OutputDirectory="../../build/vs71/debug/test_lib_json"
IntermediateDirectory="../../build/vs71/debug/test_lib_json"
ConfigurationType="1"
CharacterSet="2">
<Tool
Name="VCCLCompilerTool"
Optimization="0"
AdditionalIncludeDirectories="../../include"
PreprocessorDefinitions="WIN32;_DEBUG;_CONSOLE"
MinimalRebuild="TRUE"
BasicRuntimeChecks="3"
RuntimeLibrary="1"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="TRUE"
DebugInformationFormat="4"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCLinkerTool"
OutputFile="$(OutDir)/test_lib_json.exe"
LinkIncremental="2"
GenerateDebugInformation="TRUE"
ProgramDatabaseFile="$(OutDir)/test_lib_json.pdb"
SubSystem="1"
TargetMachine="1"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCPostBuildEventTool"
Description="Running all unit tests"
CommandLine="$(TargetPath)"/>
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebDeploymentTool"/>
<Tool
Name="VCManagedWrapperGeneratorTool"/>
<Tool
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
</Configuration>
<Configuration
Name="Release|Win32"
OutputDirectory="../../build/vs71/release/test_lib_json"
IntermediateDirectory="../../build/vs71/release/test_lib_json"
ConfigurationType="1"
CharacterSet="2">
<Tool
Name="VCCLCompilerTool"
AdditionalIncludeDirectories="../../include"
PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE"
RuntimeLibrary="0"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="TRUE"
DebugInformationFormat="3"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCLinkerTool"
OutputFile="$(OutDir)/test_lib_json.exe"
LinkIncremental="1"
GenerateDebugInformation="TRUE"
SubSystem="1"
OptimizeReferences="2"
EnableCOMDATFolding="2"
TargetMachine="1"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCPostBuildEventTool"
Description="Running all unit tests"
CommandLine="$(TargetPath)"/>
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebDeploymentTool"/>
<Tool
Name="VCManagedWrapperGeneratorTool"/>
<Tool
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
</Configuration>
</Configurations>
<References>
</References>
<Files>
<File
RelativePath="..\..\src\test_lib_json\jsontest.cpp">
</File>
<File
RelativePath="..\..\src\test_lib_json\jsontest.h">
</File>
<File
RelativePath="..\..\src\test_lib_json\main.cpp">
</File>
</Files>
<Globals>
</Globals>
</VisualStudioProject>

View File

@ -1,390 +0,0 @@
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
"""Tag the sandbox for release, make source and doc tarballs.
Requires Python 2.6
Example of invocation (use to test the script):
python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev
When testing this script:
python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev
Example of invocation when doing a release:
python makerelease.py 0.5.0 0.6.0-dev
Note: This was for Subversion. Now that we are in GitHub, we do not
need to build versioned tarballs anymore, so makerelease.py is defunct.
"""
from __future__ import print_function
import os.path
import subprocess
import sys
import doxybuild
import xml.etree.ElementTree as ElementTree
import shutil
import urllib2
import tempfile
import os
import time
from devtools import antglob, fixeol, tarball
import amalgamate
SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download'
SOURCEFORGE_PROJECT = 'jsoncpp'
def set_version(version):
with open('version','wb') as f:
f.write(version.strip())
def rmdir_if_exist(dir_path):
if os.path.isdir(dir_path):
shutil.rmtree(dir_path)
class SVNError(Exception):
pass
def svn_command(command, *args):
cmd = ['svn', '--non-interactive', command] + list(args)
print('Running:', ' '.join(cmd))
process = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout = process.communicate()[0]
if process.returncode:
error = SVNError('SVN command failed:\n' + stdout)
error.returncode = process.returncode
raise error
return stdout
def check_no_pending_commit():
"""Checks that there is no pending commit in the sandbox."""
stdout = svn_command('status', '--xml')
etree = ElementTree.fromstring(stdout)
msg = []
for entry in etree.getiterator('entry'):
path = entry.get('path')
status = entry.find('wc-status').get('item')
if status != 'unversioned' and path != 'version':
msg.append('File "%s" has pending change (status="%s")' % (path, status))
if msg:
msg.insert(0, 'Pending changes to commit found in sandbox. Commit them first!')
return '\n'.join(msg)
def svn_join_url(base_url, suffix):
if not base_url.endswith('/'):
base_url += '/'
if suffix.startswith('/'):
suffix = suffix[1:]
return base_url + suffix
def svn_check_if_tag_exist(tag_url):
"""Checks if a tag exist.
Returns: True if the tag exist, False otherwise.
"""
try:
list_stdout = svn_command('list', tag_url)
except SVNError as e:
if e.returncode != 1 or tag_url not in str(e):
raise e
# otherwise ignore error, meaning tag does not exist
return False
return True
def svn_commit(message):
"""Commit the sandbox, providing the specified comment.
"""
svn_command('ci', '-m', message)
def svn_tag_sandbox(tag_url, message):
"""Makes a tag based on the sandbox revisions.
"""
svn_command('copy', '-m', message, '.', tag_url)
def svn_remove_tag(tag_url, message):
"""Removes an existing tag.
"""
svn_command('delete', '-m', message, tag_url)
def svn_export(tag_url, export_dir):
"""Exports the tag_url revision to export_dir.
The target directory, including its parents, is created if it does not exist.
If the directory export_dir exists, it is deleted before the export proceeds.
"""
rmdir_if_exist(export_dir)
svn_command('export', tag_url, export_dir)
def fix_sources_eol(dist_dir):
"""Set file EOL for tarball distribution.
"""
print('Preparing exported source file EOL for distribution...')
prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
win_sources = antglob.glob(dist_dir,
includes = '**/*.sln **/*.vcproj',
prune_dirs = prune_dirs)
unix_sources = antglob.glob(dist_dir,
includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
sconscript *.json *.expected AUTHORS LICENSE''',
excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
prune_dirs = prune_dirs)
for path in win_sources:
fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\r\n')
for path in unix_sources:
fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\n')
def download(url, target_path):
"""Download file represented by url to target_path.
"""
f = urllib2.urlopen(url)
try:
data = f.read()
finally:
f.close()
fout = open(target_path, 'wb')
try:
fout.write(data)
finally:
fout.close()
def check_compile(distcheck_top_dir, platform):
cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
print('Running:', ' '.join(cmd))
log_path = os.path.join(distcheck_top_dir, 'build-%s.log' % platform)
flog = open(log_path, 'wb')
try:
process = subprocess.Popen(cmd,
stdout=flog,
stderr=subprocess.STDOUT,
cwd=distcheck_top_dir)
stdout = process.communicate()[0]
status = (process.returncode == 0)
finally:
flog.close()
return (status, log_path)
def write_tempfile(content, **kwargs):
fd, path = tempfile.mkstemp(**kwargs)
f = os.fdopen(fd, 'wt')
try:
f.write(content)
finally:
f.close()
return path
class SFTPError(Exception):
pass
def run_sftp_batch(userhost, sftp, batch, retry=0):
path = write_tempfile(batch, suffix='.sftp', text=True)
# psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
error = None
for retry_index in range(0, max(1,retry)):
heading = retry_index == 0 and 'Running:' or 'Retrying:'
print(heading, ' '.join(cmd))
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout = process.communicate()[0]
if process.returncode != 0:
error = SFTPError('SFTP batch failed:\n' + stdout)
else:
break
if error:
raise error
return stdout
def sourceforge_web_synchro(sourceforge_project, doc_dir,
user=None, sftp='sftp'):
"""Notes: does not synchronize sub-directory of doc-dir.
"""
userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project)
stdout = run_sftp_batch(userhost, sftp, """
cd htdocs
dir
exit
""")
existing_paths = set()
collect = 0
for line in stdout.split('\n'):
line = line.strip()
if not collect and line.endswith('> dir'):
collect = True
elif collect and line.endswith('> exit'):
break
elif collect == 1:
collect = 2
elif collect == 2:
path = line.strip().split()[-1:]
if path and path[0] not in ('.', '..'):
existing_paths.add(path[0])
upload_paths = set([os.path.basename(p) for p in antglob.glob(doc_dir)])
paths_to_remove = existing_paths - upload_paths
if paths_to_remove:
print('Removing the following file from web:')
print('\n'.join(paths_to_remove))
stdout = run_sftp_batch(userhost, sftp, """cd htdocs
rm %s
exit""" % ' '.join(paths_to_remove))
print('Uploading %d files:' % len(upload_paths))
batch_size = 10
upload_paths = list(upload_paths)
start_time = time.time()
for index in range(0,len(upload_paths),batch_size):
paths = upload_paths[index:index+batch_size]
file_per_sec = (time.time() - start_time) / (index+1)
remaining_files = len(upload_paths) - index
remaining_sec = file_per_sec * remaining_files
print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec))
run_sftp_batch(userhost, sftp, """cd htdocs
lcd %s
mput %s
exit""" % (doc_dir, ' '.join(paths)), retry=3)
def sourceforge_release_tarball(sourceforge_project, paths, user=None, sftp='sftp'):
userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project)
run_sftp_batch(userhost, sftp, """
mput %s
exit
""" % (' '.join(paths),))
def main():
usage = """%prog release_version next_dev_version
Updates the 'version' file to release_version and commits.
Generates the documentation tarball.
Tags the sandbox revision with release_version.
Updates the 'version' file to next_dev_version and commits.
Performs an svn export of the release tag, and builds a source tarball.
Must be started in the project top directory.
Warning: --force should only be used when developing/testing the release script.
"""
from optparse import OptionParser
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'),
help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'),
help="""Path to Doxygen tool. [Default: %default]""")
parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False,
help="""Ignore pending commit. [Default: %default]""")
parser.add_option('--retag', dest="retag_release", action='store_true', default=False,
help="""Overwrite release existing tag if it exist. [Default: %default]""")
parser.add_option('-p', '--platforms', dest="platforms", action='store', default='',
help="""Comma separated list of platform passed to scons for build check.""")
parser.add_option('--no-test', dest="no_test", action='store_true', default=False,
help="""Skips build check.""")
parser.add_option('--no-web', dest="no_web", action='store_true', default=False,
help="""Do not update web site.""")
parser.add_option('-u', '--upload-user', dest="user", action='store',
help="""Sourceforge user for SFTP documentation upload.""")
parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'),
help="""Path of the SFTP compatible binary used to upload the documentation.""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
if len(args) != 2:
parser.error('release_version and next_dev_version must be specified on the command-line.')
release_version = args[0]
next_version = args[1]
if not options.platforms and not options.no_test:
parser.error('You must specify either the --platforms or the --no-test option.')
if options.ignore_pending_commit:
msg = ''
else:
msg = check_no_pending_commit()
if not msg:
print('Setting version to', release_version)
set_version(release_version)
svn_commit('Release ' + release_version)
tag_url = svn_join_url(SVN_TAG_ROOT, release_version)
if svn_check_if_tag_exist(tag_url):
if options.retag_release:
svn_remove_tag(tag_url, 'Overwriting previous tag')
else:
print('Aborting, tag %s already exists. Use --retag to overwrite it!' % tag_url)
sys.exit(1)
svn_tag_sandbox(tag_url, 'Release ' + release_version)
print('Generated doxygen document...')
## doc_dirname = r'jsoncpp-api-html-0.5.0'
## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
doc_tarball_path, doc_dirname = doxybuild.build_doc(options, make_release=True)
doc_distcheck_dir = 'dist/doccheck'
tarball.decompress(doc_tarball_path, doc_distcheck_dir)
doc_distcheck_top_dir = os.path.join(doc_distcheck_dir, doc_dirname)
export_dir = 'dist/export'
svn_export(tag_url, export_dir)
fix_sources_eol(export_dir)
source_dir = 'jsoncpp-src-' + release_version
source_tarball_path = 'dist/%s.tar.gz' % source_dir
print('Generating source tarball to', source_tarball_path)
tarball.make_tarball(source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir)
amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
print('Generating amalgamation source tarball to', amalgamation_tarball_path)
amalgamation_dir = 'dist/amalgamation'
amalgamate.amalgamate_source(export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h')
amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
tarball.make_tarball(amalgamation_tarball_path, [amalgamation_dir],
amalgamation_dir, prefix_dir=amalgamation_source_dir)
# Decompress source tarball, download and install scons-local
distcheck_dir = 'dist/distcheck'
distcheck_top_dir = distcheck_dir + '/' + source_dir
print('Decompressing source tarball to', distcheck_dir)
rmdir_if_exist(distcheck_dir)
tarball.decompress(source_tarball_path, distcheck_dir)
scons_local_path = 'dist/scons-local.tar.gz'
print('Downloading scons-local to', scons_local_path)
download(SCONS_LOCAL_URL, scons_local_path)
print('Decompressing scons-local to', distcheck_top_dir)
tarball.decompress(scons_local_path, distcheck_top_dir)
# Run compilation
print('Compiling decompressed tarball')
all_build_status = True
for platform in options.platforms.split(','):
print('Testing platform:', platform)
build_status, log_path = check_compile(distcheck_top_dir, platform)
print('see build log:', log_path)
print(build_status and '=> ok' or '=> FAILED')
all_build_status = all_build_status and build_status
if not build_status:
print('Testing failed on at least one platform, aborting...')
svn_remove_tag(tag_url, 'Removing tag due to failed testing')
sys.exit(1)
if options.user:
if not options.no_web:
print('Uploading documentation using user', options.user)
sourceforge_web_synchro(SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp)
print('Completed documentation upload')
print('Uploading source and documentation tarballs for release using user', options.user)
sourceforge_release_tarball(SOURCEFORGE_PROJECT,
[source_tarball_path, doc_tarball_path],
user=options.user, sftp=options.sftp)
print('Source and doc release tarballs uploaded')
else:
print('No upload user specified. Web site and download tarball were not uploaded.')
print('Tarball can be found at:', doc_tarball_path)
# Set next version number and commit
set_version(next_version)
svn_commit('Released ' + release_version)
else:
sys.stderr.write(msg + '\n')
if __name__ == '__main__':
main()

View File

@ -1,11 +0,0 @@
prefix=@CMAKE_INSTALL_PREFIX@
exec_prefix=${prefix}
libdir=@LIBRARY_INSTALL_DIR@
includedir=@INCLUDE_INSTALL_DIR@
Name: jsoncpp
Description: A C++ library for interacting with JSON
Version: @JSONCPP_VERSION@
URL: https://github.com/open-source-parsers/jsoncpp
Libs: -L${libdir} -ljsoncpp
Cflags: -I${includedir}

View File

@ -1,58 +0,0 @@
# Copyright 2009 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
import fnmatch
import os
def generate(env):
def Glob(env, includes = None, excludes = None, dir = '.'):
"""Adds Glob(includes = Split('*'), excludes = None, dir = '.')
helper function to environment.
Globs the file-system files.
includes: list of file name patterns included in the return list when matched.
excludes: list of file name patterns excluded from the return list.
Example:
sources = env.Glob(("*.cpp", '*.h'), "~*.cpp", "#src")
"""
def filterFilename(path):
abs_path = os.path.join(dir, path)
if not os.path.isfile(abs_path):
return 0
fn = os.path.basename(path)
match = 0
for include in includes:
if fnmatch.fnmatchcase(fn, include):
match = 1
break
if match == 1 and not excludes is None:
for exclude in excludes:
if fnmatch.fnmatchcase(fn, exclude):
match = 0
break
return match
if includes is None:
includes = ('*',)
elif type(includes) in (type(''), type(u'')):
includes = (includes,)
if type(excludes) in (type(''), type(u'')):
excludes = (excludes,)
dir = env.Dir(dir).abspath
paths = os.listdir(dir)
def makeAbsFileNode(path):
return env.File(os.path.join(dir, path))
nodes = filter(filterFilename, paths)
return map(makeAbsFileNode, nodes)
from SCons.Script import Environment
Environment.Glob = Glob
def exists(env):
"""
Tool always exists.
"""
return True

View File

@ -1,183 +0,0 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
import os
import os.path
from fnmatch import fnmatch
import targz
##def DoxyfileParse(file_contents):
## """
## Parse a Doxygen source file and return a dictionary of all the values.
## Values will be strings and lists of strings.
## """
## data = {}
##
## import shlex
## lex = shlex.shlex(instream = file_contents, posix = True)
## lex.wordchars += "*+./-:"
## lex.whitespace = lex.whitespace.replace("\n", "")
## lex.escape = ""
##
## lineno = lex.lineno
## last_backslash_lineno = lineno
## token = lex.get_token()
## key = token # the first token should be a key
## last_token = ""
## key_token = False
## next_key = False
## new_data = True
##
## def append_data(data, key, new_data, token):
## if new_data or len(data[key]) == 0:
## data[key].append(token)
## else:
## data[key][-1] += token
##
## while token:
## if token in ['\n']:
## if last_token not in ['\\']:
## key_token = True
## elif token in ['\\']:
## pass
## elif key_token:
## key = token
## key_token = False
## else:
## if token == "+=":
## if not data.has_key(key):
## data[key] = list()
## elif token == "=":
## data[key] = list()
## else:
## append_data(data, key, new_data, token)
## new_data = True
##
## last_token = token
## token = lex.get_token()
##
## if last_token == '\\' and token != '\n':
## new_data = False
## append_data(data, key, new_data, '\\')
##
## # compress lists of len 1 into single strings
## for (k, v) in data.items():
## if len(v) == 0:
## data.pop(k)
##
## # items in the following list will be kept as lists and not converted to strings
## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]:
## continue
##
## if len(v) == 1:
## data[k] = v[0]
##
## return data
##
##def DoxySourceScan(node, env, path):
## """
## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add
## any files used to generate docs to the list of source files.
## """
## default_file_patterns = [
## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx',
## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++',
## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm',
## '*.py',
## ]
##
## default_exclude_patterns = [
## '*~',
## ]
##
## sources = []
##
## data = DoxyfileParse(node.get_contents())
##
## if data.get("RECURSIVE", "NO") == "YES":
## recursive = True
## else:
## recursive = False
##
## file_patterns = data.get("FILE_PATTERNS", default_file_patterns)
## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns)
##
## for node in data.get("INPUT", []):
## if os.path.isfile(node):
## sources.add(node)
## elif os.path.isdir(node):
## if recursive:
## for root, dirs, files in os.walk(node):
## for f in files:
## filename = os.path.join(root, f)
##
## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False)
## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True)
##
## if pattern_check and not exclude_check:
## sources.append(filename)
## else:
## for pattern in file_patterns:
## sources.extend(glob.glob("/".join([node, pattern])))
## sources = map(lambda path: env.File(path), sources)
## return sources
##
##
##def DoxySourceScanCheck(node, env):
## """Check if we should scan this file"""
## return os.path.isfile(node.path)
def srcDistEmitter(source, target, env):
## """Doxygen Doxyfile emitter"""
## # possible output formats and their default values and output locations
## output_formats = {
## "HTML": ("YES", "html"),
## "LATEX": ("YES", "latex"),
## "RTF": ("NO", "rtf"),
## "MAN": ("YES", "man"),
## "XML": ("NO", "xml"),
## }
##
## data = DoxyfileParse(source[0].get_contents())
##
## targets = []
## out_dir = data.get("OUTPUT_DIRECTORY", ".")
##
## # add our output locations
## for (k, v) in output_formats.items():
## if data.get("GENERATE_" + k, v[0]) == "YES":
## targets.append(env.Dir(os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))))
##
## # don't clobber targets
## for node in targets:
## env.Precious(node)
##
## # set up cleaning stuff
## for node in targets:
## env.Clean(node, node)
##
## return (targets, source)
return (target,source)
def generate(env):
"""
Add builders and construction variables for the
SrcDist tool.
"""
## doxyfile_scanner = env.Scanner(## DoxySourceScan,
## "DoxySourceScan",
## scan_check = DoxySourceScanCheck,
##)
if targz.exists(env):
srcdist_builder = targz.makeBuilder(srcDistEmitter)
env['BUILDERS']['SrcDist'] = srcdist_builder
def exists(env):
"""
Make sure srcdist exists.
"""
return targz.exists(env)

View File

@ -1,85 +0,0 @@
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
import re
from SCons.Script import * # the usual scons stuff you get in a SConscript
import collections
def generate(env):
"""
Add builders and construction variables for the
SubstInFile tool.
Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT
from the source to the target.
The values of SUBST_DICT first have any construction variables expanded
(its keys are not expanded).
If a value of SUBST_DICT is a python callable function, it is called and
the result is expanded as the value.
If there's more than one source and more than one target, each target gets
substituted from the corresponding source.
"""
def do_subst_in_file(targetfile, sourcefile, dict):
"""Replace all instances of the keys of dict with their values.
For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
"""
try:
f = open(sourcefile, 'rb')
contents = f.read()
f.close()
except:
raise SCons.Errors.UserError("Can't read source file %s"%sourcefile)
for (k,v) in list(dict.items()):
contents = re.sub(k, v, contents)
try:
f = open(targetfile, 'wb')
f.write(contents)
f.close()
except:
raise SCons.Errors.UserError("Can't write target file %s"%targetfile)
return 0 # success
def subst_in_file(target, source, env):
if 'SUBST_DICT' not in env:
raise SCons.Errors.UserError("SubstInFile requires SUBST_DICT to be set.")
d = dict(env['SUBST_DICT']) # copy it
for (k,v) in list(d.items()):
if isinstance(v, collections.Callable):
d[k] = env.subst(v()).replace('\\','\\\\')
elif SCons.Util.is_String(v):
d[k] = env.subst(v).replace('\\','\\\\')
else:
raise SCons.Errors.UserError("SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)))
for (t,s) in zip(target, source):
return do_subst_in_file(str(t), str(s), d)
def subst_in_file_string(target, source, env):
"""This is what gets printed on the console."""
return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t))
for (t,s) in zip(target, source)])
def subst_emitter(target, source, env):
"""Add dependency from substituted SUBST_DICT to target.
Returns original target, source tuple unchanged.
"""
d = env['SUBST_DICT'].copy() # copy it
for (k,v) in list(d.items()):
if isinstance(v, collections.Callable):
d[k] = env.subst(v())
elif SCons.Util.is_String(v):
d[k]=env.subst(v)
Depends(target, SCons.Node.Python.Value(d))
return target, source
## env.Append(TOOLS = 'substinfile') # this should be automatically done by SCons?!?
subst_action = SCons.Action.Action(subst_in_file, subst_in_file_string)
env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter)
def exists(env):
"""
Make sure tool exists.
"""
return True

View File

@ -1,87 +0,0 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
"""tarball
Tool-specific initialization for tarball.
"""
## Commands to tackle a command based implementation:
##to unpack on the fly...
##gunzip < FILE.tar.gz | tar xvf -
##to pack on the fly...
##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
try:
import gzip
import tarfile
internal_targz = 1
except ImportError:
internal_targz = 0
TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
if internal_targz:
def targz(target, source, env):
def archive_name(path):
path = os.path.normpath(os.path.abspath(path))
common_path = os.path.commonprefix((base_dir, path))
archive_name = path[len(common_path):]
return archive_name
def visit(tar, dirname, names):
for name in names:
path = os.path.join(dirname, name)
if os.path.isfile(path):
tar.add(path, archive_name(path))
compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL)
base_dir = os.path.normpath(env.get('TARGZ_BASEDIR', env.Dir('.')).abspath)
target_path = str(target[0])
fileobj = gzip.GzipFile(target_path, 'wb', compression)
tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj)
for source in source:
source_path = str(source)
if source.isdir():
os.path.walk(source_path, visit, tar)
else:
tar.add(source_path, archive_name(source_path)) # filename, arcname
tar.close()
targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR'])
def makeBuilder(emitter = None):
return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'),
source_factory = SCons.Node.FS.Entry,
source_scanner = SCons.Defaults.DirScanner,
suffix = '$TARGZ_SUFFIX',
multi = 1)
TarGzBuilder = makeBuilder()
def generate(env):
"""Add Builders and construction variables for zip to an Environment.
The following environnement variables may be set:
TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level).
TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative
to something other than top-dir).
"""
env['BUILDERS']['TarGz'] = TarGzBuilder
env['TARGZ_COM'] = targzAction
env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9
env['TARGZ_SUFFIX'] = '.tar.gz'
env['TARGZ_BASEDIR'] = env.Dir('.') # Source archive names are made relative to that directory.
else:
def generate(env):
pass
def exists(env):
return internal_targz

View File

@ -1,5 +0,0 @@
ADD_SUBDIRECTORY(lib_json)
IF(JSONCPP_WITH_TESTS)
ADD_SUBDIRECTORY(jsontestrunner)
ADD_SUBDIRECTORY(test_lib_json)
ENDIF(JSONCPP_WITH_TESTS)

View File

@ -1,25 +0,0 @@
FIND_PACKAGE(PythonInterp 2.6)
ADD_EXECUTABLE(jsontestrunner_exe
main.cpp
)
IF(BUILD_SHARED_LIBS)
ADD_DEFINITIONS( -DJSON_DLL )
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib)
ELSE(BUILD_SHARED_LIBS)
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib_static)
ENDIF(BUILD_SHARED_LIBS)
SET_TARGET_PROPERTIES(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe)
IF(PYTHONINTERP_FOUND)
# Run end to end parser/writer tests
SET(TEST_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../test)
SET(RUNJSONTESTS_PATH ${TEST_DIR}/runjsontests.py)
ADD_CUSTOM_TARGET(jsoncpp_readerwriter_tests
"${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data"
DEPENDS jsontestrunner_exe jsoncpp_test
)
ADD_CUSTOM_TARGET(jsoncpp_check DEPENDS jsoncpp_readerwriter_tests)
ENDIF(PYTHONINTERP_FOUND)

View File

@ -1,325 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
/* This executable is used for testing parser/writer using real JSON files.
*/
#include <json/json.h>
#include <algorithm> // sort
#include <sstream>
#include <stdio.h>
#if defined(_MSC_VER) && _MSC_VER >= 1310
#pragma warning(disable : 4996) // disable fopen deprecation warning
#endif
struct Options
{
std::string path;
Json::Features features;
bool parseOnly;
typedef std::string (*writeFuncType)(Json::Value const&);
writeFuncType write;
};
static std::string normalizeFloatingPointStr(double value) {
char buffer[32];
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
sprintf_s(buffer, sizeof(buffer), "%.16g", value);
#else
snprintf(buffer, sizeof(buffer), "%.16g", value);
#endif
buffer[sizeof(buffer) - 1] = 0;
std::string s(buffer);
std::string::size_type index = s.find_last_of("eE");
if (index != std::string::npos) {
std::string::size_type hasSign =
(s[index + 1] == '+' || s[index + 1] == '-') ? 1 : 0;
std::string::size_type exponentStartIndex = index + 1 + hasSign;
std::string normalized = s.substr(0, exponentStartIndex);
std::string::size_type indexDigit =
s.find_first_not_of('0', exponentStartIndex);
std::string exponent = "0";
if (indexDigit !=
std::string::npos) // There is an exponent different from 0
{
exponent = s.substr(indexDigit);
}
return normalized + exponent;
}
return s;
}
static std::string readInputTestFile(const char* path) {
FILE* file = fopen(path, "rb");
if (!file)
return std::string("");
fseek(file, 0, SEEK_END);
long size = ftell(file);
fseek(file, 0, SEEK_SET);
std::string text;
char* buffer = new char[size + 1];
buffer[size] = 0;
if (fread(buffer, 1, size, file) == (unsigned long)size)
text = buffer;
fclose(file);
delete[] buffer;
return text;
}
static void
printValueTree(FILE* fout, Json::Value& value, const std::string& path = ".") {
if (value.hasComment(Json::commentBefore)) {
fprintf(fout, "%s\n", value.getComment(Json::commentBefore).c_str());
}
switch (value.type()) {
case Json::nullValue:
fprintf(fout, "%s=null\n", path.c_str());
break;
case Json::intValue:
fprintf(fout,
"%s=%s\n",
path.c_str(),
Json::valueToString(value.asLargestInt()).c_str());
break;
case Json::uintValue:
fprintf(fout,
"%s=%s\n",
path.c_str(),
Json::valueToString(value.asLargestUInt()).c_str());
break;
case Json::realValue:
fprintf(fout,
"%s=%s\n",
path.c_str(),
normalizeFloatingPointStr(value.asDouble()).c_str());
break;
case Json::stringValue:
fprintf(fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str());
break;
case Json::booleanValue:
fprintf(fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false");
break;
case Json::arrayValue: {
fprintf(fout, "%s=[]\n", path.c_str());
int size = value.size();
for (int index = 0; index < size; ++index) {
static char buffer[16];
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
sprintf_s(buffer, sizeof(buffer), "[%d]", index);
#else
snprintf(buffer, sizeof(buffer), "[%d]", index);
#endif
printValueTree(fout, value[index], path + buffer);
}
} break;
case Json::objectValue: {
fprintf(fout, "%s={}\n", path.c_str());
Json::Value::Members members(value.getMemberNames());
std::sort(members.begin(), members.end());
std::string suffix = *(path.end() - 1) == '.' ? "" : ".";
for (Json::Value::Members::iterator it = members.begin();
it != members.end();
++it) {
const std::string& name = *it;
printValueTree(fout, value[name], path + suffix + name);
}
} break;
default:
break;
}
if (value.hasComment(Json::commentAfter)) {
fprintf(fout, "%s\n", value.getComment(Json::commentAfter).c_str());
}
}
static int parseAndSaveValueTree(const std::string& input,
const std::string& actual,
const std::string& kind,
const Json::Features& features,
bool parseOnly,
Json::Value* root)
{
Json::Reader reader(features);
bool parsingSuccessful = reader.parse(input, *root);
if (!parsingSuccessful) {
printf("Failed to parse %s file: \n%s\n",
kind.c_str(),
reader.getFormattedErrorMessages().c_str());
return 1;
}
if (!parseOnly) {
FILE* factual = fopen(actual.c_str(), "wt");
if (!factual) {
printf("Failed to create %s actual file.\n", kind.c_str());
return 2;
}
printValueTree(factual, *root);
fclose(factual);
}
return 0;
}
// static std::string useFastWriter(Json::Value const& root) {
// Json::FastWriter writer;
// writer.enableYAMLCompatibility();
// return writer.write(root);
// }
static std::string useStyledWriter(
Json::Value const& root)
{
Json::StyledWriter writer;
return writer.write(root);
}
static std::string useStyledStreamWriter(
Json::Value const& root)
{
Json::StyledStreamWriter writer;
std::ostringstream sout;
writer.write(sout, root);
return sout.str();
}
static std::string useBuiltStyledStreamWriter(
Json::Value const& root)
{
Json::StreamWriterBuilder builder;
return Json::writeString(builder, root);
}
static int rewriteValueTree(
const std::string& rewritePath,
const Json::Value& root,
Options::writeFuncType write,
std::string* rewrite)
{
*rewrite = write(root);
FILE* fout = fopen(rewritePath.c_str(), "wt");
if (!fout) {
printf("Failed to create rewrite file: %s\n", rewritePath.c_str());
return 2;
}
fprintf(fout, "%s\n", rewrite->c_str());
fclose(fout);
return 0;
}
static std::string removeSuffix(const std::string& path,
const std::string& extension) {
if (extension.length() >= path.length())
return std::string("");
std::string suffix = path.substr(path.length() - extension.length());
if (suffix != extension)
return std::string("");
return path.substr(0, path.length() - extension.length());
}
static void printConfig() {
// Print the configuration used to compile JsonCpp
#if defined(JSON_NO_INT64)
printf("JSON_NO_INT64=1\n");
#else
printf("JSON_NO_INT64=0\n");
#endif
}
static int printUsage(const char* argv[]) {
printf("Usage: %s [--strict] input-json-file", argv[0]);
return 3;
}
static int parseCommandLine(
int argc, const char* argv[], Options* opts)
{
opts->parseOnly = false;
opts->write = &useStyledWriter;
if (argc < 2) {
return printUsage(argv);
}
int index = 1;
if (std::string(argv[index]) == "--json-checker") {
opts->features = Json::Features::strictMode();
opts->parseOnly = true;
++index;
}
if (std::string(argv[index]) == "--json-config") {
printConfig();
return 3;
}
if (std::string(argv[index]) == "--json-writer") {
++index;
std::string const writerName(argv[index++]);
if (writerName == "StyledWriter") {
opts->write = &useStyledWriter;
} else if (writerName == "StyledStreamWriter") {
opts->write = &useStyledStreamWriter;
} else if (writerName == "BuiltStyledStreamWriter") {
opts->write = &useBuiltStyledStreamWriter;
} else {
printf("Unknown '--json-writer %s'\n", writerName.c_str());
return 4;
}
}
if (index == argc || index + 1 < argc) {
return printUsage(argv);
}
opts->path = argv[index];
return 0;
}
static int runTest(Options const& opts)
{
int exitCode = 0;
std::string input = readInputTestFile(opts.path.c_str());
if (input.empty()) {
printf("Failed to read input or empty input: %s\n", opts.path.c_str());
return 3;
}
std::string basePath = removeSuffix(opts.path, ".json");
if (!opts.parseOnly && basePath.empty()) {
printf("Bad input path. Path does not end with '.expected':\n%s\n",
opts.path.c_str());
return 3;
}
std::string const actualPath = basePath + ".actual";
std::string const rewritePath = basePath + ".rewrite";
std::string const rewriteActualPath = basePath + ".actual-rewrite";
Json::Value root;
exitCode = parseAndSaveValueTree(
input, actualPath, "input",
opts.features, opts.parseOnly, &root);
if (exitCode || opts.parseOnly) {
return exitCode;
}
std::string rewrite;
exitCode = rewriteValueTree(rewritePath, root, opts.write, &rewrite);
if (exitCode) {
return exitCode;
}
Json::Value rewriteRoot;
exitCode = parseAndSaveValueTree(
rewrite, rewriteActualPath, "rewrite",
opts.features, opts.parseOnly, &rewriteRoot);
if (exitCode) {
return exitCode;
}
return 0;
}
int main(int argc, const char* argv[]) {
Options opts;
int exitCode = parseCommandLine(argc, argv, &opts);
if (exitCode != 0) {
printf("Failed to parse command-line.");
return exitCode;
}
try {
return runTest(opts);
}
catch (const std::exception& e) {
printf("Unhandled exception:\n%s\n", e.what());
return 1;
}
}

View File

@ -1,9 +0,0 @@
Import( 'env_testing buildJSONTests' )
buildJSONTests( env_testing, Split( """
main.cpp
""" ),
'jsontestrunner' )
# For 'check' to work, 'libs' must be built first.
env_testing.Depends('jsontestrunner', '#libs')

View File

@ -1,80 +0,0 @@
if( CMAKE_COMPILER_IS_GNUCXX )
#Get compiler version.
execute_process( COMMAND ${CMAKE_CXX_COMPILER} -dumpversion
OUTPUT_VARIABLE GNUCXX_VERSION )
#-Werror=* was introduced -after- GCC 4.1.2
if( GNUCXX_VERSION VERSION_GREATER 4.1.2 )
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing")
endif()
endif( CMAKE_COMPILER_IS_GNUCXX )
SET( JSONCPP_INCLUDE_DIR ../../include )
SET( PUBLIC_HEADERS
${JSONCPP_INCLUDE_DIR}/json/config.h
${JSONCPP_INCLUDE_DIR}/json/forwards.h
${JSONCPP_INCLUDE_DIR}/json/features.h
${JSONCPP_INCLUDE_DIR}/json/value.h
${JSONCPP_INCLUDE_DIR}/json/reader.h
${JSONCPP_INCLUDE_DIR}/json/writer.h
${JSONCPP_INCLUDE_DIR}/json/assertions.h
${JSONCPP_INCLUDE_DIR}/json/version.h
)
SOURCE_GROUP( "Public API" FILES ${PUBLIC_HEADERS} )
SET(jsoncpp_sources
json_tool.h
json_reader.cpp
json_valueiterator.inl
json_value.cpp
json_writer.cpp
version.h.in)
# Install instructions for this target
IF(JSONCPP_WITH_CMAKE_PACKAGE)
SET(INSTALL_EXPORT EXPORT jsoncpp)
ELSE(JSONCPP_WITH_CMAKE_PACKAGE)
SET(INSTALL_EXPORT)
ENDIF(JSONCPP_WITH_CMAKE_PACKAGE)
IF(BUILD_SHARED_LIBS)
ADD_DEFINITIONS( -DJSON_DLL_BUILD )
ADD_LIBRARY(jsoncpp_lib SHARED ${PUBLIC_HEADERS} ${jsoncpp_sources})
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR})
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp
DEBUG_OUTPUT_NAME jsoncpp${DEBUG_LIBNAME_SUFFIX} )
INSTALL( TARGETS jsoncpp_lib ${INSTALL_EXPORT}
RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR}
LIBRARY DESTINATION ${LIBRARY_INSTALL_DIR}
ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR})
IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC
$<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>)
ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
ENDIF()
IF(BUILD_STATIC_LIBS)
ADD_LIBRARY(jsoncpp_lib_static STATIC ${PUBLIC_HEADERS} ${jsoncpp_sources})
SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR})
SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES OUTPUT_NAME jsoncpp
DEBUG_OUTPUT_NAME jsoncpp${DEBUG_LIBNAME_SUFFIX} )
INSTALL( TARGETS jsoncpp_lib_static ${INSTALL_EXPORT}
RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR}
LIBRARY DESTINATION ${LIBRARY_INSTALL_DIR}
ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR})
IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib_static PUBLIC
$<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
)
ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
ENDIF()

File diff suppressed because it is too large.

View File

@ -1,87 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED
#define LIB_JSONCPP_JSON_TOOL_H_INCLUDED
/* This header provides common string manipulation support, such as UTF-8,
* portable conversion from/to string...
*
* It is an internal header that must not be exposed.
*/
namespace Json {
/// Converts a unicode code-point to UTF-8.
static inline std::string codePointToUTF8(unsigned int cp) {
std::string result;
// based on description from http://en.wikipedia.org/wiki/UTF-8
if (cp <= 0x7f) {
result.resize(1);
result[0] = static_cast<char>(cp);
} else if (cp <= 0x7FF) {
result.resize(2);
result[1] = static_cast<char>(0x80 | (0x3f & cp));
result[0] = static_cast<char>(0xC0 | (0x1f & (cp >> 6)));
} else if (cp <= 0xFFFF) {
result.resize(3);
result[2] = static_cast<char>(0x80 | (0x3f & cp));
result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6)));
result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12)));
} else if (cp <= 0x10FFFF) {
result.resize(4);
result[3] = static_cast<char>(0x80 | (0x3f & cp));
result[2] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 12)));
result[0] = static_cast<char>(0xF0 | (0x7 & (cp >> 18)));
}
return result;
}
/// Returns true if ch is a control character (0 < ch <= 0x1F).
static inline bool isControlCharacter(char ch) { return ch > 0 && ch <= 0x1F; }
enum {
/// Constant that specifies the size of the buffer that must be passed to
/// uintToString.
uintToStringBufferSize = 3 * sizeof(LargestUInt) + 1
};
// Defines a char buffer for use with uintToString().
typedef char UIntToStringBuffer[uintToStringBufferSize];
/** Converts an unsigned integer to string.
* @param value Unsigned integer to convert to string
* @param current Input/Output string buffer.
* Must have at least uintToStringBufferSize chars free.
*/
static inline void uintToString(LargestUInt value, char*& current) {
*--current = 0;
do {
*--current = char(value % 10) + '0';
value /= 10;
} while (value != 0);
}
/** Change ',' to '.' everywhere in buffer.
*
* We had a sophisticated way, but it did not work in WinCE.
* @see https://github.com/open-source-parsers/jsoncpp/pull/9
*/
static inline void fixNumericLocale(char* begin, char* end) {
while (begin < end) {
if (*begin == ',') {
*begin = '.';
}
++begin;
}
}
} // namespace Json {
#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED
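The buffer/pointer protocol of uintToString() above is easy to get wrong: the caller passes a pointer one past the end of a caller-owned buffer, and the helper writes the NUL terminator and then the digits right to left, leaving the pointer on the first digit (so no reversal pass or length computation is needed). The following is a minimal, self-contained sketch of that calling convention; since json_tool.h is an internal header, the helper is replicated here under illustrative names, with unsigned long long standing in for Json::LargestUInt.

#include <cstdio>

// Stand-in for Json::LargestUInt, for illustration only.
typedef unsigned long long ExampleUInt;

// Same sizing rule as the header above: room for all decimal digits plus the NUL.
enum { exampleBufferSize = 3 * sizeof(ExampleUInt) + 1 };
typedef char ExampleBuffer[exampleBufferSize];

static inline void exampleUintToString(ExampleUInt value, char*& current) {
  *--current = 0;                        // write the terminating NUL first
  do {
    *--current = char(value % 10) + '0'; // emit digits right to left
    value /= 10;
  } while (value != 0);
}

int main() {
  ExampleBuffer buffer;
  char* current = buffer + sizeof(buffer); // start one past the end of the buffer
  exampleUintToString(1234567890ULL, current);
  std::printf("%s\n", current);            // current now points at "1234567890"
  return 0;
}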

File diff suppressed because it is too large.

View File

@ -1,162 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
// included by json_value.cpp
namespace Json {
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// class ValueIteratorBase
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
ValueIteratorBase::ValueIteratorBase()
: current_(), isNull_(true) {
}
ValueIteratorBase::ValueIteratorBase(
const Value::ObjectValues::iterator& current)
: current_(current), isNull_(false) {}
Value& ValueIteratorBase::deref() const {
return current_->second;
}
void ValueIteratorBase::increment() {
++current_;
}
void ValueIteratorBase::decrement() {
--current_;
}
ValueIteratorBase::difference_type
ValueIteratorBase::computeDistance(const SelfType& other) const {
#ifdef JSON_USE_CPPTL_SMALLMAP
return other.current_ - current_;
#else
// Iterators for null values are initialized using the default
// constructor, which initializes current_ to the default
// std::map::iterator. As begin() and end() are two instances
// of the default std::map::iterator, they cannot be compared.
// To allow this, we handle this comparison specifically.
if (isNull_ && other.isNull_) {
return 0;
}
// Using std::distance is not portable (it does not compile with the Sun Studio 12
// RogueWave STL, which is the one used by default).
// Using a portable hand-made version for non-random iterators instead:
// return difference_type( std::distance( current_, other.current_ ) );
difference_type myDistance = 0;
for (Value::ObjectValues::iterator it = current_; it != other.current_;
++it) {
++myDistance;
}
return myDistance;
#endif
}
bool ValueIteratorBase::isEqual(const SelfType& other) const {
if (isNull_) {
return other.isNull_;
}
return current_ == other.current_;
}
void ValueIteratorBase::copy(const SelfType& other) {
current_ = other.current_;
isNull_ = other.isNull_;
}
Value ValueIteratorBase::key() const {
const Value::CZString czstring = (*current_).first;
if (czstring.data()) {
if (czstring.isStaticString())
return Value(StaticString(czstring.data()));
return Value(czstring.data(), czstring.data() + czstring.length());
}
return Value(czstring.index());
}
UInt ValueIteratorBase::index() const {
const Value::CZString czstring = (*current_).first;
if (!czstring.data())
return czstring.index();
return Value::UInt(-1);
}
std::string ValueIteratorBase::name() const {
char const* key;
char const* end;
key = memberName(&end);
if (!key) return std::string();
return std::string(key, end);
}
char const* ValueIteratorBase::memberName() const {
const char* name = (*current_).first.data();
return name ? name : "";
}
char const* ValueIteratorBase::memberName(char const** end) const {
const char* name = (*current_).first.data();
if (!name) {
*end = NULL;
return NULL;
}
*end = name + (*current_).first.length();
return name;
}
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// class ValueConstIterator
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
ValueConstIterator::ValueConstIterator() {}
ValueConstIterator::ValueConstIterator(
const Value::ObjectValues::iterator& current)
: ValueIteratorBase(current) {}
ValueConstIterator& ValueConstIterator::
operator=(const ValueIteratorBase& other) {
copy(other);
return *this;
}
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// class ValueIterator
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
ValueIterator::ValueIterator() {}
ValueIterator::ValueIterator(const Value::ObjectValues::iterator& current)
: ValueIteratorBase(current) {}
ValueIterator::ValueIterator(const ValueConstIterator& other)
: ValueIteratorBase(other) {}
ValueIterator::ValueIterator(const ValueIterator& other)
: ValueIteratorBase(other) {}
ValueIterator& ValueIterator::operator=(const SelfType& other) {
copy(other);
return *this;
}
} // namespace Json

File diff suppressed because it is too large.

View File

@ -1,8 +0,0 @@
Import( 'env buildLibrary' )
buildLibrary( env, Split( """
json_reader.cpp
json_value.cpp
json_writer.cpp
""" ),
'json' )

View File

@ -1,14 +0,0 @@
// DO NOT EDIT. This file is generated by CMake from "version"
// and "version.h.in" files.
// Run CMake configure step to update it.
#ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED
# define JSONCPP_VERSION_STRING "@JSONCPP_VERSION@"
# define JSONCPP_VERSION_MAJOR @JSONCPP_VERSION_MAJOR@
# define JSONCPP_VERSION_MINOR @JSONCPP_VERSION_MINOR@
# define JSONCPP_VERSION_PATCH @JSONCPP_VERSION_PATCH@
# define JSONCPP_VERSION_QUALIFIER
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))
#endif // JSON_VERSION_H_INCLUDED
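JSONCPP_VERSION_HEXA packs the major, minor and patch numbers into bits 24-31, 16-23 and 8-15 of a single integer, so client code can gate on a minimum version with one preprocessor comparison. A minimal sketch follows; it assumes the configured header is installed as <json/version.h> (as listed among the public headers in this tree), and the 1.6.0 threshold and EXAMPLE_HAS_RECENT_JSONCPP macro are made-up illustrations.

#include <cstdio>
#include <json/version.h>   // the configured copy of the template above

// Arbitrary example threshold: require at least jsoncpp 1.6.0.
#if JSONCPP_VERSION_HEXA >= ((1 << 24) | (6 << 16) | (0 << 8))
#define EXAMPLE_HAS_RECENT_JSONCPP 1
#else
#define EXAMPLE_HAS_RECENT_JSONCPP 0
#endif

int main() {
  std::printf("built against jsoncpp %s (>= 1.6.0: %d)\n",
              JSONCPP_VERSION_STRING, EXAMPLE_HAS_RECENT_JSONCPP);
  return 0;
}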

View File

@ -1,38 +0,0 @@
# vim: et ts=4 sts=4 sw=4 tw=0
ADD_EXECUTABLE( jsoncpp_test
jsontest.cpp
jsontest.h
main.cpp
)
IF(BUILD_SHARED_LIBS)
ADD_DEFINITIONS( -DJSON_DLL )
TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib)
ELSE(BUILD_SHARED_LIBS)
TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib_static)
ENDIF(BUILD_SHARED_LIBS)
# another way to solve issue #90
#set_target_properties(jsoncpp_test PROPERTIES COMPILE_FLAGS -ffloat-store)
# Run unit tests in post-build
# (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?)
IF(JSONCPP_WITH_POST_BUILD_UNITTEST)
IF(BUILD_SHARED_LIBS)
# First, copy the shared lib, for Microsoft.
# Then, run the test executable.
ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different $<TARGET_FILE:jsoncpp_lib> $<TARGET_FILE_DIR:jsoncpp_test>
COMMAND $<TARGET_FILE:jsoncpp_test>)
ELSE(BUILD_SHARED_LIBS)
# Just run the test executable.
ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
POST_BUILD
COMMAND $<TARGET_FILE:jsoncpp_test>)
ENDIF(BUILD_SHARED_LIBS)
ENDIF(JSONCPP_WITH_POST_BUILD_UNITTEST)
SET_TARGET_PROPERTIES(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test)

View File

@ -1,443 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC
#include "jsontest.h"
#include <stdio.h>
#include <string>
#if defined(_MSC_VER)
// Used to install a report hook that prevents dialogs on assertions and errors.
#include <crtdbg.h>
#endif // if defined(_MSC_VER)
#if defined(_WIN32)
// Used to prevent dialog on memory fault.
// Limits headers included by Windows.h
#define WIN32_LEAN_AND_MEAN
#define NOSERVICE
#define NOMCX
#define NOIME
#define NOSOUND
#define NOCOMM
#define NORPC
#define NOGDI
#define NOUSER
#define NODRIVERS
#define NOLOGERROR
#define NOPROFILER
#define NOMEMMGR
#define NOLFILEIO
#define NOOPENFILE
#define NORESOURCE
#define NOATOM
#define NOLANGUAGE
#define NOLSTRING
#define NODBCS
#define NOKEYBOARDINFO
#define NOGDICAPMASKS
#define NOCOLOR
#define NOGDIOBJ
#define NODRAWTEXT
#define NOTEXTMETRIC
#define NOSCALABLEFONT
#define NOBITMAP
#define NORASTEROPS
#define NOMETAFILE
#define NOSYSMETRICS
#define NOSYSTEMPARAMSINFO
#define NOMSG
#define NOWINSTYLES
#define NOWINOFFSETS
#define NOSHOWWINDOW
#define NODEFERWINDOWPOS
#define NOVIRTUALKEYCODES
#define NOKEYSTATES
#define NOWH
#define NOMENUS
#define NOSCROLL
#define NOCLIPBOARD
#define NOICONS
#define NOMB
#define NOSYSCOMMANDS
#define NOMDI
#define NOCTLMGR
#define NOWINMESSAGES
#include <windows.h>
#endif // if defined(_WIN32)
namespace JsonTest {
// class TestResult
// //////////////////////////////////////////////////////////////////
TestResult::TestResult()
: predicateId_(1), lastUsedPredicateId_(0), messageTarget_(0) {
// The root predicate has id 0
rootPredicateNode_.id_ = 0;
rootPredicateNode_.next_ = 0;
predicateStackTail_ = &rootPredicateNode_;
}
void TestResult::setTestName(const std::string& name) { name_ = name; }
TestResult&
TestResult::addFailure(const char* file, unsigned int line, const char* expr) {
/// Walks the PredicateContext stack adding them to failures_ if not already
/// added.
unsigned int nestingLevel = 0;
PredicateContext* lastNode = rootPredicateNode_.next_;
for (; lastNode != 0; lastNode = lastNode->next_) {
if (lastNode->id_ > lastUsedPredicateId_) // new PredicateContext
{
lastUsedPredicateId_ = lastNode->id_;
addFailureInfo(
lastNode->file_, lastNode->line_, lastNode->expr_, nestingLevel);
// Link the PredicateContext to the failure for message target when
// popping the PredicateContext.
lastNode->failure_ = &(failures_.back());
}
++nestingLevel;
}
// Adds the failed assertion
addFailureInfo(file, line, expr, nestingLevel);
messageTarget_ = &(failures_.back());
return *this;
}
void TestResult::addFailureInfo(const char* file,
unsigned int line,
const char* expr,
unsigned int nestingLevel) {
Failure failure;
failure.file_ = file;
failure.line_ = line;
if (expr) {
failure.expr_ = expr;
}
failure.nestingLevel_ = nestingLevel;
failures_.push_back(failure);
}
TestResult& TestResult::popPredicateContext() {
PredicateContext* lastNode = &rootPredicateNode_;
while (lastNode->next_ != 0 && lastNode->next_->next_ != 0) {
lastNode = lastNode->next_;
}
// Set message target to popped failure
PredicateContext* tail = lastNode->next_;
if (tail != 0 && tail->failure_ != 0) {
messageTarget_ = tail->failure_;
}
// Remove tail from list
predicateStackTail_ = lastNode;
lastNode->next_ = 0;
return *this;
}
bool TestResult::failed() const { return !failures_.empty(); }
unsigned int TestResult::getAssertionNestingLevel() const {
unsigned int level = 0;
const PredicateContext* lastNode = &rootPredicateNode_;
while (lastNode->next_ != 0) {
lastNode = lastNode->next_;
++level;
}
return level;
}
void TestResult::printFailure(bool printTestName) const {
if (failures_.empty()) {
return;
}
if (printTestName) {
printf("* Detail of %s test failure:\n", name_.c_str());
}
// Print in reverse to display the callstack in the right order
Failures::const_iterator itEnd = failures_.end();
for (Failures::const_iterator it = failures_.begin(); it != itEnd; ++it) {
const Failure& failure = *it;
std::string indent(failure.nestingLevel_ * 2, ' ');
if (failure.file_) {
printf("%s%s(%d): ", indent.c_str(), failure.file_, failure.line_);
}
if (!failure.expr_.empty()) {
printf("%s\n", failure.expr_.c_str());
} else if (failure.file_) {
printf("\n");
}
if (!failure.message_.empty()) {
std::string reindented = indentText(failure.message_, indent + " ");
printf("%s\n", reindented.c_str());
}
}
}
std::string TestResult::indentText(const std::string& text,
const std::string& indent) {
std::string reindented;
std::string::size_type lastIndex = 0;
while (lastIndex < text.size()) {
std::string::size_type nextIndex = text.find('\n', lastIndex);
if (nextIndex == std::string::npos) {
nextIndex = text.size() - 1;
}
reindented += indent;
reindented += text.substr(lastIndex, nextIndex - lastIndex + 1);
lastIndex = nextIndex + 1;
}
return reindented;
}
TestResult& TestResult::addToLastFailure(const std::string& message) {
if (messageTarget_ != 0) {
messageTarget_->message_ += message;
}
return *this;
}
TestResult& TestResult::operator<<(Json::Int64 value) {
return addToLastFailure(Json::valueToString(value));
}
TestResult& TestResult::operator<<(Json::UInt64 value) {
return addToLastFailure(Json::valueToString(value));
}
TestResult& TestResult::operator<<(bool value) {
return addToLastFailure(value ? "true" : "false");
}
// class TestCase
// //////////////////////////////////////////////////////////////////
TestCase::TestCase() : result_(0) {}
TestCase::~TestCase() {}
void TestCase::run(TestResult& result) {
result_ = &result;
runTestCase();
}
// class Runner
// //////////////////////////////////////////////////////////////////
Runner::Runner() {}
Runner& Runner::add(TestCaseFactory factory) {
tests_.push_back(factory);
return *this;
}
unsigned int Runner::testCount() const {
return static_cast<unsigned int>(tests_.size());
}
std::string Runner::testNameAt(unsigned int index) const {
TestCase* test = tests_[index]();
std::string name = test->testName();
delete test;
return name;
}
void Runner::runTestAt(unsigned int index, TestResult& result) const {
TestCase* test = tests_[index]();
result.setTestName(test->testName());
printf("Testing %s: ", test->testName());
fflush(stdout);
#if JSON_USE_EXCEPTION
try {
#endif // if JSON_USE_EXCEPTION
test->run(result);
#if JSON_USE_EXCEPTION
}
catch (const std::exception& e) {
result.addFailure(__FILE__, __LINE__, "Unexpected exception caught:")
<< e.what();
}
#endif // if JSON_USE_EXCEPTION
delete test;
const char* status = result.failed() ? "FAILED" : "OK";
printf("%s\n", status);
fflush(stdout);
}
bool Runner::runAllTest(bool printSummary) const {
unsigned int count = testCount();
std::deque<TestResult> failures;
for (unsigned int index = 0; index < count; ++index) {
TestResult result;
runTestAt(index, result);
if (result.failed()) {
failures.push_back(result);
}
}
if (failures.empty()) {
if (printSummary) {
printf("All %d tests passed\n", count);
}
return true;
} else {
for (unsigned int index = 0; index < failures.size(); ++index) {
TestResult& result = failures[index];
result.printFailure(count > 1);
}
if (printSummary) {
unsigned int failedCount = static_cast<unsigned int>(failures.size());
unsigned int passedCount = count - failedCount;
printf("%d/%d tests passed (%d failure(s))\n",
passedCount,
count,
failedCount);
}
return false;
}
}
bool Runner::testIndex(const std::string& testName,
unsigned int& indexOut) const {
unsigned int count = testCount();
for (unsigned int index = 0; index < count; ++index) {
if (testNameAt(index) == testName) {
indexOut = index;
return true;
}
}
return false;
}
void Runner::listTests() const {
unsigned int count = testCount();
for (unsigned int index = 0; index < count; ++index) {
printf("%s\n", testNameAt(index).c_str());
}
}
int Runner::runCommandLine(int argc, const char* argv[]) const {
// typedef std::deque<std::string> TestNames;
Runner subrunner;
for (int index = 1; index < argc; ++index) {
std::string opt = argv[index];
if (opt == "--list-tests") {
listTests();
return 0;
} else if (opt == "--test-auto") {
preventDialogOnCrash();
} else if (opt == "--test") {
++index;
if (index < argc) {
unsigned int testNameIndex;
if (testIndex(argv[index], testNameIndex)) {
subrunner.add(tests_[testNameIndex]);
} else {
fprintf(stderr, "Test '%s' does not exist!\n", argv[index]);
return 2;
}
} else {
printUsage(argv[0]);
return 2;
}
} else {
printUsage(argv[0]);
return 2;
}
}
bool succeeded;
if (subrunner.testCount() > 0) {
succeeded = subrunner.runAllTest(subrunner.testCount() > 1);
} else {
succeeded = runAllTest(true);
}
return succeeded ? 0 : 1;
}
#if defined(_MSC_VER) && defined(_DEBUG)
// Hook MSVCRT assertions to prevent dialog from appearing
static int
msvcrtSilentReportHook(int reportType, char* message, int* /*returnValue*/) {
// The default CRT handling of errors and assertions is to display
// an error dialog to the user.
// Instead, when an error or an assertion occurs, we force the
// application to terminate using abort() after displaying
// the message on stderr.
if (reportType == _CRT_ERROR || reportType == _CRT_ASSERT) {
// Calling abort() causes the ReportHook to be called.
// The following is used to detect this case and let the
// error handler fall back on its default behaviour
// (displaying a warning message).
static volatile bool isAborting = false;
if (isAborting) {
return TRUE;
}
isAborting = true;
fprintf(stderr, "CRT Error/Assert:\n%s\n", message);
fflush(stderr);
abort();
}
// Let other report types (_CRT_WARNING) be handled as they would be by default
return FALSE;
}
#endif // if defined(_MSC_VER)
void Runner::preventDialogOnCrash() {
#if defined(_MSC_VER) && defined(_DEBUG)
// Install a hook to prevent MSVCRT errors and assertions from
// popping up a dialog.
// This function is a no-op in release configuration
// (which causes a warning since msvcrtSilentReportHook is not referenced).
_CrtSetReportHook(&msvcrtSilentReportHook);
#endif // if defined(_MSC_VER)
// @todo investigate this handler (for buffer overflow)
// _set_security_error_handler
#if defined(_WIN32)
// Prevents the system from popping a dialog for debugging if the
// application fails due to invalid memory access.
SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX |
SEM_NOOPENFILEERRORBOX);
#endif // if defined(_WIN32)
}
void Runner::printUsage(const char* appName) {
printf("Usage: %s [options]\n"
"\n"
"If --test is not specified, then all the test cases be run.\n"
"\n"
"Valid options:\n"
"--list-tests: print the name of all test cases on the standard\n"
" output and exit.\n"
"--test TESTNAME: executes the test case with the specified name.\n"
" May be repeated.\n"
"--test-auto: prevent dialog prompting for debugging on crash.\n",
appName);
}
// Assertion functions
// //////////////////////////////////////////////////////////////////
TestResult& checkStringEqual(TestResult& result,
const std::string& expected,
const std::string& actual,
const char* file,
unsigned int line,
const char* expr) {
if (expected != actual) {
result.addFailure(file, line, expr);
result << "Expected: '" << expected << "'\n";
result << "Actual : '" << actual << "'";
}
return result;
}
} // namespace JsonTest
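For reference, the command-line handling implemented above is normally driven from a small main(); the sketch below is illustrative only (the project's actual main.cpp is not part of this excerpt, and the fixture and binary names are placeholders):

// Hypothetical driver for JsonTest::Runner; fixture registration is assumed
// to happen via the JSONTEST_REGISTER_FIXTURE macro declared in jsontest.h.
#include "jsontest.h"

int main(int argc, const char* argv[]) {
  JsonTest::Runner runner;
  // JSONTEST_REGISTER_FIXTURE(runner, SomeFixture, someTest);  // one per test case
  //
  // Typical invocations (binary name is illustrative):
  //   ./test_lib_json --list-tests
  //   ./test_lib_json --test SomeFixture/someTest
  //   ./test_lib_json --test-auto
  return runner.runCommandLine(argc, argv);
}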

View File

@ -1,280 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef JSONTEST_H_INCLUDED
#define JSONTEST_H_INCLUDED
#include <json/config.h>
#include <json/value.h>
#include <json/writer.h>
#include <stdio.h>
#include <deque>
#include <sstream>
#include <string>
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// Mini Unit Testing framework
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
/** \brief Unit testing framework.
* \warning: all assertions are non-aborting, test case execution will continue
* even if an assertion fails.
* This constraint is for portability: the framework needs to compile
* on Visual Studio 6 and must not require exception usage.
*/
namespace JsonTest {
class Failure {
public:
const char* file_;
unsigned int line_;
std::string expr_;
std::string message_;
unsigned int nestingLevel_;
};
/// Context used to create the assertion callstack on failure.
/// Must be a POD to allow inline initialisation without stepping
/// into the debugger.
struct PredicateContext {
typedef unsigned int Id;
Id id_;
const char* file_;
unsigned int line_;
const char* expr_;
PredicateContext* next_;
/// Related Failure, set when the PredicateContext is converted
/// into a Failure.
Failure* failure_;
};
class TestResult {
public:
TestResult();
/// \internal Implementation detail for assertion macros
/// Not encapsulated to prevent stepping into it when debugging failed assertions
/// Incremented by one on assertion predicate entry, decreased by one
/// by addPredicateContext().
PredicateContext::Id predicateId_;
/// \internal Implementation detail for predicate macros
PredicateContext* predicateStackTail_;
void setTestName(const std::string& name);
/// Adds an assertion failure.
TestResult&
addFailure(const char* file, unsigned int line, const char* expr = 0);
/// Removes the last PredicateContext added to the predicate stack
/// chained list.
/// Next messages will be targeted at the PredicateContext that was removed.
TestResult& popPredicateContext();
bool failed() const;
void printFailure(bool printTestName) const;
// Generic operator that will work with anything ostream can deal with.
template <typename T> TestResult& operator<<(const T& value) {
std::ostringstream oss;
oss.precision(16);
oss.setf(std::ios_base::floatfield);
oss << value;
return addToLastFailure(oss.str());
}
// Specialized versions.
TestResult& operator<<(bool value);
// std::ostream does not support 64-bit integers on all STL implementations
TestResult& operator<<(Json::Int64 value);
TestResult& operator<<(Json::UInt64 value);
private:
TestResult& addToLastFailure(const std::string& message);
unsigned int getAssertionNestingLevel() const;
/// Adds a failure or a predicate context
void addFailureInfo(const char* file,
unsigned int line,
const char* expr,
unsigned int nestingLevel);
static std::string indentText(const std::string& text,
const std::string& indent);
typedef std::deque<Failure> Failures;
Failures failures_;
std::string name_;
PredicateContext rootPredicateNode_;
PredicateContext::Id lastUsedPredicateId_;
/// Failure which is the target of the messages added using operator <<
Failure* messageTarget_;
};
class TestCase {
public:
TestCase();
virtual ~TestCase();
void run(TestResult& result);
virtual const char* testName() const = 0;
protected:
TestResult* result_;
private:
virtual void runTestCase() = 0;
};
/// Function pointer type for TestCase factory
typedef TestCase* (*TestCaseFactory)();
class Runner {
public:
Runner();
/// Adds a test to the suite
Runner& add(TestCaseFactory factory);
/// Runs tests as specified on the command line.
/// If no command-line arguments are provided, run all tests.
/// If --list-tests is provided, then print the list of all test cases
/// If --test <testname> is provided, then run test testname.
int runCommandLine(int argc, const char* argv[]) const;
/// Runs all the test cases
bool runAllTest(bool printSummary) const;
/// Returns the number of test cases in the suite
unsigned int testCount() const;
/// Returns the name of the test case at the specified index
std::string testNameAt(unsigned int index) const;
/// Runs the test case at the specified index using the specified TestResult
void runTestAt(unsigned int index, TestResult& result) const;
static void printUsage(const char* appName);
private: // prevents copy construction and assignment
Runner(const Runner& other);
Runner& operator=(const Runner& other);
private:
void listTests() const;
bool testIndex(const std::string& testName, unsigned int& index) const;
static void preventDialogOnCrash();
private:
typedef std::deque<TestCaseFactory> Factories;
Factories tests_;
};
template <typename T, typename U>
TestResult& checkEqual(TestResult& result,
T expected,
U actual,
const char* file,
unsigned int line,
const char* expr) {
if (static_cast<U>(expected) != actual) {
result.addFailure(file, line, expr);
result << "Expected: " << static_cast<U>(expected) << "\n";
result << "Actual : " << actual;
}
return result;
}
TestResult& checkStringEqual(TestResult& result,
const std::string& expected,
const std::string& actual,
const char* file,
unsigned int line,
const char* expr);
} // namespace JsonTest
/// \brief Asserts that the given expression is true.
/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y;
/// JSONTEST_ASSERT( x == y );
#define JSONTEST_ASSERT(expr) \
if (expr) { \
} else \
result_->addFailure(__FILE__, __LINE__, #expr)
/// \brief Asserts that the given predicate is true.
/// The predicate may do other assertions and be a member function of the
/// fixture.
#define JSONTEST_ASSERT_PRED(expr) \
{ \
JsonTest::PredicateContext _minitest_Context = { \
result_->predicateId_, __FILE__, __LINE__, #expr, NULL, NULL \
}; \
result_->predicateStackTail_->next_ = &_minitest_Context; \
result_->predicateId_ += 1; \
result_->predicateStackTail_ = &_minitest_Context; \
(expr); \
result_->popPredicateContext(); \
}
/// \brief Asserts that two values are equal.
#define JSONTEST_ASSERT_EQUAL(expected, actual) \
JsonTest::checkEqual(*result_, \
expected, \
actual, \
__FILE__, \
__LINE__, \
#expected " == " #actual)
/// \brief Asserts that two values are equal.
#define JSONTEST_ASSERT_STRING_EQUAL(expected, actual) \
JsonTest::checkStringEqual(*result_, \
std::string(expected), \
std::string(actual), \
__FILE__, \
__LINE__, \
#expected " == " #actual)
/// \brief Asserts that a given expression throws an exception
#define JSONTEST_ASSERT_THROWS(expr) \
{ \
bool _threw = false; \
try { \
expr; \
} \
catch (...) { \
_threw = true; \
} \
if (!_threw) \
result_->addFailure( \
__FILE__, __LINE__, "expected exception thrown: " #expr); \
}
/// \brief Begin a fixture test case.
#define JSONTEST_FIXTURE(FixtureType, name) \
class Test##FixtureType##name : public FixtureType { \
public: \
static JsonTest::TestCase* factory() { \
return new Test##FixtureType##name(); \
} \
\
public: /* overridden from TestCase */ \
virtual const char* testName() const { return #FixtureType "/" #name; } \
virtual void runTestCase(); \
}; \
\
void Test##FixtureType##name::runTestCase()
#define JSONTEST_FIXTURE_FACTORY(FixtureType, name) \
&Test##FixtureType##name::factory
#define JSONTEST_REGISTER_FIXTURE(runner, FixtureType, name) \
(runner).add(JSONTEST_FIXTURE_FACTORY(FixtureType, name))
#endif // ifndef JSONTEST_H_INCLUDED
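To make the macro plumbing above concrete, a minimal fixture test could be declared and registered roughly as follows; ValueTest and memberCount are hypothetical names chosen for illustration, not identifiers taken from this diff:

// Illustrative use of the JSONTEST_* macros above (all names are hypothetical).
struct ValueTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(ValueTest, memberCount) {
  Json::Value obj;
  obj["answer"] = 42;
  JSONTEST_ASSERT_EQUAL(1u, obj.size());            // expands to JsonTest::checkEqual(...)
  JSONTEST_ASSERT_EQUAL(42, obj["answer"].asInt());
}

// In main(): JSONTEST_REGISTER_FIXTURE(runner, ValueTest, memberCount);
// The registered test is then reported as "ValueTest/memberCount" by --list-tests.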

File diff suppressed because it is too large

View File

@ -1,10 +0,0 @@
Import( 'env_testing buildUnitTests' )
buildUnitTests( env_testing, Split( """
main.cpp
jsontest.cpp
""" ),
'test_lib_json' )
# For 'check' to work, 'libs' must be built first.
env_testing.Depends('test_lib_json', '#libs')

View File

@ -1,16 +0,0 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
"""Removes all files created during testing."""
import glob
import os
paths = []
for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]:
paths += glob.glob('data/' + pattern)
for path in paths:
os.unlink(path)

View File

@ -1 +0,0 @@
[ 1 2 3]

View File

@ -1 +0,0 @@
.=[]

View File

@ -1 +0,0 @@
[]

View File

@ -1,2 +0,0 @@
.=[]
.[0]=1

View File

@ -1 +0,0 @@
[1]

View File

@ -1,6 +0,0 @@
.=[]
.[0]=1
.[1]=2
.[2]=3
.[3]=4
.[4]=5

View File

@ -1 +0,0 @@
[ 1, 2 , 3,4,5]

View File

@ -1,5 +0,0 @@
.=[]
.[0]=1
.[1]="abc"
.[2]=12.3
.[3]=-4

View File

@ -1 +0,0 @@
[1, "abc" , 12.3, -4]

View File

@ -1,100 +0,0 @@
.=[]
.[0]=1
.[1]=2
.[2]=3
.[3]=4
.[4]=5
.[5]=6
.[6]=7
.[7]=8
.[8]=9
.[9]=10
.[10]=11
.[11]=12
.[12]=13
.[13]=14
.[14]=15
.[15]=16
.[16]=17
.[17]=18
.[18]=19
.[19]=20
.[20]=21
.[21]=22
.[22]=23
.[23]=24
.[24]=25
.[25]=26
.[26]=27
.[27]=28
.[28]=29
.[29]=30
.[30]=31
.[31]=32
.[32]=33
.[33]=34
.[34]=35
.[35]=36
.[36]=37
.[37]=38
.[38]=39
.[39]=40
.[40]=41
.[41]=42
.[42]=43
.[43]=44
.[44]=45
.[45]=46
.[46]=47
.[47]=48
.[48]=49
.[49]=50
.[50]=51
.[51]=52
.[52]=53
.[53]=54
.[54]=55
.[55]=56
.[56]=57
.[57]=58
.[58]=59
.[59]=60
.[60]=61
.[61]=62
.[62]=63
.[63]=64
.[64]=65
.[65]=66
.[66]=67
.[67]=68
.[68]=69
.[69]=70
.[70]=71
.[71]=72
.[72]=73
.[73]=74
.[74]=75
.[75]=76
.[76]=77
.[77]=78
.[78]=79
.[79]=80
.[80]=81
.[81]=82
.[82]=83
.[83]=84
.[84]=85
.[85]=86
.[86]=87
.[87]=88
.[88]=89
.[89]=90
.[90]=91
.[91]=92
.[92]=93
.[93]=94
.[94]=95
.[95]=96
.[96]=97
.[97]=98
.[98]=99

View File

@ -1 +0,0 @@
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99]

View File

@ -1,5 +0,0 @@
.=[]
.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"
.[2]="ccccccccccccccccccccccc"
.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd"

View File

@ -1,4 +0,0 @@
[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
"ccccccccccccccccccccccc",
"dddddddddddddddddddddddddddddddddddddddddddddddddddd" ]

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View File

@ -1 +0,0 @@
.=123456789

View File

@ -1 +0,0 @@
0123456789

View File

@ -1 +0,0 @@
.=-123456789

View File

@ -1 +0,0 @@
-0123456789

View File

@ -1,3 +0,0 @@
.=1.2345678

View File

@ -1,3 +0,0 @@
1.2345678

View File

@ -1,2 +0,0 @@
.="abcdef"

View File

@ -1,2 +0,0 @@
"abcdef"

View File

@ -1,2 +0,0 @@
.=null

View File

@ -1,2 +0,0 @@
null

View File

@ -1,2 +0,0 @@
.=true

Some files were not shown because too many files have changed in this diff