mirror of https://github.com/holub/mame
synced 2025-06-05 20:33:45 +03:00

Merge remote-tracking branch 'upstream/master'

Commit: 28ee343aff
.gitignore (vendored): 1 added line

@@ -3,6 +3,7 @@
 !/*/
 !/.gitattributes
 !/.gitignore
+!/.travis.yml
 !/makefile
 !/mame.doxygen
 !/*.md
.travis.yml (new file): 18 added lines

@@ -0,0 +1,18 @@
+language: cpp
+compiler:
+  - gcc
+  - clang
+env:
+  - SUBTARGET=arcade MAME=mamearcade64
+  - SUBTARGET=mess MAME=mess64
+script:
+  - if [ $CC == 'clang' ];
+    then make -j2 linux_x64_clang && ./$MAME -validate;
+    else make -j2 OPTIMIZE=0 && ./$MAME -validate;
+    fi
+sudo: required
+before_install:
+  - sudo add-apt-repository ppa:zoogie/sdl2-snapshots -y
+  - sudo add-apt-repository ppa:shahar-evron/qt-backports -y
+  - sudo apt-get update -qq
+  - sudo apt-get install -y libsdl2-dev libsdl2-ttf-dev libasound2-dev libqt4-dev libqt4-dev-bin
3rdparty/dxsdk/Include/d3dcommon.h (vendored): 15 added lines

@@ -81,6 +81,21 @@ enum D3D_FEATURE_LEVEL
 } D3D_FEATURE_LEVEL;

+#define D3D_FL9_1_REQ_TEXTURE1D_U_DIMENSION 2048
+#define D3D_FL9_3_REQ_TEXTURE1D_U_DIMENSION 4096
+#define D3D_FL9_1_REQ_TEXTURE2D_U_OR_V_DIMENSION 2048
+#define D3D_FL9_3_REQ_TEXTURE2D_U_OR_V_DIMENSION 4096
+#define D3D_FL9_1_REQ_TEXTURECUBE_DIMENSION 512
+#define D3D_FL9_3_REQ_TEXTURECUBE_DIMENSION 4096
+#define D3D_FL9_1_REQ_TEXTURE3D_U_V_OR_W_DIMENSION 256
+#define D3D_FL9_1_DEFAULT_MAX_ANISOTROPY 2
+#define D3D_FL9_1_IA_PRIMITIVE_MAX_COUNT 65535
+#define D3D_FL9_2_IA_PRIMITIVE_MAX_COUNT 1048575
+#define D3D_FL9_1_SIMULTANEOUS_RENDER_TARGET_COUNT 1
+#define D3D_FL9_3_SIMULTANEOUS_RENDER_TARGET_COUNT 4
+#define D3D_FL9_1_MAX_TEXTURE_REPEAT 128
+#define D3D_FL9_2_MAX_TEXTURE_REPEAT 2048
+#define D3D_FL9_3_MAX_TEXTURE_REPEAT 8192
 typedef
 enum D3D_PRIMITIVE_TOPOLOGY
     { D3D_PRIMITIVE_TOPOLOGY_UNDEFINED = 0,
       D3D_PRIMITIVE_TOPOLOGY_POINTLIST = 1,
3rdparty/jsoncpp/.gitignore (vendored): 25 changed lines

@@ -10,4 +10,27 @@
 /libs/
 /doc/doxyfile
 /dist/
-/include/json/version.h
+#/version
+#/include/json/version.h
+
+# MSVC project files:
+*.sln
+*.vcxproj
+*.filters
+*.user
+*.sdf
+*.opensdf
+*.suo
+
+# MSVC build files:
+*.lib
+*.obj
+*.tlog/
+*.pdb
+
+# CMake-generated files:
+CMakeFiles/
+CTestTestFile.cmake
+cmake_install.cmake
+pkg-config/jsoncpp.pc
+jsoncpp_lib_static.dir/
3rdparty/jsoncpp/.travis.yml (vendored): 17 changed lines

@@ -2,17 +2,24 @@
 # http://about.travis-ci.org/docs/user/build-configuration/
 # This file can be validated on:
 # http://lint.travis-ci.org/
-before_install: sudo apt-get install cmake
+
+#before_install: sudo apt-get install -y cmake
+# cmake is pre-installed in Travis for both linux and osx
+
+before_install:
+  - sudo apt-get update -qq
+  - sudo apt-get install -qq valgrind
+os:
+  - linux
 language: cpp
 compiler:
   - gcc
   - clang
-script: cmake -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIBRARY -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE . && make
+script: ./travis.sh
 env:
   matrix:
-    - SHARED_LIBRARY=ON BUILD_TYPE=release VERBOSE_MAKE=false
-    - SHARED_LIBRARY=OFF BUILD_TYPE=release VERBOSE_MAKE=false
-    - SHARED_LIBRARY=OFF BUILD_TYPE=debug VERBOSE VERBOSE_MAKE=true
+    - SHARED_LIB=ON STATIC_LIB=ON CMAKE_PKG=ON BUILD_TYPE=release VERBOSE_MAKE=false
+    - SHARED_LIB=OFF STATIC_LIB=ON CMAKE_PKG=OFF BUILD_TYPE=debug VERBOSE_MAKE=true VERBOSE
 notifications:
   email:
     - aaronjjacobs@gmail.com
3rdparty/jsoncpp/CMakeLists.txt (vendored): 50 changed lines

@@ -1,12 +1,16 @@
 # vim: et ts=4 sts=4 sw=4 tw=0

 CMAKE_MINIMUM_REQUIRED(VERSION 2.8.5)
 PROJECT(jsoncpp)
 ENABLE_TESTING()

-OPTION(JSONCPP_WITH_TESTS "Compile and run JsonCpp test executables" ON)
+OPTION(JSONCPP_WITH_TESTS "Compile and (for jsoncpp_check) run JsonCpp test executables" ON)
 OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON)
 OPTION(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occurs" OFF)
 OPTION(JSONCPP_WITH_PKGCONFIG_SUPPORT "Generate and install .pc files" ON)
+OPTION(JSONCPP_WITH_CMAKE_PACKAGE "Generate and install cmake package files" OFF)
+OPTION(BUILD_SHARED_LIBS "Build jsoncpp_lib as a shared library." OFF)
+OPTION(BUILD_STATIC_LIBS "Build jsoncpp_lib static library." ON)

 # Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix
 IF(NOT WIN32)
@@ -17,30 +21,21 @@ IF(NOT WIN32)
     ENDIF(NOT CMAKE_BUILD_TYPE)
 ENDIF(NOT WIN32)

+SET(DEBUG_LIBNAME_SUFFIX "" CACHE STRING "Optional suffix to append to the library name for a debug build")
 SET(LIB_SUFFIX "" CACHE STRING "Optional arch-dependent suffix for the library installation directory")

 SET(RUNTIME_INSTALL_DIR bin
     CACHE PATH "Install dir for executables and dlls")
-SET(ARCHIVE_INSTALL_DIR lib${LIB_SUFFIX}
+SET(ARCHIVE_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}
     CACHE PATH "Install dir for static libraries")
-SET(LIBRARY_INSTALL_DIR lib${LIB_SUFFIX}
+SET(LIBRARY_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}
     CACHE PATH "Install dir for shared libraries")
-SET(INCLUDE_INSTALL_DIR include
+SET(INCLUDE_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/include
     CACHE PATH "Install dir for headers")
-SET(PACKAGE_INSTALL_DIR lib${LIB_SUFFIX}/cmake
-    CACHE PATH "Install dir for cmake package config files")
-MARK_AS_ADVANCED( RUNTIME_INSTALL_DIR ARCHIVE_INSTALL_DIR INCLUDE_INSTALL_DIR PACKAGE_INSTALL_DIR )
-
-# This ensures shared DLL are in the same dir as executable on Windows.
-# Put all executables / libraries are in a project global directory.
-SET(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib
-    CACHE PATH "Single directory for all static libraries.")
-SET(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib
-    CACHE PATH "Single directory for all dynamic libraries on Unix.")
-SET(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/bin
-    CACHE PATH "Single directory for all executable and dynamic libraries on Windows.")
-MARK_AS_ADVANCED( CMAKE_RUNTIME_OUTPUT_DIRECTORY CMAKE_LIBRARY_OUTPUT_DIRECTORY CMAKE_ARCHIVE_OUTPUT_DIRECTORY )

 # Set variable named ${VAR_NAME} to value ${VALUE}
 FUNCTION(set_using_dynamic_name VAR_NAME VALUE)
     SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE)
@@ -64,17 +59,24 @@ MACRO(jsoncpp_parse_version VERSION_TEXT OUPUT_PREFIX)
 ENDMACRO(jsoncpp_parse_version)

 # Read out version from "version" file
-FILE(STRINGS "version" JSONCPP_VERSION)
-
+#FILE(STRINGS "version" JSONCPP_VERSION)
+#SET( JSONCPP_VERSION_MAJOR X )
+#SET( JSONCPP_VERSION_MINOR Y )
+#SET( JSONCPP_VERSION_PATCH Z )
+SET( JSONCPP_VERSION 1.6.2 )
 jsoncpp_parse_version( ${JSONCPP_VERSION} JSONCPP_VERSION )
-IF(NOT JSONCPP_VERSION_FOUND)
-    MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z")
-ENDIF(NOT JSONCPP_VERSION_FOUND)
+#IF(NOT JSONCPP_VERSION_FOUND)
+#    MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z")
+#ENDIF(NOT JSONCPP_VERSION_FOUND)

 MESSAGE(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINOR}.${JSONCPP_VERSION_PATCH}")
 # File version.h is only regenerated on CMake configure step
 CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in"
-                "${PROJECT_SOURCE_DIR}/include/json/version.h" )
+                "${PROJECT_SOURCE_DIR}/include/json/version.h"
+                NEWLINE_STYLE UNIX )
+CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/version.in"
+                "${PROJECT_SOURCE_DIR}/version"
+                NEWLINE_STYLE UNIX )

 macro(UseCompilationWarningAsError)
     if ( MSVC )
@@ -93,6 +95,14 @@ if ( MSVC )
     set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ")
 endif( MSVC )

+if (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+    # using regular Clang or AppleClang
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wshorten-64-to-32")
+elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
+    # using GCC
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x -Wall -Wextra -pedantic")
+endif()
+
 IF(JSONCPP_WITH_WARNING_AS_ERROR)
     UseCompilationWarningAsError()
 ENDIF(JSONCPP_WITH_WARNING_AS_ERROR)
3rdparty/jsoncpp/NEWS.txt (vendored): 2 changed lines

@@ -80,7 +80,7 @@ New in SVN
   (e.g. MSVC 2008 command prompt in start menu) before running scons.

 - Added support for amalgamated source and header generation (a la sqlite).
-  Refer to README.txt section "Generating amalgamated source and header"
+  Refer to README.md section "Generating amalgamated source and header"
   for detail.

 * Value
3rdparty/jsoncpp/README.md (vendored): 104 changed lines

@@ -7,34 +7,62 @@ pairs.

 [json-org]: http://json.org/

-JsonCpp is a C++ library that allows manipulating JSON values, including
+[JsonCpp][] is a C++ library that allows manipulating JSON values, including
 serialization and deserialization to and from strings. It can also preserve
 existing comment in unserialization/serialization steps, making it a convenient
 format to store user input files.

+[JsonCpp]: http://open-source-parsers.github.io/jsoncpp-docs/doxygen/index.html
+
+## A note on backward-compatibility
+Very soon, we are switching to C++11 only. For older compilers, try the `pre-C++11` branch.
+* `1.y.z` is built with C++11.
+* `0.y.z` can be used with older compilers.
+* Major versions maintain binary-compatibility.
+
-Using JsonCpp in your project
------------------------------
-
-The recommended approach to integrating JsonCpp in your project is to build
-the amalgamated source (a single `.cpp` file) with your own build system. This
-ensures consistency of compilation flags and ABI compatibility. See the section
-"Generating amalgamated source and header" for instructions.
+# Using JsonCpp in your project
+
+The recommended approach to integrating JsonCpp in your project is to include
+the [amalgamated source](#generating-amalgamated-source-and-header) (a single
+`.cpp` file and two `.h` files) in your project, and compile and build as you
+would any other source file. This ensures consistency of compilation flags and
+ABI compatibility, issues which arise when building shared or static
+libraries. See the next section for instructions.

 The `include/` should be added to your compiler include path. Jsoncpp headers
 should be included as follow:

     #include <json/json.h>

-If JsonCpp was build as a dynamic library on Windows, then your project needs to
+If JsonCpp was built as a dynamic library on Windows, then your project needs to
 define the macro `JSON_DLL`.

+Generating amalgamated source and header
+----------------------------------------
+JsonCpp is provided with a script to generate a single header and a single
+source file to ease inclusion into an existing project. The amalgamated source
+can be generated at any time by running the following command from the
+top-directory (this requires Python 2.6):
+
+    python amalgamate.py
+
+It is possible to specify header name. See the `-h` option for detail.
+
+By default, the following files are generated:
+* `dist/jsoncpp.cpp`: source file that needs to be added to your project.
+* `dist/json/json.h`: corresponding header file for use in your project. It is
+  equivalent to including `json/json.h` in non-amalgamated source. This header
+  only depends on standard headers.
+* `dist/json/json-forwards.h`: header that provides forward declaration of all
+  JsonCpp types.
+
+The amalgamated sources are generated by concatenating JsonCpp source in the
+correct order and defining the macro `JSON_IS_AMALGAMATION` to prevent inclusion
+of other headers.
+
-Building and testing with CMake
--------------------------------
+# Contributing to JsonCpp
+
+Building and testing with new CMake
+-----------------------------------
 [CMake][] is a C++ Makefiles/Solution generator. It is usually available on most
 Linux system as package. On Ubuntu:

@@ -57,7 +85,7 @@ Steps for generating solution/makefiles using `cmake-gui`:
 * Make "source code" point to the source directory.
 * Make "where to build the binary" point to the directory to use for the build.
 * Click on the "Grouped" check box.
-* Review JsonCpp build options (tick `JSONCPP_LIB_BUILD_SHARED` to build as a
+* Review JsonCpp build options (tick `BUILD_SHARED_LIBS` to build as a
   dynamic library).
 * Click the configure button at the bottom, then the generate button.
 * The generated solution/makefiles can be found in the binary directory.
@@ -66,19 +94,17 @@ Alternatively, from the command-line on Unix in the source directory:

     mkdir -p build/debug
     cd build/debug
-    cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=OFF -G "Unix Makefiles" ../..
+    cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_STATIC_LIBS=ON -DBUILD_SHARED_LIBS=OFF -DARCHIVE_INSTALL_DIR=. -G "Unix Makefiles" ../..
     make

-Running `cmake -`" will display the list of available generators (passed using
+Running `cmake -h` will display the list of available generators (passed using
 the `-G` option).

 By default CMake hides compilation commands. This can be modified by specifying
 `-DCMAKE_VERBOSE_MAKEFILE=true` when generating makefiles.

-
 Building and testing with SCons
 -------------------------------

+**Note:** The SCons-based build system is deprecated. Please use CMake; see the
+section above.
@@ -107,14 +133,7 @@ If you are building with Microsoft Visual Studio 2008, you need to set up the
 environment by running `vcvars32.bat` (e.g. MSVC 2008 command prompt) before
 running SCons.

-
-Running the tests manually
---------------------------
-
-Note that test can be run using SCons using the `check` target:
-
-    scons platform=$PLATFORM check
-
+## Running the tests manually
 You need to run tests manually only if you are troubleshooting an issue.

 In the instructions below, replace `path/to/jsontest` with the path of the
@@ -137,45 +156,21 @@ In the instructions below, replace `path/to/jsontest` with the path of the
     # You can run the tests using valgrind:
     python rununittests.py --valgrind path/to/test_lib_json

+## Running the tests using scons
+Note that tests can be run using SCons using the `check` target:
+
+    scons platform=$PLATFORM check
+
 Building the documentation
 --------------------------

 Run the Python script `doxybuild.py` from the top directory:

     python doxybuild.py --doxygen=$(which doxygen) --open --with-dot

 See `doxybuild.py --help` for options.

-
-Generating amalgamated source and header
-----------------------------------------
-
-JsonCpp is provided with a script to generate a single header and a single
-source file to ease inclusion into an existing project. The amalgamated source
-can be generated at any time by running the following command from the
-top-directory (this requires Python 2.6):
-
-    python amalgamate.py
-
-It is possible to specify header name. See the `-h` option for detail.
-
-By default, the following files are generated:
-* `dist/jsoncpp.cpp`: source file that needs to be added to your project.
-* `dist/json/json.h`: corresponding header file for use in your project. It is
-  equivalent to including `json/json.h` in non-amalgamated source. This header
-  only depends on standard headers.
-* `dist/json/json-forwards.h`: header that provides forward declaration of all
-  JsonCpp types.
-
-The amalgamated sources are generated by concatenating JsonCpp source in the
-correct order and defining the macro `JSON_IS_AMALGAMATION` to prevent inclusion
-of other headers.
-
-
 Adding a reader/writer test
 ---------------------------

 To add a test, you need to create two files in test/data:

 * a `TESTNAME.json` file, that contains the input document in JSON format.
@@ -195,10 +190,8 @@ The `TESTNAME.expected` file format is as follows:
 See the examples `test_complex_01.json` and `test_complex_01.expected` to better
 understand element paths.

-
 Understanding reader/writer test output
 ---------------------------------------
-
 When a test is run, output files are generated beside the input test files.
 Below is a short description of the content of each file:

@@ -215,10 +208,7 @@ Below is a short description of the content of each file:
 * `test_complex_01.process-output`: `jsontest` output, typically useful for
   understanding parsing errors.

-
 License
 -------
-
 See the `LICENSE` file for details. In summary, JsonCpp is licensed under the
 MIT license, or public domain if desired and recognized in your jurisdiction.
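As a quick orientation for the README changes above: the new text promotes the amalgamated-source workflow. A minimal sketch of that workflow, illustrative only and not part of the commit (the g++ invocation is an assumption; any C++ compiler with `dist/` on the include path works):

    # Run from the jsoncpp top directory.
    import subprocess

    # Writes dist/jsoncpp.cpp, dist/json/json.h and dist/json/json-forwards.h,
    # per the README's "Generating amalgamated source and header" section.
    subprocess.check_call(["python", "amalgamate.py"])

    # Compile the single amalgamated source into your own build.
    subprocess.check_call(["g++", "-Idist", "-c", "dist/jsoncpp.cpp", "-o", "jsoncpp.o"])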
3rdparty/jsoncpp/SConstruct (vendored): 2 changed lines

@@ -237,7 +237,7 @@ RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string )
 env.Alias( 'check' )

 srcdist_cmd = env['SRCDIST_ADD']( source = """
-    AUTHORS README.txt SConstruct
+    AUTHORS README.md SConstruct
     """.split() )
 env.Alias( 'src-dist', srcdist_cmd )
3rdparty/jsoncpp/amalgamate.py (vendored): 168 changed lines

@@ -1,6 +1,6 @@
 """Amalgate json-cpp library sources into a single source and header file.

-Requires Python 2.6
+Works with python2.6+ and python3.4+.

 Example of invocation (must be invoked from json-cpp top directory):
 python amalgate.py
@@ -10,46 +10,46 @@ import os.path
 import sys

 class AmalgamationFile:
-    def __init__( self, top_dir ):
+    def __init__(self, top_dir):
         self.top_dir = top_dir
         self.blocks = []

-    def add_text( self, text ):
-        if not text.endswith( "\n" ):
+    def add_text(self, text):
+        if not text.endswith("\n"):
             text += "\n"
-        self.blocks.append( text )
+        self.blocks.append(text)

-    def add_file( self, relative_input_path, wrap_in_comment=False ):
-        def add_marker( prefix ):
-            self.add_text( "" )
-            self.add_text( "// " + "/"*70 )
-            self.add_text( "// %s of content of file: %s" % (prefix, relative_input_path.replace("\\","/")) )
-            self.add_text( "// " + "/"*70 )
-            self.add_text( "" )
-        add_marker( "Beginning" )
-        f = open( os.path.join( self.top_dir, relative_input_path ), "rt" )
+    def add_file(self, relative_input_path, wrap_in_comment=False):
+        def add_marker(prefix):
+            self.add_text("")
+            self.add_text("// " + "/"*70)
+            self.add_text("// %s of content of file: %s" % (prefix, relative_input_path.replace("\\","/")))
+            self.add_text("// " + "/"*70)
+            self.add_text("")
+        add_marker("Beginning")
+        f = open(os.path.join(self.top_dir, relative_input_path), "rt")
         content = f.read()
         if wrap_in_comment:
             content = "/*\n" + content + "\n*/"
-        self.add_text( content )
+        self.add_text(content)
         f.close()
-        add_marker( "End" )
-        self.add_text( "\n\n\n\n" )
+        add_marker("End")
+        self.add_text("\n\n\n\n")

-    def get_value( self ):
-        return "".join( self.blocks ).replace("\r\n","\n")
+    def get_value(self):
+        return "".join(self.blocks).replace("\r\n","\n")

-    def write_to( self, output_path ):
-        output_dir = os.path.dirname( output_path )
-        if output_dir and not os.path.isdir( output_dir ):
-            os.makedirs( output_dir )
-        f = open( output_path, "wb" )
-        f.write( str.encode(self.get_value(), 'UTF-8') )
+    def write_to(self, output_path):
+        output_dir = os.path.dirname(output_path)
+        if output_dir and not os.path.isdir(output_dir):
+            os.makedirs(output_dir)
+        f = open(output_path, "wb")
+        f.write(str.encode(self.get_value(), 'UTF-8'))
         f.close()

-def amalgamate_source( source_top_dir=None,
+def amalgamate_source(source_top_dir=None,
                        target_source_path=None,
-                       header_include_path=None ):
+                       header_include_path=None):
     """Produces amalgated source.
     Parameters:
         source_top_dir: top-directory
@@ -57,69 +57,73 @@ def amalgamate_source(source_top_dir=None,
         header_include_path: generated header path relative to target_source_path.
     """
     print("Amalgating header...")
-    header = AmalgamationFile( source_top_dir )
-    header.add_text( "/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/)." )
-    header.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )
-    header.add_file( "LICENSE", wrap_in_comment=True )
-    header.add_text( "#ifndef JSON_AMALGATED_H_INCLUDED" )
-    header.add_text( "# define JSON_AMALGATED_H_INCLUDED" )
-    header.add_text( "/// If defined, indicates that the source file is amalgated" )
-    header.add_text( "/// to prevent private header inclusion." )
-    header.add_text( "#define JSON_IS_AMALGAMATION" )
-    header.add_file( "include/json/version.h" )
-    header.add_file( "include/json/config.h" )
-    header.add_file( "include/json/forwards.h" )
-    header.add_file( "include/json/features.h" )
-    header.add_file( "include/json/value.h" )
-    header.add_file( "include/json/reader.h" )
-    header.add_file( "include/json/writer.h" )
-    header.add_file( "include/json/assertions.h" )
-    header.add_text( "#endif //ifndef JSON_AMALGATED_H_INCLUDED" )
+    header = AmalgamationFile(source_top_dir)
+    header.add_text("/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).")
+    header.add_text('/// It is intended to be used with #include "%s"' % header_include_path)
+    header.add_file("LICENSE", wrap_in_comment=True)
+    header.add_text("#ifndef JSON_AMALGATED_H_INCLUDED")
+    header.add_text("# define JSON_AMALGATED_H_INCLUDED")
+    header.add_text("/// If defined, indicates that the source file is amalgated")
+    header.add_text("/// to prevent private header inclusion.")
+    header.add_text("#define JSON_IS_AMALGAMATION")
+    header.add_file("include/json/version.h")
+    header.add_file("include/json/config.h")
+    header.add_file("include/json/forwards.h")
+    header.add_file("include/json/features.h")
+    header.add_file("include/json/value.h")
+    header.add_file("include/json/reader.h")
+    header.add_file("include/json/writer.h")
+    header.add_file("include/json/assertions.h")
+    header.add_text("#endif //ifndef JSON_AMALGATED_H_INCLUDED")

-    target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
+    target_header_path = os.path.join(os.path.dirname(target_source_path), header_include_path)
     print("Writing amalgated header to %r" % target_header_path)
-    header.write_to( target_header_path )
+    header.write_to(target_header_path)

-    base, ext = os.path.splitext( header_include_path )
+    base, ext = os.path.splitext(header_include_path)
     forward_header_include_path = base + "-forwards" + ext
     print("Amalgating forward header...")
-    header = AmalgamationFile( source_top_dir )
-    header.add_text( "/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/)." )
-    header.add_text( "/// It is intented to be used with #include <%s>" % forward_header_include_path )
-    header.add_text( "/// This header provides forward declaration for all JsonCpp types." )
-    header.add_file( "LICENSE", wrap_in_comment=True )
-    header.add_text( "#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED" )
-    header.add_text( "# define JSON_FORWARD_AMALGATED_H_INCLUDED" )
-    header.add_text( "/// If defined, indicates that the source file is amalgated" )
-    header.add_text( "/// to prevent private header inclusion." )
-    header.add_text( "#define JSON_IS_AMALGAMATION" )
-    header.add_file( "include/json/config.h" )
-    header.add_file( "include/json/forwards.h" )
-    header.add_text( "#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED" )
+    header = AmalgamationFile(source_top_dir)
+    header.add_text("/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).")
+    header.add_text('/// It is intended to be used with #include "%s"' % forward_header_include_path)
+    header.add_text("/// This header provides forward declaration for all JsonCpp types.")
+    header.add_file("LICENSE", wrap_in_comment=True)
+    header.add_text("#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED")
+    header.add_text("# define JSON_FORWARD_AMALGATED_H_INCLUDED")
+    header.add_text("/// If defined, indicates that the source file is amalgated")
+    header.add_text("/// to prevent private header inclusion.")
+    header.add_text("#define JSON_IS_AMALGAMATION")
+    header.add_file("include/json/config.h")
+    header.add_file("include/json/forwards.h")
+    header.add_text("#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED")

-    target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
-                                               forward_header_include_path )
+    target_forward_header_path = os.path.join(os.path.dirname(target_source_path),
+                                              forward_header_include_path)
     print("Writing amalgated forward header to %r" % target_forward_header_path)
-    header.write_to( target_forward_header_path )
+    header.write_to(target_forward_header_path)

     print("Amalgating source...")
-    source = AmalgamationFile( source_top_dir )
-    source.add_text( "/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/)." )
-    source.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )
-    source.add_file( "LICENSE", wrap_in_comment=True )
-    source.add_text( "" )
-    source.add_text( "#include <%s>" % header_include_path )
-    source.add_text( "" )
+    source = AmalgamationFile(source_top_dir)
+    source.add_text("/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).")
+    source.add_text('/// It is intended to be used with #include "%s"' % header_include_path)
+    source.add_file("LICENSE", wrap_in_comment=True)
+    source.add_text("")
+    source.add_text('#include "%s"' % header_include_path)
+    source.add_text("""
+#ifndef JSON_IS_AMALGAMATION
+#error "Compile with -I PATH_TO_JSON_DIRECTORY"
+#endif
+""")
+    source.add_text("")
     lib_json = "src/lib_json"
-    source.add_file( os.path.join(lib_json, "json_tool.h") )
-    source.add_file( os.path.join(lib_json, "json_reader.cpp") )
-    source.add_file( os.path.join(lib_json, "json_batchallocator.h") )
-    source.add_file( os.path.join(lib_json, "json_valueiterator.inl") )
-    source.add_file( os.path.join(lib_json, "json_value.cpp") )
-    source.add_file( os.path.join(lib_json, "json_writer.cpp") )
+    source.add_file(os.path.join(lib_json, "json_tool.h"))
+    source.add_file(os.path.join(lib_json, "json_reader.cpp"))
+    source.add_file(os.path.join(lib_json, "json_valueiterator.inl"))
+    source.add_file(os.path.join(lib_json, "json_value.cpp"))
+    source.add_file(os.path.join(lib_json, "json_writer.cpp"))

     print("Writing amalgated source to %r" % target_source_path)
-    source.write_to( target_source_path )
+    source.write_to(target_source_path)

 def main():
     usage = """%prog [options]
@@ -137,12 +141,12 @@ Generate a single amalgated source and header file from the sources.
     parser.enable_interspersed_args()
     options, args = parser.parse_args()

-    msg = amalgamate_source( source_top_dir=options.top_dir,
+    msg = amalgamate_source(source_top_dir=options.top_dir,
                              target_source_path=options.target_source_path,
-                             header_include_path=options.header_include_path )
+                             header_include_path=options.header_include_path)
     if msg:
-        sys.stderr.write( msg + "\n" )
-        sys.exit( 1 )
+        sys.stderr.write(msg + "\n")
+        sys.exit(1)
     else:
         print("Source succesfully amalagated")
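The function signature shown in the last hunk can also be driven from another script. A minimal sketch, illustrative only and not part of the commit (the paths are the script's documented defaults, assumed here):

    from amalgamate import amalgamate_source

    msg = amalgamate_source(source_top_dir=".",
                            target_source_path="dist/jsoncpp.cpp",
                            header_include_path="json/json.h")
    if msg:
        raise SystemExit(msg)  # amalgamate_source reports errors as a string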
3rdparty/jsoncpp/appveyor.yml (vendored, new file): 34 added lines

@@ -0,0 +1,34 @@
+# This is a comment.
+
+version: build.{build}
+
+os: Windows Server 2012 R2
+
+clone_folder: c:\projects\jsoncpp
+
+platform:
+  - Win32
+  - x64
+
+configuration:
+  - Debug
+  - Release
+
+# scripts to run before build
+before_build:
+  - echo "Running cmake..."
+  - cd c:\projects\jsoncpp
+  - cmake --version
+  - if %PLATFORM% == Win32 cmake .
+  - if %PLATFORM% == x64 cmake -G "Visual Studio 12 2013 Win64" .
+
+build:
+  project: jsoncpp.sln # path to Visual Studio solution or project
+
+deploy:
+  provider: GitHub
+  auth_token:
+    secure: K2Tp1q8pIZ7rs0Ot24ZMWuwr12Ev6Tc6QkhMjGQxoQG3ng1pXtgPasiJ45IDXGdg
+  on:
+    branch: master
+    appveyor_repo_tag: true
3rdparty/jsoncpp/dev.makefile (vendored): 27 changed lines

@@ -1,14 +1,35 @@
-all: build test-amalgamate
+# This is only for jsoncpp developers/contributors.
+# We use this to sign releases, generate documentation, etc.
+VER?=$(shell cat version)
+
+default:
+    @echo "VER=${VER}"
+sign: jsoncpp-${VER}.tar.gz
+    gpg --armor --detach-sign $<
+    gpg --verify $<.asc
+    # Then upload .asc to the release.
+jsoncpp-%.tar.gz:
+    curl https://github.com/open-source-parsers/jsoncpp/archive/$*.tar.gz -o $@
+dox:
+    python doxybuild.py --doxygen=$$(which doxygen) --in doc/web_doxyfile.in
+    rsync -va --delete dist/doxygen/jsoncpp-api-html-${VER}/ ../jsoncpp-docs/doxygen/
+    # Then 'git add -A' and 'git push' in jsoncpp-docs.
 build:
     mkdir -p build/debug
-    cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=ON -G "Unix Makefiles" ../..
+    cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_SHARED_LIBS=ON -G "Unix Makefiles" ../..
     make -C build/debug

-test-amalgamate: build
+# Currently, this depends on include/json/version.h generated
+# by cmake.
+test-amalgamate:
     python2.7 amalgamate.py
     python3.4 amalgamate.py
+    cd dist; gcc -I. -c jsoncpp.cpp

+valgrind:
+    valgrind --error-exitcode=42 --leak-check=full ./build/debug/src/test_lib_json/jsoncpp_test
+
+clean:
+    \rm -rf *.gz *.asc dist/

 .PHONY: build
3rdparty/jsoncpp/devtools/__init__.py (vendored): 7 changed lines

@@ -1 +1,6 @@
-# module
+# Copyright 2010 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+# module
3rdparty/jsoncpp/devtools/agent_vmw7.json (vendored): 4 changed lines

@@ -19,8 +19,8 @@
     },
     {"name": "shared_dll",
      "variables": [
-        ["JSONCPP_LIB_BUILD_SHARED=true"],
-        ["JSONCPP_LIB_BUILD_SHARED=false"]
+        ["BUILD_SHARED_LIBS=true"],
+        ["BUILD_SHARED_LIBS=false"]
      ]
     },
     {"name": "build_type",
3rdparty/jsoncpp/devtools/agent_vmxp.json (vendored): 4 changed lines

@@ -12,8 +12,8 @@
     },
     {"name": "shared_dll",
      "variables": [
-        ["JSONCPP_LIB_BUILD_SHARED=true"],
-        ["JSONCPP_LIB_BUILD_SHARED=false"]
+        ["BUILD_SHARED_LIBS=true"],
+        ["BUILD_SHARED_LIBS=false"]
      ]
     },
     {"name": "build_type",
3rdparty/jsoncpp/devtools/antglob.py (vendored): 111 changed lines

@@ -1,6 +1,9 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Baptiste Lepilleur, 2009
+# Copyright 2009 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

 from __future__ import print_function
 from dircache import listdir
@@ -54,9 +57,9 @@ LINKS = DIR_LINK | FILE_LINK
 ALL_NO_LINK = DIR | FILE
 ALL = DIR | FILE | LINKS

-_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' )
+_ANT_RE = re.compile(r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)')

-def ant_pattern_to_re( ant_pattern ):
+def ant_pattern_to_re(ant_pattern):
     """Generates a regular expression from the ant pattern.
     Matching convention:
        **/a: match 'a', 'dir/a', 'dir1/dir2/a'
@@ -65,30 +68,30 @@ def ant_pattern_to_re(ant_pattern):
     """
     rex = ['^']
     next_pos = 0
-    sep_rex = r'(?:/|%s)' % re.escape( os.path.sep )
+    sep_rex = r'(?:/|%s)' % re.escape(os.path.sep)
     ## print 'Converting', ant_pattern
-    for match in _ANT_RE.finditer( ant_pattern ):
+    for match in _ANT_RE.finditer(ant_pattern):
         ## print 'Matched', match.group()
         ## print match.start(0), next_pos
         if match.start(0) != next_pos:
-            raise ValueError( "Invalid ant pattern" )
+            raise ValueError("Invalid ant pattern")
         if match.group(1): # /**/
-            rex.append( sep_rex + '(?:.*%s)?' % sep_rex )
+            rex.append(sep_rex + '(?:.*%s)?' % sep_rex)
         elif match.group(2): # **/
-            rex.append( '(?:.*%s)?' % sep_rex )
+            rex.append('(?:.*%s)?' % sep_rex)
         elif match.group(3): # /**
-            rex.append( sep_rex + '.*' )
+            rex.append(sep_rex + '.*')
         elif match.group(4): # *
-            rex.append( '[^/%s]*' % re.escape(os.path.sep) )
+            rex.append('[^/%s]*' % re.escape(os.path.sep))
         elif match.group(5): # /
-            rex.append( sep_rex )
+            rex.append(sep_rex)
         else: # somepath
-            rex.append( re.escape(match.group(6)) )
+            rex.append(re.escape(match.group(6)))
         next_pos = match.end()
     rex.append('$')
-    return re.compile( ''.join( rex ) )
+    return re.compile(''.join(rex))

-def _as_list( l ):
+def _as_list(l):
     if isinstance(l, basestring):
         return l.split()
     return l
@@ -105,37 +108,37 @@ def glob(dir_path,
     dir_path = dir_path.replace('/',os.path.sep)
     entry_type_filter = entry_type

-    def is_pruned_dir( dir_name ):
+    def is_pruned_dir(dir_name):
         for pattern in prune_dirs:
-            if fnmatch.fnmatch( dir_name, pattern ):
+            if fnmatch.fnmatch(dir_name, pattern):
                 return True
         return False

-    def apply_filter( full_path, filter_rexs ):
+    def apply_filter(full_path, filter_rexs):
         """Return True if at least one of the filter regular expression match full_path."""
         for rex in filter_rexs:
-            if rex.match( full_path ):
+            if rex.match(full_path):
                 return True
         return False

-    def glob_impl( root_dir_path ):
+    def glob_impl(root_dir_path):
         child_dirs = [root_dir_path]
         while child_dirs:
             dir_path = child_dirs.pop()
-            for entry in listdir( dir_path ):
-                full_path = os.path.join( dir_path, entry )
+            for entry in listdir(dir_path):
+                full_path = os.path.join(dir_path, entry)
 ##                print 'Testing:', full_path,
-                is_dir = os.path.isdir( full_path )
-                if is_dir and not is_pruned_dir( entry ): # explore child directory ?
+                is_dir = os.path.isdir(full_path)
+                if is_dir and not is_pruned_dir(entry): # explore child directory ?
 ##                    print '===> marked for recursion',
-                    child_dirs.append( full_path )
-                included = apply_filter( full_path, include_filter )
-                rejected = apply_filter( full_path, exclude_filter )
+                    child_dirs.append(full_path)
+                included = apply_filter(full_path, include_filter)
+                rejected = apply_filter(full_path, exclude_filter)
                 if not included or rejected: # do not include entry ?
 ##                    print '=> not included or rejected'
                     continue
-                link = os.path.islink( full_path )
-                is_file = os.path.isfile( full_path )
+                link = os.path.islink(full_path)
+                is_file = os.path.isfile(full_path)
                 if not is_file and not is_dir:
 ##                    print '=> unknown entry type'
                     continue
@@ -146,57 +149,57 @@ def glob(dir_path,
 ##                print '=> type: %d' % entry_type,
                 if (entry_type & entry_type_filter) != 0:
 ##                    print ' => KEEP'
-                    yield os.path.join( dir_path, entry )
+                    yield os.path.join(dir_path, entry)
 ##                else:
 ##                    print ' => TYPE REJECTED'
-    return list( glob_impl( dir_path ) )
+    return list(glob_impl(dir_path))


 if __name__ == "__main__":
     import unittest

     class AntPatternToRETest(unittest.TestCase):
-##        def test_conversion( self ):
-##            self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern )
+##        def test_conversion(self):
+##            self.assertEqual('^somepath$', ant_pattern_to_re('somepath').pattern)

-        def test_matching( self ):
-            test_cases = [ ( 'path',
+        def test_matching(self):
+            test_cases = [ ('path',
                              ['path'],
-                             ['somepath', 'pathsuffix', '/path', '/path'] ),
-                           ( '*.py',
+                             ['somepath', 'pathsuffix', '/path', '/path']),
+                           ('*.py',
                              ['source.py', 'source.ext.py', '.py'],
-                             ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ),
-                           ( '**/path',
+                             ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c']),
+                           ('**/path',
                              ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
-                             ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ),
-                           ( 'path/**',
+                             ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath']),
+                           ('path/**',
                              ['path/a', 'path/path/a', 'path//'],
-                             ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ),
-                           ( '/**/path',
+                             ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a']),
+                           ('/**/path',
                              ['/path', '/a/path', '/a/b/path/path', '/path/path'],
-                             ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ),
-                           ( 'a/b',
+                             ['path', 'path/', 'a/path', '/pathsuffix', '/somepath']),
+                           ('a/b',
                              ['a/b'],
-                             ['somea/b', 'a/bsuffix', 'a/b/c'] ),
-                           ( '**/*.py',
+                             ['somea/b', 'a/bsuffix', 'a/b/c']),
+                           ('**/*.py',
                              ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
-                             ['script.pyc', 'script.pyo', 'a.py/b'] ),
-                           ( 'src/**/*.py',
+                             ['script.pyc', 'script.pyo', 'a.py/b']),
+                           ('src/**/*.py',
                              ['src/a.py', 'src/dir/a.py'],
-                             ['a/src/a.py', '/src/a.py'] ),
+                             ['a/src/a.py', '/src/a.py']),
                            ]
             for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
-                def local_path( paths ):
+                def local_path(paths):
                     return [ p.replace('/',os.path.sep) for p in paths ]
-                test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
+                test_cases.append((ant_pattern, local_path(accepted_matches), local_path(rejected_matches)))
             for ant_pattern, accepted_matches, rejected_matches in test_cases:
-                rex = ant_pattern_to_re( ant_pattern )
+                rex = ant_pattern_to_re(ant_pattern)
                 print('ant_pattern:', ant_pattern, ' => ', rex.pattern)
                 for accepted_match in accepted_matches:
                     print('Accepted?:', accepted_match)
-                    self.assertTrue( rex.match( accepted_match ) is not None )
+                    self.assertTrue(rex.match(accepted_match) is not None)
                 for rejected_match in rejected_matches:
                     print('Rejected?:', rejected_match)
-                    self.assertTrue( rex.match( rejected_match ) is None )
+                    self.assertTrue(rex.match(rejected_match) is None)

     unittest.main()
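For readers unfamiliar with ant-style globs, the test table above pins down the semantics: `*` stays within one path component while `**` spans directories. A standalone sketch of the equivalent regular expression for one of those patterns, illustrative only and not part of the commit:

    import re

    # 'src/**/*.py' means "any .py file anywhere under src/", per the test
    # cases above: '**/' matches an arbitrary (possibly empty) directory
    # prefix, and '*' cannot cross a '/' boundary.
    rex = re.compile(r'^src/(?:.*/)?[^/]*\.py$')
    assert rex.match('src/a.py') and rex.match('src/dir/a.py')
    assert not rex.match('a/src/a.py') and not rex.match('/src/a.py')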
173
3rdparty/jsoncpp/devtools/batchbuild.py
vendored
173
3rdparty/jsoncpp/devtools/batchbuild.py
vendored
@ -18,62 +18,62 @@ class BuildDesc:
|
||||
self.build_type = build_type
|
||||
self.generator = generator
|
||||
|
||||
def merged_with( self, build_desc ):
|
||||
def merged_with(self, build_desc):
|
||||
"""Returns a new BuildDesc by merging field content.
|
||||
Prefer build_desc fields to self fields for single valued field.
|
||||
"""
|
||||
return BuildDesc( self.prepend_envs + build_desc.prepend_envs,
|
||||
return BuildDesc(self.prepend_envs + build_desc.prepend_envs,
|
||||
self.variables + build_desc.variables,
|
||||
build_desc.build_type or self.build_type,
|
||||
build_desc.generator or self.generator )
|
||||
build_desc.generator or self.generator)
|
||||
|
||||
def env( self ):
|
||||
def env(self):
|
||||
environ = os.environ.copy()
|
||||
for values_by_name in self.prepend_envs:
|
||||
for var, value in list(values_by_name.items()):
|
||||
var = var.upper()
|
||||
if type(value) is unicode:
|
||||
value = value.encode( sys.getdefaultencoding() )
|
||||
value = value.encode(sys.getdefaultencoding())
|
||||
if var in environ:
|
||||
environ[var] = value + os.pathsep + environ[var]
|
||||
else:
|
||||
environ[var] = value
|
||||
return environ
|
||||
|
||||
def cmake_args( self ):
|
||||
def cmake_args(self):
|
||||
args = ["-D%s" % var for var in self.variables]
|
||||
# skip build type for Visual Studio solution as it cause warning
|
||||
if self.build_type and 'Visual' not in self.generator:
|
||||
args.append( "-DCMAKE_BUILD_TYPE=%s" % self.build_type )
|
||||
args.append("-DCMAKE_BUILD_TYPE=%s" % self.build_type)
|
||||
if self.generator:
|
||||
args.extend( ['-G', self.generator] )
|
||||
args.extend(['-G', self.generator])
|
||||
return args
|
||||
|
||||
def __repr__( self ):
|
||||
return "BuildDesc( %s, build_type=%s )" % (" ".join( self.cmake_args()), self.build_type)
|
||||
def __repr__(self):
|
||||
return "BuildDesc(%s, build_type=%s)" % (" ".join(self.cmake_args()), self.build_type)
|
||||
|
||||
class BuildData:
|
||||
def __init__( self, desc, work_dir, source_dir ):
|
||||
def __init__(self, desc, work_dir, source_dir):
|
||||
self.desc = desc
|
||||
self.work_dir = work_dir
|
||||
self.source_dir = source_dir
|
||||
self.cmake_log_path = os.path.join( work_dir, 'batchbuild_cmake.log' )
|
||||
self.build_log_path = os.path.join( work_dir, 'batchbuild_build.log' )
|
||||
self.cmake_log_path = os.path.join(work_dir, 'batchbuild_cmake.log')
|
||||
self.build_log_path = os.path.join(work_dir, 'batchbuild_build.log')
|
||||
self.cmake_succeeded = False
|
||||
self.build_succeeded = False
|
||||
|
||||
def execute_build(self):
|
||||
print('Build %s' % self.desc)
|
||||
self._make_new_work_dir( )
|
||||
self.cmake_succeeded = self._generate_makefiles( )
|
||||
self._make_new_work_dir()
|
||||
self.cmake_succeeded = self._generate_makefiles()
|
||||
if self.cmake_succeeded:
|
||||
self.build_succeeded = self._build_using_makefiles( )
|
||||
self.build_succeeded = self._build_using_makefiles()
|
||||
return self.build_succeeded
|
||||
|
||||
def _generate_makefiles(self):
|
||||
print(' Generating makefiles: ', end=' ')
|
||||
cmd = ['cmake'] + self.desc.cmake_args( ) + [os.path.abspath( self.source_dir )]
|
||||
succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.cmake_log_path )
|
||||
cmd = ['cmake'] + self.desc.cmake_args() + [os.path.abspath(self.source_dir)]
|
||||
succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.cmake_log_path)
|
||||
print('done' if succeeded else 'FAILED')
|
||||
return succeeded
|
||||
|
||||
@ -82,58 +82,58 @@ class BuildData:
|
||||
cmd = ['cmake', '--build', self.work_dir]
|
||||
if self.desc.build_type:
|
||||
cmd += ['--config', self.desc.build_type]
|
||||
succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.build_log_path )
|
||||
succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.build_log_path)
|
||||
print('done' if succeeded else 'FAILED')
|
||||
return succeeded
|
||||
|
||||
def _execute_build_subprocess(self, cmd, env, log_path):
|
||||
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
|
||||
env=env )
|
||||
stdout, _ = process.communicate( )
|
||||
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
|
||||
env=env)
|
||||
stdout, _ = process.communicate()
|
||||
succeeded = (process.returncode == 0)
|
||||
with open( log_path, 'wb' ) as flog:
|
||||
log = ' '.join( cmd ) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
|
||||
flog.write( fix_eol( log ) )
|
||||
with open(log_path, 'wb') as flog:
|
||||
log = ' '.join(cmd) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
|
||||
flog.write(fix_eol(log))
|
||||
return succeeded
|
||||
|
||||
def _make_new_work_dir(self):
|
||||
if os.path.isdir( self.work_dir ):
|
||||
if os.path.isdir(self.work_dir):
|
||||
print(' Removing work directory', self.work_dir)
|
||||
shutil.rmtree( self.work_dir, ignore_errors=True )
|
||||
if not os.path.isdir( self.work_dir ):
|
||||
os.makedirs( self.work_dir )
|
||||
shutil.rmtree(self.work_dir, ignore_errors=True)
|
||||
if not os.path.isdir(self.work_dir):
|
||||
os.makedirs(self.work_dir)
|
||||
|
||||
def fix_eol( stdout ):
|
||||
def fix_eol(stdout):
|
||||
"""Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n).
|
||||
"""
|
||||
return re.sub( '\r*\n', os.linesep, stdout )
|
||||
return re.sub('\r*\n', os.linesep, stdout)
|
||||
|
||||
def load_build_variants_from_config( config_path ):
|
||||
with open( config_path, 'rb' ) as fconfig:
|
||||
data = json.load( fconfig )
|
||||
def load_build_variants_from_config(config_path):
|
||||
with open(config_path, 'rb') as fconfig:
|
||||
data = json.load(fconfig)
|
||||
variants = data[ 'cmake_variants' ]
|
||||
build_descs_by_axis = collections.defaultdict( list )
|
||||
build_descs_by_axis = collections.defaultdict(list)
|
||||
for axis in variants:
|
||||
axis_name = axis["name"]
|
||||
build_descs = []
|
||||
if "generators" in axis:
|
||||
for generator_data in axis["generators"]:
|
||||
for generator in generator_data["generator"]:
|
||||
build_desc = BuildDesc( generator=generator,
|
||||
prepend_envs=generator_data.get("env_prepend") )
|
||||
build_descs.append( build_desc )
|
||||
build_desc = BuildDesc(generator=generator,
|
||||
prepend_envs=generator_data.get("env_prepend"))
|
||||
build_descs.append(build_desc)
|
||||
elif "variables" in axis:
|
||||
for variables in axis["variables"]:
|
||||
build_desc = BuildDesc( variables=variables )
|
||||
build_descs.append( build_desc )
|
||||
build_desc = BuildDesc(variables=variables)
|
||||
build_descs.append(build_desc)
|
||||
elif "build_types" in axis:
|
||||
for build_type in axis["build_types"]:
|
||||
build_desc = BuildDesc( build_type=build_type )
|
||||
build_descs.append( build_desc )
|
||||
build_descs_by_axis[axis_name].extend( build_descs )
|
||||
build_desc = BuildDesc(build_type=build_type)
|
||||
build_descs.append(build_desc)
|
||||
build_descs_by_axis[axis_name].extend(build_descs)
|
||||
return build_descs_by_axis
|
||||
|
||||
def generate_build_variants( build_descs_by_axis ):
|
||||
def generate_build_variants(build_descs_by_axis):
|
||||
"""Returns a list of BuildDesc generated for the partial BuildDesc for each axis."""
|
||||
axis_names = list(build_descs_by_axis.keys())
|
||||
build_descs = []
|
||||
@ -141,8 +141,8 @@ def generate_build_variants( build_descs_by_axis ):
|
||||
if len(build_descs):
|
||||
# for each existing build_desc and each axis build desc, create a new build_desc
|
||||
new_build_descs = []
|
||||
for prototype_build_desc, axis_build_desc in itertools.product( build_descs, axis_build_descs):
|
||||
new_build_descs.append( prototype_build_desc.merged_with( axis_build_desc ) )
|
||||
for prototype_build_desc, axis_build_desc in itertools.product(build_descs, axis_build_descs):
|
||||
new_build_descs.append(prototype_build_desc.merged_with(axis_build_desc))
|
||||
build_descs = new_build_descs
|
||||
else:
|
||||
build_descs = axis_build_descs
|
||||
@ -174,60 +174,57 @@ $tr_builds
|
||||
</table>
|
||||
</body></html>''')
|
||||
|
||||
def generate_html_report( html_report_path, builds ):
|
||||
report_dir = os.path.dirname( html_report_path )
|
||||
def generate_html_report(html_report_path, builds):
|
||||
report_dir = os.path.dirname(html_report_path)
|
||||
# Vertical axis: generator
|
||||
# Horizontal: variables, then build_type
|
||||
builds_by_generator = collections.defaultdict( list )
|
||||
builds_by_generator = collections.defaultdict(list)
|
||||
variables = set()
|
||||
build_types_by_variable = collections.defaultdict( set )
|
||||
build_types_by_variable = collections.defaultdict(set)
|
||||
build_by_pos_key = {} # { (generator, var_key, build_type): build }
|
||||
for build in builds:
|
||||
builds_by_generator[build.desc.generator].append( build )
|
||||
builds_by_generator[build.desc.generator].append(build)
|
||||
var_key = tuple(sorted(build.desc.variables))
|
||||
variables.add( var_key )
|
||||
build_types_by_variable[var_key].add( build.desc.build_type )
|
||||
variables.add(var_key)
|
||||
build_types_by_variable[var_key].add(build.desc.build_type)
|
||||
pos_key = (build.desc.generator, var_key, build.desc.build_type)
|
||||
build_by_pos_key[pos_key] = build
|
||||
variables = sorted( variables )
|
||||
variables = sorted(variables)
|
||||
th_vars = []
|
||||
th_build_types = []
|
||||
for variable in variables:
|
||||
build_types = sorted( build_types_by_variable[variable] )
|
||||
build_types = sorted(build_types_by_variable[variable])
|
||||
nb_build_type = len(build_types_by_variable[variable])
|
||||
th_vars.append( '<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape( ' '.join( variable ) ) ) )
|
||||
th_vars.append('<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape(' '.join(variable))))
|
||||
for build_type in build_types:
|
||||
th_build_types.append( '<th>%s</th>' % cgi.escape(build_type) )
|
||||
th_build_types.append('<th>%s</th>' % cgi.escape(build_type))
|
||||
tr_builds = []
|
||||
for generator in sorted( builds_by_generator ):
|
||||
tds = [ '<td>%s</td>\n' % cgi.escape( generator ) ]
|
||||
for generator in sorted(builds_by_generator):
|
||||
tds = [ '<td>%s</td>\n' % cgi.escape(generator) ]
|
||||
for variable in variables:
|
||||
build_types = sorted( build_types_by_variable[variable] )
|
||||
build_types = sorted(build_types_by_variable[variable])
|
||||
for build_type in build_types:
|
||||
pos_key = (generator, variable, build_type)
|
||||
build = build_by_pos_key.get(pos_key)
|
||||
if build:
|
||||
cmake_status = 'ok' if build.cmake_succeeded else 'FAILED'
|
||||
build_status = 'ok' if build.build_succeeded else 'FAILED'
|
||||
cmake_log_url = os.path.relpath( build.cmake_log_path, report_dir )
|
||||
build_log_url = os.path.relpath( build.build_log_path, report_dir )
|
||||
td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % (
|
||||
build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
cmake_log_url = os.path.relpath(build.cmake_log_path, report_dir)
build_log_url = os.path.relpath(build.build_log_path, report_dir)
td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % ( build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
if build.cmake_succeeded:
td += '<br><a href="%s" class="%s">Build: %s</a>' % (
build_log_url, build_status.lower(), build_status)
td += '<br><a href="%s" class="%s">Build: %s</a>' % ( build_log_url, build_status.lower(), build_status)
td += '</td>'
else:
td = '<td></td>'
tds.append( td )
tr_builds.append( '<tr>%s</tr>' % '\n'.join( tds ) )
html = HTML_TEMPLATE.substitute(
title='Batch build report',
tds.append(td)
tr_builds.append('<tr>%s</tr>' % '\n'.join(tds))
html = HTML_TEMPLATE.substitute( title='Batch build report',
th_vars=' '.join(th_vars),
th_build_types=' '.join( th_build_types),
tr_builds='\n'.join( tr_builds ) )
with open( html_report_path, 'wt' ) as fhtml:
fhtml.write( html )
th_build_types=' '.join(th_build_types),
tr_builds='\n'.join(tr_builds))
with open(html_report_path, 'wt') as fhtml:
fhtml.write(html)
print('HTML report generated in:', html_report_path)

def main():
@ -246,33 +243,33 @@ python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.j
parser.enable_interspersed_args()
options, args = parser.parse_args()
if len(args) < 3:
parser.error( "Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH." )
parser.error("Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH.")
work_dir = args[0]
source_dir = args[1].rstrip('/\\')
config_paths = args[2:]
for config_path in config_paths:
if not os.path.isfile( config_path ):
parser.error( "Can not read: %r" % config_path )
if not os.path.isfile(config_path):
parser.error("Can not read: %r" % config_path)

# generate build variants
build_descs = []
for config_path in config_paths:
build_descs_by_axis = load_build_variants_from_config( config_path )
build_descs.extend( generate_build_variants( build_descs_by_axis ) )
build_descs_by_axis = load_build_variants_from_config(config_path)
build_descs.extend(generate_build_variants(build_descs_by_axis))
print('Build variants (%d):' % len(build_descs))
# assign build directory for each variant
if not os.path.isdir( work_dir ):
os.makedirs( work_dir )
if not os.path.isdir(work_dir):
os.makedirs(work_dir)
builds = []
with open( os.path.join( work_dir, 'matrix-dir-map.txt' ), 'wt' ) as fmatrixmap:
for index, build_desc in enumerate( build_descs ):
build_desc_work_dir = os.path.join( work_dir, '%03d' % (index+1) )
builds.append( BuildData( build_desc, build_desc_work_dir, source_dir ) )
fmatrixmap.write( '%s: %s\n' % (build_desc_work_dir, build_desc) )
with open(os.path.join(work_dir, 'matrix-dir-map.txt'), 'wt') as fmatrixmap:
for index, build_desc in enumerate(build_descs):
build_desc_work_dir = os.path.join(work_dir, '%03d' % (index+1))
builds.append(BuildData(build_desc, build_desc_work_dir, source_dir))
fmatrixmap.write('%s: %s\n' % (build_desc_work_dir, build_desc))
for build in builds:
build.execute_build()
html_report_path = os.path.join( work_dir, 'batchbuild-report.html' )
generate_html_report( html_report_path, builds )
html_report_path = os.path.join(work_dir, 'batchbuild-report.html')
generate_html_report(html_report_path, builds)
print('Done')
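Note: the report generation above relies on Python's string.Template. A minimal sketch of the same pattern, assuming placeholder names $title, $th_vars, $th_build_types and $tr_builds as implied by the substitute() call in the hunk; the template literal below is a hypothetical stand-in, since the script's real HTML_TEMPLATE is defined earlier in batchbuild.py and not shown here:

    from string import Template

    # Hypothetical stand-in for the script's HTML_TEMPLATE.
    HTML_TEMPLATE = Template('''<html><head><title>$title</title></head>
    <body><table><tr>$th_vars</tr><tr>$th_build_types</tr>$tr_builds</table>
    </body></html>''')

    def render_report(tr_builds):
        # substitute() raises KeyError if a placeholder is missing,
        # which is why the script passes every key explicitly.
        return HTML_TEMPLATE.substitute(
            title='Batch build report',
            th_vars='<th>variant</th>',
            th_build_types='<th>build type</th>',
            tr_builds='\n'.join(tr_builds))

    print(render_report(['<tr><td>ok</td></tr>']))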
29
3rdparty/jsoncpp/devtools/fixeol.py
vendored
@ -1,10 +1,15 @@
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

from __future__ import print_function
import os.path

def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
"""Makes sure that all sources have the specified eol sequence (default: unix)."""
if not os.path.isfile( path ):
raise ValueError( 'Path "%s" is not a file' % path )
if not os.path.isfile(path):
raise ValueError('Path "%s" is not a file' % path)
try:
f = open(path, 'rb')
except IOError as msg:
@ -29,27 +34,27 @@ def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
##
##
##
##def _do_fix( is_dry_run = True ):
##def _do_fix(is_dry_run = True):
## from waftools import antglob
## python_sources = antglob.glob( '.',
## python_sources = antglob.glob('.',
## includes = '**/*.py **/wscript **/wscript_build',
## excludes = antglob.default_excludes + './waf.py',
## prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
## prune_dirs = antglob.prune_dirs + 'waf-* ./build')
## for path in python_sources:
## _fix_python_source( path, is_dry_run )
## _fix_python_source(path, is_dry_run)
##
## cpp_sources = antglob.glob( '.',
## cpp_sources = antglob.glob('.',
## includes = '**/*.cpp **/*.h **/*.inl',
## prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
## prune_dirs = antglob.prune_dirs + 'waf-* ./build')
## for path in cpp_sources:
## _fix_source_eol( path, is_dry_run )
## _fix_source_eol(path, is_dry_run)
##
##
##def dry_fix(context):
## _do_fix( is_dry_run = True )
## _do_fix(is_dry_run = True)
##
##def fix(context):
## _do_fix( is_dry_run = False )
## _do_fix(is_dry_run = False)
##
##def shutdown():
## pass
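The body of fix_source_eol() is truncated in this view. As a rough sketch of what such an EOL fixer does, based only on the function's docstring and signature (the helper name normalize_eol is invented, not the jsoncpp implementation): read in binary mode, normalize every CR/LF variant to the requested sequence, and rewrite the file only when something changed and the run is not a dry run:

    import os.path

    def normalize_eol(path, eol=b'\n', is_dry_run=True, verbose=True):
        if not os.path.isfile(path):
            raise ValueError('Path "%s" is not a file' % path)
        with open(path, 'rb') as f:
            data = f.read()
        # Normalize CRLF first, then stray CR, then map LF to the target EOL.
        fixed = data.replace(b'\r\n', b'\n').replace(b'\r', b'\n')
        if eol != b'\n':
            fixed = fixed.replace(b'\n', eol)
        if fixed != data:
            if verbose:
                print(('would fix:' if is_dry_run else 'fixing:'), path)
            if not is_dry_run:
                with open(path, 'wb') as f:
                    f.write(fixed)
        return fixed != data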
26
3rdparty/jsoncpp/devtools/licenseupdater.py
vendored
@ -13,7 +13,7 @@ BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur

""".replace('\r\n','\n')

def update_license( path, dry_run, show_diff ):
def update_license(path, dry_run, show_diff):
"""Update the license statement in the specified file.
Parameters:
path: path of the C++ source file to update.
@ -22,28 +22,28 @@ def update_license( path, dry_run, show_diff ):
show_diff: if True, print the path of the file that would be modified,
as well as the change made to the file.
"""
with open( path, 'rt' ) as fin:
with open(path, 'rt') as fin:
original_text = fin.read().replace('\r\n','\n')
newline = fin.newlines and fin.newlines[0] or '\n'
if not original_text.startswith( LICENSE_BEGIN ):
if not original_text.startswith(LICENSE_BEGIN):
# No existing license found => prepend it
new_text = BRIEF_LICENSE + original_text
else:
license_end_index = original_text.index( '\n\n' ) # search first blank line
license_end_index = original_text.index('\n\n') # search first blank line
new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
if original_text != new_text:
if not dry_run:
with open( path, 'wb' ) as fout:
fout.write( new_text.replace('\n', newline ) )
with open(path, 'wb') as fout:
fout.write(new_text.replace('\n', newline))
print('Updated', path)
if show_diff:
import difflib
print('\n'.join( difflib.unified_diff( original_text.split('\n'),
new_text.split('\n') ) ))
print('\n'.join(difflib.unified_diff(original_text.split('\n'),
new_text.split('\n'))))
return True
return False

def update_license_in_source_directories( source_dirs, dry_run, show_diff ):
def update_license_in_source_directories(source_dirs, dry_run, show_diff):
"""Updates license text in C++ source files found in directory source_dirs.
Parameters:
source_dirs: list of directory to scan for C++ sources. Directories are
@ -56,11 +56,11 @@ def update_license_in_source_directories( source_dirs, dry_run, show_diff ):
from devtools import antglob
prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
for source_dir in source_dirs:
cpp_sources = antglob.glob( source_dir,
cpp_sources = antglob.glob(source_dir,
includes = '''**/*.h **/*.cpp **/*.inl''',
prune_dirs = prune_dirs )
prune_dirs = prune_dirs)
for source in cpp_sources:
update_license( source, dry_run, show_diff )
update_license(source, dry_run, show_diff)

def main():
usage = """%prog DIR [DIR2...]
@ -83,7 +83,7 @@ python devtools\licenseupdater.py include src
help="""On update, show change made to the file.""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
update_license_in_source_directories( args, options.dry_run, options.show_diff )
update_license_in_source_directories(args, options.dry_run, options.show_diff)
print('Done')

if __name__ == '__main__':
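For reference, the show_diff branch above leans on difflib.unified_diff, which takes two lists of lines and lazily yields diff lines. A minimal, self-contained illustration (the sample strings are invented for the example):

    import difflib

    old = 'old license\n\ncode\n'.split('\n')
    new = 'new license\n\ncode\n'.split('\n')

    # unified_diff() is a generator of '---', '+++', '@@' and '+/-' lines;
    # join them to get the familiar unified-diff text.
    print('\n'.join(difflib.unified_diff(old, new, lineterm='')))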
47
3rdparty/jsoncpp/devtools/tarball.py
vendored
@ -1,5 +1,10 @@
import os.path
import gzip
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

from contextlib import closing
import os
import tarfile

TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
@ -13,41 +18,35 @@ def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to ''
to make them child of root.
"""
base_dir = os.path.normpath( os.path.abspath( base_dir ) )
def archive_name( path ):
base_dir = os.path.normpath(os.path.abspath(base_dir))
def archive_name(path):
"""Makes path relative to base_dir."""
path = os.path.normpath( os.path.abspath( path ) )
common_path = os.path.commonprefix( (base_dir, path) )
path = os.path.normpath(os.path.abspath(path))
common_path = os.path.commonprefix((base_dir, path))
archive_name = path[len(common_path):]
if os.path.isabs( archive_name ):
if os.path.isabs(archive_name):
archive_name = archive_name[1:]
return os.path.join( prefix_dir, archive_name )
return os.path.join(prefix_dir, archive_name)
def visit(tar, dirname, names):
for name in names:
path = os.path.join(dirname, name)
if os.path.isfile(path):
path_in_tar = archive_name(path)
tar.add(path, path_in_tar )
tar.add(path, path_in_tar)
compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression )
try:
with closing(tarfile.TarFile.open(tarball_path, 'w:gz',
compresslevel=compression)) as tar:
for source in sources:
source_path = source
if os.path.isdir( source ):
os.path.walk(source_path, visit, tar)
if os.path.isdir(source):
for dirpath, dirnames, filenames in os.walk(source_path):
visit(tar, dirpath, filenames)
else:
path_in_tar = archive_name(source_path)
tar.add(source_path, path_in_tar ) # filename, arcname
finally:
tar.close()
tar.add(source_path, path_in_tar) # filename, arcname

def decompress( tarball_path, base_dir ):
def decompress(tarball_path, base_dir):
"""Decompress the gzipped tarball into directory base_dir.
"""
# !!! This class method is not documented in the online doc
# nor is bz2open!
tar = tarfile.TarFile.gzopen(tarball_path, mode='r')
try:
tar.extractall( base_dir )
finally:
tar.close()
with closing(tarfile.TarFile.open(tarball_path)) as tar:
tar.extractall(base_dir)
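The change above replaces the undocumented TarFile.gzopen() and the Python-2-only os.path.walk() with TarFile.open(..., 'w:gz') wrapped in contextlib.closing() plus a portable os.walk() loop. A minimal sketch of the same pattern, under the assumption of a single source directory (the make_targz name and paths are placeholders, not the project's helper):

    import os
    import tarfile
    from contextlib import closing

    def make_targz(tarball_path, source_dir, prefix_dir=''):
        # closing() guarantees tar.close() even on older Pythons where
        # TarFile objects did not yet support the with-statement directly.
        with closing(tarfile.open(tarball_path, 'w:gz', compresslevel=9)) as tar:
            for dirpath, dirnames, filenames in os.walk(source_dir):
                for name in filenames:
                    path = os.path.join(dirpath, name)
                    # Store entries relative to source_dir, under prefix_dir.
                    arcname = os.path.join(prefix_dir,
                                           os.path.relpath(path, source_dir))
                    tar.add(path, arcname)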
5
3rdparty/jsoncpp/doc/doxyfile.in
vendored
@ -819,7 +819,7 @@ EXCLUDE_SYMBOLS =
# that contain example code fragments that are included (see the \include
# command).

EXAMPLE_PATH =
EXAMPLE_PATH = ..

# If the value of the EXAMPLE_PATH tag contains directories, you can use the
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
@ -1946,8 +1946,7 @@ INCLUDE_FILE_PATTERNS = *.h
PREDEFINED = "_MSC_VER=1400" \
_CPPRTTI \
_WIN32 \
JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \
JSON_VALUE_USE_INTERNAL_MAP
JSONCPP_DOC_EXCLUDE_IMPLEMENTATION

# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
# tag can be used to specify a list of macro names that should be expanded. The
2
3rdparty/jsoncpp/doc/header.html
vendored
@ -16,7 +16,7 @@ JsonCpp - JSON data format manipulation library
</a>
</td>
<td width="40%" align="right" valign="center">
<a href="https://github.com/open-source-parsers/jsoncpp">JsonCpp home page</a>
<a href="http://open-source-parsers.github.io/jsoncpp-docs/doxygen/">JsonCpp home page</a>
</td>
</tr>
</table>
133
3rdparty/jsoncpp/doc/jsoncpp.dox
vendored
@ -4,11 +4,21 @@

<a HREF="http://www.json.org/">JSON (JavaScript Object Notation)</a>
is a lightweight data-interchange format.
It can represent integer, real number, string, an ordered sequence of value, and
a collection of name/value pairs.

Here is an example of JSON data:
\verbatim
{
"encoding" : "UTF-8",
"plug-ins" : [
"python",
"c++",
"ruby"
],
"indent" : { "length" : 3, "use_space": true }
}
\endverbatim
<b>JsonCpp</b> supports comments as <i>meta-data</i>:
\code
// Configuration options
{
// Default encoding for text
@ -17,22 +27,22 @@ Here is an example of JSON data:
// Plug-ins loaded at start-up
"plug-ins" : [
"python",
"c++",
"c++", // trailing comment
"ruby"
],

// Tab indent size
"indent" : { "length" : 3, "use_space": true }
// (multi-line comment)
"indent" : { /*embedded comment*/ "length" : 3, "use_space": true }
}
\endverbatim
<code>jsoncpp</code> supports comments as <i>meta-data</i>.
\endcode

\section _features Features
- read and write JSON document
- attach C++ style comments to element during parsing
- rewrite JSON document preserving original comments

Notes: Comments used to be supported in JSON but where removed for
Notes: Comments used to be supported in JSON but were removed for
portability (C like comments are not supported in Python). Since
comments are useful in configuration/input file, this feature was
preserved.
@ -40,47 +50,77 @@ preserved.
\section _example Code example

\code
Json::Value root; // will contains the root value after parsing.
Json::Reader reader;
bool parsingSuccessful = reader.parse( config_doc, root );
if ( !parsingSuccessful )
{
// report to the user the failure and their locations in the document.
std::cout << "Failed to parse configuration\n"
<< reader.getFormattedErrorMessages();
return;
}
Json::Value root; // 'root' will contain the root value after parsing.
std::cin >> root;

// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no
// such member.
std::string encoding = root.get("encoding", "UTF-8" ).asString();
// Get the value of the member of root named 'encoding', return a 'null' value if
// there is no such member.
const Json::Value plugins = root["plug-ins"];
for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements.
loadPlugIn( plugins[index].asString() );

setIndentLength( root["indent"].get("length", 3).asInt() );
setIndentUseSpace( root["indent"].get("use_space", true).asBool() );

// ...
// At application shutdown to make the new configuration document:
// Since Json::Value has implicit constructor for all value types, it is not
// necessary to explicitly construct the Json::Value object:
root["encoding"] = getCurrentEncoding();
root["indent"]["length"] = getCurrentIndentLength();
root["indent"]["use_space"] = getCurrentIndentUseSpace();

Json::StyledWriter writer;
// Make a new JSON document for the configuration. Preserve original comments.
std::string outputConfig = writer.write( root );

// You can also use streams. This will put the contents of any JSON
// stream at a particular sub-value, if you'd like.
// You can also read into a particular sub-value.
std::cin >> root["subtree"];

// And you can write to a stream, using the StyledWriter automatically.
// Get the value of the member of root named 'encoding',
// and return 'UTF-8' if there is no such member.
std::string encoding = root.get("encoding", "UTF-8" ).asString();

// Get the value of the member of root named 'plug-ins'; return a 'null' value if
// there is no such member.
const Json::Value plugins = root["plug-ins"];

// Iterate over the sequence elements.
for ( int index = 0; index < plugins.size(); ++index )
loadPlugIn( plugins[index].asString() );

// Try other datatypes. Some are auto-convertible to others.
foo::setIndentLength( root["indent"].get("length", 3).asInt() );
foo::setIndentUseSpace( root["indent"].get("use_space", true).asBool() );

// Since Json::Value has an implicit constructor for all value types, it is not
// necessary to explicitly construct the Json::Value object.
root["encoding"] = foo::getCurrentEncoding();
root["indent"]["length"] = foo::getCurrentIndentLength();
root["indent"]["use_space"] = foo::getCurrentIndentUseSpace();

// If you like the defaults, you can insert directly into a stream.
std::cout << root;
// Of course, you can write to `std::ostringstream` if you prefer.

// If desired, remember to add a linefeed and flush.
std::cout << std::endl;
\endcode

\section _advanced Advanced usage

Configure *builders* to create *readers* and *writers*. For
configuration, we use our own `Json::Value` (rather than
standard setters/getters) so that we can add
features without losing binary-compatibility.

\code
// For convenience, use `writeString()` with a specialized builder.
Json::StreamWriterBuilder wbuilder;
wbuilder["indentation"] = "\t";
std::string document = Json::writeString(wbuilder, root);

// Here, using a specialized Builder, we discard comments and
// record errors as we parse.
Json::CharReaderBuilder rbuilder;
rbuilder["collectComments"] = false;
std::string errs;
bool ok = Json::parseFromStream(rbuilder, std::cin, &root, &errs);
\endcode

Yes, compile-time configuration-checking would be helpful,
but `Json::Value` lets you
write and read the builder configuration, which is better! In other words,
you can configure your JSON parser using JSON.

CharReaders and StreamWriters are not thread-safe, but they are re-usable.
\code
Json::CharReaderBuilder rbuilder;
cfg >> rbuilder.settings_;
std::unique_ptr<Json::CharReader> const reader(rbuilder.newCharReader());
reader->parse(start, stop, &value1, &errs);
// ...
reader->parse(start, stop, &value2, &errs);
// etc.
\endcode

\section _pbuild Build instructions
@ -116,4 +156,9 @@ Basically JsonCpp is licensed under MIT license, or public domain if desired
and recognized in your jurisdiction.

\author Baptiste Lepilleur <blep@users.sourceforge.net> (originator)
\author Christopher Dunn <cdunn2001@gmail.com> (primary maintainer)
\version \include version
We make strong guarantees about binary-compatibility, consistent with
<a href="http://apr.apache.org/versioning.html">the Apache versioning scheme</a>.
\sa version.h
*/
2301
3rdparty/jsoncpp/doc/web_doxyfile.in
vendored
Normal file
File diff suppressed because it is too large
144
3rdparty/jsoncpp/doxybuild.py
vendored
@ -1,22 +1,37 @@
"""Script to generate doxygen documentation.
"""
from __future__ import print_function
from __future__ import unicode_literals
from devtools import tarball
from contextlib import contextmanager
import subprocess
import traceback
import re
import os
import os.path
import sys
import shutil

@contextmanager
def cd(newdir):
"""
http://stackoverflow.com/questions/431684/how-do-i-cd-in-python
"""
prevdir = os.getcwd()
os.chdir(newdir)
try:
yield
finally:
os.chdir(prevdir)

def find_program(*filenames):
"""find a program in folders path_lst, and sets env[var]
@param filenames: a list of possible names of the program to search for
@return: the full path of the filename if found, or '' if filename could not be found
"""
paths = os.environ.get('PATH', '').split(os.pathsep)
suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or ''
suffixes = ('win32' in sys.platform) and '.exe .com .bat .cmd' or ''
for filename in filenames:
for name in [filename+ext for ext in suffixes.split()]:
for name in [filename+ext for ext in suffixes.split(' ')]:
for directory in paths:
full_path = os.path.join(directory, name)
if os.path.isfile(full_path):
@ -28,53 +43,56 @@ def do_subst_in_file(targetfile, sourcefile, dict):
For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
"""
try:
f = open(sourcefile, 'rb')
with open(sourcefile, 'r') as f:
contents = f.read()
f.close()
except:
print("Can't read source file %s"%sourcefile)
raise
for (k,v) in list(dict.items()):
v = v.replace('\\','\\\\')
contents = re.sub(k, v, contents)
try:
f = open(targetfile, 'wb')
with open(targetfile, 'w') as f:
f.write(contents)
f.close()

def getstatusoutput(cmd):
"""cmd is a list.
"""
try:
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output, _ = process.communicate()
status = process.returncode
except:
print("Can't write target file %s"%targetfile)
raise
status = -1
output = traceback.format_exc()
return status, output

def run_cmd(cmd, silent=False):
"""Raise exception on failure.
"""
info = 'Running: %r in %r' %(' '.join(cmd), os.getcwd())
print(info)
sys.stdout.flush()
if silent:
status, output = getstatusoutput(cmd)
else:
status, output = subprocess.call(cmd), ''
if status:
msg = 'Error while %s ...\n\terror=%d, output="""%s"""' %(info, status, output)
raise Exception(msg)

def assert_is_exe(path):
if not path:
raise Exception('path is empty.')
if not os.path.isfile(path):
raise Exception('%r is not a file.' %path)
if not os.access(path, os.X_OK):
raise Exception('%r is not executable by this user.' %path)

def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
config_file = os.path.abspath( config_file )
doxygen_path = doxygen_path
old_cwd = os.getcwd()
try:
os.chdir( working_dir )
assert_is_exe(doxygen_path)
config_file = os.path.abspath(config_file)
with cd(working_dir):
cmd = [doxygen_path, config_file]
print('Running:', ' '.join( cmd ))
try:
import subprocess
except:
if os.system( ' '.join( cmd ) ) != 0:
print('Documentation generation failed')
return False
else:
if is_silent:
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
else:
process = subprocess.Popen( cmd )
stdout, _ = process.communicate()
if process.returncode:
print('Documentation generation failed:')
print(stdout)
return False
return True
finally:
os.chdir( old_cwd )
run_cmd(cmd, is_silent)

def build_doc( options, make_release=False ):
def build_doc(options, make_release=False):
if make_release:
options.make_tarball = True
options.with_dot = True
@ -83,56 +101,56 @@ def build_doc( options, make_release=False ):
options.open = False
options.silent = True

version = open('version','rt').read().strip()
version = open('version', 'rt').read().strip()
output_dir = 'dist/doxygen' # relative to doc/doxyfile location.
if not os.path.isdir( output_dir ):
os.makedirs( output_dir )
top_dir = os.path.abspath( '.' )
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
top_dir = os.path.abspath('.')
html_output_dirname = 'jsoncpp-api-html-' + version
tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' )
warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' )
html_output_path = os.path.join( output_dir, html_output_dirname )
def yesno( bool ):
tarball_path = os.path.join('dist', html_output_dirname + '.tar.gz')
warning_log_path = os.path.join(output_dir, '../jsoncpp-doxygen-warning.log')
html_output_path = os.path.join(output_dir, html_output_dirname)
def yesno(bool):
return bool and 'YES' or 'NO'
subst_keys = {
'%JSONCPP_VERSION%': version,
'%DOC_TOPDIR%': '',
'%TOPDIR%': top_dir,
'%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ),
'%HTML_OUTPUT%': os.path.join('..', output_dir, html_output_dirname),
'%HAVE_DOT%': yesno(options.with_dot),
'%DOT_PATH%': os.path.split(options.dot_path)[0],
'%HTML_HELP%': yesno(options.with_html_help),
'%UML_LOOK%': yesno(options.with_uml_look),
'%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path )
'%WARNING_LOG_PATH%': os.path.join('..', warning_log_path)
}

if os.path.isdir( output_dir ):
if os.path.isdir(output_dir):
print('Deleting directory:', output_dir)
shutil.rmtree( output_dir )
if not os.path.isdir( output_dir ):
os.makedirs( output_dir )
shutil.rmtree(output_dir)
if not os.path.isdir(output_dir):
os.makedirs(output_dir)

do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys )
ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent )
do_subst_in_file('doc/doxyfile', options.doxyfile_input_path, subst_keys)
run_doxygen(options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent)
if not options.silent:
print(open(warning_log_path, 'rb').read())
print(open(warning_log_path, 'r').read())
index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html'))
print('Generated documentation can be found in:')
print(index_path)
if options.open:
import webbrowser
webbrowser.open( 'file://' + index_path )
webbrowser.open('file://' + index_path)
if options.make_tarball:
print('Generating doc tarball to', tarball_path)
tarball_sources = [
output_dir,
'README.txt',
'README.md',
'LICENSE',
'NEWS.txt',
'version'
]
tarball_basedir = os.path.join( output_dir, html_output_dirname )
tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname )
tarball_basedir = os.path.join(output_dir, html_output_dirname)
tarball.make_tarball(tarball_path, tarball_sources, tarball_basedir, html_output_dirname)
return tarball_path, html_output_dirname

def main():
@ -151,6 +169,8 @@ def main():
help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'),
help="""Path to Doxygen tool. [Default: %default]""")
parser.add_option('--in', dest="doxyfile_input_path", action='store', default='doc/doxyfile.in',
help="""Path to doxygen inputs. [Default: %default]""")
parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False,
help="""Enable generation of Microsoft HTML HELP""")
parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True,
@ -163,7 +183,7 @@ def main():
help="""Hides doxygen output""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
build_doc( options )
build_doc(options)

if __name__ == '__main__':
main()
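The new getstatusoutput()/run_cmd() helpers above wrap a common subprocess pattern: capture stdout and stderr as one stream, then turn a non-zero exit status into an exception. A condensed sketch of the same pattern (the run_checked name and the echo command are examples, not part of doxybuild.py):

    import subprocess

    def run_checked(cmd):
        # Merge stderr into stdout so a failure log is a single stream.
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT)
        output, _ = process.communicate()
        if process.returncode:
            raise Exception('Error while running %r:\n%s' % (cmd, output))
        return output

    print(run_checked(['echo', 'hello']))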
39
3rdparty/jsoncpp/include/json/assertions.h
vendored
@ -7,35 +7,48 @@
#define CPPTL_JSON_ASSERTIONS_H_INCLUDED

#include <stdlib.h>
#include <sstream>

#if !defined(JSON_IS_AMALGAMATION)
#include "config.h"
#endif // if !defined(JSON_IS_AMALGAMATION)

/** It should not be possible for a maliciously designed file to
 * cause an abort() or seg-fault, so these macros are used only
 * for pre-condition violations and internal logic errors.
 */
#if JSON_USE_EXCEPTION
#include <stdexcept>
#define JSON_ASSERT(condition) \
assert(condition); // @todo <= change this into an exception throw
#define JSON_FAIL_MESSAGE(message) throw std::runtime_error(message);

// @todo <= add detail about condition in exception
# define JSON_ASSERT(condition) \
{if (!(condition)) {Json::throwLogicError( "assert json failed" );}}

# define JSON_FAIL_MESSAGE(message) \
{ \
std::ostringstream oss; oss << message; \
Json::throwLogicError(oss.str()); \
abort(); \
}

#else // JSON_USE_EXCEPTION
#define JSON_ASSERT(condition) assert(condition);

# define JSON_ASSERT(condition) assert(condition)

// The call to assert() will show the failure message in debug builds. In
// release bugs we write to invalid memory in order to crash hard, so that a
// debugger or crash reporter gets the chance to take over. We still call exit()
// afterward in order to tell the compiler that this macro doesn't return.
#define JSON_FAIL_MESSAGE(message) \
// release builds we abort, for a core-dump or debugger.
# define JSON_FAIL_MESSAGE(message) \
{ \
assert(false&& message); \
strcpy(reinterpret_cast<char*>(666), message); \
exit(123); \
std::ostringstream oss; oss << message; \
assert(false && oss.str().c_str()); \
abort(); \
}

#endif

#define JSON_ASSERT_MESSAGE(condition, message) \
if (!(condition)) { \
JSON_FAIL_MESSAGE(message) \
JSON_FAIL_MESSAGE(message); \
}

#endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED
19
3rdparty/jsoncpp/include/json/config.h
vendored
@ -15,17 +15,6 @@
/// std::map
/// as Value container.
//# define JSON_USE_CPPTL_SMALLMAP 1
/// If defined, indicates that Json specific container should be used
/// (hash table & simple deque container with customizable allocator).
/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332
//# define JSON_VALUE_USE_INTERNAL_MAP 1
/// Force usage of standard new/malloc based allocator instead of memory pool
/// based allocator.
/// The memory pools allocator used optimization (initializing Value and
/// ValueInternalLink
/// as if it was a POD) that may cause some validation tool to report errors.
/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined.
//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1

// If non-zero, the library uses exceptions to report bad input instead of C
// assertion macros. The default is to use exceptions.
@ -81,6 +70,14 @@
#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008
/// Indicates that the following function is deprecated.
#define JSONCPP_DEPRECATED(message) __declspec(deprecated(message))
#elif defined(__clang__) && defined(__has_feature)
#if __has_feature(attribute_deprecated_with_message)
#define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message)))
#endif
#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5))
#define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message)))
#elif defined(__GNUC__) && (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
#define JSONCPP_DEPRECATED(message) __attribute__((__deprecated__))
#endif

#if !defined(JSONCPP_DEPRECATED)
6
3rdparty/jsoncpp/include/json/forwards.h
vendored
@ -31,12 +31,6 @@ class Value;
class ValueIteratorBase;
class ValueIterator;
class ValueConstIterator;
#ifdef JSON_VALUE_USE_INTERNAL_MAP
class ValueMapAllocator;
class ValueInternalLink;
class ValueInternalArray;
class ValueInternalMap;
#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP

} // namespace Json
131
3rdparty/jsoncpp/include/json/reader.h
vendored
@ -14,6 +14,7 @@
#include <iosfwd>
#include <stack>
#include <string>
#include <istream>

// Disable warning C4251: <data member>: <type> needs to have dll-interface to
// be used by...
@ -27,6 +28,7 @@ namespace Json {
/** \brief Unserialize a <a HREF="http://www.json.org">JSON</a> document into a
 *Value.
 *
 * \deprecated Use CharReader and CharReaderBuilder.
 */
class JSON_API Reader {
public:
@ -78,7 +80,7 @@ public:
document to read.
 * \param endDoc Pointer on the end of the UTF-8 encoded string of the
document to read.
\ Must be >= beginDoc.
 * Must be >= beginDoc.
 * \param root [out] Contains the root value of the document if it was
 * successfully parsed.
 * \param collectComments \c true to collect comment and allow writing them
@ -108,7 +110,7 @@ public:
 * during parsing.
 * \deprecated Use getFormattedErrorMessages() instead (typo fix).
 */
JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead")
JSONCPP_DEPRECATED("Use getFormattedErrorMessages() instead.")
std::string getFormatedErrorMessages() const;

/** \brief Returns a user friendly string that list errors in the parsed
@ -187,7 +189,6 @@ private:

typedef std::deque<ErrorInfo> Errors;

bool expectToken(TokenType type, Token& token, const char* message);
bool readToken(Token& token);
void skipSpaces();
bool match(Location pattern, int patternLength);
@ -239,8 +240,132 @@ private:
std::string commentsBefore_;
Features features_;
bool collectComments_;
}; // Reader

/** Interface for reading JSON from a char array.
 */
class JSON_API CharReader {
public:
virtual ~CharReader() {}
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
document.
 * The document must be a UTF-8 encoded string containing the document to read.
 *
 * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the
document to read.
 * \param endDoc Pointer on the end of the UTF-8 encoded string of the
document to read.
 * Must be >= beginDoc.
 * \param root [out] Contains the root value of the document if it was
 * successfully parsed.
 * \param errs [out] Formatted error messages (if not NULL)
 * a user friendly string that lists errors in the parsed
 * document.
 * \return \c true if the document was successfully parsed, \c false if an
error occurred.
 */
virtual bool parse(
char const* beginDoc, char const* endDoc,
Value* root, std::string* errs) = 0;

class Factory {
public:
virtual ~Factory() {}
/** \brief Allocate a CharReader via operator new().
 * \throw std::exception if something goes wrong (e.g. invalid settings)
 */
virtual CharReader* newCharReader() const = 0;
}; // Factory
}; // CharReader

/** \brief Build a CharReader implementation.

Usage:
\code
using namespace Json;
CharReaderBuilder builder;
builder["collectComments"] = false;
Value value;
std::string errs;
bool ok = parseFromStream(builder, std::cin, &value, &errs);
\endcode
*/
class JSON_API CharReaderBuilder : public CharReader::Factory {
public:
// Note: We use a Json::Value so that we can add data-members to this class
// without a major version bump.
/** Configuration of this builder.
These are case-sensitive.
Available settings (case-sensitive):
- `"collectComments": false or true`
- true to collect comment and allow writing them
back during serialization, false to discard comments.
This parameter is ignored if allowComments is false.
- `"allowComments": false or true`
- true if comments are allowed.
- `"strictRoot": false or true`
- true if root must be either an array or an object value
- `"allowDroppedNullPlaceholders": false or true`
- true if dropped null placeholders are allowed. (See StreamWriterBuilder.)
- `"allowNumericKeys": false or true`
- true if numeric object keys are allowed.
- `"allowSingleQuotes": false or true`
- true if '' are allowed for strings (both keys and values)
- `"stackLimit": integer`
- Exceeding stackLimit (recursive depth of `readValue()`) will
cause an exception.
- This is a security issue (seg-faults caused by deeply nested JSON),
so the default is low.
- `"failIfExtra": false or true`
- If true, `parse()` returns false when extra non-whitespace trails
the JSON value in the input string.
- `"rejectDupKeys": false or true`
- If true, `parse()` returns false when a key is duplicated within an object.

You can examine 'settings_` yourself
to see the defaults. You can also write and read them just like any
JSON Value.
\sa setDefaults()
*/
Json::Value settings_;

CharReaderBuilder();
virtual ~CharReaderBuilder();

virtual CharReader* newCharReader() const;

/** \return true if 'settings' are legal and consistent;
 * otherwise, indicate bad settings via 'invalid'.
 */
bool validate(Json::Value* invalid) const;

/** A simple way to update a specific setting.
 */
Value& operator[](std::string key);

/** Called by ctor, but you can use this to reset settings_.
 * \pre 'settings' != NULL (but Json::null is fine)
 * \remark Defaults:
 * \snippet src/lib_json/json_reader.cpp CharReaderBuilderDefaults
 */
static void setDefaults(Json::Value* settings);
/** Same as old Features::strictMode().
 * \pre 'settings' != NULL (but Json::null is fine)
 * \remark Defaults:
 * \snippet src/lib_json/json_reader.cpp CharReaderBuilderStrictMode
 */
static void strictMode(Json::Value* settings);
};

/** Consume entire stream and use its begin/end.
 * Someday we might have a real StreamReader, but for now this
 * is convenient.
 */
bool JSON_API parseFromStream(
CharReader::Factory const&,
std::istream&,
Value* root, std::string* errs);

/** \brief Read from 'sin' into 'root'.

Always keep comments from the input JSON.
606
3rdparty/jsoncpp/include/json/value.h
vendored
@ -11,6 +11,7 @@
|
||||
#endif // if !defined(JSON_IS_AMALGAMATION)
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <exception>
|
||||
|
||||
#ifndef JSON_USE_CPPTL_SMALLMAP
|
||||
#include <map>
|
||||
@ -32,6 +33,31 @@
|
||||
*/
|
||||
namespace Json {
|
||||
|
||||
/** Base class for all exceptions we throw.
|
||||
*
|
||||
* We use nothing but these internally. Of course, STL can throw others.
|
||||
*/
|
||||
class JSON_API Exception;
|
||||
/** Exceptions which the user cannot easily avoid.
|
||||
*
|
||||
* E.g. out-of-memory (when we use malloc), stack-overflow, malicious input
|
||||
*
|
||||
* \remark derived from Json::Exception
|
||||
*/
|
||||
class JSON_API RuntimeError;
|
||||
/** Exceptions thrown by JSON_ASSERT/JSON_FAIL macros.
|
||||
*
|
||||
* These are precondition-violations (user bugs) and internal errors (our bugs).
|
||||
*
|
||||
* \remark derived from Json::Exception
|
||||
*/
|
||||
class JSON_API LogicError;
|
||||
|
||||
/// used internally
|
||||
void throwRuntimeError(std::string const& msg);
|
||||
/// used internally
|
||||
void throwLogicError(std::string const& msg);
|
||||
|
||||
/** \brief Type of the value held by a Value object.
|
||||
*/
|
||||
enum ValueType {
|
||||
@ -74,14 +100,14 @@ enum CommentPlacement {
|
||||
*/
|
||||
class JSON_API StaticString {
|
||||
public:
|
||||
explicit StaticString(const char* czstring) : str_(czstring) {}
|
||||
explicit StaticString(const char* czstring) : c_str_(czstring) {}
|
||||
|
||||
operator const char*() const { return str_; }
|
||||
operator const char*() const { return c_str_; }
|
||||
|
||||
const char* c_str() const { return str_; }
|
||||
const char* c_str() const { return c_str_; }
|
||||
|
||||
private:
|
||||
const char* str_;
|
||||
const char* c_str_;
|
||||
};
|
||||
|
||||
/** \brief Represents a <a HREF="http://www.json.org">JSON</a> value.
|
||||
@ -99,26 +125,27 @@ private:
|
||||
* The type of the held value is represented by a #ValueType and
|
||||
* can be obtained using type().
|
||||
*
|
||||
* values of an #objectValue or #arrayValue can be accessed using operator[]()
|
||||
*methods.
|
||||
* Non const methods will automatically create the a #nullValue element
|
||||
* Values of an #objectValue or #arrayValue can be accessed using operator[]()
|
||||
* methods.
|
||||
* Non-const methods will automatically create the a #nullValue element
|
||||
* if it does not exist.
|
||||
* The sequence of an #arrayValue will be automatically resize and initialized
|
||||
* The sequence of an #arrayValue will be automatically resized and initialized
|
||||
* with #nullValue. resize() can be used to enlarge or truncate an #arrayValue.
|
||||
*
|
||||
* The get() methods can be used to obtanis default value in the case the
|
||||
*required element
|
||||
* does not exist.
|
||||
* The get() methods can be used to obtain default value in the case the
|
||||
* required element does not exist.
|
||||
*
|
||||
* It is possible to iterate over the list of a #objectValue values using
|
||||
* the getMemberNames() method.
|
||||
*
|
||||
* \note #Value string-length fit in size_t, but keys must be < 2^30.
|
||||
* (The reason is an implementation detail.) A #CharReader will raise an
|
||||
* exception if a bound is exceeded to avoid security holes in your app,
|
||||
* but the Value API does *not* check bounds. That is the responsibility
|
||||
* of the caller.
|
||||
*/
|
||||
class JSON_API Value {
|
||||
friend class ValueIteratorBase;
|
||||
#ifdef JSON_VALUE_USE_INTERNAL_MAP
|
||||
friend class ValueInternalLink;
|
||||
friend class ValueInternalMap;
|
||||
#endif
|
||||
public:
|
||||
typedef std::vector<std::string> Members;
|
||||
typedef ValueIterator iterator;
|
||||
@ -133,7 +160,8 @@ public:
|
||||
typedef Json::LargestUInt LargestUInt;
|
||||
typedef Json::ArrayIndex ArrayIndex;
|
||||
|
||||
static const Value& null;
|
||||
static const Value& null; ///< We regret this reference to a global instance; prefer the simpler Value().
|
||||
static const Value& nullRef; ///< just a kludge for binary-compatibility; same as null
|
||||
/// Minimum signed integer value that can be stored in a Json::Value.
|
||||
static const LargestInt minLargestInt;
|
||||
/// Maximum signed integer value that can be stored in a Json::Value.
|
||||
@ -159,7 +187,6 @@ public:
|
||||
|
||||
private:
|
||||
#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
class CZString {
|
||||
public:
|
||||
enum DuplicationPolicy {
|
||||
@ -168,20 +195,31 @@ private:
|
||||
duplicateOnCopy
|
||||
};
|
||||
CZString(ArrayIndex index);
|
||||
CZString(const char* cstr, DuplicationPolicy allocate);
|
||||
CZString(const CZString& other);
|
||||
CZString(char const* str, unsigned length, DuplicationPolicy allocate);
|
||||
CZString(CZString const& other);
|
||||
~CZString();
|
||||
CZString& operator=(CZString other);
|
||||
bool operator<(const CZString& other) const;
|
||||
bool operator==(const CZString& other) const;
|
||||
bool operator<(CZString const& other) const;
|
||||
bool operator==(CZString const& other) const;
|
||||
ArrayIndex index() const;
|
||||
const char* c_str() const;
|
||||
//const char* c_str() const; ///< \deprecated
|
||||
char const* data() const;
|
||||
unsigned length() const;
|
||||
bool isStaticString() const;
|
||||
|
||||
private:
|
||||
void swap(CZString& other);
|
||||
const char* cstr_;
|
||||
ArrayIndex index_;
|
||||
|
||||
struct StringStorage {
|
||||
unsigned policy_: 2;
|
||||
unsigned length_: 30; // 1GB max
|
||||
};
|
||||
|
||||
char const* cstr_; // actually, a prefixed string, unless policy is noDup
|
||||
union {
|
||||
ArrayIndex index_;
|
||||
StringStorage storage_;
|
||||
};
|
||||
};
|
||||
|
||||
public:
|
||||
@ -190,7 +228,6 @@ public:
|
||||
#else
|
||||
typedef CppTL::SmallMap<CZString, Value> ObjectValues;
|
||||
#endif // ifndef JSON_USE_CPPTL_SMALLMAP
|
||||
#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
|
||||
|
||||
public:
|
||||
@ -217,47 +254,59 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
Value(UInt64 value);
|
||||
#endif // if defined(JSON_HAS_INT64)
|
||||
Value(double value);
|
||||
Value(const char* value);
|
||||
Value(const char* beginValue, const char* endValue);
|
||||
Value(const char* value); ///< Copy til first 0. (NULL causes to seg-fault.)
|
||||
Value(const char* beginValue, const char* endValue); ///< Copy all, incl zeroes.
|
||||
/** \brief Constructs a value from a static string.
|
||||
|
||||
* Like other value string constructor but do not duplicate the string for
|
||||
* internal storage. The given string must remain alive after the call to this
|
||||
* constructor.
|
||||
* \note This works only for null-terminated strings. (We cannot change the
|
||||
* size of this class, so we have nowhere to store the length,
|
||||
* which might be computed later for various operations.)
|
||||
*
|
||||
* Example of usage:
|
||||
* \code
|
||||
* Json::Value aValue( StaticString("some text") );
|
||||
* static StaticString foo("some text");
|
||||
* Json::Value aValue(foo);
|
||||
* \endcode
|
||||
*/
|
||||
Value(const StaticString& value);
|
||||
Value(const std::string& value);
|
||||
Value(const std::string& value); ///< Copy data() til size(). Embedded zeroes too.
|
||||
#ifdef JSON_USE_CPPTL
|
||||
Value(const CppTL::ConstString& value);
|
||||
#endif
|
||||
Value(bool value);
|
||||
/// Deep copy.
|
||||
Value(const Value& other);
|
||||
~Value();
|
||||
|
||||
/// Deep copy, then swap(other).
|
||||
/// \note Over-write existing comments. To preserve comments, use #swapPayload().
|
||||
Value& operator=(Value other);
|
||||
/// Swap values.
|
||||
/// \note Currently, comments are intentionally not swapped, for
|
||||
/// both logic and efficiency.
|
||||
/// Swap everything.
|
||||
void swap(Value& other);
|
||||
/// Swap values but leave comments and source offsets in place.
|
||||
void swapPayload(Value& other);
|
||||
|
||||
ValueType type() const;
|
||||
|
||||
/// Compare payload only, not comments etc.
|
||||
bool operator<(const Value& other) const;
|
||||
bool operator<=(const Value& other) const;
|
||||
bool operator>=(const Value& other) const;
|
||||
bool operator>(const Value& other) const;
|
||||
|
||||
bool operator==(const Value& other) const;
|
||||
bool operator!=(const Value& other) const;
|
||||
|
||||
int compare(const Value& other) const;
|
||||
|
||||
const char* asCString() const;
|
||||
std::string asString() const;
|
||||
const char* asCString() const; ///< Embedded zeroes could cause you trouble!
|
||||
std::string asString() const; ///< Embedded zeroes are possible.
|
||||
/** Get raw char* of string-value.
|
||||
* \return false if !string. (Seg-fault if str or end are NULL.)
|
||||
*/
|
||||
bool getString(
|
||||
char const** str, char const** end) const;
|
||||
#ifdef JSON_USE_CPPTL
|
||||
CppTL::ConstString asConstString() const;
|
||||
#endif
|
||||
@ -348,19 +397,23 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
Value& append(const Value& value);
|
||||
|
||||
/// Access an object value by name, create a null member if it does not exist.
|
||||
/// \note Because of our implementation, keys are limited to 2^30 -1 chars.
|
||||
/// Exceeding that will cause an exception.
|
||||
Value& operator[](const char* key);
|
||||
/// Access an object value by name, returns null if there is no member with
|
||||
/// that name.
|
||||
const Value& operator[](const char* key) const;
|
||||
/// Access an object value by name, create a null member if it does not exist.
|
||||
/// \param key may contain embedded nulls.
|
||||
Value& operator[](const std::string& key);
|
||||
/// Access an object value by name, returns null if there is no member with
|
||||
/// that name.
|
||||
/// \param key may contain embedded nulls.
|
||||
const Value& operator[](const std::string& key) const;
|
||||
/** \brief Access an object value by name, create a null member if it does not
|
||||
exist.
|
||||
|
||||
* If the object as no entry for that name, then the member name used to store
|
||||
* If the object has no entry for that name, then the member name used to store
|
||||
* the new entry is not duplicated.
|
||||
* Example of use:
|
||||
* \code
|
||||
@ -378,27 +431,69 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
const Value& operator[](const CppTL::ConstString& key) const;
|
||||
#endif
|
||||
/// Return the member named key if it exist, defaultValue otherwise.
|
||||
/// \note deep copy
|
||||
Value get(const char* key, const Value& defaultValue) const;
|
||||
/// Return the member named key if it exist, defaultValue otherwise.
|
||||
/// \note deep copy
|
||||
/// \param key may contain embedded nulls.
|
||||
Value get(const char* key, const char* end, const Value& defaultValue) const;
|
||||
/// Return the member named key if it exist, defaultValue otherwise.
|
||||
/// \note deep copy
|
||||
/// \param key may contain embedded nulls.
|
||||
Value get(const std::string& key, const Value& defaultValue) const;
|
||||
#ifdef JSON_USE_CPPTL
|
||||
/// Return the member named key if it exist, defaultValue otherwise.
|
||||
/// \note deep copy
|
||||
Value get(const CppTL::ConstString& key, const Value& defaultValue) const;
|
||||
#endif
|
||||
/// Most general and efficient version of isMember()const, get()const,
|
||||
/// and operator[]const
|
||||
/// \note As stated elsewhere, behavior is undefined if (end-key) >= 2^30
|
||||
Value const* find(char const* key, char const* end) const;
|
||||
/// Most general and efficient version of object-mutators.
|
||||
/// \note As stated elsewhere, behavior is undefined if (end-key) >= 2^30
|
||||
/// \return non-zero, but JSON_ASSERT if this is neither object nor nullValue.
|
||||
Value const* demand(char const* key, char const* end);
|
||||
/// \brief Remove and return the named member.
|
||||
///
|
||||
/// Do nothing if it did not exist.
|
||||
/// \return the removed Value, or null.
|
||||
/// \pre type() is objectValue or nullValue
|
||||
/// \post type() is unchanged
|
||||
/// \deprecated
|
||||
Value removeMember(const char* key);
|
||||
/// Same as removeMember(const char*)
|
||||
/// \param key may contain embedded nulls.
|
||||
/// \deprecated
|
||||
Value removeMember(const std::string& key);
|
||||
/// Same as removeMember(const char* key, const char* end, Value* removed),
|
||||
/// but 'key' is null-terminated.
|
||||
bool removeMember(const char* key, Value* removed);
|
||||
/** \brief Remove the named map member.
|
||||
|
||||
Update 'removed' iff removed.
|
||||
\param key may contain embedded nulls.
|
||||
\return true iff removed (no exceptions)
|
||||
*/
|
||||
bool removeMember(std::string const& key, Value* removed);
|
||||
/// Same as removeMember(std::string const& key, Value* removed)
|
||||
bool removeMember(const char* key, const char* end, Value* removed);
|
||||
/** \brief Remove the indexed array element.
|
||||
|
||||
O(n) expensive operations.
|
||||
Update 'removed' iff removed.
|
||||
\return true iff removed (no exceptions)
|
||||
*/
|
||||
bool removeIndex(ArrayIndex i, Value* removed);
|
||||
|
||||
/// Return true if the object has a member named key.
|
||||
/// \note 'key' must be null-terminated.
|
||||
bool isMember(const char* key) const;
|
||||
/// Return true if the object has a member named key.
|
||||
/// \param key may contain embedded nulls.
|
||||
bool isMember(const std::string& key) const;
|
||||
/// Same as isMember(std::string const& key)const
|
||||
bool isMember(const char* key, const char* end) const;
|
||||
#ifdef JSON_USE_CPPTL
|
||||
/// Return true if the object has a member named key.
|
||||
bool isMember(const CppTL::ConstString& key) const;
|
||||
@ -416,9 +511,12 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
// EnumValues enumValues() const;
|
||||
//# endif
|
||||
|
||||
/// Comments must be //... or /* ... */
|
||||
/// \deprecated Always pass len.
|
||||
JSONCPP_DEPRECATED("Use setComment(std::string const&) instead.")
|
||||
void setComment(const char* comment, CommentPlacement placement);
|
||||
/// Comments must be //... or /* ... */
|
||||
void setComment(const char* comment, size_t len, CommentPlacement placement);
|
||||
/// Comments must be //... or /* ... */
|
||||
void setComment(const std::string& comment, CommentPlacement placement);
|
||||
bool hasComment(CommentPlacement placement) const;
|
||||
/// Include delimiters and embedded newlines.
|
||||
@ -442,26 +540,14 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
private:
|
||||
void initBasic(ValueType type, bool allocated = false);
|
||||
|
||||
Value& resolveReference(const char* key, bool isStatic);
|
||||
Value& resolveReference(const char* key);
|
||||
Value& resolveReference(const char* key, const char* end);
|
||||
|
||||
#ifdef JSON_VALUE_USE_INTERNAL_MAP
|
||||
inline bool isItemAvailable() const { return itemIsUsed_ == 0; }
|
||||
|
||||
inline void setItemUsed(bool isUsed = true) { itemIsUsed_ = isUsed ? 1 : 0; }
|
||||
|
||||
inline bool isMemberNameStatic() const { return memberNameIsStatic_ == 0; }
|
||||
|
||||
inline void setMemberNameIsStatic(bool isStatic) {
|
||||
memberNameIsStatic_ = isStatic ? 1 : 0;
|
||||
}
|
||||
#endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP
|
||||
|
||||
private:
|
||||
struct CommentInfo {
|
||||
CommentInfo();
|
||||
~CommentInfo();
|
||||
|
||||
void setComment(const char* text);
|
||||
void setComment(const char* text, size_t len);
|
||||
|
||||
char* comment_;
|
||||
};
|
||||
@ -480,20 +566,12 @@ private:
|
||||
LargestUInt uint_;
|
||||
double real_;
|
||||
bool bool_;
|
||||
char* string_;
|
||||
#ifdef JSON_VALUE_USE_INTERNAL_MAP
|
||||
ValueInternalArray* array_;
|
||||
ValueInternalMap* map_;
|
||||
#else
|
||||
char* string_; // actually ptr to unsigned, followed by str, unless !allocated_
|
||||
ObjectValues* map_;
|
||||
#endif
|
||||
} value_;
|
||||
ValueType type_ : 8;
|
||||
int allocated_ : 1; // Notes: if declared as bool, bitfield is useless.
|
||||
#ifdef JSON_VALUE_USE_INTERNAL_MAP
|
||||
unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container.
|
||||
int memberNameIsStatic_ : 1; // used by the ValueInternalMap container.
|
||||
#endif
|
||||
unsigned int allocated_ : 1; // Notes: if declared as bool, bitfield is useless.
|
||||
// If not allocated_, string_ must be null-terminated.
|
||||
CommentInfo* comments_;
|
||||
|
||||
// [start, limit) byte offsets in the source JSON text from which this Value
|
||||
@ -565,345 +643,6 @@ private:
|
||||
Args args_;
|
||||
};
|
||||
|
||||
#ifdef JSON_VALUE_USE_INTERNAL_MAP
/** \brief Allocator to customize Value internal map.
 * Below is an example of a simple implementation (the default implementation
 * actually uses a memory pool for speed).
 * \code
class DefaultValueMapAllocator : public ValueMapAllocator
{
public: // overridden from ValueMapAllocator
virtual ValueInternalMap *newMap()
{
return new ValueInternalMap();
}

virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other )
{
return new ValueInternalMap( other );
}

virtual void destructMap( ValueInternalMap *map )
{
delete map;
}

virtual ValueInternalLink *allocateMapBuckets( unsigned int size )
{
return new ValueInternalLink[size];
}

virtual void releaseMapBuckets( ValueInternalLink *links )
{
delete [] links;
}

virtual ValueInternalLink *allocateMapLink()
{
return new ValueInternalLink();
}

virtual void releaseMapLink( ValueInternalLink *link )
{
delete link;
}
};
 * \endcode
 */
class JSON_API ValueMapAllocator {
public:
virtual ~ValueMapAllocator();
virtual ValueInternalMap* newMap() = 0;
virtual ValueInternalMap* newMapCopy(const ValueInternalMap& other) = 0;
virtual void destructMap(ValueInternalMap* map) = 0;
virtual ValueInternalLink* allocateMapBuckets(unsigned int size) = 0;
virtual void releaseMapBuckets(ValueInternalLink* links) = 0;
virtual ValueInternalLink* allocateMapLink() = 0;
virtual void releaseMapLink(ValueInternalLink* link) = 0;
};

/** \brief ValueInternalMap hash-map bucket chain link (for internal use only).
 * \internal previous_ & next_ allow for bidirectional traversal.
 */
class JSON_API ValueInternalLink {
public:
enum {
itemPerLink = 6
}; // sizeof(ValueInternalLink) = 128 on 32 bits architecture.
enum InternalFlags {
flagAvailable = 0,
flagUsed = 1
};

ValueInternalLink();

~ValueInternalLink();

Value items_[itemPerLink];
char* keys_[itemPerLink];
ValueInternalLink* previous_;
ValueInternalLink* next_;
};

/** \brief A linked-page based hash-table implementation used internally by
 * Value.
 * \internal ValueInternalMap is a traditional bucket-based hash-table, with a
 * linked list in each bucket to handle collisions. There is an additional
 * twist: each node of the collision linked list is a page containing a fixed
 * number of values. This provides a better compromise between memory usage
 * and speed.
 *
 * Each bucket is made up of a chained list of ValueInternalLink. The last
 * link of a given bucket can be found in the 'previous_' field of the
 * following bucket. The last link of the last bucket is stored in tailLink_
 * as it has no following bucket. Only the last link of a bucket may contain
 * 'available' items. The last link always contains at least one element,
 * unless it is the bucket's very first link.
 */
class JSON_API ValueInternalMap {
friend class ValueIteratorBase;
friend class Value;

public:
typedef unsigned int HashKey;
typedef unsigned int BucketIndex;

#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
struct IteratorState {
IteratorState() : map_(0), link_(0), itemIndex_(0), bucketIndex_(0) {}
ValueInternalMap* map_;
ValueInternalLink* link_;
BucketIndex itemIndex_;
BucketIndex bucketIndex_;
};
#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION

ValueInternalMap();
ValueInternalMap(const ValueInternalMap& other);
ValueInternalMap& operator=(ValueInternalMap other);
~ValueInternalMap();

void swap(ValueInternalMap& other);

BucketIndex size() const;

void clear();

bool reserveDelta(BucketIndex growth);

bool reserve(BucketIndex newItemCount);

const Value* find(const char* key) const;

Value* find(const char* key);

Value& resolveReference(const char* key, bool isStatic);

void remove(const char* key);

void doActualRemove(ValueInternalLink* link,
BucketIndex index,
BucketIndex bucketIndex);

ValueInternalLink*& getLastLinkInBucket(BucketIndex bucketIndex);

Value& setNewItem(const char* key,
bool isStatic,
ValueInternalLink* link,
BucketIndex index);

Value& unsafeAdd(const char* key, bool isStatic, HashKey hashedKey);

HashKey hash(const char* key) const;

int compare(const ValueInternalMap& other) const;

private:
void makeBeginIterator(IteratorState& it) const;
void makeEndIterator(IteratorState& it) const;
static bool equals(const IteratorState& x, const IteratorState& other);
static void increment(IteratorState& iterator);
static void incrementBucket(IteratorState& iterator);
static void decrement(IteratorState& iterator);
static const char* key(const IteratorState& iterator);
static const char* key(const IteratorState& iterator, bool& isStatic);
static Value& value(const IteratorState& iterator);
static int distance(const IteratorState& x, const IteratorState& y);

private:
ValueInternalLink* buckets_;
ValueInternalLink* tailLink_;
BucketIndex bucketsSize_;
BucketIndex itemCount_;
};
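
The bucket/link layout described in the comment above is easier to see in a stand-alone sketch. The following illustrative C++ uses hypothetical names and omits details the real map tracks (hashing, 'available' flags, static keys); it only shows how a lookup walks one bucket's chain of fixed-capacity links:

#include <cstring>

// Hypothetical, simplified version of the scheme described above: each
// bucket is a chain of links, and each link is a page of itemPerLink slots.
struct Link {
    enum { itemPerLink = 6 };
    const char* keys[itemPerLink]; // null slot => 'available'
    int values[itemPerLink];       // payload (Value in the real container)
    Link* next;                    // next page in the same bucket's chain
};

// Walk the chain of one bucket, comparing keys slot by slot in each page.
static int* findInBucket(Link* bucket, const char* key) {
    for (Link* link = bucket; link != 0; link = link->next)
        for (int i = 0; i < Link::itemPerLink; ++i)
            if (link->keys[i] && std::strcmp(link->keys[i], key) == 0)
                return &link->values[i];
    return 0; // not found
}

Because each collision node holds six items rather than one, the chain stays short and pointer-chasing is amortized over a page of entries, which is the memory/speed compromise the comment refers to.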

/** \brief A simplified deque implementation used internally by Value.
 * \internal
 * It is based on a list of fixed-size "pages"; each page contains a fixed
 * number of items. Instead of using a linked list, an array of pointers is
 * used for fast item look-up.
 * Look-up of an element works as follows:
 * - compute the page index: pageIndex = itemIndex / itemsPerPage
 * - look up the item in the page: pages_[pageIndex][itemIndex % itemsPerPage]
 *
 * Insertion is amortized constant time (only the array containing the page
 * pointers needs to be reallocated when items are appended).
 */
class JSON_API ValueInternalArray {
friend class Value;
friend class ValueIteratorBase;

public:
enum {
itemsPerPage = 8
}; // should be a power of 2 for fast divide and modulo.
typedef Value::ArrayIndex ArrayIndex;
typedef unsigned int PageIndex;

#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
struct IteratorState // Must be a POD
{
IteratorState() : array_(0), currentPageIndex_(0), currentItemIndex_(0) {}
ValueInternalArray* array_;
Value** currentPageIndex_;
unsigned int currentItemIndex_;
};
#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION

ValueInternalArray();
ValueInternalArray(const ValueInternalArray& other);
ValueInternalArray& operator=(ValueInternalArray other);
~ValueInternalArray();
void swap(ValueInternalArray& other);

void clear();
void resize(ArrayIndex newSize);

Value& resolveReference(ArrayIndex index);

Value* find(ArrayIndex index) const;

ArrayIndex size() const;

int compare(const ValueInternalArray& other) const;

private:
static bool equals(const IteratorState& x, const IteratorState& other);
static void increment(IteratorState& iterator);
static void decrement(IteratorState& iterator);
static Value& dereference(const IteratorState& iterator);
static Value& unsafeDereference(const IteratorState& iterator);
static int distance(const IteratorState& x, const IteratorState& y);
static ArrayIndex indexOf(const IteratorState& iterator);
void makeBeginIterator(IteratorState& it) const;
void makeEndIterator(IteratorState& it) const;
void makeIterator(IteratorState& it, ArrayIndex index) const;

void makeIndexValid(ArrayIndex index);

Value** pages_;
ArrayIndex size_;
PageIndex pageCount_;
};
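
The page arithmetic quoted in the comment above can be checked directly. A minimal sketch, assuming the same power-of-two itemsPerPage = 8 (the names are illustrative, not the real implementation):

enum { itemsPerPage = 8 }; // power of 2, so / and % compile to shift and mask

// pages is an array of page pointers; each page holds itemsPerPage items.
static int& itemAt(int** pages, unsigned itemIndex) {
    unsigned pageIndex = itemIndex / itemsPerPage;     // which page
    return pages[pageIndex][itemIndex % itemsPerPage]; // slot within the page
}

For example, item 11 resolves to pages[1], slot 3 (11 / 8 == 1, 11 % 8 == 3). Appending only ever grows the small array of page pointers, never moves existing items, which is why insertion is amortized constant time.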

/** \brief Experimental: do not use. Allocator to customize Value internal
 * array.
 * Below is an example of a simple implementation (the actual implementation
 * uses a memory pool).
 \code
class DefaultValueArrayAllocator : public ValueArrayAllocator
{
public: // overridden from ValueArrayAllocator
virtual ~DefaultValueArrayAllocator()
{
}

virtual ValueInternalArray *newArray()
{
return new ValueInternalArray();
}

virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other )
{
return new ValueInternalArray( other );
}

virtual void destruct( ValueInternalArray *array )
{
delete array;
}

virtual void reallocateArrayPageIndex( Value **&indexes,
ValueInternalArray::PageIndex &indexCount,
ValueInternalArray::PageIndex minNewIndexCount )
{
ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1;
if ( minNewIndexCount > newIndexCount )
newIndexCount = minNewIndexCount;
void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount );
if ( !newIndexes )
throw std::bad_alloc();
indexCount = newIndexCount;
indexes = static_cast<Value **>( newIndexes );
}
virtual void releaseArrayPageIndex( Value **indexes,
ValueInternalArray::PageIndex indexCount )
{
if ( indexes )
free( indexes );
}

virtual Value *allocateArrayPage()
{
return static_cast<Value *>( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) );
}

virtual void releaseArrayPage( Value *value )
{
if ( value )
free( value );
}
};
\endcode
 */
class JSON_API ValueArrayAllocator {
public:
virtual ~ValueArrayAllocator();
virtual ValueInternalArray* newArray() = 0;
virtual ValueInternalArray* newArrayCopy(const ValueInternalArray& other) = 0;
virtual void destructArray(ValueInternalArray* array) = 0;
/** \brief Reallocate the array page index.
 * Reallocates the array of pointers to the pages.
 * \param indexes [input] pointer to the current index. May be \c NULL.
 *                [output] pointer to the new index, of at least
 *                         \a minNewIndexCount pages.
 * \param indexCount [input] current number of pages in the index.
 *                   [output] number of pages the reallocated index can
 *                            handle. \b MUST be >= \a minNewIndexCount.
 * \param minNewIndexCount Minimum number of pages the new index must be
 *                         able to handle.
 */
virtual void
reallocateArrayPageIndex(Value**& indexes,
ValueInternalArray::PageIndex& indexCount,
ValueInternalArray::PageIndex minNewIndexCount) = 0;
virtual void
releaseArrayPageIndex(Value** indexes,
ValueInternalArray::PageIndex indexCount) = 0;
virtual Value* allocateArrayPage() = 0;
virtual void releaseArrayPage(Value* value) = 0;
};
#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP

/** \brief base class for Value iterators.
 *
 */
@@ -914,32 +653,35 @@ public:
typedef int difference_type;
typedef ValueIteratorBase SelfType;

ValueIteratorBase();
#ifndef JSON_VALUE_USE_INTERNAL_MAP
explicit ValueIteratorBase(const Value::ObjectValues::iterator& current);
#else
ValueIteratorBase(const ValueInternalArray::IteratorState& state);
ValueIteratorBase(const ValueInternalMap::IteratorState& state);
#endif

bool operator==(const SelfType& other) const { return isEqual(other); }

bool operator!=(const SelfType& other) const { return !isEqual(other); }

difference_type operator-(const SelfType& other) const {
return computeDistance(other);
return other.computeDistance(*this);
}

/// Return either the index or the member name of the referenced value as a
/// Value.
Value key() const;

/// Return the index of the referenced Value. -1 if it is not an arrayValue.
/// Return the index of the referenced Value, or -1 if it is not an arrayValue.
UInt index() const;

/// Return the member name of the referenced Value, or "" if it is not an
/// objectValue.
/// \note Avoid `c_str()` on result, as embedded zeroes are possible.
std::string name() const;

/// Return the member name of the referenced Value. "" if it is not an
/// objectValue.
const char* memberName() const;
/// \deprecated This cannot be used for UTF-8 strings, since there can be embedded nulls.
JSONCPP_DEPRECATED("Use `key = name();` instead.")
char const* memberName() const;
/// Return the member name of the referenced Value, or NULL if it is not an
/// objectValue.
/// \note Better version than memberName(). Allows embedded nulls.
char const* memberName(char const** end) const;

protected:
Value& deref() const;
@@ -955,17 +697,15 @@ protected:
void copy(const SelfType& other);

private:
#ifndef JSON_VALUE_USE_INTERNAL_MAP
Value::ObjectValues::iterator current_;
// Indicates that the iterator is for a null value.
bool isNull_;
#else
union {
ValueInternalArray::IteratorState array_;
ValueInternalMap::IteratorState map_;
} iterator_;
bool isArray_;
#endif

public:
// For some reason, BORLAND needs these at the end, rather
// than earlier. No idea why.
ValueIteratorBase();
explicit ValueIteratorBase(const Value::ObjectValues::iterator& current);
};

/** \brief const iterator for object and array value.
@@ -976,8 +716,8 @@ class JSON_API ValueConstIterator : public ValueIteratorBase {

public:
typedef const Value value_type;
typedef unsigned int size_t;
typedef int difference_type;
//typedef unsigned int size_t;
//typedef int difference_type;
typedef const Value& reference;
typedef const Value* pointer;
typedef ValueConstIterator SelfType;
@@ -987,12 +727,7 @@ public:
private:
/*! \internal Used by Value to create an iterator.
 */
#ifndef JSON_VALUE_USE_INTERNAL_MAP
explicit ValueConstIterator(const Value::ObjectValues::iterator& current);
#else
ValueConstIterator(const ValueInternalArray::IteratorState& state);
ValueConstIterator(const ValueInternalMap::IteratorState& state);
#endif
public:
SelfType& operator=(const ValueIteratorBase& other);

@@ -1043,12 +778,7 @@ public:
private:
/*! \internal Used by Value to create an iterator.
 */
#ifndef JSON_VALUE_USE_INTERNAL_MAP
explicit ValueIterator(const Value::ObjectValues::iterator& current);
#else
ValueIterator(const ValueInternalArray::IteratorState& state);
ValueIterator(const ValueInternalMap::IteratorState& state);
#endif
public:
SelfType& operator=(const SelfType& other);

@@ -1081,6 +811,14 @@ public:

} // namespace Json

namespace std {
/// Specialize std::swap() for Json::Value.
template<>
inline void swap(Json::Value& a, Json::Value& b) { a.swap(b); }
}
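
With this specialization in place, an unqualified std::swap on two Json::Value objects dispatches to the constant-time member swap instead of the generic copy-based template. A short usage sketch:

#include <json/json.h>

void swapExample() {
    Json::Value a = 1;      // intValue
    Json::Value b = "two";  // stringValue
    std::swap(a, b);        // resolves to the specialization above, i.e. a.swap(b)
}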

#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma warning(pop)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)

6 3rdparty/jsoncpp/include/json/version.h vendored
@@ -4,10 +4,10 @@
#ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED

# define JSONCPP_VERSION_STRING "1.1.0"
# define JSONCPP_VERSION_STRING "1.6.2"
# define JSONCPP_VERSION_MAJOR 1
# define JSONCPP_VERSION_MINOR 1
# define JSONCPP_VERSION_PATCH 0
# define JSONCPP_VERSION_MINOR 6
# define JSONCPP_VERSION_PATCH 2
# define JSONCPP_VERSION_QUALIFIER
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))
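
Since major, minor, and patch each occupy one byte of the packed value, version 1.6.2 expands to (1 << 24) | (6 << 16) | (2 << 8) == 0x01060200. A minimal sketch of a compile-time version guard built on this macro:

#include <json/version.h>

// 0x01060200 is the packed form of 1.6.2, matching the macro layout above.
#if JSONCPP_VERSION_HEXA >= 0x01060200
// code that relies on jsoncpp 1.6.2 or newer
#endif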

116 3rdparty/jsoncpp/include/json/writer.h vendored
@@ -11,6 +11,7 @@
#endif // if !defined(JSON_IS_AMALGAMATION)
#include <vector>
#include <string>
#include <ostream>

// Disable warning C4251: <data member>: <type> needs to have dll-interface to
// be used by...
@@ -23,7 +24,115 @@ namespace Json {

class Value;

/**

Usage:
\code
using namespace Json;
void writeToStdout(StreamWriter::Factory const& factory, Value const& value) {
std::unique_ptr<StreamWriter> const writer(
factory.newStreamWriter());
writer->write(value, &std::cout);
std::cout << std::endl; // add lf and flush
}
\endcode
*/
class JSON_API StreamWriter {
protected:
std::ostream* sout_; // not owned; will not delete
public:
StreamWriter();
virtual ~StreamWriter();
/** Write Value into document as configured in sub-class.
Do not take ownership of sout, but maintain a reference during function.
\pre sout != NULL
\return zero on success (For now, we always return zero, so check the stream instead.)
\throw std::exception possibly, depending on configuration
*/
virtual int write(Value const& root, std::ostream* sout) = 0;

/** \brief A simple abstract factory.
*/
class JSON_API Factory {
public:
virtual ~Factory();
/** \brief Allocate a StreamWriter via operator new().
* \throw std::exception if something goes wrong (e.g. invalid settings)
*/
virtual StreamWriter* newStreamWriter() const = 0;
}; // Factory
}; // StreamWriter

/** \brief Write into a stringstream, then return string, for convenience.
 * A StreamWriter will be created from the factory, used, and then deleted.
 */
std::string JSON_API writeString(StreamWriter::Factory const& factory, Value const& root);
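
Combined with the StreamWriterBuilder declared just below, this helper collapses the factory-allocate-write-delete dance into a single call. A minimal usage sketch (the "indentation" setting is one of the builder options documented below):

#include <json/writer.h>
#include <iostream>

void printCompact(Json::Value const& root) {
    Json::StreamWriterBuilder builder;
    builder["indentation"] = ""; // compact, single-line output
    std::cout << Json::writeString(builder, root) << std::endl;
}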

/** \brief Build a StreamWriter implementation.

Usage:
\code
using namespace Json;
Value value = ...;
StreamWriterBuilder builder;
builder["commentStyle"] = "None";
builder["indentation"] = "   "; // or whatever you like
std::unique_ptr<Json::StreamWriter> writer(
builder.newStreamWriter());
writer->write(value, &std::cout);
std::cout << std::endl; // add lf and flush
\endcode
*/
class JSON_API StreamWriterBuilder : public StreamWriter::Factory {
public:
// Note: We use a Json::Value so that we can add data-members to this class
// without a major version bump.
/** Configuration of this builder.
Available settings (case-sensitive):
- "commentStyle": "None" or "All"
- "indentation": "<anything>"
- "enableYAMLCompatibility": false or true
  - slightly change the whitespace around colons
- "dropNullPlaceholders": false or true
  - Drop the "null" string from the writer's output for nullValues.
    Strictly speaking, this is not valid JSON. But when the output is being
    fed to a browser's JavaScript, it makes for smaller output and the
    browser can handle the output just fine.

You can examine `settings_` yourself to see the defaults. You can also
write and read them just like any JSON Value.
\sa setDefaults()
*/
Json::Value settings_;

StreamWriterBuilder();
virtual ~StreamWriterBuilder();

/**
 * \throw std::exception if something goes wrong (e.g. invalid settings)
 */
virtual StreamWriter* newStreamWriter() const;

/** \return true if 'settings' are legal and consistent;
 * otherwise, indicate bad settings via 'invalid'.
 */
bool validate(Json::Value* invalid) const;
/** A simple way to update a specific setting.
 */
Value& operator[](std::string key);

/** Called by ctor, but you can use this to reset settings_.
 * \pre 'settings' != NULL (but Json::null is fine)
 * \remark Defaults:
 * \snippet src/lib_json/json_writer.cpp StreamWriterBuilderDefaults
 */
static void setDefaults(Json::Value* settings);
};

/** \brief Abstract class for writers.
 * \deprecated Use StreamWriter. (And really, this is an implementation detail.)
 */
class JSON_API Writer {
public:
@@ -39,8 +148,10 @@ public:
 * consumption,
 * but may be useful to support features such as RPC where bandwidth is limited.
 * \sa Reader, Value
 * \deprecated Use StreamWriterBuilder.
 */
class JSON_API FastWriter : public Writer {

public:
FastWriter();
virtual ~FastWriter() {}
@@ -90,6 +201,7 @@ private:
 * #CommentPlacement.
 *
 * \sa Reader, Value, Value::setComment()
 * \deprecated Use StreamWriterBuilder.
 */
class JSON_API StyledWriter : public Writer {
public:
@@ -151,6 +263,7 @@ private:
 *
 * \param indentation Each level will be indented by this amount extra.
 * \sa Reader, Value, Value::setComment()
 * \deprecated Use StreamWriterBuilder.
 */
class JSON_API StyledStreamWriter {
public:
@@ -187,7 +300,8 @@ private:
std::string indentString_;
int rightMargin_;
std::string indentation_;
bool addChildValues_;
bool addChildValues_ : 1;
bool indented_ : 1;
};

#if defined(JSON_HAS_INT64)

@@ -178,15 +178,6 @@
<File
RelativePath="..\..\include\json\json.h">
</File>
<File
RelativePath="..\..\src\lib_json\json_batchallocator.h">
</File>
<File
RelativePath="..\..\src\lib_json\json_internalarray.inl">
</File>
<File
RelativePath="..\..\src\lib_json\json_internalmap.inl">
</File>
<File
RelativePath="..\..\src\lib_json\json_reader.cpp">
</File>
202 3rdparty/jsoncpp/makerelease.py vendored
@@ -1,3 +1,8 @@
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

"""Tag the sandbox for release, make source and doc tarballs.

Requires Python 2.6
@@ -14,6 +19,7 @@ python makerelease.py 0.5.0 0.6.0-dev
Note: This was for Subversion. Now that we are in GitHub, we do not
need to build versioned tarballs anymore, so makerelease.py is defunct.
"""

from __future__ import print_function
import os.path
import subprocess
@@ -34,57 +40,57 @@ SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download'
SOURCEFORGE_PROJECT = 'jsoncpp'

def set_version( version ):
def set_version(version):
with open('version','wb') as f:
f.write( version.strip() )
f.write(version.strip())

def rmdir_if_exist( dir_path ):
if os.path.isdir( dir_path ):
shutil.rmtree( dir_path )
def rmdir_if_exist(dir_path):
if os.path.isdir(dir_path):
shutil.rmtree(dir_path)

class SVNError(Exception):
pass

def svn_command( command, *args ):
def svn_command(command, *args):
cmd = ['svn', '--non-interactive', command] + list(args)
print('Running:', ' '.join( cmd ))
process = subprocess.Popen( cmd,
print('Running:', ' '.join(cmd))
process = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT )
stderr=subprocess.STDOUT)
stdout = process.communicate()[0]
if process.returncode:
error = SVNError( 'SVN command failed:\n' + stdout )
error = SVNError('SVN command failed:\n' + stdout)
error.returncode = process.returncode
raise error
return stdout

def check_no_pending_commit():
"""Checks that there is no pending commit in the sandbox."""
stdout = svn_command( 'status', '--xml' )
etree = ElementTree.fromstring( stdout )
stdout = svn_command('status', '--xml')
etree = ElementTree.fromstring(stdout)
msg = []
for entry in etree.getiterator( 'entry' ):
for entry in etree.getiterator('entry'):
path = entry.get('path')
status = entry.find('wc-status').get('item')
if status != 'unversioned' and path != 'version':
msg.append( 'File "%s" has pending change (status="%s")' % (path, status) )
msg.append('File "%s" has pending change (status="%s")' % (path, status))
if msg:
msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' )
return '\n'.join( msg )
msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!')
return '\n'.join(msg)

def svn_join_url( base_url, suffix ):
def svn_join_url(base_url, suffix):
if not base_url.endswith('/'):
base_url += '/'
if suffix.startswith('/'):
suffix = suffix[1:]
return base_url + suffix

def svn_check_if_tag_exist( tag_url ):
def svn_check_if_tag_exist(tag_url):
"""Checks if a tag exist.
Returns: True if the tag exist, False otherwise.
"""
try:
list_stdout = svn_command( 'list', tag_url )
list_stdout = svn_command('list', tag_url)
except SVNError as e:
if e.returncode != 1 or not str(e).find('tag_url'):
raise e
@@ -92,82 +98,82 @@ def svn_check_if_tag_exist( tag_url ):
return False
return True

def svn_commit( message ):
def svn_commit(message):
"""Commit the sandbox, providing the specified comment.
"""
svn_command( 'ci', '-m', message )
svn_command('ci', '-m', message)

def svn_tag_sandbox( tag_url, message ):
def svn_tag_sandbox(tag_url, message):
"""Makes a tag based on the sandbox revisions.
"""
svn_command( 'copy', '-m', message, '.', tag_url )
svn_command('copy', '-m', message, '.', tag_url)

def svn_remove_tag( tag_url, message ):
def svn_remove_tag(tag_url, message):
"""Removes an existing tag.
"""
svn_command( 'delete', '-m', message, tag_url )
svn_command('delete', '-m', message, tag_url)

def svn_export( tag_url, export_dir ):
def svn_export(tag_url, export_dir):
"""Exports the tag_url revision to export_dir.
Target directory, including its parent is created if it does not exist.
If the directory export_dir exist, it is deleted before export proceed.
"""
rmdir_if_exist( export_dir )
svn_command( 'export', tag_url, export_dir )
rmdir_if_exist(export_dir)
svn_command('export', tag_url, export_dir)

def fix_sources_eol( dist_dir ):
def fix_sources_eol(dist_dir):
"""Set file EOL for tarball distribution.
"""
print('Preparing exported source file EOL for distribution...')
prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
win_sources = antglob.glob( dist_dir,
win_sources = antglob.glob(dist_dir,
includes = '**/*.sln **/*.vcproj',
prune_dirs = prune_dirs )
unix_sources = antglob.glob( dist_dir,
prune_dirs = prune_dirs)
unix_sources = antglob.glob(dist_dir,
includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
sconscript *.json *.expected AUTHORS LICENSE''',
excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
prune_dirs = prune_dirs )
prune_dirs = prune_dirs)
for path in win_sources:
fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' )
fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\r\n')
for path in unix_sources:
fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' )
fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\n')

def download( url, target_path ):
def download(url, target_path):
"""Download file represented by url to target_path.
"""
f = urllib2.urlopen( url )
f = urllib2.urlopen(url)
try:
data = f.read()
finally:
f.close()
fout = open( target_path, 'wb' )
fout = open(target_path, 'wb')
try:
fout.write( data )
fout.write(data)
finally:
fout.close()

def check_compile( distcheck_top_dir, platform ):
def check_compile(distcheck_top_dir, platform):
cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
print('Running:', ' '.join( cmd ))
log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform )
flog = open( log_path, 'wb' )
print('Running:', ' '.join(cmd))
log_path = os.path.join(distcheck_top_dir, 'build-%s.log' % platform)
flog = open(log_path, 'wb')
try:
process = subprocess.Popen( cmd,
process = subprocess.Popen(cmd,
stdout=flog,
stderr=subprocess.STDOUT,
cwd=distcheck_top_dir )
cwd=distcheck_top_dir)
stdout = process.communicate()[0]
status = (process.returncode == 0)
finally:
flog.close()
return (status, log_path)

def write_tempfile( content, **kwargs ):
fd, path = tempfile.mkstemp( **kwargs )
f = os.fdopen( fd, 'wt' )
def write_tempfile(content, **kwargs):
fd, path = tempfile.mkstemp(**kwargs)
f = os.fdopen(fd, 'wt')
try:
f.write( content )
f.write(content)
finally:
f.close()
return path
@@ -175,34 +181,34 @@ def write_tempfile( content, **kwargs ):
class SFTPError(Exception):
pass

def run_sftp_batch( userhost, sftp, batch, retry=0 ):
path = write_tempfile( batch, suffix='.sftp', text=True )
def run_sftp_batch(userhost, sftp, batch, retry=0):
path = write_tempfile(batch, suffix='.sftp', text=True)
# psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
error = None
for retry_index in range(0, max(1,retry)):
heading = retry_index == 0 and 'Running:' or 'Retrying:'
print(heading, ' '.join( cmd ))
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
print(heading, ' '.join(cmd))
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout = process.communicate()[0]
if process.returncode != 0:
error = SFTPError( 'SFTP batch failed:\n' + stdout )
error = SFTPError('SFTP batch failed:\n' + stdout)
else:
break
if error:
raise error
return stdout

def sourceforge_web_synchro( sourceforge_project, doc_dir,
user=None, sftp='sftp' ):
def sourceforge_web_synchro(sourceforge_project, doc_dir,
user=None, sftp='sftp'):
"""Notes: does not synchronize sub-directory of doc-dir.
"""
userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project)
stdout = run_sftp_batch( userhost, sftp, """
stdout = run_sftp_batch(userhost, sftp, """
cd htdocs
dir
exit
""" )
""")
existing_paths = set()
collect = 0
for line in stdout.split('\n'):
@@ -216,15 +222,15 @@ exit
elif collect == 2:
path = line.strip().split()[-1:]
if path and path[0] not in ('.', '..'):
existing_paths.add( path[0] )
upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] )
existing_paths.add(path[0])
upload_paths = set([os.path.basename(p) for p in antglob.glob(doc_dir)])
paths_to_remove = existing_paths - upload_paths
if paths_to_remove:
print('Removing the following file from web:')
print('\n'.join( paths_to_remove ))
stdout = run_sftp_batch( userhost, sftp, """cd htdocs
print('\n'.join(paths_to_remove))
stdout = run_sftp_batch(userhost, sftp, """cd htdocs
rm %s
exit""" % ' '.join(paths_to_remove) )
exit""" % ' '.join(paths_to_remove))
print('Uploading %d files:' % len(upload_paths))
batch_size = 10
upload_paths = list(upload_paths)
@@ -235,17 +241,17 @@ exit""" % ' '.join(paths_to_remove) )
remaining_files = len(upload_paths) - index
remaining_sec = file_per_sec * remaining_files
print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec))
run_sftp_batch( userhost, sftp, """cd htdocs
run_sftp_batch(userhost, sftp, """cd htdocs
lcd %s
mput %s
exit""" % (doc_dir, ' '.join(paths) ), retry=3 )
exit""" % (doc_dir, ' '.join(paths)), retry=3)

def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ):
def sourceforge_release_tarball(sourceforge_project, paths, user=None, sftp='sftp'):
userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project)
run_sftp_batch( userhost, sftp, """
run_sftp_batch(userhost, sftp, """
mput %s
exit
""" % (' '.join(paths),) )
""" % (' '.join(paths),))


def main():
@@ -286,12 +292,12 @@ Warning: --force should only be used when developping/testing the release script
options, args = parser.parse_args()

if len(args) != 2:
parser.error( 'release_version missing on command-line.' )
parser.error('release_version missing on command-line.')
release_version = args[0]
next_version = args[1]

if not options.platforms and not options.no_test:
parser.error( 'You must specify either --platform or --no-test option.' )
parser.error('You must specify either --platform or --no-test option.')

if options.ignore_pending_commit:
msg = ''
@@ -299,86 +305,86 @@ Warning: --force should only be used when developping/testing the release script
msg = check_no_pending_commit()
if not msg:
print('Setting version to', release_version)
set_version( release_version )
svn_commit( 'Release ' + release_version )
tag_url = svn_join_url( SVN_TAG_ROOT, release_version )
if svn_check_if_tag_exist( tag_url ):
set_version(release_version)
svn_commit('Release ' + release_version)
tag_url = svn_join_url(SVN_TAG_ROOT, release_version)
if svn_check_if_tag_exist(tag_url):
if options.retag_release:
svn_remove_tag( tag_url, 'Overwriting previous tag' )
svn_remove_tag(tag_url, 'Overwriting previous tag')
else:
print('Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url)
sys.exit( 1 )
svn_tag_sandbox( tag_url, 'Release ' + release_version )
sys.exit(1)
svn_tag_sandbox(tag_url, 'Release ' + release_version)

print('Generated doxygen document...')
## doc_dirname = r'jsoncpp-api-html-0.5.0'
## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True )
doc_tarball_path, doc_dirname = doxybuild.build_doc(options, make_release=True)
doc_distcheck_dir = 'dist/doccheck'
tarball.decompress( doc_tarball_path, doc_distcheck_dir )
doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname )
tarball.decompress(doc_tarball_path, doc_distcheck_dir)
doc_distcheck_top_dir = os.path.join(doc_distcheck_dir, doc_dirname)

export_dir = 'dist/export'
svn_export( tag_url, export_dir )
fix_sources_eol( export_dir )
svn_export(tag_url, export_dir)
fix_sources_eol(export_dir)

source_dir = 'jsoncpp-src-' + release_version
source_tarball_path = 'dist/%s.tar.gz' % source_dir
print('Generating source tarball to', source_tarball_path)
tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )
tarball.make_tarball(source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir)

amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
print('Generating amalgamation source tarball to', amalgamation_tarball_path)
amalgamation_dir = 'dist/amalgamation'
amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' )
amalgamate.amalgamate_source(export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h')
amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir],
amalgamation_dir, prefix_dir=amalgamation_source_dir )
tarball.make_tarball(amalgamation_tarball_path, [amalgamation_dir],
amalgamation_dir, prefix_dir=amalgamation_source_dir)

# Decompress source tarball, download and install scons-local
distcheck_dir = 'dist/distcheck'
distcheck_top_dir = distcheck_dir + '/' + source_dir
print('Decompressing source tarball to', distcheck_dir)
rmdir_if_exist( distcheck_dir )
tarball.decompress( source_tarball_path, distcheck_dir )
rmdir_if_exist(distcheck_dir)
tarball.decompress(source_tarball_path, distcheck_dir)
scons_local_path = 'dist/scons-local.tar.gz'
print('Downloading scons-local to', scons_local_path)
download( SCONS_LOCAL_URL, scons_local_path )
download(SCONS_LOCAL_URL, scons_local_path)
print('Decompressing scons-local to', distcheck_top_dir)
tarball.decompress( scons_local_path, distcheck_top_dir )
tarball.decompress(scons_local_path, distcheck_top_dir)

# Run compilation
print('Compiling decompressed tarball')
all_build_status = True
for platform in options.platforms.split(','):
print('Testing platform:', platform)
build_status, log_path = check_compile( distcheck_top_dir, platform )
build_status, log_path = check_compile(distcheck_top_dir, platform)
print('see build log:', log_path)
print(build_status and '=> ok' or '=> FAILED')
all_build_status = all_build_status and build_status
if not build_status:
print('Testing failed on at least one platform, aborting...')
svn_remove_tag( tag_url, 'Removing tag due to failed testing' )
svn_remove_tag(tag_url, 'Removing tag due to failed testing')
sys.exit(1)
if options.user:
if not options.no_web:
print('Uploading documentation using user', options.user)
sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp )
sourceforge_web_synchro(SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp)
print('Completed documentation upload')
print('Uploading source and documentation tarballs for release using user', options.user)
sourceforge_release_tarball( SOURCEFORGE_PROJECT,
sourceforge_release_tarball(SOURCEFORGE_PROJECT,
[source_tarball_path, doc_tarball_path],
user=options.user, sftp=options.sftp )
user=options.user, sftp=options.sftp)
print('Source and doc release tarballs uploaded')
else:
print('No upload user specified. Web site and download tarbal were not uploaded.')
print('Tarball can be found at:', doc_tarball_path)

# Set next version number and commit
set_version( next_version )
svn_commit( 'Released ' + release_version )
set_version(next_version)
svn_commit('Released ' + release_version)
else:
sys.stderr.write( msg + '\n' )
sys.stderr.write(msg + '\n')

if __name__ == '__main__':
main()

4 3rdparty/jsoncpp/pkg-config/jsoncpp.pc.in vendored
@@ -1,7 +1,7 @@
prefix=@CMAKE_INSTALL_PREFIX@
exec_prefix=${prefix}
libdir=${exec_prefix}/@LIBRARY_INSTALL_DIR@
includedir=${prefix}/@INCLUDE_INSTALL_DIR@
libdir=@LIBRARY_INSTALL_DIR@
includedir=@INCLUDE_INSTALL_DIR@

Name: jsoncpp
Description: A C++ library for interacting with JSON

33 3rdparty/jsoncpp/scons-tools/globtool.py vendored
@@ -1,9 +1,14 @@
# Copyright 2009 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

import fnmatch
import os

def generate( env ):
def Glob( env, includes = None, excludes = None, dir = '.' ):
"""Adds Glob( includes = Split( '*' ), excludes = None, dir = '.')
def generate(env):
def Glob(env, includes = None, excludes = None, dir = '.'):
"""Adds Glob(includes = Split('*'), excludes = None, dir = '.')
helper function to environment.

Glob both the file-system files.
@@ -12,36 +17,36 @@ def generate( env ):
excludes: list of file name pattern exluced from the return list.

Example:
sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" )
sources = env.Glob(("*.cpp", '*.h'), "~*.cpp", "#src")
"""
def filterFilename(path):
abs_path = os.path.join( dir, path )
abs_path = os.path.join(dir, path)
if not os.path.isfile(abs_path):
return 0
fn = os.path.basename(path)
match = 0
for include in includes:
if fnmatch.fnmatchcase( fn, include ):
if fnmatch.fnmatchcase(fn, include):
match = 1
break
if match == 1 and not excludes is None:
for exclude in excludes:
if fnmatch.fnmatchcase( fn, exclude ):
if fnmatch.fnmatchcase(fn, exclude):
match = 0
break
return match
if includes is None:
includes = ('*',)
elif type(includes) in ( type(''), type(u'') ):
elif type(includes) in (type(''), type(u'')):
includes = (includes,)
if type(excludes) in ( type(''), type(u'') ):
if type(excludes) in (type(''), type(u'')):
excludes = (excludes,)
dir = env.Dir(dir).abspath
paths = os.listdir( dir )
def makeAbsFileNode( path ):
return env.File( os.path.join( dir, path ) )
nodes = filter( filterFilename, paths )
return map( makeAbsFileNode, nodes )
paths = os.listdir(dir)
def makeAbsFileNode(path):
return env.File(os.path.join(dir, path))
nodes = filter(filterFilename, paths)
return map(makeAbsFileNode, nodes)

from SCons.Script import Environment
Environment.Glob = Glob

20 3rdparty/jsoncpp/scons-tools/srcdist.py vendored
@@ -1,3 +1,8 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

import os
import os.path
from fnmatch import fnmatch
@@ -47,7 +52,7 @@ import targz
## elif token == "=":
## data[key] = list()
## else:
## append_data( data, key, new_data, token )
## append_data(data, key, new_data, token)
## new_data = True
##
## last_token = token
@@ -55,7 +60,7 @@ import targz
##
## if last_token == '\\' and token != '\n':
## new_data = False
## append_data( data, key, new_data, '\\' )
## append_data(data, key, new_data, '\\')
##
## # compress lists of len 1 into single strings
## for (k, v) in data.items():
@@ -116,7 +121,7 @@ import targz
## else:
## for pattern in file_patterns:
## sources.extend(glob.glob("/".join([node, pattern])))
## sources = map( lambda path: env.File(path), sources )
## sources = map(lambda path: env.File(path), sources)
## return sources
##
##
@@ -143,7 +148,7 @@ def srcDistEmitter(source, target, env):
## # add our output locations
## for (k, v) in output_formats.items():
## if data.get("GENERATE_" + k, v[0]) == "YES":
## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) )
## targets.append(env.Dir(os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))))
##
## # don't clobber targets
## for node in targets:
@@ -161,14 +166,13 @@ def generate(env):
Add builders and construction variables for the
SrcDist tool.
"""
## doxyfile_scanner = env.Scanner(
## DoxySourceScan,
## doxyfile_scanner = env.Scanner(## DoxySourceScan,
## "DoxySourceScan",
## scan_check = DoxySourceScanCheck,
## )
##)

if targz.exists(env):
srcdist_builder = targz.makeBuilder( srcDistEmitter )
srcdist_builder = targz.makeBuilder(srcDistEmitter)

env['BUILDERS']['SrcDist'] = srcdist_builder

7 3rdparty/jsoncpp/scons-tools/substinfile.py vendored
@@ -1,3 +1,8 @@
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

import re
from SCons.Script import * # the usual scons stuff you get in a SConscript
import collections
@@ -70,7 +75,7 @@ def generate(env):
return target, source

## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!?
subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string )
subst_action = SCons.Action.Action(subst_in_file, subst_in_file_string)
env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter)

def exists(env):

21 3rdparty/jsoncpp/scons-tools/targz.py vendored
@@ -1,3 +1,8 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

"""tarball

Tool-specific initialization for tarball.
@@ -27,9 +32,9 @@ TARGZ_DEFAULT_COMPRESSION_LEVEL = 9

if internal_targz:
def targz(target, source, env):
def archive_name( path ):
path = os.path.normpath( os.path.abspath( path ) )
common_path = os.path.commonprefix( (base_dir, path) )
def archive_name(path):
path = os.path.normpath(os.path.abspath(path))
common_path = os.path.commonprefix((base_dir, path))
archive_name = path[len(common_path):]
return archive_name

@@ -37,23 +42,23 @@ if internal_targz:
for name in names:
path = os.path.join(dirname, name)
if os.path.isfile(path):
tar.add(path, archive_name(path) )
tar.add(path, archive_name(path))
compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL)
base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath )
base_dir = os.path.normpath(env.get('TARGZ_BASEDIR', env.Dir('.')).abspath)
target_path = str(target[0])
fileobj = gzip.GzipFile( target_path, 'wb', compression )
fileobj = gzip.GzipFile(target_path, 'wb', compression)
tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj)
for source in source:
source_path = str(source)
if source.isdir():
os.path.walk(source_path, visit, tar)
else:
tar.add(source_path, archive_name(source_path) ) # filename, arcname
tar.add(source_path, archive_name(source_path)) # filename, arcname
tar.close()

targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR'])

def makeBuilder( emitter = None ):
def makeBuilder(emitter = None):
return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'),
source_factory = SCons.Node.FS.Entry,
source_scanner = SCons.Defaults.DirScanner,

@@ -1,20 +1,23 @@
FIND_PACKAGE(PythonInterp 2.6 REQUIRED)

IF(JSONCPP_LIB_BUILD_SHARED)
ADD_DEFINITIONS( -DJSON_DLL )
ENDIF(JSONCPP_LIB_BUILD_SHARED)
FIND_PACKAGE(PythonInterp 2.6)

ADD_EXECUTABLE(jsontestrunner_exe
main.cpp
)
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib)

IF(BUILD_SHARED_LIBS)
ADD_DEFINITIONS( -DJSON_DLL )
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib)
ELSE(BUILD_SHARED_LIBS)
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib_static)
ENDIF(BUILD_SHARED_LIBS)

SET_TARGET_PROPERTIES(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe)

IF(PYTHONINTERP_FOUND)
# Run end to end parser/writer tests
SET(TEST_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../test)
SET(RUNJSONTESTS_PATH ${TEST_DIR}/runjsontests.py)
ADD_CUSTOM_TARGET(jsoncpp_readerwriter_tests ALL
ADD_CUSTOM_TARGET(jsoncpp_readerwriter_tests
"${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data"
DEPENDS jsontestrunner_exe jsoncpp_test
)
190 3rdparty/jsoncpp/src/jsontestrunner/main.cpp vendored
@@ -8,12 +8,22 @@

#include <json/json.h>
#include <algorithm> // sort
#include <sstream>
#include <stdio.h>

#if defined(_MSC_VER) && _MSC_VER >= 1310
#pragma warning(disable : 4996) // disable fopen deprecation warning
#endif

struct Options
{
std::string path;
Json::Features features;
bool parseOnly;
typedef std::string (*writeFuncType)(Json::Value const&);
writeFuncType write;
};
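
Options::write is a plain function pointer, so the command-line parser further down can select a writer implementation once and the test body can invoke it later without branching. A stand-alone sketch of the same dispatch pattern (hypothetical names, not the test runner itself):

#include <string>

// Same shape as Options::writeFuncType above, in stand-alone form.
typedef std::string (*WriteFunc)(int const&);

static std::string writeA(int const& v) { return "A:" + std::to_string(v); }
static std::string writeB(int const& v) { return "B:" + std::to_string(v); }

std::string dispatchExample(bool useB) {
    WriteFunc write = useB ? &writeB : &writeA; // chosen once, as in parseCommandLine
    return write(42);                           // called later without branching
}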
|
||||
|
||||
static std::string normalizeFloatingPointStr(double value) {
|
||||
char buffer[32];
|
||||
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
|
||||
@ -129,43 +139,67 @@ printValueTree(FILE* fout, Json::Value& value, const std::string& path = ".") {
|
||||
static int parseAndSaveValueTree(const std::string& input,
|
||||
const std::string& actual,
|
||||
const std::string& kind,
|
||||
Json::Value& root,
|
||||
const Json::Features& features,
|
||||
bool parseOnly) {
|
||||
bool parseOnly,
|
||||
Json::Value* root)
|
||||
{
|
||||
Json::Reader reader(features);
|
||||
bool parsingSuccessful = reader.parse(input, root);
|
||||
bool parsingSuccessful = reader.parse(input, *root);
|
||||
if (!parsingSuccessful) {
|
||||
printf("Failed to parse %s file: \n%s\n",
|
||||
kind.c_str(),
|
||||
reader.getFormattedErrorMessages().c_str());
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (!parseOnly) {
|
||||
FILE* factual = fopen(actual.c_str(), "wt");
|
||||
if (!factual) {
|
||||
printf("Failed to create %s actual file.\n", kind.c_str());
|
||||
return 2;
|
||||
}
|
||||
printValueTree(factual, root);
|
||||
printValueTree(factual, *root);
|
||||
fclose(factual);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int rewriteValueTree(const std::string& rewritePath,
|
||||
const Json::Value& root,
|
||||
std::string& rewrite) {
|
||||
// Json::FastWriter writer;
|
||||
// writer.enableYAMLCompatibility();
|
||||
// static std::string useFastWriter(Json::Value const& root) {
|
||||
// Json::FastWriter writer;
|
||||
// writer.enableYAMLCompatibility();
|
||||
// return writer.write(root);
|
||||
// }
|
||||
static std::string useStyledWriter(
|
||||
Json::Value const& root)
|
||||
{
|
||||
Json::StyledWriter writer;
|
||||
rewrite = writer.write(root);
|
||||
return writer.write(root);
|
||||
}
|
||||
static std::string useStyledStreamWriter(
|
||||
Json::Value const& root)
|
||||
{
|
||||
Json::StyledStreamWriter writer;
|
||||
std::ostringstream sout;
|
||||
writer.write(sout, root);
|
||||
return sout.str();
|
||||
}
|
||||
static std::string useBuiltStyledStreamWriter(
|
||||
Json::Value const& root)
|
||||
{
|
||||
Json::StreamWriterBuilder builder;
|
||||
return Json::writeString(builder, root);
|
||||
}
|
||||
static int rewriteValueTree(
|
||||
const std::string& rewritePath,
|
||||
const Json::Value& root,
|
||||
Options::writeFuncType write,
|
||||
std::string* rewrite)
|
||||
{
|
||||
*rewrite = write(root);
|
||||
FILE* fout = fopen(rewritePath.c_str(), "wt");
|
||||
if (!fout) {
|
||||
printf("Failed to create rewrite file: %s\n", rewritePath.c_str());
|
||||
return 2;
|
||||
}
|
||||
fprintf(fout, "%s\n", rewrite.c_str());
|
||||
fprintf(fout, "%s\n", rewrite->c_str());
|
||||
fclose(fout);
|
||||
return 0;
|
||||
}
|
||||
@ -194,84 +228,98 @@ static int printUsage(const char* argv[]) {
|
||||
return 3;
|
||||
}
|
||||
|
||||
int parseCommandLine(int argc,
|
||||
const char* argv[],
|
||||
Json::Features& features,
|
||||
std::string& path,
|
||||
bool& parseOnly) {
|
||||
parseOnly = false;
|
||||
static int parseCommandLine(
|
||||
int argc, const char* argv[], Options* opts)
|
||||
{
|
||||
opts->parseOnly = false;
|
||||
opts->write = &useStyledWriter;
|
||||
if (argc < 2) {
|
||||
return printUsage(argv);
|
||||
}
|
||||
|
||||
int index = 1;
|
||||
if (std::string(argv[1]) == "--json-checker") {
|
||||
features = Json::Features::strictMode();
|
||||
parseOnly = true;
|
||||
if (std::string(argv[index]) == "--json-checker") {
|
||||
opts->features = Json::Features::strictMode();
|
||||
opts->parseOnly = true;
|
||||
++index;
|
||||
}
|
||||
|
||||
if (std::string(argv[1]) == "--json-config") {
|
||||
if (std::string(argv[index]) == "--json-config") {
|
||||
printConfig();
|
||||
return 3;
|
||||
}
|
||||
|
||||
if (std::string(argv[index]) == "--json-writer") {
|
||||
++index;
|
||||
std::string const writerName(argv[index++]);
|
||||
if (writerName == "StyledWriter") {
|
||||
opts->write = &useStyledWriter;
|
||||
} else if (writerName == "StyledStreamWriter") {
|
||||
opts->write = &useStyledStreamWriter;
|
||||
} else if (writerName == "BuiltStyledStreamWriter") {
|
||||
opts->write = &useBuiltStyledStreamWriter;
|
||||
} else {
|
||||
printf("Unknown '--json-writer %s'\n", writerName.c_str());
|
||||
return 4;
|
||||
}
|
||||
}
|
||||
if (index == argc || index + 1 < argc) {
|
||||
return printUsage(argv);
|
||||
}
|
||||
|
||||
path = argv[index];
|
||||
opts->path = argv[index];
|
||||
return 0;
|
||||
}
|
||||
static int runTest(Options const& opts)
{
  int exitCode = 0;

int main(int argc, const char* argv[]) {
  std::string path;
  Json::Features features;
  bool parseOnly;
  int exitCode = parseCommandLine(argc, argv, features, path, parseOnly);
  if (exitCode != 0) {
    return exitCode;
  std::string input = readInputTestFile(opts.path.c_str());
  if (input.empty()) {
    printf("Failed to read input or empty input: %s\n", opts.path.c_str());
    return 3;
  }

  std::string basePath = removeSuffix(opts.path, ".json");
  if (!opts.parseOnly && basePath.empty()) {
    printf("Bad input path. Path does not end with '.expected':\n%s\n",
           opts.path.c_str());
    return 3;
  }

  std::string const actualPath = basePath + ".actual";
  std::string const rewritePath = basePath + ".rewrite";
  std::string const rewriteActualPath = basePath + ".actual-rewrite";

  Json::Value root;
  exitCode = parseAndSaveValueTree(
      input, actualPath, "input",
      opts.features, opts.parseOnly, &root);
  if (exitCode || opts.parseOnly) {
    return exitCode;
  }
  std::string rewrite;
  exitCode = rewriteValueTree(rewritePath, root, opts.write, &rewrite);
  if (exitCode) {
    return exitCode;
  }
  Json::Value rewriteRoot;
  exitCode = parseAndSaveValueTree(
      rewrite, rewriteActualPath, "rewrite",
      opts.features, opts.parseOnly, &rewriteRoot);
  if (exitCode) {
    return exitCode;
  }
  return 0;
}
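runTest leans on readInputTestFile and removeSuffix, both defined earlier in this file. For orientation, removeSuffix behaves roughly like this sketch (returning an empty string when the suffix does not match, which is what triggers the "Bad input path" branch above):

static std::string removeSuffix(const std::string& path,
                                const std::string& extension) {
  if (extension.length() >= path.length())
    return std::string("");
  std::string suffix = path.substr(path.length() - extension.length());
  if (suffix != extension)
    return std::string("");
  return path.substr(0, path.length() - extension.length());
}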
int main(int argc, const char* argv[]) {
  Options opts;
  int exitCode = parseCommandLine(argc, argv, &opts);
  if (exitCode != 0) {
    printf("Failed to parse command-line.");
    return exitCode;
  }
  try {
    std::string input = readInputTestFile(path.c_str());
    if (input.empty()) {
      printf("Failed to read input or empty input: %s\n", path.c_str());
      return 3;
    }

    std::string basePath = removeSuffix(argv[1], ".json");
    if (!parseOnly && basePath.empty()) {
      printf("Bad input path. Path does not end with '.expected':\n%s\n",
             path.c_str());
      return 3;
    }

    std::string actualPath = basePath + ".actual";
    std::string rewritePath = basePath + ".rewrite";
    std::string rewriteActualPath = basePath + ".actual-rewrite";

    Json::Value root;
    exitCode = parseAndSaveValueTree(
        input, actualPath, "input", root, features, parseOnly);
    if (exitCode == 0 && !parseOnly) {
      std::string rewrite;
      exitCode = rewriteValueTree(rewritePath, root, rewrite);
      if (exitCode == 0) {
        Json::Value rewriteRoot;
        exitCode = parseAndSaveValueTree(rewrite,
                                         rewriteActualPath,
                                         "rewrite",
                                         rewriteRoot,
                                         features,
                                         parseOnly);
      }
    }
    return runTest(opts);
  }
  catch (const std::exception& e) {
    printf("Unhandled exception:\n%s\n", e.what());
    exitCode = 1;
    return 1;
  }

  return exitCode;
}
80
3rdparty/jsoncpp/src/lib_json/CMakeLists.txt
vendored
@ -1,20 +1,8 @@
OPTION(JSONCPP_LIB_BUILD_SHARED "Build jsoncpp_lib as a shared library." OFF)
IF(BUILD_SHARED_LIBS)
  SET(JSONCPP_LIB_BUILD_SHARED ON)
ENDIF(BUILD_SHARED_LIBS)

IF(JSONCPP_LIB_BUILD_SHARED)
  SET(JSONCPP_LIB_TYPE SHARED)
  ADD_DEFINITIONS( -DJSON_DLL_BUILD )
ELSE(JSONCPP_LIB_BUILD_SHARED)
  SET(JSONCPP_LIB_TYPE STATIC)
ENDIF(JSONCPP_LIB_BUILD_SHARED)

if( CMAKE_COMPILER_IS_GNUCXX )
  #Get compiler version.
  execute_process( COMMAND ${CMAKE_CXX_COMPILER} -dumpversion
                   OUTPUT_VARIABLE GNUCXX_VERSION )

  #-Werror=* was introduced -after- GCC 4.1.2
  if( GNUCXX_VERSION VERSION_GREATER 4.1.2 )
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing")
@ -36,25 +24,13 @@ SET( PUBLIC_HEADERS

SOURCE_GROUP( "Public API" FILES ${PUBLIC_HEADERS} )

ADD_LIBRARY( jsoncpp_lib ${JSONCPP_LIB_TYPE}
  ${PUBLIC_HEADERS}
  json_tool.h
  json_reader.cpp
  json_batchallocator.h
  json_valueiterator.inl
  json_value.cpp
  json_writer.cpp
  version.h.in
  )
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp )
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR} )

IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
  TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC
    $<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
    $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
    )
ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
SET(jsoncpp_sources
  json_tool.h
  json_reader.cpp
  json_valueiterator.inl
  json_value.cpp
  json_writer.cpp
  version.h.in)

# Install instructions for this target
IF(JSONCPP_WITH_CMAKE_PACKAGE)
@ -63,8 +39,42 @@ ELSE(JSONCPP_WITH_CMAKE_PACKAGE)
  SET(INSTALL_EXPORT)
ENDIF(JSONCPP_WITH_CMAKE_PACKAGE)

INSTALL( TARGETS jsoncpp_lib ${INSTALL_EXPORT}
IF(BUILD_SHARED_LIBS)
  ADD_DEFINITIONS( -DJSON_DLL_BUILD )
  ADD_LIBRARY(jsoncpp_lib SHARED ${PUBLIC_HEADERS} ${jsoncpp_sources})
  SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR})
  SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp
                         DEBUG_OUTPUT_NAME jsoncpp${DEBUG_LIBNAME_SUFFIX} )

  INSTALL( TARGETS jsoncpp_lib ${INSTALL_EXPORT}
    RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR}
    LIBRARY DESTINATION ${LIBRARY_INSTALL_DIR}
    ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR}
    )
    ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR})

  IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
    TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC
      $<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
      $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>)
  ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)

ENDIF()

IF(BUILD_STATIC_LIBS)
  ADD_LIBRARY(jsoncpp_lib_static STATIC ${PUBLIC_HEADERS} ${jsoncpp_sources})
  SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR})
  SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES OUTPUT_NAME jsoncpp
                         DEBUG_OUTPUT_NAME jsoncpp${DEBUG_LIBNAME_SUFFIX} )

  INSTALL( TARGETS jsoncpp_lib_static ${INSTALL_EXPORT}
    RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR}
    LIBRARY DESTINATION ${LIBRARY_INSTALL_DIR}
    ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR})

  IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
    TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib_static PUBLIC
      $<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
      $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
      )
  ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)

ENDIF()
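The -DJSON_DLL_BUILD definition added in the shared branch feeds jsoncpp's json/config.h, which expands the JSON_API export macro roughly as follows (a simplified sketch of the upstream MSVC logic, not a verbatim copy):

#ifdef JSON_DLL_BUILD
#define JSON_API __declspec(dllexport)   // building the DLL itself
#elif defined(JSON_DLL)
#define JSON_API __declspec(dllimport)   // consuming the DLL
#else
#define JSON_API                         // static or non-MSVC build
#endif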
121
3rdparty/jsoncpp/src/lib_json/json_batchallocator.h
vendored
@ -1,121 +0,0 @@
|
||||
// Copyright 2007-2010 Baptiste Lepilleur
|
||||
// Distributed under MIT license, or public domain if desired and
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED
|
||||
#define JSONCPP_BATCHALLOCATOR_H_INCLUDED
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <assert.h>
|
||||
|
||||
#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
|
||||
|
||||
namespace Json {
|
||||
|
||||
/* Fast memory allocator.
|
||||
*
|
||||
* This memory allocator allocates memory for a batch of objects (specified by
* the page size, the number of objects in each page).
|
||||
*
|
||||
* It does not allow the destruction of a single object. All the allocated
|
||||
* objects can be destroyed at once. The memory can be either released or reused
|
||||
* for future allocation.
|
||||
*
|
||||
* The in-place new operator must be used to construct the object using the
|
||||
* pointer returned by allocate.
|
||||
*/
|
||||
template <typename AllocatedType, const unsigned int objectPerAllocation>
|
||||
class BatchAllocator {
|
||||
public:
|
||||
BatchAllocator(unsigned int objectsPerPage = 255)
|
||||
: freeHead_(0), objectsPerPage_(objectsPerPage) {
|
||||
// printf( "Size: %d => %s\n", sizeof(AllocatedType),
|
||||
// typeid(AllocatedType).name() );
|
||||
assert(sizeof(AllocatedType) * objectPerAllocation >=
|
||||
sizeof(AllocatedType*)); // We must be able to store a slist in the
|
||||
// object free space.
|
||||
assert(objectsPerPage >= 16);
|
||||
batches_ = allocateBatch(0); // allocate a dummy page
|
||||
currentBatch_ = batches_;
|
||||
}
|
||||
|
||||
~BatchAllocator() {
|
||||
for (BatchInfo* batch = batches_; batch;) {
|
||||
BatchInfo* nextBatch = batch->next_;
|
||||
free(batch);
|
||||
batch = nextBatch;
|
||||
}
|
||||
}
|
||||
|
||||
/// Allocate space for an array of objectPerAllocation objects.
/// @warning it is the responsibility of the caller to call the objects'
/// constructors.
|
||||
AllocatedType* allocate() {
|
||||
if (freeHead_) // returns node from free list.
|
||||
{
|
||||
AllocatedType* object = freeHead_;
|
||||
freeHead_ = *(AllocatedType**)object;
|
||||
return object;
|
||||
}
|
||||
if (currentBatch_->used_ == currentBatch_->end_) {
|
||||
currentBatch_ = currentBatch_->next_;
|
||||
while (currentBatch_ && currentBatch_->used_ == currentBatch_->end_)
|
||||
currentBatch_ = currentBatch_->next_;
|
||||
|
||||
if (!currentBatch_) // no free batch found, allocate a new one
|
||||
{
|
||||
currentBatch_ = allocateBatch(objectsPerPage_);
|
||||
currentBatch_->next_ = batches_; // insert at the head of the list
|
||||
batches_ = currentBatch_;
|
||||
}
|
||||
}
|
||||
AllocatedType* allocated = currentBatch_->used_;
|
||||
currentBatch_->used_ += objectPerAllocation;
|
||||
return allocated;
|
||||
}
|
||||
|
||||
/// Release the object.
/// @warning it is the responsibility of the caller to actually destruct the
/// object.
|
||||
void release(AllocatedType* object) {
|
||||
assert(object != 0);
|
||||
*(AllocatedType**)object = freeHead_;
|
||||
freeHead_ = object;
|
||||
}
|
||||
|
||||
private:
|
||||
struct BatchInfo {
|
||||
BatchInfo* next_;
|
||||
AllocatedType* used_;
|
||||
AllocatedType* end_;
|
||||
AllocatedType buffer_[objectPerAllocation];
|
||||
};
|
||||
|
||||
// Disabled copy constructor and assignment operator.
|
||||
BatchAllocator(const BatchAllocator&);
|
||||
void operator=(const BatchAllocator&);
|
||||
|
||||
static BatchInfo* allocateBatch(unsigned int objectsPerPage) {
|
||||
const unsigned int mallocSize =
|
||||
sizeof(BatchInfo) - sizeof(AllocatedType) * objectPerAllocation +
|
||||
sizeof(AllocatedType) * objectPerAllocation * objectsPerPage;
|
||||
BatchInfo* batch = static_cast<BatchInfo*>(malloc(mallocSize));
|
||||
batch->next_ = 0;
|
||||
batch->used_ = batch->buffer_;
|
||||
batch->end_ = batch->buffer_ + objectsPerPage;
|
||||
return batch;
|
||||
}
|
||||
|
||||
BatchInfo* batches_;
|
||||
BatchInfo* currentBatch_;
|
||||
/// Head of a singly linked list within the allocated space of freed objects
|
||||
AllocatedType* freeHead_;
|
||||
unsigned int objectsPerPage_;
|
||||
};
|
||||
|
||||
} // namespace Json
|
||||
|
||||
#endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION
|
||||
|
||||
#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED
|
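Since the header above is being deleted, a quick illustration of the contract it used to offer: allocate() hands back raw storage, the caller constructs with placement new and must destruct before release() (Foo is a made-up type):

#include <new>   // placement new

struct Foo { int x; };

static void demoBatchAllocator() {
  Json::BatchAllocator<Foo, 1> allocator;  // one object per allocation
  Foo* p = allocator.allocate();           // raw, uninitialized storage
  new (p) Foo();                           // caller constructs in place
  p->~Foo();                               // ...and destructs before release
  allocator.release(p);                    // storage returns to the free list
}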
360
3rdparty/jsoncpp/src/lib_json/json_internalarray.inl
vendored
@ -1,360 +0,0 @@
|
||||
// Copyright 2007-2010 Baptiste Lepilleur
|
||||
// Distributed under MIT license, or public domain if desired and
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
// included by json_value.cpp
|
||||
|
||||
namespace Json {
|
||||
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
// class ValueInternalArray
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
|
||||
ValueArrayAllocator::~ValueArrayAllocator() {}
|
||||
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
// class DefaultValueArrayAllocator
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
|
||||
class DefaultValueArrayAllocator : public ValueArrayAllocator {
|
||||
public: // overridden from ValueArrayAllocator
|
||||
virtual ~DefaultValueArrayAllocator() {}
|
||||
|
||||
virtual ValueInternalArray* newArray() { return new ValueInternalArray(); }
|
||||
|
||||
virtual ValueInternalArray* newArrayCopy(const ValueInternalArray& other) {
|
||||
return new ValueInternalArray(other);
|
||||
}
|
||||
|
||||
virtual void destructArray(ValueInternalArray* array) { delete array; }
|
||||
|
||||
virtual void
|
||||
reallocateArrayPageIndex(Value**& indexes,
|
||||
ValueInternalArray::PageIndex& indexCount,
|
||||
ValueInternalArray::PageIndex minNewIndexCount) {
|
||||
ValueInternalArray::PageIndex newIndexCount = (indexCount * 3) / 2 + 1;
|
||||
if (minNewIndexCount > newIndexCount)
|
||||
newIndexCount = minNewIndexCount;
|
||||
void* newIndexes = realloc(indexes, sizeof(Value*) * newIndexCount);
|
||||
JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc.");
|
||||
indexCount = newIndexCount;
|
||||
indexes = static_cast<Value**>(newIndexes);
|
||||
}
|
||||
virtual void releaseArrayPageIndex(Value** indexes,
|
||||
ValueInternalArray::PageIndex indexCount) {
|
||||
if (indexes)
|
||||
free(indexes);
|
||||
}
|
||||
|
||||
virtual Value* allocateArrayPage() {
|
||||
return static_cast<Value*>(
|
||||
malloc(sizeof(Value) * ValueInternalArray::itemsPerPage));
|
||||
}
|
||||
|
||||
virtual void releaseArrayPage(Value* value) {
|
||||
if (value)
|
||||
free(value);
|
||||
}
|
||||
};
|
||||
|
||||
#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
|
||||
/// @todo make this thread-safe (lock when accessing batch allocator)
|
||||
class DefaultValueArrayAllocator : public ValueArrayAllocator {
|
||||
public: // overridden from ValueArrayAllocator
|
||||
virtual ~DefaultValueArrayAllocator() {}
|
||||
|
||||
virtual ValueInternalArray* newArray() {
|
||||
ValueInternalArray* array = arraysAllocator_.allocate();
|
||||
new (array) ValueInternalArray(); // placement new
|
||||
return array;
|
||||
}
|
||||
|
||||
virtual ValueInternalArray* newArrayCopy(const ValueInternalArray& other) {
|
||||
ValueInternalArray* array = arraysAllocator_.allocate();
|
||||
new (array) ValueInternalArray(other); // placement new
|
||||
return array;
|
||||
}
|
||||
|
||||
virtual void destructArray(ValueInternalArray* array) {
|
||||
if (array) {
|
||||
array->~ValueInternalArray();
|
||||
arraysAllocator_.release(array);
|
||||
}
|
||||
}
|
||||
|
||||
virtual void
|
||||
reallocateArrayPageIndex(Value**& indexes,
|
||||
ValueInternalArray::PageIndex& indexCount,
|
||||
ValueInternalArray::PageIndex minNewIndexCount) {
|
||||
ValueInternalArray::PageIndex newIndexCount = (indexCount * 3) / 2 + 1;
|
||||
if (minNewIndexCount > newIndexCount)
|
||||
newIndexCount = minNewIndexCount;
|
||||
void* newIndexes = realloc(indexes, sizeof(Value*) * newIndexCount);
|
||||
JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc.");
|
||||
indexCount = newIndexCount;
|
||||
indexes = static_cast<Value**>(newIndexes);
|
||||
}
|
||||
virtual void releaseArrayPageIndex(Value** indexes,
|
||||
ValueInternalArray::PageIndex indexCount) {
|
||||
if (indexes)
|
||||
free(indexes);
|
||||
}
|
||||
|
||||
virtual Value* allocateArrayPage() {
|
||||
return static_cast<Value*>(pagesAllocator_.allocate());
|
||||
}
|
||||
|
||||
virtual void releaseArrayPage(Value* value) {
|
||||
if (value)
|
||||
pagesAllocator_.release(value);
|
||||
}
|
||||
|
||||
private:
|
||||
BatchAllocator<ValueInternalArray, 1> arraysAllocator_;
|
||||
BatchAllocator<Value, ValueInternalArray::itemsPerPage> pagesAllocator_;
|
||||
};
|
||||
#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
|
||||
|
||||
static ValueArrayAllocator*& arrayAllocator() {
|
||||
static DefaultValueArrayAllocator defaultAllocator;
|
||||
static ValueArrayAllocator* arrayAllocator = &defaultAllocator;
|
||||
return arrayAllocator;
|
||||
}
|
||||
|
||||
static struct DummyArrayAllocatorInitializer {
|
||||
DummyArrayAllocatorInitializer() {
|
||||
arrayAllocator(); // ensure arrayAllocator() statics are initialized before
|
||||
// main().
|
||||
}
|
||||
} dummyArrayAllocatorInitializer;
|
||||
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
// class ValueInternalArray
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
bool ValueInternalArray::equals(const IteratorState& x,
|
||||
const IteratorState& other) {
|
||||
return x.array_ == other.array_ &&
|
||||
x.currentItemIndex_ == other.currentItemIndex_ &&
|
||||
x.currentPageIndex_ == other.currentPageIndex_;
|
||||
}
|
||||
|
||||
void ValueInternalArray::increment(IteratorState& it) {
|
||||
JSON_ASSERT_MESSAGE(
|
||||
it.array_ && (it.currentPageIndex_ - it.array_->pages_) * itemsPerPage +
|
||||
it.currentItemIndex_ !=
|
||||
it.array_->size_,
|
||||
"ValueInternalArray::increment(): moving iterator beyond end");
|
||||
++(it.currentItemIndex_);
|
||||
if (it.currentItemIndex_ == itemsPerPage) {
|
||||
it.currentItemIndex_ = 0;
|
||||
++(it.currentPageIndex_);
|
||||
}
|
||||
}
|
||||
|
||||
void ValueInternalArray::decrement(IteratorState& it) {
|
||||
JSON_ASSERT_MESSAGE(
|
||||
it.array_ && it.currentPageIndex_ == it.array_->pages_ &&
|
||||
it.currentItemIndex_ == 0,
|
||||
"ValueInternalArray::decrement(): moving iterator beyond end");
|
||||
if (it.currentItemIndex_ == 0) {
|
||||
it.currentItemIndex_ = itemsPerPage - 1;
|
||||
--(it.currentPageIndex_);
|
||||
} else {
|
||||
--(it.currentItemIndex_);
|
||||
}
|
||||
}
|
||||
|
||||
Value& ValueInternalArray::unsafeDereference(const IteratorState& it) {
|
||||
return (*(it.currentPageIndex_))[it.currentItemIndex_];
|
||||
}
|
||||
|
||||
Value& ValueInternalArray::dereference(const IteratorState& it) {
|
||||
JSON_ASSERT_MESSAGE(
|
||||
it.array_ && (it.currentPageIndex_ - it.array_->pages_) * itemsPerPage +
|
||||
it.currentItemIndex_ <
|
||||
it.array_->size_,
|
||||
"ValueInternalArray::dereference(): dereferencing invalid iterator");
|
||||
return unsafeDereference(it);
|
||||
}
|
||||
|
||||
void ValueInternalArray::makeBeginIterator(IteratorState& it) const {
|
||||
it.array_ = const_cast<ValueInternalArray*>(this);
|
||||
it.currentItemIndex_ = 0;
|
||||
it.currentPageIndex_ = pages_;
|
||||
}
|
||||
|
||||
void ValueInternalArray::makeIterator(IteratorState& it,
|
||||
ArrayIndex index) const {
|
||||
it.array_ = const_cast<ValueInternalArray*>(this);
|
||||
it.currentItemIndex_ = index % itemsPerPage;
|
||||
it.currentPageIndex_ = pages_ + index / itemsPerPage;
|
||||
}
|
||||
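makeIterator above splits a flat array index into a (page, item) pair. The same arithmetic in isolation, using an illustrative page size (itemsPerPage is a compile-time constant of ValueInternalArray, not the value shown here):

const unsigned itemsPerPage = 8;       // illustrative only
unsigned index = 19;
unsigned page = index / itemsPerPage;  // 2 -> third page
unsigned item = index % itemsPerPage;  // 3 -> fourth slot on that page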
|
||||
void ValueInternalArray::makeEndIterator(IteratorState& it) const {
|
||||
makeIterator(it, size_);
|
||||
}
|
||||
|
||||
ValueInternalArray::ValueInternalArray() : pages_(0), size_(0), pageCount_(0) {}
|
||||
|
||||
ValueInternalArray::ValueInternalArray(const ValueInternalArray& other)
|
||||
: pages_(0), size_(other.size_), pageCount_(0) {
|
||||
PageIndex minNewPages = other.size_ / itemsPerPage;
|
||||
arrayAllocator()->reallocateArrayPageIndex(pages_, pageCount_, minNewPages);
|
||||
JSON_ASSERT_MESSAGE(pageCount_ >= minNewPages,
|
||||
"ValueInternalArray::reserve(): bad reallocation");
|
||||
IteratorState itOther;
|
||||
other.makeBeginIterator(itOther);
|
||||
Value* value;
|
||||
for (ArrayIndex index = 0; index < size_; ++index, increment(itOther)) {
|
||||
if (index % itemsPerPage == 0) {
|
||||
PageIndex pageIndex = index / itemsPerPage;
|
||||
value = arrayAllocator()->allocateArrayPage();
|
||||
pages_[pageIndex] = value;
|
||||
}
|
||||
new (value) Value(dereference(itOther));
|
||||
}
|
||||
}
|
||||
|
||||
ValueInternalArray& ValueInternalArray::operator=(ValueInternalArray other) {
|
||||
swap(other);
|
||||
return *this;
|
||||
}
|
||||
|
||||
ValueInternalArray::~ValueInternalArray() {
|
||||
// destroy all constructed items
|
||||
IteratorState it;
|
||||
IteratorState itEnd;
|
||||
makeBeginIterator(it);
|
||||
makeEndIterator(itEnd);
|
||||
for (; !equals(it, itEnd); increment(it)) {
|
||||
Value* value = &dereference(it);
|
||||
value->~Value();
|
||||
}
|
||||
// release all pages
|
||||
PageIndex lastPageIndex = size_ / itemsPerPage;
|
||||
for (PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex)
|
||||
arrayAllocator()->releaseArrayPage(pages_[pageIndex]);
|
||||
// release pages index
|
||||
arrayAllocator()->releaseArrayPageIndex(pages_, pageCount_);
|
||||
}
|
||||
|
||||
void ValueInternalArray::swap(ValueInternalArray& other) {
|
||||
Value** tempPages = pages_;
|
||||
pages_ = other.pages_;
|
||||
other.pages_ = tempPages;
|
||||
ArrayIndex tempSize = size_;
|
||||
size_ = other.size_;
|
||||
other.size_ = tempSize;
|
||||
PageIndex tempPageCount = pageCount_;
|
||||
pageCount_ = other.pageCount_;
|
||||
other.pageCount_ = tempPageCount;
|
||||
}
|
||||
|
||||
void ValueInternalArray::clear() {
|
||||
ValueInternalArray dummy;
|
||||
swap(dummy);
|
||||
}
|
||||
|
||||
void ValueInternalArray::resize(ArrayIndex newSize) {
|
||||
if (newSize == 0)
|
||||
clear();
|
||||
else if (newSize < size_) {
|
||||
IteratorState it;
|
||||
IteratorState itEnd;
|
||||
makeIterator(it, newSize);
|
||||
makeIterator(itEnd, size_);
|
||||
for (; !equals(it, itEnd); increment(it)) {
|
||||
Value* value = &dereference(it);
|
||||
value->~Value();
|
||||
}
|
||||
PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage;
|
||||
PageIndex lastPageIndex = size_ / itemsPerPage;
|
||||
for (; pageIndex < lastPageIndex; ++pageIndex)
|
||||
arrayAllocator()->releaseArrayPage(pages_[pageIndex]);
|
||||
size_ = newSize;
|
||||
} else if (newSize > size_)
|
||||
resolveReference(newSize);
|
||||
}
|
||||
|
||||
void ValueInternalArray::makeIndexValid(ArrayIndex index) {
|
||||
// Need to enlarge page index ?
|
||||
if (index >= pageCount_ * itemsPerPage) {
|
||||
PageIndex minNewPages = (index + 1) / itemsPerPage;
|
||||
arrayAllocator()->reallocateArrayPageIndex(pages_, pageCount_, minNewPages);
|
||||
JSON_ASSERT_MESSAGE(pageCount_ >= minNewPages,
|
||||
"ValueInternalArray::reserve(): bad reallocation");
|
||||
}
|
||||
|
||||
// Need to allocate new pages ?
|
||||
ArrayIndex nextPageIndex = (size_ % itemsPerPage) != 0
|
||||
? size_ - (size_ % itemsPerPage) + itemsPerPage
|
||||
: size_;
|
||||
if (nextPageIndex <= index) {
|
||||
PageIndex pageIndex = nextPageIndex / itemsPerPage;
|
||||
PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1;
|
||||
for (; pageToAllocate-- > 0; ++pageIndex)
|
||||
pages_[pageIndex] = arrayAllocator()->allocateArrayPage();
|
||||
}
|
||||
|
||||
// Initialize all new entries
|
||||
IteratorState it;
|
||||
IteratorState itEnd;
|
||||
makeIterator(it, size_);
|
||||
size_ = index + 1;
|
||||
makeIterator(itEnd, size_);
|
||||
for (; !equals(it, itEnd); increment(it)) {
|
||||
Value* value = &dereference(it);
|
||||
new (value) Value(); // Construct a default value using placement new
|
||||
}
|
||||
}
|
||||
|
||||
Value& ValueInternalArray::resolveReference(ArrayIndex index) {
|
||||
if (index >= size_)
|
||||
makeIndexValid(index);
|
||||
return pages_[index / itemsPerPage][index % itemsPerPage];
|
||||
}
|
||||
|
||||
Value* ValueInternalArray::find(ArrayIndex index) const {
|
||||
if (index >= size_)
|
||||
return 0;
|
||||
return &(pages_[index / itemsPerPage][index % itemsPerPage]);
|
||||
}
|
||||
|
||||
ValueInternalArray::ArrayIndex ValueInternalArray::size() const {
|
||||
return size_;
|
||||
}
|
||||
|
||||
int ValueInternalArray::distance(const IteratorState& x,
|
||||
const IteratorState& y) {
|
||||
return indexOf(y) - indexOf(x);
|
||||
}
|
||||
|
||||
ValueInternalArray::ArrayIndex
|
||||
ValueInternalArray::indexOf(const IteratorState& iterator) {
|
||||
if (!iterator.array_)
|
||||
return ArrayIndex(-1);
|
||||
return ArrayIndex((iterator.currentPageIndex_ - iterator.array_->pages_) *
|
||||
itemsPerPage +
|
||||
iterator.currentItemIndex_);
|
||||
}
|
||||
|
||||
int ValueInternalArray::compare(const ValueInternalArray& other) const {
|
||||
int sizeDiff(size_ - other.size_);
|
||||
if (sizeDiff != 0)
|
||||
return sizeDiff;
|
||||
|
||||
for (ArrayIndex index = 0; index < size_; ++index) {
|
||||
int diff = pages_[index / itemsPerPage][index % itemsPerPage].compare(
|
||||
other.pages_[index / itemsPerPage][index % itemsPerPage]);
|
||||
if (diff != 0)
|
||||
return diff;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
} // namespace Json
|
473
3rdparty/jsoncpp/src/lib_json/json_internalmap.inl
vendored
@ -1,473 +0,0 @@
|
||||
// Copyright 2007-2010 Baptiste Lepilleur
|
||||
// Distributed under MIT license, or public domain if desired and
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
// included by json_value.cpp
|
||||
|
||||
namespace Json {
|
||||
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
// class ValueInternalMap
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
|
||||
/** \internal MUST be safely initialized using memset( this, 0,
|
||||
* sizeof(ValueInternalLink) );
|
||||
* This optimization is used by the fast allocator.
|
||||
*/
|
||||
ValueInternalLink::ValueInternalLink() : previous_(0), next_(0) {}
|
||||
|
||||
ValueInternalLink::~ValueInternalLink() {
|
||||
for (int index = 0; index < itemPerLink; ++index) {
|
||||
if (!items_[index].isItemAvailable()) {
|
||||
if (!items_[index].isMemberNameStatic())
|
||||
free(keys_[index]);
|
||||
} else
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
ValueMapAllocator::~ValueMapAllocator() {}
|
||||
|
||||
#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
|
||||
class DefaultValueMapAllocator : public ValueMapAllocator {
|
||||
public: // overridden from ValueMapAllocator
|
||||
virtual ValueInternalMap* newMap() { return new ValueInternalMap(); }
|
||||
|
||||
virtual ValueInternalMap* newMapCopy(const ValueInternalMap& other) {
|
||||
return new ValueInternalMap(other);
|
||||
}
|
||||
|
||||
virtual void destructMap(ValueInternalMap* map) { delete map; }
|
||||
|
||||
virtual ValueInternalLink* allocateMapBuckets(unsigned int size) {
|
||||
return new ValueInternalLink[size];
|
||||
}
|
||||
|
||||
virtual void releaseMapBuckets(ValueInternalLink* links) { delete[] links; }
|
||||
|
||||
virtual ValueInternalLink* allocateMapLink() {
|
||||
return new ValueInternalLink();
|
||||
}
|
||||
|
||||
virtual void releaseMapLink(ValueInternalLink* link) { delete link; }
|
||||
};
|
||||
#else
|
||||
/// @todo make this thread-safe (lock when accessing batch allocator)
|
||||
class DefaultValueMapAllocator : public ValueMapAllocator {
|
||||
public: // overridden from ValueMapAllocator
|
||||
virtual ValueInternalMap* newMap() {
|
||||
ValueInternalMap* map = mapsAllocator_.allocate();
|
||||
new (map) ValueInternalMap(); // placement new
|
||||
return map;
|
||||
}
|
||||
|
||||
virtual ValueInternalMap* newMapCopy(const ValueInternalMap& other) {
|
||||
ValueInternalMap* map = mapsAllocator_.allocate();
|
||||
new (map) ValueInternalMap(other); // placement new
|
||||
return map;
|
||||
}
|
||||
|
||||
virtual void destructMap(ValueInternalMap* map) {
|
||||
if (map) {
|
||||
map->~ValueInternalMap();
|
||||
mapsAllocator_.release(map);
|
||||
}
|
||||
}
|
||||
|
||||
virtual ValueInternalLink* allocateMapBuckets(unsigned int size) {
|
||||
return new ValueInternalLink[size];
|
||||
}
|
||||
|
||||
virtual void releaseMapBuckets(ValueInternalLink* links) { delete[] links; }
|
||||
|
||||
virtual ValueInternalLink* allocateMapLink() {
|
||||
ValueInternalLink* link = linksAllocator_.allocate();
|
||||
memset(link, 0, sizeof(ValueInternalLink));
|
||||
return link;
|
||||
}
|
||||
|
||||
virtual void releaseMapLink(ValueInternalLink* link) {
|
||||
link->~ValueInternalLink();
|
||||
linksAllocator_.release(link);
|
||||
}
|
||||
|
||||
private:
|
||||
BatchAllocator<ValueInternalMap, 1> mapsAllocator_;
|
||||
BatchAllocator<ValueInternalLink, 1> linksAllocator_;
|
||||
};
|
||||
#endif
|
||||
|
||||
static ValueMapAllocator*& mapAllocator() {
|
||||
static DefaultValueMapAllocator defaultAllocator;
|
||||
static ValueMapAllocator* mapAllocator = &defaultAllocator;
|
||||
return mapAllocator;
|
||||
}
|
||||
|
||||
static struct DummyMapAllocatorInitializer {
|
||||
DummyMapAllocatorInitializer() {
|
||||
mapAllocator(); // ensure mapAllocator() statics are initialized before
|
||||
// main().
|
||||
}
|
||||
} dummyMapAllocatorInitializer;
|
||||
|
||||
// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32.
|
||||
|
||||
/*
|
||||
use linked list hash map.
|
||||
buckets array is a container.
|
||||
linked list element contains 6 key/values. (memory = (16+4) * 6 + 4 = 124)
|
||||
value have extra state: valid, available, deleted
|
||||
*/
|
||||
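The 124-byte figure in the comment above is just the assumed 32-bit layout added up: six key/value slots at (16 + 4) bytes each plus one 4-byte link pointer, i.e. (16 + 4) * 6 + 4 = 124.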
|
||||
ValueInternalMap::ValueInternalMap()
|
||||
: buckets_(0), tailLink_(0), bucketsSize_(0), itemCount_(0) {}
|
||||
|
||||
ValueInternalMap::ValueInternalMap(const ValueInternalMap& other)
|
||||
: buckets_(0), tailLink_(0), bucketsSize_(0), itemCount_(0) {
|
||||
reserve(other.itemCount_);
|
||||
IteratorState it;
|
||||
IteratorState itEnd;
|
||||
other.makeBeginIterator(it);
|
||||
other.makeEndIterator(itEnd);
|
||||
for (; !equals(it, itEnd); increment(it)) {
|
||||
bool isStatic;
|
||||
const char* memberName = key(it, isStatic);
|
||||
const Value& aValue = value(it);
|
||||
resolveReference(memberName, isStatic) = aValue;
|
||||
}
|
||||
}
|
||||
|
||||
ValueInternalMap& ValueInternalMap::operator=(ValueInternalMap other) {
|
||||
swap(other);
|
||||
return *this;
|
||||
}
|
||||
|
||||
ValueInternalMap::~ValueInternalMap() {
|
||||
if (buckets_) {
|
||||
for (BucketIndex bucketIndex = 0; bucketIndex < bucketsSize_;
|
||||
++bucketIndex) {
|
||||
ValueInternalLink* link = buckets_[bucketIndex].next_;
|
||||
while (link) {
|
||||
ValueInternalLink* linkToRelease = link;
|
||||
link = link->next_;
|
||||
mapAllocator()->releaseMapLink(linkToRelease);
|
||||
}
|
||||
}
|
||||
mapAllocator()->releaseMapBuckets(buckets_);
|
||||
}
|
||||
}
|
||||
|
||||
void ValueInternalMap::swap(ValueInternalMap& other) {
|
||||
ValueInternalLink* tempBuckets = buckets_;
|
||||
buckets_ = other.buckets_;
|
||||
other.buckets_ = tempBuckets;
|
||||
ValueInternalLink* tempTailLink = tailLink_;
|
||||
tailLink_ = other.tailLink_;
|
||||
other.tailLink_ = tempTailLink;
|
||||
BucketIndex tempBucketsSize = bucketsSize_;
|
||||
bucketsSize_ = other.bucketsSize_;
|
||||
other.bucketsSize_ = tempBucketsSize;
|
||||
BucketIndex tempItemCount = itemCount_;
|
||||
itemCount_ = other.itemCount_;
|
||||
other.itemCount_ = tempItemCount;
|
||||
}
|
||||
|
||||
void ValueInternalMap::clear() {
|
||||
ValueInternalMap dummy;
|
||||
swap(dummy);
|
||||
}
|
||||
|
||||
ValueInternalMap::BucketIndex ValueInternalMap::size() const {
|
||||
return itemCount_;
|
||||
}
|
||||
|
||||
bool ValueInternalMap::reserveDelta(BucketIndex growth) {
|
||||
return reserve(itemCount_ + growth);
|
||||
}
|
||||
|
||||
bool ValueInternalMap::reserve(BucketIndex newItemCount) {
|
||||
if (!buckets_ && newItemCount > 0) {
|
||||
buckets_ = mapAllocator()->allocateMapBuckets(1);
|
||||
bucketsSize_ = 1;
|
||||
tailLink_ = &buckets_[0];
|
||||
}
|
||||
// BucketIndex idealBucketCount = (newItemCount +
|
||||
// ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink;
|
||||
return true;
|
||||
}
|
||||
|
||||
const Value* ValueInternalMap::find(const char* key) const {
|
||||
if (!bucketsSize_)
|
||||
return 0;
|
||||
HashKey hashedKey = hash(key);
|
||||
BucketIndex bucketIndex = hashedKey % bucketsSize_;
|
||||
for (const ValueInternalLink* current = &buckets_[bucketIndex]; current != 0;
|
||||
current = current->next_) {
|
||||
for (BucketIndex index = 0; index < ValueInternalLink::itemPerLink;
|
||||
++index) {
|
||||
if (current->items_[index].isItemAvailable())
|
||||
return 0;
|
||||
if (strcmp(key, current->keys_[index]) == 0)
|
||||
return ¤t->items_[index];
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
Value* ValueInternalMap::find(const char* key) {
|
||||
const ValueInternalMap* constThis = this;
|
||||
return const_cast<Value*>(constThis->find(key));
|
||||
}
|
||||
|
||||
Value& ValueInternalMap::resolveReference(const char* key, bool isStatic) {
|
||||
HashKey hashedKey = hash(key);
|
||||
if (bucketsSize_) {
|
||||
BucketIndex bucketIndex = hashedKey % bucketsSize_;
|
||||
ValueInternalLink** previous = 0;
|
||||
BucketIndex index;
|
||||
for (ValueInternalLink* current = &buckets_[bucketIndex]; current != 0;
|
||||
previous = ¤t->next_, current = current->next_) {
|
||||
for (index = 0; index < ValueInternalLink::itemPerLink; ++index) {
|
||||
if (current->items_[index].isItemAvailable())
|
||||
return setNewItem(key, isStatic, current, index);
|
||||
if (strcmp(key, current->keys_[index]) == 0)
|
||||
return current->items_[index];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
reserveDelta(1);
|
||||
return unsafeAdd(key, isStatic, hashedKey);
|
||||
}
|
||||
|
||||
void ValueInternalMap::remove(const char* key) {
|
||||
HashKey hashedKey = hash(key);
|
||||
if (!bucketsSize_)
|
||||
return;
|
||||
BucketIndex bucketIndex = hashedKey % bucketsSize_;
|
||||
for (ValueInternalLink* link = &buckets_[bucketIndex]; link != 0;
|
||||
link = link->next_) {
|
||||
BucketIndex index;
|
||||
for (index = 0; index < ValueInternalLink::itemPerLink; ++index) {
|
||||
if (link->items_[index].isItemAvailable())
|
||||
return;
|
||||
if (strcmp(key, link->keys_[index]) == 0) {
|
||||
doActualRemove(link, index, bucketIndex);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void ValueInternalMap::doActualRemove(ValueInternalLink* link,
|
||||
BucketIndex index,
|
||||
BucketIndex bucketIndex) {
|
||||
// find last item of the bucket and swap it with the 'removed' one.
|
||||
// set removed items flags to 'available'.
|
||||
// if the last page only contains 'available' items, then deallocate it (it's
|
||||
// empty)
|
||||
ValueInternalLink*& lastLink = getLastLinkInBucket(index);
|
||||
BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1
|
||||
for (; lastItemIndex < ValueInternalLink::itemPerLink;
|
||||
++lastItemIndex) // may be optimized with a dichotomic (binary) search
|
||||
{
|
||||
if (lastLink->items_[lastItemIndex].isItemAvailable())
|
||||
break;
|
||||
}
|
||||
|
||||
BucketIndex lastUsedIndex = lastItemIndex - 1;
|
||||
Value* valueToDelete = &link->items_[index];
|
||||
Value* valueToPreserve = &lastLink->items_[lastUsedIndex];
|
||||
if (valueToDelete != valueToPreserve)
|
||||
valueToDelete->swap(*valueToPreserve);
|
||||
if (lastUsedIndex == 0) // page is now empty
|
||||
{ // remove it from bucket linked list and delete it.
|
||||
ValueInternalLink* linkPreviousToLast = lastLink->previous_;
|
||||
if (linkPreviousToLast != 0) // cannot delete the bucket link itself.
|
||||
{
|
||||
mapAllocator()->releaseMapLink(lastLink);
|
||||
linkPreviousToLast->next_ = 0;
|
||||
lastLink = linkPreviousToLast;
|
||||
}
|
||||
} else {
|
||||
Value dummy;
|
||||
valueToPreserve->swap(dummy); // restore deleted to default Value.
|
||||
valueToPreserve->setItemUsed(false);
|
||||
}
|
||||
--itemCount_;
|
||||
}
|
||||
|
||||
ValueInternalLink*&
|
||||
ValueInternalMap::getLastLinkInBucket(BucketIndex bucketIndex) {
|
||||
if (bucketIndex == bucketsSize_ - 1)
|
||||
return tailLink_;
|
||||
ValueInternalLink*& previous = buckets_[bucketIndex + 1].previous_;
|
||||
if (!previous)
|
||||
previous = &buckets_[bucketIndex];
|
||||
return previous;
|
||||
}
|
||||
|
||||
Value& ValueInternalMap::setNewItem(const char* key,
|
||||
bool isStatic,
|
||||
ValueInternalLink* link,
|
||||
BucketIndex index) {
|
||||
char* duplicatedKey = makeMemberName(key);
|
||||
++itemCount_;
|
||||
link->keys_[index] = duplicatedKey;
|
||||
link->items_[index].setItemUsed();
|
||||
link->items_[index].setMemberNameIsStatic(isStatic);
|
||||
return link->items_[index]; // items already default constructed.
|
||||
}
|
||||
|
||||
Value&
|
||||
ValueInternalMap::unsafeAdd(const char* key, bool isStatic, HashKey hashedKey) {
|
||||
JSON_ASSERT_MESSAGE(bucketsSize_ > 0,
|
||||
"ValueInternalMap::unsafeAdd(): internal logic error.");
|
||||
BucketIndex bucketIndex = hashedKey % bucketsSize_;
|
||||
ValueInternalLink*& previousLink = getLastLinkInBucket(bucketIndex);
|
||||
ValueInternalLink* link = previousLink;
|
||||
BucketIndex index;
|
||||
for (index = 0; index < ValueInternalLink::itemPerLink; ++index) {
|
||||
if (link->items_[index].isItemAvailable())
|
||||
break;
|
||||
}
|
||||
if (index == ValueInternalLink::itemPerLink) // need to add a new page
|
||||
{
|
||||
ValueInternalLink* newLink = mapAllocator()->allocateMapLink();
|
||||
index = 0;
|
||||
link->next_ = newLink;
|
||||
previousLink = newLink;
|
||||
link = newLink;
|
||||
}
|
||||
return setNewItem(key, isStatic, link, index);
|
||||
}
|
||||
|
||||
ValueInternalMap::HashKey ValueInternalMap::hash(const char* key) const {
|
||||
HashKey hash = 0;
|
||||
while (*key)
|
||||
hash += *key++ * 37;
|
||||
return hash;
|
||||
}
|
||||
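The hash above is the simple multiplicative accumulator announced by the h(K) note earlier in the file; every byte contributes key[i] * 37. A standalone sketch with a worked value:

static unsigned hashOf(const char* key) {
  unsigned hash = 0;
  while (*key)
    hash += *key++ * 37;   // "abc" -> (97 + 98 + 99) * 37 == 10878
  return hash;
}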
|
||||
int ValueInternalMap::compare(const ValueInternalMap& other) const {
|
||||
int sizeDiff(itemCount_ - other.itemCount_);
|
||||
if (sizeDiff != 0)
|
||||
return sizeDiff;
|
||||
// A strict order guarantee is required. Compare all keys FIRST, then compare
|
||||
// values.
|
||||
IteratorState it;
|
||||
IteratorState itEnd;
|
||||
makeBeginIterator(it);
|
||||
makeEndIterator(itEnd);
|
||||
for (; !equals(it, itEnd); increment(it)) {
|
||||
if (!other.find(key(it)))
|
||||
return 1;
|
||||
}
|
||||
|
||||
// All keys are equal; now compare values
|
||||
makeBeginIterator(it);
|
||||
for (; !equals(it, itEnd); increment(it)) {
|
||||
const Value* otherValue = other.find(key(it));
|
||||
int valueDiff = value(it).compare(*otherValue);
|
||||
if (valueDiff != 0)
|
||||
return valueDiff;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
void ValueInternalMap::makeBeginIterator(IteratorState& it) const {
|
||||
it.map_ = const_cast<ValueInternalMap*>(this);
|
||||
it.bucketIndex_ = 0;
|
||||
it.itemIndex_ = 0;
|
||||
it.link_ = buckets_;
|
||||
}
|
||||
|
||||
void ValueInternalMap::makeEndIterator(IteratorState& it) const {
|
||||
it.map_ = const_cast<ValueInternalMap*>(this);
|
||||
it.bucketIndex_ = bucketsSize_;
|
||||
it.itemIndex_ = 0;
|
||||
it.link_ = 0;
|
||||
}
|
||||
|
||||
bool ValueInternalMap::equals(const IteratorState& x,
|
||||
const IteratorState& other) {
|
||||
return x.map_ == other.map_ && x.bucketIndex_ == other.bucketIndex_ &&
|
||||
x.link_ == other.link_ && x.itemIndex_ == other.itemIndex_;
|
||||
}
|
||||
|
||||
void ValueInternalMap::incrementBucket(IteratorState& iterator) {
|
||||
++iterator.bucketIndex_;
|
||||
JSON_ASSERT_MESSAGE(
|
||||
iterator.bucketIndex_ <= iterator.map_->bucketsSize_,
|
||||
"ValueInternalMap::increment(): attempting to iterate beyond end.");
|
||||
if (iterator.bucketIndex_ == iterator.map_->bucketsSize_)
|
||||
iterator.link_ = 0;
|
||||
else
|
||||
iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]);
|
||||
iterator.itemIndex_ = 0;
|
||||
}
|
||||
|
||||
void ValueInternalMap::increment(IteratorState& iterator) {
|
||||
JSON_ASSERT_MESSAGE(iterator.map_,
|
||||
"Attempting to iterator using invalid iterator.");
|
||||
++iterator.itemIndex_;
|
||||
if (iterator.itemIndex_ == ValueInternalLink::itemPerLink) {
|
||||
JSON_ASSERT_MESSAGE(
|
||||
iterator.link_ != 0,
|
||||
"ValueInternalMap::increment(): attempting to iterate beyond end.");
|
||||
iterator.link_ = iterator.link_->next_;
|
||||
if (iterator.link_ == 0)
|
||||
incrementBucket(iterator);
|
||||
} else if (iterator.link_->items_[iterator.itemIndex_].isItemAvailable()) {
|
||||
incrementBucket(iterator);
|
||||
}
|
||||
}
|
||||
|
||||
void ValueInternalMap::decrement(IteratorState& iterator) {
|
||||
if (iterator.itemIndex_ == 0) {
|
||||
JSON_ASSERT_MESSAGE(iterator.map_,
|
||||
"Attempting to iterate using invalid iterator.");
|
||||
if (iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_]) {
|
||||
JSON_ASSERT_MESSAGE(iterator.bucketIndex_ > 0,
|
||||
"Attempting to iterate beyond beginning.");
|
||||
--(iterator.bucketIndex_);
|
||||
}
|
||||
iterator.link_ = iterator.link_->previous_;
|
||||
iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1;
|
||||
}
|
||||
}
|
||||
|
||||
const char* ValueInternalMap::key(const IteratorState& iterator) {
|
||||
JSON_ASSERT_MESSAGE(iterator.link_,
|
||||
"Attempting to iterate using invalid iterator.");
|
||||
return iterator.link_->keys_[iterator.itemIndex_];
|
||||
}
|
||||
|
||||
const char* ValueInternalMap::key(const IteratorState& iterator,
|
||||
bool& isStatic) {
|
||||
JSON_ASSERT_MESSAGE(iterator.link_,
|
||||
"Attempting to iterate using invalid iterator.");
|
||||
isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic();
|
||||
return iterator.link_->keys_[iterator.itemIndex_];
|
||||
}
|
||||
|
||||
Value& ValueInternalMap::value(const IteratorState& iterator) {
|
||||
JSON_ASSERT_MESSAGE(iterator.link_,
|
||||
"Attempting to iterate using invalid iterator.");
|
||||
return iterator.link_->items_[iterator.itemIndex_];
|
||||
}
|
||||
|
||||
int ValueInternalMap::distance(const IteratorState& x, const IteratorState& y) {
|
||||
int offset = 0;
|
||||
IteratorState it = x;
|
||||
while (!equals(it, y))
|
||||
increment(it);
|
||||
return offset;
|
||||
}
|
||||
|
||||
} // namespace Json
|
1280
3rdparty/jsoncpp/src/lib_json/json_reader.cpp
vendored
File diff suppressed because it is too large
666
3rdparty/jsoncpp/src/lib_json/json_value.cpp
vendored
File diff suppressed because it is too large
129
3rdparty/jsoncpp/src/lib_json/json_valueiterator.inl
vendored
@ -16,68 +16,29 @@ namespace Json {
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
|
||||
ValueIteratorBase::ValueIteratorBase()
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
: current_(), isNull_(true) {
|
||||
}
|
||||
#else
|
||||
: isArray_(true), isNull_(true) {
|
||||
iterator_.array_ = ValueInternalArray::IteratorState();
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
ValueIteratorBase::ValueIteratorBase(
|
||||
const Value::ObjectValues::iterator& current)
|
||||
: current_(current), isNull_(false) {}
|
||||
#else
|
||||
ValueIteratorBase::ValueIteratorBase(
|
||||
const ValueInternalArray::IteratorState& state)
|
||||
: isArray_(true) {
|
||||
iterator_.array_ = state;
|
||||
}
|
||||
|
||||
ValueIteratorBase::ValueIteratorBase(
|
||||
const ValueInternalMap::IteratorState& state)
|
||||
: isArray_(false) {
|
||||
iterator_.map_ = state;
|
||||
}
|
||||
#endif
|
||||
|
||||
Value& ValueIteratorBase::deref() const {
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
return current_->second;
|
||||
#else
|
||||
if (isArray_)
|
||||
return ValueInternalArray::dereference(iterator_.array_);
|
||||
return ValueInternalMap::value(iterator_.map_);
|
||||
#endif
|
||||
}
|
||||
|
||||
void ValueIteratorBase::increment() {
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
++current_;
|
||||
#else
|
||||
if (isArray_)
|
||||
ValueInternalArray::increment(iterator_.array_);
|
||||
ValueInternalMap::increment(iterator_.map_);
|
||||
#endif
|
||||
}
|
||||
|
||||
void ValueIteratorBase::decrement() {
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
--current_;
|
||||
#else
|
||||
if (isArray_)
|
||||
ValueInternalArray::decrement(iterator_.array_);
|
||||
ValueInternalMap::decrement(iterator_.map_);
|
||||
#endif
|
||||
}
|
||||
|
||||
ValueIteratorBase::difference_type
|
||||
ValueIteratorBase::computeDistance(const SelfType& other) const {
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
#ifdef JSON_USE_CPPTL_SMALLMAP
|
||||
return current_ - other.current_;
|
||||
return other.current_ - current_;
|
||||
#else
|
||||
// Iterators for null values are initialized using the default
// constructor, which initializes current_ to the default
|
||||
@ -100,80 +61,58 @@ ValueIteratorBase::computeDistance(const SelfType& other) const {
|
||||
}
|
||||
return myDistance;
|
||||
#endif
|
||||
#else
|
||||
if (isArray_)
|
||||
return ValueInternalArray::distance(iterator_.array_,
|
||||
other.iterator_.array_);
|
||||
return ValueInternalMap::distance(iterator_.map_, other.iterator_.map_);
|
||||
#endif
|
||||
}
|
||||
|
||||
bool ValueIteratorBase::isEqual(const SelfType& other) const {
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
if (isNull_) {
|
||||
return other.isNull_;
|
||||
}
|
||||
return current_ == other.current_;
|
||||
#else
|
||||
if (isArray_)
|
||||
return ValueInternalArray::equals(iterator_.array_, other.iterator_.array_);
|
||||
return ValueInternalMap::equals(iterator_.map_, other.iterator_.map_);
|
||||
#endif
|
||||
}
|
||||
|
||||
void ValueIteratorBase::copy(const SelfType& other) {
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
current_ = other.current_;
|
||||
isNull_ = other.isNull_;
|
||||
#else
|
||||
if (isArray_)
|
||||
iterator_.array_ = other.iterator_.array_;
|
||||
iterator_.map_ = other.iterator_.map_;
|
||||
#endif
|
||||
}
|
||||
|
||||
Value ValueIteratorBase::key() const {
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
const Value::CZString czstring = (*current_).first;
|
||||
if (czstring.c_str()) {
|
||||
if (czstring.data()) {
|
||||
if (czstring.isStaticString())
|
||||
return Value(StaticString(czstring.c_str()));
|
||||
return Value(czstring.c_str());
|
||||
return Value(StaticString(czstring.data()));
|
||||
return Value(czstring.data(), czstring.data() + czstring.length());
|
||||
}
|
||||
return Value(czstring.index());
|
||||
#else
|
||||
if (isArray_)
|
||||
return Value(ValueInternalArray::indexOf(iterator_.array_));
|
||||
bool isStatic;
|
||||
const char* memberName = ValueInternalMap::key(iterator_.map_, isStatic);
|
||||
if (isStatic)
|
||||
return Value(StaticString(memberName));
|
||||
return Value(memberName);
|
||||
#endif
|
||||
}
|
||||
|
||||
UInt ValueIteratorBase::index() const {
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
const Value::CZString czstring = (*current_).first;
|
||||
if (!czstring.c_str())
|
||||
if (!czstring.data())
|
||||
return czstring.index();
|
||||
return Value::UInt(-1);
|
||||
#else
|
||||
if (isArray_)
|
||||
return Value::UInt(ValueInternalArray::indexOf(iterator_.array_));
|
||||
return Value::UInt(-1);
|
||||
#endif
|
||||
}
|
||||
|
||||
const char* ValueIteratorBase::memberName() const {
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
const char* name = (*current_).first.c_str();
|
||||
std::string ValueIteratorBase::name() const {
|
||||
char const* key;
|
||||
char const* end;
|
||||
key = memberName(&end);
|
||||
if (!key) return std::string();
|
||||
return std::string(key, end);
|
||||
}
|
||||
|
||||
char const* ValueIteratorBase::memberName() const {
|
||||
const char* name = (*current_).first.data();
|
||||
return name ? name : "";
|
||||
#else
|
||||
if (!isArray_)
|
||||
return ValueInternalMap::key(iterator_.map_);
|
||||
return "";
|
||||
#endif
|
||||
}
|
||||
|
||||
char const* ValueIteratorBase::memberName(char const** end) const {
|
||||
const char* name = (*current_).first.data();
|
||||
if (!name) {
|
||||
*end = NULL;
|
||||
return NULL;
|
||||
}
|
||||
*end = name + (*current_).first.length();
|
||||
return name;
|
||||
}
|
||||
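The two-pointer memberName overload exists so keys with embedded NUL bytes survive; the older c_str()-based path stopped at the first '\0'. A sketch of walking an object with it (assumes a const Json::Value named root):

for (Json::Value::const_iterator it = root.begin(); it != root.end(); ++it) {
  char const* end;
  char const* key = it.memberName(&end);  // key may contain '\0'
  std::string name(key, end);             // length-based copy keeps it intact
}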
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
@ -186,19 +125,9 @@ const char* ValueIteratorBase::memberName() const {
|
||||
|
||||
ValueConstIterator::ValueConstIterator() {}
|
||||
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
ValueConstIterator::ValueConstIterator(
|
||||
const Value::ObjectValues::iterator& current)
|
||||
: ValueIteratorBase(current) {}
|
||||
#else
|
||||
ValueConstIterator::ValueConstIterator(
|
||||
const ValueInternalArray::IteratorState& state)
|
||||
: ValueIteratorBase(state) {}
|
||||
|
||||
ValueConstIterator::ValueConstIterator(
|
||||
const ValueInternalMap::IteratorState& state)
|
||||
: ValueIteratorBase(state) {}
|
||||
#endif
|
||||
|
||||
ValueConstIterator& ValueConstIterator::
|
||||
operator=(const ValueIteratorBase& other) {
|
||||
@ -216,16 +145,8 @@ operator=(const ValueIteratorBase& other) {
|
||||
|
||||
ValueIterator::ValueIterator() {}
|
||||
|
||||
#ifndef JSON_VALUE_USE_INTERNAL_MAP
|
||||
ValueIterator::ValueIterator(const Value::ObjectValues::iterator& current)
|
||||
: ValueIteratorBase(current) {}
|
||||
#else
|
||||
ValueIterator::ValueIterator(const ValueInternalArray::IteratorState& state)
|
||||
: ValueIteratorBase(state) {}
|
||||
|
||||
ValueIterator::ValueIterator(const ValueInternalMap::IteratorState& state)
|
||||
: ValueIteratorBase(state) {}
|
||||
#endif
|
||||
|
||||
ValueIterator::ValueIterator(const ValueConstIterator& other)
|
||||
: ValueIteratorBase(other) {}
|
||||
|
644
3rdparty/jsoncpp/src/lib_json/json_writer.cpp
vendored
@ -7,15 +7,35 @@
|
||||
#include <json/writer.h>
|
||||
#include "json_tool.h"
|
||||
#endif // if !defined(JSON_IS_AMALGAMATION)
|
||||
#include <utility>
|
||||
#include <assert.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
#include <sstream>
|
||||
#include <iomanip>
|
||||
#include <math.h>
|
||||
#include <memory>
|
||||
#include <sstream>
|
||||
#include <utility>
|
||||
#include <set>
|
||||
#include <cassert>
|
||||
#include <cstring>
|
||||
#include <cstdio>
|
||||
|
||||
#if defined(_MSC_VER) && _MSC_VER >= 1200 && _MSC_VER < 1800 // Between VC++ 6.0 and VC++ 11.0
|
||||
#include <float.h>
|
||||
#define isfinite _finite
|
||||
#elif defined(__sun) && defined(__SVR4) //Solaris
|
||||
#include <ieeefp.h>
|
||||
#define isfinite finite
|
||||
#else
|
||||
#include <cmath>
|
||||
#define isfinite std::isfinite
|
||||
#endif
|
||||
|
||||
#if defined(_MSC_VER) && _MSC_VER < 1500 // VC++ 8.0 and below
|
||||
#define snprintf _snprintf
|
||||
#elif defined(__ANDROID__)
|
||||
#define snprintf snprintf
|
||||
#elif __cplusplus >= 201103L
|
||||
#define snprintf std::snprintf
|
||||
#endif
|
||||
|
||||
#if defined(__BORLANDC__)
|
||||
#include <float.h>
|
||||
#define isfinite _finite
|
||||
#define snprintf _snprintf
|
||||
@ -26,13 +46,14 @@
|
||||
#pragma warning(disable : 4996)
|
||||
#endif
|
||||
|
||||
#if defined(__sun) && defined(__SVR4) //Solaris
|
||||
#include <ieeefp.h>
|
||||
#define isfinite finite
|
||||
#endif
|
||||
|
||||
namespace Json {
|
||||
|
||||
#if __cplusplus >= 201103L
|
||||
typedef std::unique_ptr<StreamWriter> StreamWriterPtr;
|
||||
#else
|
||||
typedef std::auto_ptr<StreamWriter> StreamWriterPtr;
|
||||
#endif
|
||||
|
||||
static bool containsControlCharacter(const char* str) {
|
||||
while (*str) {
|
||||
if (isControlCharacter(*(str++)))
|
||||
@ -41,6 +62,16 @@ static bool containsControlCharacter(const char* str) {
|
||||
return false;
|
||||
}
|
||||
|
||||
static bool containsControlCharacter0(const char* str, unsigned len) {
|
||||
char const* end = str + len;
|
||||
while (end != str) {
|
||||
if (isControlCharacter(*str) || 0==*str)
|
||||
return true;
|
||||
++str;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
std::string valueToString(LargestInt value) {
|
||||
UIntToStringBuffer buffer;
|
||||
char* current = buffer + sizeof(buffer);
|
||||
@ -175,6 +206,84 @@ std::string valueToQuotedString(const char* value) {
|
||||
return result;
|
||||
}
|
||||
|
||||
// https://github.com/upcaste/upcaste/blob/master/src/upcore/src/cstring/strnpbrk.cpp
|
||||
static char const* strnpbrk(char const* s, char const* accept, size_t n) {
|
||||
assert((s || !n) && accept);
|
||||
|
||||
char const* const end = s + n;
|
||||
for (char const* cur = s; cur < end; ++cur) {
|
||||
int const c = *cur;
|
||||
for (char const* a = accept; *a; ++a) {
|
||||
if (*a == c) {
|
||||
return cur;
|
||||
}
|
||||
}
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
static std::string valueToQuotedStringN(const char* value, unsigned length) {
|
||||
if (value == NULL)
|
||||
return "";
|
||||
// Not sure how to handle unicode...
|
||||
if (strnpbrk(value, "\"\\\b\f\n\r\t", length) == NULL &&
|
||||
!containsControlCharacter0(value, length))
|
||||
return std::string("\"") + value + "\"";
|
||||
// We have to walk value and escape any special characters.
|
||||
// Appending to std::string is not efficient, but this should be rare.
|
||||
// (Note: forward slashes are *not* rare, but I am not escaping them.)
|
||||
std::string::size_type maxsize =
|
||||
length * 2 + 3; // allescaped+quotes+NULL
|
||||
std::string result;
|
||||
result.reserve(maxsize); // to avoid lots of mallocs
|
||||
result += "\"";
|
||||
char const* end = value + length;
|
||||
for (const char* c = value; c != end; ++c) {
|
||||
switch (*c) {
|
||||
case '\"':
|
||||
result += "\\\"";
|
||||
break;
|
||||
case '\\':
|
||||
result += "\\\\";
|
||||
break;
|
||||
case '\b':
|
||||
result += "\\b";
|
||||
break;
|
||||
case '\f':
|
||||
result += "\\f";
|
||||
break;
|
||||
case '\n':
|
||||
result += "\\n";
|
||||
break;
|
||||
case '\r':
|
||||
result += "\\r";
|
||||
break;
|
||||
case '\t':
|
||||
result += "\\t";
|
||||
break;
|
||||
// case '/':
|
||||
// Even though \/ is considered a legal escape in JSON, a bare
|
||||
// slash is also legal, so I see no reason to escape it.
|
||||
// (I hope I am not misunderstanding something.)
|
||||
// blep notes: actually escaping \/ may be useful in javascript to avoid </
|
||||
// sequence.
|
||||
// Should add a flag to allow this compatibility mode and prevent this
|
||||
// sequence from occurring.
|
||||
default:
|
||||
if ((isControlCharacter(*c)) || (*c == 0)) {
|
||||
std::ostringstream oss;
|
||||
oss << "\\u" << std::hex << std::uppercase << std::setfill('0')
|
||||
<< std::setw(4) << static_cast<int>(*c);
|
||||
result += oss.str();
|
||||
} else {
|
||||
result += *c;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
result += "\"";
|
||||
return result;
|
||||
}
|
||||
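A sketch of why the length-taking variant matters: with an explicit length, a value containing an embedded NUL byte is escaped as \u0000 instead of being silently truncated at the first '\0' (this helper is file-local to json_writer.cpp):

std::string s("ab\0c", 4);                        // 4 bytes, embedded NUL
std::string quoted = valueToQuotedStringN(s.data(), 4);
// quoted == "\"ab\\u0000c\"" ; the c_str()-based path would stop at "ab"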
|
||||
// Class Writer
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
Writer::~Writer() {}
|
||||
@ -216,8 +325,14 @@ void FastWriter::writeValue(const Value& value) {
|
||||
document_ += valueToString(value.asDouble());
|
||||
break;
|
||||
case stringValue:
|
||||
document_ += valueToQuotedString(value.asCString());
|
||||
{
|
||||
// Is NULL possible for value.string_?
|
||||
char const* str;
|
||||
char const* end;
|
||||
bool ok = value.getString(&str, &end);
|
||||
if (ok) document_ += valueToQuotedStringN(str, static_cast<unsigned>(end-str));
|
||||
break;
|
||||
}
|
||||
case booleanValue:
|
||||
document_ += valueToString(value.asBool());
|
||||
break;
|
||||
@ -239,7 +354,7 @@ void FastWriter::writeValue(const Value& value) {
|
||||
const std::string& name = *it;
|
||||
if (it != members.begin())
|
||||
document_ += ',';
|
||||
document_ += valueToQuotedString(name.c_str());
|
||||
document_ += valueToQuotedStringN(name.data(), static_cast<unsigned>(name.length()));
|
||||
document_ += yamlCompatiblityEnabled_ ? ": " : ":";
|
||||
writeValue(value[name]);
|
||||
}
|
||||
@ -280,8 +395,15 @@ void StyledWriter::writeValue(const Value& value) {
    pushValue(valueToString(value.asDouble()));
    break;
  case stringValue:
    pushValue(valueToQuotedString(value.asCString()));
  {
    // Is NULL possible for value.string_?
    char const* str;
    char const* end;
    bool ok = value.getString(&str, &end);
    if (ok) pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end-str)));
    else pushValue("");
    break;
  }
  case booleanValue:
    pushValue(valueToString(value.asBool()));
    break;
@ -376,6 +498,9 @@ bool StyledWriter::isMultineArray(const Value& value) {
    addChildValues_ = true;
    int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
    for (int index = 0; index < size; ++index) {
      if (hasCommentForValue(value[index])) {
        isMultiLine = true;
      }
      writeValue(value[index]);
      lineLength += int(childValues_[index].length());
    }
@ -421,26 +546,27 @@ void StyledWriter::writeCommentBeforeValue(const Value& root) {

  document_ += "\n";
  writeIndent();
  std::string normalizedComment = normalizeEOL(root.getComment(commentBefore));
  std::string::const_iterator iter = normalizedComment.begin();
  while (iter != normalizedComment.end()) {
  const std::string& comment = root.getComment(commentBefore);
  std::string::const_iterator iter = comment.begin();
  while (iter != comment.end()) {
    document_ += *iter;
    if (*iter == '\n' && *(iter + 1) == '/')
    if (*iter == '\n' &&
        (iter != comment.end() && *(iter + 1) == '/'))
      writeIndent();
    ++iter;
  }

  // Comments are stripped of newlines, so add one here
  // Comments are stripped of trailing newlines, so add one here
  document_ += "\n";
}

void StyledWriter::writeCommentAfterValueOnSameLine(const Value& root) {
  if (root.hasComment(commentAfterOnSameLine))
    document_ += " " + normalizeEOL(root.getComment(commentAfterOnSameLine));
    document_ += " " + root.getComment(commentAfterOnSameLine);

  if (root.hasComment(commentAfter)) {
    document_ += "\n";
    document_ += normalizeEOL(root.getComment(commentAfter));
    document_ += root.getComment(commentAfter);
    document_ += "\n";
  }
}
@ -451,25 +577,6 @@ bool StyledWriter::hasCommentForValue(const Value& value) {
         value.hasComment(commentAfter);
}

std::string StyledWriter::normalizeEOL(const std::string& text) {
  std::string normalized;
  normalized.reserve(text.length());
  const char* begin = text.c_str();
  const char* end = begin + text.length();
  const char* current = begin;
  while (current != end) {
    char c = *current++;
    if (c == '\r') // mac or dos EOL
    {
      if (*current == '\n') // convert dos EOL
        ++current;
      normalized += '\n';
    } else // handle unix EOL & other char
      normalized += c;
  }
  return normalized;
}

// Class StyledStreamWriter
// //////////////////////////////////////////////////////////////////

@ -481,7 +588,10 @@ void StyledStreamWriter::write(std::ostream& out, const Value& root) {
  document_ = &out;
  addChildValues_ = false;
  indentString_ = "";
  indented_ = true;
  writeCommentBeforeValue(root);
  if (!indented_) writeIndent();
  indented_ = true;
  writeValue(root);
  writeCommentAfterValueOnSameLine(root);
  *document_ << "\n";
@ -503,8 +613,15 @@ void StyledStreamWriter::writeValue(const Value& value) {
    pushValue(valueToString(value.asDouble()));
    break;
  case stringValue:
    pushValue(valueToQuotedString(value.asCString()));
  {
    // Is NULL possible for value.string_?
    char const* str;
    char const* end;
    bool ok = value.getString(&str, &end);
    if (ok) pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end-str)));
    else pushValue("");
    break;
  }
  case booleanValue:
    pushValue(valueToString(value.asBool()));
    break;
@ -557,8 +674,10 @@ void StyledStreamWriter::writeArrayValue(const Value& value) {
      if (hasChildValue)
        writeWithIndent(childValues_[index]);
      else {
        writeIndent();
        if (!indented_) writeIndent();
        indented_ = true;
        writeValue(childValue);
        indented_ = false;
      }
      if (++index == size) {
        writeCommentAfterValueOnSameLine(childValue);
@ -599,6 +718,9 @@ bool StyledStreamWriter::isMultineArray(const Value& value) {
    addChildValues_ = true;
    int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
    for (int index = 0; index < size; ++index) {
      if (hasCommentForValue(value[index])) {
        isMultiLine = true;
      }
      writeValue(value[index]);
      lineLength += int(childValues_[index].length());
    }
@ -616,24 +738,17 @@ void StyledStreamWriter::pushValue(const std::string& value) {
}

void StyledStreamWriter::writeIndent() {
  /*
    Some comments in this method would have been nice. ;-)

    if ( !document_.empty() )
    {
      char last = document_[document_.length()-1];
      if ( last == ' ' ) // already indented
        return;
      if ( last != '\n' ) // Comments may add new-line
        *document_ << '\n';
    }
  */
  // blep intended this to look at the so-far-written string
  // to determine whether we are already indented, but
  // with a stream we cannot do that. So we rely on some saved state.
  // The caller checks indented_.
  *document_ << '\n' << indentString_;
}

void StyledStreamWriter::writeWithIndent(const std::string& value) {
  writeIndent();
  if (!indented_) writeIndent();
  *document_ << value;
  indented_ = false;
}

void StyledStreamWriter::indent() { indentString_ += indentation_; }
@ -646,19 +761,30 @@ void StyledStreamWriter::unindent() {
void StyledStreamWriter::writeCommentBeforeValue(const Value& root) {
  if (!root.hasComment(commentBefore))
    return;
  *document_ << normalizeEOL(root.getComment(commentBefore));
  *document_ << "\n";

  if (!indented_) writeIndent();
  const std::string& comment = root.getComment(commentBefore);
  std::string::const_iterator iter = comment.begin();
  while (iter != comment.end()) {
    *document_ << *iter;
    if (*iter == '\n' &&
        (iter != comment.end() && *(iter + 1) == '/'))
      // writeIndent(); // would include newline
      *document_ << indentString_;
    ++iter;
  }
  indented_ = false;
}

void StyledStreamWriter::writeCommentAfterValueOnSameLine(const Value& root) {
  if (root.hasComment(commentAfterOnSameLine))
    *document_ << " " + normalizeEOL(root.getComment(commentAfterOnSameLine));
    *document_ << ' ' << root.getComment(commentAfterOnSameLine);

  if (root.hasComment(commentAfter)) {
    *document_ << "\n";
    *document_ << normalizeEOL(root.getComment(commentAfter));
    *document_ << "\n";
    writeIndent();
    *document_ << root.getComment(commentAfter);
  }
  indented_ = false;
}

bool StyledStreamWriter::hasCommentForValue(const Value& value) {
@ -667,28 +793,386 @@ bool StyledStreamWriter::hasCommentForValue(const Value& value) {
         value.hasComment(commentAfter);
}

std::string StyledStreamWriter::normalizeEOL(const std::string& text) {
  std::string normalized;
  normalized.reserve(text.length());
  const char* begin = text.c_str();
  const char* end = begin + text.length();
  const char* current = begin;
  while (current != end) {
    char c = *current++;
    if (c == '\r') // mac or dos EOL
    {
      if (*current == '\n') // convert dos EOL
        ++current;
      normalized += '\n';
    } else // handle unix EOL & other char
      normalized += c;
//////////////////////////
// BuiltStyledStreamWriter

/// Scoped enums are not available until C++11.
struct CommentStyle {
  /// Decide whether to write comments.
  enum Enum {
    None, ///< Drop all comments.
    Most, ///< Recover odd behavior of previous versions (not implemented yet).
    All   ///< Keep all comments.
  };
};

struct BuiltStyledStreamWriter : public StreamWriter
{
  BuiltStyledStreamWriter(
      std::string const& indentation,
      CommentStyle::Enum cs,
      std::string const& colonSymbol,
      std::string const& nullSymbol,
      std::string const& endingLineFeedSymbol);
  virtual int write(Value const& root, std::ostream* sout);
private:
  void writeValue(Value const& value);
  void writeArrayValue(Value const& value);
  bool isMultineArray(Value const& value);
  void pushValue(std::string const& value);
  void writeIndent();
  void writeWithIndent(std::string const& value);
  void indent();
  void unindent();
  void writeCommentBeforeValue(Value const& root);
  void writeCommentAfterValueOnSameLine(Value const& root);
  static bool hasCommentForValue(const Value& value);

  typedef std::vector<std::string> ChildValues;

  ChildValues childValues_;
  std::string indentString_;
  int rightMargin_;
  std::string indentation_;
  CommentStyle::Enum cs_;
  std::string colonSymbol_;
  std::string nullSymbol_;
  std::string endingLineFeedSymbol_;
  bool addChildValues_ : 1;
  bool indented_ : 1;
};
BuiltStyledStreamWriter::BuiltStyledStreamWriter(
    std::string const& indentation,
    CommentStyle::Enum cs,
    std::string const& colonSymbol,
    std::string const& nullSymbol,
    std::string const& endingLineFeedSymbol)
  : rightMargin_(74)
  , indentation_(indentation)
  , cs_(cs)
  , colonSymbol_(colonSymbol)
  , nullSymbol_(nullSymbol)
  , endingLineFeedSymbol_(endingLineFeedSymbol)
  , addChildValues_(false)
  , indented_(false)
{
}
int BuiltStyledStreamWriter::write(Value const& root, std::ostream* sout)
{
  sout_ = sout;
  addChildValues_ = false;
  indented_ = true;
  indentString_ = "";
  writeCommentBeforeValue(root);
  if (!indented_) writeIndent();
  indented_ = true;
  writeValue(root);
  writeCommentAfterValueOnSameLine(root);
  *sout_ << endingLineFeedSymbol_;
  sout_ = NULL;
  return 0;
}
void BuiltStyledStreamWriter::writeValue(Value const& value) {
  switch (value.type()) {
  case nullValue:
    pushValue(nullSymbol_);
    break;
  case intValue:
    pushValue(valueToString(value.asLargestInt()));
    break;
  case uintValue:
    pushValue(valueToString(value.asLargestUInt()));
    break;
  case realValue:
    pushValue(valueToString(value.asDouble()));
    break;
|
||||
  case stringValue:
  {
    // Is NULL possible for value.string_?
    char const* str;
    char const* end;
    bool ok = value.getString(&str, &end);
    if (ok) pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end-str)));
    else pushValue("");
    break;
  }
  case booleanValue:
    pushValue(valueToString(value.asBool()));
    break;
  case arrayValue:
    writeArrayValue(value);
    break;
  case objectValue: {
    Value::Members members(value.getMemberNames());
    if (members.empty())
      pushValue("{}");
    else {
      writeWithIndent("{");
      indent();
      Value::Members::iterator it = members.begin();
      for (;;) {
        std::string const& name = *it;
        Value const& childValue = value[name];
        writeCommentBeforeValue(childValue);
        writeWithIndent(valueToQuotedStringN(name.data(), static_cast<unsigned>(name.length())));
        *sout_ << colonSymbol_;
        writeValue(childValue);
        if (++it == members.end()) {
          writeCommentAfterValueOnSameLine(childValue);
          break;
        }
        *sout_ << ",";
        writeCommentAfterValueOnSameLine(childValue);
      }
      unindent();
      writeWithIndent("}");
    }
  } break;
  }
  return normalized;
}

std::ostream& operator<<(std::ostream& sout, const Value& root) {
  Json::StyledStreamWriter writer;
  writer.write(sout, root);
void BuiltStyledStreamWriter::writeArrayValue(Value const& value) {
  unsigned size = value.size();
  if (size == 0)
    pushValue("[]");
  else {
    bool isMultiLine = (cs_ == CommentStyle::All) || isMultineArray(value);
    if (isMultiLine) {
      writeWithIndent("[");
      indent();
      bool hasChildValue = !childValues_.empty();
      unsigned index = 0;
      for (;;) {
        Value const& childValue = value[index];
        writeCommentBeforeValue(childValue);
        if (hasChildValue)
          writeWithIndent(childValues_[index]);
        else {
          if (!indented_) writeIndent();
          indented_ = true;
          writeValue(childValue);
          indented_ = false;
        }
        if (++index == size) {
          writeCommentAfterValueOnSameLine(childValue);
          break;
        }
        *sout_ << ",";
        writeCommentAfterValueOnSameLine(childValue);
      }
      unindent();
      writeWithIndent("]");
    } else // output on a single line
    {
      assert(childValues_.size() == size);
      *sout_ << "[";
      if (!indentation_.empty()) *sout_ << " ";
      for (unsigned index = 0; index < size; ++index) {
        if (index > 0)
          *sout_ << ", ";
        *sout_ << childValues_[index];
      }
      if (!indentation_.empty()) *sout_ << " ";
      *sout_ << "]";
    }
  }
}

bool BuiltStyledStreamWriter::isMultineArray(Value const& value) {
  int size = value.size();
  bool isMultiLine = size * 3 >= rightMargin_;
  childValues_.clear();
  for (int index = 0; index < size && !isMultiLine; ++index) {
    Value const& childValue = value[index];
    isMultiLine =
        isMultiLine || ((childValue.isArray() || childValue.isObject()) &&
                        childValue.size() > 0);
  }
  if (!isMultiLine) // check if line length > max line length
  {
    childValues_.reserve(size);
    addChildValues_ = true;
    int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
    for (int index = 0; index < size; ++index) {
      if (hasCommentForValue(value[index])) {
        isMultiLine = true;
      }
      writeValue(value[index]);
      lineLength += int(childValues_[index].length());
    }
    addChildValues_ = false;
    isMultiLine = isMultiLine || lineLength >= rightMargin_;
  }
  return isMultiLine;
}

void BuiltStyledStreamWriter::pushValue(std::string const& value) {
  if (addChildValues_)
    childValues_.push_back(value);
  else
    *sout_ << value;
}

void BuiltStyledStreamWriter::writeIndent() {
  // blep intended this to look at the so-far-written string
  // to determine whether we are already indented, but
  // with a stream we cannot do that. So we rely on some saved state.
  // The caller checks indented_.

  if (!indentation_.empty()) {
    // In this case, drop newlines too.
    *sout_ << '\n' << indentString_;
  }
}

void BuiltStyledStreamWriter::writeWithIndent(std::string const& value) {
  if (!indented_) writeIndent();
  *sout_ << value;
  indented_ = false;
}

void BuiltStyledStreamWriter::indent() { indentString_ += indentation_; }

void BuiltStyledStreamWriter::unindent() {
  assert(indentString_.size() >= indentation_.size());
  indentString_.resize(indentString_.size() - indentation_.size());
}

void BuiltStyledStreamWriter::writeCommentBeforeValue(Value const& root) {
  if (cs_ == CommentStyle::None) return;
  if (!root.hasComment(commentBefore))
    return;

  if (!indented_) writeIndent();
  const std::string& comment = root.getComment(commentBefore);
  std::string::const_iterator iter = comment.begin();
  while (iter != comment.end()) {
    *sout_ << *iter;
    if (*iter == '\n' &&
        (iter != comment.end() && *(iter + 1) == '/'))
      // writeIndent(); // would write extra newline
      *sout_ << indentString_;
    ++iter;
  }
  indented_ = false;
}

void BuiltStyledStreamWriter::writeCommentAfterValueOnSameLine(Value const& root) {
  if (cs_ == CommentStyle::None) return;
  if (root.hasComment(commentAfterOnSameLine))
    *sout_ << " " + root.getComment(commentAfterOnSameLine);

  if (root.hasComment(commentAfter)) {
    writeIndent();
    *sout_ << root.getComment(commentAfter);
  }
}

// static
bool BuiltStyledStreamWriter::hasCommentForValue(const Value& value) {
  return value.hasComment(commentBefore) ||
         value.hasComment(commentAfterOnSameLine) ||
         value.hasComment(commentAfter);
}

///////////////
// StreamWriter

StreamWriter::StreamWriter()
    : sout_(NULL)
{
}
StreamWriter::~StreamWriter()
{
}
StreamWriter::Factory::~Factory()
{}
StreamWriterBuilder::StreamWriterBuilder()
{
  setDefaults(&settings_);
}
StreamWriterBuilder::~StreamWriterBuilder()
{}
StreamWriter* StreamWriterBuilder::newStreamWriter() const
{
  std::string indentation = settings_["indentation"].asString();
  std::string cs_str = settings_["commentStyle"].asString();
  bool eyc = settings_["enableYAMLCompatibility"].asBool();
  bool dnp = settings_["dropNullPlaceholders"].asBool();
  CommentStyle::Enum cs = CommentStyle::All;
  if (cs_str == "All") {
    cs = CommentStyle::All;
  } else if (cs_str == "None") {
    cs = CommentStyle::None;
  } else {
    throwRuntimeError("commentStyle must be 'All' or 'None'");
  }
  std::string colonSymbol = " : ";
  if (eyc) {
    colonSymbol = ": ";
  } else if (indentation.empty()) {
    colonSymbol = ":";
  }
  std::string nullSymbol = "null";
  if (dnp) {
    nullSymbol = "";
  }
  std::string endingLineFeedSymbol = "";
  return new BuiltStyledStreamWriter(
      indentation, cs,
      colonSymbol, nullSymbol, endingLineFeedSymbol);
}
static void getValidWriterKeys(std::set<std::string>* valid_keys)
{
  valid_keys->clear();
  valid_keys->insert("indentation");
  valid_keys->insert("commentStyle");
  valid_keys->insert("enableYAMLCompatibility");
  valid_keys->insert("dropNullPlaceholders");
}
bool StreamWriterBuilder::validate(Json::Value* invalid) const
{
  Json::Value my_invalid;
  if (!invalid) invalid = &my_invalid; // so we do not need to test for NULL
  Json::Value& inv = *invalid;
  std::set<std::string> valid_keys;
  getValidWriterKeys(&valid_keys);
  Value::Members keys = settings_.getMemberNames();
  size_t n = keys.size();
  for (size_t i = 0; i < n; ++i) {
    std::string const& key = keys[i];
    if (valid_keys.find(key) == valid_keys.end()) {
      inv[key] = settings_[key];
    }
  }
  return 0u == inv.size();
}
Value& StreamWriterBuilder::operator[](std::string key)
{
  return settings_[key];
}
// static
void StreamWriterBuilder::setDefaults(Json::Value* settings)
{
  //! [StreamWriterBuilderDefaults]
  (*settings)["commentStyle"] = "All";
  (*settings)["indentation"] = "\t";
  (*settings)["enableYAMLCompatibility"] = false;
  (*settings)["dropNullPlaceholders"] = false;
  //! [StreamWriterBuilderDefaults]
}

std::string writeString(StreamWriter::Factory const& builder, Value const& root) {
  std::ostringstream sout;
  StreamWriterPtr const writer(builder.newStreamWriter());
  writer->write(root, &sout);
  return sout.str();
}

std::ostream& operator<<(std::ostream& sout, Value const& root) {
  StreamWriterBuilder builder;
  StreamWriterPtr const writer(builder.newStreamWriter());
  writer->write(root, &sout);
  return sout;
}
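
The builder API above is the replacement for direct use of the concrete writer classes. A minimal usage sketch, an editor's illustration built only from functions added in this diff (it assumes StreamWriterBuilder implements StreamWriter::Factory, which writeString() requires):

    Json::StreamWriterBuilder builder;
    builder["indentation"] = "  ";     // setDefaults() uses "\t"
    builder["commentStyle"] = "None";  // "All" (the default) keeps comments
    Json::Value root;
    root["answer"] = 42;
    std::string doc = Json::writeString(builder, root);
    // Streaming form, managing the writer manually:
    Json::StreamWriter* writer = builder.newStreamWriter();
    writer->write(root, &std::cout);
    delete writer;

Unknown settings keys are not silently ignored: validate() above reports them, as the BuilderTest fixture later in this commit demonstrates.
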
@ -1,7 +1,4 @@

IF(JSONCPP_LIB_BUILD_SHARED)
    ADD_DEFINITIONS( -DJSON_DLL )
ENDIF(JSONCPP_LIB_BUILD_SHARED)
# vim: et ts=4 sts=4 sw=4 tw=0

ADD_EXECUTABLE( jsoncpp_test
                jsontest.cpp
@ -9,14 +6,33 @@ ADD_EXECUTABLE( jsoncpp_test
                main.cpp
              )

TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib)

IF(BUILD_SHARED_LIBS)
    ADD_DEFINITIONS( -DJSON_DLL )
    TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib)
ELSE(BUILD_SHARED_LIBS)
    TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib_static)
ENDIF(BUILD_SHARED_LIBS)

# another way to solve issue #90
#set_target_properties(jsoncpp_test PROPERTIES COMPILE_FLAGS -ffloat-store)

# Run unit tests in post-build
# (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?)
IF(JSONCPP_WITH_POST_BUILD_UNITTEST)
    ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
                        POST_BUILD
                        COMMAND $<TARGET_FILE:jsoncpp_test>)
    IF(BUILD_SHARED_LIBS)
        # First, copy the shared lib, for Microsoft.
        # Then, run the test executable.
        ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
                            POST_BUILD
                            COMMAND ${CMAKE_COMMAND} -E copy_if_different $<TARGET_FILE:jsoncpp_lib> $<TARGET_FILE_DIR:jsoncpp_test>
                            COMMAND $<TARGET_FILE:jsoncpp_test>)
    ELSE(BUILD_SHARED_LIBS)
        # Just run the test executable.
        ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
                            POST_BUILD
                            COMMAND $<TARGET_FILE:jsoncpp_test>)
    ENDIF(BUILD_SHARED_LIBS)
ENDIF(JSONCPP_WITH_POST_BUILD_UNITTEST)

SET_TARGET_PROPERTIES(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test)
@ -323,7 +323,7 @@ void Runner::listTests() const {
}

int Runner::runCommandLine(int argc, const char* argv[]) const {
  typedef std::deque<std::string> TestNames;
  // typedef std::deque<std::string> TestNames;
  Runner subrunner;
  for (int index = 1; index < argc; ++index) {
    std::string opt = argv[index];

@ -178,8 +178,8 @@ private:

template <typename T, typename U>
TestResult& checkEqual(TestResult& result,
                       const T& expected,
                       const U& actual,
                       T expected,
                       U actual,
                       const char* file,
                       unsigned int line,
                       const char* expr) {
@ -214,7 +214,7 @@ TestResult& checkStringEqual(TestResult& result,
#define JSONTEST_ASSERT_PRED(expr) \
  { \
    JsonTest::PredicateContext _minitest_Context = { \
      result_->predicateId_, __FILE__, __LINE__, #expr \
      result_->predicateId_, __FILE__, __LINE__, #expr, NULL, NULL \
    }; \
    result_->predicateStackTail_->next_ = &_minitest_Context; \
    result_->predicateId_ += 1; \
777
3rdparty/jsoncpp/src/test_lib_json/main.cpp
vendored
@ -6,7 +6,7 @@
#include "jsontest.h"
#include <json/config.h>
#include <json/json.h>
#include <stdexcept>
#include <cstring>

// Make numeric limits more convenient to talk about.
// Assumes int type is 32 bits.
@ -17,8 +17,8 @@
#define kint64min Json::Value::minInt64
#define kuint64max Json::Value::maxUInt64

static const double kdint64max = double(kint64max);
static const float kfint64max = float(kint64max);
//static const double kdint64max = double(kint64max);
//static const float kfint64max = float(kint64max);
static const float kfint32max = float(kint32max);
static const float kfuint32max = float(kuint32max);

@ -198,6 +198,18 @@ JSONTEST_FIXTURE(ValueTest, objects) {

  object1_["some other id"] = "foo";
  JSONTEST_ASSERT_EQUAL(Json::Value("foo"), object1_["some other id"]);
  JSONTEST_ASSERT_EQUAL(Json::Value("foo"), object1_["some other id"]);

  // Remove.
  Json::Value got;
  bool did;
  did = object1_.removeMember("some other id", &got);
  JSONTEST_ASSERT_EQUAL(Json::Value("foo"), got);
  JSONTEST_ASSERT_EQUAL(true, did);
  got = Json::Value("bar");
  did = object1_.removeMember("some other id", &got);
  JSONTEST_ASSERT_EQUAL(Json::Value("bar"), got);
  JSONTEST_ASSERT_EQUAL(false, did);
}

JSONTEST_FIXTURE(ValueTest, arrays) {
@ -240,6 +252,24 @@ JSONTEST_FIXTURE(ValueTest, arrays) {
  array1_[2] = Json::Value(17);
  JSONTEST_ASSERT_EQUAL(Json::Value(), array1_[1]);
  JSONTEST_ASSERT_EQUAL(Json::Value(17), array1_[2]);
  Json::Value got;
  JSONTEST_ASSERT_EQUAL(true, array1_.removeIndex(2, &got));
  JSONTEST_ASSERT_EQUAL(Json::Value(17), got);
  JSONTEST_ASSERT_EQUAL(false, array1_.removeIndex(2, &got)); // gone now
}
JSONTEST_FIXTURE(ValueTest, arrayIssue252)
{
  int count = 5;
  Json::Value root;
  Json::Value item;
  root["array"] = Json::Value::nullRef;
  for (int i = 0; i < count; i++)
  {
    item["a"] = i;
    item["b"] = i;
    root["array"][i] = item;
  }
  //JSONTEST_ASSERT_EQUAL(5, root["array"].size());
}

JSONTEST_FIXTURE(ValueTest, null) {
@ -265,6 +295,8 @@ JSONTEST_FIXTURE(ValueTest, null) {
  JSONTEST_ASSERT_EQUAL(0.0, null_.asDouble());
  JSONTEST_ASSERT_EQUAL(0.0, null_.asFloat());
  JSONTEST_ASSERT_STRING_EQUAL("", null_.asString());

  JSONTEST_ASSERT_EQUAL(Json::Value::null, null_);
}

JSONTEST_FIXTURE(ValueTest, strings) {
@ -1499,6 +1531,126 @@ JSONTEST_FIXTURE(ValueTest, offsetAccessors) {
  JSONTEST_ASSERT(y.getOffsetLimit() == 0);
}

JSONTEST_FIXTURE(ValueTest, StaticString) {
  char mutant[] = "hello";
  Json::StaticString ss(mutant);
  std::string regular(mutant);
  mutant[1] = 'a';
  JSONTEST_ASSERT_STRING_EQUAL("hallo", ss.c_str());
  JSONTEST_ASSERT_STRING_EQUAL("hello", regular.c_str());
  {
    Json::Value root;
    root["top"] = ss;
    JSONTEST_ASSERT_STRING_EQUAL("hallo", root["top"].asString());
    mutant[1] = 'u';
    JSONTEST_ASSERT_STRING_EQUAL("hullo", root["top"].asString());
  }
  {
    Json::Value root;
    root["top"] = regular;
    JSONTEST_ASSERT_STRING_EQUAL("hello", root["top"].asString());
    mutant[1] = 'u';
    JSONTEST_ASSERT_STRING_EQUAL("hello", root["top"].asString());
  }
}
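
The fixture above pins down Json::StaticString's aliasing contract: the character buffer is referenced, not copied, so later writes to the buffer are visible through the Value, and the buffer must outlive it; assigning a std::string copies as usual. Restated as a short editor's sketch using the same API as the test:

    char buf[] = "hello";
    Json::Value v;
    v["top"] = Json::StaticString(buf); // aliases buf, no copy made
    buf[0] = 'y';
    // v["top"].asString() now reads "yello"; buf must outlive v.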

JSONTEST_FIXTURE(ValueTest, CommentBefore) {
  Json::Value val; // fill val
  val.setComment(std::string("// this comment should appear before"), Json::commentBefore);
  Json::StreamWriterBuilder wbuilder;
  wbuilder.settings_["commentStyle"] = "All";
  {
    char const expected[] = "// this comment should appear before\nnull";
    std::string result = Json::writeString(wbuilder, val);
    JSONTEST_ASSERT_STRING_EQUAL(expected, result);
    std::string res2 = val.toStyledString();
    std::string exp2 = "\n";
    exp2 += expected;
    exp2 += "\n";
    JSONTEST_ASSERT_STRING_EQUAL(exp2, res2);
  }
  Json::Value other = "hello";
  val.swapPayload(other);
  {
    char const expected[] = "// this comment should appear before\n\"hello\"";
    std::string result = Json::writeString(wbuilder, val);
    JSONTEST_ASSERT_STRING_EQUAL(expected, result);
    std::string res2 = val.toStyledString();
    std::string exp2 = "\n";
    exp2 += expected;
    exp2 += "\n";
    JSONTEST_ASSERT_STRING_EQUAL(exp2, res2);
    JSONTEST_ASSERT_STRING_EQUAL("null\n", other.toStyledString());
  }
  val = "hello";
  // val.setComment("// this comment should appear before", Json::CommentPlacement::commentBefore);
  // Assignment over-writes comments.
  {
    char const expected[] = "\"hello\"";
    std::string result = Json::writeString(wbuilder, val);
    JSONTEST_ASSERT_STRING_EQUAL(expected, result);
    std::string res2 = val.toStyledString();
    std::string exp2 = "";
    exp2 += expected;
    exp2 += "\n";
    JSONTEST_ASSERT_STRING_EQUAL(exp2, res2);
  }
}

JSONTEST_FIXTURE(ValueTest, zeroes) {
  char const cstr[] = "h\0i";
  std::string binary(cstr, sizeof(cstr)); // include trailing 0
  JSONTEST_ASSERT_EQUAL(4U, binary.length());
  Json::StreamWriterBuilder b;
  {
    Json::Value root;
    root = binary;
    JSONTEST_ASSERT_STRING_EQUAL(binary, root.asString());
  }
  {
    char const top[] = "top";
    Json::Value root;
    root[top] = binary;
    JSONTEST_ASSERT_STRING_EQUAL(binary, root[top].asString());
    Json::Value removed;
    bool did;
    did = root.removeMember(top, top + sizeof(top) - 1U,
        &removed);
    JSONTEST_ASSERT(did);
    JSONTEST_ASSERT_STRING_EQUAL(binary, removed.asString());
    did = root.removeMember(top, top + sizeof(top) - 1U,
        &removed);
    JSONTEST_ASSERT(!did);
    JSONTEST_ASSERT_STRING_EQUAL(binary, removed.asString()); // still
  }
}

JSONTEST_FIXTURE(ValueTest, zeroesInKeys) {
  char const cstr[] = "h\0i";
  std::string binary(cstr, sizeof(cstr)); // include trailing 0
  JSONTEST_ASSERT_EQUAL(4U, binary.length());
  {
    Json::Value root;
    root[binary] = "there";
    JSONTEST_ASSERT_STRING_EQUAL("there", root[binary].asString());
    JSONTEST_ASSERT(!root.isMember("h"));
    JSONTEST_ASSERT(root.isMember(binary));
    JSONTEST_ASSERT_STRING_EQUAL("there", root.get(binary, Json::Value::nullRef).asString());
    Json::Value removed;
    bool did;
    did = root.removeMember(binary.data(), binary.data() + binary.length(),
        &removed);
    JSONTEST_ASSERT(did);
    JSONTEST_ASSERT_STRING_EQUAL("there", removed.asString());
    did = root.removeMember(binary.data(), binary.data() + binary.length(),
        &removed);
    JSONTEST_ASSERT(!did);
    JSONTEST_ASSERT_STRING_EQUAL("there", removed.asString()); // still
    JSONTEST_ASSERT(!root.isMember(binary));
    JSONTEST_ASSERT_STRING_EQUAL("", root.get(binary, Json::Value::nullRef).asString());
  }
}
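
Both fixtures above rely on the length-aware overloads this commit adds (getString(), the begin/end form of removeMember(), valueToQuotedStringN()) so that embedded and trailing NULs survive a round trip. A compact restatement, an editor's sketch using only calls shown in this diff:

    std::string binary("h\0i", 4);   // embedded and trailing NUL
    Json::Value v = binary;          // std::string constructor keeps the length
    char const* str;
    char const* end;
    if (v.getString(&str, &end))     // length-aware accessor added here
      assert(end - str == 4);        // NULs preserved, not truncated at '\0'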

struct WriterTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(WriterTest, dropNullPlaceholders) {
@ -1510,6 +1662,39 @@ JSONTEST_FIXTURE(WriterTest, dropNullPlaceholders) {
  JSONTEST_ASSERT(writer.write(nullValue) == "\n");
}

struct StreamWriterTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(StreamWriterTest, dropNullPlaceholders) {
  Json::StreamWriterBuilder b;
  Json::Value nullValue;
  b.settings_["dropNullPlaceholders"] = false;
  JSONTEST_ASSERT(Json::writeString(b, nullValue) == "null");
  b.settings_["dropNullPlaceholders"] = true;
  JSONTEST_ASSERT(Json::writeString(b, nullValue) == "");
}

JSONTEST_FIXTURE(StreamWriterTest, writeZeroes) {
  std::string binary("hi", 3); // include trailing 0
  JSONTEST_ASSERT_EQUAL(3, binary.length());
  std::string expected("\"hi\\u0000\""); // zero escaped as \u0000
  Json::StreamWriterBuilder b;
  {
    Json::Value root;
    root = binary;
    JSONTEST_ASSERT_STRING_EQUAL(binary, root.asString());
    std::string out = Json::writeString(b, root);
    JSONTEST_ASSERT_EQUAL(expected.size(), out.size());
    JSONTEST_ASSERT_STRING_EQUAL(expected, out);
  }
  {
    Json::Value root;
    root["top"] = binary;
    JSONTEST_ASSERT_STRING_EQUAL(binary, root["top"].asString());
    std::string out = Json::writeString(b, root["top"]);
    JSONTEST_ASSERT_STRING_EQUAL(expected, out);
  }
}

struct ReaderTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(ReaderTest, parseWithNoErrors) {
@ -1601,12 +1786,561 @@ JSONTEST_FIXTURE(ReaderTest, parseWithDetailError) {
  JSONTEST_ASSERT(errors.at(0).message == "Bad escape sequence in string");
}

struct CharReaderTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(CharReaderTest, parseWithNoErrors) {
  Json::CharReaderBuilder b;
  Json::CharReader* reader(b.newCharReader());
  std::string errs;
  Json::Value root;
  char const doc[] = "{ \"property\" : \"value\" }";
  bool ok = reader->parse(
      doc, doc + std::strlen(doc),
      &root, &errs);
  JSONTEST_ASSERT(ok);
  JSONTEST_ASSERT(errs.size() == 0);
  delete reader;
}

JSONTEST_FIXTURE(CharReaderTest, parseWithNoErrorsTestingOffsets) {
  Json::CharReaderBuilder b;
  Json::CharReader* reader(b.newCharReader());
  std::string errs;
  Json::Value root;
  char const doc[] =
      "{ \"property\" : [\"value\", \"value2\"], \"obj\" : "
      "{ \"nested\" : 123, \"bool\" : true}, \"null\" : "
      "null, \"false\" : false }";
  bool ok = reader->parse(
      doc, doc + std::strlen(doc),
      &root, &errs);
  JSONTEST_ASSERT(ok);
  JSONTEST_ASSERT(errs.size() == 0);
  delete reader;
}

JSONTEST_FIXTURE(CharReaderTest, parseWithOneError) {
  Json::CharReaderBuilder b;
  Json::CharReader* reader(b.newCharReader());
  std::string errs;
  Json::Value root;
  char const doc[] =
      "{ \"property\" :: \"value\" }";
  bool ok = reader->parse(
      doc, doc + std::strlen(doc),
      &root, &errs);
  JSONTEST_ASSERT(!ok);
  JSONTEST_ASSERT(errs ==
                  "* Line 1, Column 15\n  Syntax error: value, object or array "
                  "expected.\n");
  delete reader;
}

JSONTEST_FIXTURE(CharReaderTest, parseChineseWithOneError) {
  Json::CharReaderBuilder b;
  Json::CharReader* reader(b.newCharReader());
  std::string errs;
  Json::Value root;
  char const doc[] =
      "{ \"pr佐藤erty\" :: \"value\" }";
  bool ok = reader->parse(
      doc, doc + std::strlen(doc),
      &root, &errs);
  JSONTEST_ASSERT(!ok);
  JSONTEST_ASSERT(errs ==
                  "* Line 1, Column 19\n  Syntax error: value, object or array "
                  "expected.\n");
  delete reader;
}

JSONTEST_FIXTURE(CharReaderTest, parseWithDetailError) {
  Json::CharReaderBuilder b;
  Json::CharReader* reader(b.newCharReader());
  std::string errs;
  Json::Value root;
  char const doc[] =
      "{ \"property\" : \"v\\alue\" }";
  bool ok = reader->parse(
      doc, doc + std::strlen(doc),
      &root, &errs);
  JSONTEST_ASSERT(!ok);
  JSONTEST_ASSERT(errs ==
                  "* Line 1, Column 16\n  Bad escape sequence in string\nSee "
                  "Line 1, Column 20 for detail.\n");
  delete reader;
}

JSONTEST_FIXTURE(CharReaderTest, parseWithStackLimit) {
  Json::CharReaderBuilder b;
  Json::Value root;
  char const doc[] =
      "{ \"property\" : \"value\" }";
  {
    b.settings_["stackLimit"] = 2;
    Json::CharReader* reader(b.newCharReader());
    std::string errs;
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT(errs == "");
    JSONTEST_ASSERT_EQUAL("value", root["property"]);
    delete reader;
  }
  {
    b.settings_["stackLimit"] = 1;
    Json::CharReader* reader(b.newCharReader());
    std::string errs;
    JSONTEST_ASSERT_THROWS(reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs));
    delete reader;
  }
}

struct CharReaderStrictModeTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(CharReaderStrictModeTest, dupKeys) {
  Json::CharReaderBuilder b;
  Json::Value root;
  char const doc[] =
      "{ \"property\" : \"value\", \"key\" : \"val1\", \"key\" : \"val2\" }";
  {
    b.strictMode(&b.settings_);
    Json::CharReader* reader(b.newCharReader());
    std::string errs;
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(!ok);
    JSONTEST_ASSERT_STRING_EQUAL(
        "* Line 1, Column 41\n"
        "  Duplicate key: 'key'\n",
        errs);
    JSONTEST_ASSERT_EQUAL("val1", root["key"]); // so far
    delete reader;
  }
}
struct CharReaderFailIfExtraTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(CharReaderFailIfExtraTest, issue164) {
  // This is interpreted as a string value followed by a colon.
  Json::CharReaderBuilder b;
  Json::Value root;
  char const doc[] =
      " \"property\" : \"value\" }";
  {
    b.settings_["failIfExtra"] = false;
    Json::CharReader* reader(b.newCharReader());
    std::string errs;
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT(errs == "");
    JSONTEST_ASSERT_EQUAL("property", root);
    delete reader;
  }
  {
    b.settings_["failIfExtra"] = true;
    Json::CharReader* reader(b.newCharReader());
    std::string errs;
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(!ok);
    JSONTEST_ASSERT_STRING_EQUAL(errs,
        "* Line 1, Column 13\n"
        "  Extra non-whitespace after JSON value.\n");
    JSONTEST_ASSERT_EQUAL("property", root);
    delete reader;
  }
  {
    b.settings_["failIfExtra"] = false;
    b.strictMode(&b.settings_);
    Json::CharReader* reader(b.newCharReader());
    std::string errs;
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(!ok);
    JSONTEST_ASSERT_STRING_EQUAL(errs,
        "* Line 1, Column 13\n"
        "  Extra non-whitespace after JSON value.\n");
    JSONTEST_ASSERT_EQUAL("property", root);
    delete reader;
  }
}
JSONTEST_FIXTURE(CharReaderFailIfExtraTest, issue107) {
  // This is interpreted as an int value followed by a colon.
  Json::CharReaderBuilder b;
  Json::Value root;
  char const doc[] =
      "1:2:3";
  b.settings_["failIfExtra"] = true;
  Json::CharReader* reader(b.newCharReader());
  std::string errs;
  bool ok = reader->parse(
      doc, doc + std::strlen(doc),
      &root, &errs);
  JSONTEST_ASSERT(!ok);
  JSONTEST_ASSERT_STRING_EQUAL(
      "* Line 1, Column 2\n"
      "  Extra non-whitespace after JSON value.\n",
      errs);
  JSONTEST_ASSERT_EQUAL(1, root.asInt());
  delete reader;
}
JSONTEST_FIXTURE(CharReaderFailIfExtraTest, commentAfterObject) {
  Json::CharReaderBuilder b;
  Json::Value root;
  {
    char const doc[] =
        "{ \"property\" : \"value\" } //trailing\n//comment\n";
    b.settings_["failIfExtra"] = true;
    Json::CharReader* reader(b.newCharReader());
    std::string errs;
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL("value", root["property"]);
    delete reader;
  }
}
JSONTEST_FIXTURE(CharReaderFailIfExtraTest, commentAfterArray) {
  Json::CharReaderBuilder b;
  Json::Value root;
  char const doc[] =
      "[ \"property\" , \"value\" ] //trailing\n//comment\n";
  b.settings_["failIfExtra"] = true;
  Json::CharReader* reader(b.newCharReader());
  std::string errs;
  bool ok = reader->parse(
      doc, doc + std::strlen(doc),
      &root, &errs);
  JSONTEST_ASSERT(ok);
  JSONTEST_ASSERT_STRING_EQUAL("", errs);
  JSONTEST_ASSERT_EQUAL("value", root[1u]);
  delete reader;
}
JSONTEST_FIXTURE(CharReaderFailIfExtraTest, commentAfterBool) {
  Json::CharReaderBuilder b;
  Json::Value root;
  char const doc[] =
      " true /*trailing\ncomment*/";
  b.settings_["failIfExtra"] = true;
  Json::CharReader* reader(b.newCharReader());
  std::string errs;
  bool ok = reader->parse(
      doc, doc + std::strlen(doc),
      &root, &errs);
  JSONTEST_ASSERT(ok);
  JSONTEST_ASSERT_STRING_EQUAL("", errs);
  JSONTEST_ASSERT_EQUAL(true, root.asBool());
  delete reader;
}
struct CharReaderAllowDropNullTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(CharReaderAllowDropNullTest, issue178) {
  Json::CharReaderBuilder b;
  b.settings_["allowDroppedNullPlaceholders"] = true;
  Json::Value root;
  std::string errs;
  Json::CharReader* reader(b.newCharReader());
  {
    char const doc[] = "{\"a\":,\"b\":true}";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(2u, root.size());
    JSONTEST_ASSERT_EQUAL(Json::nullValue, root.get("a", true));
  }
  {
    char const doc[] = "{\"a\":}";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(1u, root.size());
    JSONTEST_ASSERT_EQUAL(Json::nullValue, root.get("a", true));
  }
  {
    char const doc[] = "[]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT(errs == "");
    JSONTEST_ASSERT_EQUAL(0u, root.size());
    JSONTEST_ASSERT_EQUAL(Json::arrayValue, root);
  }
  {
    char const doc[] = "[null]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT(errs == "");
    JSONTEST_ASSERT_EQUAL(1u, root.size());
  }
  {
    char const doc[] = "[,]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(2u, root.size());
  }
  {
    char const doc[] = "[,,,]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(4u, root.size());
  }
  {
    char const doc[] = "[null,]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(2u, root.size());
  }
  {
    char const doc[] = "[,null]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT(errs == "");
    JSONTEST_ASSERT_EQUAL(2u, root.size());
  }
  {
    char const doc[] = "[,,]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(3u, root.size());
  }
  {
    char const doc[] = "[null,,]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(3u, root.size());
  }
  {
    char const doc[] = "[,null,]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(3u, root.size());
  }
  {
    char const doc[] = "[,,null]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT(errs == "");
    JSONTEST_ASSERT_EQUAL(3u, root.size());
  }
  {
    char const doc[] = "[[],,,]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(4u, root.size());
    JSONTEST_ASSERT_EQUAL(Json::arrayValue, root[0u]);
  }
  {
    char const doc[] = "[,[],,]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(4u, root.size());
    JSONTEST_ASSERT_EQUAL(Json::arrayValue, root[1u]);
  }
  {
    char const doc[] = "[,,,[]]";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT(errs == "");
    JSONTEST_ASSERT_EQUAL(4u, root.size());
    JSONTEST_ASSERT_EQUAL(Json::arrayValue, root[3u]);
  }
  delete reader;
}

struct CharReaderAllowSingleQuotesTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(CharReaderAllowSingleQuotesTest, issue182) {
  Json::CharReaderBuilder b;
  b.settings_["allowSingleQuotes"] = true;
  Json::Value root;
  std::string errs;
  Json::CharReader* reader(b.newCharReader());
  {
    char const doc[] = "{'a':true,\"b\":true}";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(2u, root.size());
    JSONTEST_ASSERT_EQUAL(true, root.get("a", false));
    JSONTEST_ASSERT_EQUAL(true, root.get("b", false));
  }
  {
    char const doc[] = "{'a': 'x', \"b\":'y'}";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(2u, root.size());
    JSONTEST_ASSERT_STRING_EQUAL("x", root["a"].asString());
    JSONTEST_ASSERT_STRING_EQUAL("y", root["b"].asString());
  }
  delete reader;
}

struct CharReaderAllowZeroesTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(CharReaderAllowZeroesTest, issue176) {
  Json::CharReaderBuilder b;
  b.settings_["allowSingleQuotes"] = true;
  Json::Value root;
  std::string errs;
  Json::CharReader* reader(b.newCharReader());
  {
    char const doc[] = "{'a':true,\"b\":true}";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(2u, root.size());
    JSONTEST_ASSERT_EQUAL(true, root.get("a", false));
    JSONTEST_ASSERT_EQUAL(true, root.get("b", false));
  }
  {
    char const doc[] = "{'a': 'x', \"b\":'y'}";
    bool ok = reader->parse(
        doc, doc + std::strlen(doc),
        &root, &errs);
    JSONTEST_ASSERT(ok);
    JSONTEST_ASSERT_STRING_EQUAL("", errs);
    JSONTEST_ASSERT_EQUAL(2u, root.size());
    JSONTEST_ASSERT_STRING_EQUAL("x", root["a"].asString());
    JSONTEST_ASSERT_STRING_EQUAL("y", root["b"].asString());
  }
  delete reader;
}
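
Taken together, the fixtures above document the new CharReaderBuilder knobs: stackLimit, failIfExtra, allowDroppedNullPlaceholders, allowSingleQuotes, and the strictMode() preset. A condensed usage sketch (editor's illustration; the settings keys and calls are exactly those exercised by the tests):

    Json::CharReaderBuilder b;
    b.settings_["failIfExtra"] = true;                  // reject trailing junk
    b.settings_["allowDroppedNullPlaceholders"] = true; // "[,null,]" parses
    Json::Value root;
    std::string errs;
    Json::CharReader* reader = b.newCharReader();
    char const doc[] = "{ \"property\" : \"value\" }";
    bool ok = reader->parse(doc, doc + std::strlen(doc), &root, &errs);
    if (!ok) std::cerr << errs;  // formatted "* Line X, Column Y" messages
    delete reader;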

struct BuilderTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(BuilderTest, settings) {
  {
    Json::Value errs;
    Json::CharReaderBuilder rb;
    JSONTEST_ASSERT_EQUAL(false, rb.settings_.isMember("foo"));
    JSONTEST_ASSERT_EQUAL(true, rb.validate(&errs));
    rb["foo"] = "bar";
    JSONTEST_ASSERT_EQUAL(true, rb.settings_.isMember("foo"));
    JSONTEST_ASSERT_EQUAL(false, rb.validate(&errs));
  }
  {
    Json::Value errs;
    Json::StreamWriterBuilder wb;
    JSONTEST_ASSERT_EQUAL(false, wb.settings_.isMember("foo"));
    JSONTEST_ASSERT_EQUAL(true, wb.validate(&errs));
    wb["foo"] = "bar";
    JSONTEST_ASSERT_EQUAL(true, wb.settings_.isMember("foo"));
    JSONTEST_ASSERT_EQUAL(false, wb.validate(&errs));
  }
}

struct IteratorTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(IteratorTest, distance) {
  Json::Value json;
  json["k1"] = "a";
  json["k2"] = "b";
  int dist = 0;
  std::string str;
  for (Json::ValueIterator it = json.begin(); it != json.end(); ++it) {
    dist = it - json.begin();
    str = it->asString().c_str();
  }
  JSONTEST_ASSERT_EQUAL(1, dist);
  JSONTEST_ASSERT_STRING_EQUAL("b", str);
}

JSONTEST_FIXTURE(IteratorTest, names) {
  Json::Value json;
  json["k1"] = "a";
  json["k2"] = "b";
  Json::ValueIterator it = json.begin();
  JSONTEST_ASSERT(it != json.end());
  JSONTEST_ASSERT_EQUAL(Json::Value("k1"), it.key());
  JSONTEST_ASSERT_STRING_EQUAL("k1", it.name());
  JSONTEST_ASSERT_EQUAL(-1, it.index());
  ++it;
  JSONTEST_ASSERT(it != json.end());
  JSONTEST_ASSERT_EQUAL(Json::Value("k2"), it.key());
  JSONTEST_ASSERT_STRING_EQUAL("k2", it.name());
  JSONTEST_ASSERT_EQUAL(-1, it.index());
  ++it;
  JSONTEST_ASSERT(it == json.end());
}

JSONTEST_FIXTURE(IteratorTest, indexes) {
  Json::Value json;
  json[0] = "a";
  json[1] = "b";
  Json::ValueIterator it = json.begin();
  JSONTEST_ASSERT(it != json.end());
  JSONTEST_ASSERT_EQUAL(Json::Value(Json::ArrayIndex(0)), it.key());
  JSONTEST_ASSERT_STRING_EQUAL("", it.name());
  JSONTEST_ASSERT_EQUAL(0, it.index());
  ++it;
  JSONTEST_ASSERT(it != json.end());
  JSONTEST_ASSERT_EQUAL(Json::Value(Json::ArrayIndex(1)), it.key());
  JSONTEST_ASSERT_STRING_EQUAL("", it.name());
  JSONTEST_ASSERT_EQUAL(1, it.index());
  ++it;
  JSONTEST_ASSERT(it == json.end());
}

int main(int argc, const char* argv[]) {
  JsonTest::Runner runner;
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, checkNormalizeFloatingPointStr);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, memberCount);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, objects);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, arrays);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, arrayIssue252);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, null);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, strings);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, bools);
@ -1623,6 +2357,15 @@ int main(int argc, const char* argv[]) {
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, compareType);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, offsetAccessors);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, typeChecksThrowExceptions);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, StaticString);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, CommentBefore);
  //JSONTEST_REGISTER_FIXTURE(runner, ValueTest, nulls);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, zeroes);
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, zeroesInKeys);

  JSONTEST_REGISTER_FIXTURE(runner, WriterTest, dropNullPlaceholders);
  JSONTEST_REGISTER_FIXTURE(runner, StreamWriterTest, dropNullPlaceholders);
  JSONTEST_REGISTER_FIXTURE(runner, StreamWriterTest, writeZeroes);

  JSONTEST_REGISTER_FIXTURE(runner, ReaderTest, parseWithNoErrors);
  JSONTEST_REGISTER_FIXTURE(
@ -1631,7 +2374,33 @@ int main(int argc, const char* argv[]) {
  JSONTEST_REGISTER_FIXTURE(runner, ReaderTest, parseChineseWithOneError);
  JSONTEST_REGISTER_FIXTURE(runner, ReaderTest, parseWithDetailError);

  JSONTEST_REGISTER_FIXTURE(runner, WriterTest, dropNullPlaceholders);
  JSONTEST_REGISTER_FIXTURE(runner, CharReaderTest, parseWithNoErrors);
  JSONTEST_REGISTER_FIXTURE(
      runner, CharReaderTest, parseWithNoErrorsTestingOffsets);
  JSONTEST_REGISTER_FIXTURE(runner, CharReaderTest, parseWithOneError);
  JSONTEST_REGISTER_FIXTURE(runner, CharReaderTest, parseChineseWithOneError);
  JSONTEST_REGISTER_FIXTURE(runner, CharReaderTest, parseWithDetailError);
  JSONTEST_REGISTER_FIXTURE(runner, CharReaderTest, parseWithStackLimit);

  JSONTEST_REGISTER_FIXTURE(runner, CharReaderStrictModeTest, dupKeys);

  JSONTEST_REGISTER_FIXTURE(runner, CharReaderFailIfExtraTest, issue164);
  JSONTEST_REGISTER_FIXTURE(runner, CharReaderFailIfExtraTest, issue107);
  JSONTEST_REGISTER_FIXTURE(runner, CharReaderFailIfExtraTest, commentAfterObject);
  JSONTEST_REGISTER_FIXTURE(runner, CharReaderFailIfExtraTest, commentAfterArray);
  JSONTEST_REGISTER_FIXTURE(runner, CharReaderFailIfExtraTest, commentAfterBool);

  JSONTEST_REGISTER_FIXTURE(runner, CharReaderAllowDropNullTest, issue178);

  JSONTEST_REGISTER_FIXTURE(runner, CharReaderAllowSingleQuotesTest, issue182);

  JSONTEST_REGISTER_FIXTURE(runner, CharReaderAllowZeroesTest, issue176);

  JSONTEST_REGISTER_FIXTURE(runner, BuilderTest, settings);

  JSONTEST_REGISTER_FIXTURE(runner, IteratorTest, distance);
  JSONTEST_REGISTER_FIXTURE(runner, IteratorTest, names);
  JSONTEST_REGISTER_FIXTURE(runner, IteratorTest, indexes);

  return runner.runCommandLine(argc, argv);
}
12
3rdparty/jsoncpp/test/cleantests.py
vendored
@ -1,10 +1,16 @@
# removes all files created during testing
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

"""Removes all files created during testing."""

import glob
import os

paths = []
for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]:
    paths += glob.glob( 'data/' + pattern )
    paths += glob.glob('data/' + pattern)

for path in paths:
    os.unlink( path )
    os.unlink(path)
4
3rdparty/jsoncpp/test/data/test_comment_00.expected
vendored
Normal file
@ -0,0 +1,4 @@
// Comment for array
.=[]
// Comment within array
.[0]="one-element"
5
3rdparty/jsoncpp/test/data/test_comment_00.json
vendored
Normal file
@ -0,0 +1,5 @@
// Comment for array
[
   // Comment within array
   "one-element"
]
@ -1,5 +1,7 @@
.={}
// Comment for array
.test=[]
// Comment within array
.test[0]={}
.test[0].a="aaa"
.test[1]={}

@ -1,6 +1,8 @@
{
    "test":
    // Comment for array
    [
        // Comment within array
        { "a" : "aaa" }, // Comment for a
        { "b" : "bbb" }, // Comment for b
        { "c" : "ccc" } // Comment for c
@ -11,4 +11,13 @@
// Multiline comment cpp-style
// Second line
.cpp-test.c=3
.cpp-test.d=4
// Comment before double
.cpp-test.d=4.1
// Comment before string
.cpp-test.e="e-string"
// Comment before true
.cpp-test.f=true
// Comment before false
.cpp-test.g=false
// Comment before null
.cpp-test.h=null
11
3rdparty/jsoncpp/test/data/test_comment_02.json
vendored
@ -12,6 +12,15 @@
    // Multiline comment cpp-style
    // Second line
    "c" : 3,
    "d" : 4
    // Comment before double
    "d" : 4.1,
    // Comment before string
    "e" : "e-string",
    // Comment before true
    "f" : true,
    // Comment before false
    "g" : false,
    // Comment before null
    "h" : null
  }
}
9 3rdparty/jsoncpp/test/generate_expected.py vendored
@ -1,10 +1,15 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

from __future__ import print_function
import glob
import os.path
for path in glob.glob( '*.json' ):
for path in glob.glob('*.json'):
text = file(path,'rt').read()
target = os.path.splitext(path)[0] + '.expected'
if os.path.exists( target ):
if os.path.exists(target):
print('skipping:', target)
else:
print('creating:', target)
57 3rdparty/jsoncpp/test/pyjsontestrunner.py vendored
@ -1,4 +1,11 @@
# Simple implementation of a json test runner to run the test against json-py.
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

"""Simple implementation of a json test runner to run the test against
json-py."""

from __future__ import print_function
import sys
import os.path
@ -15,50 +22,50 @@ actual_path = base_path + '.actual'
rewrite_path = base_path + '.rewrite'
rewrite_actual_path = base_path + '.actual-rewrite'

def valueTreeToString( fout, value, path = '.' ):
def valueTreeToString(fout, value, path = '.'):
ty = type(value)
if ty is types.DictType:
fout.write( '%s={}\n' % path )
fout.write('%s={}\n' % path)
suffix = path[-1] != '.' and '.' or ''
names = value.keys()
names.sort()
for name in names:
valueTreeToString( fout, value[name], path + suffix + name )
valueTreeToString(fout, value[name], path + suffix + name)
elif ty is types.ListType:
fout.write( '%s=[]\n' % path )
for index, childValue in zip( xrange(0,len(value)), value ):
valueTreeToString( fout, childValue, path + '[%d]' % index )
fout.write('%s=[]\n' % path)
for index, childValue in zip(xrange(0,len(value)), value):
valueTreeToString(fout, childValue, path + '[%d]' % index)
elif ty is types.StringType:
fout.write( '%s="%s"\n' % (path,value) )
fout.write('%s="%s"\n' % (path,value))
elif ty is types.IntType:
fout.write( '%s=%d\n' % (path,value) )
fout.write('%s=%d\n' % (path,value))
elif ty is types.FloatType:
fout.write( '%s=%.16g\n' % (path,value) )
fout.write('%s=%.16g\n' % (path,value))
elif value is True:
fout.write( '%s=true\n' % path )
fout.write('%s=true\n' % path)
elif value is False:
fout.write( '%s=false\n' % path )
fout.write('%s=false\n' % path)
elif value is None:
fout.write( '%s=null\n' % path )
fout.write('%s=null\n' % path)
else:
assert False and "Unexpected value type"

def parseAndSaveValueTree( input, actual_path ):
root = json.loads( input )
fout = file( actual_path, 'wt' )
valueTreeToString( fout, root )
def parseAndSaveValueTree(input, actual_path):
root = json.loads(input)
fout = file(actual_path, 'wt')
valueTreeToString(fout, root)
fout.close()
return root

def rewriteValueTree( value, rewrite_path ):
rewrite = json.dumps( value )
def rewriteValueTree(value, rewrite_path):
rewrite = json.dumps(value)
#rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ?
file( rewrite_path, 'wt').write( rewrite + '\n' )
file(rewrite_path, 'wt').write(rewrite + '\n')
return rewrite

input = file( input_path, 'rt' ).read()
root = parseAndSaveValueTree( input, actual_path )
rewrite = rewriteValueTree( json.write( root ), rewrite_path )
rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path )
input = file(input_path, 'rt').read()
root = parseAndSaveValueTree(input, actual_path)
rewrite = rewriteValueTree(json.write(root), rewrite_path)
rewrite_root = parseAndSaveValueTree(rewrite, rewrite_actual_path)

sys.exit( 0 )
sys.exit(0)
119 3rdparty/jsoncpp/test/runjsontests.py vendored
@ -1,17 +1,41 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

from __future__ import print_function
from __future__ import unicode_literals
from io import open
from glob import glob
import sys
import os
import os.path
from glob import glob
import optparse

VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes '

def compareOutputs( expected, actual, message ):
def getStatusOutput(cmd):
"""
Return int, unicode (for both Python 2 and 3).
Note: os.popen().close() would return None for 0.
"""
print(cmd, file=sys.stderr)
pipe = os.popen(cmd)
process_output = pipe.read()
try:
# We have been using os.popen(). When we read() the result
# we get 'str' (bytes) in py2, and 'str' (unicode) in py3.
# Ugh! There must be a better way to handle this.
process_output = process_output.decode('utf-8')
except AttributeError:
pass # python3
status = pipe.close()
return status, process_output
def compareOutputs(expected, actual, message):
expected = expected.strip().replace('\r','').split('\n')
actual = actual.strip().replace('\r','').split('\n')
diff_line = 0
max_line_to_compare = min( len(expected), len(actual) )
max_line_to_compare = min(len(expected), len(actual))
for index in range(0,max_line_to_compare):
if expected[index].strip() != actual[index].strip():
diff_line = index + 1
@ -20,7 +44,7 @@ def compareOutputs( expected, actual, message ):
diff_line = max_line_to_compare+1
if diff_line == 0:
return None
def safeGetLine( lines, index ):
def safeGetLine(lines, index):
index += -1
if index >= len(lines):
return ''
@ -30,65 +54,65 @@ def compareOutputs( expected, actual, message ):
Actual: '%s'
""" % (message, diff_line,
safeGetLine(expected,diff_line),
safeGetLine(actual,diff_line) )
safeGetLine(actual,diff_line))

def safeReadFile( path ):
def safeReadFile(path):
try:
return file( path, 'rt' ).read()
return open(path, 'rt', encoding = 'utf-8').read()
except IOError as e:
return '<File "%s" is missing: %s>' % (path,e)

def runAllTests( jsontest_executable_path, input_dir = None,
use_valgrind=False, with_json_checker=False ):
def runAllTests(jsontest_executable_path, input_dir = None,
use_valgrind=False, with_json_checker=False,
writerClass='StyledWriter'):
if not input_dir:
input_dir = os.path.join( os.getcwd(), 'data' )
tests = glob( os.path.join( input_dir, '*.json' ) )
input_dir = os.path.join(os.getcwd(), 'data')
tests = glob(os.path.join(input_dir, '*.json'))
if with_json_checker:
test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) )
test_jsonchecker = glob(os.path.join(input_dir, '../jsonchecker', '*.json'))
else:
test_jsonchecker = []
failed_tests = []
valgrind_path = use_valgrind and VALGRIND_CMD or ''
for input_path in tests + test_jsonchecker:
expect_failure = os.path.basename( input_path ).startswith( 'fail' )
expect_failure = os.path.basename(input_path).startswith('fail')
is_json_checker_test = (input_path in test_jsonchecker) or expect_failure
print('TESTING:', input_path, end=' ')
options = is_json_checker_test and '--json-checker' or ''
pipe = os.popen( '%s%s %s "%s"' % (
valgrind_path, jsontest_executable_path, options,
input_path) )
process_output = pipe.read()
status = pipe.close()
options += ' --json-writer %s'%writerClass
cmd = '%s%s %s "%s"' % ( valgrind_path, jsontest_executable_path, options,
input_path)
status, process_output = getStatusOutput(cmd)
if is_json_checker_test:
if expect_failure:
if status is None:
if not status:
print('FAILED')
failed_tests.append( (input_path, 'Parsing should have failed:\n%s' %
safeReadFile(input_path)) )
failed_tests.append((input_path, 'Parsing should have failed:\n%s' %
safeReadFile(input_path)))
else:
print('OK')
else:
if status is not None:
if status:
print('FAILED')
failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) )
failed_tests.append((input_path, 'Parsing failed:\n' + process_output))
else:
print('OK')
else:
base_path = os.path.splitext(input_path)[0]
actual_output = safeReadFile( base_path + '.actual' )
actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' )
file(base_path + '.process-output','wt').write( process_output )
actual_output = safeReadFile(base_path + '.actual')
actual_rewrite_output = safeReadFile(base_path + '.actual-rewrite')
open(base_path + '.process-output', 'wt', encoding = 'utf-8').write(process_output)
if status:
print('parsing failed')
failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) )
failed_tests.append((input_path, 'Parsing failed:\n' + process_output))
else:
expected_output_path = os.path.splitext(input_path)[0] + '.expected'
expected_output = file( expected_output_path, 'rt' ).read()
detail = ( compareOutputs( expected_output, actual_output, 'input' )
or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) )
expected_output = open(expected_output_path, 'rt', encoding = 'utf-8').read()
detail = (compareOutputs(expected_output, actual_output, 'input')
or compareOutputs(expected_output, actual_rewrite_output, 'rewrite'))
if detail:
print('FAILED')
failed_tests.append( (input_path, detail) )
failed_tests.append((input_path, detail))
else:
print('OK')

@ -100,7 +124,7 @@ def runAllTests( jsontest_executable_path, input_dir = None,
print(failed_test[1])
print()
print('Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests),
len(failed_tests) ))
len(failed_tests)))
return 1
else:
print('All %d tests passed.' % len(tests))
@ -108,7 +132,7 @@ def runAllTests( jsontest_executable_path, input_dir = None,

def main():
from optparse import OptionParser
parser = OptionParser( usage="%prog [options] <path to jsontestrunner.exe> [test case directory]" )
parser = OptionParser(usage="%prog [options] <path to jsontestrunner.exe> [test case directory]")
parser.add_option("--valgrind",
action="store_true", dest="valgrind", default=False,
help="run all the tests using valgrind to detect memory leaks")
@ -119,17 +143,32 @@ def main():
options, args = parser.parse_args()

if len(args) < 1 or len(args) > 2:
parser.error( 'Must provides at least path to jsontestrunner executable.' )
sys.exit( 1 )
parser.error('Must provides at least path to jsontestrunner executable.')
sys.exit(1)

jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) )
jsontest_executable_path = os.path.normpath(os.path.abspath(args[0]))
if len(args) > 1:
input_path = os.path.normpath( os.path.abspath( args[1] ) )
input_path = os.path.normpath(os.path.abspath(args[1]))
else:
input_path = None
status = runAllTests( jsontest_executable_path, input_path,
use_valgrind=options.valgrind, with_json_checker=options.with_json_checker )
sys.exit( status )
status = runAllTests(jsontest_executable_path, input_path,
use_valgrind=options.valgrind,
with_json_checker=options.with_json_checker,
writerClass='StyledWriter')
if status:
sys.exit(status)
status = runAllTests(jsontest_executable_path, input_path,
use_valgrind=options.valgrind,
with_json_checker=options.with_json_checker,
writerClass='StyledStreamWriter')
if status:
sys.exit(status)
status = runAllTests(jsontest_executable_path, input_path,
use_valgrind=options.valgrind,
with_json_checker=options.with_json_checker,
writerClass='BuiltStyledStreamWriter')
if status:
sys.exit(status)

if __name__ == '__main__':
main()
46 3rdparty/jsoncpp/test/rununittests.py vendored
@ -1,4 +1,11 @@
# Copyright 2009 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

from __future__ import print_function
from __future__ import unicode_literals
from io import open
from glob import glob
import sys
import os
@ -9,37 +16,41 @@ import optparse
VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'

class TestProxy(object):
def __init__( self, test_exe_path, use_valgrind=False ):
self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) )
def __init__(self, test_exe_path, use_valgrind=False):
self.test_exe_path = os.path.normpath(os.path.abspath(test_exe_path))
self.use_valgrind = use_valgrind

def run( self, options ):
def run(self, options):
if self.use_valgrind:
cmd = VALGRIND_CMD.split()
else:
cmd = []
cmd.extend( [self.test_exe_path, '--test-auto'] + options )
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
cmd.extend([self.test_exe_path, '--test-auto'] + options)
try:
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
except:
print(cmd)
raise
stdout = process.communicate()[0]
if process.returncode:
return False, stdout
return True, stdout

def runAllTests( exe_path, use_valgrind=False ):
test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
status, test_names = test_proxy.run( ['--list-tests'] )
def runAllTests(exe_path, use_valgrind=False):
test_proxy = TestProxy(exe_path, use_valgrind=use_valgrind)
status, test_names = test_proxy.run(['--list-tests'])
if not status:
print("Failed to obtain unit tests list:\n" + test_names, file=sys.stderr)
return 1
test_names = [name.strip() for name in test_names.strip().split('\n')]
test_names = [name.strip() for name in test_names.decode('utf-8').strip().split('\n')]
failures = []
for name in test_names:
print('TESTING %s:' % name, end=' ')
succeed, result = test_proxy.run( ['--test', name] )
succeed, result = test_proxy.run(['--test', name])
if succeed:
print('OK')
else:
failures.append( (name, result) )
failures.append((name, result))
print('FAILED')
failed_count = len(failures)
pass_count = len(test_names) - failed_count
@ -47,8 +58,7 @@ def runAllTests( exe_path, use_valgrind=False ):
print()
for name, result in failures:
print(result)
print('%d/%d tests passed (%d failure(s))' % (
pass_count, len(test_names), failed_count))
print('%d/%d tests passed (%d failure(s))' % ( pass_count, len(test_names), failed_count))
return 1
else:
print('All %d tests passed' % len(test_names))
@ -56,7 +66,7 @@ def runAllTests( exe_path, use_valgrind=False ):

def main():
from optparse import OptionParser
parser = OptionParser( usage="%prog [options] <path to test_lib_json.exe>" )
parser = OptionParser(usage="%prog [options] <path to test_lib_json.exe>")
parser.add_option("--valgrind",
action="store_true", dest="valgrind", default=False,
help="run all the tests using valgrind to detect memory leaks")
@ -64,11 +74,11 @@ def main():
options, args = parser.parse_args()

if len(args) != 1:
parser.error( 'Must provides at least path to test_lib_json executable.' )
sys.exit( 1 )
parser.error('Must provides at least path to test_lib_json executable.')
sys.exit(1)

exit_code = runAllTests( args[0], use_valgrind=options.valgrind )
sys.exit( exit_code )
exit_code = runAllTests(args[0], use_valgrind=options.valgrind)
sys.exit(exit_code)

if __name__ == '__main__':
main()
29 3rdparty/jsoncpp/travis.sh vendored Normal file
@ -0,0 +1,29 @@
#!/usr/bin/env sh
# This is called by `.travis.yml` via Travis CI.
# Travis supplies $TRAVIS_OS_NAME.
# http://docs.travis-ci.com/user/multi-os/
# Our .travis.yml also defines:
# - SHARED_LIB=ON/OFF
# - STATIC_LIB=ON/OFF
# - CMAKE_PKG=ON/OFF
# - BUILD_TYPE=release/debug
# - VERBOSE_MAKE=false/true
# - VERBOSE (set or not)

# -e: fail on error
# -v: show commands
# -x: show expanded commands
set -vex

env | sort

cmake -DJSONCPP_WITH_CMAKE_PACKAGE=$CMAKE_PKG -DBUILD_SHARED_LIBS=$SHARED_LIB -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE .
make

# Python is not available in Travis for osx.
# https://github.com/travis-ci/travis-ci/issues/2320
if [ "$TRAVIS_OS_NAME" != "osx" ]
then
make jsoncpp_check
valgrind --error-exitcode=42 --leak-check=full ./src/test_lib_json/jsoncpp_test
fi
2 3rdparty/jsoncpp/version vendored
@ -1 +1 @@
1.1.0
1.6.2
1 3rdparty/jsoncpp/version.in vendored Normal file
@ -0,0 +1 @@
@JSONCPP_VERSION@
8 3rdparty/lsqlite3/lsqlite3.c vendored
@ -47,7 +47,11 @@
#define luaL_openlib(L,name,reg,nup) luaL_setfuncs(L,reg,nup)
#endif

#ifndef USE_SYSTEM_SQLITE
#include "sqlite3/sqlite3.h"
#else
#include <sqlite3.h>
#endif

/* compile time features */
#if !defined(SQLITE_OMIT_PROGRESS_CALLBACK)
@ -260,7 +264,7 @@ static int dbvm_tostring(lua_State *L) {
if (svm->vm == NULL)
strcpy(buff, "closed");
else
sprintf(buff, "%p", svm);
sprintf(buff, "%p", (void *)svm);
lua_pushfstring(L, "sqlite virtual machine (%s)", buff);
return 1;
}
@ -743,7 +747,7 @@ static int lcontext_tostring(lua_State *L) {
if (ctx->ctx == NULL)
strcpy(buff, "closed");
else
sprintf(buff, "%p", ctx->ctx);
sprintf(buff, "%p", (void *) ctx->ctx);
lua_pushfstring(L, "sqlite function context (%s)", buff);
return 1;
}
30 3rdparty/mongoose/README.md vendored
@ -1,5 +1,7 @@
# <img src="http://cesanta.com/images/mongoose_logo.png" width="64" height="64"> Mongoose Web Server

[](https://gitter.im/cesanta/mongoose?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)

Mongoose is the most easy to use web server on the planet. A web server of choice for Web developers (PHP, Ruby, Python, etc) and Web designers.

Mongoose is built on top of Libmongoose embedded library, which can turn
@ -45,24 +47,13 @@ Download, double-click to start, run browser -- that's all!



# Acknowledgements
# Contributions

Mongoose made better thanks to the contribution of following people:

Arnout Vandecappelle, Benoît Amiaux, Boris Pek, Cody Hanson, Colin Leitner,
Daniel Oaks, Eric Bakan, Erik Oomen, Filipp Kovalev, Ger Hobbelt,
Hendrik Polczynski, Igor Okulist, Jay, Joe Mucchiello, John Safranek,
José Miguel Gonçalves, Shueng Chuan, Katerina Blinova, Konstantin Sorokin,
Marin Atanasov, Matt Healy, Mitch Hendrickson, Nigel Stewart, Pavel Khlebovich,
Sebastian Reinhard, Stefan Doehla, abadc0de, nullable.type,
T.Barmann, D.Hughes, J.C.Sloan, R.Romeo, L.E.Spencer, S.Kotay, R.M.Shorter,
W.Mar, J.Wilander, Santa from Memphis, S.Davies, C.Beck,
O.M.Vilhunen, C.Radik, G.Woodcock, M.Szczepkowski,
Eternal Lands Dev Team, T.Tollet, C.Tangerino, G.Karsai, A.Bourgett,
C.Blakemore, D.Fonaryov, T.Andrle, O.IJsselmuiden, R.Womack, M.Tomlinson,
A.Slåttå, L.Farrell, J.D.P.Ballestero, V.Albaev, B.Harker, T.Scheffel, H.Klein,
R.Merit, T.Bennett, H.Solis, A.Zincenko, M.S., S.Krul, K.Cooke, S.McCallum,
F.Morenius, and 10 others.
People who have agreed to the
[Cesanta CLA](http://cesanta.com/contributors_la.html)
can make contributions. Note that the CLA isn't a copyright
_assigment_ but rather a copyright _license_.
You retain the copyright on your contributions.

# Licensing

@ -72,11 +63,11 @@ source licenses. The GPLv2 open source License does not generally permit
incorporating this software into non-open source programs.
For those customers who do not wish to comply with the GPLv2 open
source license requirements,
[Cesanta Software](http://cesanta.com) offers a full,
[Cesanta](http://cesanta.com) offers a full,
royalty-free commercial license and professional support
without any of the GPL restrictions.

# Other products by Cesanta Software: simple and effective
# Other products by Cesanta

- [Fossa](http://github.com/cesanta/fossa) - Multi-protocol networking library
- [SSL Wrapper](https://github.com/cesanta/ssl_wrapper) - application to
@ -84,3 +75,4 @@ without any of the GPL restrictions.
- [Frozen](https://github.com/cesanta/frozen) - JSON parser and generator
- [SLRE](https://github.com/cesanta/slre) - Super Light Regular Expression
library
- [V7](https://github.com/cesanta/v7) - Embedded JavaScript engine
4 3rdparty/mongoose/docs/ReleaseNotes.md vendored
@ -168,7 +168,7 @@ Changes in pre-compiled binaries:
since mongoose buffers all data prior to calling the callback
* keep-alive support is the default
* Dropped SSI support and throttling support
* Several configuraition parameters are gone:
* Several configuration parameters are gone:
* `cgi_environment` (replaced with MONGOOSE_CGI),
* `protect_uri` (not useful)
* `ssi_pattern` (SSI support is gone)
@ -214,5 +214,5 @@ Changes in pre-compiled binaries:
* Couple of bugfixes, thanks to contributors


Eearlier release notes could be found by searching
Earlier release notes could be found by searching
[Mongoose mailing list](https://groups.google.com/forum/#!forum/mongoose-users)
21 3rdparty/mongoose/examples/array_vars/Makefile vendored Normal file
@ -0,0 +1,21 @@
# Copyright (c) 2014 Cesanta Software
# All rights reserved

PROG = array_vars
CFLAGS = -W -Wall -I../.. -pthread -g -O0 $(CFLAGS_EXTRA)
SOURCES = $(PROG).c ../../mongoose.c

all: $(PROG)

run: $(PROG)
./$(PROG)

$(PROG): $(SOURCES) Makefile
$(CC) -o $(PROG) $(SOURCES) $(CFLAGS)

win:
wine cl $(SOURCES) /MD /nologo /DNDEBUG /O1 /I../.. /Fe$(PROG).exe
wine $(PROG).exe

clean:
rm -rf $(PROG) *.exe *.dSYM *.obj *.exp .*o *.lib *.gc*
45 3rdparty/mongoose/examples/array_vars/array_vars.c vendored Normal file
@ -0,0 +1,45 @@
// Copyright (c) 2014 Cesanta Software
// All rights reserved
//
// This example demostrates how to use array get variables using mg_get_n_var
// $Date: 2014-09-09 22:20:23 UTC $

#include <stdio.h>
#include <string.h>
#include "mongoose.h"

static int ev_handler(struct mg_connection *conn, enum mg_event ev) {
switch (ev) {
case MG_AUTH: return MG_TRUE;
case MG_REQUEST:
{
mg_printf_data(conn, "Hello! Requested URI is [%s] ", conn->uri);
char buffer[1024];
int i, ret;
for(i=0; (ret = mg_get_var_n(conn, "foo[]", buffer, 1024, i)) > 0; i++)
mg_printf_data(conn, "\nfoo[%d] = %s", i, buffer);

return MG_TRUE;
}
default: return MG_FALSE;
}
}

int main(void) {
struct mg_server *server;

// Create and configure the server
server = mg_create_server(NULL, ev_handler);
mg_set_option(server, "listening_port", "8080");

// Serve request. Hit Ctrl-C to terminate the program
printf("Starting on port %s\n", mg_get_option(server, "listening_port"));
for (;;) {
mg_poll_server(server, 1000);
}

// Cleanup, and free server instance
mg_destroy_server(&server);

return 0;
}
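The example above depends on the occurrence index n of the new mg_get_var_n() API added elsewhere in this commit (see the mongoose.h hunk below). As a minimal sketch of that index, not part of the commit itself (handler name and buffer sizes are illustrative):

// Sketch only: fetch two occurrences of "foo[]" by explicit index.
// Per the get_var() change later in this commit, the return value is the
// decoded value length, -1 when the n-th occurrence is absent, and -2
// when dst is NULL or too small for the value.
#include "mongoose.h"

static void show_first_two(struct mg_connection *conn) {
  char a[64], b[64];
  if (mg_get_var_n(conn, "foo[]", a, sizeof(a), 0) >= 0 &&
      mg_get_var_n(conn, "foo[]", b, sizeof(b), 1) >= 0) {
    mg_printf_data(conn, "first=%s, second=%s", a, b);
  }
}

With a request such as /?foo[]=one&foo[]=two, the query string is "foo[]=one&foo[]=two" and successive n values select successive occurrences of the same name.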
@ -58,6 +58,7 @@ static int check_login_form_submission(struct mg_connection *conn) {
mg_printf(conn,
"HTTP/1.1 302 Moved\r\n"
"Set-Cookie: ssid=%s; expire=\"%s\"; http-only; HttpOnly;\r\n"
"Content-Length: 0\r\n"
"Location: /\r\n\r\n",
ssid, expire);
return MG_TRUE;
@ -7,23 +7,25 @@

static int send_index_page(struct mg_connection *conn) {
const char *data;
int data_len, ofs = 0;
int data_len, n1, n2;
char var_name[100], file_name[100];

mg_printf_data(conn, "%s",
"<html><body>Upload example."
"<form method=\"POST\" action=\"/handle_post_request\" "
" enctype=\"multipart/form-data\">"
"<input type=\"file\" name=\"file\" /> <br/>"
"<input type=\"file\" name=\"file1\" /> <br/>"
"<input type=\"file\" name=\"file2\" /> <br/>"
"<input type=\"submit\" value=\"Upload\" />"
"</form>");

while ((ofs = mg_parse_multipart(conn->content + ofs, conn->content_len - ofs,
var_name, sizeof(var_name),
file_name, sizeof(file_name),
&data, &data_len)) > 0) {
n1 = n2 = 0;
while ((n2 = mg_parse_multipart(conn->content + n1, conn->content_len - n1,
var_name, sizeof(var_name), file_name,
sizeof(file_name), &data, &data_len)) > 0) {
mg_printf_data(conn, "var: %s, file_name: %s, size: %d bytes<br>",
var_name, file_name, data_len);
n1 += n2;
}

mg_printf_data(conn, "%s", "</body></html>");
@ -2,7 +2,7 @@
# All rights reserved

PROG = web_server
CFLAGS = -W -Wall -I../.. -pthread -g -O0 -DMONGOOSE_ENABLE_THREADS $(CFLAGS_EXTRA)
CFLAGS = -W -Wall -I../.. -g -O0 $(CFLAGS_EXTRA)
SOURCES = $(PROG).c ../../mongoose.c
OPENSSL_FLAGS = -DNS_ENABLE_SSL -lssl

@ -13,16 +13,18 @@ SOURCES_POLAR = $(SOURCES) $(POLARSSLCOMPAT_PATH)/polarssl_compat.c
INCDIR_POLAR = -I$(POLARSSLCOMPAT_PATH) -I$(POLARSSL_PATH)/include
LDFLAGS_POLAR = -L$(POLARSSL_PATH)/lib -lmbedtls
CFLAGS_POLAR = $(CFLAGS) $(INCDIR_POLAR) -DNS_ENABLE_SSL
#

$(PROG): $(SOURCES)
$(CC) -o $(PROG) $(SOURCES) $(CFLAGS)

$(PROG).exe: $(SOURCES)
cl -Fo $(PROG) $(SOURCES) -nologo -MD -I../..

openssl:
$(CC) -o $(PROG) $(SOURCES) $(CFLAGS) $(OPENSSL_FLAGS)

polarssl:
$(CC) -o $(PROG) $(SOURCES_POLAR) $(LDFLAGS_POLAR) $(CFLAGS_POLAR)


clean:
rm -rf $(PROG) *.exe *.dSYM *.obj *.exp .*o *.lib
@ -40,7 +40,9 @@
#define DIRSEP '\\'
#define snprintf _snprintf
#define vsnprintf _vsnprintf
#ifndef sleep
#define sleep(x) Sleep((x) * 1000)
#endif
#define abs_path(rel, abs, abs_size) _fullpath((abs), (rel), (abs_size))
#define SIGCHLD 0
typedef struct _stat file_stat_t;
228 3rdparty/mongoose/mongoose.c vendored
@ -63,6 +63,10 @@
#pragma warning (disable : 4204) // missing c99 support
#endif

#if defined(_WIN32) && !defined(MONGOOSE_NO_CGI) && !defined(MONGOOSE_ENABLE_THREADS)
#define MONGOOSE_ENABLE_THREADS /* Windows uses stdio threads for CGI */
#endif

#ifndef MONGOOSE_ENABLE_THREADS
#define NS_DISABLE_THREADS
#endif
@ -91,6 +95,9 @@
#include <BaseTsd.h>
typedef SSIZE_T ssize_t;
#endif
#ifndef FD_SETSIZE
#define FD_SETSIZE 1024
#endif
#include <winsock2.h>
#include <ws2tcpip.h>
#include <windows.h>
@ -231,6 +238,7 @@ struct ns_connection {

sock_t sock; // Socket
union socket_address sa; // Peer address
size_t recv_iobuf_limit; /* Max size of recv buffer */
struct iobuf recv_iobuf; // Received data
struct iobuf send_iobuf; // Data scheduled for sending
SSL *ssl;
@ -250,6 +258,7 @@ struct ns_connection {
#define NSF_WANT_WRITE (1 << 6)
#define NSF_LISTENING (1 << 7)
#define NSF_UDP (1 << 8)
#define NSF_DISCARD (1 << 9)

#define NSF_USER_1 (1 << 20)
#define NSF_USER_2 (1 << 21)
@ -326,6 +335,7 @@ int ns_resolve(const char *domain_name, char *ip_addr_buf, size_t buf_len);
#define NS_CALLOC calloc
#endif

#define NS_MAX_SOCKETPAIR_ATTEMPTS 10
#define NS_CTL_MSG_MESSAGE_SIZE (8 * 1024)
#define NS_READ_BUFFER_SIZE 2048
#define NS_UDP_RECEIVE_BUFFER_SIZE 2000
@ -854,7 +864,17 @@ static int ns_is_error(int n) {
#ifdef _WIN32
&& WSAGetLastError() != WSAEINTR && WSAGetLastError() != WSAEWOULDBLOCK
#endif
);
)
#ifdef NS_ENABLE_SSL
/*
* OpenSSL can return an error when the peer is closing the socket.
* We don't encounter this error with openssl actually, but it's returned
* by our polarssl <-> openssl wrapper who tries to speak the openssl API
* as we understood it.
*/
|| n == SSL_AD_CLOSE_NOTIFY
#endif
;
}

void ns_sock_to_str(sock_t sock, char *buf, size_t len, int flags) {
@ -1056,7 +1076,7 @@ static void ns_handle_udp(struct ns_connection *ls) {
}

static void ns_add_to_set(sock_t sock, fd_set *set, sock_t *max_fd) {
if (sock != INVALID_SOCKET) {
if ( (sock != INVALID_SOCKET) && (sock < FD_SETSIZE) ) {
FD_SET(sock, set);
if (*max_fd == INVALID_SOCKET || sock > *max_fd) {
*max_fd = sock;
@ -1099,7 +1119,9 @@ time_t ns_mgr_poll(struct ns_mgr *mgr, int milli) {
tv.tv_sec = milli / 1000;
tv.tv_usec = (milli % 1000) * 1000;

if (select((int) max_fd + 1, &read_set, &write_set, NULL, &tv) > 0) {
if (select((int) max_fd + 1, &read_set, &write_set, NULL, &tv) < 0) {
return 0;
} else {
// select() might have been waiting for a long time, reset current_time
// now to prevent last_io_time being set to the past.
current_time = time(NULL);
@ -1250,9 +1272,12 @@ void ns_mgr_init(struct ns_mgr *s, void *user_data) {
#endif

#ifndef NS_DISABLE_SOCKETPAIR
do {
ns_socketpair2(s->ctl, SOCK_DGRAM);
} while (s->ctl[0] == INVALID_SOCKET);
{
int attempts = 0, max_attempts = NS_MAX_SOCKETPAIR_ATTEMPTS;
do {
ns_socketpair2(s->ctl, SOCK_DGRAM);
} while (s->ctl[0] == INVALID_SOCKET && ++attempts < max_attempts);
}
#endif

#ifdef NS_ENABLE_SSL
@ -1404,7 +1429,7 @@ struct dir_entry {
file_stat_t st;
};

// NOTE(lsm): this enum shoulds be in sync with the config_options.
// NOTE(lsm): this enum should be in sync with the config_options.
enum {
ACCESS_CONTROL_LIST,
#ifndef MONGOOSE_NO_FILESYSTEM
@ -1948,13 +1973,21 @@ static void write_chunk(struct connection *conn, const char *buf, int len) {
}

size_t mg_printf(struct mg_connection *conn, const char *fmt, ...) {
struct connection *c = MG_CONN_2_CONN(conn);
va_list ap;
int ret;

va_start(ap, fmt);
ns_vprintf(c->ns_conn, fmt, ap);
ret = mg_vprintf(conn, fmt, ap);
va_end(ap);

return ret;
}

size_t mg_vprintf(struct mg_connection *conn, const char *fmt, va_list ap) {
struct connection *c = MG_CONN_2_CONN(conn);

ns_vprintf(c->ns_conn, fmt, ap);

return c->ns_conn->send_iobuf.len;
}

@ -1973,6 +2006,8 @@ struct threadparam {

static int wait_until_ready(sock_t sock, int for_read) {
fd_set set;
if ( (sock == INVALID_SOCKET) || (sock >= FD_SETSIZE) )
return 0;
FD_ZERO(&set);
FD_SET(sock, &set);
select(sock + 1, for_read ? &set : 0, for_read ? 0 : &set, 0, 0);
@ -1992,7 +2027,7 @@ static void *push_to_stdin(void *arg) {
if (!WriteFile(tp->hPipe, buf + sent, n - sent, &k, 0)) stop = 1;
}
}
DBG(("%s", "FORWARED EVERYTHING TO CGI"));
DBG(("%s", "FORWARDED EVERYTHING TO CGI"));
CloseHandle(tp->hPipe);
NS_FREE(tp);
_endthread();
@ -2312,9 +2347,17 @@ static void open_cgi_endpoint(struct connection *conn, const char *prog) {
// Try to create socketpair in a loop until success. ns_socketpair()
// can be interrupted by a signal and fail.
// TODO(lsm): use sigaction to restart interrupted syscall
do {
ns_socketpair(fds);
} while (fds[0] == INVALID_SOCKET);
{
int attempts = 0, max_attempts = NS_MAX_SOCKETPAIR_ATTEMPTS;
do {
ns_socketpair(fds);
} while (fds[0] == INVALID_SOCKET && ++attempts < max_attempts);

if (fds[0] == INVALID_SOCKET) {
closesocket(fds[0]);
send_http_error(conn, 500, "ns_socketpair() failed");
}
}

if (start_process(conn->server->config_options[CGI_INTERPRETER],
prog, blk.buf, blk.vars, dir, fds[1]) != 0) {
@ -2503,7 +2546,7 @@ static size_t parse_http_message(char *buf, size_t len,

buf[len - 1] = '\0';

// RFC says that all initial whitespaces should be ingored
// RFC says that all initial whitespaces should be ignored
while (*buf != '\0' && isspace(* (unsigned char *) buf)) {
buf++;
}
@ -2677,7 +2720,8 @@ static int convert_uri_to_file_name(struct connection *conn, char *buf,
#endif
const char *uri = conn->mg_conn.uri;
const char *domain = mg_get_header(&conn->mg_conn, "Host");
size_t match_len, root_len = root == NULL ? 0 : strlen(root);
// Important: match_len has to be declared as int, unless rewrites break.
int match_len, root_len = root == NULL ? 0 : strlen(root);

// Perform virtual hosting rewrites
if (rewrites != NULL && domain != NULL) {
@ -2784,16 +2828,24 @@ size_t mg_send_data(struct mg_connection *c, const void *data, int data_len) {
}

size_t mg_printf_data(struct mg_connection *c, const char *fmt, ...) {
struct connection *conn = MG_CONN_2_CONN(c);
va_list ap;
int ret;

va_start(ap, fmt);
ret = mg_vprintf_data(c, fmt, ap);
va_end(ap);

return ret;
}

size_t mg_vprintf_data(struct mg_connection *c, const char *fmt, va_list ap) {
struct connection *conn = MG_CONN_2_CONN(c);
int len;
char mem[IOBUF_SIZE], *buf = mem;

terminate_headers(c);

va_start(ap, fmt);
len = ns_avprintf(&buf, sizeof(mem), fmt, ap);
va_end(ap);

if (len >= 0) {
write_chunk((struct connection *) conn, buf, len);
@ -2816,7 +2868,7 @@ static int is_big_endian(void) {
// Copyright(c) By Steve Reid <steve@edmweb.com>
#define SHA1HANDSOFF
#if defined(__sun)
//#include "solarisfixes.h"
#include "solarisfixes.h"
#endif

union char64long16 { unsigned char c[64]; uint32_t l[16]; };
@ -3033,7 +3085,8 @@ static size_t deliver_websocket_frame(struct connection *conn) {
}

// Call the handler and remove frame from the iobuf
if (call_user(conn, MG_REQUEST) == MG_FALSE) {
if (call_user(conn, MG_REQUEST) == MG_FALSE ||
(buf[0] & 0x0f) == WEBSOCKET_OPCODE_CONNECTION_CLOSE) {
conn->ns_conn->flags |= NSF_FINISHED_SENDING_DATA;
}
iobuf_remove(&conn->ns_conn->recv_iobuf, frame_len);
@ -3314,14 +3367,17 @@ static int find_index_file(struct connection *conn, char *path,

// If no index file exists, restore directory path
if (!found) {
path[n] = '\0';
path[n] = '/';
path[n + 1] = '\0';
}

return found;
}

static int parse_range_header(const char *header, int64_t *a, int64_t *b) {
return sscanf(header, "bytes=%" INT64_FMT "-%" INT64_FMT, a, b);
// return sscanf(header, "bytes=%" INT64_FMT "-%" INT64_FMT, a, b);
// return sscanf(header, "bytes=%ld-%ld" INT64_FMT, a, b);
return 0;
}

static void gmt_time_string(char *buf, size_t buf_len, time_t *t) {
@ -3332,7 +3388,7 @@ static void open_file_endpoint(struct connection *conn, const char *path,
file_stat_t *st, const char *extra_headers) {
char date[64], lm[64], etag[64], range[64], headers[1000];
const char *msg = "OK", *hdr;
time_t curtime = time(NULL);
time_t t, curtime = time(NULL);
int64_t r1, r2;
struct vec mime_vec;
int n;
@ -3362,7 +3418,7 @@ static void open_file_endpoint(struct connection *conn, const char *path,
// Prepare Etag, Date, Last-Modified headers. Must be in UTC, according to
// http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.3
gmt_time_string(date, sizeof(date), &curtime);
time_t t = st->st_mtime;
t = st->st_mtime; // store in local variable for NDK compile
gmt_time_string(lm, sizeof(lm), &t);
construct_etag(etag, sizeof(etag), st);

@ -3512,11 +3568,11 @@ static int scan_directory(struct connection *conn, const char *dir,
}
mg_snprintf(path, sizeof(path), "%s%c%s", dir, '/', dp->d_name);

// Resize the array if nesessary
// Resize the array if necessary
if (arr_ind >= arr_size) {
if ((p = (struct dir_entry *)
NS_REALLOC(*arr, (inc + arr_size) * sizeof(**arr))) != NULL) {
// Memset new chunk to zero, otherwize st_mtime will have garbage which
// Memset new chunk to zero, otherwise st_mtime will have garbage which
// can make strftime() segfault, see
// http://code.google.com/p/mongoose/issues/detail?id=79
memset(p + arr_size, 0, sizeof(**arr) * inc);
@ -3567,6 +3623,7 @@ static void print_dir_entry(const struct dir_entry *de) {
int64_t fsize = de->st.st_size;
int is_dir = S_ISDIR(de->st.st_mode);
const char *slash = is_dir ? "/" : "";
time_t t;

if (is_dir) {
mg_snprintf(size, sizeof(size), "%s", "[DIRECTORY]");
@ -3583,7 +3640,7 @@ static void print_dir_entry(const struct dir_entry *de) {
mg_snprintf(size, sizeof(size), "%.1fG", (double) fsize / 1073741824);
}
}
time_t t = de->st.st_mtime;
t = de->st.st_mtime; // store in local variable for NDK compile
strftime(mod, sizeof(mod), "%d-%b-%Y %H:%M", localtime(&t));
mg_url_encode(de->file_name, strlen(de->file_name), href, sizeof(href));
mg_printf_data(&de->conn->mg_conn,
@ -3639,12 +3696,14 @@ static void send_directory_listing(struct connection *conn, const char *dir) {
sort_direction, sort_direction, sort_direction);

num_entries = scan_directory(conn, dir, &arr);
qsort(arr, num_entries, sizeof(arr[0]), compare_dir_entries);
for (i = 0; i < num_entries; i++) {
print_dir_entry(&arr[i]);
NS_FREE(arr[i].file_name);
if (arr) {
qsort(arr, num_entries, sizeof(arr[0]), compare_dir_entries);
for (i = 0; i < num_entries; i++) {
print_dir_entry(&arr[i]);
NS_FREE(arr[i].file_name);
}
NS_FREE(arr);
}
NS_FREE(arr);

write_terminating_chunk(conn);
close_local_endpoint(conn);
@ -3655,7 +3714,7 @@ static void send_directory_listing(struct connection *conn, const char *dir) {
static void print_props(struct connection *conn, const char *uri,
file_stat_t *stp) {
char mtime[64];
time_t t = stp->st_mtime;
time_t t = stp->st_mtime; // store in local variable for NDK compile
gmt_time_string(mtime, sizeof(mtime), &t);
mg_printf(&conn->mg_conn,
"<d:response>"
@ -3861,11 +3920,16 @@ void mg_send_digest_auth_request(struct mg_connection *c) {
c->status_code = 401;
mg_printf(c,
"HTTP/1.1 401 Unauthorized\r\n"
"Content-Length: 0\r\n"
"WWW-Authenticate: Digest qop=\"auth\", "
"realm=\"%s\", nonce=\"%lu\"\r\n\r\n",
conn->server->config_options[AUTH_DOMAIN],
(unsigned long) time(NULL));
close_local_endpoint(conn);
if (conn->cl > 0) {
conn->ns_conn->flags |= NSF_DISCARD;
} else {
close_local_endpoint(conn);
}
}

// Use the global passwords file, if specified by auth_gpass option,
@ -4285,7 +4349,7 @@ static void do_ssi_include(struct mg_connection *conn, const char *ssi,
mg_snprintf(path, sizeof(path), "%s", file_name);
} else if (sscanf(tag, " file=\"%[^\"]\"", file_name) == 1 ||
sscanf(tag, " \"%[^\"]\"", file_name) == 1) {
// File name is relative to the currect document
// File name is relative to the current document
mg_snprintf(path, sizeof(path), "%s", ssi);
if ((p = strrchr(path, '/')) != NULL) {
p[1] = '\0';
@ -4697,6 +4761,10 @@ static void try_parse(struct connection *conn) {
// iobuf could be reallocated, and pointers in parsed request could
// become invalid.
conn->request = (char *) NS_MALLOC(conn->request_len);
if (conn->request == NULL) {
conn->ns_conn->flags |= NSF_CLOSE_IMMEDIATELY;
return;
}
memcpy(conn->request, io->buf, conn->request_len);
//DBG(("%p [%.*s]", conn, conn->request_len, conn->request));
iobuf_remove(io, conn->request_len);
@ -4734,6 +4802,19 @@ static void on_recv_data(struct connection *conn) {
return;
}

if (conn->ns_conn->flags & NSF_DISCARD) {
size_t n = conn->cl;
if (n > io->len) {
n = io->len;
}
iobuf_remove(io, n);
conn->cl -= n;
if (conn->cl == 0) {
close_local_endpoint(conn);
}
return;
}

try_parse(conn);
DBG(("%p %d %lu %d", conn, conn->request_len, (unsigned long)io->len,
conn->ns_conn->flags));
@ -4913,7 +4994,7 @@ static void close_local_endpoint(struct connection *conn) {

conn->endpoint_type = EP_NONE;
conn->cl = conn->num_bytes_recv = conn->request_len = 0;
conn->ns_conn->flags &= ~(NSF_FINISHED_SENDING_DATA |
conn->ns_conn->flags &= ~(NSF_FINISHED_SENDING_DATA | NSF_DISCARD |
NSF_BUFFER_BUT_DONT_SEND | NSF_CLOSE_IMMEDIATELY |
MG_HEADERS_SENT | MG_USING_CHUNKED_API);

@ -4921,7 +5002,7 @@ static void close_local_endpoint(struct connection *conn) {
// (IP addresses & ports, server_param) must survive. Nullify the rest.
c->request_method = c->uri = c->http_version = c->query_string = NULL;
c->num_headers = c->status_code = c->is_websocket = c->content_len = 0;
c->connection_param = c->callback_param = NULL;
c->callback_param = NULL;

if (keep_alive) {
on_recv_data(conn); // Can call us recursively if pipelining is used
@ -4983,20 +5064,17 @@ struct mg_connection *mg_next(struct mg_server *s, struct mg_connection *c) {
}

static int get_var(const char *data, size_t data_len, const char *name,
char *dst, size_t dst_len) {
const char *p, *e, *s;
char *dst, size_t dst_len, int n) {
const char *p, *e = data + data_len, *s;
size_t name_len;
int len;
int i = 0, len = -1;

if (dst == NULL || dst_len == 0) {
len = -2;
} else if (data == NULL || name == NULL || data_len == 0) {
len = -1;
dst[0] = '\0';
} else {
name_len = strlen(name);
e = data + data_len;
len = -1;
dst[0] = '\0';

// data is "var1=val1&var2=val2...". Find variable first
@ -5004,6 +5082,8 @@ static int get_var(const char *data, size_t data_len, const char *name,
if ((p == data || p[-1] == '&') && p[name_len] == '=' &&
!mg_strncasecmp(name, p, name_len)) {

if (n != i++) continue;

// Point p to variable value
p += name_len + 1;

@ -5029,16 +5109,21 @@ static int get_var(const char *data, size_t data_len, const char *name,
return len;
}

int mg_get_var(const struct mg_connection *conn, const char *name,
char *dst, size_t dst_len) {
int mg_get_var_n(const struct mg_connection *conn, const char *name,
char *dst, size_t dst_len, int n) {
int len = get_var(conn->query_string, conn->query_string == NULL ? 0 :
strlen(conn->query_string), name, dst, dst_len);
if (len < 0) {
len = get_var(conn->content, conn->content_len, name, dst, dst_len);
strlen(conn->query_string), name, dst, dst_len, n);
if (len == -1) {
len = get_var(conn->content, conn->content_len, name, dst, dst_len, n);
}
return len;
}

int mg_get_var(const struct mg_connection *conn, const char *name,
char *dst, size_t dst_len) {
return mg_get_var_n(conn, name, dst, dst_len, 0);
}

static int get_line_len(const char *buf, int buf_len) {
int len = 0;
while (len < buf_len && buf[len] != '\n') len++;
@ -5095,6 +5180,14 @@ void mg_copy_listeners(struct mg_server *s, struct mg_server *to) {
if ((c->flags & NSF_LISTENING) &&
(tmp = (struct ns_connection *) NS_MALLOC(sizeof(*tmp))) != NULL) {
memcpy(tmp, c, sizeof(*tmp));

#if defined(NS_ENABLE_SSL) && defined(HEADER_SSL_H)
/* OpenSSL only. See https://github.com/cesanta/mongoose/issues/441 */
if (tmp->ssl_ctx != NULL) {
tmp->ssl_ctx->references++;
}
#endif

tmp->mgr = &to->ns_mgr;
ns_add_conn(tmp->mgr, tmp);
}
@ -5153,6 +5246,7 @@ const char *mg_set_option(struct mg_server *server, const char *name,
char buf[500] = "";
size_t n = 0;
struct vec vec;

/*
* Ports can be specified as 0, meaning that OS has to choose any
* free port that is available. In order to pass chosen port number to
@ -5247,31 +5341,32 @@ static void process_udp(struct ns_connection *nc) {
//ns_printf(nc, "%s", "HTTP/1.0 200 OK\r\n\r\n");
}

#ifdef MONGOOSE_SEND_NS_EVENTS
static void send_ns_event(struct ns_connection *nc, int ev, void *p) {
struct connection *conn = (struct connection *) nc->user_data;
if (conn != NULL) {
void *param[2] = { nc, p };
conn->mg_conn.callback_param = param;
call_user(conn, (enum mg_event) ev);
}
}
#else
static void send_ns_event(struct ns_connection *nc, int ev, void *p) {
(void) nc; (void) p; (void) ev;
}
#endif

static void mg_ev_handler(struct ns_connection *nc, int ev, void *p) {
struct connection *conn = (struct connection *) nc->user_data;

// Send NS event to the handler. Note that call_user won't send an event
// if conn == NULL. Therefore, repeat this for NS_ACCEPT event as well.
#ifdef MONGOOSE_SEND_NS_EVENTS
{
struct connection *conn = (struct connection *) nc->user_data;
void *param[2] = { nc, p };
if (conn != NULL) conn->mg_conn.callback_param = param;
call_user(conn, (enum mg_event) ev);
}
#endif
send_ns_event(nc, ev, p);

switch (ev) {
case NS_ACCEPT:
on_accept(nc, (union socket_address *) p);
#ifdef MONGOOSE_SEND_NS_EVENTS
{
struct connection *conn = (struct connection *) nc->user_data;
void *param[2] = { nc, p };
if (conn != NULL) conn->mg_conn.callback_param = param;
call_user(conn, (enum mg_event) ev);
}
#endif
send_ns_event(nc, ev, p);
break;

case NS_CONNECT:
@ -5343,6 +5438,11 @@ static void mg_ev_handler(struct ns_connection *nc, int ev, void *p) {
write_terminating_chunk(conn);
}
close_local_endpoint(conn);
/*
* MG_POLL callback returned MG_TRUE,
* i.e. data is sent, set corresponding flag
*/
conn->ns_conn->flags |= NSF_FINISHED_SENDING_DATA;
}

if (conn->endpoint_type == EP_FILE) {
7 3rdparty/mongoose/mongoose.h vendored
@ -60,7 +60,8 @@ struct mg_connection {
struct mg_server; // Opaque structure describing server instance
enum mg_result { MG_FALSE, MG_TRUE, MG_MORE };
enum mg_event {
MG_POLL = 100, // Callback return value is ignored
MG_POLL = 100, // If callback returns MG_TRUE connection closes
// after all of data is sent
MG_CONNECT, // If callback returns MG_FALSE, connect fails
MG_AUTH, // If callback returns MG_FALSE, authentication fails
MG_REQUEST, // If callback returns MG_FALSE, Mongoose continues with req
@ -103,8 +104,10 @@ void mg_send_status(struct mg_connection *, int status_code);
void mg_send_header(struct mg_connection *, const char *name, const char *val);
size_t mg_send_data(struct mg_connection *, const void *data, int data_len);
size_t mg_printf_data(struct mg_connection *, const char *format, ...);
size_t mg_vprintf_data(struct mg_connection *, const char *format, va_list ap);
size_t mg_write(struct mg_connection *, const void *buf, size_t len);
size_t mg_printf(struct mg_connection *conn, const char *fmt, ...);
size_t mg_vprintf(struct mg_connection *conn, const char *fmt, va_list ap);

size_t mg_websocket_write(struct mg_connection *, int opcode,
const char *data, size_t data_len);
@ -118,6 +121,8 @@ const char *mg_get_header(const struct mg_connection *, const char *name);
const char *mg_get_mime_type(const char *name, const char *default_mime_type);
int mg_get_var(const struct mg_connection *conn, const char *var_name,
char *buf, size_t buf_len);
int mg_get_var_n(const struct mg_connection *conn, const char *var_name,
char *buf, size_t buf_len, int n);
int mg_parse_header(const char *hdr, const char *var_name, char *buf, size_t);
int mg_parse_multipart(const char *buf, int buf_len,
char *var_name, int var_name_len,
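The new mg_vprintf()/mg_vprintf_data() declarations above make va_list forwarding possible, so callers can layer their own printf-style helpers over the chunked-output API. A minimal sketch (hypothetical wrapper, assuming only the declarations in this header; not part of the commit):

#include <stdarg.h>
#include "mongoose.h"

// Sketch only: forward a va_list to mg_vprintf_data() instead of
// formatting into a temporary buffer first.
static size_t reply(struct mg_connection *c, const char *fmt, ...) {
  va_list ap;
  size_t n;
  va_start(ap, fmt);
  n = mg_vprintf_data(c, fmt, ap);  // appends formatted data to the response
  va_end(ap);
  return n;
}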
30 3rdparty/mongoose/test/unit_test.c vendored
@ -169,6 +169,7 @@ static const char *test_match_prefix(void) {
ASSERT(mg_match_prefix("/api", 4, "/api") == 4);
ASSERT(mg_match_prefix("/a/", 3, "/a/b/c") == 3);
ASSERT(mg_match_prefix("/a/", 3, "/ab/c") == -1);
ASSERT(mg_match_prefix("/blog/", 6, "/") == -1);
ASSERT(mg_match_prefix("/*/", 3, "/ab/c") == 4);
ASSERT(mg_match_prefix("**", 2, "/a/b/c") == 6);
ASSERT(mg_match_prefix("/*", 2, "/a/b/c") == 2);
@ -223,29 +224,28 @@ static const char *test_remove_double_dots() {
}

static const char *test_get_var(void) {
static const char *post[] = {
"a=1&&b=2&d&=&c=3%20&e=",
"q=&st=2012%2F11%2F13+17%3A05&et=&team_id=",
NULL
};
static const char *data = "a=1&&b=2&d&=&c=3%20&e=&k=aa&a=23";
static const char *data2 = "q=&st=2012%2F11%2F13+17%3A05&et=&team_id=";
char buf[20];

ASSERT(get_var(post[0], strlen(post[0]), "a", buf, sizeof(buf)) == 1);
ASSERT(get_var(data, strlen(data), "a", buf, sizeof(buf), 0) == 1);
ASSERT(buf[0] == '1' && buf[1] == '\0');
ASSERT(get_var(post[0], strlen(post[0]), "b", buf, sizeof(buf)) == 1);
ASSERT(get_var(data, strlen(data), "a", buf, sizeof(buf), 1) == 2);
ASSERT(strcmp(buf, "23") == 0);
ASSERT(get_var(data, strlen(data), "b", buf, sizeof(buf), 0) == 1);
ASSERT(buf[0] == '2' && buf[1] == '\0');
ASSERT(get_var(post[0], strlen(post[0]), "c", buf, sizeof(buf)) == 2);
ASSERT(get_var(data, strlen(data), "c", buf, sizeof(buf), 0) == 2);
ASSERT(buf[0] == '3' && buf[1] == ' ' && buf[2] == '\0');
ASSERT(get_var(post[0], strlen(post[0]), "e", buf, sizeof(buf)) == 0);
ASSERT(get_var(data, strlen(data), "e", buf, sizeof(buf), 0) == 0);
ASSERT(buf[0] == '\0');

ASSERT(get_var(post[0], strlen(post[0]), "d", buf, sizeof(buf)) == -1);
ASSERT(get_var(post[0], strlen(post[0]), "c", buf, 2) == -2);
ASSERT(get_var(data, strlen(data), "d", buf, sizeof(buf), 0) == -1);
ASSERT(get_var(data, strlen(data), "c", buf, 2, 0) == -2);

ASSERT(get_var(post[0], strlen(post[0]), "x", NULL, 10) == -2);
ASSERT(get_var(post[0], strlen(post[0]), "x", buf, 0) == -2);
ASSERT(get_var(post[1], strlen(post[1]), "st", buf, 16) == -2);
ASSERT(get_var(post[1], strlen(post[1]), "st", buf, 17) == 16);
ASSERT(get_var(data, strlen(data), "x", NULL, 10, 0) == -2);
ASSERT(get_var(data, strlen(data), "x", buf, 0, 0) == -2);
ASSERT(get_var(data2, strlen(data2), "st", buf, 16, 0) == -2);
ASSERT(get_var(data2, strlen(data2), "st", buf, 17, 0) == 16);
return NULL;
}
@ -10,7 +10,7 @@
|
||||
|
||||
#include <vector>
|
||||
|
||||
UNITTEST_STDVECTOR_LINKAGE(UnitTest::DeferredTestResult);
|
||||
UNITTEST_STDVECTOR_LINKAGE(UnitTest::DeferredTestResult)
|
||||
|
||||
namespace UnitTest
|
||||
{
|
||||
|
@ -23,7 +23,7 @@ public:
|
||||
|
||||
}
|
||||
|
||||
UNITTEST_STDVECTOR_LINKAGE(UnitTest::DeferredTestFailure);
|
||||
UNITTEST_STDVECTOR_LINKAGE(UnitTest::DeferredTestFailure)
|
||||
|
||||
namespace UnitTest
|
||||
{
|
||||
@ -34,14 +34,14 @@ public:
|
||||
DeferredTestResult();
|
||||
DeferredTestResult(char const* suite, char const* test);
|
||||
~DeferredTestResult();
|
||||
|
||||
|
||||
std::string suiteName;
|
||||
std::string testName;
|
||||
std::string failureFile;
|
||||
|
||||
|
||||
typedef std::vector< DeferredTestFailure > FailureVec;
|
||||
FailureVec failures;
|
||||
|
||||
|
||||
float timeElapsed;
|
||||
bool failed;
|
||||
};
|
||||
@ -1,8 +1,7 @@

# **MAME** #

[](https://gitter.im/mamedev/mame?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)

[](https://travis-ci.org/mamedev/mame) [](https://ci.appveyor.com/project/startaq/mame) [](https://gitter.im/mamedev/mame?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)

What is MAME?
=============
@ -159,6 +159,14 @@ Page DN [SDL ONLY]

Alt+ENTER Toggles between full-screen and windowed mode.

Scroll Lock Default mapping for the uimodekey. This key allows the user to
disable and enable the emulated keyboard in machines that require
it. All emulations which require emulated keyboards will start in
that mode and you can only access the internal UI (hitting TAB) by
first hitting this key. You can change the initial status of the
emulated keyboard as presented upon start by using the -ui_active
trigger, as detailed below (see the example after this entry).

Escape Exits emulator.
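As an illustration of the key and options described above (the machine name
and key choice are hypothetical; -ui_active and -uimodekey are the options
this document itself names):

    mame64 coco2 -ui_active -uimodekey F12

This would start a keyboard-requiring machine with the emulated keyboard
already enabled and move the UI-mode toggle from Scroll Lock to F12,
assuming the keystring syntax accepts plain key names.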
@ -332,9 +340,29 @@ of your command:

in the rompath are verified; however, you can limit this list by specifying a
specific softwarelistname (without .XML) after the -verifysoftlist command.


OSD related options
-------------------

-uimodekey [keystring]

Key used to toggle the emulated keyboard on and off. The default setting is SCRLOCK.

-uifontprovider

Chooses the provider for the UI font: win, none or auto. The default setting is AUTO.


OSD CLI options
---------------

-listmidi

Create a list of list available MIDI I/O devices for use with emulation.
Create a list of available MIDI I/O devices for use with emulation.

-listnetwork

Create a list of available Network Adapters for use with emulation.
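A quick illustration of the CLI options above (the exact output format is
not shown in this commit):

    mame64 -listmidi
    mame64 -listnetwork

Like the other -list commands, these are expected to print the available
MIDI devices or network adapters and exit without starting an emulation
session.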
@ -901,8 +929,8 @@ Core input options

the coin lockout outputs were actually connected to the coin
mechanisms. If this feature is enabled, then attempts to enter a coin
while the lockout is active will fail and will display a popup message
in the user interface. If this feature is disabled, the coin lockout
signal will be ignored. The default is ON (-coin_lockout).
in the user interface (in debug mode). If this feature is disabled, the
coin lockout signal will be ignored. The default is ON (-coin_lockout).

-ctrlr <controller>
@ -1131,6 +1159,32 @@ Debugging options

(-noupdate_in_pause).


Core communication options
--------------------------

-comm_localhost <string>

Local address to bind to. This can be a traditional xxx.xxx.xxx.xxx
address or a string containing a resolvable hostname. The default
value is "0.0.0.0".

-comm_localport <string>

Local port to bind to. This can be any traditional communications port
as an unsigned 16-bit integer (0-65535). The default value is "15122".

-comm_remotehost <string>

Remote address to connect to. This can be a traditional xxx.xxx.xxx.xxx
address or a string containing a resolvable hostname. The default
value is "0.0.0.0".

-comm_remoteport <string>

Remote port to connect to. This can be any traditional communications port
as an unsigned 16-bit integer (0-65535). The default value is "15122".
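A sketch of how the four options above pair up for two linked instances on
a LAN (the addresses, ports and placeholder machine name are illustrative
assumptions, not taken from this commit):

    host A:  mame64 <machine> -comm_localhost 192.168.0.10 -comm_localport 15122 -comm_remotehost 192.168.0.11 -comm_remoteport 15122
    host B:  mame64 <machine> -comm_localhost 192.168.0.11 -comm_localport 15122 -comm_remotehost 192.168.0.10 -comm_remoteport 15122

Each instance binds its own local endpoint and connects to the other's,
mirroring the local/remote option pairs documented above.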

Core misc options
-----------------
@ -233,8 +233,8 @@ subclass has been created. It's called memory_interface, declared in

m6502_device, and provides the following accessors:

- UINT8 read(UINT16 adr) - normal read
- UINT8 read_direct(UINT16 adr) - direct read
- UINT8 read_decrypted(UINT16 adr) - decrypted data read
- UINT8 read_sync(UINT16 adr) - opcode read with sync active (first byte of opcode)
- UINT8 read_arg(UINT16 adr) - opcode read with sync inactive (rest of opcode)
- void write(UINT16 adr, UINT8 val) - normal write

- UINT8 read_9(UINT16 adr) - special y-indexed 6509 read, defaults to read()
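To make the accessor list above concrete, here is a minimal self-contained
C++ sketch in the spirit of the new API. The abstract base below is a
stand-in written for this example (the real declaration lives in m6502.h);
only the accessor names and signatures come from the list above.

    #include <cstdint>

    typedef uint8_t  UINT8;
    typedef uint16_t UINT16;

    // Stand-in for the interface described above (illustrative only).
    struct memory_interface {
        virtual ~memory_interface() {}
        virtual UINT8 read(UINT16 adr) = 0;                        // normal read
        virtual UINT8 read_sync(UINT16 adr) { return read(adr); }  // sync active
        virtual UINT8 read_arg(UINT16 adr)  { return read(adr); }  // sync inactive
        virtual void  write(UINT16 adr, UINT8 val) = 0;            // normal write
    };

    // Toy implementation backed by a flat 64K RAM image; a real driver
    // would route these through the device's address spaces instead.
    struct flat_ram_interface : memory_interface {
        UINT8 ram[0x10000];
        UINT8 read(UINT16 adr) override { return ram[adr]; }
        void  write(UINT16 adr, UINT8 val) override { ram[adr] = val; }
        // read_sync()/read_arg() inherit plain read(), which suits an
        // undecrypted system; encrypted systems would override them.
    };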
@ -105,7 +105,7 @@ disable hardware stretching, otherwise you won't get that "perfect"

So, I recommend starting with these initial options and then tweaking
from there. One additional option you might want to try in
combination with the above is the -prescale option. -prescale takes
an integer parameter from 1 to 8, and specifies a magnification
an integer parameter from 1 to 3, and specifies a magnification
amount by which the screen pixels are expanded before they are drawn
to the screen. Why is this useful? And how much of a performance
impact does it have? Well, that depends on the mode you are running
@ -117,7 +117,7 @@ them to the screen. Depending on the video card, this is usually a
small performance hit, but not too significant. The benefit is that
each prescale factor reduces the blurriness of the pixels.
-prescale 1 is the default, which does no scaling. -prescale 2 will
double each pixel, -prescale 3 will triple each pixel, etc. For my
double each pixel, and -prescale 3 will triple each pixel. For my
money, -prescale 2 is sufficient, but people with super high
resolution displays claim that larger -prescale factors work even
better.
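A worked illustration of the option just described (the machine name is a
placeholder; 2 is the value the text itself recommends):

    mame64 pacman -prescale 2

Each screen pixel is expanded 2x in software before any hardware stretch,
which keeps pixel edges noticeably crisper at a modest performance cost.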
@ -374,6 +374,80 @@

</part>
</software>

<software name="pbw128">
<description>Paperback Writer 128</description>
<year>1985</year>
<publisher>Digital Solutions</publisher>
<info name="protection" value="none" />
<info name="serial" value="KC002264" />

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="287512">
<rom name="kc002264.g64" size="287512" crc="ea540ddd" sha1="6e1b1679a4a5525c2b2cd82f7d2ea9115f62ab4c" offset="0" />
</dataarea>
</part>
</software>

<software name="pbw128a" cloneof="pbw128">
<description>Paperback Writer 128 (Alt)</description>
<year>1985</year>
<publisher>Digital Solutions</publisher>
<info name="protection" value="none" />
<info name="serial" value="UC004217" />

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="287512">
<rom name="uc004217.g64" size="287512" crc="63c96e73" sha1="05cf4f02448d72fd57be0b335810b8f429b8d81f" offset="0" />
</dataarea>
</part>
</software>

<software name="tpw">
<description>Term Paper Writer</description>
<year>1986</year>
<publisher>Activision</publisher>
<info name="protection" value="xemag" />
<info name="serial" value="DD-606-04" />

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="572316">
<rom name="dd-606-04.g71" size="572316" crc="d3490406" sha1="8ece5c70dacfc7005a601534f07c9e3e05e3e266" offset="0" />
</dataarea>
</part>
</software>

<software name="greatwar">
<description>The Great War</description>
<year>1987</year>
<publisher>Free Spirit Software</publisher>
<info name="protection" value="none" />

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="279582">
<rom name="greatwar.g64" size="279582" crc="177ea4f4" sha1="e6f86ba6221ef3676f8560dc13a3b5df3eff2d57" offset="0" />
</dataarea>
</part>
</software>

<software name="ypfp128">
<description>Sylvia Porter's Personal Finance Series - Your Personal Financial Planner 128</description>
<year>1984</year>
<publisher>Timeworks</publisher>
<info name="protection" value="none" />

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="287512">
<rom name="ypfp128-a.g64" size="287512" crc="5d1ef4c7" sha1="07dcd3c21cd4d3d0be1425f44e701c6d68af4f40" offset="0" />
</dataarea>
</part>

<part name="flop2" interface="floppy_5_25">
<dataarea name="flop" size="279582">
<rom name="ypfp128-b.g64" size="279582" crc="ec4b0485" sha1="4d0820ee76f00a43cf6cae48407b11ba8652274d" offset="0" />
</dataarea>
</part>
</software>

<software name="1570demo">
<description>Commodore 1570/1571 Test Demo</description>
<year>198?</year>
@ -1,13 +1,13 @@
<?xml version="1.0"?>
<!DOCTYPE softwarelist SYSTEM "softwarelist.dtd">
<softwarelist name="cgenie_cart" description="EACA Colour Genie cartridges">

<!-- Where was this plugged exactly?!? -->
<softwarelist name="cgenie_cart" description="EACA Colour Genie EG2000 cartridges">

<software name="cdosintf">
<description>Colour DOS Interface</description>
<year>19??</year>
<year>1997</year>
<publisher><homebrew?></publisher>
<info name="author" value="C. Poetzsch, Jürgen Buchmüller" />
<part name="cart" interface="cgenie_cart">
<dataarea name="rom" size="2772">
<rom name="newe000.bin" size="2772" crc="953491a7" sha1="8d6a739a9058b3834897a15bcda5348b94008f5a" offset="0" />
@ -15,5 +15,16 @@
</part>
</software>

<software name="colmon2">
<description>Colour-Monitor 2.0</description>
<year>198?</year>
<publisher><unknown></publisher>
<info name="usage" value="Enter CALL E000 to start" />
<part name="cart" interface="cgenie_cart">
<dataarea name="rom" size="0x1000">
<rom name="colour_monitor_2.bin" size="0x1000" crc="a6b08d4d" sha1="2398f8ea430468dbd29fb0fabe89b2c21c2404f7" offset="0" />
</dataarea>
</part>
</software>

</softwarelist>

File diff suppressed because it is too large
@ -3909,7 +3909,7 @@ a certain item) -->

<description>Lemmings 2 - The Tribes (Euro, Prototype)</description>
<year>1994</year>
<publisher>Psygnosis</publisher>
<part name="cart" interface="sms_cart">
<part name="cart" interface="gamegear_cart">
<dataarea name="rom" size="524288">
<rom name="lemmings 2 - the tribes [proto].bin" size="524288" crc="fbc807e1" sha1="ad0ce8fc8ce9e5ef9b68b76fb7f3eced4245d5c4" offset="000000" />
</dataarea>

127
hash/guab.xml
Normal file
@ -0,0 +1,127 @@

<?xml version="1.0"?>
<!DOCTYPE softwarelist SYSTEM "softwarelist.dtd">

<softwarelist name="guab" description="JPM Give us a Break floppy disks">

<software name="guab3">
<description>Give us a Break (3rd edition)</description>
<year>1986</year>
<publisher>JPM</publisher>
<part name="flop1" interface="floppy_3_5">
<dataarea name="flop" size="737280">
<rom name="guab3.dsk" size="737280" crc="ecb41e06" sha1="2900a95046b38312c6035ea394b04b62c1d29f42" offset="0" />
</dataarea>
</part>
</software>

<software name="guab3a">
<description>Give us a Break (3rd edition alt?)</description>
<year>1986</year>
<publisher>JPM</publisher>
<part name="flop1" interface="floppy_3_5">
<dataarea name="flop" size="737280">
<rom name="guab7c.dsk" size="737280" crc="f5f10f87" sha1="80e540339efbfe59b656d6cd4f466a17df84c123" offset="0" />
</dataarea>
</part>
</software>

<software name="guab4">
<description>Give us a Break (4th edition)</description>
<year>1986</year>
<publisher>JPM</publisher>
<part name="flop1" interface="floppy_3_5">
<dataarea name="flop" size="737280">
<rom name="guab8d.dsk" size="737280" crc="b87c55ce" sha1="20debbefae194276b0813518634cf52bed093e73" offset="0" />
</dataarea>
</part>
</software>

<software name="guab6">
<description>Give us a Break (6th edition)</description>
<year>1986</year>
<publisher>JPM</publisher>
<part name="flop1" interface="floppy_3_5">
<dataarea name="flop" size="737280">
<rom name="guabf6.dsk" size="737280" crc="08804c28" sha1="608d89d598b7acb133814540dba98cea29c6cad6" offset="0" />
</dataarea>
</part>
</software>

<software name="guab6a">
<description>Give us a Break (6th edition alt?)</description>
<year>1986</year>
<publisher>JPM</publisher>
<part name="flop1" interface="floppy_3_5">
<dataarea name="flop" size="737280">
<rom name="guab9f.dsk" size="737280" crc="85329fe9" sha1="f5ca1956c37b786fa0a4f8f0607a0a2b19d3fbb0" offset="0" />
</dataarea>
</part>
</software>

<software name="guab7">
<description>Give us a Break (7th edition)</description>
<year>1986</year>
<publisher>JPM</publisher>
<part name="flop1" interface="floppy_3_5">
<dataarea name="flop" size="737280">
<rom name="guab9g.dsk" size="737280" crc="6b8c36f9" sha1="1d9f4d943f2962603bbe39e5b58befe15954a6d6" offset="0" />
</dataarea>
</part>
</software>

<software name="guab21">
<description>Give us a Break (21st edition)</description>
<year>1986</year>
<publisher>JPM</publisher>
<part name="flop1" interface="floppy_3_5">
<dataarea name="flop" size="737280">
<rom name="guab21.dsk" size="737280" crc="558ee009" sha1="d2a02662216e10f80a215044f1929df7255b6136" offset="0" />
</dataarea>
</part>
</software>

<software name="guab43">
<description>Give us a Break (43rd edition)</description>
<year>1986</year>
<publisher>JPM</publisher>
<part name="flop1" interface="floppy_3_5">
<dataarea name="flop" size="737280">
<rom name="guab43.dsk" size="737280" crc="1fd8f614" sha1="656ce33c93a9d0e81ec7fcdd8f4e4b2f9ad1e485" offset="0" />
</dataarea>
</part>
</software>

<software name="crisscrs">
<description>Criss Cross (Sweden)</description>
<year>1986</year>
<publisher>JPM</publisher>
<part name="flop1" interface="floppy_3_5">
<dataarea name="flop" size="737280">
<rom name="crisscrs.dsk" size="737280" status="nodump" offset="0" />
</dataarea>
</part>
</software>

<software name="tenup">
<description>Ten Up (compendium 17)</description>
<year>1988</year>
<publisher>JPM</publisher>
<part name="flop1" interface="floppy_3_5">
<dataarea name="flop" size="737280">
<rom name="10up17.dsk" size="737280" crc="7bc328df" sha1="5f8e40d8ffc370fb19be9e386befa5fcd1f35a75" offset="0" />
</dataarea>
</part>
</software>

<software name="tenup3">
<description>Ten Up (compendium 3)</description>
<year>1988</year>
<publisher>JPM</publisher>
<part name="flop1" interface="floppy_3_5">
<dataarea name="flop" size="737280">
<rom name="10up3.dsk" size="737280" crc="2767f017" sha1="1c6551b089c3e3df48e0c03bd502b91fd88f0e94" offset="0" />
</dataarea>
</part>
</software>

</softwarelist>

147
hash/i7000_card.xml
Normal file
@ -0,0 +1,147 @@

<?xml version="1.0"?>
<!DOCTYPE softwarelist SYSTEM "softwarelist.dtd">
<!--
Thanks to Alexandre Souza (Tabajara) for contributing the initial
cartridges data available in this softlist.
-->
<softwarelist name="i7000_card" description="Itautec I-7000 cartridges">
<software name="set78">
<description>I-7101 SET 78 COML v1.3 R01</description>
<year>198?</year>
<publisher>Itautec</publisher>
<part name="card" interface="i7000_card">
<dataarea name="rom" size="0x2000">
<rom name="i-7101_set_78_coml_v1.3_r01_703d.rom" size="0x2000" crc="12da1687" sha1="b370c2b7718cf89d69e37fc7d1d3c4d80dc5f1cc" offset="0" />
</dataarea>
</part>
</software>

<software name="telex">
<description>I-7104 TELEX v1.0 R4 (Aug 31st, 1987)</description>
<year>1987</year>
<publisher>Itautec</publisher>
<part name="card" interface="i7000_card">
<dataarea name="rom" size="0x8000">
<rom name="i-7104_cart_telex_v1.0_r4_a5e5_31_08_87_1.rom" size="0x2000" crc="5ae6b20d" sha1="f3cbfa81bdc828872790290a53e62750d720b457" offset="0x0000" />
<rom name="i-7104_cart_telex_v1.0_r4_632c_31_08_87_2.rom" size="0x2000" crc="e95dd757" sha1="f90886b8c36063643509fcad4df1061de1dc7a90" offset="0x2000" />
<rom name="i-7104_cart_telex_v1.0_r4_a3b4_31_08_87_3.rom" size="0x2000" crc="ef884b22" sha1="bbc3688a64292dd15fe8103bf6cadfd4a991abb9" offset="0x4000" />
<rom name="i-7104_cart_telex_v1.0_r4_fdcc_31_08_87_4.rom" size="0x2000" crc="82202eb9" sha1="ee2018b5b58a656630c5057b3a65316f38099265" offset="0x6000" />
</dataarea>
</part>
</software>

<software name="redator">
<description>I-7105 REDATOR v1.2 R02 (Sept 16th, 1983)</description>
<year>1983</year>
<publisher>Itautec</publisher>
<part name="card" interface="i7000_card">
<dataarea name="rom" size="0x8000">
<rom name="i-7105_redator_v1.2_r02_16_09_83_1.rom" size="0x2000" crc="2b5bf15f" sha1="1384cab00e1596619e1ae5ae072bd23af6fe6b2e" offset="0x0000" />
<rom name="i-7105_redator_v1.2_r02_16_09_83_2.rom" size="0x2000" crc="22c64fc2" sha1="ef4fbe93deeea65f1705e9b5fb2ad4d376650215" offset="0x2000" />
<rom name="i-7105_redator_v1.2_r02_16_09_83_3.rom" size="0x2000" crc="cfb8e70b" sha1="08683feafbe28e009458a0cfb1301bedd7e03aaa" offset="0x4000" />
<rom name="i-7105_redator_v1.2_r02_16_09_83_4.rom" size="0x2000" crc="f8510fee" sha1="275528441d4c4a410e6be45d0ecbe465aef2afe1" offset="0x6000" />
</dataarea>
</part>
</software>

<software name="set3278">
<description>I-7106 SET 3278 v1.2 R00 (Mar 21st, 1985) IBM-3278 terminal emulation</description>
<year>1985</year>
<publisher>Itautec</publisher>
<part name="card" interface="i7000_card">
<dataarea name="rom" size="0x2000">
<rom name="i-7106_set_3278_v1.2_r00_21_03_85.rom" size="0x2000" crc="004275c8" sha1="ff42eb526f6142cff0d6ecfef8e41ef17bfac2c7" offset="0" />
</dataarea>
</part>
</software>

<software name="setvt52">
<description>I-7107 SET VT52 v1.0 R02 (Feb 25th, 1986)</description>
<year>1986</year>
<publisher>Itautec</publisher>
<part name="card" interface="i7000_card">
<dataarea name="rom" size="0x6000">
<rom name="i-7107_set_vt52_v1.0_r02_25_02_86_1.rom" size="0x2000" crc="ead3f48e" sha1="ec7951591cab8e8c2cb22f7e6352c709d24b9706" offset="0x0000" />
<!-- the 2nd EPROM connector is unpopulated -->
<rom name="i-7107_set_vt52_v1.0_r02_25_02_86_2.rom" size="0x2000" crc="da848ec0" sha1="7ed1898de94c7382c081ec3a78f60a6850daa25a" offset="0x4000" />
<!-- the 4th EPROM connector is unpopulated -->
</dataarea>
</part>
</software>

<software name="i7113">
<description>I-7113 v1.2 R02 (Mar 4th, 1986)</description>
<year>1986</year>
<publisher>Itautec</publisher>
<part name="card" interface="i7000_card">
<dataarea name="rom" size="0x2000">
<rom name="i-7113_v1.2_r02_04_03_86.rom" size="0x2000" crc="865da8b7" sha1="da65dc6d65bbe0b8e84c0eeda15021e2575fd696" offset="0" />
</dataarea>
</part>
</software>

<software name="redelocl">
<description>I-7119 REDE LOCAL v1.0 R01 (Mar 21st, 1987)</description>
<year>1987</year>
<publisher>Itautec</publisher>
<part name="card" interface="i7000_card">
<dataarea name="rom" size="0x4000">
<rom name="i-7119_rede_local_v1.0_r01_21_03_87_1.rom" size="0x2000" crc="f0e95bc4" sha1="fa4482b005d8647dca411911b0f7048c940632c8" offset="0x0000" />
<rom name="i-7119_rede_local_v1.0_r01_21_03_87_2.rom" size="0x2000" crc="3b9461bf" sha1="958a71e61a91433645d402c31955fe4f64efcba0" offset="0x2000" />
</dataarea>
</part>
</software>

<software name="telex2">
<description>I-7120 TELEX II v1.0 R04 (Aug 31st, 1987)</description>
<year>1987</year>
<publisher>Itautec</publisher>
<part name="card" interface="i7000_card">
<dataarea name="rom" size="0x8000">
<rom name="i-7120_telex_ii_v1.0_r04_31_08_87_1.rom" size="0x2000" crc="5ae6b20d" sha1="f3cbfa81bdc828872790290a53e62750d720b457" offset="0x0000" />
<rom name="i-7120_telex_ii_v1.0_r04_31_08_87_2.rom" size="0x2000" crc="e95dd757" sha1="f90886b8c36063643509fcad4df1061de1dc7a90" offset="0x2000" />
<rom name="i-7120_telex_ii_v1.0_r04_31_08_87_3.rom" size="0x2000" crc="ef884b22" sha1="bbc3688a64292dd15fe8103bf6cadfd4a991abb9" offset="0x4000" />
<rom name="i-7120_telex_ii_v1.0_r04_31_08_87_4.rom" size="0x2000" crc="82202eb9" sha1="ee2018b5b58a656630c5057b3a65316f38099265" offset="0x6000" />
</dataarea>
</part>
</software>

<software name="redtrv12">
<description>I-71XX REDATOR v1.2 R04</description>
<year>198?</year>
<publisher>Itautec</publisher>
<part name="card" interface="i7000_card">
<dataarea name="rom" size="0x8000">
<rom name="i-71xx_redator_v1.2_r04_1.rom" size="0x2000" crc="98105005" sha1="3cbd9de8c7e37e16d1dd143a7e735f582cf303c8" offset="0x0000" />
<rom name="i-71xx_redator_v1.2_r04_2.rom" size="0x2000" crc="848d665c" sha1="2095c9007f090d4510265fd3da6ef0d037c7ee86" offset="0x2000" />
<rom name="i-71xx_redator_v1.2_r04_3.rom" size="0x2000" crc="1e61ff21" sha1="d8e28264e7020e912774fd9bda6bacd4b227a14d" offset="0x4000" />
<rom name="i-71xx_redator_v1.2_r04_4.rom" size="0x2000" crc="6c6b96a6" sha1="26bac8e902d40490fcd2e9021a5de3753c0fc26a" offset="0x6000" />
</dataarea>
</part>
</software>

<software name="setdisc">
<description>I-71XX SETDISC</description>
<year>198?</year>
<publisher>Itautec</publisher>
<part name="card" interface="i7000_card">
<dataarea name="rom" size="0x4000">
<rom name="i-71xx_setdisc_1.rom" size="0x2000" crc="0ad7d534" sha1="aecf5f01b8ffb4120f3cd4752705f6538ef70f4e" offset="0x0000" />
<rom name="i-71xx_setdisc_2.rom" size="0x2000" crc="51e24c2b" sha1="172699394690ee46096d9395f6894ebaa26ea6ac" offset="0x2000" />
</dataarea>
</part>
</software>

<software name="vdeotxto">
<description>I-71XX VIDEOTEXTO</description>
<year>198?</year>
<publisher>Itautec</publisher>
<part name="card" interface="i7000_card">
<dataarea name="rom" size="0x6000">
<rom name="i-71xx_videotexto_1.rom" size="0x2000" crc="28dda7db" sha1="ced755c40fcdf2dc2cd5263a494d51917e1010d1" offset="0x0000" />
<rom name="i-71xx_videotexto_2.rom" size="0x2000" crc="b4293435" sha1="5e2b96c19c4f5c63a5afa2de504d29fe64a4c908" offset="0x2000" />
<rom name="i-71xx_videotexto_3.rom" size="0x2000" crc="07486b26" sha1="e54e32a789e73b772516759ac26badf5805abd95" offset="0x4000" />
</dataarea>
</part>
</software>
</softwarelist>

10
hash/m20.xml
@ -144,7 +144,7 @@

<software name="msdos20" supported="no">
<!-- "This is a standard 360k MS-DOS disk image, 512 bytes/sector, 9 sectors, 2 sides, 40 tracks." -->
<!-- MESS complains about "Unable to identify the image format" -->
<!-- Use with BIOS 2.0 and the 8086 APB -->
<description>MS-DOS 2.0</description>
<year>19??</year>
<publisher>Microsoft</publisher>
@ -298,7 +298,7 @@
</part>
</software>

<software name="olinum" supported="no"> <!-- Invalid in M20 -->
<software name="olinum" supported="no"> <!-- Collection of BASIC programs, boot PCOS and run basic to use -->
<description>OliNum</description>
<year>19??</year>
<publisher>Olivetti</publisher>
@ -310,7 +310,7 @@
</part>
</software>

<software name="olisort" supported="no"> <!-- Invalid in M20 -->
<software name="olisort" supported="no"> <!-- Collection of BASIC programs, boot PCOS and run basic to use -->
<description>OliSort 2.0.6</description>
<year>19??</year>
<publisher>Olivetti</publisher>
@ -322,7 +322,7 @@
</part>
</software>

<software name="olistat" supported="no"> <!-- Invalid in M20 -->
<software name="olistat" supported="no"> <!-- Collection of BASIC programs, boot PCOS and run basic to use -->
<description>OliStat</description>
<year>19??</year>
<publisher>Olivetti</publisher>
@ -455,7 +455,7 @@


<!-- This is a user disk with high school math programs -->
<software name="m20utent" supported="no"> <!-- Invalid in M20 -->
<software name="m20utent" supported="no"> <!-- Collection of BASIC programs, boot PCOS and run basic to use -->
<description>M20 Utente</description>
<year>19??</year>
<publisher><unknown></publisher>
@ -1379,8 +1379,11 @@ kept for now until finding out what those bytes affect...

<info name="serial" value="R48X5513" />
<info name="alt_title" value="アルカザール" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<dataarea name="rom" size="65536">
<rom name="alcazar - the forgotten fortress (japan).rom" size="16384" crc="3ee454b0" sha1="807676038cbba043b8099eba9c5840a4811a7e59" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -2211,21 +2214,11 @@ kept for now until finding out what those bytes affect...
<info name="serial" value="R48X5502" />
<info name="alt_title" value="ビームライダー" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="32768">
<rom name="beam rider (japan).rom" size="32768" crc="1553e408" sha1="1231984ae24bf35f9f38596b864420d8ccd3f30b" offset="0" />
</dataarea>
</part>
</software>

<software name="beamridra" cloneof="beamridr">
<description>Beam Rider (Jpn, Alt)</description>
<year>1984</year>
<publisher>Pony Canyon</publisher>
<info name="serial" value="R48X5502" />
<info name="alt_title" value="ビームライダー" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<dataarea name="rom" size="65536">
<rom name="beam rider (japan) (alt 1).rom" size="16384" crc="d6a6bee6" sha1="f51f936887498d21f6ee9fe8a7701633be67e79d" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -4155,20 +4148,11 @@ kept for now until finding out what those bytes affect...
<publisher>Pony Canyon</publisher>
<info name="serial" value="R48X5506" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="32768">
<rom name="decathlon (japan).rom" size="32768" crc="cd016b93" sha1="a1656f612360a126e09ef2baaa8002d92054125d" offset="0" />
</dataarea>
</part>
</software>

<software name="decathlna" cloneof="decathln">
<description>Decathlon (Jpn, Alt)</description>
<year>1984</year>
<publisher>Pony Canyon</publisher>
<info name="serial" value="R48X5506" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<rom name="decathlon (japan) (alt 1).rom" size="16384" crc="f99b1c22" sha1="5d43cb6ca89f31d5f543e4dcd3fa9987b9769602" offset="0" />
<dataarea name="rom" size="65536">
<rom name="decathlon (japan).rom" size="16384" crc="f99b1c22" sha1="5d43cb6ca89f31d5f543e4dcd3fa9987b9769602" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -5983,8 +5967,11 @@ kept for now until finding out what those bytes affect...
<info name="serial" value="R48X5505" />
<info name="alt_title" value="ヒーロー" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="32768">
<rom name="h.e.r.o. (japan).rom" size="32768" crc="8fdad3af" sha1="ebb70722f75279911cce79e6bd78b8f514561b0f" offset="0" />
<dataarea name="rom" size="65536">
<rom name="h.e.r.o. (japan).rom" size="16384" crc="97ab0d70" sha1="6fbb385147a939a7e6b47f5945d8e3b671a8c065" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -7016,21 +7003,11 @@ kept for now until finding out what those bytes affect...
<info name="serial" value="R48X5503" />
<info name="alt_title" value="キーストンケーパーズ" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="32768">
<rom name="keystone kapers (japan).rom" size="32768" crc="7ff117f9" sha1="fb4724b8159beae132f89f394f71ce3934a61ae2" offset="0" />
</dataarea>
</part>
</software>

<software name="keykapera" cloneof="keykaper">
<description>Keystone Kapers (Jpn, Alt)</description>
<year>1984</year>
<publisher>Pony Canyon</publisher>
<info name="serial" value="R48X5503" />
<info name="alt_title" value="キーストンケーパーズ" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<rom name="keystone kapers (japan) (alt 1).rom" size="16384" crc="b1cf2097" sha1="3d5160331beb1c5cc54ba6ecef6b3ce2ff4660b6" offset="0" />
<dataarea name="rom" size="65536">
<rom name="keystone kapers (japan).rom" size="16384" crc="b1cf2097" sha1="3d5160331beb1c5cc54ba6ecef6b3ce2ff4660b6" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -9659,8 +9636,11 @@ kept for now until finding out what those bytes affect...
<publisher>Pony Canyon</publisher>
<info name="alt_title" value="パストファインダー" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<dataarea name="rom" size="65536">
<rom name="pastfinder (japan).rom" size="16384" crc="d6d8d1d7" sha1="8117ec66c0645a54422841a632cfd6602f35c4f9" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -9981,8 +9961,11 @@ kept for now until finding out what those bytes affect...
<info name="serial" value="R48X5508" />
<info name="alt_title" value="ピットフォールII" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<dataarea name="rom" size="65536">
<rom name="pitfall ii - lost caverns (japan).rom" size="16384" crc="d307a7b8" sha1="78079266711e60420480e4d95a39f0d7d974ad32" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -9994,8 +9977,11 @@ kept for now until finding out what those bytes affect...
<info name="serial" value="R48X5508" />
<info name="alt_title" value="ピットフォールII" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<dataarea name="rom" size="65536">
<rom name="pitfall ii - lost caverns (japan) (alt 1).rom" size="16384" crc="71c59868" sha1="ae8c7355c829248305384243f78a870453367e77" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -10007,8 +9993,11 @@ kept for now until finding out what those bytes affect...
<info name="serial" value="R48X5501" />
<info name="alt_title" value="ピットフォール" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<dataarea name="rom" size="65536">
<rom name="pitfall! (japan).rom" size="16384" crc="5a009c55" sha1="b88e9c548873dcfd190e0e38f7b279344eea41ec" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -10020,8 +10009,11 @@ kept for now until finding out what those bytes affect...
<info name="serial" value="R48X5501" />
<info name="alt_title" value="ピットフォール" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<dataarea name="rom" size="65536">
<rom name="pitfall! (japan) (alt 1).rom" size="16384" crc="930aeb2c" sha1="5fb4b6c3735e4d9415565a856bb69f9fb4857161" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -10033,8 +10025,11 @@ kept for now until finding out what those bytes affect...
<info name="serial" value="R48X5501" />
<info name="alt_title" value="ピットフォール" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<dataarea name="rom" size="65536">
<rom name="pitfall! (japan) (alt 2).rom" size="16384" crc="2cb24473" sha1="2fa9c0f016efc2d1752a272c632393f5063ea06c" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -10508,21 +10503,11 @@ kept for now until finding out what those bytes affect...
<info name="serial" value="R48X5504" />
<info name="alt_title" value="リバーレイド" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="32768">
<rom name="river raid (japan).rom" size="32768" crc="0f22a553" sha1="a1e14912d45944b9a6baef1d4d3a04c1ae8df923" offset="0" />
</dataarea>
</part>
</software>

<software name="riveraida" cloneof="riveraid">
<description>River Raid (Jpn, Alt)</description>
<year>1985</year>
<publisher>Pony Canyon</publisher>
<info name="serial" value="R48X5504" />
<info name="alt_title" value="リバーレイド" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<rom name="river raid (japan) (alt 1).rom" size="16384" crc="2fc1d75b" sha1="33be9017faf173eae04d0c91ca8d42d1c20596c0" offset="0" />
<dataarea name="rom" size="65536">
<rom name="river raid (japan).rom" size="16384" crc="2fc1d75b" sha1="33be9017faf173eae04d0c91ca8d42d1c20596c0" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -10608,8 +10593,11 @@ kept for now until finding out what those bytes affect...
<info name="serial" value="R48X5511" />
<info name="alt_title" value="ロックンボルト" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<dataarea name="rom" size="65536">
<rom name="rock'n bolt (japan).rom" size="16384" crc="430e5789" sha1="1edabc3226648b54ae98d524b31f37ca47c8c88b" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -13171,20 +13159,11 @@ kept for now until finding out what those bytes affect...
<publisher>Pony Canyon</publisher>
<info name="alt_title" value="ゼンジー" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="32768">
<rom name="zenji (japan).rom" size="32768" crc="1a4aebb2" sha1="2f4404d141acc40e48af0b12c70cd44b066ece10" offset="0" />
</dataarea>
</part>
</software>

<software name="zenjia" cloneof="zenji">
<description>Zenji (Jpn, Alt)</description>
<year>1984</year>
<publisher>Pony Canyon</publisher>
<info name="alt_title" value="ゼンジー" />
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<rom name="zenji (japan) (alt 1).rom" size="16384" crc="77b3b0b9" sha1="c9440172802818cc5b9ae559fbd3f346a263605c" offset="0" />
<dataarea name="rom" size="65536">
<rom name="zenji (japan).rom" size="16384" crc="77b3b0b9" sha1="c9440172802818cc5b9ae559fbd3f346a263605c" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>
@ -13289,8 +13268,11 @@ kept for now until finding out what those bytes affect...
<year>1984</year>
<publisher>Activision?</publisher>
<part name="cart" interface="msx_cart">
<dataarea name="rom" size="16384">
<dataarea name="rom" size="65536">
<rom name="designer's pencil, the (europe) (program).rom" size="16384" crc="ce588c20" sha1="4b4a58a310a1138b95192d7fe0881bbdc45601d4" offset="0" />
<rom size="16384" offset="0x4000" loadflag="reload" />
<rom size="16384" offset="0x8000" loadflag="reload" />
<rom size="16384" offset="0xc000" loadflag="reload" />
</dataarea>
</part>
</software>

104
hash/n64.xml
@ -12,10 +12,10 @@ There appear to exist some undumped N64 protos

- Wild Waters (a more complete version than the available proto was shown)

Canceled games (possibly never got to the proto stage)
- Acclaim Sports Soccer / Ultra Soccer,
- Addams Family Pinball,
- Freak Boy,
- Ikazuchi No Go Toku,
- Acclaim Sports Soccer / Ultra Soccer
- Addams Family Pinball
- Freak Boy
- Ikazuchi No Go Toku
- X-Men Mutant Academy
-->

@ -54,6 +54,7 @@ Info on N64 chip labels (from The Cart Scan Repository)

<!-- List of confirmed carts (info from pictures) -->
<softwarelist name="n64" description="Nintendo 64 cartridges">

<software name="007goldnu" cloneof="007goldn">
<description>007 - GoldenEye (USA)</description>
<year>1997</year>
@ -5877,12 +5878,12 @@ patched out (+ a fix for internal checksum)

<!-- Original release, in .z64 format -->
<!-- rom name="mm_debug.rom" size="67108864" crc="687d8395" sha1="b38b71d2961dffb523020a67f4807a4b704e347a" offset="000000" /-->
<!-- .v64 version -->
<rom name="mm_debug.bin" size="67108864" crc="ea2e7abb" sha1="c790b3de31196645034c76e326640ccf3b8c91dd" offset="000000" />
<rom name="legend of zelda, the - majora's mask (europe) (en,fr,de,es) (debug edition).bin" size="67108864" crc="ea2e7abb" sha1="c790b3de31196645034c76e326640ccf3b8c91dd" offset="000000" />
</dataarea>
</part>
</software>

<software name="zeldamaju1" cloneof="zeldamaj">
<software name="zeldamaju2" cloneof="zeldamaj">
<description>The Legend of Zelda - Majora's Mask (USA, Demo)</description>
<year>2000</year>
<publisher>Nintendo</publisher>
@ -11662,8 +11663,9 @@ patched out (+ a fix for internal checksum)
</part>
</software>

<!-- to be verified -->
<software name="zeldaootmq" cloneof="zeldaoot">
<!-- to be verified -->

<software name="zeldaootmqd" cloneof="zeldaoot">
<description>The Legend of Zelda - Ocarina of Time - Master Quest (USA, Debug Edition, Ripped from GC)</description>
<year>2003</year>
<publisher>Nintendo</publisher>
@ -11674,39 +11676,104 @@ patched out (+ a fix for internal checksum)
</part>
</software>

<software name="zeldaootmq1" cloneof="zeldaoot">
<description>The Legend of Zelda - Ocarina of Time - Master Quest (Euro, Debug Edition?, Ripped from GC)</description>
<software name="zeldaootmqu" cloneof="zeldaoot">
<description>The Legend of Zelda - Ocarina of Time - Master Quest (USA, Ripped from GC)</description>
<year>2003</year>
<publisher>Nintendo</publisher>
<part name="cart" interface="n64_cart">
<dataarea name="rom" size="33554432">
<rom name="legend of zelda, the - ocarina of time - master quest (2003)(nintendo)[gamecube version].bin" size="33554432" crc="177fa73a" sha1="8ebf2e29313f44f2d49e5b4191971d09919e8e48" offset="000000" />
<rom name="legend of zelda, the - ocarina of time - master quest (usa)(2003)(nintendo)(ntsc)[gamecube version].bin" size="33554432" crc="7b89b13f" sha1="e1d070ad7b017de9f992b362164dcd9d7f820f7e" offset="000000" />
</dataarea>
</part>
</software>

<software name="zeldaootmq" cloneof="zeldaoot">
<description>The Legend of Zelda - Ocarina of Time - Master Quest (Euro, Ripped from GC)</description>
<year>2003</year>
<publisher>Nintendo</publisher>
<part name="cart" interface="n64_cart">
<dataarea name="rom" size="33554432">
<rom name="legend of zelda, the - ocarina of time - master quest (europe)(2003)(nintendo)(pal)[gamecube version].bin" size="33554432" crc="177fa73a" sha1="8ebf2e29313f44f2d49e5b4191971d09919e8e48" offset="000000" />
</dataarea>
</part>
</software>

<software name="zeldaootmqj" cloneof="zeldaoot">
<description>Zelda no Densetsu - Toki no Ocarina Ura (Jpn, Ripped from GC)</description>
<year>2003</year>
<publisher>Nintendo</publisher>
<part name="cart" interface="n64_cart">
<dataarea name="rom" size="33554432">
<rom name="zelda no densetsu - toki no ocarina ura (japan)(2003)(nintendo)(ntsc)[gamecube version].bin" size="33554432" crc="d97c20ba" sha1="06c3c098f0e14ed61811dfaf0e8e4519d7d7a826" offset="000000" />
</dataarea>
</part>
</software>

<software name="zeldaootu3" cloneof="zeldaoot">
<description>The Legend of Zelda - Ocarina of Time (USA, Ripped from GC)</description>
<year>2003</year>
<publisher>Nintendo</publisher>
<part name="cart" interface="n64_cart">
<dataarea name="rom" size="33554432">
<rom name="legend of zelda, the - ocarina of time (usa)(2003)(nintendo)(ntsc)[gamecube version].bin" size="33554432" crc="84bbc39f" sha1="44c75962911e13bdfdc31b35e0b8e3be6a6a49ab" offset="000000" />
</dataarea>
</part>
</software>

<software name="zeldaoot2" cloneof="zeldaoot">
<description>The Legend of Zelda - Ocarina of Time (Euro, Ripped from GC)</description>
<year>2003</year>
<publisher>Nintendo</publisher>
<part name="cart" interface="n64_cart">
<dataarea name="rom" size="33554432">
<rom name="legend of zelda, the - ocarina of time (2003)(nintendo)(pal)[gamecube version].v64" size="33554432" crc="6e658036" sha1="580dd0bd1b6d2c51cc20a764eece84dba558964c" offset="000000" />
<rom name="legend of zelda, the - ocarina of time (europe)(2003)(nintendo)(pal)[gamecube version].bin" size="33554432" crc="6e658036" sha1="580dd0bd1b6d2c51cc20a764eece84dba558964c" offset="000000" />
</dataarea>
</part>
</software>

<software name="zeldamaju2" cloneof="zeldamaj">
<description>The Legend of Zelda - Majora's Mask (USA, Ripped from GC)</description>
<year>2003?</year>
<software name="zeldaootj3" cloneof="zeldaoot">
<description>Zelda no Densetsu - Toki no Ocarina (Jpn, Ripped from GC)</description>
<year>2003</year>
<publisher>Nintendo</publisher>
<part name="cart" interface="n64_cart">
<dataarea name="rom" size="33554432">
<rom name="legend of zelda, the - majora's mask (2003)(nintendo)(us)[gamecube version].v64" size="33554432" crc="52245acb" sha1="8c378b87c83b3f4de20b14accf91e7590399f5dc" offset="000000" />
<rom name="zelda no densetsu - toki no ocarina (japan)(2003)(nintendo)(ntsc)[gamecube version].bin" size="33554432" crc="0ac22de8" sha1="245410280d152f28d5b1c0c0fc37f384db0020cd" offset="000000" />
</dataarea>
</part>
</software>

<software name="zeldamaju1" cloneof="zeldamaj">
<description>The Legend of Zelda - Majora's Mask (USA, Ripped from GC)</description>
<year>2003</year>
<publisher>Nintendo</publisher>
<part name="cart" interface="n64_cart">
<dataarea name="rom" size="33554432">
<rom name="legend of zelda, the - majora's mask (2003)(nintendo)(us)[gamecube version].bin" size="33554432" crc="52245acb" sha1="8c378b87c83b3f4de20b14accf91e7590399f5dc" offset="000000" />
</dataarea>
</part>
</software>

<software name="zeldamaj2" cloneof="zeldamaj">
<description>The Legend of Zelda - Majora's Mask (Euro, Ripped from GC)</description>
<year>2003</year>
<publisher>Nintendo</publisher>
<part name="cart" interface="n64_cart">
<dataarea name="rom" size="33554432">
<rom name="legend of zelda, the - majora's mask (2003)(nintendo)(europe)[gamecube version].bin" size="33554432" crc="19139e89" sha1="f4b0bedafc45c78c4428882036d46d691b650d8b" offset="000000" />
</dataarea>
</part>
</software>

<software name="zeldamajj2" cloneof="zeldamaj">
<description>Zelda no Densetsu - Mujura no Kamen (Jpn, Ripped from GC)</description>
<year>2003</year>
<publisher>Nintendo</publisher>
<part name="cart" interface="n64_cart">
<dataarea name="rom" size="33554432">
<rom name="zelda no densetsu - mujura no kamen (2003)(nintendo)(japan)[gamecube version].bin" size="33554432" crc="766ebdeb" sha1="7beadea493f24f77b5be85bf2c1dbd813481549b" offset="000000" />
</dataarea>
</part>
</software>

<!-- Non game cartridges -->

@ -11754,7 +11821,7 @@ patched out (+ a fix for internal checksum)
</part>
</software>

<software name="gsharka" cloneof="gshark" supported="no">
<software name="gsharka" cloneof="arp64" supported="no">
<description>GameShark Pro (USA, v2.0)</description>
<year>19??</year>
<publisher><unknown></publisher>
@ -11765,7 +11832,7 @@ patched out (+ a fix for internal checksum)
</part>
</software>

<software name="gshark" supported="no">
<software name="gshark" cloneof="arp64" supported="no">
<description>GameShark Pro (USA, v3.3)</description>
<year>19??</year>
<publisher><unknown></publisher>
@ -11834,5 +11901,4 @@ patched out (+ a fix for internal checksum)
</part>
</software>


</softwarelist>

1996
hash/pet_cass.xml
File diff suppressed because it is too large
@ -26,6 +26,18 @@
</part>
</software>

<software name="1001demoa" cloneof="1001demo">
<description>Commodore SFD-1001 Test/Demo</description>
<year>198?</year>
<publisher>Commodore</publisher>

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="1066496">
<rom name="commodore sfd-1001 test demo.d82" size="1066496" crc="87f79d8c" sha1="6f73fd233871fae7e16a9eda9f628c907606ce9d" offset="0" />
</dataarea>
</part>
</software>

<software name="2040demov1" cloneof="2040demo">
<description>Commodore 2040 Floppy Disk Drive Test/Demo Disk (DOS v1)</description>
<year>197?</year>
@ -38,6 +50,115 @@
</part>
</software>

<software name="apshaid">
<description>Temple of Apshai</description>
<year>1979</year>
<publisher>Automated Simulations</publisher>

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="174848">
<rom name="temple of apshai (1979)(automated simulations).d64" size="174848" crc="cf6b2a6f" sha1="c40fefc57db4d8eb13d575e8f3731a299104482c" offset="0" />
</dataarea>
</part>
</software>

<software name="dateston">
<description>The Datestones of Ryn A Microquest</description>
<year>1979</year>
<publisher>Automated Simulations</publisher>

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="174848">
<rom name="datestones of ryn (1979)(automated simulations).d64" size="174848" crc="1fbcf28a" sha1="936099e06ff1843db92956e8647f89f553ae7b89" offset="0" />
</dataarea>
</part>
</software>

<software name="escaped">
<description>Escape from the Death Planet</description>
<year>1979</year>
<publisher>Fantasy Games Software</publisher>

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="174848">
<rom name="escape from the death planet (1979)(fantasy games software).d64" size="174848" crc="b6a5e95f" sha1="d2dd418a3b7803ffdeef1572c498307a01cb2db0" offset="0" />
</dataarea>
</part>
</software>

<software name="galaxyd">
<description>Galaxy!</description>
<year>1981</year>
<publisher>Microcomputer Games, Inc.</publisher>

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="174848">
<rom name="galaxy! (1981)(avalon hill).d64" size="174848" crc="ee09100e" sha1="e94add6ab81d706cf2812e23eb6039715bdad126" offset="0" />
</dataarea>
</part>
</software>

<software name="hellfire">
<description>Hellfire Warrior</description>
<year>1980</year>
<publisher>Automated Simulations</publisher>

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="174848">
<rom name="hellfire warrior (1980)(automated simulations).d64" size="174848" crc="19ff4a71" sha1="f155ed22c781a6883bf5119056ba7a1870a9fb2e" offset="0" />
</dataarea>
</part>
</software>

<software name="mapscaps">
<description>Maps and Capitals</description>
<year>1978</year>
<publisher>Solomon</publisher>

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="174848">
<rom name="maps and capitals (1978)(solomon).d64" size="174848" crc="6d3111dd" sha1="7730b5cff7afb01efedf0f3cece7fe596b22c0cd" offset="0" />
</dataarea>
</part>
</software>

<software name="oriond">
<description>Invasion Orion</description>
<year>1979</year>
<publisher>Automated Simulations</publisher>

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="174848">
<rom name="invasion orion (1979)(automated simulations).d64" size="174848" crc="2fa76e5c" sha1="bf1b41364962cd1fb7faa805536740ef3b96b5f4" offset="0" />
</dataarea>
</part>
</software>

<software name="states">
<description>The States</description>
<year>1978</year>
<publisher>Solomon</publisher>

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="174848">
<rom name="states (1978)(solomon).d64" size="174848" crc="4b45669d" sha1="df769e3d302b6ed0229174b19f825d4515d15574" offset="0" />
</dataarea>
</part>
</software>


<software name="tunvkatm">
<description>Tunnel Vision and Cat and Mouse</description>
<year>1978</year>
<publisher>Solomon</publisher>

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="174848">
<rom name="tunnel vision and kat and mouse (1978)(michael riley).d64" size="174848" crc="5a26138d" sha1="a20b52cb9c086d8acfbd063814c412e6d04bf259" offset="0" />
</dataarea>
</part>
</software>

<software name="visicalc">
<description>VisiCalc</description>
<year>1981</year>
@ -47,16 +168,16 @@
<!--
VisiCalc(R) PET 2001, CBM 2001
Program Diskette and CBM 8032 32K
[VisiCorp logo] for 2040 and 4040
[VisiCorp logo] for 2040 and 4040
One of the VisiTM programs Diskette Drives
from VisiCorp by Software Arts, Inc.

VisiCorpTM
PERSONAL SOFTWARETM
VisiCorpTM
PERSONAL SOFTWARETM

Program Copyright (C) 1979, 1981
Software Arts, Inc. All Rights Reserved
20910-5511
20910-5511
-->
<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="174848">
@ -80,16 +201,16 @@
<!--
VisiCalc(R) PET 2001, CBM 2001
Program Diskette CBM 8032 and CBM 8096
[VisiCorp logo] for 8050 Diskette Drive
[VisiCorp logo] for 8050 Diskette Drive
One of the VisiTM programs by Software Arts, Inc.
from VisiCorp

VisiCorpTM
PERSONAL SOFTWARETM
VisiCorpTM
PERSONAL SOFTWARETM

Program Copyright (C) 1979, 1982
Software Arts, Inc. All Rights Reserved
20910-5612
20910-5612
-->
<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="533248">
@ -704,7 +825,7 @@
<info name="serial" value="CP2-620-580" />

<!--
C P / M ( R ) V e r s i o n 2 . 2
C P / M ( R ) V e r s i o n 2 . 2
( C ) 1 9 7 6 - 1 9 8 0 D i g i t a l R e s e a r c h
s e r i a l # C P 2 - 6 2 0 - 5 8 0 # #
8050 format / 4040 on back (C) 1982 Madison Computer
@ -728,7 +849,7 @@
<year>2012</year>
<publisher>Steve Gray</publisher>
<!-- These disks contain some programs typed in from the HSG manual,
along with new programs that were written in 2012. -->
along with new programs that were written in 2012. -->

<part name="flop1" interface="floppy_5_25">
<dataarea name="flop" size="174848">
@ -290,6 +290,14 @@

<publisher>Palo Alto ICs Inc.</publisher>

<part name="rom" interface="pet_b000_rom">
<!--
TODO: The screen fills with garbage when run like this:
$ mame64 pet2001n32 toolkit2
and started with:
SYS 45056
We need to check this and should add notes with the emulator
options required to run the various versions of Toolkit.
-->
<dataarea name="rom" size="0x800">
<rom name="toolkit2-b000.bin" size="0x800" crc="bf8d29f3" sha1="324508c0cec374c80387b4286ed1b0fee9159486" offset="0" />
</dataarea>

@ -3692,7 +3692,7 @@ Beyond that last category are the roms waiting to be classified.

<part name="cart" interface="snes_cart">
<feature name="slot" value="lorom" />
<dataarea name="rom" size="1048576">
<rom name="spellcraft (usa) (proto).sfc" size="1048576" crc="3daea8a1" sha1="fe4dcca5c3cb2e721f4dbf971cc06fe10f78e629" offset="0x000000" />
<rom name="spellcraft (usa) (proto).sfc" size="1048576" crc="47e900bf" sha1="67107c028eb23b830c9fdfc4136c416d793dd98f" offset="0x000000" />
</dataarea>
</part>
</software>

82
makefile
@ -27,6 +27,7 @@
# USE_DISPATCH_GL = 0
# DIRECTINPUT = 7
# USE_SDL = 1
# SDL_INI_PATH = .;$HOME/.mame/;ini;
# SDL2_MULTIAPI = 1
# NO_USE_MIDI = 1
# DONT_USE_NETWORK = 1
@ -49,9 +50,17 @@
# MAP = 1
# PROFILE = 1
# ARCHOPTS =
# OPT_FLAGS =
# LDOPTS =

# USE_SYSTEM_LIB_EXPAT = 1
# USE_SYSTEM_LIB_ZLIB = 1
# USE_SYSTEM_LIB_JPEG = 1
# USE_SYSTEM_LIB_FLAC = 1
# USE_SYSTEM_LIB_LUA = 1
# USE_SYSTEM_LIB_SQLITE3 = 1
# USE_SYSTEM_LIB_PORTMIDI = 1
# USE_SYSTEM_LIB_PORTAUDIO = 1

# MESA_INSTALL_ROOT = /opt/mesa
# SDL_INSTALL_ROOT = /opt/sdl2
@ -81,6 +90,8 @@

# QT_HOME = /usr/lib64/qt48/

# DRIVERS = src/mame/drivers/1942.c,src/mame/drivers/cops.c

-include useroptions.mak

###########################################################################
@ -229,6 +240,16 @@ endif
endif

ifeq ($(findstring arm,$(UNAME)),arm)
ARCHITECTURE :=
ifndef NOASM
NOASM := 1
endif
endif

# Emscripten
ifeq ($(findstring emcc,$(CC)),emcc)
TARGETOS := asmjs
ARCHITECTURE :=
ifndef NOASM
NOASM := 1
endif
@ -293,6 +314,34 @@ ifndef USE_SYSTEM_LIB_EXPAT
PARAMS += --with-bundled-expat
endif

ifndef USE_SYSTEM_LIB_ZLIB
PARAMS += --with-bundled-zlib
endif

ifndef USE_SYSTEM_LIB_JPEG
PARAMS += --with-bundled-jpeg
endif

ifndef USE_SYSTEM_LIB_FLAC
PARAMS += --with-bundled-flac
endif

ifndef USE_SYSTEM_LIB_LUA
PARAMS += --with-bundled-lua
endif

ifndef USE_SYSTEM_LIB_SQLITE3
PARAMS += --with-bundled-sqlite3
endif

ifndef USE_SYSTEM_LIB_PORTMIDI
PARAMS += --with-bundled-portmidi
endif

ifndef USE_SYSTEM_LIB_PORTAUDIO
PARAMS += --with-bundled-portaudio
endif
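
# Note (added, hedged): each USE_SYSTEM_LIB_<name> switch above, when defined,
# suppresses the matching --with-bundled-<name> flag handed to GENie, so e.g.
#   make USE_SYSTEM_LIB_ZLIB=1
# should build against the system zlib instead of the copy in 3rdparty/zlib.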

#-------------------------------------------------
# distribution may change things
#-------------------------------------------------
@ -401,6 +450,10 @@ ifdef ARCHOPTS
PARAMS += --ARCHOPTS='$(ARCHOPTS)'
endif

ifdef OPT_FLAGS
PARAMS += --OPT_FLAGS='$(OPT_FLAGS)'
endif

ifdef MAP
PARAMS += --MAP='$(MAP)'
endif
@ -469,6 +522,10 @@ ifdef USE_SDL
PARAMS += --USE_SDL='$(USE_SDL)'
endif

ifdef SDL_INI_PATH
PARAMS += --SDL_INI_PATH='$(SDL_INI_PATH)'
endif

ifdef CYGWIN_BUILD
PARAMS += --CYGWIN_BUILD='$(CYGWIN_BUILD)'
endif
@ -557,6 +614,10 @@ ifdef QT_HOME
PARAMS += --QT_HOME='$(QT_HOME)'
endif

ifdef DRIVERS
PARAMS += --DRIVERS='$(DRIVERS)'
endif

#-------------------------------------------------
# All scripts
#-------------------------------------------------
@ -579,9 +640,13 @@ SCRIPTS = scripts/genie.lua \
scripts/src/netlist.lua \
scripts/toolchain.lua \
scripts/src/osd/modules.lua \
scripts/target/$(TARGET)/$(SUBTARGET).lua \
$(wildcard src/osd/$(OSD)/$(OSD).mak) \
$(wildcard src/$(TARGET)/$(SUBTARGET).mak)

ifndef DRIVERS
SCRIPTS += scripts/target/$(TARGET)/$(SUBTARGET).lua
endif

ifdef REGENIE
SCRIPTS+= regenie
endif
@ -632,7 +697,7 @@ CHECK_CLANG :=
else
GCC_VERSION := $(shell $(subst @,,$(CC)) -dumpversion 2> /dev/null)
ifneq ($(OS),solaris)
CLANG_VERSION := $(shell clang --version 2> /dev/null | grep 'LLVM [0-9]\.[0-9]' -o | grep '[0-9]\.[0-9]' -o | head -n 1)
CLANG_VERSION := $(shell clang --version 2> /dev/null | head -n 1 | grep '[0-9]\.[0-9]' -o | tail -n 1)
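# Note (added, hedged): the old pipeline above only matched banners containing
# an "LLVM x.y" token; the rewritten one scans the first line of the version
# banner instead, so it should also cope with banners like "clang version 3.x".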
endif
PYTHON_AVAILABLE := $(shell $(PYTHON) --version > /dev/null 2>&1 && echo python)
CHECK_CLANG := $(shell gcc --version 2> /dev/null | grep 'clang' | head -n 1)
@ -839,13 +904,14 @@ $(PROJECTDIR)/gmake-linux/Makefile: makefile $(SCRIPTS) $(GENIE)
linux_x64: generate $(PROJECTDIR)/gmake-linux/Makefile
	$(SILENT) $(MAKE) $(MAKEPARAMS) -C $(PROJECTDIR)/gmake-linux config=$(CONFIG)64

.PHONY: linux
linux: linux_x86

.PHONY: linux_x86
linux_x86: generate $(PROJECTDIR)/gmake-linux/Makefile
	$(SILENT) $(MAKE) $(MAKEPARAMS) -C $(PROJECTDIR)/gmake-linux config=$(CONFIG)32

.PHONY: linux
linux: generate $(PROJECTDIR)/gmake-linux/Makefile
	$(SILENT) $(MAKE) $(MAKEPARAMS) -C $(PROJECTDIR)/gmake-linux config=$(CONFIG)

#-------------------------------------------------
# gmake-linux-clang
#-------------------------------------------------
@ -1043,8 +1109,12 @@ CPPCHECK_PARAMS += -Isrc/osd/modules/render
CPPCHECK_PARAMS += -Isrc/osd/windows
CPPCHECK_PARAMS += -Isrc/emu/cpu/m68000
CPPCHECK_PARAMS += -I3rdparty
ifndef USE_SYSTEM_LIB_LUA
CPPCHECK_PARAMS += -I3rdparty/lua/src
endif
ifndef USE_SYSTEM_LIB_ZLIB
CPPCHECK_PARAMS += -I3rdparty/zlib
endif
CPPCHECK_PARAMS += -I3rdparty/bgfx/include
CPPCHECK_PARAMS += -I3rdparty/bx/include
CPPCHECK_PARAMS += -Ibuild/generated/emu
@ -1057,7 +1127,9 @@ CPPCHECK_PARAMS += -DMAME_DEBUG
CPPCHECK_PARAMS += -DMAME_PROFILER
CPPCHECK_PARAMS += -DCRLF=3
CPPCHECK_PARAMS += -DLSB_FIRST
ifndef USE_SYSTEM_LIB_FLAC
CPPCHECK_PARAMS += -DFLAC__NO_DLL
endif
CPPCHECK_PARAMS += -DNATIVE_DRC=drcbe_x64
CPPCHECK_PARAMS += -DLUA_COMPAT_APIINTCASTS
CPPCHECK_PARAMS += -DWIN32

450
nl_examples/congo_bongo.c
Normal file
@ -0,0 +1,450 @@
#include "netlist/devices/net_lib.h"
#include "netlist/devices/nld_system.h"
#include "netlist/analog/nld_bjt.h"
#include "netlist/analog/nld_twoterm.h"

/* ----------------------------------------------------------------------------
 * Library section header START
 * ---------------------------------------------------------------------------*/

#ifndef __PLIB_PREPROCESSOR__

#define LM358_DIP(_name) \
NET_REGISTER_DEV_X(LM358_DIP, _name)

#define G501534_DIP(_name) \
NET_REGISTER_DEV_X(G501534_DIP, _name)


NETLIST_EXTERNAL(congob_lib)

#endif

/* ----------------------------------------------------------------------------
 * Library section header END
 * ---------------------------------------------------------------------------*/


NETLIST_START(dummy)
// EESCHEMA NETLIST VERSION 1.1 (SPICE FORMAT) CREATION DATE: WED 01 JUL 2015 11:09:25 PM CEST
// TO EXCLUDE A COMPONENT FROM THE SPICE NETLIST ADD [SPICE_NETLIST_ENABLED] USER FIELD SET TO: N
// TO REORDER THE COMPONENT SPICE NODE SEQUENCE ADD [SPICE_NODE_SEQUENCE] USER FIELD AND DEFINE SEQUENCE: 2,1,0
// SHEET NAME:/
// IGNORED O_AUDIO0: O_AUDIO0 64 0
// .END

SOLVER(Solver, 24000)
PARAM(Solver.ACCURACY, 1e-7)
PARAM(Solver.NR_LOOPS, 90)
PARAM(Solver.SOR_FACTOR, 0.001)
PARAM(Solver.GS_LOOPS, 1)
//PARAM(Solver.GS_THRESHOLD, 99)
PARAM(Solver.ITERATIVE, "SOR")
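
/* Added gloss (hedged, not part of the upstream file): the solver steps at
 * 24000 Hz, the sample rate this example targets; ACCURACY is the convergence
 * threshold, NR_LOOPS caps Newton-Raphson iterations per step, and
 * ITERATIVE "SOR" selects successive over-relaxation, tuned by SOR_FACTOR
 * and GS_LOOPS.
 */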

LOCAL_SOURCE(congob_lib)
INCLUDE(congob_lib)

TTL_INPUT(I_BASS_DRUM0, 0)
//CLOCK(I_BASS_DRUM0, 2)
TTL_INPUT(I_CONGA_H0, 0)
//CLOCK(I_CONGA_H0, 2)
TTL_INPUT(I_CONGA_L0, 0)
//CLOCK(I_CONGA_L0, 2)
//TTL_INPUT(I_GORILLA0, 0)
CLOCK(I_GORILLA0, 2)
TTL_INPUT(I_RIM0, 0)
//CLOCK(I_RIM0, 2)

ALIAS(I_V0.Q, GND.Q)

ANALOG_INPUT(I_V12, 12)
ANALOG_INPUT(I_V5, 5)
ANALOG_INPUT(I_V6, 6)

/* temporary output stage */
RES(RO, RES_K(50))
CAP(CO, CAP_U(10))

NET_C(R94.1, CO.1)
NET_C(CO.2, RO.1)
NET_C(RO.2, GND)
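/* Worked numbers for this stage (added sketch, not in the original): CO and RO
 * form a DC-blocking high-pass, fc = 1 / (2 * pi * R * C)
 *     = 1 / (2 * pi * 50e3 * 10e-6) ~= 0.32 Hz,
 * so essentially the full audio band passes and only the DC offset is removed.
 */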

// FIXME: Same as 1N4148
NET_MODEL(".model 1S2075 D(Is=2.52n Rs=.568 N=1.752 Cjo=4p M=.4 tt=20n Iave=200m Vpk=75)")
NET_MODEL(".model 2SC1941 NPN(IS=46.416f BF=210 NF=1.0022 VAF=600 IKF=500m ISE=60f NE=1.5 BR=2.0122 NR=1.0022 VAR=10G IKR=10G ISC=300p NC=2 RB=13.22 IRB=10G RBM=13.22 RE=100m RC=790m CJE=26.52p VJE=900m MJE=518m TF=1.25n XTF=10 VTF=10 ITF=500m PTF=0 CJC=4.89p VJC=750m MJC=237m XCJC=500m TR=100n CJS=0 VJS=750m MJS=500m XTB=1.5 EG=1.11 XTI=3 KF=0 AF=1 FC=500m)")

INCLUDE(CongoBongo_schematics)

/* The opamp actually has an FPF of about 500k. This doesn't work here and causes oscillations.
 * FPF here therefore about half the Solver clock.
 */
PARAM(XU16.B.model, "MB3614_SLOW")
PARAM(XU17.C.model, "MB3614_SLOW")

OPTIMIZE_FRONTIER(C51.1, RES_K(20), 50)
OPTIMIZE_FRONTIER(R77.2, RES_K(20), 50)

OPTIMIZE_FRONTIER(C25.2, RES_K(240), 50)
OPTIMIZE_FRONTIER(C29.2, RES_K(390), 50)
OPTIMIZE_FRONTIER(C37.2, RES_K(390), 50)
OPTIMIZE_FRONTIER(C44.2, RES_K(200), 50)

OPTIMIZE_FRONTIER(R90.2, RES_K(100), 50)
OPTIMIZE_FRONTIER(R92.2, RES_K(15), 50)
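/* Added note (hedged): OPTIMIZE_FRONTIER(node, R, factor) appears to cut the
 * network at a high-impedance coupling point so the matrix solver can treat
 * the two sides as smaller independent systems; the resistance given should
 * approximate the impedance seen at the cut so the response is preserved.
 */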

NETLIST_END()

NETLIST_START(CongoBongo_schematics)
CAP(C20, CAP_N(68))
CAP(C21, CAP_U(1))
CAP(C22, CAP_U(47))
CAP(C23, CAP_N(100))
CAP(C24, CAP_N(100))
CAP(C25, CAP_U(1))
CAP(C26, CAP_N(68))
CAP(C27, CAP_N(33))
CAP(C28, CAP_U(47))
CAP(C29, CAP_U(1))
CAP(C30, CAP_N(33))
CAP(C31, CAP_N(33))
CAP(C32, CAP_N(68))
CAP(C33, CAP_N(33))
CAP(C34, CAP_U(47))
CAP(C35, CAP_N(33))
CAP(C36, CAP_N(33))
CAP(C37, CAP_U(1))
CAP(C38, CAP_N(10))
CAP(C39, CAP_N(3.3))
CAP(C40, CAP_U(2.2))
CAP(C41, CAP_N(6.8))
CAP(C42, CAP_N(6.8))
CAP(C43, CAP_N(47))
CAP(C44, CAP_U(1))
CAP(C45, CAP_U(33))
CAP(C46, CAP_N(100))
CAP(C47, CAP_P(470))
CAP(C48, CAP_N(1.5))
CAP(C49, CAP_P(220))
CAP(C50, CAP_N(3.9))
CAP(C51, CAP_U(1))
CAP(C52, CAP_U(1))
CAP(C53, CAP_U(1))
CAP(C54, CAP_U(1))
CAP(C55, CAP_U(1))
CAP(C56, CAP_U(10))
CAP(C57, CAP_N(47))
CAP(C58, CAP_N(22))
CAP(C59, CAP_U(10))
CAP(C60, CAP_N(22))
CAP(C62, CAP_N(22))
CAP(C61, CAP_U(1))
DIODE(D1, "1S2075")
DIODE(D2, "1S2075")
DIODE(D3, "1S2075")
DIODE(D4, "1S2075")
DIODE(D5, "1S2075")
DIODE(D6, "1S2075")
DIODE(D7, "1S2075")
DIODE(D8, "1S2075")
QBJT_EB(Q2, "2SC1941")
RES(R21, RES_K(10))
RES(R22, RES_K(47))
RES(R23, RES_K(47))
RES(R24, RES_K(10))
RES(R25, RES_K(47))
RES(R26, RES_K(22))
RES(R27, RES_K(10))
RES(R28, RES_K(470))
RES(R29, RES_K(1))
RES(R30, RES_K(240))
RES(R31, RES_K(10))
RES(R32, RES_K(47))
RES(R33, RES_K(47))
RES(R34, RES_K(47))
RES(R35, RES_K(47))
RES(R36, RES_K(22))
RES(R37, RES_K(10))
RES(R38, RES_M(1))
RES(R39, 330)
RES(R40, RES_K(390))
RES(R41, RES_K(10))
RES(R42, RES_K(47))
RES(R43, RES_K(47))
RES(R44, RES_K(47))
RES(R45, RES_K(47))
RES(R46, RES_K(22))
RES(R47, RES_K(10))
RES(R48, RES_M(1))
RES(R49, 220)
RES(R50, RES_K(390))
RES(R51, RES_K(10))
RES(R52, RES_K(22))
RES(R53, RES_K(22))
RES(R54, RES_K(22))
RES(R55, RES_K(22))
RES(R56, RES_K(10))
RES(R57, RES_K(4.7))
RES(R58, RES_M(1))
RES(R59, 470)
RES(R60, RES_M(2.2))
RES(R61, RES_M(2.2))
RES(R62, RES_K(200))
RES(R63, RES_K(22))
RES(R64, RES_K(22))
RES(R65, RES_K(20))
RES(R66, RES_K(20))
RES(R67, RES_K(20))
RES(R68, RES_K(20))
RES(R69, RES_K(20))
RES(R70, RES_K(100))
RES(R71, RES_K(150))
RES(R72, RES_K(330))
RES(R73, RES_K(1))
RES(R74, RES_K(1))
RES(R75, RES_K(470))
RES(R76, RES_K(10))
RES(R77, RES_K(20))
RES(R78, RES_K(47))
RES(R79, RES_K(22))
RES(R80, RES_K(20))
RES(R81, RES_K(10))
RES(R82, RES_K(100))
RES(R83, RES_K(51))
RES(R84, RES_K(51))
RES(R85, RES_K(51))
RES(R86, RES_K(51))
RES(R87, RES_K(100))
RES(R88, RES_K(2.2))
RES(R89, RES_K(10))
RES(R90, RES_K(100))
RES(R91, RES_K(10))
RES(R92, RES_K(15))
RES(R93, RES_K(15))
RES(R94, RES_K(51))
MB3614_DIP(XU13)
G501534_DIP(XU15)
MB3614_DIP(XU16)
MB3614_DIP(XU17)
CD4001_DIP(XU18)
CD4538_DIP(XU19)
MM5837_DIP(XU20)
TTL_7416_DIP(XU6)

NET_C(D1.A, C21.2, R23.1)
NET_C(D1.K, C20.1, R22.1)
NET_C(XU13.1, C37.2, C36.1, R48.1)
NET_C(XU13.2, C35.2, R48.2)
NET_C(XU13.3, R44.1, R46.2, R45.1)
NET_C(XU13.4, R27.1, R21.1, R37.1, R31.1, R47.1, R41.1, R57.1, R51.1, C46.2, C45.2, XU17.4, R80.2, XU16.4, XU20.4, XU15.12, I_V12.Q)
NET_C(XU13.5, R54.1, R56.2, R55.1)
NET_C(XU13.6, C41.2, R58.2, R60.2)
NET_C(XU13.7, C44.2, C42.1, R58.1, R61.1)
NET_C(XU13.8, C29.2, C31.1, R38.1)
NET_C(XU13.9, C30.2, R38.2)
NET_C(XU13.10, R34.1, R36.2, R35.1)
NET_C(XU13.11, C22.2, R29.2, R25.2, R23.2, R22.2, XU6.1, XU6.3, XU6.7, C28.2, R39.2, R35.2, R33.2, R32.2, C34.2, R49.2, R45.2, R43.2, R42.2, C40.2, R59.2, R55.2, R53.2, R52.2, C43.2, R69.1, R64.1, C49.2, C48.2, C47.2, C46.1, C45.1, XU17.11, XU19.1, XU19.4, XU19.8, XU19.12, XU19.15, R81.1, C56.2, C55.2, C53.2, C52.2, XU18.1, XU18.2, XU18.7, XU18.12, XU18.13, C54.2, XU16.11, R84.1, R88.1, Q2.E, C58.2, C60.2, XU20.1, XU20.2, XU15.4, I_V0.Q)
NET_C(XU13.12, R24.1, R26.2, R25.1)
NET_C(XU13.13, C23.2, R28.2)
NET_C(XU13.14, C25.2, C24.1, R28.1)
NET_C(C25.1, R30.2)
NET_C(C24.2, C23.1, R29.1)
NET_C(C21.1, R24.2)
NET_C(C20.2, R21.2, XU6.8)
NET_C(C22.1, R27.2, R26.1)
NET_C(R30.1, R40.1, R50.1, R62.1, R94.1)
//NET_C(XU6.2, XU6.4, XU19.7, XU18.3, XU18.11, XU15.5, XU15.6, XU15.7, XU15.8, XU15.9, XU15.10, XU15.11, XU15.14)
NET_C(XU6.5, I_CONGA_L0.Q)
NET_C(XU6.6, C26.2, R31.2)
NET_C(XU6.9, I_BASS_DRUM0.Q)
NET_C(XU6.10, C38.2, R51.2)
NET_C(XU6.11, I_RIM0.Q)
NET_C(XU6.12, C32.2, R41.2)
NET_C(XU6.13, I_CONGA_H0.Q)
NET_C(XU6.14, D5.K, XU19.16, R70.2, R76.2, R71.2, XU18.14, I_V5.Q)
NET_C(D2.A, C27.2, R33.1)
NET_C(D2.K, C26.1, R32.1)
NET_C(C29.1, R40.2)
NET_C(C31.2, C30.1, R39.1)
NET_C(C27.1, R34.2)
NET_C(C28.1, R37.2, R36.1)
NET_C(D3.A, C33.2, R43.1)
NET_C(D3.K, C32.1, R42.1)
NET_C(C37.1, R50.2)
NET_C(C36.2, C35.1, R49.1)
NET_C(C33.1, R44.2)
NET_C(C34.1, R47.2, R46.1)
NET_C(D4.A, C39.2, R53.1)
NET_C(D4.K, C38.1, R52.1)
NET_C(C44.1, R62.2)
NET_C(C42.2, C41.1, R59.1)
NET_C(C39.1, R54.2)
NET_C(C40.1, R57.2, R56.1)
NET_C(R60.1, R61.2, C43.1)
NET_C(R63.1, R64.2, C47.1, D5.A, XU18.5, XU18.6)
NET_C(R63.2, XU20.3)
NET_C(R65.1, R66.2, C48.1)
NET_C(R65.2, XU18.4)
NET_C(R66.1, R67.2, C50.2)
NET_C(R67.1, C49.1, XU17.10)
NET_C(R68.1, R69.2, XU17.9)
NET_C(R68.2, C50.1, XU17.8, C51.1)
NET_C(XU17.1, XU16.6, C62.1)
NET_C(XU17.2, R82.1, C62.2, R85.2)
NET_C(XU17.3, R83.1, R84.2)
NET_C(XU17.5, C55.1, R72.1, R73.1)
NET_C(XU17.6, XU17.7, R77.2)
NET_C(XU17.12, R80.1, R81.2, C56.1)
NET_C(XU17.13, R78.1, R79.2, R77.1)
NET_C(XU17.14, R79.1, R82.2, R83.2)
NET_C(C51.2, R78.2)
NET_C(XU19.2, R70.1, C52.1)
NET_C(XU19.3, XU19.13, R76.1)
NET_C(XU19.5, XU19.11, I_GORILLA0.Q)
NET_C(XU19.6, XU18.9)
NET_C(XU19.9, XU18.8)
NET_C(XU19.10, D7.A, R75.2)
NET_C(XU19.14, R71.1, C53.1)
NET_C(R72.2, D6.A, XU18.10)
NET_C(R73.2, D6.K)
NET_C(D7.K, R74.2)
NET_C(R74.1, R75.1, C54.1, XU16.10)
NET_C(XU16.1, R91.1, R92.2)
NET_C(XU16.2, R90.1, R91.2)
NET_C(XU16.3, R86.2, I_V6.Q)
NET_C(XU16.5, R86.1, R87.2)
NET_C(XU16.7, R87.1, D8.A, R90.2)
NET_C(XU16.8, XU16.9, XU15.13)
NET_C(XU16.12, R93.1, C58.1)
NET_C(XU16.13, XU16.14, C57.1, C59.2)
NET_C(R85.1, Q2.C)
NET_C(R89.1, D8.K)
NET_C(R89.2, R88.2, Q2.B)
NET_C(R92.1, C57.2, R93.2)
NET_C(C59.1, XU15.1)
NET_C(C60.1, XU15.2)
NET_C(XU15.3, C61.2)
NET_C(C61.1, R94.2)
NETLIST_END()

NETLIST_START(opamp_mod)

/* Opamp model from
 *
 * http://www.ecircuitcenter.com/Circuits/opmodel1/opmodel1.htm
 *
 * MB3614 Unity Gain frequency is about 500 kHz and the first pole frequency
 * about 5 Hz. We have to keep the Unity Gain Frequency below our sampling
 * frequency of 24 kHz.
 *
 * Simple Opamp Model Calculation
 *
 * First Pole Frequency    5 Hz
 * Unity Gain Frequency    11,000 Hz
 * RP                      100,000 Ohm
 * DC Gain / Aol           2200
 * CP                      0.318 uF
 * KG                      0.022
 *
 */
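
/* Worked check of the table above (added sketch, not in the original):
 *   G (KG) = Aol / RP          = 2200 / 100000       = 0.022 A/V
 *   CP     = 1 / (2*pi*FPF*RP) = 1 / (2*pi*5*100000) ~= 0.318 uF
 *   UGF    = Aol * FPF         = 2200 * 5 Hz         = 11,000 Hz
 * which is consistent with the #if 0 parameter set below (G1.G = 0.0022 with
 * RP1 = 1e6 keeps the same Aol = 2200).
 */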

/* Terminal definitions for calling netlists */

ALIAS(PLUS, G1.IP) // Positive input
ALIAS(MINUS, G1.IN) // Negative input
ALIAS(OUT, EBUF.OP) // Opamp output ...

AFUNC(fUH, 1, "A0 1.2 -")
AFUNC(fUL, 1, "A0 1.2 +")
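/* Added note (hedged): AFUNC expressions here are RPN, so "A0 1.2 -" computes
 * A0 - 1.2 and "A0 1.2 +" computes A0 + 1.2. fUH/fUL therefore track points
 * 1.2 V inside the supply rails and later feed the DP/DN clamp diodes that
 * limit the modelled output swing.
 */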

ALIAS(VCC, fUH.A0) // VCC terminal
ALIAS(GND, fUL.A0) // VGND terminal

AFUNC(fVREF, 2, "A0 A1 + 0.5 *")
NET_C(fUH.A0, fVREF.A0)
NET_C(fUL.A0, fVREF.A1)

NET_C(EBUF.ON, fVREF)
/* The opamp model */

LVCCS(G1)
PARAM(G1.RI, RES_K(1000))
#if 0
PARAM(G1.G, 0.0022)
RES(RP1, 1e6)
CAP(CP1, 0.0318e-6)
#else
PARAM(G1.G, 0.002)
PARAM(G1.CURLIM, 0.002)
RES(RP1, 9.5e6)
CAP(CP1, 0.0033e-6)
#endif
VCVS(EBUF)
PARAM(EBUF.RO, 50)
PARAM(EBUF.G, 1)

NET_C(G1.ON, fVREF)
NET_C(RP1.2, fVREF)
NET_C(CP1.2, fVREF)
NET_C(EBUF.IN, fVREF)

NET_C(RP1.1, G1.OP)
NET_C(CP1.1, RP1.1)

DIODE(DP,".model tt D(IS=1e-15 N=1)")
DIODE(DN,".model tt D(IS=1e-15 N=1)")
#if 1
NET_C(DP.K, fUH.Q)
NET_C(fUL.Q, DN.A)
NET_C(DP.A, DN.K, RP1.1)
#else
/*
 * This doesn't gain any performance by decreasing iteration loops.
 * To the contrary, it significantly increases iterations.
 */
RES(RH1, 0.1)
RES(RL1, 0.1)
NET_C(DP.K, RH1.1)
NET_C(RH1.2, fUH.Q)
NET_C(fUL.Q, RL1.1)
NET_C(RL1.2, DN.A)
NET_C(DP.A, DN.K, RP1.1)

#endif
NET_C(EBUF.IP, RP1.1)

NETLIST_END()


NETLIST_START(G501534_DIP)
AFUNC(f, 2, "A0 A1 0.2 * *")
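/* Added gloss (hedged): the RPN body "A0 A1 0.2 * *" evaluates to
 * A0 * A1 * 0.2, i.e. the output follows the input on pin 1 (A0) scaled by
 * the control voltage on pin 13 (A1) times 0.2 - a crude behavioural
 * stand-in for the G501534 gain-control IC instantiated as XU15 above.
 */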

/*
 * 12: VCC
 * 4:  GND
 * 1:  IN
 * 3:  OUT
 * 13: CV
 * 2:  RDL - connected via Capacitor to ground
 */

DUMMY_INPUT(DU1)
DUMMY_INPUT(DU2)
DUMMY_INPUT(DU3)

RES(RO, 1000)

ALIAS(12, DU1.I)
ALIAS(4, DU2.I)
ALIAS(2, DU3.I)
ALIAS(1, f.A0)
ALIAS(13, f.A1)
NET_C(f.Q, RO.1)
ALIAS(3, RO.2)

NETLIST_END()

NETLIST_START(congob_lib)

LOCAL_LIB_ENTRY(G501534_DIP)

NETLIST_END()