Compare commits


No commits in common. "master" and "v2.13" have entirely different histories.

77 changed files with 1150 additions and 7542 deletions


@ -1,15 +0,0 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
groups:
github-actions:
patterns:
- "*"


@ -1,31 +0,0 @@
name: oss-fuzz
on:
pull_request:
branches: [ master ]
paths:
- '**.c'
- '**.h'
jobs:
fuzz:
runs-on: ubuntu-latest
steps:
- name: Build Fuzzers
id: build
uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master
with:
oss-fuzz-project-name: 'jansson'
dry-run: false
- name: Run Fuzzers
uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master
with:
oss-fuzz-project-name: 'jansson'
fuzz-seconds: 600
dry-run: false
- name: Upload Crash
uses: actions/upload-artifact@v4
if: failure() && steps.build.outcome == 'success'
with:
name: artifacts
path: ./out/artifacts


@ -1,67 +0,0 @@
name: tests
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- run: ./scripts/clang-format-check
autotools:
strategy:
matrix:
os: ["ubuntu-latest", "macos-latest"]
cc: ["gcc", "clang"]
dtoa: ["yes", "no"]
runs-on: ${{ matrix.os }}
steps:
- if: ${{runner.os == 'macOS'}}
run: brew install autoconf automake libtool
- uses: actions/checkout@v4
- run: autoreconf -fi
- env:
CC: ${{ matrix.cc }}
CFLAGS: -Werror
run: ./configure --enable-dtoa=${{ matrix.dtoa }}
- run: make check
cmake:
strategy:
matrix:
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
cc: ["gcc", "clang"]
exclude:
- os: windows-latest
cc: gcc
- os: windows-latest
cc: clang
include:
- os: windows-latest
cc: 'msvc' # Doesn't really matter, MSVC is always used on Windows
runs-on: ${{matrix.os}}
steps:
- uses: actions/checkout@v4
- env:
CC: ${{matrix.cc}}
run: cmake .
- run: cmake --build .
- run: ctest --output-on-failure
valgrind:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- run: sudo apt update && sudo apt install valgrind
- run: cmake -DJANSSON_TEST_WITH_VALGRIND=ON .
- run: cmake --build .
- run: ctest --output-on-failure

.travis.yml (new file, 34 changed lines)

@ -0,0 +1,34 @@
env:
global:
- CLANG_FORMAT_VERSION=9
matrix:
- JANSSON_BUILD_METHOD=cmake JANSSON_CMAKE_OPTIONS="-DJANSSON_TEST_WITH_VALGRIND=ON" JANSSON_EXTRA_INSTALL="valgrind"
- JANSSON_BUILD_METHOD=autotools
- JANSSON_BUILD_METHOD=coverage JANSSON_CMAKE_OPTIONS="-DJANSSON_COVERAGE=ON -DJANSSON_COVERALLS=ON -DCMAKE_BUILD_TYPE=Debug" JANSSON_EXTRA_INSTALL="lcov curl"
- JANSSON_BUILD_METHOD=fuzzer
- JANSSON_BUILD_METHOD=lint CLANG_FORMAT=clang-format-9
dist: bionic
language: c
compiler:
- gcc
- clang
matrix:
exclude:
- compiler: clang
env: JANSSON_BUILD_METHOD=coverage JANSSON_CMAKE_OPTIONS="-DJANSSON_COVERAGE=ON -DJANSSON_COVERALLS=ON -DCMAKE_BUILD_TYPE=Debug" JANSSON_EXTRA_INSTALL="lcov curl"
- compiler: clang
env: JANSSON_BUILD_METHOD=fuzzer
- compiler: gcc
env: JANSSON_BUILD_METHOD=lint CLANG_FORMAT=clang-format-9
allow_failures:
- env: JANSSON_BUILD_METHOD=coverage JANSSON_CMAKE_OPTIONS="-DJANSSON_COVERAGE=ON -DJANSSON_COVERALLS=ON -DCMAKE_BUILD_TYPE=Debug" JANSSON_EXTRA_INSTALL="lcov curl"
install:
- sudo apt-get update -qq
- sudo apt-get install -y -qq cmake $JANSSON_EXTRA_INSTALL
- if [ "$TRAVIS_COMPILER" = "clang" ]; then sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y && wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add - && sudo apt-add-repository "deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-9 main" -y && sudo apt-get install -y -qq clang-9 clang-format-9; fi
script:
- if [ "$JANSSON_BUILD_METHOD" = "autotools" ]; then autoreconf -f -i && CFLAGS=-Werror ./configure && make check; fi
- if [ "$JANSSON_BUILD_METHOD" = "cmake" ]; then mkdir build && cd build && cmake $JANSSON_CMAKE_OPTIONS .. && cmake --build . && ctest --output-on-failure; fi
- if [ "$JANSSON_BUILD_METHOD" = "coverage" ]; then mkdir build && cd build && cmake $JANSSON_CMAKE_OPTIONS .. && cmake --build . && cmake --build . --target coveralls; fi
- if [ "$JANSSON_BUILD_METHOD" = "fuzzer" ]; then ./test/ossfuzz/travisoss.sh; fi
- if [ "$JANSSON_BUILD_METHOD" = "lint" ]; then ./scripts/clang-format-check; fi

CHANGES (64 changed lines)

@ -1,63 +1,3 @@
Version 2.14.1
==============
Released 2025-03-23
* Fixes:
- Fix thread safety of encoding and decoding when `uselocale` or `newlocale`
is used to switch locales inside the threads (#674, #675, #677. Thanks to
Bruno Haible for the report and help with fixing.)
- Use David M. Gay's `dtoa()` algorithm to avoid misprinting issues of real
numbers that are not exactly representable as a `double` (#680).
If this is not desirable, use `./configure --disable-dtoa` or `cmake
-DUSE_DTOA=OFF .`
* Build:
- Make test output nicer in CMake based builds (#683)
- Simplify tests (#685)
Version 2.14
============
Released 2021-09-09
* New Features:
- Add `json_object_getn`, `json_object_setn`, `json_object_deln`, and the
corresponding `nocheck` functions. (#520, by Maxim Zhukov)
* Fixes:
- Handle `sprintf` corner cases (#537, by Tobias Stoeckmann)
* Build:
- Symbol versioning for all exported symbols (#540, by Simon McVittie)
- Fix compiler warnings (#555, by Kelvin Lee)
* Documentation:
- Small fixes (#544, #546, by @i-ky)
- Sphinx 3 compatibility (#543, by Pierce Lopez)
Version 2.13.1
==============
Released 2020-05-07
* Build:
- Include `jansson_version_str()` and `jansson_version_cmp()` in
shared library. (#534)
- Include ``scripts/`` in tarball. (#535)
Version 2.13
============
@ -174,7 +114,7 @@ Released 2018-02-09
- Work around gcc's -Wimplicit-fallthrough.
- Fix CMake detection of ``sys/types.h`` header (#375).
- Fix CMake detection of `sys/types.h` header (#375).
- Fix `jansson.pc` generated by CMake to be more consistent with the one
generated using GNU Autotools (#368).
@ -638,7 +578,7 @@ Released 2011-10-06
- Fix identifier decoding under non-UTF-8 locales. (#35)
- `json_load_file()`: Open the input file in binary mode for maximum
compatibility.
compatiblity.
* Documentation:


@ -1,11 +1,58 @@
cmake_minimum_required (VERSION 3.10)
# Notes:
#
# Author: Paul Harris, June 2012
# Additions: Joakim Soderberg, February 2013
#
# Supports: building static/shared, release/debug/etc, can also build html docs
# and some of the tests.
# Note that its designed for out-of-tree builds, so it will not pollute your
# source tree.
#
# TODO 1: Finish implementing tests. api tests are working, but the valgrind
# variants are not flagging problems.
#
# TODO 2: There is a check_exports script that would try and incorporate.
#
# TODO 3: Consolidate version numbers, currently the version number is written
# into: * cmake (here) * autotools (the configure) * source code header files.
# Should not be written directly into header files, autotools/cmake can do
# that job.
#
# Brief intro on how to use cmake:
# > mkdir build (somewhere - we do out-of-tree builds)
# > use cmake, ccmake, or cmake-gui to configure the project. for linux, you
# can only choose one variant: release,debug,etc... and static or shared.
# >> example:
# >> cd build
# >> ccmake -i ../path_to_jansson_dir
# >> inside, configure your options. press C until there are no lines
# with * next to them.
# >> note, I like to configure the 'install' path to ../install, so I get
# self-contained clean installs I can point other projects to.
# >> press G to 'generate' the project files.
# >> make (to build the project)
# >> make install
# >> make test (to run the tests, if you enabled them)
#
# Brief description on how it works:
# There is a small hierarchy of CMakeLists.txt files which define how the
# project is built.
# Header file detection etc is done, and the results are written into config.h
# and jansson_config.h, which are generated from the corresponding
# config.h.cmake and jansson_config.h.cmake template files.
# The generated header files end up in the build directory - not in
# the source directory.
# The rest is down to the usual make process.
cmake_minimum_required (VERSION 3.1)
project(jansson C)
# Options
option(JANSSON_BUILD_SHARED_LIBS "Build shared libraries." OFF)
option(USE_URANDOM "Use /dev/urandom to seed the hash function." ON)
option(USE_WINDOWS_CRYPTOAPI "Use CryptGenRandom to seed the hash function." ON)
option(USE_DTOA "Use dtoa for optimal floating-point to string conversions." ON)
if (MSVC)
# This option must match the settings used in your program, in particular if you
@ -17,6 +64,8 @@ option(JANSSON_EXAMPLES "Compile example applications" ON)
if (UNIX)
option(JANSSON_COVERAGE "(GCC Only! Requires gcov/lcov to be installed). Include target for doing coverage analysis for the test suite. Note that -DCMAKE_BUILD_TYPE=Debug must be set" OFF)
option(JANSSON_COVERALLS "Generate coverage info for Coveralls" OFF)
option(JANSSON_COVERALLS_UPLOAD "Upload coverage info to Coveralls (Only works via Travis)" ON)
endif ()
# Set some nicer output dirs.
@ -36,10 +85,10 @@ endif()
# set (JANSSON_VERSION "2.3.1")
# set (JANSSON_SOVERSION 2)
set(JANSSON_DISPLAY_VERSION "2.14.1")
set(JANSSON_DISPLAY_VERSION "2.13")
# This is what is required to match the same numbers as automake's
set(JANSSON_VERSION "4.14.0")
set(JANSSON_VERSION "4.12.0")
set(JANSSON_SOVERSION 4)
# for CheckFunctionKeywords
@ -70,9 +119,17 @@ endif()
message("C compiler: ${CMAKE_C_COMPILER_ID}")
# Coverage only works with GCC for a debug build.
if (JANSSON_COVERALLS)
set(JANSSON_COVERAGE ON)
endif()
if (JANSSON_COVERAGE)
include(CodeCoverage)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
include(Coveralls)
# This adds coverage arguments to gcc/clang.
coveralls_turn_on_coverage()
endif()
check_include_files (endian.h HAVE_ENDIAN_H)
@ -94,9 +151,6 @@ check_function_exists (sched_yield HAVE_SCHED_YIELD)
# Check for the int-type includes
check_include_files (stdint.h HAVE_STDINT_H)
include (TestBigEndian)
TEST_BIG_ENDIAN(WORDS_BIGENDIAN)
# Check our 64 bit integer sizes
check_type_size (__int64 __INT64)
check_type_size (int64_t INT64_T)
@ -197,8 +251,6 @@ endif ()
# detect what to use for the 64 bit type.
# Note: I will prefer long long if I can get it, as that is what the automake system aimed for.
if (NOT DEFINED JSON_INT_T)
set (JSON_INTEGER_IS_LONG_LONG 1)
if (HAVE_LONG_LONG_INT AND (LONG_LONG_INT EQUAL 8))
set (JSON_INT_T "long long")
elseif (HAVE_INT64_T)
@ -221,7 +273,18 @@ if (NOT DEFINED JSON_INT_T)
endif ()
endif ()
# If locale.h and localeconv() are available, define to 1, otherwise to 0.
check_include_files (locale.h HAVE_LOCALE_H)
check_function_exists (localeconv HAVE_LOCALECONV)
if (HAVE_LOCALECONV AND HAVE_LOCALE_H)
set (JSON_HAVE_LOCALECONV 1)
else ()
set (JSON_HAVE_LOCALECONV 0)
endif()
# check if we have setlocale
check_function_exists(setlocale HAVE_SETLOCALE)
# Check what the inline keyword is.
@ -266,20 +329,20 @@ configure_file (${CMAKE_CURRENT_SOURCE_DIR}/cmake/jansson_config.h.cmake
file (COPY ${CMAKE_CURRENT_SOURCE_DIR}/src/jansson.h
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/include/)
add_definitions(-DJANSSON_USING_CMAKE)
# configure the private config file
configure_file (${CMAKE_CURRENT_SOURCE_DIR}/cmake/jansson_private_config.h.cmake
${CMAKE_CURRENT_BINARY_DIR}/private_include/jansson_private_config.h)
# and tell the source code to include it
add_definitions(-DHAVE_CONFIG_H)
include_directories (${CMAKE_CURRENT_BINARY_DIR}/include)
include_directories (${CMAKE_CURRENT_BINARY_DIR}/private_include)
# Configuration flags will be set on project later once we have defined the target
# Add the lib sources.
file(GLOB JANSSON_SRC src/*.c)
if (NOT USE_DTOA)
list(FILTER JANSSON_SRC EXCLUDE REGEX ".*dtoa\\.c$")
endif()
set(JANSSON_HDR_PRIVATE
${CMAKE_CURRENT_SOURCE_DIR}/src/hashtable.h
@ -303,44 +366,6 @@ if(JANSSON_BUILD_SHARED_LIBS)
${JANSSON_HDR_PUBLIC}
src/jansson.def)
# check if linker support --default-symver
list(APPEND CMAKE_REQUIRED_LIBRARIES "-Wl,--default-symver")
check_c_source_compiles(
"
int main (void)
{
return 0;
}
"
DSYMVER_WORKS
)
list(REMOVE_ITEM CMAKE_REQUIRED_LIBRARIES "-Wl,--default-symver")
if (SYMVER_WORKS)
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--default-symver")
else()
# some linkers may only support --version-script
file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/jansson.sym" "JANSSON_${JANSSON_SOVERSION} {
global:
*;
};
")
list(APPEND CMAKE_REQUIRED_LIBRARIES "-Wl,--version-script,${CMAKE_CURRENT_BINARY_DIR}/jansson.sym")
check_c_source_compiles(
"
int main (void)
{
return 0;
}
"
VSCRIPT_WORKS
)
list(REMOVE_ITEM CMAKE_REQUIRED_LIBRARIES "-Wl,--version-script,${CMAKE_CURRENT_BINARY_DIR}/jansson.sym")
if (VSCRIPT_WORKS)
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--version-script,${CMAKE_CURRENT_BINARY_DIR}/jansson.sym")
endif()
endif()
set_target_properties(jansson PROPERTIES
VERSION ${JANSSON_VERSION}
SOVERSION ${JANSSON_SOVERSION})
@ -353,20 +378,6 @@ else()
POSITION_INDEPENDENT_CODE true)
endif()
# Now target jansson is declared, set per-target values
target_compile_definitions(jansson PUBLIC JANSSON_USING_CMAKE)
target_compile_definitions(jansson PRIVATE HAVE_CONFIG_H)
target_include_directories(jansson
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_BINARY_DIR}/include>
INTERFACE $<INSTALL_INTERFACE:include>
)
add_library( jansson::jansson ALIAS jansson )
if (JANSSON_EXAMPLES)
add_executable(simple_parse "${CMAKE_CURRENT_SOURCE_DIR}/examples/simple_parse.c")
target_link_libraries(simple_parse jansson)
@ -490,15 +501,14 @@ if (NOT JANSSON_WITHOUT_TESTS)
set(api_tests
test_array
test_chaos
test_copy
test_chaos
test_dump
test_dump_callback
test_equal
test_fixed_size
test_load
test_load_callback
test_loadb
test_load_callback
test_number
test_object
test_pack
@ -546,11 +556,6 @@ if (NOT JANSSON_WITHOUT_TESTS)
if (IS_DIRECTORY ${TESTDIR})
get_filename_component(TNAME ${TESTDIR} NAME)
if ((USE_DTOA AND EXISTS ${TESTDIR}/skip_if_dtoa) OR
(NOT USE_DTOA AND EXISTS ${TESTDIR}/skip_unless_dtoa))
continue()
endif()
if (JANSSON_TEST_WITH_VALGRIND)
add_test(memcheck__${SUITE}__${TNAME}
${MEMCHECK_COMMAND} ${SUITE_TEST_CMD} ${TESTDIR})
@ -573,7 +578,16 @@ if (NOT JANSSON_WITHOUT_TESTS)
endforeach ()
if (JANSSON_COVERAGE)
SETUP_TARGET_FOR_COVERAGE(coverage coverage ctest)
setup_target_for_coverage(
coverage # Coverage make target "make coverage".
coverage # Name of output directory.
make # Name of test runner executable.
test) # Arguments to the test runner above (make test).
if (JANSSON_COVERALLS)
set(COVERAGE_SRCS ${JANSSON_SRC})
coveralls_setup("${COVERAGE_SRCS}" ${JANSSON_COVERALLS_UPLOAD})
endif ()
endif ()
# Enable using "make check" just like the autotools project.
@ -616,7 +630,9 @@ foreach(p LIB BIN INCLUDE CMAKE)
endforeach()
# Generate the config file for the build-tree.
set(JANSSON__INCLUDE_DIRS "${CMAKE_CURRENT_BINARY_DIR}/include")
set(JANSSON__INCLUDE_DIRS
"${CMAKE_CURRENT_SOURCE_DIR}/include"
"${CMAKE_CURRENT_BINARY_DIR}/include")
set(JANSSON_INCLUDE_DIRS ${JANSSON__INCLUDE_DIRS} CACHE PATH "Jansson include directories")
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/janssonConfig.cmake.in
${CMAKE_CURRENT_BINARY_DIR}/janssonConfig.cmake


@ -1,3 +0,0 @@
Hi, and thanks for contributing!
Please remember to add tests and documentation for new functionality. Backwards incompatible changes or features that are not directly related to JSON are likely to be rejected.

LICENSE (26 changed lines)

@ -1,11 +1,4 @@
# License
This project is licensed under the MIT license, except where otherwise noted.
The full text of the MIT license is included below.
## MIT License
Copyright (c) 2009-2024 Petri Lehtinen <petri@digip.org>
Copyright (c) 2009-2020 Petri Lehtinen <petri@digip.org>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@ -24,20 +17,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
## Exceptions
### `src/dtoa.c`
Copyright (c) 1991, 2000, 2001 by Lucent Technologies.
Permission to use, copy, modify, and distribute this software for any
purpose without fee is hereby granted, provided that this entire notice
is included in all copies of any software which is or includes a copy
or modification of this software and in all copies of the supporting
documentation for such software.
THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR IMPLIED
WARRANTY. IN PARTICULAR, NEITHER THE AUTHOR NOR LUCENT MAKES ANY
REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE MERCHANTABILITY
OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR PURPOSE.


@ -1,4 +1,4 @@
EXTRA_DIST = CHANGES LICENSE README.rst CMakeLists.txt cmake android examples scripts
EXTRA_DIST = CHANGES LICENSE README.rst CMakeLists.txt cmake android examples
SUBDIRS = doc src test
# "make distcheck" builds the dvi target, so use it to check that the


@ -1,10 +1,14 @@
Jansson README
==============
.. |tests| image:: https://github.com/akheron/jansson/workflows/tests/badge.svg
.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/lmhkkc4q8cwc65ko
.. image:: https://travis-ci.org/akheron/jansson.png
:target: https://travis-ci.org/akheron/jansson
.. image:: https://ci.appveyor.com/api/projects/status/lmhkkc4q8cwc65ko
:target: https://ci.appveyor.com/project/akheron/jansson
|tests| |appveyor|
.. image:: https://coveralls.io/repos/akheron/jansson/badge.png?branch=master
:target: https://coveralls.io/r/akheron/jansson?branch=master
Jansson_ is a C library for encoding, decoding and manipulating JSON
data. Its main features and design principles are:
@ -22,11 +26,24 @@ data. Its main features and design principles are:
Jansson is licensed under the `MIT license`_; see LICENSE in the
source distribution for details.
Compilation and Installation
----------------------------
If you obtained a ``jansson-X.Y.tar.*`` tarball from GitHub Releases, just use
the standard autotools commands::
You can download and install Jansson using the `vcpkg <https://github.com/Microsoft/vcpkg/>`_ dependency manager:
.. code-block:: bash
git clone https://github.com/Microsoft/vcpkg.git
cd vcpkg
./bootstrap-vcpkg.sh
./vcpkg integrate install
vcpkg install jansson
The Jansson port in vcpkg is kept up to date by Microsoft team members and community contributors. If the version is out of date, please `create an issue or pull request <https://github.com/Microsoft/vcpkg/>`_ on the vcpkg repository.
If you obtained a `source tarball`_ from the "Releases" section of the main
site just use the standard autotools commands::
$ ./configure
$ make
@ -36,8 +53,9 @@ To run the test suite, invoke::
$ make check
If the source has been checked out from a Git repository, the ``configure``
script has to be generated first. The easiest way is to use autoreconf::
If the source has been checked out from a Git repository, the
./configure script has to be generated first. The easiest way is to
use autoreconf::
$ autoreconf -i
@ -56,15 +74,8 @@ Then, point your browser to ``doc/_build/html/index.html``. Sphinx_
1.0 or newer is required to generate the documentation.
Community
---------
* `Documentation <http://jansson.readthedocs.io/en/latest/>`_
* `Issue tracker <https://github.com/akheron/jansson/issues>`_
* `Mailing list <http://groups.google.com/group/jansson-users>`_
* `Wiki <https://github.com/akheron/jansson/wiki>`_ contains some development documentation
.. _Jansson: http://www.digip.org/jansson/
.. _`Comprehensive documentation`: http://jansson.readthedocs.io/en/latest/
.. _`MIT license`: http://www.opensource.org/licenses/mit-license.php
.. _`source tarball`: http://www.digip.org/jansson#releases
.. _Sphinx: http://sphinx.pocoo.org/


@ -1,9 +0,0 @@
# Security Policy
## Supported Versions
Latest released version.
## Reporting a Vulnerability
Send an email to petri@digip.org.


@ -32,6 +32,10 @@
otherwise to 0. */
#define JSON_INTEGER_IS_LONG_LONG 1
/* If locale.h and localeconv() are available, define to 1,
otherwise to 0. */
#define JSON_HAVE_LOCALECONV 0
/* Maximum recursion depth for parsing JSON input.
This limits the depth of e.g. array-within-array constructions. */
#define JSON_PARSER_MAX_DEPTH 2048


@ -7,8 +7,6 @@ environment:
- VS: Visual Studio 14 2015
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
VS: Visual Studio 15 2017
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
VS: Visual Studio 16 2019
build_script:
- md build


@ -111,9 +111,9 @@ FUNCTION(SETUP_TARGET_FOR_COVERAGE _targetname _outputname _testrunner)
# Capturing lcov counters and generating report
COMMAND ${LCOV_PATH} --directory . --capture --output-file ${_outputname}.info --rc lcov_branch_coverage=1
COMMAND ${LCOV_PATH} --remove ${_outputname}.info '*/build/include/*' '*/test/*' '/usr/include/*' --output-file ${_outputname}.info --rc lcov_branch_coverage=1
# COMMAND ${GENHTML_PATH} --branch-coverage -o ${_outputname} ${_outputname}.info.cleaned
# COMMAND ${CMAKE_COMMAND} -E remove ${_outputname}.info ${_outputname}.info.cleaned
COMMAND ${LCOV_PATH} --remove ${_outputname}.info '*/build/include/*' '*/test/*' '/usr/include/*' --output-file ${_outputname}.info.cleaned --rc lcov_branch_coverage=1
COMMAND ${GENHTML_PATH} --branch-coverage -o ${_outputname} ${_outputname}.info.cleaned
COMMAND ${CMAKE_COMMAND} -E remove ${_outputname}.info ${_outputname}.info.cleaned
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
COMMENT "Resetting code coverage counters to zero.\nProcessing code coverage counters and generating report."

cmake/Coveralls.cmake (new file, 111 changed lines)

@ -0,0 +1,111 @@
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Copyright (C) 2014 Joakim Söderberg <joakim.soderberg@gmail.com>
#
#
# Param _COVERAGE_SRCS A list of source files that coverage should be collected for.
# Param _COVERALLS_UPLOAD Upload the result to coveralls?
#
function(coveralls_setup _COVERAGE_SRCS _COVERALLS_UPLOAD)
# When passing a CMake list to an external process, the list
# will be converted from the format "1;2;3" to "1 2 3".
# This means the script we're calling won't see it as a list
# of sources, but rather just one long path. We remedy this
# by replacing ";" with "*" and then reversing that in the script
# that we're calling.
# http://cmake.3232098.n2.nabble.com/Passing-a-CMake-list-quot-as-is-quot-to-a-custom-target-td6505681.html
set(COVERAGE_SRCS_TMP ${_COVERAGE_SRCS})
set(COVERAGE_SRCS "")
foreach (COVERAGE_SRC ${COVERAGE_SRCS_TMP})
set(COVERAGE_SRCS "${COVERAGE_SRCS}*${COVERAGE_SRC}")
endforeach()
#message("Coverage sources: ${COVERAGE_SRCS}")
set(COVERALLS_FILE ${PROJECT_BINARY_DIR}/coveralls.json)
add_custom_target(coveralls_generate
# Zero the coverage counters.
COMMAND ${CMAKE_COMMAND}
-P "${PROJECT_SOURCE_DIR}/cmake/CoverallsClear.cmake"
# Run regress tests.
COMMAND ${CMAKE_CTEST_COMMAND} --output-on-failure
# Generate Gcov and translate it into coveralls JSON.
# We do this by executing an external CMake script.
# (We don't want this to run at CMake generation time, but after compilation and everything has run).
COMMAND ${CMAKE_COMMAND}
-DCOVERAGE_SRCS="${COVERAGE_SRCS}" # TODO: This is passed like: "a b c", not "a;b;c"
-DCOVERALLS_OUTPUT_FILE="${COVERALLS_FILE}"
-DCOV_PATH="${PROJECT_BINARY_DIR}"
-DPROJECT_ROOT="${PROJECT_SOURCE_DIR}"
-P "${PROJECT_SOURCE_DIR}/cmake/CoverallsGenerateGcov.cmake"
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
COMMENT "Generating coveralls output..."
)
if (_COVERALLS_UPLOAD)
message("COVERALLS UPLOAD: ON")
find_program(CURL_EXECUTABLE curl)
if (NOT CURL_EXECUTABLE)
message(FATAL_ERROR "Coveralls: curl not found! Aborting")
endif()
add_custom_target(coveralls_upload
# Upload the JSON to coveralls.
COMMAND ${CURL_EXECUTABLE}
-S -F json_file=@${COVERALLS_FILE}
https://coveralls.io/api/v1/jobs
DEPENDS coveralls_generate
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
COMMENT "Uploading coveralls output...")
add_custom_target(coveralls DEPENDS coveralls_upload)
else()
message("COVERALLS UPLOAD: OFF")
add_custom_target(coveralls DEPENDS coveralls_generate)
endif()
endfunction()
macro(coveralls_turn_on_coverage)
if(NOT (CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX)
AND (NOT "${CMAKE_C_COMPILER_ID}" STREQUAL "Clang"))
message(FATAL_ERROR "Coveralls: Compiler ${CMAKE_C_COMPILER_ID} is not GNU gcc! Aborting... You can set this on the command line using CC=/usr/bin/gcc CXX=/usr/bin/g++ cmake <options> ..")
endif()
if(NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
message(FATAL_ERROR "Coveralls: Code coverage results with an optimised (non-Debug) build may be misleading! Add -DCMAKE_BUILD_TYPE=Debug")
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
endmacro()


@ -0,0 +1,24 @@
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Copyright (C) 2014 Joakim Söderberg <joakim.soderberg@gmail.com>
#
file(REMOVE_RECURSE ${PROJECT_BINARY_DIR}/*.gcda)


@ -0,0 +1,380 @@
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Copyright (C) 2014 Joakim Söderberg <joakim.soderberg@gmail.com>
#
# This is intended to be run by a custom target in a CMake project like this.
# 0. Compile program with coverage support.
# 1. Clear coverage data. (Recursively delete *.gcda in build dir)
# 2. Run the unit tests.
# 3. Run this script specifying which source files the coverage should be performed on.
#
# This script will then use gcov to generate .gcov files in the directory specified
# via the COV_PATH var. This should probably be the same as your cmake build dir.
#
# It then parses the .gcov files to convert them into the Coveralls JSON format:
# https://coveralls.io/docs/api
#
# Example for running as standalone CMake script from the command line:
# (Note it is important the -P is at the end...)
# $ cmake -DCOV_PATH=$(pwd)
# -DCOVERAGE_SRCS="catcierge_rfid.c;catcierge_timer.c"
# -P ../cmake/CoverallsGcovUpload.cmake
#
CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
#
# Make sure we have the needed arguments.
#
if (NOT COVERALLS_OUTPUT_FILE)
message(FATAL_ERROR "Coveralls: No coveralls output file specified. Please set COVERALLS_OUTPUT_FILE")
endif()
if (NOT COV_PATH)
message(FATAL_ERROR "Coveralls: Missing coverage directory path where gcov files will be generated. Please set COV_PATH")
endif()
if (NOT COVERAGE_SRCS)
message(FATAL_ERROR "Coveralls: Missing the list of source files that we should get the coverage data for COVERAGE_SRCS")
endif()
if (NOT PROJECT_ROOT)
message(FATAL_ERROR "Coveralls: Missing PROJECT_ROOT.")
endif()
# Since it's not possible to pass a CMake list properly in the
# "1;2;3" format to an external process, we have replaced the
# ";" with "*", so reverse that here so we get it back into the
# CMake list format.
string(REGEX REPLACE "\\*" ";" COVERAGE_SRCS ${COVERAGE_SRCS})
find_program(GCOV_EXECUTABLE gcov)
if (NOT GCOV_EXECUTABLE)
message(FATAL_ERROR "gcov not found! Aborting...")
endif()
find_package(Git)
# TODO: Add these git things to the coveralls json.
if (GIT_FOUND)
# Branch.
execute_process(
COMMAND ${GIT_EXECUTABLE} rev-parse --abbrev-ref HEAD
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE GIT_BRANCH
OUTPUT_STRIP_TRAILING_WHITESPACE
)
macro (git_log_format FORMAT_CHARS VAR_NAME)
execute_process(
COMMAND ${GIT_EXECUTABLE} log -1 --pretty=format:%${FORMAT_CHARS}
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE ${VAR_NAME}
OUTPUT_STRIP_TRAILING_WHITESPACE
)
endmacro()
git_log_format(an GIT_AUTHOR_EMAIL)
git_log_format(ae GIT_AUTHOR_EMAIL)
git_log_format(cn GIT_COMMITTER_NAME)
git_log_format(ce GIT_COMMITTER_EMAIL)
git_log_format(B GIT_COMMIT_MESSAGE)
message("Git exe: ${GIT_EXECUTABLE}")
message("Git branch: ${GIT_BRANCH}")
message("Git author: ${GIT_AUTHOR_NAME}")
message("Git e-mail: ${GIT_AUTHOR_EMAIL}")
message("Git committer name: ${GIT_COMMITTER_NAME}")
message("Git committer e-mail: ${GIT_COMMITTER_EMAIL}")
message("Git commit message: ${GIT_COMMIT_MESSAGE}")
endif()
############################# Macros #########################################
#
# This macro converts from the full path format gcov outputs:
#
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
#
# to the original source file path the .gcov is for:
#
# /path/to/project/root/subdir/the_file.c
#
macro(get_source_path_from_gcov_filename _SRC_FILENAME _GCOV_FILENAME)
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
# ->
# #path#to#project#root#subdir#the_file.c.gcov
get_filename_component(_GCOV_FILENAME_WEXT ${_GCOV_FILENAME} NAME)
# #path#to#project#root#subdir#the_file.c.gcov -> /path/to/project/root/subdir/the_file.c
string(REGEX REPLACE "\\.gcov$" "" SRC_FILENAME_TMP ${_GCOV_FILENAME_WEXT})
string(REGEX REPLACE "\#" "/" SRC_FILENAME_TMP ${SRC_FILENAME_TMP})
set(${_SRC_FILENAME} "${SRC_FILENAME_TMP}")
endmacro()
##############################################################################
# Get the coverage data.
file(GLOB_RECURSE GCDA_FILES "${COV_PATH}/*.gcda")
message("GCDA files:")
# Get a list of all the object directories needed by gcov
# (The directories the .gcda files and .o files are found in)
# and run gcov on those.
foreach(GCDA ${GCDA_FILES})
message("Process: ${GCDA}")
message("------------------------------------------------------------------------------")
get_filename_component(GCDA_DIR ${GCDA} PATH)
#
# The -p below refers to "Preserve path components",
# This means that the generated gcov filename of a source file will
# keep the original files entire filepath, but / is replaced with #.
# Example:
#
# /path/to/project/root/build/CMakeFiles/the_file.dir/subdir/the_file.c.gcda
# ------------------------------------------------------------------------------
# File '/path/to/project/root/subdir/the_file.c'
# Lines executed:68.34% of 199
# /path/to/project/root/subdir/the_file.c:creating '#path#to#project#root#subdir#the_file.c.gcov'
#
# If -p is not specified then the file is named only "the_file.c.gcov"
#
execute_process(
COMMAND ${GCOV_EXECUTABLE} -p -o ${GCDA_DIR} ${GCDA}
WORKING_DIRECTORY ${COV_PATH}
)
endforeach()
# TODO: Make these be absolute path
file(GLOB ALL_GCOV_FILES ${COV_PATH}/*.gcov)
# Get only the filenames to use for filtering.
#set(COVERAGE_SRCS_NAMES "")
#foreach (COVSRC ${COVERAGE_SRCS})
# get_filename_component(COVSRC_NAME ${COVSRC} NAME)
# message("${COVSRC} -> ${COVSRC_NAME}")
# list(APPEND COVERAGE_SRCS_NAMES "${COVSRC_NAME}")
#endforeach()
#
# Filter out all but the gcov files we want.
#
# We do this by comparing the list of COVERAGE_SRCS filepaths that the
# user wants the coverage data for with the paths of the generated .gcov files,
# so that we only keep the relevant gcov files.
#
# Example:
# COVERAGE_SRCS =
# /path/to/project/root/subdir/the_file.c
#
# ALL_GCOV_FILES =
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
# /path/to/project/root/build/#path#to#project#root#subdir#other_file.c.gcov
#
# Result should be:
# GCOV_FILES =
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
#
set(GCOV_FILES "")
#message("Look in coverage sources: ${COVERAGE_SRCS}")
message("\nFilter out unwanted GCOV files:")
message("===============================")
set(COVERAGE_SRCS_REMAINING ${COVERAGE_SRCS})
foreach (GCOV_FILE ${ALL_GCOV_FILES})
#
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
# ->
# /path/to/project/root/subdir/the_file.c
get_source_path_from_gcov_filename(GCOV_SRC_PATH ${GCOV_FILE})
# Is this in the list of source files?
# TODO: We want to match against relative path filenames from the source file root...
list(FIND COVERAGE_SRCS ${GCOV_SRC_PATH} WAS_FOUND)
if (NOT WAS_FOUND EQUAL -1)
message("YES: ${GCOV_FILE}")
list(APPEND GCOV_FILES ${GCOV_FILE})
# We remove it from the list, so we don't bother searching for it again.
# Also files left in COVERAGE_SRCS_REMAINING after this loop ends should
# have coverage data generated from them (no lines are covered).
list(REMOVE_ITEM COVERAGE_SRCS_REMAINING ${GCOV_SRC_PATH})
else()
message("NO: ${GCOV_FILE}")
endif()
endforeach()
# TODO: Enable setting these
set(JSON_SERVICE_NAME "travis-ci")
set(JSON_SERVICE_JOB_ID $ENV{TRAVIS_JOB_ID})
set(JSON_TEMPLATE
"{
\"service_name\": \"\@JSON_SERVICE_NAME\@\",
\"service_job_id\": \"\@JSON_SERVICE_JOB_ID\@\",
\"source_files\": \@JSON_GCOV_FILES\@
}"
)
set(SRC_FILE_TEMPLATE
"{
\"name\": \"\@GCOV_SRC_REL_PATH\@\",
\"source\": \"\@GCOV_FILE_SOURCE\@\",
\"coverage\": \@GCOV_FILE_COVERAGE\@
}"
)
message("\nGenerate JSON for files:")
message("=========================")
set(JSON_GCOV_FILES "[")
# Read the GCOV files line by line and get the coverage data.
foreach (GCOV_FILE ${GCOV_FILES})
get_source_path_from_gcov_filename(GCOV_SRC_PATH ${GCOV_FILE})
file(RELATIVE_PATH GCOV_SRC_REL_PATH "${PROJECT_ROOT}" "${GCOV_SRC_PATH}")
# Loads the gcov file as a list of lines.
file(STRINGS ${GCOV_FILE} GCOV_LINES)
# Instead of trying to parse the source from the
# gcov file, simply read the file contents from the source file.
# (Parsing it from the gcov is hard because C-code uses ; in many places
# which also happens to be the same as the CMake list delimiter).
file(READ ${GCOV_SRC_PATH} GCOV_FILE_SOURCE)
string(REPLACE "\\" "\\\\" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
string(REGEX REPLACE "\"" "\\\\\"" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
string(REPLACE "\t" "\\\\t" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
string(REPLACE "\r" "\\\\r" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
string(REPLACE "\n" "\\\\n" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
# According to http://json.org/ these should be escaped as well.
# Don't know how to do that in CMake however...
#string(REPLACE "\b" "\\\\b" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
#string(REPLACE "\f" "\\\\f" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
#string(REGEX REPLACE "\u([a-fA-F0-9]{4})" "\\\\u\\1" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
# We want a json array of coverage data as a single string
# start building them from the contents of the .gcov
set(GCOV_FILE_COVERAGE "[")
foreach (GCOV_LINE ${GCOV_LINES})
# Example of what we're parsing:
# Hitcount |Line | Source
# " 8: 26: if (!allowed || (strlen(allowed) == 0))"
string(REGEX REPLACE
"^([^:]*):([^:]*):(.*)$"
"\\1;\\2;\\3"
RES
"${GCOV_LINE}")
list(LENGTH RES RES_COUNT)
if (RES_COUNT GREATER 2)
list(GET RES 0 HITCOUNT)
list(GET RES 1 LINE)
list(GET RES 2 SOURCE)
string(STRIP ${HITCOUNT} HITCOUNT)
string(STRIP ${LINE} LINE)
# Lines with 0 line numbers are metadata and can be ignored.
if (NOT ${LINE} EQUAL 0)
# Translate the hitcount into valid JSON values.
if (${HITCOUNT} STREQUAL "#####")
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}0, ")
elseif (${HITCOUNT} STREQUAL "-")
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}null, ")
else()
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}${HITCOUNT}, ")
endif()
# TODO: Look for LCOV_EXCL_LINE in SOURCE to get rid of false positives.
endif()
else()
message(WARNING "Failed to properly parse line --> ${GCOV_LINE}")
endif()
endforeach()
# Advanced way of removing the trailing comma in the JSON array.
# "[1, 2, 3, " -> "[1, 2, 3"
string(REGEX REPLACE ",[ ]*$" "" GCOV_FILE_COVERAGE ${GCOV_FILE_COVERAGE})
# Append the trailing ] to complete the JSON array.
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}]")
# Generate the final JSON for this file.
message("Generate JSON for file: ${GCOV_SRC_REL_PATH}...")
string(CONFIGURE ${SRC_FILE_TEMPLATE} FILE_JSON)
set(JSON_GCOV_FILES "${JSON_GCOV_FILES}${FILE_JSON}, ")
endforeach()
# Loop through all files we couldn't find any coverage for
# as well, and generate JSON for those as well with 0% coverage.
foreach(NOT_COVERED_SRC ${COVERAGE_SRCS_REMAINING})
# Loads the source file as a list of lines.
file(STRINGS ${NOT_COVERED_SRC} SRC_LINES)
set(GCOV_FILE_COVERAGE "[")
set(GCOV_FILE_SOURCE "")
foreach (SOURCE ${SRC_LINES})
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}0, ")
string(REPLACE "\\" "\\\\" SOURCE "${SOURCE}")
string(REGEX REPLACE "\"" "\\\\\"" SOURCE "${SOURCE}")
string(REPLACE "\t" "\\\\t" SOURCE "${SOURCE}")
string(REPLACE "\r" "\\\\r" SOURCE "${SOURCE}")
set(GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}${SOURCE}\\n")
endforeach()
# Remove trailing comma, and complete JSON array with ]
string(REGEX REPLACE ",[ ]*$" "" GCOV_FILE_COVERAGE ${GCOV_FILE_COVERAGE})
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}]")
# Generate the final JSON for this file.
message("Generate JSON for non-gcov file: ${NOT_COVERED_SRC}...")
string(CONFIGURE ${SRC_FILE_TEMPLATE} FILE_JSON)
set(JSON_GCOV_FILES "${JSON_GCOV_FILES}${FILE_JSON}, ")
endforeach()
# Get rid of trailing comma.
string(REGEX REPLACE ",[ ]*$" "" JSON_GCOV_FILES ${JSON_GCOV_FILES})
set(JSON_GCOV_FILES "${JSON_GCOV_FILES}]")
# Generate the final complete JSON!
message("Generate final JSON...")
string(CONFIGURE ${JSON_TEMPLATE} JSON)
file(WRITE "${COVERALLS_OUTPUT_FILE}" "${JSON}")
message("###########################################################################")
message("Generated coveralls JSON containing coverage data:")
message("${COVERALLS_OUTPUT_FILE}")
message("###########################################################################")


@ -21,10 +21,9 @@
#define JANSSON_USING_CMAKE
#endif
/* If your compiler supports the `long long` type and the strtoll()
library function, JSON_INTEGER_IS_LONG_LONG is defined to 1,
otherwise to 0. */
#cmakedefine JSON_INTEGER_IS_LONG_LONG 1
/* Note: when using cmake, JSON_INTEGER_IS_LONG_LONG is not defined nor used,
* as we will also check for __int64 etc types.
* (the definition was used in the automake system) */
/* Bring in the cmake-detected defines */
#cmakedefine HAVE_STDINT_H 1
@ -57,6 +56,9 @@
#define JSON_INTEGER_FORMAT @JSON_INTEGER_FORMAT@
/* If locale.h and localeconv() are available, define to 1, otherwise to 0. */
#define JSON_HAVE_LOCALECONV @JSON_HAVE_LOCALECONV@
/* If __atomic builtins are available they will be used to manage
reference counts of json_t. */
#define JSON_HAVE_ATOMIC_BUILTINS @JSON_HAVE_ATOMIC_BUILTINS@


@ -21,8 +21,6 @@
#cmakedefine HAVE_LOCALE_H 1
#cmakedefine HAVE_SETLOCALE 1
#cmakedefine WORDS_BIGENDIAN 1
#cmakedefine HAVE_INT32_T 1
#ifndef HAVE_INT32_T
# define int32_t @JSON_INT32@
@ -52,11 +50,4 @@
#cmakedefine USE_URANDOM 1
#cmakedefine USE_WINDOWS_CRYPTOAPI 1
#cmakedefine USE_DTOA 1
#if USE_DTOA
# define DTOA_ENABLED 1
#else
# define DTOA_ENABLED 0
#endif
#define INITIAL_HASHTABLE_ORDER @JANSSON_INITIAL_HASHTABLE_ORDER@


@ -1,5 +1,5 @@
AC_PREREQ([2.60])
AC_INIT([jansson], [2.14.1], [https://github.com/akheron/jansson/issues])
AC_INIT([jansson], [2.13], [https://github.com/akheron/jansson/issues])
AC_CONFIG_AUX_DIR([.])
AM_INIT_AUTOMAKE([1.10 foreign])
@ -25,8 +25,6 @@ AC_TYPE_UINT16_T
AC_TYPE_UINT8_T
AC_TYPE_LONG_LONG_INT
AC_C_BIGENDIAN
AC_C_INLINE
case $ac_cv_c_inline in
yes) json_inline=inline;;
@ -36,7 +34,7 @@ esac
AC_SUBST([json_inline])
# Checks for library functions.
AC_CHECK_FUNCS([close getpid gettimeofday open read setlocale sched_yield strtoll])
AC_CHECK_FUNCS([close getpid gettimeofday localeconv open read sched_yield strtoll])
AC_MSG_CHECKING([for gcc __sync builtins])
have_sync_builtins=no
@ -76,6 +74,12 @@ case "$ac_cv_type_long_long_int$ac_cv_func_strtoll" in
esac
AC_SUBST([json_have_long_long])
case "$ac_cv_header_locale_h$ac_cv_func_localeconv" in
yesyes) json_have_localeconv=1;;
*) json_have_localeconv=0;;
esac
AC_SUBST([json_have_localeconv])
# Features
AC_ARG_ENABLE([urandom],
[AS_HELP_STRING([--disable-urandom],
@ -133,23 +137,6 @@ fi
AS_IF([test "x$with_Bsymbolic" = "xyes"], [JSON_BSYMBOLIC_LDFLAGS=-Wl[,]-Bsymbolic-functions])
AC_SUBST(JSON_BSYMBOLIC_LDFLAGS)
# Enable symbol versioning on GNU libc
JSON_SYMVER_LDFLAGS=
AC_CHECK_DECL([__GLIBC__], [JSON_SYMVER_LDFLAGS=-Wl,--default-symver])
AC_SUBST([JSON_SYMVER_LDFLAGS])
AC_ARG_ENABLE([dtoa],
[AS_HELP_STRING([--enable-dtoa], [Use dtoa for optimal floating point to string conversion])],
[case "$enableval" in
yes) dtoa=yes ;;
no) dtoa=no ;;
*) AC_MSG_ERROR([bad value ${enableval} for --enable-dtoa]) ;;
esac], [dtoa=yes])
if test "$dtoa" = "yes"; then
AC_DEFINE([DTOA_ENABLED], [1],
[Define to 1 to use dtoa to convert floating points to strings])
fi
AM_CONDITIONAL([DTOA_ENABLED], [test "$dtoa" = "yes"])
AC_ARG_ENABLE([ossfuzzers],
[AS_HELP_STRING([--enable-ossfuzzers],


@ -1,9 +0,0 @@
version: 2
build:
os: ubuntu-22.04
tools:
python: "3.12"
sphinx:
configuration: doc/conf.py


@ -1,5 +1,5 @@
EXTRA_DIST = conf.py apiref.rst changes.rst conformance.rst \
gettingstarted.rst github_commits.c index.rst threadsafety.rst \
EXTRA_DIST = conf.py apiref.rst changes.rst conformance.rst \
gettingstarted.rst github_commits.c index.rst portability.rst \
tutorial.rst upgrading.rst ext/refcounting.py
SPHINXBUILD = sphinx-build


@ -114,7 +114,7 @@ also cause errors.
Type
----
.. c:enum:: json_type
.. type:: enum json_type
The type of a JSON value. The following members are defined:
@ -145,33 +145,33 @@ Type
.. function:: int json_typeof(const json_t *json)
Return the type of the JSON value (a :type:`json_type` cast to
``int``). *json* MUST NOT be *NULL*. This function is actually
:type:`int`). *json* MUST NOT be *NULL*. This function is actually
implemented as a macro for speed.
.. function:: int json_is_object(const json_t *json)
int json_is_array(const json_t *json)
int json_is_string(const json_t *json)
int json_is_integer(const json_t *json)
int json_is_real(const json_t *json)
int json_is_true(const json_t *json)
int json_is_false(const json_t *json)
int json_is_null(const json_t *json)
.. function:: json_is_object(const json_t *json)
json_is_array(const json_t *json)
json_is_string(const json_t *json)
json_is_integer(const json_t *json)
json_is_real(const json_t *json)
json_is_true(const json_t *json)
json_is_false(const json_t *json)
json_is_null(const json_t *json)
These functions (actually macros) return true (non-zero) for values
of the given type, and false (zero) for values of other types and
for *NULL*.
.. function:: int json_is_number(const json_t *json)
.. function:: json_is_number(const json_t *json)
Returns true for values of types ``JSON_INTEGER`` and
``JSON_REAL``, and false for other types and for *NULL*.
.. function:: int json_is_boolean(const json_t *json)
.. function:: json_is_boolean(const json_t *json)
Returns true for types ``JSON_TRUE`` and ``JSON_FALSE``, and false
for values of other types and for *NULL*.
.. function:: int json_boolean_value(const json_t *json)
.. function:: json_boolean_value(const json_t *json)
Alias of :func:`json_is_true()`, i.e. returns 1 for ``JSON_TRUE``
and 0 otherwise.
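
For illustration, a minimal C sketch of dispatching on these predicates; the
``describe`` helper is hypothetical and assumes a value obtained elsewhere::

    #include <jansson.h>
    #include <stdio.h>

    /* Print a short description of any JSON value's type. */
    static void describe(const json_t *value) {
        if (json_is_string(value))
            printf("string: %s\n", json_string_value(value));
        else if (json_is_number(value))
            printf("number: %f\n", json_number_value(value));
        else if (json_is_boolean(value))
            printf("boolean: %d\n", json_boolean_value(value));
        else if (json_is_null(value))
            printf("null\n");
        else
            printf("container, json_typeof() == %d\n", (int)json_typeof(value));
    }
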
@ -594,12 +594,12 @@ A JSON array is an ordered collection of other JSON values.
Appends all elements in *other_array* to the end of *array*.
Returns 0 on success and -1 on error.
.. function:: void json_array_foreach(array, index, value)
.. function:: json_array_foreach(array, index, value)
Iterate over every element of ``array``, running the block
of code that follows each time with the proper values set to
variables ``index`` and ``value``, of types :type:`size_t` and
:type:`json_t` pointer respectively. Example::
:type:`json_t *` respectively. Example::
/* array is a JSON array */
size_t index;
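
The inline example is cut short by the hunk; a self-contained sketch of the
same iteration pattern, with made-up array contents::

    #include <jansson.h>
    #include <stdio.h>

    int main(void) {
        json_t *array = json_pack("[i, i, i]", 1, 2, 3);
        size_t index;
        json_t *value;

        /* index and value are set on each pass; value is a borrowed reference. */
        json_array_foreach(array, index, value) {
            printf("element %zu = %" JSON_INTEGER_FORMAT "\n",
                   index, json_integer_value(value));
        }

        json_decref(array);
        return 0;
    }
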
@ -648,15 +648,6 @@ allowed in object keys.
Get a value corresponding to *key* from *object*. Returns *NULL* if
*key* is not found and on error.
.. function:: json_t *json_object_getn(const json_t *object, const char *key, size_t key_len)
.. refcounting:: borrow
Like :func:`json_object_get`, but give the fixed-length *key* with length *key_len*.
See :ref:`fixed_length_keys` for details.
.. versionadded:: 2.14
.. function:: int json_object_set(json_t *object, const char *key, json_t *value)
Set the value of *key* to *value* in *object*. *key* must be a
@ -664,13 +655,6 @@ allowed in object keys.
already is a value for *key*, it is replaced by the new value.
Returns 0 on success and -1 on error.
.. function:: int json_object_setn(json_t *object, const char *key, size_t key_len, json_t *value)
Like :func:`json_object_set`, but give the fixed-length *key* with length *key_len*.
See :ref:`fixed_length_keys` for details.
.. versionadded:: 2.14
.. function:: int json_object_set_nocheck(json_t *object, const char *key, json_t *value)
Like :func:`json_object_set`, but doesn't check that *key* is
@ -678,26 +662,12 @@ allowed in object keys.
really is the case (e.g. you have already checked it by other
means).
.. function:: int json_object_setn_nocheck(json_t *object, const char *key, size_t key_len, json_t *value)
Like :func:`json_object_set_nocheck`, but give the fixed-length *key* with length *key_len*.
See :ref:`fixed_length_keys` for details.
.. versionadded:: 2.14
.. function:: int json_object_set_new(json_t *object, const char *key, json_t *value)
Like :func:`json_object_set()` but steals the reference to
*value*. This is useful when *value* is newly created and not used
after the call.
.. function:: int json_object_setn_new(json_t *object, const char *key, size_t key_len, json_t *value)
Like :func:`json_object_set_new`, but give the fixed-length *key* with length *key_len*.
See :ref:`fixed_length_keys` for details.
.. versionadded:: 2.14
.. function:: int json_object_set_new_nocheck(json_t *object, const char *key, json_t *value)
Like :func:`json_object_set_new`, but doesn't check that *key* is
@ -705,26 +675,12 @@ allowed in object keys.
really is the case (e.g. you have already checked it by other
means).
.. function:: int json_object_setn_new_nocheck(json_t *object, const char *key, size_t key_len, json_t *value)
Like :func:`json_object_set_new_nocheck`, but give the fixed-length *key* with length *key_len*.
See :ref:`fixed_length_keys` for details.
.. versionadded:: 2.14
.. function:: int json_object_del(json_t *object, const char *key)
Delete *key* from *object* if it exists. Returns 0 on success, or
-1 if *key* was not found. The reference count of the removed value
is decremented.
.. function:: int json_object_deln(json_t *object, const char *key, size_t key_len)
Like :func:`json_object_del`, but give the fixed-length *key* with length *key_len*.
See :ref:`fixed_length_keys` for details.
.. versionadded:: 2.14
.. function:: int json_object_clear(json_t *object)
Remove all elements from *object*. Returns 0 on success and -1 if
@ -776,12 +732,12 @@ allowed in object keys.
recursively merged with the corresponding values in *object* if they are also
objects, instead of overwriting them. Returns 0 on success or -1 on error.
.. function:: void json_object_foreach(object, key, value)
.. function:: json_object_foreach(object, key, value)
Iterate over every key-value pair of ``object``, running the block
of code that follows each time with the proper values set to
variables ``key`` and ``value``, of types ``const char *`` and
:type:`json_t` pointer respectively. Example::
variables ``key`` and ``value``, of types :type:`const char *` and
:type:`json_t *` respectively. Example::
/* obj is a JSON object */
const char *key;
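
Again the inline example is truncated at the hunk boundary; a complete sketch
of the same pattern, with made-up object contents::

    #include <jansson.h>
    #include <stdio.h>

    int main(void) {
        json_t *obj = json_pack("{s:i, s:i}", "apples", 3, "pears", 5);
        const char *key;
        json_t *value;

        /* key and value are borrowed; do not decref them here. */
        json_object_foreach(obj, key, value) {
            printf("%s -> %" JSON_INTEGER_FORMAT "\n", key, json_integer_value(value));
        }

        json_decref(obj);
        return 0;
    }
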
@ -794,7 +750,7 @@ allowed in object keys.
The items are returned in the order they were inserted to the
object.
**Note:** It's not safe to call ``json_object_del(object, key)`` or ``json_object_deln(object, key, key_len)``
**Note:** It's not safe to call ``json_object_del(object, key)``
during iteration. If you need to, use
:func:`json_object_foreach_safe` instead.
@ -808,42 +764,14 @@ allowed in object keys.
.. versionadded:: 2.3
.. function:: void json_object_foreach_safe(object, tmp, key, value)
.. function:: json_object_foreach_safe(object, tmp, key, value)
Like :func:`json_object_foreach()`, but it's safe to call
``json_object_del(object, key)`` or ``json_object_deln(object, key, key_len)`` during iteration.
You need to pass an extra ``void *`` parameter ``tmp`` that is used for temporary storage.
``json_object_del(object, key)`` during iteration. You need to pass
an extra ``void *`` parameter ``tmp`` that is used for temporary storage.
.. versionadded:: 2.8
.. function:: void json_object_keylen_foreach(object, key, key_len, value)
Like :c:func:`json_object_foreach`, but in *key_len* stored length of the *key*.
Example::
/* obj is a JSON object */
const char *key;
json_t *value;
size_t len;
json_object_keylen_foreach(obj, key, len, value) {
printf("got key %s with length %zu\n", key, len);
}
**Note:** It's not safe to call ``json_object_deln(object, key, key_len)``
during iteration. If you need to, use
:func:`json_object_keylen_foreach_safe` instead.
.. versionadded:: 2.14
.. function:: void json_object_keylen_foreach_safe(object, tmp, key, key_len, value)
Like :func:`json_object_keylen_foreach()`, but it's safe to call
``json_object_deln(object, key, key_len)`` during iteration.
You need to pass an extra ``void *`` parameter ``tmp`` that is used for temporary storage.
.. versionadded:: 2.14
The following functions can be used to iterate through all key-value
pairs in an object. The items are returned in the order they were
@ -872,12 +800,6 @@ inserted to the object.
Extract the associated key from *iter*.
.. function:: size_t json_object_iter_key_len(void *iter)
Extract the associated key length from *iter*.
.. versionadded:: 2.14
.. function:: json_t *json_object_iter_value(void *iter)
.. refcounting:: borrow
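
A small sketch of the explicit iterator API these functions belong to;
``dump_keys`` is a hypothetical helper::

    #include <jansson.h>
    #include <stdio.h>

    /* Walk an object with the iterator API instead of the foreach macro. */
    static void dump_keys(json_t *object) {
        void *iter = json_object_iter(object);
        while (iter) {
            const char *key = json_object_iter_key(iter);
            json_t *value = json_object_iter_value(iter); /* borrowed reference */
            printf("%s has type %d\n", key, (int)json_typeof(value));
            iter = json_object_iter_next(object, iter);
        }
    }
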
@ -933,7 +855,8 @@ inserted to the object.
:func:`json_object()`, either explicit or implicit. If this
function is not called by the user, the first call to
:func:`json_object()` (either explicit or implicit) seeds the hash
function. See :ref:`thread-safety` for notes on thread safety.
function. See :ref:`portability-thread-safety` for notes on thread
safety.
If repeatable results are required, for e.g. unit tests, the hash
function can be "unrandomized" by calling :func:`json_object_seed`
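
A minimal sketch of seeding for reproducible tests; the constant 42 is an
arbitrary non-zero value::

    #include <jansson.h>

    int main(void) {
        /* For reproducible hashtable ordering in tests, seed with a fixed
           non-zero value before the first object is created. */
        json_object_seed(42);

        json_t *obj = json_object();
        json_object_set_new(obj, "answer", json_integer(42));
        json_decref(obj);
        return 0;
    }
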
@ -1003,7 +926,7 @@ success. See :ref:`apiref-decoding` for more info.
All functions also accept *NULL* as the :type:`json_error_t` pointer,
in which case no error information is returned to the caller.
.. c:enum:: json_error_code
.. type:: enum json_error_code
An enumeration containing numeric error codes. The following errors are
currently defined:
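
A minimal sketch of inspecting the error struct after a failed decode,
assuming jansson 2.11 or later for ``json_error_code()``::

    #include <jansson.h>
    #include <stdio.h>

    int main(void) {
        json_error_t error;
        json_t *root = json_loads("{\"broken\": ", 0, &error);

        if (!root) {
            /* text, line, column and position are filled in by the decoder. */
            fprintf(stderr, "parse error on line %d, column %d: %s (code %d)\n",
                    error.line, error.column, error.text,
                    (int)json_error_code(&error));
            return 1;
        }

        json_decref(root);
        return 0;
    }
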
@ -1098,7 +1021,7 @@ in which case no error information is returned to the caller.
Encoding
========
This section describes the functions that can be used to encode
This sections describes the functions that can be used to encode
values to JSON. By default, only objects and arrays can be encoded
directly, since they are the only valid *root* values of a JSON text.
To encode any JSON value, use the ``JSON_ENCODE_ANY`` flag (see
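
A short sketch of both cases: a normal object root, and a bare value that
needs ``JSON_ENCODE_ANY``; the sample data is made up::

    #include <jansson.h>
    #include <stdio.h>
    #include <stdlib.h>

    int main(void) {
        json_t *obj = json_pack("{s:[i,i]}", "values", 1, 2);
        char *text = json_dumps(obj, JSON_INDENT(2) | JSON_SORT_KEYS);
        printf("%s\n", text);
        free(text); /* json_dumps returns a malloc'd buffer */
        json_decref(obj);

        /* A bare string is not a valid root unless JSON_ENCODE_ANY is given. */
        json_t *str = json_string("hello");
        char *any = json_dumps(str, JSON_ENCODE_ANY);
        printf("%s\n", any);
        free(any);
        json_decref(str);
        return 0;
    }
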
@ -1283,7 +1206,7 @@ These functions output UTF-8:
Decoding
========
This section describes the functions that can be used to decode JSON
This sections describes the functions that can be used to decode JSON
text to the Jansson representation of JSON data. The JSON
specification requires that a JSON text is either a serialized array
or object, and this requirement is also enforced with the following
@ -1565,17 +1488,17 @@ arguments.
Output a JSON null value. No argument is consumed.
``b`` (boolean) [int]
Convert a C ``int`` to JSON boolean value. Zero is converted
Convert a C :type:`int` to JSON boolean value. Zero is converted
to ``false`` and non-zero to ``true``.
``i`` (integer) [int]
Convert a C ``int`` to JSON integer.
Convert a C :type:`int` to JSON integer.
``I`` (integer) [json_int_t]
Convert a C :type:`json_int_t` to JSON integer.
``f`` (real) [double]
Convert a C ``double`` to JSON real.
Convert a C :type:`double` to JSON real.
``o`` (any value) [json_t \*]
Output any given JSON value as-is. If the value is added to an
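
A sketch combining several of these specifiers in one call; the values are
made up::

    #include <jansson.h>
    #include <stdio.h>
    #include <stdlib.h>

    int main(void) {
        /* Builds {"name": "jansson", "major": 2, "stable": true, "pi": 3.14};
           each specifier consumes one C argument. */
        json_t *obj = json_pack("{s:s, s:i, s:b, s:f}",
                                "name", "jansson",
                                "major", 2,
                                "stable", 1,
                                "pi", 3.14);
        if (!obj)
            return 1;

        char *text = json_dumps(obj, JSON_COMPACT);
        printf("%s\n", text);
        free(text);
        json_decref(obj);
        return 0;
    }
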
@ -1702,20 +1625,20 @@ type whose address should be passed.
Expect a JSON null value. Nothing is extracted.
``b`` (boolean) [int]
Convert a JSON boolean value to a C ``int``, so that ``true``
Convert a JSON boolean value to a C :type:`int`, so that ``true``
is converted to 1 and ``false`` to 0.
``i`` (integer) [int]
Convert a JSON integer to C ``int``.
Convert a JSON integer to C :type:`int`.
``I`` (integer) [json_int_t]
Convert a JSON integer to C :type:`json_int_t`.
``f`` (real) [double]
Convert a JSON real to C ``double``.
Convert a JSON real to C :type:`double`.
``F`` (integer or real) [double]
Convert a JSON number (integer or real) to C ``double``.
Convert a JSON number (integer or real) to C :type:`double`.
``o`` (any value) [json_t \*]
Store a JSON value with no conversion to a :type:`json_t` pointer.
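
A matching sketch for extraction; the object and field names are made up::

    #include <jansson.h>
    #include <stdio.h>

    int main(void) {
        json_t *obj = json_pack("{s:s, s:i, s:f}",
                                "name", "jansson", "major", 2, "pi", 3.14);
        const char *name;
        int major;
        double pi;

        /* Each specifier stores into the address passed for it; "s" yields a
           pointer into the JSON string, valid while obj stays alive. */
        if (json_unpack(obj, "{s:s, s:i, s:f}",
                        "name", &name, "major", &major, "pi", &pi) == 0)
            printf("%s %d %.2f\n", name, major, pi);

        json_decref(obj);
        return 0;
    }
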
@ -1986,79 +1909,3 @@ memory, see
http://www.dwheeler.com/secure-programs/Secure-Programs-HOWTO/protect-secrets.html.
The page also explains the :func:`guaranteed_memset()` function used
in the example and gives a sample implementation for it.
.. _fixed_length_keys:
Fixed-Length keys
=================
The Jansson API allows work with fixed-length keys. This can be useful in the following cases:
* The key is contained inside a buffer and is not null-terminated. In this case creating a new temporary buffer is not needed.
* The key contains U+0000 inside it.
List of API for fixed-length keys:
* :c:func:`json_object_getn`
* :c:func:`json_object_setn`
* :c:func:`json_object_setn_nocheck`
* :c:func:`json_object_setn_new`
* :c:func:`json_object_setn_new_nocheck`
* :c:func:`json_object_deln`
* :c:func:`json_object_iter_key_len`
* :c:func:`json_object_keylen_foreach`
* :c:func:`json_object_keylen_foreach_safe`
**Examples:**
As an example, let's write a function that looks up a :c:struct:`json_t` value by a ``.``-separated path.
This requires:
* a string iterator that does not modify its input (for better performance)
* the API for working with fixed-length keys
The iterator::
struct string {
const char *string;
size_t length;
};
size_t string_try_next(struct string *str, const char *delimiter) {
str->string += strspn(str->string, delimiter);
str->length = strcspn(str->string, delimiter);
return str->length;
}
#define string_foreach(_string, _delimiter) \
for (; string_try_next(&(_string), _delimiter); (_string).string += (_string).length)
The function::
json_t *json_object_get_by_path(json_t *object, const char *path) {
struct string str;
json_t *out = object;
str.string = path;
string_foreach(str, ".") {
out = json_object_getn(out, str.string, str.length);
if (out == NULL)
return NULL;
}
return out;
}
And usage::
int main(void) {
json_t *obj = json_pack("{s:{s:{s:b}}}", "a", "b", "c", 1);
json_t *c = json_object_get_by_path(obj, "a.b.c");
assert(json_is_true(c));
json_decref(obj);
}
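A second sketch: storing and retrieving a key that contains an embedded U+0000, which is only possible with the length-aware functions::

    int main(void) {
        json_t *obj = json_object();
        const char key[] = {'k', '\0', 'v'};  /* three bytes, embedded null */

        json_object_setn_new_nocheck(obj, key, sizeof(key), json_true());

        /* json_object_get() would stop at the null byte; the
           length-aware lookup sees the whole key */
        assert(json_object_getn(obj, key, sizeof(key)) == json_true());

        json_decref(obj);
    }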

View file

@ -48,7 +48,7 @@ copyright = u'2009-2020, Petri Lehtinen'
# built documents.
#
# The short X.Y version.
version = '2.14.1'
version = '2.13'
# The full version, including alpha/beta/rc tags.
release = version

View file

@ -22,7 +22,8 @@ JSON strings are mapped to C-style null-terminated character arrays,
and UTF-8 encoding is used internally.
All Unicode codepoints U+0000 through U+10FFFF are allowed in string
values. However, U+0000 is allowed in object keys only for length-aware functions.
values. However, U+0000 is not allowed in object keys because of API
restrictions.
Unicode normalization or any other transformation is never performed
on any strings (string values or object keys). When checking for
@ -109,7 +110,7 @@ to overflow semantics). Also, no support or hooks are provided for any
supplemental "bignum" type add-on packages.
Depth of nested values
======================
----------------------
To avoid stack exhaustion, Jansson currently limits the nesting depth
for arrays and objects to a certain value (default: 2048), defined as

View file

@ -24,8 +24,8 @@
"""
from docutils import nodes
from docutils.parsers.rst import Directive
class refcounting(nodes.emphasis): pass
def visit(self, node):
self.visit_emphasis(node)
@ -40,25 +40,16 @@ def html_depart(self, node):
self.body.append('</em>')
class refcounting(nodes.emphasis):
pass
class refcounting_directive(Directive):
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = False
def run(self):
if self.arguments[0] == 'borrow':
text = 'Return value: Borrowed reference.'
elif self.arguments[0] == 'new':
text = 'Return value: New reference.'
else:
raise Error('Valid arguments: new, borrow')
return [refcounting(text, text)]
def refcounting_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
if arguments[0] == 'borrow':
text = 'Return value: Borrowed reference.'
elif arguments[0] == 'new':
text = 'Return value: New reference.'
else:
raise Error('Valid arguments: new, borrow')
return [refcounting(text, text)]
def setup(app):
app.add_node(refcounting,
@ -66,4 +57,4 @@ def setup(app):
latex=(visit, depart),
text=(visit, depart),
man=(visit, depart))
app.add_directive('refcounting', refcounting_directive)
app.add_directive('refcounting', refcounting_directive, 0, (1, 0, 0))

View file

@ -153,7 +153,6 @@ int main(int argc, char *argv[]) {
sha = json_object_get(data, "sha");
if (!json_is_string(sha)) {
fprintf(stderr, "error: commit %d: sha is not a string\n", (int)(i + 1));
json_decref(root);
return 1;
}

View file

@ -41,7 +41,7 @@ Contents
upgrading
tutorial
conformance
threadsafety
portability
apiref
changes

View file

@ -1,8 +1,11 @@
.. _thread-safety:
***********
Portability
***********
.. _portability-thread-safety:
*************
Thread safety
*************
-------------
Jansson as a library is thread safe and has no mutable global state.
The only exceptions are the hash function seed and memory allocation
@ -61,7 +64,7 @@ program startup. See :ref:`apiref-custom-memory-allocation`.
Locale
======
------
Jansson works fine under any locale.
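Both pieces of global state mentioned above, the hash function seed and the memory allocation functions, are typically configured once at program startup, before any threads are created. A minimal sketch::

    #include <jansson.h>
    #include <stdlib.h>

    int main(void) {
        /* Configure the only global state before any other Jansson
           call and before spawning threads. */
        json_object_seed(0);                /* 0 = choose a random seed */
        json_set_alloc_funcs(malloc, free); /* or custom allocators */

        /* ... rest of the program ... */
        return 0;
    }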

View file

@ -47,13 +47,13 @@ List of Incompatible Changes
**Underlying type of JSON integers**
The underlying C type of JSON integers has been changed from
``int`` to the widest available signed integer type, i.e.
``long long`` or ``long``, depending on whether
``long long`` is supported on your system or not. This makes
:type:`int` to the widest available signed integer type, i.e.
:type:`long long` or :type:`long`, depending on whether
:type:`long long` is supported on your system or not. This makes
the whole 64-bit integer range available on most modern systems.
``jansson.h`` has a typedef :type:`json_int_t` to the underlying
integer type. ``int`` should still be used in most cases when
integer type. :type:`int` should still be used in most cases when
dealing with smallish JSON integers, as the compiler handles
implicit type coercion. Only when the full 64-bit range is needed,
:type:`json_int_t` should be explicitly used.
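Where the full range is needed, :type:`json_int_t` pairs with the ``JSON_INTEGER_FORMAT`` macro, which expands to the matching ``printf`` conversion; a minimal sketch (assuming a 64-bit :type:`json_int_t`)::

    #include <jansson.h>
    #include <stdio.h>

    int main(void) {
        json_t *value = json_integer((json_int_t)4294967296LL); /* needs > 32 bits */

        /* JSON_INTEGER_FORMAT is "lld" or "ld", matching json_int_t */
        printf("%" JSON_INTEGER_FORMAT "\n", json_integer_value(value));

        json_decref(value);
        return 0;
    }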
@ -69,8 +69,8 @@ List of Incompatible Changes
**Unsigned integers in API functions**
Version 2.0 unifies unsigned integer usage in the API. All uses of
``unsigned int`` and ``unsigned long`` have been replaced
with ``size_t``. This includes flags, container sizes, etc.
:type:`unsigned int` and :type:`unsigned long` have been replaced
with :type:`size_t`. This includes flags, container sizes, etc.
This should not require source code changes, as both
``unsigned int`` and ``unsigned long`` are usually
compatible with ``size_t``.
:type:`unsigned int` and :type:`unsigned long` are usually
compatible with :type:`size_t`.

View file

@ -30,7 +30,7 @@
void print_json(json_t *root);
void print_json_aux(json_t *element, int indent);
void print_json_indent(int indent);
const char *json_plural(size_t count);
const char *json_plural(int count);
void print_json_object(json_t *element, int indent);
void print_json_array(json_t *element, int indent);
void print_json_string(json_t *element, int indent);
@ -80,7 +80,7 @@ void print_json_indent(int indent) {
}
}
const char *json_plural(size_t count) { return count == 1 ? "" : "s"; }
const char *json_plural(int count) { return count == 1 ? "" : "s"; }
void print_json_object(json_t *element, int indent) {
size_t size;
@ -90,7 +90,7 @@ void print_json_object(json_t *element, int indent) {
print_json_indent(indent);
size = json_object_size(element);
printf("JSON Object of %lld pair%s:\n", (long long)size, json_plural(size));
printf("JSON Object of %ld pair%s:\n", size, json_plural(size));
json_object_foreach(element, key, value) {
print_json_indent(indent + 2);
printf("JSON Key: \"%s\"\n", key);
@ -103,7 +103,7 @@ void print_json_array(json_t *element, int indent) {
size_t size = json_array_size(element);
print_json_indent(indent);
printf("JSON Array of %lld element%s:\n", (long long)size, json_plural(size));
printf("JSON Array of %ld element%s:\n", size, json_plural(size));
for (i = 0; i < size; i++) {
print_json_aux(json_array_get(element, i), indent + 2);
}

View file

@ -1,3 +1,3 @@
#!/bin/bash
git ls-files | grep '\.[ch]$' | xargs clang-format -i
find . -type f -a '(' -name '*.c' -o -name '*.h' ')' | xargs clang-format -i

View file

@ -12,16 +12,13 @@ fi
errors=0
paths=$(git ls-files | grep '\.[ch]$')
for path in $paths; do
echo "Checking $path"
$CLANG_FORMAT $path > $path.formatted
in=$(cat $path)
out=$(cat $path.formatted)
out=$($CLANG_FORMAT $path)
if [ "$in" != "$out" ]; then
diff -u $path $path.formatted
diff -u -L $path -L "$path.formatted" $path - <<<$out
errors=1
fi
rm $path.formatted
done
if [ $errors -ne 0 ]; then

View file

@ -1,4 +1,4 @@
EXTRA_DIST = jansson.def dtoa.c
EXTRA_DIST = jansson.def
include_HEADERS = jansson.h
nodist_include_HEADERS = jansson_config.h
@ -22,14 +22,8 @@ libjansson_la_SOURCES = \
utf.h \
value.c \
version.c
if DTOA_ENABLED
libjansson_la_SOURCES += dtoa.c
endif
libjansson_la_LDFLAGS = \
-no-undefined \
-export-symbols-regex '^json_|^jansson_' \
-version-info 18:1:14 \
@JSON_SYMVER_LDFLAGS@ \
-export-symbols-regex '^json_' \
-version-info 16:0:12 \
@JSON_BSYMBOLIC_LDFLAGS@

6265
src/dtoa.c

File diff suppressed because it is too large

View file

@ -23,10 +23,10 @@
#include "strbuffer.h"
#include "utf.h"
#define MAX_INTEGER_STR_LENGTH 25
#define MAX_REAL_STR_LENGTH 25
#define MAX_INTEGER_STR_LENGTH 100
#define MAX_REAL_STR_LENGTH 100
#define FLAGS_TO_INDENT(f) ((f) & 0x1F)
#define FLAGS_TO_INDENT(f) ((f)&0x1F)
#define FLAGS_TO_PRECISION(f) (((f) >> 11) & 0x1F)
struct buffer {
@ -195,21 +195,8 @@ static int dump_string(const char *str, size_t len, json_dump_callback_t dump, v
return dump("\"", 1, data);
}
struct key_len {
const char *key;
int len;
};
static int compare_keys(const void *key1, const void *key2) {
const struct key_len *k1 = key1;
const struct key_len *k2 = key2;
const size_t min_size = k1->len < k2->len ? k1->len : k2->len;
int res = memcmp(k1->key, k2->key, min_size);
if (res)
return res;
return k1->len - k2->len;
return strcmp(*(const char **)key1, *(const char **)key2);
}
static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *parents,
@ -266,10 +253,9 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
/* Space for "0x", double the sizeof a pointer for the hex and a
* terminator. */
char key[2 + (sizeof(json) * 2) + 1];
size_t key_len;
/* detect circular references */
if (jsonp_loop_check(parents, json, key, sizeof(key), &key_len))
if (jsonp_loop_check(parents, json, key, sizeof(key)))
return -1;
n = json_array_size(json);
@ -277,7 +263,7 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
if (!embed && dump("[", 1, data))
return -1;
if (n == 0) {
hashtable_del(parents, key, key_len);
hashtable_del(parents, key);
return embed ? 0 : dump("]", 1, data);
}
if (dump_indent(flags, depth + 1, 0, dump, data))
@ -298,7 +284,7 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
}
}
hashtable_del(parents, key, key_len);
hashtable_del(parents, key);
return embed ? 0 : dump("]", 1, data);
}
@ -307,7 +293,6 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
const char *separator;
int separator_length;
char loop_key[LOOP_KEY_LEN];
size_t loop_key_len;
if (flags & JSON_COMPACT) {
separator = ":";
@ -318,8 +303,7 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
}
/* detect circular references */
if (jsonp_loop_check(parents, json, loop_key, sizeof(loop_key),
&loop_key_len))
if (jsonp_loop_check(parents, json, loop_key, sizeof(loop_key)))
return -1;
iter = json_object_iter((json_t *)json);
@ -327,44 +311,40 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
if (!embed && dump("{", 1, data))
return -1;
if (!iter) {
hashtable_del(parents, loop_key, loop_key_len);
hashtable_del(parents, loop_key);
return embed ? 0 : dump("}", 1, data);
}
if (dump_indent(flags, depth + 1, 0, dump, data))
return -1;
if (flags & JSON_SORT_KEYS) {
struct key_len *keys;
const char **keys;
size_t size, i;
size = json_object_size(json);
keys = jsonp_malloc(size * sizeof(struct key_len));
keys = jsonp_malloc(size * sizeof(const char *));
if (!keys)
return -1;
i = 0;
while (iter) {
struct key_len *keylen = &keys[i];
keylen->key = json_object_iter_key(iter);
keylen->len = json_object_iter_key_len(iter);
keys[i] = json_object_iter_key(iter);
iter = json_object_iter_next((json_t *)json, iter);
i++;
}
assert(i == size);
qsort(keys, size, sizeof(struct key_len), compare_keys);
qsort(keys, size, sizeof(const char *), compare_keys);
for (i = 0; i < size; i++) {
const struct key_len *key;
const char *key;
json_t *value;
key = &keys[i];
value = json_object_getn(json, key->key, key->len);
key = keys[i];
value = json_object_get(json, key);
assert(value);
dump_string(key->key, key->len, dump, data, flags);
dump_string(key, strlen(key), dump, data, flags);
if (dump(separator, separator_length, data) ||
do_dump(value, flags, depth + 1, parents, dump, data)) {
jsonp_free(keys);
@ -392,9 +372,8 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
while (iter) {
void *next = json_object_iter_next((json_t *)json, iter);
const char *key = json_object_iter_key(iter);
const size_t key_len = json_object_iter_key_len(iter);
dump_string(key, key_len, dump, data, flags);
dump_string(key, strlen(key), dump, data, flags);
if (dump(separator, separator_length, data) ||
do_dump(json_object_iter_value(iter), flags, depth + 1, parents,
dump, data))
@ -413,7 +392,7 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
}
}
hashtable_del(parents, loop_key, loop_key_len);
hashtable_del(parents, loop_key);
return embed ? 0 : dump("}", 1, data);
}

View file

@ -5,14 +5,14 @@
* it under the terms of the MIT license. See LICENSE for details.
*/
#ifdef HAVE_CONFIG_H
#if HAVE_CONFIG_H
#include <jansson_private_config.h>
#endif
#include <stdlib.h>
#include <string.h>
#ifdef HAVE_STDINT_H
#if HAVE_STDINT_H
#include <stdint.h>
#endif
@ -35,7 +35,7 @@ extern volatile uint32_t hashtable_seed;
#define list_to_pair(list_) container_of(list_, pair_t, list)
#define ordered_list_to_pair(list_) container_of(list_, pair_t, ordered_list)
#define hash_str(key, len) ((size_t)hashlittle((key), len, hashtable_seed))
#define hash_str(key) ((size_t)hashlittle((key), strlen(key), hashtable_seed))
static JSON_INLINE void list_init(list_t *list) {
list->next = list;
@ -69,7 +69,7 @@ static void insert_to_bucket(hashtable_t *hashtable, bucket_t *bucket, list_t *l
}
static pair_t *hashtable_find_pair(hashtable_t *hashtable, bucket_t *bucket,
const char *key, size_t key_len, size_t hash) {
const char *key, size_t hash) {
list_t *list;
pair_t *pair;
@ -79,8 +79,7 @@ static pair_t *hashtable_find_pair(hashtable_t *hashtable, bucket_t *bucket,
list = bucket->first;
while (1) {
pair = list_to_pair(list);
if (pair->hash == hash && pair->key_len == key_len &&
memcmp(pair->key, key, key_len) == 0)
if (pair->hash == hash && strcmp(pair->key, key) == 0)
return pair;
if (list == bucket->last)
@ -93,8 +92,7 @@ static pair_t *hashtable_find_pair(hashtable_t *hashtable, bucket_t *bucket,
}
/* returns 0 on success, -1 if key was not found */
static int hashtable_do_del(hashtable_t *hashtable, const char *key, size_t key_len,
size_t hash) {
static int hashtable_do_del(hashtable_t *hashtable, const char *key, size_t hash) {
pair_t *pair;
bucket_t *bucket;
size_t index;
@ -102,7 +100,7 @@ static int hashtable_do_del(hashtable_t *hashtable, const char *key, size_t key_
index = hash & hashmask(hashtable->order);
bucket = &hashtable->buckets[index];
pair = hashtable_find_pair(hashtable, bucket, key, key_len, hash);
pair = hashtable_find_pair(hashtable, bucket, key, hash);
if (!pair)
return -1;
@ -195,37 +193,7 @@ void hashtable_close(hashtable_t *hashtable) {
jsonp_free(hashtable->buckets);
}
static pair_t *init_pair(json_t *value, const char *key, size_t key_len, size_t hash) {
pair_t *pair;
/* offsetof(...) returns the size of pair_t without the last,
flexible member. This way, the correct amount is
allocated. */
if (key_len >= (size_t)-1 - offsetof(pair_t, key)) {
/* Avoid an overflow if the key is very long */
return NULL;
}
pair = jsonp_malloc(offsetof(pair_t, key) + key_len + 1);
if (!pair)
return NULL;
pair->hash = hash;
memcpy(pair->key, key, key_len);
pair->key[key_len] = '\0';
pair->key_len = key_len;
pair->value = value;
list_init(&pair->list);
list_init(&pair->ordered_list);
return pair;
}
int hashtable_set(hashtable_t *hashtable, const char *key, size_t key_len,
json_t *value) {
int hashtable_set(hashtable_t *hashtable, const char *key, json_t *value) {
pair_t *pair;
bucket_t *bucket;
size_t hash, index;
@ -235,20 +203,35 @@ int hashtable_set(hashtable_t *hashtable, const char *key, size_t key_len,
if (hashtable_do_rehash(hashtable))
return -1;
hash = hash_str(key, key_len);
hash = hash_str(key);
index = hash & hashmask(hashtable->order);
bucket = &hashtable->buckets[index];
pair = hashtable_find_pair(hashtable, bucket, key, key_len, hash);
pair = hashtable_find_pair(hashtable, bucket, key, hash);
if (pair) {
json_decref(pair->value);
pair->value = value;
} else {
pair = init_pair(value, key, key_len, hash);
/* offsetof(...) returns the size of pair_t without the last,
flexible member. This way, the correct amount is
allocated. */
size_t len = strlen(key);
if (len >= (size_t)-1 - offsetof(pair_t, key)) {
/* Avoid an overflow if the key is very long */
return -1;
}
pair = jsonp_malloc(offsetof(pair_t, key) + len + 1);
if (!pair)
return -1;
pair->hash = hash;
strncpy(pair->key, key, len + 1);
pair->value = value;
list_init(&pair->list);
list_init(&pair->ordered_list);
insert_to_bucket(hashtable, bucket, &pair->list);
list_insert(&hashtable->ordered_list, &pair->ordered_list);
@ -257,24 +240,24 @@ int hashtable_set(hashtable_t *hashtable, const char *key, size_t key_len,
return 0;
}
void *hashtable_get(hashtable_t *hashtable, const char *key, size_t key_len) {
void *hashtable_get(hashtable_t *hashtable, const char *key) {
pair_t *pair;
size_t hash;
bucket_t *bucket;
hash = hash_str(key, key_len);
hash = hash_str(key);
bucket = &hashtable->buckets[hash & hashmask(hashtable->order)];
pair = hashtable_find_pair(hashtable, bucket, key, key_len, hash);
pair = hashtable_find_pair(hashtable, bucket, key, hash);
if (!pair)
return NULL;
return pair->value;
}
int hashtable_del(hashtable_t *hashtable, const char *key, size_t key_len) {
size_t hash = hash_str(key, key_len);
return hashtable_do_del(hashtable, key, key_len, hash);
int hashtable_del(hashtable_t *hashtable, const char *key) {
size_t hash = hash_str(key);
return hashtable_do_del(hashtable, key, hash);
}
void hashtable_clear(hashtable_t *hashtable) {
@ -295,15 +278,15 @@ void *hashtable_iter(hashtable_t *hashtable) {
return hashtable_iter_next(hashtable, &hashtable->ordered_list);
}
void *hashtable_iter_at(hashtable_t *hashtable, const char *key, size_t key_len) {
void *hashtable_iter_at(hashtable_t *hashtable, const char *key) {
pair_t *pair;
size_t hash;
bucket_t *bucket;
hash = hash_str(key, key_len);
hash = hash_str(key);
bucket = &hashtable->buckets[hash & hashmask(hashtable->order)];
pair = hashtable_find_pair(hashtable, bucket, key, key_len, hash);
pair = hashtable_find_pair(hashtable, bucket, key, hash);
if (!pair)
return NULL;
@ -322,11 +305,6 @@ void *hashtable_iter_key(void *iter) {
return pair->key;
}
size_t hashtable_iter_key_len(void *iter) {
pair_t *pair = ordered_list_to_pair((list_t *)iter);
return pair->key_len;
}
void *hashtable_iter_value(void *iter) {
pair_t *pair = ordered_list_to_pair((list_t *)iter);
return pair->value;

View file

@ -24,7 +24,6 @@ struct hashtable_pair {
struct hashtable_list ordered_list;
size_t hash;
json_t *value;
size_t key_len;
char key[1];
};
@ -70,7 +69,6 @@ void hashtable_close(hashtable_t *hashtable);
*
* @hashtable: The hashtable object
* @key: The key
* @key_len: The length of key
* @serial: For addition order of keys
* @value: The value
*
@ -81,29 +79,27 @@ void hashtable_close(hashtable_t *hashtable);
*
* Returns 0 on success, -1 on failure (out of memory).
*/
int hashtable_set(hashtable_t *hashtable, const char *key, size_t key_len, json_t *value);
int hashtable_set(hashtable_t *hashtable, const char *key, json_t *value);
/**
* hashtable_get - Get a value associated with a key
*
* @hashtable: The hashtable object
* @key: The key
* @key_len: The length of key
*
* Returns value if it is found, or NULL otherwise.
*/
void *hashtable_get(hashtable_t *hashtable, const char *key, size_t key_len);
void *hashtable_get(hashtable_t *hashtable, const char *key);
/**
* hashtable_del - Remove a value from the hashtable
*
* @hashtable: The hashtable object
* @key: The key
* @key_len: The length of key
*
* Returns 0 on success, or -1 if the key was not found.
*/
int hashtable_del(hashtable_t *hashtable, const char *key, size_t key_len);
int hashtable_del(hashtable_t *hashtable, const char *key);
/**
* hashtable_clear - Clear hashtable
@ -136,12 +132,11 @@ void *hashtable_iter(hashtable_t *hashtable);
*
* @hashtable: The hashtable object
* @key: The key that the iterator should point to
* @key_len: The length of key
*
* Like hashtable_iter() but returns an iterator pointing to a
* specific key.
*/
void *hashtable_iter_at(hashtable_t *hashtable, const char *key, size_t key_len);
void *hashtable_iter_at(hashtable_t *hashtable, const char *key);
/**
* hashtable_iter_next - Advance an iterator
@ -161,13 +156,6 @@ void *hashtable_iter_next(hashtable_t *hashtable, void *iter);
*/
void *hashtable_iter_key(void *iter);
/**
* hashtable_iter_key_len - Retrieve the key length pointed by an iterator
*
* @iter: The iterator
*/
size_t hashtable_iter_key_len(void *iter);
/**
* hashtable_iter_value - Retrieve the value pointed by an iterator
*

View file

@ -34,13 +34,9 @@ EXPORTS
json_object
json_object_size
json_object_get
json_object_getn
json_object_set_new
json_object_setn_new
json_object_set_new_nocheck
json_object_setn_new_nocheck
json_object_del
json_object_deln
json_object_clear
json_object_update
json_object_update_existing
@ -50,7 +46,6 @@ EXPORTS
json_object_iter_at
json_object_iter_next
json_object_iter_key
json_object_iter_key_len
json_object_iter_value
json_object_iter_set_new
json_object_key_to_iter

View file

@ -21,11 +21,11 @@ extern "C" {
/* version */
#define JANSSON_MAJOR_VERSION 2
#define JANSSON_MINOR_VERSION 14
#define JANSSON_MICRO_VERSION 1
#define JANSSON_MINOR_VERSION 13
#define JANSSON_MICRO_VERSION 0
/* Micro version is omitted if it's 0 */
#define JANSSON_VERSION "2.14.1"
#define JANSSON_VERSION "2.13"
/* Version as a 3-byte hex number, e.g. 0x010201 == 1.2.1. Use this
for numeric comparisons, e.g. #if JANSSON_VERSION_HEX >= ... */
@ -188,15 +188,9 @@ void json_object_seed(size_t seed);
size_t json_object_size(const json_t *object);
json_t *json_object_get(const json_t *object, const char *key)
JANSSON_ATTRS((warn_unused_result));
json_t *json_object_getn(const json_t *object, const char *key, size_t key_len)
JANSSON_ATTRS((warn_unused_result));
int json_object_set_new(json_t *object, const char *key, json_t *value);
int json_object_setn_new(json_t *object, const char *key, size_t key_len, json_t *value);
int json_object_set_new_nocheck(json_t *object, const char *key, json_t *value);
int json_object_setn_new_nocheck(json_t *object, const char *key, size_t key_len,
json_t *value);
int json_object_del(json_t *object, const char *key);
int json_object_deln(json_t *object, const char *key, size_t key_len);
int json_object_clear(json_t *object);
int json_object_update(json_t *object, json_t *other);
int json_object_update_existing(json_t *object, json_t *other);
@ -207,7 +201,6 @@ void *json_object_iter_at(json_t *object, const char *key);
void *json_object_key_to_iter(const char *key);
void *json_object_iter_next(json_t *object, void *iter);
const char *json_object_iter_key(void *iter);
size_t json_object_iter_key_len(void *iter);
json_t *json_object_iter_value(void *iter);
int json_object_iter_set_new(json_t *object, void *iter, json_t *value);
@ -217,14 +210,6 @@ int json_object_iter_set_new(json_t *object, void *iter, json_t *value);
key = json_object_iter_key( \
json_object_iter_next(object, json_object_key_to_iter(key))))
#define json_object_keylen_foreach(object, key, key_len, value) \
for (key = json_object_iter_key(json_object_iter(object)), \
key_len = json_object_iter_key_len(json_object_key_to_iter(key)); \
key && (value = json_object_iter_value(json_object_key_to_iter(key))); \
key = json_object_iter_key( \
json_object_iter_next(object, json_object_key_to_iter(key))), \
key_len = json_object_iter_key_len(json_object_key_to_iter(key)))
#define json_object_foreach_safe(object, n, key, value) \
for (key = json_object_iter_key(json_object_iter(object)), \
n = json_object_iter_next(object, json_object_key_to_iter(key)); \
@ -232,14 +217,6 @@ int json_object_iter_set_new(json_t *object, void *iter, json_t *value);
key = json_object_iter_key(n), \
n = json_object_iter_next(object, json_object_key_to_iter(key)))
#define json_object_keylen_foreach_safe(object, n, key, key_len, value) \
for (key = json_object_iter_key(json_object_iter(object)), \
n = json_object_iter_next(object, json_object_key_to_iter(key)), \
key_len = json_object_iter_key_len(json_object_key_to_iter(key)); \
key && (value = json_object_iter_value(json_object_key_to_iter(key))); \
key = json_object_iter_key(n), key_len = json_object_iter_key_len(n), \
n = json_object_iter_next(object, json_object_key_to_iter(key)))
#define json_array_foreach(array, index, value) \
for (index = 0; \
index < json_array_size(array) && (value = json_array_get(array, index)); \
@ -249,21 +226,11 @@ static JSON_INLINE int json_object_set(json_t *object, const char *key, json_t *
return json_object_set_new(object, key, json_incref(value));
}
static JSON_INLINE int json_object_setn(json_t *object, const char *key, size_t key_len,
json_t *value) {
return json_object_setn_new(object, key, key_len, json_incref(value));
}
static JSON_INLINE int json_object_set_nocheck(json_t *object, const char *key,
json_t *value) {
return json_object_set_new_nocheck(object, key, json_incref(value));
}
static JSON_INLINE int json_object_setn_nocheck(json_t *object, const char *key,
size_t key_len, json_t *value) {
return json_object_setn_new_nocheck(object, key, key_len, json_incref(value));
}
static JSON_INLINE int json_object_iter_set(json_t *object, void *iter, json_t *value) {
return json_object_iter_set_new(object, iter, json_incref(value));
}
@ -379,14 +346,14 @@ json_t *json_load_callback(json_load_callback_t callback, void *data, size_t fla
/* encoding */
#define JSON_MAX_INDENT 0x1F
#define JSON_INDENT(n) ((n) & JSON_MAX_INDENT)
#define JSON_INDENT(n) ((n)&JSON_MAX_INDENT)
#define JSON_COMPACT 0x20
#define JSON_ENSURE_ASCII 0x40
#define JSON_SORT_KEYS 0x80
#define JSON_PRESERVE_ORDER 0x100
#define JSON_ENCODE_ANY 0x200
#define JSON_ESCAPE_SLASH 0x400
#define JSON_REAL_PRECISION(n) (((n) & 0x1F) << 11)
#define JSON_REAL_PRECISION(n) (((n)&0x1F) << 11)
#define JSON_EMBED 0x10000
typedef int (*json_dump_callback_t)(const char *buffer, size_t size, void *data);

View file

@ -32,6 +32,10 @@
otherwise to 0. */
#define JSON_INTEGER_IS_LONG_LONG @json_have_long_long@
/* If locale.h and localeconv() are available, define to 1,
otherwise to 0. */
#define JSON_HAVE_LOCALECONV @json_have_localeconv@
/* If __atomic builtins are available they will be used to manage
reference counts of json_t. */
#define JSON_HAVE_ATOMIC_BUILTINS @json_have_atomic_builtins@

View file

@ -91,8 +91,8 @@ char *jsonp_strndup(const char *str, size_t len) JANSSON_ATTRS((warn_unused_resu
/* Circular reference check*/
/* Space for "0x", double the sizeof a pointer for the hex and a terminator. */
#define LOOP_KEY_LEN (2 + (sizeof(json_t *) * 2) + 1)
int jsonp_loop_check(hashtable_t *parents, const json_t *json, char *key, size_t key_size,
size_t *key_len_out);
int jsonp_loop_check(hashtable_t *parents, const json_t *json, char *key,
size_t key_size);
/* Windows compatibility */
#if defined(_WIN32) || defined(WIN32)

View file

@ -689,7 +689,7 @@ static json_t *parse_object(lex_t *lex, size_t flags, json_error_t *error) {
}
if (flags & JSON_REJECT_DUPLICATES) {
if (json_object_getn(object, key, len)) {
if (json_object_get(object, key)) {
jsonp_free(key);
error_set(error, lex, json_error_duplicate_key, "duplicate object key");
goto error;
@ -710,7 +710,7 @@ static json_t *parse_object(lex_t *lex, size_t flags, json_error_t *error) {
goto error;
}
if (json_object_setn_new_nocheck(object, key, len, value)) {
if (json_object_set_new_nocheck(object, key, value)) {
jsonp_free(key);
goto error;
}

View file

@ -73,7 +73,7 @@ on 1 byte), but shoehorning those bytes into integers efficiently is messy.
# define HASH_BIG_ENDIAN 0
#endif
#define hashsize(n) ((size_t)1<<(n))
#define hashsize(n) ((uint32_t)1<<(n))
#define hashmask(n) (hashsize(n)-1)
#define rot(x,k) (((x)<<(k)) | ((x)>>(32-(k))))

View file

@ -544,7 +544,7 @@ static int unpack_object(scanner_t *s, json_t *root, va_list *ap) {
if (unpack(s, value, ap))
goto out;
hashtable_set(&key_set, key, strlen(key), json_null());
hashtable_set(&key_set, key, json_null());
next_token(s);
}
@ -554,7 +554,6 @@ static int unpack_object(scanner_t *s, json_t *root, va_list *ap) {
if (root && strict == 1) {
/* We need to check that all non optional items have been parsed */
const char *key;
size_t key_len;
/* keys_res is 1 for uninitialized, 0 for success, -1 for error. */
int keys_res = 1;
strbuffer_t unrecognized_keys;
@ -562,8 +561,8 @@ static int unpack_object(scanner_t *s, json_t *root, va_list *ap) {
long unpacked = 0;
if (gotopt || json_object_size(root) != key_set.size) {
json_object_keylen_foreach(root, key, key_len, value) {
if (!hashtable_get(&key_set, key, key_len)) {
json_object_foreach(root, key, value) {
if (!hashtable_get(&key_set, key)) {
unpacked++;
/* Save unrecognized keys for the error message */
@ -575,7 +574,7 @@ static int unpack_object(scanner_t *s, json_t *root, va_list *ap) {
if (!keys_res)
keys_res =
strbuffer_append_bytes(&unrecognized_keys, key, key_len);
strbuffer_append_bytes(&unrecognized_keys, key, strlen(key));
}
}
}

View file

@ -16,7 +16,7 @@
#define STRBUFFER_MIN_SIZE 16
#define STRBUFFER_FACTOR 2
#define STRBUFFER_SIZE_MAX ((size_t)(-1))
#define STRBUFFER_SIZE_MAX ((size_t)-1)
int strbuffer_init(strbuffer_t *strbuff) {
strbuff->size = STRBUFFER_MIN_SIZE;

View file

@ -11,42 +11,57 @@
#include <jansson_private_config.h>
#endif
#if JSON_HAVE_LOCALECONV
#include <locale.h>
/*
- This code assumes that the decimal separator is exactly one
character.
- If setlocale() is called by another thread between the call to
get_decimal_point() and the call to sprintf() or strtod(), the
result may be wrong. setlocale() is not thread-safe and should
not be used this way. Multi-threaded programs should use
uselocale() instead.
localeconv() and the call to sprintf() or strtod(), the result may
be wrong. setlocale() is not thread-safe and should not be used
this way. Multi-threaded programs should use uselocale() instead.
*/
static char get_decimal_point() {
char buf[3];
sprintf(buf, "%#.0f", 1.0); // "1." in the current locale
return buf[1];
}
static void to_locale(strbuffer_t *strbuffer) {
char point;
const char *point;
char *pos;
point = get_decimal_point();
if (point == '.') {
point = localeconv()->decimal_point;
if (*point == '.') {
/* No conversion needed */
return;
}
pos = strchr(strbuffer->value, '.');
if (pos)
*pos = point;
*pos = *point;
}
static void from_locale(char *buffer) {
const char *point;
char *pos;
point = localeconv()->decimal_point;
if (*point == '.') {
/* No conversion needed */
return;
}
pos = strchr(buffer, *point);
if (pos)
*pos = '.';
}
#endif
int jsonp_strtod(strbuffer_t *strbuffer, double *out) {
double value;
char *end;
#if JSON_HAVE_LOCALECONV
to_locale(strbuffer);
#endif
errno = 0;
value = strtod(strbuffer->value, &end);
@ -61,127 +76,6 @@ int jsonp_strtod(strbuffer_t *strbuffer, double *out) {
return 0;
}
#if DTOA_ENABLED
/* see dtoa.c */
char *dtoa_r(double dd, int mode, int ndigits, int *decpt, int *sign, char **rve,
char *buf, size_t blen);
int jsonp_dtostr(char *buffer, size_t size, double value, int precision) {
/* adapted from `format_float_short()` in
* https://github.com/python/cpython/blob/2cf18a44303b6d84faa8ecffaecc427b53ae121e/Python/pystrtod.c#L969
*/
char digits[25];
char *digits_end;
int mode = precision == 0 ? 0 : 2;
int decpt, sign, exp_len, exp = 0, use_exp = 0;
int digits_len, vdigits_start, vdigits_end;
char *p;
if (dtoa_r(value, mode, precision, &decpt, &sign, &digits_end, digits, 25) == NULL) {
// digits is too short => should not happen
return -1;
}
digits_len = digits_end - digits;
if (decpt <= -4 || decpt > 16) {
use_exp = 1;
exp = decpt - 1;
decpt = 1;
}
vdigits_start = decpt <= 0 ? decpt - 1 : 0;
vdigits_end = digits_len;
if (!use_exp) {
/* decpt + 1 to add ".0" if value is an integer */
vdigits_end = vdigits_end > decpt ? vdigits_end : decpt + 1;
} else {
vdigits_end = vdigits_end > decpt ? vdigits_end : decpt;
}
if (
/* sign, decimal point and trailing 0 byte */
(size_t)(3 +
/* total digit count (including zero padding on both sides) */
(vdigits_end - vdigits_start) +
/* exponent "e+100", max 3 numerical digits */
(use_exp ? 5 : 0)) > size) {
/* buffer is too short */
return -1;
}
p = buffer;
if (sign == 1) {
*p++ = '-';
}
/* note that exactly one of the three 'if' conditions is true,
so we include exactly one decimal point */
/* Zero padding on left of digit string */
if (decpt <= 0) {
memset(p, '0', decpt - vdigits_start);
p += decpt - vdigits_start;
*p++ = '.';
memset(p, '0', 0 - decpt);
p += 0 - decpt;
} else {
memset(p, '0', 0 - vdigits_start);
p += 0 - vdigits_start;
}
/* Digits, with included decimal point */
if (0 < decpt && decpt <= digits_len) {
strncpy(p, digits, decpt - 0);
p += decpt - 0;
*p++ = '.';
strncpy(p, digits + decpt, digits_len - decpt);
p += digits_len - decpt;
} else {
strncpy(p, digits, digits_len);
p += digits_len;
}
/* And zeros on the right */
if (digits_len < decpt) {
memset(p, '0', decpt - digits_len);
p += decpt - digits_len;
*p++ = '.';
memset(p, '0', vdigits_end - decpt);
p += vdigits_end - decpt;
} else {
memset(p, '0', vdigits_end - digits_len);
p += vdigits_end - digits_len;
}
if (p[-1] == '.')
p--;
if (use_exp) {
*p++ = 'e';
exp_len = sprintf(p, "%d", exp);
p += exp_len;
}
*p = '\0';
return (int)(p - buffer);
}
#else /* DTOA_ENABLED == 0 */
static void from_locale(char *buffer) {
char point;
char *pos;
point = get_decimal_point();
if (point == '.') {
/* No conversion needed */
return;
}
pos = strchr(buffer, point);
if (pos)
*pos = '.';
}
int jsonp_dtostr(char *buffer, size_t size, double value, int precision) {
int ret;
char *start, *end;
@ -198,7 +92,9 @@ int jsonp_dtostr(char *buffer, size_t size, double value, int precision) {
if (length >= size)
return -1;
#if JSON_HAVE_LOCALECONV
from_locale(buffer);
#endif
/* Make sure there's a dot or 'e' in the output. Otherwise
a real is converted to an integer when decoding */
@ -234,4 +130,3 @@ int jsonp_dtostr(char *buffer, size_t size, double value, int precision) {
return (int)length;
}
#endif

View file

@ -44,17 +44,13 @@ static JSON_INLINE void json_init(json_t *json, json_type type) {
json->refcount = 1;
}
int jsonp_loop_check(hashtable_t *parents, const json_t *json, char *key, size_t key_size,
size_t *key_len_out) {
size_t key_len = snprintf(key, key_size, "%p", json);
if (key_len_out)
*key_len_out = key_len;
if (hashtable_get(parents, key, key_len))
int jsonp_loop_check(hashtable_t *parents, const json_t *json, char *key,
size_t key_size) {
snprintf(key, key_size, "%p", json);
if (hashtable_get(parents, key))
return -1;
return hashtable_set(parents, key, key_len, json_null());
return hashtable_set(parents, key, json_null());
}
/*** object ***/
@ -97,32 +93,16 @@ size_t json_object_size(const json_t *json) {
}
json_t *json_object_get(const json_t *json, const char *key) {
if (!key)
return NULL;
return json_object_getn(json, key, strlen(key));
}
json_t *json_object_getn(const json_t *json, const char *key, size_t key_len) {
json_object_t *object;
if (!key || !json_is_object(json))
return NULL;
object = json_to_object(json);
return hashtable_get(&object->hashtable, key, key_len);
return hashtable_get(&object->hashtable, key);
}
int json_object_set_new_nocheck(json_t *json, const char *key, json_t *value) {
if (!key) {
json_decref(value);
return -1;
}
return json_object_setn_new_nocheck(json, key, strlen(key), value);
}
int json_object_setn_new_nocheck(json_t *json, const char *key, size_t key_len,
json_t *value) {
json_object_t *object;
if (!value)
@ -134,7 +114,7 @@ int json_object_setn_new_nocheck(json_t *json, const char *key, size_t key_len,
}
object = json_to_object(json);
if (hashtable_set(&object->hashtable, key, key_len, value)) {
if (hashtable_set(&object->hashtable, key, value)) {
json_decref(value);
return -1;
}
@ -143,38 +123,22 @@ int json_object_setn_new_nocheck(json_t *json, const char *key, size_t key_len,
}
int json_object_set_new(json_t *json, const char *key, json_t *value) {
if (!key) {
if (!key || !utf8_check_string(key, strlen(key))) {
json_decref(value);
return -1;
}
return json_object_setn_new(json, key, strlen(key), value);
}
int json_object_setn_new(json_t *json, const char *key, size_t key_len, json_t *value) {
if (!key || !utf8_check_string(key, key_len)) {
json_decref(value);
return -1;
}
return json_object_setn_new_nocheck(json, key, key_len, value);
return json_object_set_new_nocheck(json, key, value);
}
int json_object_del(json_t *json, const char *key) {
if (!key)
return -1;
return json_object_deln(json, key, strlen(key));
}
int json_object_deln(json_t *json, const char *key, size_t key_len) {
json_object_t *object;
if (!key || !json_is_object(json))
return -1;
object = json_to_object(json);
return hashtable_del(&object->hashtable, key, key_len);
return hashtable_del(&object->hashtable, key);
}
int json_object_clear(json_t *json) {
@ -191,14 +155,13 @@ int json_object_clear(json_t *json) {
int json_object_update(json_t *object, json_t *other) {
const char *key;
size_t key_len;
json_t *value;
if (!json_is_object(object) || !json_is_object(other))
return -1;
json_object_keylen_foreach(other, key, key_len, value) {
if (json_object_setn_nocheck(object, key, key_len, value))
json_object_foreach(other, key, value) {
if (json_object_set_nocheck(object, key, value))
return -1;
}
@ -207,15 +170,14 @@ int json_object_update(json_t *object, json_t *other) {
int json_object_update_existing(json_t *object, json_t *other) {
const char *key;
size_t key_len;
json_t *value;
if (!json_is_object(object) || !json_is_object(other))
return -1;
json_object_keylen_foreach(other, key, key_len, value) {
if (json_object_getn(object, key, key_len))
json_object_setn_nocheck(object, key, key_len, value);
json_object_foreach(other, key, value) {
if (json_object_get(object, key))
json_object_set_nocheck(object, key, value);
}
return 0;
@ -223,15 +185,14 @@ int json_object_update_existing(json_t *object, json_t *other) {
int json_object_update_missing(json_t *object, json_t *other) {
const char *key;
size_t key_len;
json_t *value;
if (!json_is_object(object) || !json_is_object(other))
return -1;
json_object_keylen_foreach(other, key, key_len, value) {
if (!json_object_getn(object, key, key_len))
json_object_setn_nocheck(object, key, key_len, value);
json_object_foreach(other, key, value) {
if (!json_object_get(object, key))
json_object_set_nocheck(object, key, value);
}
return 0;
@ -239,20 +200,18 @@ int json_object_update_missing(json_t *object, json_t *other) {
int do_object_update_recursive(json_t *object, json_t *other, hashtable_t *parents) {
const char *key;
size_t key_len;
json_t *value;
char loop_key[LOOP_KEY_LEN];
int res = 0;
size_t loop_key_len;
if (!json_is_object(object) || !json_is_object(other))
return -1;
if (jsonp_loop_check(parents, other, loop_key, sizeof(loop_key), &loop_key_len))
if (jsonp_loop_check(parents, other, loop_key, sizeof(loop_key)))
return -1;
json_object_keylen_foreach(other, key, key_len, value) {
json_t *v = json_object_getn(object, key, key_len);
json_object_foreach(other, key, value) {
json_t *v = json_object_get(object, key);
if (json_is_object(v) && json_is_object(value)) {
if (do_object_update_recursive(v, value, parents)) {
@ -260,14 +219,14 @@ int do_object_update_recursive(json_t *object, json_t *other, hashtable_t *paren
break;
}
} else {
if (json_object_setn_nocheck(object, key, key_len, value)) {
if (json_object_set_nocheck(object, key, value)) {
res = -1;
break;
}
}
}
hashtable_del(parents, loop_key, loop_key_len);
hashtable_del(parents, loop_key);
return res;
}
@ -301,7 +260,7 @@ void *json_object_iter_at(json_t *json, const char *key) {
return NULL;
object = json_to_object(json);
return hashtable_iter_at(&object->hashtable, key, strlen(key));
return hashtable_iter_at(&object->hashtable, key);
}
void *json_object_iter_next(json_t *json, void *iter) {
@ -321,13 +280,6 @@ const char *json_object_iter_key(void *iter) {
return hashtable_iter_key(iter);
}
size_t json_object_iter_key_len(void *iter) {
if (!iter)
return 0;
return hashtable_iter_key_len(iter);
}
json_t *json_object_iter_value(void *iter) {
if (!iter)
return NULL;
@ -354,14 +306,13 @@ void *json_object_key_to_iter(const char *key) {
static int json_object_equal(const json_t *object1, const json_t *object2) {
const char *key;
size_t key_len;
const json_t *value1, *value2;
if (json_object_size(object1) != json_object_size(object2))
return 0;
json_object_keylen_foreach((json_t *)object1, key, key_len, value1) {
value2 = json_object_getn(object2, key, key_len);
json_object_foreach((json_t *)object1, key, value1) {
value2 = json_object_get(object2, key);
if (!json_equal(value1, value2))
return 0;
@ -374,15 +325,13 @@ static json_t *json_object_copy(json_t *object) {
json_t *result;
const char *key;
size_t key_len;
json_t *value;
result = json_object();
if (!result)
return NULL;
json_object_keylen_foreach(object, key, key_len, value)
json_object_setn_nocheck(result, key, key_len, value);
json_object_foreach(object, key, value) json_object_set_nocheck(result, key, value);
return result;
}
@ -391,9 +340,8 @@ static json_t *json_object_deep_copy(const json_t *object, hashtable_t *parents)
json_t *result;
void *iter;
char loop_key[LOOP_KEY_LEN];
size_t loop_key_len;
if (jsonp_loop_check(parents, object, loop_key, sizeof(loop_key), &loop_key_len))
if (jsonp_loop_check(parents, object, loop_key, sizeof(loop_key)))
return NULL;
result = json_object();
@ -405,14 +353,11 @@ static json_t *json_object_deep_copy(const json_t *object, hashtable_t *parents)
iter = json_object_iter((json_t *)object);
while (iter) {
const char *key;
size_t key_len;
const json_t *value;
key = json_object_iter_key(iter);
key_len = json_object_iter_key_len(iter);
value = json_object_iter_value(iter);
if (json_object_setn_new_nocheck(result, key, key_len,
do_deep_copy(value, parents))) {
if (json_object_set_new_nocheck(result, key, do_deep_copy(value, parents))) {
json_decref(result);
result = NULL;
break;
@ -421,7 +366,7 @@ static json_t *json_object_deep_copy(const json_t *object, hashtable_t *parents)
}
out:
hashtable_del(parents, loop_key, loop_key_len);
hashtable_del(parents, loop_key);
return result;
}
@ -688,9 +633,8 @@ static json_t *json_array_deep_copy(const json_t *array, hashtable_t *parents) {
json_t *result;
size_t i;
char loop_key[LOOP_KEY_LEN];
size_t loop_key_len;
if (jsonp_loop_check(parents, array, loop_key, sizeof(loop_key), &loop_key_len))
if (jsonp_loop_check(parents, array, loop_key, sizeof(loop_key)))
return NULL;
result = json_array();
@ -707,7 +651,7 @@ static json_t *json_array_deep_copy(const json_t *array, hashtable_t *parents) {
}
out:
hashtable_del(parents, loop_key, loop_key_len);
hashtable_del(parents, loop_key);
return result;
}
@ -853,18 +797,16 @@ json_t *json_vsprintf(const char *fmt, va_list ap) {
va_copy(aq, ap);
length = vsnprintf(NULL, 0, fmt, ap);
if (length < 0)
goto out;
if (length == 0) {
json = json_string("");
goto out;
}
buf = jsonp_malloc((size_t)length + 1);
buf = jsonp_malloc(length + 1);
if (!buf)
goto out;
vsnprintf(buf, (size_t)length + 1, fmt, aq);
vsnprintf(buf, length + 1, fmt, aq);
if (!utf8_check_string(buf, length)) {
jsonp_free(buf);
goto out;

1
test/.gitignore vendored
View file

@ -7,7 +7,6 @@ suites/api/test_cpp
suites/api/test_dump
suites/api/test_dump_callback
suites/api/test_equal
suites/api/test_fixed_size
suites/api/test_load
suites/api/test_load_callback
suites/api/test_loadb

View file

@ -35,6 +35,7 @@ struct config {
int ensure_ascii;
int sort_keys;
int strip;
int use_env;
int have_hashseed;
int hashseed;
int precision;
@ -62,7 +63,7 @@ static const char *strip(char *str) {
}
static char *loadfile(FILE *file) {
size_t fsize, ret;
long fsize, ret;
char *buf;
fseek(file, 0, SEEK_END);
@ -80,10 +81,11 @@ static char *loadfile(FILE *file) {
static void read_conf(FILE *conffile) {
char *buffer, *line, *val;
conf.have_hashseed = 0;
buffer = loadfile(conffile);
for (line = strtok(buffer, "\r\n"); line; line = strtok(NULL, "\r\n")) {
if (!strncmp(line, "export ", 7))
continue;
val = strchr(line, '=');
if (!val) {
printf("invalid configuration line\n");
@ -108,6 +110,8 @@ static void read_conf(FILE *conffile) {
if (!strcmp(line, "HASHSEED")) {
conf.have_hashseed = 1;
conf.hashseed = atoi(val);
} else {
conf.have_hashseed = 0;
}
}
@ -134,16 +138,10 @@ static int cmpfile(const char *str, const char *path, const char *fname) {
}
buffer = loadfile(file);
if (strcmp(buffer, str) != 0) {
fprintf(stderr, "=== Expected %s ===\n", fname);
fprintf(stderr, "%s\n", buffer);
fprintf(stderr, "=== Actual %s ===\n", fname);
fprintf(stderr, "%s\n", str);
if (strcmp(buffer, str) != 0)
ret = 1;
} else {
else
ret = 0;
}
free(buffer);
fclose(file);
@ -208,9 +206,8 @@ int use_conf(char *test_path) {
buffer = loadfile(infile);
json = json_loads(strip(buffer), 0, &error);
free(buffer);
} else {
} else
json = json_loadf(infile, 0, &error);
}
fclose(infile);
@ -230,6 +227,108 @@ int use_conf(char *test_path) {
return ret;
}
static int getenv_int(const char *name) {
char *value, *end;
long result;
value = getenv(name);
if (!value)
return 0;
result = strtol(value, &end, 10);
if (*end != '\0')
return 0;
return (int)result;
}
int use_env() {
int indent, precision;
size_t flags = 0;
json_t *json;
json_error_t error;
#ifdef _WIN32
/* On Windows, set stdout and stderr to binary mode to avoid
outputting DOS line terminators */
_setmode(_fileno(stdout), _O_BINARY);
_setmode(_fileno(stderr), _O_BINARY);
#endif
indent = getenv_int("JSON_INDENT");
if (indent < 0 || indent > 31) {
fprintf(stderr, "invalid value for JSON_INDENT: %d\n", indent);
return 2;
}
if (indent > 0)
flags |= JSON_INDENT(indent);
if (getenv_int("JSON_COMPACT") > 0)
flags |= JSON_COMPACT;
if (getenv_int("JSON_ENSURE_ASCII"))
flags |= JSON_ENSURE_ASCII;
if (getenv_int("JSON_PRESERVE_ORDER"))
flags |= JSON_PRESERVE_ORDER;
if (getenv_int("JSON_SORT_KEYS"))
flags |= JSON_SORT_KEYS;
precision = getenv_int("JSON_REAL_PRECISION");
if (precision < 0 || precision > 31) {
fprintf(stderr, "invalid value for JSON_REAL_PRECISION: %d\n", precision);
return 2;
}
if (getenv("HASHSEED"))
json_object_seed(getenv_int("HASHSEED"));
if (precision > 0)
flags |= JSON_REAL_PRECISION(precision);
if (getenv_int("STRIP")) {
/* Load to memory, strip leading and trailing whitespace */
size_t size = 0, used = 0;
char *buffer = NULL, *buf_ck = NULL;
while (1) {
size_t count;
size = (size == 0 ? 128 : size * 2);
buf_ck = realloc(buffer, size);
if (!buf_ck) {
fprintf(stderr, "Unable to allocate %d bytes\n", (int)size);
free(buffer);
return 1;
}
buffer = buf_ck;
count = fread(buffer + used, 1, size - used, stdin);
if (count < size - used) {
buffer[used + count] = '\0';
break;
}
used += count;
}
json = json_loads(strip(buffer), 0, &error);
free(buffer);
} else
json = json_loadf(stdin, 0, &error);
if (!json) {
fprintf(stderr, "%d %d %d\n%s\n", error.line, error.column, error.position,
error.text);
return 1;
}
json_dumpf(json, stdout, flags);
json_decref(json);
return 0;
}
int main(int argc, char *argv[]) {
int i;
char *test_path = NULL;
@ -245,17 +344,23 @@ int main(int argc, char *argv[]) {
for (i = 1; i < argc; i++) {
if (!strcmp(argv[i], "--strip"))
conf.strip = 1;
else if (!strcmp(argv[i], "--env"))
conf.use_env = 1;
else
test_path = argv[i];
}
if (!test_path) {
goto usage;
if (conf.use_env)
return use_env();
else {
if (!test_path)
goto usage;
return use_conf(test_path);
}
return use_conf(test_path);
usage:
fprintf(stderr, "usage: %s [--strip] test_dir\n", argv[0]);
fprintf(stderr, "argc =%d\n", argc);
fprintf(stderr, "usage: %s [--strip] [--env] test_dir\n", argv[0]);
return 2;
}

38
test/ossfuzz/travisoss.sh Executable file
View file

@ -0,0 +1,38 @@
#!/bin/bash
set -ex
PROJECT_NAME=jansson
# Clone the oss-fuzz repository
git clone https://github.com/google/oss-fuzz.git /tmp/ossfuzz
if [[ ! -d /tmp/ossfuzz/projects/${PROJECT_NAME} ]]
then
echo "Could not find the ${PROJECT_NAME} project in ossfuzz"
# Exit with a success code while the jansson project is not expected to exist
# on oss-fuzz.
exit 0
fi
# Work out which repo to clone from, inside Docker
if [[ ${TRAVIS_PULL_REQUEST} != "false" ]]
then
# Pull-request branch
REPO=${TRAVIS_PULL_REQUEST_SLUG}
BRANCH=${TRAVIS_PULL_REQUEST_BRANCH}
else
# Push build.
REPO=${TRAVIS_REPO_SLUG}
BRANCH=${TRAVIS_BRANCH}
fi
# Modify the oss-fuzz Dockerfile so that we're checking out the current branch on travis.
sed -i "s@https://github.com/akheron/jansson.git@-b ${BRANCH} https://github.com/${REPO}.git@" /tmp/ossfuzz/projects/${PROJECT_NAME}/Dockerfile
# Try and build the fuzzers
pushd /tmp/ossfuzz
python infra/helper.py build_image --pull ${PROJECT_NAME}
python infra/helper.py build_fuzzers ${PROJECT_NAME}
popd

View file

@ -7,10 +7,9 @@ check_PROGRAMS = \
test_dump \
test_dump_callback \
test_equal \
test_fixed_size \
test_load \
test_load_callback \
test_loadb \
test_load_callback \
test_memory_funcs \
test_number \
test_object \
@ -25,7 +24,6 @@ test_chaos_SOURCES = test_chaos.c util.h
test_copy_SOURCES = test_copy.c util.h
test_dump_SOURCES = test_dump.c util.h
test_dump_callback_SOURCES = test_dump_callback.c util.h
test_fixed_size_SOURCES = test_fixed_size.c util.h
test_load_SOURCES = test_load.c util.h
test_loadb_SOURCES = test_loadb.c util.h
test_memory_funcs_SOURCES = test_memory_funcs.c util.h

View file

@ -7,7 +7,7 @@ SOFILE="../src/.libs/libjansson.so"
# The list of symbols, which the shared object should export, is read
# from the def file, which is used in Windows builds
grep 'json_\|jansson_' $top_srcdir/src/jansson.def \
grep 'json_' $top_srcdir/src/jansson.def \
| sed -e 's/ //g' \
| sort \
>$test_log/exports
@ -15,7 +15,7 @@ grep 'json_\|jansson_' $top_srcdir/src/jansson.def \
nm -D $SOFILE >/dev/null >$test_log/symbols 2>/dev/null \
|| exit 77 # Skip if "nm -D" doesn't seem to work
grep ' [DT] ' $test_log/symbols | cut -d' ' -f3 | grep -v '^_' | sed 's/@@libjansson.*//' | sort >$test_log/output
grep ' [DT] ' $test_log/symbols | cut -d' ' -f3 | grep -v '^_' | sort >$test_log/output
if ! cmp -s $test_log/exports $test_log/output; then
diff -u $test_log/exports $test_log/output >&2

View file

@ -1,228 +0,0 @@
/*
* Copyright (c) 2020 Petri Lehtinen <petri@digip.org>
*
* Jansson is free software; you can redistribute it and/or modify
* it under the terms of the MIT license. See LICENSE for details.
*/
#include "util.h"
#include <jansson.h>
#include <string.h>
static void test_keylen_iterator(json_t *object) {
const char key1[] = {'t', 'e', 's', 't', '1'};
const char key2[] = {'t', 'e', 's', 't'};
const char key3[] = {'t', 'e', 's', '\0', 't'};
const char key4[] = {'t', 'e', 's', 't', '\0'};
const char *reference_keys[] = {key1, key2, key3, key4};
const size_t reference_keys_len[] = {sizeof(key1), sizeof(key2), sizeof(key3),
sizeof(key4)};
size_t index = 0;
json_t *value;
const char *key;
size_t keylen;
json_object_keylen_foreach(object, key, keylen, value) {
if (keylen != reference_keys_len[index])
fail("invalid key len in iterator");
if (memcmp(key, reference_keys[index], reference_keys_len[index]) != 0)
fail("invalid key in iterator");
index++;
}
}
static void test_keylen(void) {
json_t *obj = json_object();
const char key[] = {'t', 'e', 's', 't', '1'};
const char key2[] = {'t', 'e', 's', 't'};
const char key3[] = {'t', 'e', 's', '\0', 't'};
const char key4[] = {'t', 'e', 's', 't', '\0'};
if (json_object_size(obj) != 0)
fail("incorrect json");
json_object_set_new_nocheck(obj, "test1", json_true());
if (json_object_size(obj) != 1)
fail("incorrect json");
if (json_object_getn(obj, key, sizeof(key)) != json_true())
fail("json_object_getn failed");
if (json_object_getn(obj, key2, sizeof(key2)) != NULL)
fail("false positive json_object_getn by key2");
if (json_object_setn_nocheck(obj, key2, sizeof(key2), json_false()))
fail("json_object_setn_nocheck for key2 failed");
if (json_object_size(obj) != 2)
fail("incorrect json");
if (json_object_get(obj, "test") != json_false())
fail("json_object_setn_nocheck for key2 failed");
if (json_object_getn(obj, key2, sizeof(key2)) != json_false())
fail("json_object_getn by key 2 failed");
if (json_object_getn(obj, key3, sizeof(key3)) != NULL)
fail("false positive json_object_getn by key3");
if (json_object_setn_nocheck(obj, key3, sizeof(key3), json_false()))
fail("json_object_setn_nocheck for key3 failed");
if (json_object_size(obj) != 3)
fail("incorrect json");
if (json_object_getn(obj, key3, sizeof(key3)) != json_false())
fail("json_object_getn by key 3 failed");
if (json_object_getn(obj, key4, sizeof(key4)) != NULL)
fail("false positive json_object_getn by key3");
if (json_object_setn_nocheck(obj, key4, sizeof(key4), json_false()))
fail("json_object_setn_nocheck for key3 failed");
if (json_object_size(obj) != 4)
fail("incorrect json");
test_keylen_iterator(obj);
if (json_object_getn(obj, key4, sizeof(key4)) != json_false())
fail("json_object_getn by key 3 failed");
if (json_object_size(obj) != 4)
fail("incorrect json");
if (json_object_deln(obj, key4, sizeof(key4)))
fail("json_object_deln failed");
if (json_object_getn(obj, key4, sizeof(key4)) != NULL)
fail("json_object_deln failed");
if (json_object_size(obj) != 3)
fail("incorrect json");
if (json_object_deln(obj, key3, sizeof(key3)))
fail("json_object_deln failed");
if (json_object_getn(obj, key3, sizeof(key3)) != NULL)
fail("json_object_deln failed");
if (json_object_size(obj) != 2)
fail("incorrect json");
if (json_object_deln(obj, key2, sizeof(key2)))
fail("json_object_deln failed");
if (json_object_getn(obj, key2, sizeof(key2)) != NULL)
fail("json_object_deln failed");
if (json_object_size(obj) != 1)
fail("incorrect json");
if (json_object_deln(obj, key, sizeof(key)))
fail("json_object_deln failed");
if (json_object_getn(obj, key, sizeof(key)) != NULL)
fail("json_object_deln failed");
if (json_object_size(obj) != 0)
fail("incorrect json");
json_decref(obj);
}
static void test_invalid_keylen(void) {
json_t *obj = json_object();
json_t *empty_obj = json_object();
const char key[] = {'t', 'e', 's', 't', '1'};
json_object_set_new_nocheck(obj, "test1", json_true());
if (json_object_getn(NULL, key, sizeof(key)) != NULL)
fail("json_object_getn on NULL failed");
if (json_object_getn(obj, NULL, sizeof(key)) != NULL)
fail("json_object_getn on NULL failed");
if (json_object_getn(obj, key, 0) != NULL)
fail("json_object_getn on NULL failed");
if (!json_object_setn_new(obj, NULL, sizeof(key), json_true()))
fail("json_object_setn_new with NULL key failed");
if (!json_object_setn_new_nocheck(obj, NULL, sizeof(key), json_true()))
fail("json_object_setn_new_nocheck with NULL key failed");
if (!json_object_del(obj, NULL))
fail("json_object_del with NULL failed");
if (!json_object_deln(empty_obj, key, sizeof(key)))
fail("json_object_deln with empty object failed");
if (!json_object_deln(obj, key, sizeof(key) - 1))
fail("json_object_deln with incomplete key failed");
json_decref(obj);
json_decref(empty_obj);
}
static void test_binary_keys(void) {
json_t *obj = json_object();
int key1 = 0;
int key2 = 1;
json_object_setn_nocheck(obj, (const char *)&key1, sizeof(key1), json_true());
json_object_setn_nocheck(obj, (const char *)&key2, sizeof(key2), json_true());
if (!json_is_true(json_object_getn(obj, (const char *)&key1, sizeof(key1))))
fail("cannot get integer key1");
if (!json_is_true(json_object_getn(obj, (const char *)&key2, sizeof(key2))))
fail("cannot get integer key2");
if (json_object_size(obj) != 2)
fail("binary object size missmatch");
if (json_object_deln(obj, (const char *)&key1, sizeof(key1)))
fail("cannot del integer key1");
if (json_object_size(obj) != 1)
fail("binary object size missmatch");
if (json_object_deln(obj, (const char *)&key2, sizeof(key2)))
fail("cannot del integer key2");
if (json_object_size(obj) != 0)
fail("binary object size missmatch");
json_decref(obj);
}
static void test_dump_order(void) {
json_t *obj = json_object();
char key1[] = {'k', '\0', '-', '2'};
char key2[] = {'k', '\0', '-', '1'};
const char expected_sorted_str[] =
"{\"k\\u0000-1\": \"first\", \"k\\u0000-2\": \"second\"}";
const char expected_nonsorted_str[] =
"{\"k\\u0000-2\": \"second\", \"k\\u0000-1\": \"first\"}";
char *out;
json_object_setn_new_nocheck(obj, key1, sizeof(key1), json_string("second"));
json_object_setn_new_nocheck(obj, key2, sizeof(key2), json_string("first"));
out = malloc(512);
json_dumpb(obj, out, 512, 0);
if (memcmp(expected_nonsorted_str, out, sizeof(expected_nonsorted_str) - 1) != 0)
fail("preserve order failed");
json_dumpb(obj, out, 512, JSON_SORT_KEYS);
if (memcmp(expected_sorted_str, out, sizeof(expected_sorted_str) - 1) != 0)
fail("utf-8 sort failed");
free(out);
json_decref(obj);
}
static void run_tests() {
test_keylen();
test_invalid_keylen();
test_binary_keys();
test_dump_order();
}
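
A minimal usage sketch of the length-aware object API exercised by the tests above. This is a hypothetical example, not taken from the repository; the key and value are invented, and the _nocheck variant is used because a raw byte key need not be valid UTF-8:

/* Sketch: keys with embedded NUL bytes via the *_n object functions. */
#include <jansson.h>
#include <stdio.h>

int main(void) {
    json_t *obj = json_object();
    const char key[] = {'i', 'd', '\0', '4', '2'}; /* 5 bytes, embedded NUL (hypothetical key) */
    json_t *value;

    json_object_setn_new_nocheck(obj, key, sizeof(key), json_integer(7));

    value = json_object_getn(obj, key, sizeof(key));
    if (value)
        printf("value: %lld\n", (long long)json_integer_value(value));

    json_object_deln(obj, key, sizeof(key));
    json_decref(obj);
    return 0;
}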

View file

@ -1 +1,2 @@
JSON_COMPACT=1
export JSON_COMPACT

View file

@ -1,2 +1,3 @@
JSON_COMPACT=1
HASHSEED=1
export JSON_COMPACT HASHSEED

View file

@ -1 +1,2 @@
JSON_ENSURE_ASCII=1
export JSON_ENSURE_ASCII

View file

@ -1 +1,2 @@
JSON_INDENT=4
export JSON_INDENT

View file

@ -1,2 +1,3 @@
JSON_INDENT=4
JSON_COMPACT=1
export JSON_INDENT JSON_COMPACT

View file

@ -1,3 +1,4 @@
JSON_INDENT=4
JSON_COMPACT=1
HASHSEED=1
export JSON_INDENT JSON_COMPACT HASHSEED

View file

@ -1,2 +1,3 @@
JSON_INDENT=4
HASHSEED=1
export JSON_INDENT HASHSEED

View file

@ -1 +1,2 @@
HASHSEED=1
export HASHSEED

View file

@ -1 +1,2 @@
JSON_PRESERVE_ORDER=1
export JSON_PRESERVE_ORDER

View file

@ -1 +1,2 @@
JSON_REAL_PRECISION=4
export JSON_REAL_PRECISION
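
The env fragments above drive the suite's json_process helper through environment variables. As a hedged illustration only: the flag macros and json_object_seed() below are jansson's public C API, but exactly how json_process translates the variables is an assumption and is not shown here.

/* Sketch: the options named in the env files, expressed as encoding flags. */
#include <jansson.h>
#include <stdio.h>
#include <stdlib.h>

int main(void) {
    json_t *obj;
    char *out;

    /* HASHSEED=1: fix the object hash seed; must be called before any
     * object is created for the seed to take effect. */
    json_object_seed(1);

    obj = json_pack("{s:i, s:f}", "b", 1, "a", 0.123456789);

    /* JSON_INDENT=4, JSON_COMPACT=1, JSON_ENSURE_ASCII=1,
     * JSON_PRESERVE_ORDER=1 and JSON_REAL_PRECISION=4 correspond to the
     * encoding flags of the same names. */
    out = json_dumps(obj, JSON_INDENT(4) | JSON_COMPACT | JSON_ENSURE_ASCII |
                              JSON_PRESERVE_ORDER | JSON_REAL_PRECISION(4));
    if (out)
        puts(out);

    free(out);
    json_decref(obj);
    return 0;
}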

View file

@ -1 +1 @@
[1.23456789, 1.0, 1.0000000000000002, 1.23456e99, 1.23456e-99, 0.0000000000012345]
[1.23456789, 1.0, 1.0000000000000002]

View file

@ -1 +1 @@
[1.235, 1.0, 1.0, 1.235e99, 1.235e-99, 1.235e-12]
[1.235, 1.0, 1.0]
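
The rounding shown in these expected outputs (presumably paired with the JSON_REAL_PRECISION=4 env file above) can be reproduced directly with the public API. A rough sketch, not part of the suite; the input values are taken from the expected data above:

/* Sketch: dump reals with 4 significant digits of precision. */
#include <jansson.h>
#include <stdio.h>
#include <stdlib.h>

int main(void) {
    json_t *arr = json_pack("[f, f, f]", 1.23456789, 1.0, 1.0000000000000002);
    char *out = json_dumps(arr, JSON_REAL_PRECISION(4));

    if (out)
        puts(out); /* expected to print: [1.235, 1.0, 1.0] */

    free(out);
    json_decref(arr);
    return 0;
}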

View file

@ -10,13 +10,23 @@ is_test() {
}
run_test() {
$json_process $test_path >$test_log/stdout 2>$test_log/stderr || return 1
(
if [ -f $test_path/env ]; then
. $test_path/env
fi
$json_process --env <$test_path/input >$test_log/stdout 2>$test_log/stderr
)
valgrind_check $test_log/stderr || return 1
cmp -s $test_path/output $test_log/stdout
}
show_error() {
valgrind_show_error && return
cat $test_log/stderr
echo "EXPECTED OUTPUT:"
nl -bn $test_path/output
echo "ACTUAL OUTPUT:"
nl -bn $test_log/stdout
}
. $top_srcdir/test/scripts/run-tests.sh

View file

@ -1 +1,2 @@
JSON_SORT_KEYS=1
export JSON_SORT_KEYS

View file

@ -10,13 +10,18 @@ is_test() {
}
run_test() {
$json_process $test_path >$test_log/stdout 2>$test_log/stderr || return 1
valgrind_check $test_log/stderr || return 1
$json_process --env <$test_path/input >$test_log/stdout 2>$test_log/stderr
valgrind_check $test_log/stderr || return 1
cmp -s $test_path/error $test_log/stderr
}
show_error() {
valgrind_show_error && return
cat $test_log/stderr
echo "EXPECTED ERROR:"
nl -bn $test_path/error
echo "ACTUAL ERROR:"
nl -bn $test_log/stderr
}
. $top_srcdir/test/scripts/run-tests.sh

View file

@ -13,18 +13,24 @@ do_run() {
variant=$1
s=".$1"
strip=""
strip=0
if [ "$variant" = "strip" ]; then
# This test should not be stripped
[ -f $test_path/nostrip ] && return
strip="--strip"
strip=1
fi
if ! $json_process $strip $test_path >$test_log/stdout$s 2>$test_log/stderr$s; then
echo $variant >$test_log/variant
STRIP=$strip $json_process --env \
<$test_path/input >$test_log/stdout$s 2>$test_log/stderr$s
valgrind_check $test_log/stderr$s || return 1
ref=error
[ -f $test_path/error$s ] && ref=error$s
if ! cmp -s $test_path/$ref $test_log/stderr$s; then
echo $variant > $test_log/variant
return 1
fi
valgrind_check $test_log/stderr$s || return 1
}
run_test() {
@ -38,7 +44,14 @@ show_error() {
s=".$variant"
echo "VARIANT: $variant"
cat $test_log/stderr$s
echo "EXPECTED ERROR:"
ref=error
[ -f $test_path/error$s ] && ref=error$s
nl -bn $test_path/$ref
echo "ACTUAL ERROR:"
nl -bn $test_log/stderr$s
}
. $top_srcdir/test/scripts/run-tests.sh

View file

@ -1 +0,0 @@
[1.23e47, 0.1, 0.3, 9.99]

View file

@ -1 +0,0 @@
[1.2299999999999999e47, 0.10000000000000001, 0.29999999999999999, 9.9900000000000002]

View file

@ -1 +1 @@
[1.23e47, 0.1, 0.3, 9.99]
[123e45]

View file

@ -1 +1 @@
[1.23e47, 0.1, 0.3, 9.99]
[1.2299999999999999e47]

View file

@ -5,33 +5,31 @@
# Jansson is free software; you can redistribute it and/or modify
# it under the terms of the MIT license. See LICENSE for details.
dtoa_enabled() {
grep -q "DTOA_ENABLED 1" $top_builddir/jansson_private_config.h
}
JSON_SORT_KEYS=1
export JSON_SORT_KEYS
is_test() {
test -d $test_path
}
do_run() {
if [ -f $test_path/skip_unless_dtoa ]; then
dtoa_enabled || return 77
fi
if [ -f $test_path/skip_if_dtoa ]; then
dtoa_enabled && return 77
fi
variant=$1
s=".$1"
strip=""
[ "$variant" = "strip" ] && strip="--strip"
strip=0
[ "$variant" = "strip" ] && strip=1
if ! $json_process $strip $test_path >$test_log/stdout$s 2>$test_log/stderr$s; then
echo $variant >$test_log/variant
STRIP=$strip $json_process --env \
<$test_path/input >$test_log/stdout$s 2>$test_log/stderr$s
valgrind_check $test_log/stderr$s || return 1
ref=output
[ -f $test_path/output$s ] && ref=output$s
if ! cmp -s $test_path/$ref $test_log/stdout$s; then
echo $variant > $test_log/variant
return 1
fi
valgrind_check $test_log/stderr$s || return 1
}
run_test() {
@ -45,7 +43,14 @@ show_error() {
s=".$variant"
echo "VARIANT: $variant"
cat $test_log/stderr$s
echo "EXPECTED OUTPUT:"
ref=output
[ -f $test_path/output$s ] && ref=output$s
nl -bn $test_path/$ref
echo "ACTUAL OUTPUT:"
nl -bn $test_log/stdout$s
}
. $top_srcdir/test/scripts/run-tests.sh