Compare commits

...

95 commits

Author SHA1 Message Date
Petri Lehtinen
96d160df90
Merge pull request #692 from Andrew-Au/cmake_update/revised/merge
Use target-based cmake settings
2025-04-04 07:18:04 +03:00
Petri Lehtinen
aef13f87f1 Set minimum cmake version to 3.10 2025-04-04 07:15:20 +03:00
Petri Lehtinen
c16ac732e4
Merge pull request #712 from akheron/fix-lint
Fix code formatting
2025-03-23 22:39:12 +02:00
Petri Lehtinen
05a10aa8af Fix code formatting 2025-03-23 22:31:56 +02:00
Petri Lehtinen
4d7ac97b89
Merge pull request #710 from akheron/fix-readthedocs
Add readthedocs config
2025-03-23 21:59:36 +02:00
Petri Lehtinen
23905f372c Add readthedocs config 2025-03-23 21:58:05 +02:00
Petri Lehtinen
ed5cae4ed0 jansson 2.14.1
2025-03-23 14:25:44 +02:00
Andrew White
0f9c18dd12 Use target-based cmake settings
- Update the minimum required CMake version to 3.5 (versions older than 3.5 are
deprecated as of CMake 3.27)
- Update add_definitions to target_compile_definitions
- Use target_include_directories for public library includes
- Add a jansson::jansson alias
2024-07-10 10:31:49 +10:00
Petri Lehtinen
61fc3d0e28
Merge pull request #686 from akheron/readme-badges
Update badges in README
2024-03-31 22:09:15 +03:00
Petri Lehtinen
cce8caba26 Update badges in README 2024-03-31 22:08:10 +03:00
Petri Lehtinen
50953fb1fa
Merge pull request #680 from akheron/dtoa
Use `dtoa()` for optimal encoding of reals
2024-03-31 22:02:09 +03:00
Petri Lehtinen
c780171cf3 Update CHANGES 2024-03-25 20:19:41 +02:00
Petri Lehtinen
2297a2e320 Update LICENSE to add an exception for src/dtoa.c 2024-03-25 20:19:23 +02:00
Petri Lehtinen
f5b3ab323c Add more tests 2024-03-25 20:08:23 +02:00
Petri Lehtinen
9d3abab610 Test with and without dtoa in CI 2024-03-25 20:08:23 +02:00
Petri Lehtinen
9699de8600 Fix tests 2024-03-25 20:08:23 +02:00
Petri Lehtinen
33a6c95d56 Fix compiler warnings 2024-03-25 20:08:23 +02:00
Petri Lehtinen
ed06f65412 Rename a symbol to avoid collision on Windows 2024-03-25 20:08:23 +02:00
Petri Lehtinen
8b975abca1 Use dtoa for double to string conversion 2024-03-25 20:08:23 +02:00
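For context, a minimal C sketch of what this change affects (assuming a build with dtoa enabled; JSON_ENCODE_ANY is needed here because a bare scalar is dumped):

#include <jansson.h>
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
    /* With dtoa enabled, reals are encoded as the shortest decimal string
       that still round-trips to the same double, e.g. "0.1" instead of a
       longer digit expansion. */
    json_t *real = json_real(0.1);
    char *encoded = json_dumps(real, JSON_ENCODE_ANY);
    puts(encoded ? encoded : "(encoding failed)");
    free(encoded);
    json_decref(real);
    return 0;
}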
Petri Lehtinen
8660da0f7c
Merge pull request #685 from akheron/simplify-tests
Simplify tests even more
2024-03-25 20:07:53 +02:00
Petri Lehtinen
dcbeb58829 Simplify tests even more
Drop the useless `export` lines from `env` files.
2024-03-25 19:16:34 +02:00
Petri Lehtinen
53383b9e26
Merge pull request #683 from akheron/refactor-tests
Refactor tests
2024-03-21 20:43:31 +02:00
Petri Lehtinen
649c9357c6 Update CHANGES 2024-03-21 20:43:04 +02:00
Petri Lehtinen
73dc6960ad Show ctest output on failure in CI 2024-03-21 20:36:42 +02:00
Petri Lehtinen
88375fb10e Refactor tests to work better with CMake 2024-03-21 20:36:38 +02:00
Petri Lehtinen
0247b5e2e7 Improve clang-format scripts 2024-03-15 21:06:34 +02:00
Petri Lehtinen
842708ac0c
Merge pull request #677 from akheron/ditch-localeconv
Use sprintf() to determine locale's decimal point
2024-03-08 21:53:07 +02:00
Petri Lehtinen
2d1c13224f Use sprintf() to determine locale's decimal point
This should fix thread safety of encoding and decoding, since
localeconv() is not thread safe after all.
2024-03-08 21:36:21 +02:00
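As an illustration of the approach described above (a sketch of the general technique, not necessarily jansson's exact code), the locale's decimal separator can be read out of a formatted buffer instead of calling localeconv():

#include <stdio.h>

/* Format a known value with the current locale and inspect the separator
   character, avoiding localeconv(), which need not be thread safe. */
static char get_decimal_point(void)
{
    char buf[4];
    snprintf(buf, sizeof buf, "%#.0f", 1.0); /* yields "1." or "1," etc. */
    return buf[1];
}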
Petri Lehtinen
9b9b5e81cf
Merge pull request #679 from akheron/dependabot/github_actions/github-actions-cb1dee63db
Bump the github-actions group with 1 update
2024-03-08 06:42:02 +02:00
dependabot[bot]
0c9c11a89d
Bump the github-actions group with 1 update
Bumps the github-actions group with 1 update: [actions/upload-artifact](https://github.com/actions/upload-artifact).


Updates `actions/upload-artifact` from 3 to 4
- [Release notes](https://github.com/actions/upload-artifact/releases)
- [Commits](https://github.com/actions/upload-artifact/compare/v3...v4)

---
updated-dependencies:
- dependency-name: actions/upload-artifact
  dependency-type: direct:production
  update-type: version-update:semver-major
  dependency-group: github-actions
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-03-07 20:09:47 +00:00
Petri Lehtinen
f52d79a4d3
Merge pull request #672 from pnacht/add-dependabot
Add dependabot, update GitHub Actions
2024-03-07 22:09:25 +02:00
Petri Lehtinen
2f1777ba80
Merge branch 'master' into add-dependabot 2024-03-07 22:08:46 +02:00
Petri Lehtinen
1e57cadbd3
Merge pull request #678 from akheron/ditch-coveralls
Ditch coveralls
2024-03-07 21:54:05 +02:00
Petri Lehtinen
0db4db1048 Ditch coveralls 2024-03-07 21:47:18 +02:00
Pedro Kaj Kjellerup Nacht
73d968feef
Bump GHA versions
Signed-off-by: Pedro Kaj Kjellerup Nacht <pnacht@google.com>
2023-12-06 23:10:14 +00:00
Pedro Kaj Kjellerup Nacht
0154c4af07
Add dependabot to update GHA
Signed-off-by: Pedro Kaj Kjellerup Nacht <pnacht@google.com>
2023-12-06 23:10:03 +00:00
Petri Lehtinen
60097f0096
Create SECURITY.md 2023-06-28 07:42:28 +03:00
Petri Lehtinen
bde28463f8
Merge pull request #646 from Bigpet/patch-1
Remove unused ${SOURCE_DIR}/include include path
2023-03-29 22:07:32 +03:00
Peter Tissen
e7c9ef8e52
Remove unused ${SOURCE_DIR}/include include path
The directory `${CMAKE_CURRENT_SOURCE_DIR}/include` is never created or used, except in an in-source build, where `${CMAKE_CURRENT_BINARY_DIR}/include` resolves to the same place, so it would only duplicate the correct entry.

Not sure if the intermediary `JANSSON__INCLUDE_DIRS` is still necessary, but I thought I'd change as little as possible.

The reason for eliminating this is that when you use CLion and sync into a Docker container, it will try to copy all include dirs back, and non-existent paths cause warnings every time.
2023-03-29 12:44:28 +02:00
Petri Lehtinen
a22dc95311
Merge pull request #640 from uniontech-lilinjie/master
fix typo
2023-01-11 22:25:45 +02:00
lilinjie
a7d04c8554 fix typo
Signed-off-by: lilinjie <lilinjie@uniontech.com>
2023-01-10 16:35:44 +08:00
Petri Lehtinen
e23f558007
Merge pull request #628 from neheb/patch-1
hashtable: change to ifdef
2022-08-24 17:10:48 +03:00
Rosen Penev
1894366598
hashtable: change to ifdef
All other usages use ifdef.
2022-08-22 18:13:57 -07:00
Petri Lehtinen
7e04530916 Remove the confusing vcpkg ad from README 2022-08-09 22:24:38 +03:00
Petri Lehtinen
011e625769
Merge pull request #626 from akheron/fix-example
Add a missing json_decref in the example
2022-08-09 22:05:05 +03:00
Petri Lehtinen
de5f2963ab Add a missing json_decref in the example 2022-08-09 22:00:45 +03:00
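For reference, a small sketch of the ownership rule this fix is about: json_loads() returns a new reference that the caller must release, while json_object_get() only borrows a reference owned by the parent.

#include <jansson.h>
#include <stdio.h>

int main(void)
{
    json_error_t error;
    json_t *root = json_loads("{\"name\": \"jansson\"}", 0, &error);
    if (!root)
        return 1;

    /* Borrowed reference: do not json_decref() this one. */
    json_t *name = json_object_get(root, "name");
    printf("%s\n", json_string_value(name));

    /* The json_decref() that must not be forgotten. */
    json_decref(root);
    return 0;
}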
Petri Lehtinen
128e9c5f37
Merge pull request #610 from Thomas1664/master
Fix overwriting linker flags
2022-05-03 05:48:09 +03:00
Thomas Heinrichs
fe6e8eec7e Fix overwriting linker flags 2022-04-28 17:36:54 +02:00
Petri Lehtinen
addeeef408 Add some links to README 2021-11-22 16:30:12 +02:00
Petri Lehtinen
d82b436b2f
Merge pull request #598 from Mephistophiles/remove_internal_strlen
Reduce strlen's in jansson internals
2021-11-21 07:38:59 +02:00
Maxim Zhukov
586b4461e6 tests: add some cases in fixed size test
Signed-off-by: Maxim Zhukov <mussitantesmortem@gmail.com>
2021-11-20 22:57:10 +03:00
Maxim Zhukov
78418c84f1 value, pack: use key length from json_object internals
Reduce the number of strlen calls in the jansson backend.

Tested on my laptop (instruction counts):

  Before:
  ❯ valgrind --tool=callgrind ./bin/test_object
  ==3105045== Events    : Ir
  ==3105045== Collected : 441453

  After:
  ❯ valgrind --tool=callgrind ./bin/test_object
  ==3144451== Events    : Ir
  ==3144451== Collected : 440597

Signed-off-by: Maxim Zhukov <mussitantesmortem@gmail.com>
2021-11-20 22:47:14 +03:00
Petri Lehtinen
eb81670881
Create CONTRIBUTING.md 2021-09-12 18:37:43 +03:00
Petri Lehtinen
0677666f65 Fix the check-exports tests for versioned symbols 2021-09-09 21:55:27 +03:00
Petri Lehtinen
684e18c927 jansson 2.14 2021-09-09 21:18:40 +03:00
Tomasz Paweł Gajc
e950e57e13 fix --version-script for good 2021-09-06 20:02:14 +03:00
tpgxyz
0dffb4284e use --version-script in case of linkers that do not support --default-symver 2021-04-23 06:41:58 +03:00
Petri Lehtinen
2de2c3d5fc Fix and simplify coveralls reporting 2021-01-26 22:07:32 +02:00
Petri Lehtinen
2cd3126651
Add name to the fuzz tests workflow 2021-01-26 20:03:12 +02:00
Petri Lehtinen
bde3cab216 Fix GitHub actions workflow syntax 2021-01-26 19:33:16 +02:00
Petri Lehtinen
f7aee00928 Use cifuzz actions 2021-01-25 22:20:33 +02:00
Petri Lehtinen
aa52a60ca8 README: Update build badge 2021-01-14 21:12:02 +02:00
Petri Lehtinen
d8aedd3682 Delete Travis CI config 2021-01-14 21:12:02 +02:00
Petri Lehtinen
0441ccd3c6 Also build on Visual Studio 2019 on AppVeyor 2021-01-14 21:12:02 +02:00
Petri Lehtinen
0bc4325222 GitHub Actions 2021-01-14 21:12:02 +02:00
Petri Lehtinen
fd3e9e3051 doc: Hoist a section 2020-11-19 17:31:13 +02:00
Petri Lehtinen
55ea6d44dd doc: Rename "Portability" to "Thread-safety"
Fixes #545
2020-11-19 17:27:03 +02:00
Petri Lehtinen
9a0fc069bf
Merge pull request #520 from Mephistophiles/getn
Add support getn, setn functions
2020-11-19 17:21:12 +02:00
Petri Lehtinen
cc318fc042
Merge pull request #555 from kiyolee/size_t-warnings
Fix size_t related MSVC compiler warnings
2020-11-15 06:32:29 +02:00
Kelvin Lee
638449c43d C11 %zd is a bit overkill here, especially as it requires conditional compilation. 2020-11-06 15:51:14 +11:00
Kelvin Lee
38c4b80ab7 Fix hashsize() should return size_t. 2020-11-06 08:40:20 +11:00
Kelvin Lee
ec1b6318e4 Use size_t to receive result from fread()/ftell(). 2020-11-05 22:06:38 +11:00
Kelvin Lee
1d8201c656 Print size_t properly with C11 %zd support. 2020-11-05 22:05:54 +11:00
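As an aside on the warnings being fixed here, a generic C sketch (illustrative only, not jansson code) of printing a size_t without relying on %zd:

#include <stdio.h>

static void print_size(size_t n)
{
    printf("%zu\n", n);                /* C99 and later */
    printf("%lu\n", (unsigned long)n); /* fallback for older toolchains */
}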
Maxim Zhukov
0758caaac0 reuse key len from loop check for better performance
Signed-off-by: Maxim Zhukov <mussitantesmortem@gmail.com>
2020-09-01 10:22:14 +03:00
Maxim Zhukov
16a3899a9e conformance: remove U+0000 restriction for object keys
Signed-off-by: Maxim Zhukov <mussitantesmortem@gmail.com>
2020-09-01 10:22:14 +03:00
Maxim Zhukov
ca6775dee4 introduce new fixed-size key API
This commit adds functions for working with fixed-size keys (strings that need not be null-terminated).
It's helpful for the following cases:
* getting a key from a substring without copying it to a separate buffer (better performance)
* using pure UTF-8 keys for the objects
* hack: setting binary structs as the keys (see test_binary_keys)

added functions:
 * json_object_getn
 * json_object_setn
 * json_object_setn_nocheck
 * json_object_setn_new
 * json_object_setn_new_nocheck
 * json_object_deln
 * json_object_iter_key_len

added iterators:
 * json_object_keylen_foreach
 * json_object_keylen_foreach_safe

Signed-off-by: Maxim Zhukov <mussitantesmortem@gmail.com>
2020-09-01 10:22:14 +03:00
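A minimal sketch of the fixed-size key API listed in this commit; the signatures and the iterator's argument order are assumptions based on the jansson 2.14 documentation:

#include <jansson.h>
#include <stdio.h>
#include <string.h>

int example(void)
{
    /* The key is a substring of a larger buffer and is not NUL-terminated
       on its own. */
    const char *input = "temperature=21";
    size_t key_len = strcspn(input, "=");

    json_t *obj = json_object();
    if (json_object_setn_new(obj, input, key_len, json_integer(21)) != 0) {
        json_decref(obj);
        return -1;
    }

    json_t *value = json_object_getn(obj, input, key_len); /* borrowed reference */
    int found = (value != NULL);

    const char *key;
    size_t len;
    json_t *v;
    json_object_keylen_foreach(obj, key, len, v) {
        printf("%.*s -> %lld\n", (int)len, key, (long long)json_integer_value(v));
    }

    json_decref(obj);
    return found ? 0 : -1;
}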
Maxim Zhukov
ba4503804b pass length of the key to internal API
Use key length for internal API

Signed-off-by: Maxim Zhukov <mussitantesmortem@gmail.com>
2020-08-12 09:57:14 +03:00
Maxim Zhukov
966f9cc20a sort tests by alpha order
Signed-off-by: Maxim Zhukov <mussitantesmortem@gmail.com>
2020-08-12 09:57:07 +03:00
Petri Lehtinen
a740f15c17
Merge pull request #546 from i-ky/patch-1
Fix a couple of typos
2020-08-09 10:28:12 +03:00
i-ky
601b568b8d
Fix a couple of typos
"This sections describes" -> "This section describes"
2020-08-09 00:19:39 +03:00
Petri Lehtinen
1112580f4a
Merge pull request #544 from i-ky/patch-1
Lift "Depth of nested values" to a higher level
2020-08-08 20:25:37 +03:00
i-ky
d9c5b1d4df
Lift "Depth of nested values" to a higher level
Currently this section is under "Numbers", which seems wrong.
2020-08-08 14:10:16 +03:00
Petri Lehtinen
a154389827
Merge pull request #543 from ploxiln/sphinx3
doc: compatibility with Sphinx-3
2020-08-08 13:15:23 +03:00
Pierce Lopez
c329fd7bc8 doc: add return types for function-like macros
The Sphinx-3.0 c:function:: directive requires the return type.

Sphinx-3.0 also adds function-like macro support to the c:macro::
directive, which Sphinx-1.x and Sphinx-2.x do not support, but it
is probably a good idea to keep compatibility with slightly older
Sphinx for now.
2020-08-07 02:35:27 -04:00
Pierce Lopez
94318890c0 doc: do not use references for standard C types or file names
Use double-backtick quoting instead. It has the same effect
(because these links had nowhere to link to) but it does not
result in loud warnings about broken references by default with Sphinx-3.x.
2020-08-07 02:34:25 -04:00
Pierce Lopez
798d40c3f3 doc: convert refcounting directive to a class
Directive functions are no longer supported in Sphinx-3.0
but directive classes have been supported since early 1.x
2020-08-07 01:54:45 -04:00
Petri Lehtinen
73ccec0601
Merge pull request #540 from smcv/default-symver
build: Add a symbol version to all exported symbols
2020-07-20 13:01:34 +03:00
Simon McVittie
ca80d5127e build: Add a symbol version to all exported symbols for glibc
The --default-symver linker option attaches a default version definition
(the SONAME) to every exported symbol. It is supported since at least
GNU binutils 2.22 in 2011 (older versions not tested).

With this version definition, newly-linked binaries that depend on the
jansson shared library will refer to its symbols in a versioned form,
preventing their references from being resolved to a symbol of the same
name exported by json-c or json-glib if those libraries appear in
dependency search order before jansson, which will usually result in
a crash. This is necessary because ELF symbol resolution normally uses
a single flat namespace, not a tree like Windows symbol resolution.
At least one symbol (json_object_iter_next()) is exported by all three
JSON libraries.

Linking with -Bsymbolic is not enough to have this effect in all cases,
because -Bsymbolic only affects symbol lookup within a shared object,
for example when parse_json() calls json_decref(). It does not affect
calls from external code into jansson, unless jansson was statically
linked into the external caller.

This change will also not prevent code that depends on json-c or
json-glib from finding jansson's symbols and crashing; to prevent
that, a corresponding change in json-c or json-glib would be needed.

Adding a symbol-version is a backwards-compatible change, but once
added, removing or changing the symbol-version would be an incompatible
change that requires a SONAME bump.

Resolves: https://github.com/akheron/jansson/issues/523
(when combined with an equivalent change to json-c).

Signed-off-by: Simon McVittie <smcv@collabora.com>
2020-07-02 09:54:40 +01:00
Petri Lehtinen
52dfc3dd4a
Merge pull request #537 from stoeckmann/vsnprintf
Handle vsnprintf corner cases.
2020-05-23 20:42:25 +03:00
Tobias Stoeckmann
38b001edbd Handle vsnprintf corner cases.
The function vsnprintf returns a negative value on error, e.g. on
an invalid format. It's best to return NULL in such a case.

Also avoid a signed integer overflow if vsnprintf returns INT_MAX.
This is undefined behaviour in C and has to be avoided.

A negative value is returned with a call like:
json_sprintf("%111111111111111s", "", "");

INT_MAX is returned with a call like:
json_sprintf("%647s%2147483000s", "", "");
2020-05-23 12:34:40 +02:00
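The caller-facing consequence of this fix, sketched below: json_sprintf() returns NULL when the underlying vsnprintf() call fails, so the result should always be checked.

#include <jansson.h>
#include <stdio.h>

int main(void)
{
    /* json_sprintf() builds a JSON string value with printf-style
       formatting and returns NULL on formatting failure. */
    json_t *s = json_sprintf("%s-%d", "item", 42);
    if (!s) {
        fputs("formatting failed\n", stderr);
        return 1;
    }
    puts(json_string_value(s));
    json_decref(s);
    return 0;
}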
Petri Lehtinen
e9ebfa7e77 jansson 2.13.1 2020-05-07 21:38:59 +03:00
Petri Lehtinen
3f4d948652
Merge pull request #535 from xry111/clang-format-check-dist-fix
add `scripts` directory to EXTRA_DIST
2020-05-07 21:29:30 +03:00
Petri Lehtinen
a839d3cad6
Merge pull request #534 from xry111/shared_lib_interface_fix
add jansson_* to shared library exports
2020-05-07 21:28:47 +03:00
Xℹ Ruoyao
2d2efde34a add scripts directory to EXTRA_DIST 2020-05-06 18:01:19 +08:00
Xℹ Ruoyao
bcb6b6f3fd add jansson_* to shared library exports 2020-05-06 17:31:48 +08:00
77 changed files with 7542 additions and 1150 deletions

.github/dependabot.yml (new file, 15 lines)

@@ -0,0 +1,15 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
groups:
github-actions:
patterns:
- "*"

.github/workflows/fuzz.yml (new file, 31 lines)

@@ -0,0 +1,31 @@
name: oss-fuzz
on:
pull_request:
branches: [ master ]
paths:
- '**.c'
- '**.h'
jobs:
fuzz:
runs-on: ubuntu-latest
steps:
- name: Build Fuzzers
id: build
uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master
with:
oss-fuzz-project-name: 'jansson'
dry-run: false
- name: Run Fuzzers
uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master
with:
oss-fuzz-project-name: 'jansson'
fuzz-seconds: 600
dry-run: false
- name: Upload Crash
uses: actions/upload-artifact@v4
if: failure() && steps.build.outcome == 'success'
with:
name: artifacts
path: ./out/artifacts

.github/workflows/tests.yml (new file, 67 lines)

@@ -0,0 +1,67 @@
name: tests
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- run: ./scripts/clang-format-check
autotools:
strategy:
matrix:
os: ["ubuntu-latest", "macos-latest"]
cc: ["gcc", "clang"]
dtoa: ["yes", "no"]
runs-on: ${{ matrix.os }}
steps:
- if: ${{runner.os == 'macOS'}}
run: brew install autoconf automake libtool
- uses: actions/checkout@v4
- run: autoreconf -fi
- env:
CC: ${{ matrix.cc }}
CFLAGS: -Werror
run: ./configure --enable-dtoa=${{ matrix.dtoa }}
- run: make check
cmake:
strategy:
matrix:
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
cc: ["gcc", "clang"]
exclude:
- os: windows-latest
cc: gcc
- os: windows-latest
cc: clang
include:
- os: windows-latest
cc: 'msvc' # Doesn't really matter, MSVC is always used on Windows
runs-on: ${{matrix.os}}
steps:
- uses: actions/checkout@v4
- env:
CC: ${{matrix.cc}}
run: cmake .
- run: cmake --build .
- run: ctest --output-on-failure
valgrind:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- run: sudo apt update && sudo apt install valgrind
- run: cmake -DJANSSON_TEST_WITH_VALGRIND=ON .
- run: cmake --build .
- run: ctest --output-on-failure

@@ -1,34 +0,0 @@
env:
global:
- CLANG_FORMAT_VERSION=9
matrix:
- JANSSON_BUILD_METHOD=cmake JANSSON_CMAKE_OPTIONS="-DJANSSON_TEST_WITH_VALGRIND=ON" JANSSON_EXTRA_INSTALL="valgrind"
- JANSSON_BUILD_METHOD=autotools
- JANSSON_BUILD_METHOD=coverage JANSSON_CMAKE_OPTIONS="-DJANSSON_COVERAGE=ON -DJANSSON_COVERALLS=ON -DCMAKE_BUILD_TYPE=Debug" JANSSON_EXTRA_INSTALL="lcov curl"
- JANSSON_BUILD_METHOD=fuzzer
- JANSSON_BUILD_METHOD=lint CLANG_FORMAT=clang-format-9
dist: bionic
language: c
compiler:
- gcc
- clang
matrix:
exclude:
- compiler: clang
env: JANSSON_BUILD_METHOD=coverage JANSSON_CMAKE_OPTIONS="-DJANSSON_COVERAGE=ON -DJANSSON_COVERALLS=ON -DCMAKE_BUILD_TYPE=Debug" JANSSON_EXTRA_INSTALL="lcov curl"
- compiler: clang
env: JANSSON_BUILD_METHOD=fuzzer
- compiler: gcc
env: JANSSON_BUILD_METHOD=lint CLANG_FORMAT=clang-format-9
allow_failures:
- env: JANSSON_BUILD_METHOD=coverage JANSSON_CMAKE_OPTIONS="-DJANSSON_COVERAGE=ON -DJANSSON_COVERALLS=ON -DCMAKE_BUILD_TYPE=Debug" JANSSON_EXTRA_INSTALL="lcov curl"
install:
- sudo apt-get update -qq
- sudo apt-get install -y -qq cmake $JANSSON_EXTRA_INSTALL
- if [ "$TRAVIS_COMPILER" = "clang" ]; then sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y && wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add - && sudo apt-add-repository "deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-9 main" -y && sudo apt-get install -y -qq clang-9 clang-format-9; fi
script:
- if [ "$JANSSON_BUILD_METHOD" = "autotools" ]; then autoreconf -f -i && CFLAGS=-Werror ./configure && make check; fi
- if [ "$JANSSON_BUILD_METHOD" = "cmake" ]; then mkdir build && cd build && cmake $JANSSON_CMAKE_OPTIONS .. && cmake --build . && ctest --output-on-failure; fi
- if [ "$JANSSON_BUILD_METHOD" = "coverage" ]; then mkdir build && cd build && cmake $JANSSON_CMAKE_OPTIONS .. && cmake --build . && cmake --build . --target coveralls; fi
- if [ "$JANSSON_BUILD_METHOD" = "fuzzer" ]; then ./test/ossfuzz/travisoss.sh; fi
- if [ "$JANSSON_BUILD_METHOD" = "lint" ]; then ./scripts/clang-format-check; fi

CHANGES (64 changed lines)

@@ -1,3 +1,63 @@
Version 2.14.1
==============
Released 2025-03-23
* Fixes:
- Fix thread safety of encoding and decoding when `uselocale` or `newlocale`
is used to switch locales inside the threads (#674, #675, #677. Thanks to
Bruno Haible for the report and help with fixing.)
- Use David M. Gay's `dtoa()` algorithm to avoid misprinting issues of real
numbers that are not exactly representable as a `double` (#680).
If this is not desirable, use `./configure --disable-dtoa` or `cmake
-DUSE_DTOA=OFF .`
* Build:
- Make test output nicer in CMake based builds (#683)
- Simplify tests (#685)
Version 2.14
============
Released 2021-09-09
* New Features:
- Add `json_object_getn`, `json_object_setn`, `json_object_deln`, and the
corresponding `nocheck` functions. (#520, by Maxim Zhukov)
* Fixes:
- Handle `sprintf` corner cases (#537, by Tobias Stoeckmann)
* Build:
- Symbol versioning for all exported symbols (#540, by Simon McVittie)
- Fix compiler warnings (#555, by Kelvin Lee)
* Documentation:
- Small fixes (#544, #546, by @i-ky)
- Sphinx 3 compatibility (#543, by Pierce Lopez)
Version 2.13.1
==============
Released 2020-05-07
* Build:
- Include `jansson_version_str()` and `jansson_version_cmp()` in
shared library. (#534)
- Include ``scripts/`` in tarball. (#535)
Version 2.13
============
@@ -114,7 +174,7 @@ Released 2018-02-09
- Work around gcc's -Wimplicit-fallthrough.
- Fix CMake detection of `sys/types.h` header (#375).
- Fix CMake detection of ``sys/types.h`` header (#375).
- Fix `jansson.pc` generated by CMake to be more consistent with the one
generated using GNU Autotools (#368).
@@ -578,7 +638,7 @@ Released 2011-10-06
- Fix identifier decoding under non-UTF-8 locales. (#35)
- `json_load_file()`: Open the input file in binary mode for maximum
compatiblity.
compatibility.
* Documentation:

@@ -1,58 +1,11 @@
# Notes:
#
# Author: Paul Harris, June 2012
# Additions: Joakim Soderberg, February 2013
#
# Supports: building static/shared, release/debug/etc, can also build html docs
# and some of the tests.
# Note that its designed for out-of-tree builds, so it will not pollute your
# source tree.
#
# TODO 1: Finish implementing tests. api tests are working, but the valgrind
# variants are not flagging problems.
#
# TODO 2: There is a check_exports script that would try and incorporate.
#
# TODO 3: Consolidate version numbers, currently the version number is written
# into: * cmake (here) * autotools (the configure) * source code header files.
# Should not be written directly into header files, autotools/cmake can do
# that job.
#
# Brief intro on how to use cmake:
# > mkdir build (somewhere - we do out-of-tree builds)
# > use cmake, ccmake, or cmake-gui to configure the project. for linux, you
# can only choose one variant: release,debug,etc... and static or shared.
# >> example:
# >> cd build
# >> ccmake -i ../path_to_jansson_dir
# >> inside, configure your options. press C until there are no lines
# with * next to them.
# >> note, I like to configure the 'install' path to ../install, so I get
# self-contained clean installs I can point other projects to.
# >> press G to 'generate' the project files.
# >> make (to build the project)
# >> make install
# >> make test (to run the tests, if you enabled them)
#
# Brief description on how it works:
# There is a small hierarchy of CMakeLists.txt files which define how the
# project is built.
# Header file detection etc is done, and the results are written into config.h
# and jansson_config.h, which are generated from the corresponding
# config.h.cmake and jansson_config.h.cmake template files.
# The generated header files end up in the build directory - not in
# the source directory.
# The rest is down to the usual make process.
cmake_minimum_required (VERSION 3.1)
cmake_minimum_required (VERSION 3.10)
project(jansson C)
# Options
option(JANSSON_BUILD_SHARED_LIBS "Build shared libraries." OFF)
option(USE_URANDOM "Use /dev/urandom to seed the hash function." ON)
option(USE_WINDOWS_CRYPTOAPI "Use CryptGenRandom to seed the hash function." ON)
option(USE_DTOA "Use dtoa for optimal floating-point to string conversions." ON)
if (MSVC)
# This option must match the settings used in your program, in particular if you
@@ -64,8 +17,6 @@ option(JANSSON_EXAMPLES "Compile example applications" ON)
if (UNIX)
option(JANSSON_COVERAGE "(GCC Only! Requires gcov/lcov to be installed). Include target for doing coverage analysis for the test suite. Note that -DCMAKE_BUILD_TYPE=Debug must be set" OFF)
option(JANSSON_COVERALLS "Generate coverage info for Coveralls" OFF)
option(JANSSON_COVERALLS_UPLOAD "Upload coverage info to Coveralls (Only works via Travis)" ON)
endif ()
# Set some nicer output dirs.
@@ -85,10 +36,10 @@ endif()
# set (JANSSON_VERSION "2.3.1")
# set (JANSSON_SOVERSION 2)
set(JANSSON_DISPLAY_VERSION "2.13")
set(JANSSON_DISPLAY_VERSION "2.14.1")
# This is what is required to match the same numbers as automake's
set(JANSSON_VERSION "4.12.0")
set(JANSSON_VERSION "4.14.0")
set(JANSSON_SOVERSION 4)
# for CheckFunctionKeywords
@@ -119,17 +70,9 @@ endif()
message("C compiler: ${CMAKE_C_COMPILER_ID}")
# Coverage only works with GCC for a debug build.
if (JANSSON_COVERALLS)
set(JANSSON_COVERAGE ON)
endif()
if (JANSSON_COVERAGE)
include(CodeCoverage)
include(Coveralls)
# This adds coverage arguments to gcc/clang.
coveralls_turn_on_coverage()
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
endif()
check_include_files (endian.h HAVE_ENDIAN_H)
@@ -151,6 +94,9 @@ check_function_exists (sched_yield HAVE_SCHED_YIELD)
# Check for the int-type includes
check_include_files (stdint.h HAVE_STDINT_H)
include (TestBigEndian)
TEST_BIG_ENDIAN(WORDS_BIGENDIAN)
# Check our 64 bit integer sizes
check_type_size (__int64 __INT64)
check_type_size (int64_t INT64_T)
@@ -251,6 +197,8 @@ endif ()
# detect what to use for the 64 bit type.
# Note: I will prefer long long if I can get it, as that is what the automake system aimed for.
if (NOT DEFINED JSON_INT_T)
set (JSON_INTEGER_IS_LONG_LONG 1)
if (HAVE_LONG_LONG_INT AND (LONG_LONG_INT EQUAL 8))
set (JSON_INT_T "long long")
elseif (HAVE_INT64_T)
@@ -273,18 +221,7 @@ if (NOT DEFINED JSON_INT_T)
endif ()
endif ()
# If locale.h and localeconv() are available, define to 1, otherwise to 0.
check_include_files (locale.h HAVE_LOCALE_H)
check_function_exists (localeconv HAVE_LOCALECONV)
if (HAVE_LOCALECONV AND HAVE_LOCALE_H)
set (JSON_HAVE_LOCALECONV 1)
else ()
set (JSON_HAVE_LOCALECONV 0)
endif()
# check if we have setlocale
check_function_exists(setlocale HAVE_SETLOCALE)
# Check what the inline keyword is.
@@ -329,20 +266,20 @@ configure_file (${CMAKE_CURRENT_SOURCE_DIR}/cmake/jansson_config.h.cmake
file (COPY ${CMAKE_CURRENT_SOURCE_DIR}/src/jansson.h
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/include/)
add_definitions(-DJANSSON_USING_CMAKE)
# configure the private config file
configure_file (${CMAKE_CURRENT_SOURCE_DIR}/cmake/jansson_private_config.h.cmake
${CMAKE_CURRENT_BINARY_DIR}/private_include/jansson_private_config.h)
# and tell the source code to include it
add_definitions(-DHAVE_CONFIG_H)
include_directories (${CMAKE_CURRENT_BINARY_DIR}/include)
include_directories (${CMAKE_CURRENT_BINARY_DIR}/private_include)
# Configuration flags will be set on project later once we have defined the target
# Add the lib sources.
file(GLOB JANSSON_SRC src/*.c)
if (NOT USE_DTOA)
list(FILTER JANSSON_SRC EXCLUDE REGEX ".*dtoa\\.c$")
endif()
set(JANSSON_HDR_PRIVATE
${CMAKE_CURRENT_SOURCE_DIR}/src/hashtable.h
@@ -366,6 +303,44 @@ if(JANSSON_BUILD_SHARED_LIBS)
${JANSSON_HDR_PUBLIC}
src/jansson.def)
# check if linker support --default-symver
list(APPEND CMAKE_REQUIRED_LIBRARIES "-Wl,--default-symver")
check_c_source_compiles(
"
int main (void)
{
return 0;
}
"
DSYMVER_WORKS
)
list(REMOVE_ITEM CMAKE_REQUIRED_LIBRARIES "-Wl,--default-symver")
if (SYMVER_WORKS)
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--default-symver")
else()
# some linkers may only support --version-script
file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/jansson.sym" "JANSSON_${JANSSON_SOVERSION} {
global:
*;
};
")
list(APPEND CMAKE_REQUIRED_LIBRARIES "-Wl,--version-script,${CMAKE_CURRENT_BINARY_DIR}/jansson.sym")
check_c_source_compiles(
"
int main (void)
{
return 0;
}
"
VSCRIPT_WORKS
)
list(REMOVE_ITEM CMAKE_REQUIRED_LIBRARIES "-Wl,--version-script,${CMAKE_CURRENT_BINARY_DIR}/jansson.sym")
if (VSCRIPT_WORKS)
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--version-script,${CMAKE_CURRENT_BINARY_DIR}/jansson.sym")
endif()
endif()
set_target_properties(jansson PROPERTIES
VERSION ${JANSSON_VERSION}
SOVERSION ${JANSSON_SOVERSION})
@@ -378,6 +353,20 @@ else()
POSITION_INDEPENDENT_CODE true)
endif()
# Now target jansson is declared, set per-target values
target_compile_definitions(jansson PUBLIC JANSSON_USING_CMAKE)
target_compile_definitions(jansson PRIVATE HAVE_CONFIG_H)
target_include_directories(jansson
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_BINARY_DIR}/include>
INTERFACE $<INSTALL_INTERFACE:include>
)
add_library( jansson::jansson ALIAS jansson )
if (JANSSON_EXAMPLES)
add_executable(simple_parse "${CMAKE_CURRENT_SOURCE_DIR}/examples/simple_parse.c")
target_link_libraries(simple_parse jansson)
@@ -501,14 +490,15 @@ if (NOT JANSSON_WITHOUT_TESTS)
set(api_tests
test_array
test_copy
test_chaos
test_copy
test_dump
test_dump_callback
test_equal
test_fixed_size
test_load
test_loadb
test_load_callback
test_loadb
test_number
test_object
test_pack
@@ -556,6 +546,11 @@ if (NOT JANSSON_WITHOUT_TESTS)
if (IS_DIRECTORY ${TESTDIR})
get_filename_component(TNAME ${TESTDIR} NAME)
if ((USE_DTOA AND EXISTS ${TESTDIR}/skip_if_dtoa) OR
(NOT USE_DTOA AND EXISTS ${TESTDIR}/skip_unless_dtoa))
continue()
endif()
if (JANSSON_TEST_WITH_VALGRIND)
add_test(memcheck__${SUITE}__${TNAME}
${MEMCHECK_COMMAND} ${SUITE_TEST_CMD} ${TESTDIR})
@@ -578,16 +573,7 @@ if (NOT JANSSON_WITHOUT_TESTS)
endforeach ()
if (JANSSON_COVERAGE)
setup_target_for_coverage(
coverage # Coverage make target "make coverage".
coverage # Name of output directory.
make # Name of test runner executable.
test) # Arguments to the test runner above (make test).
if (JANSSON_COVERALLS)
set(COVERAGE_SRCS ${JANSSON_SRC})
coveralls_setup("${COVERAGE_SRCS}" ${JANSSON_COVERALLS_UPLOAD})
endif ()
SETUP_TARGET_FOR_COVERAGE(coverage coverage ctest)
endif ()
# Enable using "make check" just like the autotools project.
@@ -630,9 +616,7 @@ foreach(p LIB BIN INCLUDE CMAKE)
endforeach()
# Generate the config file for the build-tree.
set(JANSSON__INCLUDE_DIRS
"${CMAKE_CURRENT_SOURCE_DIR}/include"
"${CMAKE_CURRENT_BINARY_DIR}/include")
set(JANSSON__INCLUDE_DIRS "${CMAKE_CURRENT_BINARY_DIR}/include")
set(JANSSON_INCLUDE_DIRS ${JANSSON__INCLUDE_DIRS} CACHE PATH "Jansson include directories")
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/janssonConfig.cmake.in
${CMAKE_CURRENT_BINARY_DIR}/janssonConfig.cmake

CONTRIBUTING.md (new file, 3 lines)

@@ -0,0 +1,3 @@
Hi, and thanks for contributing!
Please remember to add tests and documentation for new functionality. Backwards incompatible changes or features that are not directly related to JSON are likely to be rejected.

LICENSE (26 changed lines)

@@ -1,4 +1,11 @@
Copyright (c) 2009-2020 Petri Lehtinen <petri@digip.org>
# License
This project is licensed under the MIT license, except where otherwise noted.
The full text of the MIT license is included below.
## MIT License
Copyright (c) 2009-2024 Petri Lehtinen <petri@digip.org>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -17,3 +24,20 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
## Exceptions
### `src/dtoa.c`
Copyright (c) 1991, 2000, 2001 by Lucent Technologies.
Permission to use, copy, modify, and distribute this software for any
purpose without fee is hereby granted, provided that this entire notice
is included in all copies of any software which is or includes a copy
or modification of this software and in all copies of the supporting
documentation for such software.
THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR IMPLIED
WARRANTY. IN PARTICULAR, NEITHER THE AUTHOR NOR LUCENT MAKES ANY
REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE MERCHANTABILITY
OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR PURPOSE.

@@ -1,4 +1,4 @@
EXTRA_DIST = CHANGES LICENSE README.rst CMakeLists.txt cmake android examples
EXTRA_DIST = CHANGES LICENSE README.rst CMakeLists.txt cmake android examples scripts
SUBDIRS = doc src test
# "make distcheck" builds the dvi target, so use it to check that the

@@ -1,14 +1,10 @@
Jansson README
==============
.. image:: https://travis-ci.org/akheron/jansson.png
:target: https://travis-ci.org/akheron/jansson
.. image:: https://ci.appveyor.com/api/projects/status/lmhkkc4q8cwc65ko
:target: https://ci.appveyor.com/project/akheron/jansson
.. |tests| image:: https://github.com/akheron/jansson/workflows/tests/badge.svg
.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/lmhkkc4q8cwc65ko
.. image:: https://coveralls.io/repos/akheron/jansson/badge.png?branch=master
:target: https://coveralls.io/r/akheron/jansson?branch=master
|tests| |appveyor|
Jansson_ is a C library for encoding, decoding and manipulating JSON
data. Its main features and design principles are:
@@ -26,24 +22,11 @@ data. Its main features and design principles are:
Jansson is licensed under the `MIT license`_; see LICENSE in the
source distribution for details.
Compilation and Installation
----------------------------
You can download and install Jansson using the `vcpkg <https://github.com/Microsoft/vcpkg/>`_ dependency manager:
.. code-block:: bash
git clone https://github.com/Microsoft/vcpkg.git
cd vcpkg
./bootstrap-vcpkg.sh
./vcpkg integrate install
vcpkg install jansson
The Jansson port in vcpkg is kept up to date by Microsoft team members and community contributors. If the version is out of date, please `create an issue or pull request <https://github.com/Microsoft/vcpkg/>`_ on the vcpkg repository.
If you obtained a `source tarball`_ from the "Releases" section of the main
site just use the standard autotools commands::
If you obtained a ``jansson-X.Y.tar.*`` tarball from GitHub Releases, just use
the standard autotools commands::
$ ./configure
$ make
@@ -53,9 +36,8 @@ To run the test suite, invoke::
$ make check
If the source has been checked out from a Git repository, the
./configure script has to be generated first. The easiest way is to
use autoreconf::
If the source has been checked out from a Git repository, the ``configure``
script has to be generated first. The easiest way is to use autoreconf::
$ autoreconf -i
@@ -74,8 +56,15 @@ Then, point your browser to ``doc/_build/html/index.html``. Sphinx_
1.0 or newer is required to generate the documentation.
Community
---------
* `Documentation <http://jansson.readthedocs.io/en/latest/>`_
* `Issue tracker <https://github.com/akheron/jansson/issues>`_
* `Mailing list <http://groups.google.com/group/jansson-users>`_
* `Wiki <https://github.com/akheron/jansson/wiki>`_ contains some development documentation
.. _Jansson: http://www.digip.org/jansson/
.. _`Comprehensive documentation`: http://jansson.readthedocs.io/en/latest/
.. _`MIT license`: http://www.opensource.org/licenses/mit-license.php
.. _`source tarball`: http://www.digip.org/jansson#releases
.. _Sphinx: http://sphinx.pocoo.org/

SECURITY.md (new file, 9 lines)

@@ -0,0 +1,9 @@
# Security Policy
## Supported Versions
Latest released version.
## Reporting a Vulnerability
Send an email to petri@digip.org.

@@ -32,10 +32,6 @@
otherwise to 0. */
#define JSON_INTEGER_IS_LONG_LONG 1
/* If locale.h and localeconv() are available, define to 1,
otherwise to 0. */
#define JSON_HAVE_LOCALECONV 0
/* Maximum recursion depth for parsing JSON input.
This limits the depth of e.g. array-within-array constructions. */
#define JSON_PARSER_MAX_DEPTH 2048

@@ -7,6 +7,8 @@ environment:
- VS: Visual Studio 14 2015
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
VS: Visual Studio 15 2017
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
VS: Visual Studio 16 2019
build_script:
- md build

@@ -111,9 +111,9 @@ FUNCTION(SETUP_TARGET_FOR_COVERAGE _targetname _outputname _testrunner)
# Capturing lcov counters and generating report
COMMAND ${LCOV_PATH} --directory . --capture --output-file ${_outputname}.info --rc lcov_branch_coverage=1
COMMAND ${LCOV_PATH} --remove ${_outputname}.info '*/build/include/*' '*/test/*' '/usr/include/*' --output-file ${_outputname}.info.cleaned --rc lcov_branch_coverage=1
COMMAND ${GENHTML_PATH} --branch-coverage -o ${_outputname} ${_outputname}.info.cleaned
COMMAND ${CMAKE_COMMAND} -E remove ${_outputname}.info ${_outputname}.info.cleaned
COMMAND ${LCOV_PATH} --remove ${_outputname}.info '*/build/include/*' '*/test/*' '/usr/include/*' --output-file ${_outputname}.info --rc lcov_branch_coverage=1
# COMMAND ${GENHTML_PATH} --branch-coverage -o ${_outputname} ${_outputname}.info.cleaned
# COMMAND ${CMAKE_COMMAND} -E remove ${_outputname}.info ${_outputname}.info.cleaned
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
COMMENT "Resetting code coverage counters to zero.\nProcessing code coverage counters and generating report."

@@ -1,111 +0,0 @@
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Copyright (C) 2014 Joakim Söderberg <joakim.soderberg@gmail.com>
#
#
# Param _COVERAGE_SRCS A list of source files that coverage should be collected for.
# Param _COVERALLS_UPLOAD Upload the result to coveralls?
#
function(coveralls_setup _COVERAGE_SRCS _COVERALLS_UPLOAD)
# When passing a CMake list to an external process, the list
# will be converted from the format "1;2;3" to "1 2 3".
# This means the script we're calling won't see it as a list
# of sources, but rather just one long path. We remedy this
# by replacing ";" with "*" and then reversing that in the script
# that we're calling.
# http://cmake.3232098.n2.nabble.com/Passing-a-CMake-list-quot-as-is-quot-to-a-custom-target-td6505681.html
set(COVERAGE_SRCS_TMP ${_COVERAGE_SRCS})
set(COVERAGE_SRCS "")
foreach (COVERAGE_SRC ${COVERAGE_SRCS_TMP})
set(COVERAGE_SRCS "${COVERAGE_SRCS}*${COVERAGE_SRC}")
endforeach()
#message("Coverage sources: ${COVERAGE_SRCS}")
set(COVERALLS_FILE ${PROJECT_BINARY_DIR}/coveralls.json)
add_custom_target(coveralls_generate
# Zero the coverage counters.
COMMAND ${CMAKE_COMMAND}
-P "${PROJECT_SOURCE_DIR}/cmake/CoverallsClear.cmake"
# Run regress tests.
COMMAND ${CMAKE_CTEST_COMMAND} --output-on-failure
# Generate Gcov and translate it into coveralls JSON.
# We do this by executing an external CMake script.
# (We don't want this to run at CMake generation time, but after compilation and everything has run).
COMMAND ${CMAKE_COMMAND}
-DCOVERAGE_SRCS="${COVERAGE_SRCS}" # TODO: This is passed like: "a b c", not "a;b;c"
-DCOVERALLS_OUTPUT_FILE="${COVERALLS_FILE}"
-DCOV_PATH="${PROJECT_BINARY_DIR}"
-DPROJECT_ROOT="${PROJECT_SOURCE_DIR}"
-P "${PROJECT_SOURCE_DIR}/cmake/CoverallsGenerateGcov.cmake"
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
COMMENT "Generating coveralls output..."
)
if (_COVERALLS_UPLOAD)
message("COVERALLS UPLOAD: ON")
find_program(CURL_EXECUTABLE curl)
if (NOT CURL_EXECUTABLE)
message(FATAL_ERROR "Coveralls: curl not found! Aborting")
endif()
add_custom_target(coveralls_upload
# Upload the JSON to coveralls.
COMMAND ${CURL_EXECUTABLE}
-S -F json_file=@${COVERALLS_FILE}
https://coveralls.io/api/v1/jobs
DEPENDS coveralls_generate
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
COMMENT "Uploading coveralls output...")
add_custom_target(coveralls DEPENDS coveralls_upload)
else()
message("COVERALLS UPLOAD: OFF")
add_custom_target(coveralls DEPENDS coveralls_generate)
endif()
endfunction()
macro(coveralls_turn_on_coverage)
if(NOT (CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX)
AND (NOT "${CMAKE_C_COMPILER_ID}" STREQUAL "Clang"))
message(FATAL_ERROR "Coveralls: Compiler ${CMAKE_C_COMPILER_ID} is not GNU gcc! Aborting... You can set this on the command line using CC=/usr/bin/gcc CXX=/usr/bin/g++ cmake <options> ..")
endif()
if(NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
message(FATAL_ERROR "Coveralls: Code coverage results with an optimised (non-Debug) build may be misleading! Add -DCMAKE_BUILD_TYPE=Debug")
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
endmacro()

@@ -1,24 +0,0 @@
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Copyright (C) 2014 Joakim Söderberg <joakim.soderberg@gmail.com>
#
file(REMOVE_RECURSE ${PROJECT_BINARY_DIR}/*.gcda)

@@ -1,380 +0,0 @@
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Copyright (C) 2014 Joakim Söderberg <joakim.soderberg@gmail.com>
#
# This is intended to be run by a custom target in a CMake project like this.
# 0. Compile program with coverage support.
# 1. Clear coverage data. (Recursively delete *.gcda in build dir)
# 2. Run the unit tests.
# 3. Run this script specifying which source files the coverage should be performed on.
#
# This script will then use gcov to generate .gcov files in the directory specified
# via the COV_PATH var. This should probably be the same as your cmake build dir.
#
# It then parses the .gcov files to convert them into the Coveralls JSON format:
# https://coveralls.io/docs/api
#
# Example for running as standalone CMake script from the command line:
# (Note it is important the -P is at the end...)
# $ cmake -DCOV_PATH=$(pwd)
# -DCOVERAGE_SRCS="catcierge_rfid.c;catcierge_timer.c"
# -P ../cmake/CoverallsGcovUpload.cmake
#
CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
#
# Make sure we have the needed arguments.
#
if (NOT COVERALLS_OUTPUT_FILE)
message(FATAL_ERROR "Coveralls: No coveralls output file specified. Please set COVERALLS_OUTPUT_FILE")
endif()
if (NOT COV_PATH)
message(FATAL_ERROR "Coveralls: Missing coverage directory path where gcov files will be generated. Please set COV_PATH")
endif()
if (NOT COVERAGE_SRCS)
message(FATAL_ERROR "Coveralls: Missing the list of source files that we should get the coverage data for COVERAGE_SRCS")
endif()
if (NOT PROJECT_ROOT)
message(FATAL_ERROR "Coveralls: Missing PROJECT_ROOT.")
endif()
# Since it's not possible to pass a CMake list properly in the
# "1;2;3" format to an external process, we have replaced the
# ";" with "*", so reverse that here so we get it back into the
# CMake list format.
string(REGEX REPLACE "\\*" ";" COVERAGE_SRCS ${COVERAGE_SRCS})
find_program(GCOV_EXECUTABLE gcov)
if (NOT GCOV_EXECUTABLE)
message(FATAL_ERROR "gcov not found! Aborting...")
endif()
find_package(Git)
# TODO: Add these git things to the coveralls json.
if (GIT_FOUND)
# Branch.
execute_process(
COMMAND ${GIT_EXECUTABLE} rev-parse --abbrev-ref HEAD
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE GIT_BRANCH
OUTPUT_STRIP_TRAILING_WHITESPACE
)
macro (git_log_format FORMAT_CHARS VAR_NAME)
execute_process(
COMMAND ${GIT_EXECUTABLE} log -1 --pretty=format:%${FORMAT_CHARS}
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE ${VAR_NAME}
OUTPUT_STRIP_TRAILING_WHITESPACE
)
endmacro()
git_log_format(an GIT_AUTHOR_EMAIL)
git_log_format(ae GIT_AUTHOR_EMAIL)
git_log_format(cn GIT_COMMITTER_NAME)
git_log_format(ce GIT_COMMITTER_EMAIL)
git_log_format(B GIT_COMMIT_MESSAGE)
message("Git exe: ${GIT_EXECUTABLE}")
message("Git branch: ${GIT_BRANCH}")
message("Git author: ${GIT_AUTHOR_NAME}")
message("Git e-mail: ${GIT_AUTHOR_EMAIL}")
message("Git committer name: ${GIT_COMMITTER_NAME}")
message("Git committer e-mail: ${GIT_COMMITTER_EMAIL}")
message("Git commit message: ${GIT_COMMIT_MESSAGE}")
endif()
############################# Macros #########################################
#
# This macro converts from the full path format gcov outputs:
#
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
#
# to the original source file path the .gcov is for:
#
# /path/to/project/root/subdir/the_file.c
#
macro(get_source_path_from_gcov_filename _SRC_FILENAME _GCOV_FILENAME)
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
# ->
# #path#to#project#root#subdir#the_file.c.gcov
get_filename_component(_GCOV_FILENAME_WEXT ${_GCOV_FILENAME} NAME)
# #path#to#project#root#subdir#the_file.c.gcov -> /path/to/project/root/subdir/the_file.c
string(REGEX REPLACE "\\.gcov$" "" SRC_FILENAME_TMP ${_GCOV_FILENAME_WEXT})
string(REGEX REPLACE "\#" "/" SRC_FILENAME_TMP ${SRC_FILENAME_TMP})
set(${_SRC_FILENAME} "${SRC_FILENAME_TMP}")
endmacro()
##############################################################################
# Get the coverage data.
file(GLOB_RECURSE GCDA_FILES "${COV_PATH}/*.gcda")
message("GCDA files:")
# Get a list of all the object directories needed by gcov
# (The directories the .gcda files and .o files are found in)
# and run gcov on those.
foreach(GCDA ${GCDA_FILES})
message("Process: ${GCDA}")
message("------------------------------------------------------------------------------")
get_filename_component(GCDA_DIR ${GCDA} PATH)
#
# The -p below refers to "Preserve path components",
# This means that the generated gcov filename of a source file will
# keep the original files entire filepath, but / is replaced with #.
# Example:
#
# /path/to/project/root/build/CMakeFiles/the_file.dir/subdir/the_file.c.gcda
# ------------------------------------------------------------------------------
# File '/path/to/project/root/subdir/the_file.c'
# Lines executed:68.34% of 199
# /path/to/project/root/subdir/the_file.c:creating '#path#to#project#root#subdir#the_file.c.gcov'
#
# If -p is not specified then the file is named only "the_file.c.gcov"
#
execute_process(
COMMAND ${GCOV_EXECUTABLE} -p -o ${GCDA_DIR} ${GCDA}
WORKING_DIRECTORY ${COV_PATH}
)
endforeach()
# TODO: Make these be absolute path
file(GLOB ALL_GCOV_FILES ${COV_PATH}/*.gcov)
# Get only the filenames to use for filtering.
#set(COVERAGE_SRCS_NAMES "")
#foreach (COVSRC ${COVERAGE_SRCS})
# get_filename_component(COVSRC_NAME ${COVSRC} NAME)
# message("${COVSRC} -> ${COVSRC_NAME}")
# list(APPEND COVERAGE_SRCS_NAMES "${COVSRC_NAME}")
#endforeach()
#
# Filter out all but the gcov files we want.
#
# We do this by comparing the list of COVERAGE_SRCS filepaths that the
# user wants the coverage data for with the paths of the generated .gcov files,
# so that we only keep the relevant gcov files.
#
# Example:
# COVERAGE_SRCS =
# /path/to/project/root/subdir/the_file.c
#
# ALL_GCOV_FILES =
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
# /path/to/project/root/build/#path#to#project#root#subdir#other_file.c.gcov
#
# Result should be:
# GCOV_FILES =
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
#
set(GCOV_FILES "")
#message("Look in coverage sources: ${COVERAGE_SRCS}")
message("\nFilter out unwanted GCOV files:")
message("===============================")
set(COVERAGE_SRCS_REMAINING ${COVERAGE_SRCS})
foreach (GCOV_FILE ${ALL_GCOV_FILES})
#
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
# ->
# /path/to/project/root/subdir/the_file.c
get_source_path_from_gcov_filename(GCOV_SRC_PATH ${GCOV_FILE})
# Is this in the list of source files?
# TODO: We want to match against relative path filenames from the source file root...
list(FIND COVERAGE_SRCS ${GCOV_SRC_PATH} WAS_FOUND)
if (NOT WAS_FOUND EQUAL -1)
message("YES: ${GCOV_FILE}")
list(APPEND GCOV_FILES ${GCOV_FILE})
# We remove it from the list, so we don't bother searching for it again.
# Also files left in COVERAGE_SRCS_REMAINING after this loop ends should
# have coverage data generated from them (no lines are covered).
list(REMOVE_ITEM COVERAGE_SRCS_REMAINING ${GCOV_SRC_PATH})
else()
message("NO: ${GCOV_FILE}")
endif()
endforeach()
# TODO: Enable setting these
set(JSON_SERVICE_NAME "travis-ci")
set(JSON_SERVICE_JOB_ID $ENV{TRAVIS_JOB_ID})
set(JSON_TEMPLATE
"{
\"service_name\": \"\@JSON_SERVICE_NAME\@\",
\"service_job_id\": \"\@JSON_SERVICE_JOB_ID\@\",
\"source_files\": \@JSON_GCOV_FILES\@
}"
)
set(SRC_FILE_TEMPLATE
"{
\"name\": \"\@GCOV_SRC_REL_PATH\@\",
\"source\": \"\@GCOV_FILE_SOURCE\@\",
\"coverage\": \@GCOV_FILE_COVERAGE\@
}"
)
message("\nGenerate JSON for files:")
message("=========================")
set(JSON_GCOV_FILES "[")
# Read the GCOV files line by line and get the coverage data.
foreach (GCOV_FILE ${GCOV_FILES})
get_source_path_from_gcov_filename(GCOV_SRC_PATH ${GCOV_FILE})
file(RELATIVE_PATH GCOV_SRC_REL_PATH "${PROJECT_ROOT}" "${GCOV_SRC_PATH}")
# Loads the gcov file as a list of lines.
file(STRINGS ${GCOV_FILE} GCOV_LINES)
# Instead of trying to parse the source from the
# gcov file, simply read the file contents from the source file.
# (Parsing it from the gcov is hard because C-code uses ; in many places
# which also happens to be the same as the CMake list delimiter).
file(READ ${GCOV_SRC_PATH} GCOV_FILE_SOURCE)
string(REPLACE "\\" "\\\\" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
string(REGEX REPLACE "\"" "\\\\\"" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
string(REPLACE "\t" "\\\\t" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
string(REPLACE "\r" "\\\\r" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
string(REPLACE "\n" "\\\\n" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
# According to http://json.org/ these should be escaped as well.
# Don't know how to do that in CMake however...
#string(REPLACE "\b" "\\\\b" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
#string(REPLACE "\f" "\\\\f" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
#string(REGEX REPLACE "\u([a-fA-F0-9]{4})" "\\\\u\\1" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
# We want a json array of coverage data as a single string
# start building them from the contents of the .gcov
set(GCOV_FILE_COVERAGE "[")
foreach (GCOV_LINE ${GCOV_LINES})
# Example of what we're parsing:
# Hitcount |Line | Source
# " 8: 26: if (!allowed || (strlen(allowed) == 0))"
string(REGEX REPLACE
"^([^:]*):([^:]*):(.*)$"
"\\1;\\2;\\3"
RES
"${GCOV_LINE}")
list(LENGTH RES RES_COUNT)
if (RES_COUNT GREATER 2)
list(GET RES 0 HITCOUNT)
list(GET RES 1 LINE)
list(GET RES 2 SOURCE)
string(STRIP ${HITCOUNT} HITCOUNT)
string(STRIP ${LINE} LINE)
# Lines with 0 line numbers are metadata and can be ignored.
if (NOT ${LINE} EQUAL 0)
# Translate the hitcount into valid JSON values.
if (${HITCOUNT} STREQUAL "#####")
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}0, ")
elseif (${HITCOUNT} STREQUAL "-")
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}null, ")
else()
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}${HITCOUNT}, ")
endif()
# TODO: Look for LCOV_EXCL_LINE in SOURCE to get rid of false positives.
endif()
else()
message(WARNING "Failed to properly parse line --> ${GCOV_LINE}")
endif()
endforeach()
# Remove the trailing comma from the JSON array.
# "[1, 2, 3, " -> "[1, 2, 3"
string(REGEX REPLACE ",[ ]*$" "" GCOV_FILE_COVERAGE ${GCOV_FILE_COVERAGE})
# Append the trailing ] to complete the JSON array.
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}]")
# Generate the final JSON for this file.
message("Generate JSON for file: ${GCOV_SRC_REL_PATH}...")
string(CONFIGURE ${SRC_FILE_TEMPLATE} FILE_JSON)
set(JSON_GCOV_FILES "${JSON_GCOV_FILES}${FILE_JSON}, ")
endforeach()
# Also loop through all files we couldn't find any coverage for
# and generate JSON for them with 0% coverage.
foreach(NOT_COVERED_SRC ${COVERAGE_SRCS_REMAINING})
# Loads the source file as a list of lines.
file(STRINGS ${NOT_COVERED_SRC} SRC_LINES)
set(GCOV_FILE_COVERAGE "[")
set(GCOV_FILE_SOURCE "")
foreach (SOURCE ${SRC_LINES})
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}0, ")
string(REPLACE "\\" "\\\\" SOURCE "${SOURCE}")
string(REGEX REPLACE "\"" "\\\\\"" SOURCE "${SOURCE}")
string(REPLACE "\t" "\\\\t" SOURCE "${SOURCE}")
string(REPLACE "\r" "\\\\r" SOURCE "${SOURCE}")
set(GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}${SOURCE}\\n")
endforeach()
# Remove trailing comma, and complete JSON array with ]
string(REGEX REPLACE ",[ ]*$" "" GCOV_FILE_COVERAGE ${GCOV_FILE_COVERAGE})
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}]")
# Generate the final JSON for this file.
message("Generate JSON for non-gcov file: ${NOT_COVERED_SRC}...")
string(CONFIGURE ${SRC_FILE_TEMPLATE} FILE_JSON)
set(JSON_GCOV_FILES "${JSON_GCOV_FILES}${FILE_JSON}, ")
endforeach()
# Get rid of trailing comma.
string(REGEX REPLACE ",[ ]*$" "" JSON_GCOV_FILES ${JSON_GCOV_FILES})
set(JSON_GCOV_FILES "${JSON_GCOV_FILES}]")
# Generate the final complete JSON!
message("Generate final JSON...")
string(CONFIGURE ${JSON_TEMPLATE} JSON)
file(WRITE "${COVERALLS_OUTPUT_FILE}" "${JSON}")
message("###########################################################################")
message("Generated coveralls JSON containing coverage data:")
message("${COVERALLS_OUTPUT_FILE}")
message("###########################################################################")


@ -21,9 +21,10 @@
#define JANSSON_USING_CMAKE
#endif
/* Note: when using cmake, JSON_INTEGER_IS_LONG_LONG is not defined nor used,
* as we will also check for __int64 etc types.
* (the definition was used in the automake system) */
/* If your compiler supports the `long long` type and the strtoll()
library function, JSON_INTEGER_IS_LONG_LONG is defined to 1,
otherwise to 0. */
#cmakedefine JSON_INTEGER_IS_LONG_LONG 1
/* Bring in the cmake-detected defines */
#cmakedefine HAVE_STDINT_H 1
@ -56,9 +57,6 @@
#define JSON_INTEGER_FORMAT @JSON_INTEGER_FORMAT@
/* If locale.h and localeconv() are available, define to 1, otherwise to 0. */
#define JSON_HAVE_LOCALECONV @JSON_HAVE_LOCALECONV@
/* If __atomic builtins are available they will be used to manage
reference counts of json_t. */
#define JSON_HAVE_ATOMIC_BUILTINS @JSON_HAVE_ATOMIC_BUILTINS@


@ -21,6 +21,8 @@
#cmakedefine HAVE_LOCALE_H 1
#cmakedefine HAVE_SETLOCALE 1
#cmakedefine WORDS_BIGENDIAN 1
#cmakedefine HAVE_INT32_T 1
#ifndef HAVE_INT32_T
# define int32_t @JSON_INT32@
@ -50,4 +52,11 @@
#cmakedefine USE_URANDOM 1
#cmakedefine USE_WINDOWS_CRYPTOAPI 1
#cmakedefine USE_DTOA 1
#if USE_DTOA
# define DTOA_ENABLED 1
#else
# define DTOA_ENABLED 0
#endif
#define INITIAL_HASHTABLE_ORDER @JANSSON_INITIAL_HASHTABLE_ORDER@


@ -1,5 +1,5 @@
AC_PREREQ([2.60])
AC_INIT([jansson], [2.13], [https://github.com/akheron/jansson/issues])
AC_INIT([jansson], [2.14.1], [https://github.com/akheron/jansson/issues])
AC_CONFIG_AUX_DIR([.])
AM_INIT_AUTOMAKE([1.10 foreign])
@ -25,6 +25,8 @@ AC_TYPE_UINT16_T
AC_TYPE_UINT8_T
AC_TYPE_LONG_LONG_INT
AC_C_BIGENDIAN
AC_C_INLINE
case $ac_cv_c_inline in
yes) json_inline=inline;;
@ -34,7 +36,7 @@ esac
AC_SUBST([json_inline])
# Checks for library functions.
AC_CHECK_FUNCS([close getpid gettimeofday localeconv open read sched_yield strtoll])
AC_CHECK_FUNCS([close getpid gettimeofday open read setlocale sched_yield strtoll])
AC_MSG_CHECKING([for gcc __sync builtins])
have_sync_builtins=no
@ -74,12 +76,6 @@ case "$ac_cv_type_long_long_int$ac_cv_func_strtoll" in
esac
AC_SUBST([json_have_long_long])
case "$ac_cv_header_locale_h$ac_cv_func_localeconv" in
yesyes) json_have_localeconv=1;;
*) json_have_localeconv=0;;
esac
AC_SUBST([json_have_localeconv])
# Features
AC_ARG_ENABLE([urandom],
[AS_HELP_STRING([--disable-urandom],
@ -137,6 +133,23 @@ fi
AS_IF([test "x$with_Bsymbolic" = "xyes"], [JSON_BSYMBOLIC_LDFLAGS=-Wl[,]-Bsymbolic-functions])
AC_SUBST(JSON_BSYMBOLIC_LDFLAGS)
# Enable symbol versioning on GNU libc
JSON_SYMVER_LDFLAGS=
AC_CHECK_DECL([__GLIBC__], [JSON_SYMVER_LDFLAGS=-Wl,--default-symver])
AC_SUBST([JSON_SYMVER_LDFLAGS])
AC_ARG_ENABLE([dtoa],
[AS_HELP_STRING([--enable-dtoa], [Use dtoa for optimal floating point to string conversion])],
[case "$enableval" in
yes) dtoa=yes ;;
no) dtoa=no ;;
*) AC_MSG_ERROR([bad value ${enableval} for --enable-dtoa]) ;;
esac], [dtoa=yes])
if test "$dtoa" = "yes"; then
AC_DEFINE([DTOA_ENABLED], [1],
[Define to 1 to use dtoa to convert floating points to strings])
fi
AM_CONDITIONAL([DTOA_ENABLED], [test "$dtoa" = "yes"])
AC_ARG_ENABLE([ossfuzzers],
[AS_HELP_STRING([--enable-ossfuzzers],

doc/.readthedocs.yaml (new file, 9 lines)

@ -0,0 +1,9 @@
version: 2

build:
  os: ubuntu-22.04
  tools:
    python: "3.12"

sphinx:
  configuration: doc/conf.py


@ -1,5 +1,5 @@
EXTRA_DIST = conf.py apiref.rst changes.rst conformance.rst \
gettingstarted.rst github_commits.c index.rst portability.rst \
EXTRA_DIST = conf.py apiref.rst changes.rst conformance.rst \
gettingstarted.rst github_commits.c index.rst threadsafety.rst \
tutorial.rst upgrading.rst ext/refcounting.py
SPHINXBUILD = sphinx-build


@ -114,7 +114,7 @@ also cause errors.
Type
----
.. type:: enum json_type
.. c:enum:: json_type
The type of a JSON value. The following members are defined:
@ -145,33 +145,33 @@ Type
.. function:: int json_typeof(const json_t *json)
Return the type of the JSON value (a :type:`json_type` cast to
:type:`int`). *json* MUST NOT be *NULL*. This function is actually
``int``). *json* MUST NOT be *NULL*. This function is actually
implemented as a macro for speed.
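As an informal illustration, the returned type can be switched on directly; the helper below is not part of the library::

    #include <jansson.h>

    /* Map a value's json_type to a printable name (illustrative helper). */
    static const char *type_name(const json_t *json) {
        switch (json_typeof(json)) {
            case JSON_OBJECT:  return "object";
            case JSON_ARRAY:   return "array";
            case JSON_STRING:  return "string";
            case JSON_INTEGER: return "integer";
            case JSON_REAL:    return "real";
            case JSON_TRUE:
            case JSON_FALSE:   return "boolean";
            case JSON_NULL:    return "null";
            default:           return "unknown";
        }
    }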
.. function:: json_is_object(const json_t *json)
json_is_array(const json_t *json)
json_is_string(const json_t *json)
json_is_integer(const json_t *json)
json_is_real(const json_t *json)
json_is_true(const json_t *json)
json_is_false(const json_t *json)
json_is_null(const json_t *json)
.. function:: int json_is_object(const json_t *json)
int json_is_array(const json_t *json)
int json_is_string(const json_t *json)
int json_is_integer(const json_t *json)
int json_is_real(const json_t *json)
int json_is_true(const json_t *json)
int json_is_false(const json_t *json)
int json_is_null(const json_t *json)
These functions (actually macros) return true (non-zero) for values
of the given type, and false (zero) for values of other types and
for *NULL*.
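A minimal sketch of the usual guard pattern; the object layout (a "name" key) is assumed only for illustration::

    #include <jansson.h>
    #include <stdio.h>

    /* Print the "name" member only if it exists and really is a string. */
    static void print_name(const json_t *user) {
        json_t *name = json_object_get(user, "name");
        if (json_is_string(name))   /* also false when name is NULL */
            printf("name: %s\n", json_string_value(name));
    }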
.. function:: json_is_number(const json_t *json)
.. function:: int json_is_number(const json_t *json)
Returns true for values of types ``JSON_INTEGER`` and
``JSON_REAL``, and false for other types and for *NULL*.
.. function:: json_is_boolean(const json_t *json)
.. function:: int json_is_boolean(const json_t *json)
Returns true for types ``JSON_TRUE`` and ``JSON_FALSE``, and false
for values of other types and for *NULL*.
.. function:: json_boolean_value(const json_t *json)
.. function:: int json_boolean_value(const json_t *json)
Alias of :func:`json_is_true()`, i.e. returns 1 for ``JSON_TRUE``
and 0 otherwise.
@ -594,12 +594,12 @@ A JSON array is an ordered collection of other JSON values.
Appends all elements in *other_array* to the end of *array*.
Returns 0 on success and -1 on error.
.. function:: json_array_foreach(array, index, value)
.. function:: void json_array_foreach(array, index, value)
Iterate over every element of ``array``, running the block
of code that follows each time with the proper values set to
variables ``index`` and ``value``, of types :type:`size_t` and
:type:`json_t *` respectively. Example::
:type:`json_t` pointer respectively. Example::
/* array is a JSON array */
size_t index;
@ -648,6 +648,15 @@ allowed in object keys.
Get a value corresponding to *key* from *object*. Returns *NULL* if
*key* is not found and on error.
.. function:: json_t *json_object_getn(const json_t *object, const char *key, size_t key_len)
.. refcounting:: borrow
Like :func:`json_object_get`, but takes a fixed-length *key* of length *key_len*.
See :ref:`fixed_length_keys` for details.
.. versionadded:: 2.14
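A minimal usage sketch, assuming the key is a slice of a larger, non-null-terminated buffer (names and values are illustrative)::

    #include <assert.h>
    #include <jansson.h>

    int main(void) {
        /* The first 11 bytes of buf form the key "temperature" */
        const char buf[] = "temperature_unit";
        json_t *obj = json_pack("{s:f}", "temperature", 21.5);

        json_t *val = json_object_getn(obj, buf, 11);
        assert(val && json_real_value(val) == 21.5);

        json_decref(obj);
        return 0;
    }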
.. function:: int json_object_set(json_t *object, const char *key, json_t *value)
Set the value of *key* to *value* in *object*. *key* must be a
@ -655,6 +664,13 @@ allowed in object keys.
already is a value for *key*, it is replaced by the new value.
Returns 0 on success and -1 on error.
.. function:: int json_object_setn(json_t *object, const char *key, size_t key_len, json_t *value)
Like :func:`json_object_set`, but takes a fixed-length *key* of length *key_len*.
See :ref:`fixed_length_keys` for details.
.. versionadded:: 2.14
.. function:: int json_object_set_nocheck(json_t *object, const char *key, json_t *value)
Like :func:`json_object_set`, but doesn't check that *key* is
@ -662,12 +678,26 @@ allowed in object keys.
really is the case (e.g. you have already checked it by other
means).
.. function:: int json_object_setn_nocheck(json_t *object, const char *key, size_t key_len, json_t *value)
Like :func:`json_object_set_nocheck`, but takes a fixed-length *key* of length *key_len*.
See :ref:`fixed_length_keys` for details.
.. versionadded:: 2.14
.. function:: int json_object_set_new(json_t *object, const char *key, json_t *value)
Like :func:`json_object_set()` but steals the reference to
*value*. This is useful when *value* is newly created and not used
after the call.
.. function:: int json_object_setn_new(json_t *object, const char *key, size_t key_len, json_t *value)
Like :func:`json_object_set_new`, but takes a fixed-length *key* of length *key_len*.
See :ref:`fixed_length_keys` for details.
.. versionadded:: 2.14
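For illustration, a sketch that stores a ``key=value`` pair without first copying the key into its own buffer (the helper name and input format are assumptions)::

    #include <jansson.h>
    #include <string.h>

    /* Insert "key=value"; the key part is not null-terminated on its own. */
    static int set_from_pair(json_t *obj, const char *pair) {
        const char *eq = strchr(pair, '=');
        if (!eq)
            return -1;
        /* json_object_setn_new steals the reference to the new string value */
        return json_object_setn_new(obj, pair, (size_t)(eq - pair),
                                    json_string(eq + 1));
    }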
.. function:: int json_object_set_new_nocheck(json_t *object, const char *key, json_t *value)
Like :func:`json_object_set_new`, but doesn't check that *key* is
@ -675,12 +705,26 @@ allowed in object keys.
really is the case (e.g. you have already checked it by other
means).
.. function:: int json_object_setn_new_nocheck(json_t *object, const char *key, size_t key_len, json_t *value)
Like :func:`json_object_set_new_nocheck`, but takes a fixed-length *key* of length *key_len*.
See :ref:`fixed_length_keys` for details.
.. versionadded:: 2.14
.. function:: int json_object_del(json_t *object, const char *key)
Delete *key* from *object* if it exists. Returns 0 on success, or
-1 if *key* was not found. The reference count of the removed value
is decremented.
.. function:: int json_object_deln(json_t *object, const char *key, size_t key_len)
Like :func:`json_object_del`, but takes a fixed-length *key* of length *key_len*.
See :ref:`fixed_length_keys` for details.
.. versionadded:: 2.14
.. function:: int json_object_clear(json_t *object)
Remove all elements from *object*. Returns 0 on success and -1 if
@ -732,12 +776,12 @@ allowed in object keys.
recursively merged with the corresponding values in *object* if they are also
objects, instead of overwriting them. Returns 0 on success or -1 on error.
.. function:: json_object_foreach(object, key, value)
.. function:: void json_object_foreach(object, key, value)
Iterate over every key-value pair of ``object``, running the block
of code that follows each time with the proper values set to
variables ``key`` and ``value``, of types :type:`const char *` and
:type:`json_t *` respectively. Example::
variables ``key`` and ``value``, of types ``const char *`` and
:type:`json_t` pointer respectively. Example::
/* obj is a JSON object */
const char *key;
@ -750,7 +794,7 @@ allowed in object keys.
The items are returned in the order they were inserted to the
object.
**Note:** It's not safe to call ``json_object_del(object, key)``
**Note:** It's not safe to call ``json_object_del(object, key)`` or ``json_object_deln(object, key, key_len)``
during iteration. If you need to, use
:func:`json_object_foreach_safe` instead.
@ -764,14 +808,42 @@ allowed in object keys.
.. versionadded:: 2.3
.. function:: json_object_foreach_safe(object, tmp, key, value)
.. function:: void json_object_foreach_safe(object, tmp, key, value)
Like :func:`json_object_foreach()`, but it's safe to call
``json_object_del(object, key)`` during iteration. You need to pass
an extra ``void *`` parameter ``tmp`` that is used for temporary storage.
``json_object_del(object, key)`` or ``json_object_deln(object, key, key_len)`` during iteration.
You need to pass an extra ``void *`` parameter ``tmp`` that is used for temporary storage.
.. versionadded:: 2.8
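A small illustrative sketch; the cleanup criterion is made up for the example::

    #include <jansson.h>

    /* Drop all members whose value is JSON null. */
    static void drop_nulls(json_t *obj) {
        const char *key;
        json_t *value;
        void *tmp;

        json_object_foreach_safe(obj, tmp, key, value) {
            if (json_is_null(value))
                json_object_del(obj, key);
        }
    }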
.. function:: void json_object_keylen_foreach(object, key, key_len, value)
Like :c:func:`json_object_foreach`, but the length of *key* is stored in *key_len*.
Example::
/* obj is a JSON object */
const char *key;
json_t *value;
size_t len;
json_object_keylen_foreach(obj, key, len, value) {
printf("got key %s with length %zu\n", key, len);
}
**Note:** It's not safe to call ``json_object_deln(object, key, key_len)``
during iteration. If you need to, use
:func:`json_object_keylen_foreach_safe` instead.
.. versionadded:: 2.14
.. function:: void json_object_keylen_foreach_safe(object, tmp, key, key_len, value)
Like :func:`json_object_keylen_foreach()`, but it's safe to call
``json_object_deln(object, key, key_len)`` during iteration.
You need to pass an extra ``void *`` parameter ``tmp`` that is used for temporary storage.
.. versionadded:: 2.14
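Along the same lines, a sketch that removes every member whose key starts with a given prefix, using only the length-aware calls (the prefix criterion is illustrative)::

    #include <jansson.h>
    #include <string.h>

    static void drop_prefixed(json_t *obj, const char *prefix) {
        size_t plen = strlen(prefix);
        const char *key;
        size_t key_len;
        json_t *value;
        void *tmp;

        json_object_keylen_foreach_safe(obj, tmp, key, key_len, value) {
            if (key_len >= plen && memcmp(key, prefix, plen) == 0)
                json_object_deln(obj, key, key_len);
        }
    }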
The following functions can be used to iterate through all key-value
pairs in an object. The items are returned in the order they were
@ -800,6 +872,12 @@ inserted to the object.
Extract the associated key from *iter*.
.. function:: size_t json_object_iter_key_len(void *iter)
Extract the associated key length from *iter*.
.. versionadded:: 2.14
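A brief sketch combining the low-level iterator calls with the stored key length (output formatting is simplified)::

    #include <jansson.h>
    #include <stdio.h>

    static void print_keys(json_t *obj) {
        void *iter = json_object_iter(obj);
        while (iter) {
            const char *key = json_object_iter_key(iter);
            size_t key_len = json_object_iter_key_len(iter);
            printf("%.*s (length %zu)\n", (int)key_len, key, key_len);
            iter = json_object_iter_next(obj, iter);
        }
    }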
.. function:: json_t *json_object_iter_value(void *iter)
.. refcounting:: borrow
@ -855,8 +933,7 @@ inserted to the object.
:func:`json_object()`, either explicit or implicit. If this
function is not called by the user, the first call to
:func:`json_object()` (either explicit or implicit) seeds the hash
function. See :ref:`portability-thread-safety` for notes on thread
safety.
function. See :ref:`thread-safety` for notes on thread safety.
If repeatable results are required, for e.g. unit tests, the hash
function can be "unrandomized" by calling :func:`json_object_seed`
@ -926,7 +1003,7 @@ success. See :ref:`apiref-decoding` for more info.
All functions also accept *NULL* as the :type:`json_error_t` pointer,
in which case no error information is returned to the caller.
.. type:: enum json_error_code
.. c:enum:: json_error_code
An enumeration containing numeric error codes. The following errors are
currently defined:
@ -1021,7 +1098,7 @@ in which case no error information is returned to the caller.
Encoding
========
This sections describes the functions that can be used to encode
This section describes the functions that can be used to encode
values to JSON. By default, only objects and arrays can be encoded
directly, since they are the only valid *root* values of a JSON text.
To encode any JSON value, use the ``JSON_ENCODE_ANY`` flag (see
@ -1206,7 +1283,7 @@ These functions output UTF-8:
Decoding
========
This sections describes the functions that can be used to decode JSON
This section describes the functions that can be used to decode JSON
text to the Jansson representation of JSON data. The JSON
specification requires that a JSON text is either a serialized array
or object, and this requirement is also enforced with the following
@ -1488,17 +1565,17 @@ arguments.
Output a JSON null value. No argument is consumed.
``b`` (boolean) [int]
Convert a C :type:`int` to JSON boolean value. Zero is converted
Convert a C ``int`` to JSON boolean value. Zero is converted
to ``false`` and non-zero to ``true``.
``i`` (integer) [int]
Convert a C :type:`int` to JSON integer.
Convert a C ``int`` to JSON integer.
``I`` (integer) [json_int_t]
Convert a C :type:`json_int_t` to JSON integer.
``f`` (real) [double]
Convert a C :type:`double` to JSON real.
Convert a C ``double`` to JSON real.
``o`` (any value) [json_t \*]
Output any given JSON value as-is. If the value is added to an
@ -1625,20 +1702,20 @@ type whose address should be passed.
Expect a JSON null value. Nothing is extracted.
``b`` (boolean) [int]
Convert a JSON boolean value to a C :type:`int`, so that ``true``
Convert a JSON boolean value to a C ``int``, so that ``true``
is converted to 1 and ``false`` to 0.
``i`` (integer) [int]
Convert a JSON integer to C :type:`int`.
Convert a JSON integer to C ``int``.
``I`` (integer) [json_int_t]
Convert a JSON integer to C :type:`json_int_t`.
``f`` (real) [double]
Convert a JSON real to C :type:`double`.
Convert a JSON real to C ``double``.
``F`` (integer or real) [double]
Convert a JSON number (integer or real) to C :type:`double`.
Convert a JSON number (integer or real) to C ``double``.
``o`` (any value) [json_t \*]
Store a JSON value with no conversion to a :type:`json_t` pointer.
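To make these conversions concrete, an illustrative sketch (the keys and values are made up)::

    #include <jansson.h>
    #include <stdio.h>

    int main(void) {
        json_t *root = json_pack("{s:b, s:i, s:f}", "enabled", 1,
                                 "count", 42, "ratio", 0.5);
        int enabled, count;
        double ratio;

        /* 'b', 'i' and 'f' extract into int, int and double respectively */
        if (json_unpack(root, "{s:b, s:i, s:f}", "enabled", &enabled,
                        "count", &count, "ratio", &ratio) == 0)
            printf("enabled=%d count=%d ratio=%g\n", enabled, count, ratio);

        json_decref(root);
        return 0;
    }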
@ -1909,3 +1986,79 @@ memory, see
http://www.dwheeler.com/secure-programs/Secure-Programs-HOWTO/protect-secrets.html.
The page also explains the :func:`guaranteed_memset()` function used
in the example and gives a sample implementation for it.
.. _fixed_length_keys:
Fixed-Length keys
=================
The Jansson API allows working with fixed-length keys. This is useful in the following cases:
* The key is contained in a larger buffer and is not null-terminated. In this case there is no need to create a temporary null-terminated copy.
* The key contains U+0000 characters.
List of API functions for fixed-length keys:
* :c:func:`json_object_getn`
* :c:func:`json_object_setn`
* :c:func:`json_object_setn_nocheck`
* :c:func:`json_object_setn_new`
* :c:func:`json_object_setn_new_nocheck`
* :c:func:`json_object_deln`
* :c:func:`json_object_iter_key_len`
* :c:func:`json_object_keylen_foreach`
* :c:func:`json_object_keylen_foreach_safe`
**Examples:**
As an example, let's write a function that looks up a :c:struct:`json_t` value by a path whose components are separated by ``.``.
This requires:
* a string iterator (the input string does not need to be modified or copied, which is better for performance)
* the API for working with fixed-length keys
The iterator::
struct string {
const char *string;
size_t length;
};
size_t string_try_next(struct string *str, const char *delimiter) {
str->string += strspn(str->string, delimiter);
str->length = strcspn(str->string, delimiter);
return str->length;
}
#define string_foreach(_string, _delimiter) \
for (; string_try_next(&(_string), _delimiter); (_string).string += (_string).length)
The function::
json_t *json_object_get_by_path(json_t *object, const char *path) {
struct string str;
json_t *out = object;
str.string = path;
string_foreach(str, ".") {
out = json_object_getn(out, str.string, str.length);
if (out == NULL)
return NULL;
}
return out;
}
And usage::
int main(void) {
json_t *obj = json_pack("{s:{s:{s:b}}}", "a", "b", "c", 1);
json_t *c = json_object_get_by_path(obj, "a.b.c");
assert(json_is_true(c));
json_decref(obj);
}


@ -48,7 +48,7 @@ copyright = u'2009-2020, Petri Lehtinen'
# built documents.
#
# The short X.Y version.
version = '2.13'
version = '2.14.1'
# The full version, including alpha/beta/rc tags.
release = version


@ -22,8 +22,7 @@ JSON strings are mapped to C-style null-terminated character arrays,
and UTF-8 encoding is used internally.
All Unicode codepoints U+0000 through U+10FFFF are allowed in string
values. However, U+0000 is not allowed in object keys because of API
restrictions.
values. However, U+0000 is allowed in object keys only for length-aware functions.
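For instance, a key containing U+0000 can only be set and looked up through the length-aware functions; a brief sketch (the key bytes are arbitrary)::

    #include <assert.h>
    #include <jansson.h>

    int main(void) {
        /* A 3-byte key with an embedded U+0000: 'a', NUL, 'b' */
        static const char key[] = {'a', '\0', 'b'};
        json_t *obj = json_object();

        json_object_setn_new(obj, key, sizeof(key), json_true());
        assert(json_is_true(json_object_getn(obj, key, sizeof(key))));

        json_decref(obj);
        return 0;
    }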
Unicode normalization or any other transformation is never performed
on any strings (string values or object keys). When checking for
@ -110,7 +109,7 @@ to overflow semantics). Also, no support or hooks are provided for any
supplemental "bignum" type add-on packages.
Depth of nested values
----------------------
======================
To avoid stack exhaustion, Jansson currently limits the nesting depth
for arrays and objects to a certain value (default: 2048), defined as


@ -24,8 +24,8 @@
"""
from docutils import nodes
from docutils.parsers.rst import Directive
class refcounting(nodes.emphasis): pass
def visit(self, node):
self.visit_emphasis(node)
@ -40,16 +40,25 @@ def html_depart(self, node):
self.body.append('</em>')
def refcounting_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
if arguments[0] == 'borrow':
text = 'Return value: Borrowed reference.'
elif arguments[0] == 'new':
text = 'Return value: New reference.'
else:
raise Error('Valid arguments: new, borrow')
class refcounting(nodes.emphasis):
pass
class refcounting_directive(Directive):
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = False
def run(self):
if self.arguments[0] == 'borrow':
text = 'Return value: Borrowed reference.'
elif self.arguments[0] == 'new':
text = 'Return value: New reference.'
else:
raise Error('Valid arguments: new, borrow')
return [refcounting(text, text)]
return [refcounting(text, text)]
def setup(app):
app.add_node(refcounting,
@ -57,4 +66,4 @@ def setup(app):
latex=(visit, depart),
text=(visit, depart),
man=(visit, depart))
app.add_directive('refcounting', refcounting_directive, 0, (1, 0, 0))
app.add_directive('refcounting', refcounting_directive)


@ -153,6 +153,7 @@ int main(int argc, char *argv[]) {
sha = json_object_get(data, "sha");
if (!json_is_string(sha)) {
fprintf(stderr, "error: commit %d: sha is not a string\n", (int)(i + 1));
json_decref(root);
return 1;
}


@ -41,7 +41,7 @@ Contents
upgrading
tutorial
conformance
portability
threadsafety
apiref
changes


@ -1,11 +1,8 @@
***********
Portability
***********
.. _portability-thread-safety:
.. _thread-safety:
*************
Thread safety
-------------
*************
Jansson as a library is thread safe and has no mutable global state.
The only exceptions are the hash function seed and memory allocation
@ -64,7 +61,7 @@ program startup. See :ref:`apiref-custom-memory-allocation`.
Locale
------
======
Jansson works fine under any locale.


@ -47,13 +47,13 @@ List of Incompatible Changes
**Underlying type of JSON integers**
The underlying C type of JSON integers has been changed from
:type:`int` to the widest available signed integer type, i.e.
:type:`long long` or :type:`long`, depending on whether
:type:`long long` is supported on your system or not. This makes
``int`` to the widest available signed integer type, i.e.
``long long`` or ``long``, depending on whether
``long long`` is supported on your system or not. This makes
the whole 64-bit integer range available on most modern systems.
``jansson.h`` has a typedef :type:`json_int_t` to the underlying
integer type. :type:`int` should still be used in most cases when
integer type. ``int`` should still be used in most cases when
dealing with smallish JSON integers, as the compiler handles
implicit type coercion. Only when the full 64-bit range is needed,
:type:`json_int_t` should be explicitly used.
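For illustration only, a minimal sketch that uses the 64-bit type explicitly; ``JSON_INTEGER_FORMAT`` is the matching ``printf`` conversion provided by ``jansson.h`` (the value itself is arbitrary)::

    #include <jansson.h>
    #include <stdio.h>

    int main(void) {
        json_t *big = json_integer((json_int_t)9007199254740993LL);

        printf("value = %" JSON_INTEGER_FORMAT "\n", json_integer_value(big));

        json_decref(big);
        return 0;
    }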
@ -69,8 +69,8 @@ List of Incompatible Changes
**Unsigned integers in API functions**
Version 2.0 unifies unsigned integer usage in the API. All uses of
:type:`unsigned int` and :type:`unsigned long` have been replaced
with :type:`size_t`. This includes flags, container sizes, etc.
``unsigned int`` and ``unsigned long`` have been replaced
with ``size_t``. This includes flags, container sizes, etc.
This should not require source code changes, as both
:type:`unsigned int` and :type:`unsigned long` are usually
compatible with :type:`size_t`.
``unsigned int`` and ``unsigned long`` are usually
compatible with ``size_t``.


@ -30,7 +30,7 @@
void print_json(json_t *root);
void print_json_aux(json_t *element, int indent);
void print_json_indent(int indent);
const char *json_plural(int count);
const char *json_plural(size_t count);
void print_json_object(json_t *element, int indent);
void print_json_array(json_t *element, int indent);
void print_json_string(json_t *element, int indent);
@ -80,7 +80,7 @@ void print_json_indent(int indent) {
}
}
const char *json_plural(int count) { return count == 1 ? "" : "s"; }
const char *json_plural(size_t count) { return count == 1 ? "" : "s"; }
void print_json_object(json_t *element, int indent) {
size_t size;
@ -90,7 +90,7 @@ void print_json_object(json_t *element, int indent) {
print_json_indent(indent);
size = json_object_size(element);
printf("JSON Object of %ld pair%s:\n", size, json_plural(size));
printf("JSON Object of %lld pair%s:\n", (long long)size, json_plural(size));
json_object_foreach(element, key, value) {
print_json_indent(indent + 2);
printf("JSON Key: \"%s\"\n", key);
@ -103,7 +103,7 @@ void print_json_array(json_t *element, int indent) {
size_t size = json_array_size(element);
print_json_indent(indent);
printf("JSON Array of %ld element%s:\n", size, json_plural(size));
printf("JSON Array of %lld element%s:\n", (long long)size, json_plural(size));
for (i = 0; i < size; i++) {
print_json_aux(json_array_get(element, i), indent + 2);
}


@ -1,3 +1,3 @@
#!/bin/bash
find . -type f -a '(' -name '*.c' -o -name '*.h' ')' | xargs clang-format -i
git ls-files | grep '\.[ch]$' | xargs clang-format -i


@ -12,13 +12,16 @@ fi
errors=0
paths=$(git ls-files | grep '\.[ch]$')
for path in $paths; do
echo "Checking $path"
$CLANG_FORMAT $path > $path.formatted
in=$(cat $path)
out=$($CLANG_FORMAT $path)
out=$(cat $path.formatted)
if [ "$in" != "$out" ]; then
diff -u -L $path -L "$path.formatted" $path - <<<$out
diff -u $path $path.formatted
errors=1
fi
rm $path.formatted
done
if [ $errors -ne 0 ]; then


@ -1,4 +1,4 @@
EXTRA_DIST = jansson.def
EXTRA_DIST = jansson.def dtoa.c
include_HEADERS = jansson.h
nodist_include_HEADERS = jansson_config.h
@ -22,8 +22,14 @@ libjansson_la_SOURCES = \
utf.h \
value.c \
version.c
if DTOA_ENABLED
libjansson_la_SOURCES += dtoa.c
endif
libjansson_la_LDFLAGS = \
-no-undefined \
-export-symbols-regex '^json_' \
-version-info 16:0:12 \
-export-symbols-regex '^json_|^jansson_' \
-version-info 18:1:14 \
@JSON_SYMVER_LDFLAGS@ \
@JSON_BSYMBOLIC_LDFLAGS@

src/dtoa.c (new file, 6265 lines)

File diff suppressed because it is too large.


@ -23,10 +23,10 @@
#include "strbuffer.h"
#include "utf.h"
#define MAX_INTEGER_STR_LENGTH 100
#define MAX_REAL_STR_LENGTH 100
#define MAX_INTEGER_STR_LENGTH 25
#define MAX_REAL_STR_LENGTH 25
#define FLAGS_TO_INDENT(f) ((f)&0x1F)
#define FLAGS_TO_INDENT(f) ((f) & 0x1F)
#define FLAGS_TO_PRECISION(f) (((f) >> 11) & 0x1F)
struct buffer {
@ -195,8 +195,21 @@ static int dump_string(const char *str, size_t len, json_dump_callback_t dump, v
return dump("\"", 1, data);
}
struct key_len {
const char *key;
int len;
};
static int compare_keys(const void *key1, const void *key2) {
return strcmp(*(const char **)key1, *(const char **)key2);
const struct key_len *k1 = key1;
const struct key_len *k2 = key2;
const size_t min_size = k1->len < k2->len ? k1->len : k2->len;
int res = memcmp(k1->key, k2->key, min_size);
if (res)
return res;
return k1->len - k2->len;
}
static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *parents,
@ -253,9 +266,10 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
/* Space for "0x", double the sizeof a pointer for the hex and a
* terminator. */
char key[2 + (sizeof(json) * 2) + 1];
size_t key_len;
/* detect circular references */
if (jsonp_loop_check(parents, json, key, sizeof(key)))
if (jsonp_loop_check(parents, json, key, sizeof(key), &key_len))
return -1;
n = json_array_size(json);
@ -263,7 +277,7 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
if (!embed && dump("[", 1, data))
return -1;
if (n == 0) {
hashtable_del(parents, key);
hashtable_del(parents, key, key_len);
return embed ? 0 : dump("]", 1, data);
}
if (dump_indent(flags, depth + 1, 0, dump, data))
@ -284,7 +298,7 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
}
}
hashtable_del(parents, key);
hashtable_del(parents, key, key_len);
return embed ? 0 : dump("]", 1, data);
}
@ -293,6 +307,7 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
const char *separator;
int separator_length;
char loop_key[LOOP_KEY_LEN];
size_t loop_key_len;
if (flags & JSON_COMPACT) {
separator = ":";
@ -303,7 +318,8 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
}
/* detect circular references */
if (jsonp_loop_check(parents, json, loop_key, sizeof(loop_key)))
if (jsonp_loop_check(parents, json, loop_key, sizeof(loop_key),
&loop_key_len))
return -1;
iter = json_object_iter((json_t *)json);
@ -311,40 +327,44 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
if (!embed && dump("{", 1, data))
return -1;
if (!iter) {
hashtable_del(parents, loop_key);
hashtable_del(parents, loop_key, loop_key_len);
return embed ? 0 : dump("}", 1, data);
}
if (dump_indent(flags, depth + 1, 0, dump, data))
return -1;
if (flags & JSON_SORT_KEYS) {
const char **keys;
struct key_len *keys;
size_t size, i;
size = json_object_size(json);
keys = jsonp_malloc(size * sizeof(const char *));
keys = jsonp_malloc(size * sizeof(struct key_len));
if (!keys)
return -1;
i = 0;
while (iter) {
keys[i] = json_object_iter_key(iter);
struct key_len *keylen = &keys[i];
keylen->key = json_object_iter_key(iter);
keylen->len = json_object_iter_key_len(iter);
iter = json_object_iter_next((json_t *)json, iter);
i++;
}
assert(i == size);
qsort(keys, size, sizeof(const char *), compare_keys);
qsort(keys, size, sizeof(struct key_len), compare_keys);
for (i = 0; i < size; i++) {
const char *key;
const struct key_len *key;
json_t *value;
key = keys[i];
value = json_object_get(json, key);
key = &keys[i];
value = json_object_getn(json, key->key, key->len);
assert(value);
dump_string(key, strlen(key), dump, data, flags);
dump_string(key->key, key->len, dump, data, flags);
if (dump(separator, separator_length, data) ||
do_dump(value, flags, depth + 1, parents, dump, data)) {
jsonp_free(keys);
@ -372,8 +392,9 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
while (iter) {
void *next = json_object_iter_next((json_t *)json, iter);
const char *key = json_object_iter_key(iter);
const size_t key_len = json_object_iter_key_len(iter);
dump_string(key, strlen(key), dump, data, flags);
dump_string(key, key_len, dump, data, flags);
if (dump(separator, separator_length, data) ||
do_dump(json_object_iter_value(iter), flags, depth + 1, parents,
dump, data))
@ -392,7 +413,7 @@ static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *par
}
}
hashtable_del(parents, loop_key);
hashtable_del(parents, loop_key, loop_key_len);
return embed ? 0 : dump("}", 1, data);
}


@ -5,14 +5,14 @@
* it under the terms of the MIT license. See LICENSE for details.
*/
#if HAVE_CONFIG_H
#ifdef HAVE_CONFIG_H
#include <jansson_private_config.h>
#endif
#include <stdlib.h>
#include <string.h>
#if HAVE_STDINT_H
#ifdef HAVE_STDINT_H
#include <stdint.h>
#endif
@ -35,7 +35,7 @@ extern volatile uint32_t hashtable_seed;
#define list_to_pair(list_) container_of(list_, pair_t, list)
#define ordered_list_to_pair(list_) container_of(list_, pair_t, ordered_list)
#define hash_str(key) ((size_t)hashlittle((key), strlen(key), hashtable_seed))
#define hash_str(key, len) ((size_t)hashlittle((key), len, hashtable_seed))
static JSON_INLINE void list_init(list_t *list) {
list->next = list;
@ -69,7 +69,7 @@ static void insert_to_bucket(hashtable_t *hashtable, bucket_t *bucket, list_t *l
}
static pair_t *hashtable_find_pair(hashtable_t *hashtable, bucket_t *bucket,
const char *key, size_t hash) {
const char *key, size_t key_len, size_t hash) {
list_t *list;
pair_t *pair;
@ -79,7 +79,8 @@ static pair_t *hashtable_find_pair(hashtable_t *hashtable, bucket_t *bucket,
list = bucket->first;
while (1) {
pair = list_to_pair(list);
if (pair->hash == hash && strcmp(pair->key, key) == 0)
if (pair->hash == hash && pair->key_len == key_len &&
memcmp(pair->key, key, key_len) == 0)
return pair;
if (list == bucket->last)
@ -92,7 +93,8 @@ static pair_t *hashtable_find_pair(hashtable_t *hashtable, bucket_t *bucket,
}
/* returns 0 on success, -1 if key was not found */
static int hashtable_do_del(hashtable_t *hashtable, const char *key, size_t hash) {
static int hashtable_do_del(hashtable_t *hashtable, const char *key, size_t key_len,
size_t hash) {
pair_t *pair;
bucket_t *bucket;
size_t index;
@ -100,7 +102,7 @@ static int hashtable_do_del(hashtable_t *hashtable, const char *key, size_t hash
index = hash & hashmask(hashtable->order);
bucket = &hashtable->buckets[index];
pair = hashtable_find_pair(hashtable, bucket, key, hash);
pair = hashtable_find_pair(hashtable, bucket, key, key_len, hash);
if (!pair)
return -1;
@ -193,7 +195,37 @@ void hashtable_close(hashtable_t *hashtable) {
jsonp_free(hashtable->buckets);
}
int hashtable_set(hashtable_t *hashtable, const char *key, json_t *value) {
static pair_t *init_pair(json_t *value, const char *key, size_t key_len, size_t hash) {
pair_t *pair;
/* offsetof(...) returns the size of pair_t without the last,
flexible member. This way, the correct amount is
allocated. */
if (key_len >= (size_t)-1 - offsetof(pair_t, key)) {
/* Avoid an overflow if the key is very long */
return NULL;
}
pair = jsonp_malloc(offsetof(pair_t, key) + key_len + 1);
if (!pair)
return NULL;
pair->hash = hash;
memcpy(pair->key, key, key_len);
pair->key[key_len] = '\0';
pair->key_len = key_len;
pair->value = value;
list_init(&pair->list);
list_init(&pair->ordered_list);
return pair;
}
int hashtable_set(hashtable_t *hashtable, const char *key, size_t key_len,
json_t *value) {
pair_t *pair;
bucket_t *bucket;
size_t hash, index;
@ -203,35 +235,20 @@ int hashtable_set(hashtable_t *hashtable, const char *key, json_t *value) {
if (hashtable_do_rehash(hashtable))
return -1;
hash = hash_str(key);
hash = hash_str(key, key_len);
index = hash & hashmask(hashtable->order);
bucket = &hashtable->buckets[index];
pair = hashtable_find_pair(hashtable, bucket, key, hash);
pair = hashtable_find_pair(hashtable, bucket, key, key_len, hash);
if (pair) {
json_decref(pair->value);
pair->value = value;
} else {
/* offsetof(...) returns the size of pair_t without the last,
flexible member. This way, the correct amount is
allocated. */
pair = init_pair(value, key, key_len, hash);
size_t len = strlen(key);
if (len >= (size_t)-1 - offsetof(pair_t, key)) {
/* Avoid an overflow if the key is very long */
return -1;
}
pair = jsonp_malloc(offsetof(pair_t, key) + len + 1);
if (!pair)
return -1;
pair->hash = hash;
strncpy(pair->key, key, len + 1);
pair->value = value;
list_init(&pair->list);
list_init(&pair->ordered_list);
insert_to_bucket(hashtable, bucket, &pair->list);
list_insert(&hashtable->ordered_list, &pair->ordered_list);
@ -240,24 +257,24 @@ int hashtable_set(hashtable_t *hashtable, const char *key, json_t *value) {
return 0;
}
void *hashtable_get(hashtable_t *hashtable, const char *key) {
void *hashtable_get(hashtable_t *hashtable, const char *key, size_t key_len) {
pair_t *pair;
size_t hash;
bucket_t *bucket;
hash = hash_str(key);
hash = hash_str(key, key_len);
bucket = &hashtable->buckets[hash & hashmask(hashtable->order)];
pair = hashtable_find_pair(hashtable, bucket, key, hash);
pair = hashtable_find_pair(hashtable, bucket, key, key_len, hash);
if (!pair)
return NULL;
return pair->value;
}
int hashtable_del(hashtable_t *hashtable, const char *key) {
size_t hash = hash_str(key);
return hashtable_do_del(hashtable, key, hash);
int hashtable_del(hashtable_t *hashtable, const char *key, size_t key_len) {
size_t hash = hash_str(key, key_len);
return hashtable_do_del(hashtable, key, key_len, hash);
}
void hashtable_clear(hashtable_t *hashtable) {
@ -278,15 +295,15 @@ void *hashtable_iter(hashtable_t *hashtable) {
return hashtable_iter_next(hashtable, &hashtable->ordered_list);
}
void *hashtable_iter_at(hashtable_t *hashtable, const char *key) {
void *hashtable_iter_at(hashtable_t *hashtable, const char *key, size_t key_len) {
pair_t *pair;
size_t hash;
bucket_t *bucket;
hash = hash_str(key);
hash = hash_str(key, key_len);
bucket = &hashtable->buckets[hash & hashmask(hashtable->order)];
pair = hashtable_find_pair(hashtable, bucket, key, hash);
pair = hashtable_find_pair(hashtable, bucket, key, key_len, hash);
if (!pair)
return NULL;
@ -305,6 +322,11 @@ void *hashtable_iter_key(void *iter) {
return pair->key;
}
size_t hashtable_iter_key_len(void *iter) {
pair_t *pair = ordered_list_to_pair((list_t *)iter);
return pair->key_len;
}
void *hashtable_iter_value(void *iter) {
pair_t *pair = ordered_list_to_pair((list_t *)iter);
return pair->value;


@ -24,6 +24,7 @@ struct hashtable_pair {
struct hashtable_list ordered_list;
size_t hash;
json_t *value;
size_t key_len;
char key[1];
};
@ -69,6 +70,7 @@ void hashtable_close(hashtable_t *hashtable);
*
* @hashtable: The hashtable object
* @key: The key
* @key_len: The length of key
* @serial: For addition order of keys
* @value: The value
*
@ -79,27 +81,29 @@ void hashtable_close(hashtable_t *hashtable);
*
* Returns 0 on success, -1 on failure (out of memory).
*/
int hashtable_set(hashtable_t *hashtable, const char *key, json_t *value);
int hashtable_set(hashtable_t *hashtable, const char *key, size_t key_len, json_t *value);
/**
* hashtable_get - Get a value associated with a key
*
* @hashtable: The hashtable object
* @key: The key
* @key_len: The length of key
*
* Returns value if it is found, or NULL otherwise.
*/
void *hashtable_get(hashtable_t *hashtable, const char *key);
void *hashtable_get(hashtable_t *hashtable, const char *key, size_t key_len);
/**
* hashtable_del - Remove a value from the hashtable
*
* @hashtable: The hashtable object
* @key: The key
* @key_len: The length of key
*
* Returns 0 on success, or -1 if the key was not found.
*/
int hashtable_del(hashtable_t *hashtable, const char *key);
int hashtable_del(hashtable_t *hashtable, const char *key, size_t key_len);
/**
* hashtable_clear - Clear hashtable
@ -132,11 +136,12 @@ void *hashtable_iter(hashtable_t *hashtable);
*
* @hashtable: The hashtable object
* @key: The key that the iterator should point to
* @key_len: The length of key
*
* Like hashtable_iter() but returns an iterator pointing to a
* specific key.
*/
void *hashtable_iter_at(hashtable_t *hashtable, const char *key);
void *hashtable_iter_at(hashtable_t *hashtable, const char *key, size_t key_len);
/**
* hashtable_iter_next - Advance an iterator
@ -156,6 +161,13 @@ void *hashtable_iter_next(hashtable_t *hashtable, void *iter);
*/
void *hashtable_iter_key(void *iter);
/**
* hashtable_iter_key_len - Retrieve the key length pointed by an iterator
*
* @iter: The iterator
*/
size_t hashtable_iter_key_len(void *iter);
/**
* hashtable_iter_value - Retrieve the value pointed by an iterator
*


@ -34,9 +34,13 @@ EXPORTS
json_object
json_object_size
json_object_get
json_object_getn
json_object_set_new
json_object_setn_new
json_object_set_new_nocheck
json_object_setn_new_nocheck
json_object_del
json_object_deln
json_object_clear
json_object_update
json_object_update_existing
@ -46,6 +50,7 @@ EXPORTS
json_object_iter_at
json_object_iter_next
json_object_iter_key
json_object_iter_key_len
json_object_iter_value
json_object_iter_set_new
json_object_key_to_iter


@ -21,11 +21,11 @@ extern "C" {
/* version */
#define JANSSON_MAJOR_VERSION 2
#define JANSSON_MINOR_VERSION 13
#define JANSSON_MICRO_VERSION 0
#define JANSSON_MINOR_VERSION 14
#define JANSSON_MICRO_VERSION 1
/* Micro version is omitted if it's 0 */
#define JANSSON_VERSION "2.13"
#define JANSSON_VERSION "2.14.1"
/* Version as a 3-byte hex number, e.g. 0x010201 == 1.2.1. Use this
for numeric comparisons, e.g. #if JANSSON_VERSION_HEX >= ... */
@ -188,9 +188,15 @@ void json_object_seed(size_t seed);
size_t json_object_size(const json_t *object);
json_t *json_object_get(const json_t *object, const char *key)
JANSSON_ATTRS((warn_unused_result));
json_t *json_object_getn(const json_t *object, const char *key, size_t key_len)
JANSSON_ATTRS((warn_unused_result));
int json_object_set_new(json_t *object, const char *key, json_t *value);
int json_object_setn_new(json_t *object, const char *key, size_t key_len, json_t *value);
int json_object_set_new_nocheck(json_t *object, const char *key, json_t *value);
int json_object_setn_new_nocheck(json_t *object, const char *key, size_t key_len,
json_t *value);
int json_object_del(json_t *object, const char *key);
int json_object_deln(json_t *object, const char *key, size_t key_len);
int json_object_clear(json_t *object);
int json_object_update(json_t *object, json_t *other);
int json_object_update_existing(json_t *object, json_t *other);
@ -201,6 +207,7 @@ void *json_object_iter_at(json_t *object, const char *key);
void *json_object_key_to_iter(const char *key);
void *json_object_iter_next(json_t *object, void *iter);
const char *json_object_iter_key(void *iter);
size_t json_object_iter_key_len(void *iter);
json_t *json_object_iter_value(void *iter);
int json_object_iter_set_new(json_t *object, void *iter, json_t *value);
@ -210,6 +217,14 @@ int json_object_iter_set_new(json_t *object, void *iter, json_t *value);
key = json_object_iter_key( \
json_object_iter_next(object, json_object_key_to_iter(key))))
#define json_object_keylen_foreach(object, key, key_len, value) \
for (key = json_object_iter_key(json_object_iter(object)), \
key_len = json_object_iter_key_len(json_object_key_to_iter(key)); \
key && (value = json_object_iter_value(json_object_key_to_iter(key))); \
key = json_object_iter_key( \
json_object_iter_next(object, json_object_key_to_iter(key))), \
key_len = json_object_iter_key_len(json_object_key_to_iter(key)))
#define json_object_foreach_safe(object, n, key, value) \
for (key = json_object_iter_key(json_object_iter(object)), \
n = json_object_iter_next(object, json_object_key_to_iter(key)); \
@ -217,6 +232,14 @@ int json_object_iter_set_new(json_t *object, void *iter, json_t *value);
key = json_object_iter_key(n), \
n = json_object_iter_next(object, json_object_key_to_iter(key)))
#define json_object_keylen_foreach_safe(object, n, key, key_len, value) \
for (key = json_object_iter_key(json_object_iter(object)), \
n = json_object_iter_next(object, json_object_key_to_iter(key)), \
key_len = json_object_iter_key_len(json_object_key_to_iter(key)); \
key && (value = json_object_iter_value(json_object_key_to_iter(key))); \
key = json_object_iter_key(n), key_len = json_object_iter_key_len(n), \
n = json_object_iter_next(object, json_object_key_to_iter(key)))
#define json_array_foreach(array, index, value) \
for (index = 0; \
index < json_array_size(array) && (value = json_array_get(array, index)); \
@ -226,11 +249,21 @@ static JSON_INLINE int json_object_set(json_t *object, const char *key, json_t *
return json_object_set_new(object, key, json_incref(value));
}
static JSON_INLINE int json_object_setn(json_t *object, const char *key, size_t key_len,
json_t *value) {
return json_object_setn_new(object, key, key_len, json_incref(value));
}
static JSON_INLINE int json_object_set_nocheck(json_t *object, const char *key,
json_t *value) {
return json_object_set_new_nocheck(object, key, json_incref(value));
}
static JSON_INLINE int json_object_setn_nocheck(json_t *object, const char *key,
size_t key_len, json_t *value) {
return json_object_setn_new_nocheck(object, key, key_len, json_incref(value));
}
static JSON_INLINE int json_object_iter_set(json_t *object, void *iter, json_t *value) {
return json_object_iter_set_new(object, iter, json_incref(value));
}
@ -346,14 +379,14 @@ json_t *json_load_callback(json_load_callback_t callback, void *data, size_t fla
/* encoding */
#define JSON_MAX_INDENT 0x1F
#define JSON_INDENT(n) ((n)&JSON_MAX_INDENT)
#define JSON_INDENT(n) ((n) & JSON_MAX_INDENT)
#define JSON_COMPACT 0x20
#define JSON_ENSURE_ASCII 0x40
#define JSON_SORT_KEYS 0x80
#define JSON_PRESERVE_ORDER 0x100
#define JSON_ENCODE_ANY 0x200
#define JSON_ESCAPE_SLASH 0x400
#define JSON_REAL_PRECISION(n) (((n)&0x1F) << 11)
#define JSON_REAL_PRECISION(n) (((n) & 0x1F) << 11)
#define JSON_EMBED 0x10000
typedef int (*json_dump_callback_t)(const char *buffer, size_t size, void *data);


@ -32,10 +32,6 @@
otherwise to 0. */
#define JSON_INTEGER_IS_LONG_LONG @json_have_long_long@
/* If locale.h and localeconv() are available, define to 1,
otherwise to 0. */
#define JSON_HAVE_LOCALECONV @json_have_localeconv@
/* If __atomic builtins are available they will be used to manage
reference counts of json_t. */
#define JSON_HAVE_ATOMIC_BUILTINS @json_have_atomic_builtins@


@ -91,8 +91,8 @@ char *jsonp_strndup(const char *str, size_t len) JANSSON_ATTRS((warn_unused_resu
/* Circular reference check*/
/* Space for "0x", double the sizeof a pointer for the hex and a terminator. */
#define LOOP_KEY_LEN (2 + (sizeof(json_t *) * 2) + 1)
int jsonp_loop_check(hashtable_t *parents, const json_t *json, char *key,
size_t key_size);
int jsonp_loop_check(hashtable_t *parents, const json_t *json, char *key, size_t key_size,
size_t *key_len_out);
/* Windows compatibility */
#if defined(_WIN32) || defined(WIN32)


@ -689,7 +689,7 @@ static json_t *parse_object(lex_t *lex, size_t flags, json_error_t *error) {
}
if (flags & JSON_REJECT_DUPLICATES) {
if (json_object_get(object, key)) {
if (json_object_getn(object, key, len)) {
jsonp_free(key);
error_set(error, lex, json_error_duplicate_key, "duplicate object key");
goto error;
@ -710,7 +710,7 @@ static json_t *parse_object(lex_t *lex, size_t flags, json_error_t *error) {
goto error;
}
if (json_object_set_new_nocheck(object, key, value)) {
if (json_object_setn_new_nocheck(object, key, len, value)) {
jsonp_free(key);
goto error;
}


@ -73,7 +73,7 @@ on 1 byte), but shoehorning those bytes into integers efficiently is messy.
# define HASH_BIG_ENDIAN 0
#endif
#define hashsize(n) ((uint32_t)1<<(n))
#define hashsize(n) ((size_t)1<<(n))
#define hashmask(n) (hashsize(n)-1)
#define rot(x,k) (((x)<<(k)) | ((x)>>(32-(k))))


@ -544,7 +544,7 @@ static int unpack_object(scanner_t *s, json_t *root, va_list *ap) {
if (unpack(s, value, ap))
goto out;
hashtable_set(&key_set, key, json_null());
hashtable_set(&key_set, key, strlen(key), json_null());
next_token(s);
}
@ -554,6 +554,7 @@ static int unpack_object(scanner_t *s, json_t *root, va_list *ap) {
if (root && strict == 1) {
/* We need to check that all non optional items have been parsed */
const char *key;
size_t key_len;
/* keys_res is 1 for uninitialized, 0 for success, -1 for error. */
int keys_res = 1;
strbuffer_t unrecognized_keys;
@ -561,8 +562,8 @@ static int unpack_object(scanner_t *s, json_t *root, va_list *ap) {
long unpacked = 0;
if (gotopt || json_object_size(root) != key_set.size) {
json_object_foreach(root, key, value) {
if (!hashtable_get(&key_set, key)) {
json_object_keylen_foreach(root, key, key_len, value) {
if (!hashtable_get(&key_set, key, key_len)) {
unpacked++;
/* Save unrecognized keys for the error message */
@ -574,7 +575,7 @@ static int unpack_object(scanner_t *s, json_t *root, va_list *ap) {
if (!keys_res)
keys_res =
strbuffer_append_bytes(&unrecognized_keys, key, strlen(key));
strbuffer_append_bytes(&unrecognized_keys, key, key_len);
}
}
}


@ -16,7 +16,7 @@
#define STRBUFFER_MIN_SIZE 16
#define STRBUFFER_FACTOR 2
#define STRBUFFER_SIZE_MAX ((size_t)-1)
#define STRBUFFER_SIZE_MAX ((size_t)(-1))
int strbuffer_init(strbuffer_t *strbuff) {
strbuff->size = STRBUFFER_MIN_SIZE;


@ -11,57 +11,42 @@
#include <jansson_private_config.h>
#endif
#if JSON_HAVE_LOCALECONV
#include <locale.h>
/*
- This code assumes that the decimal separator is exactly one
character.
- If setlocale() is called by another thread between the call to
localeconv() and the call to sprintf() or strtod(), the result may
be wrong. setlocale() is not thread-safe and should not be used
this way. Multi-threaded programs should use uselocale() instead.
get_decimal_point() and the call to sprintf() or strtod(), the
result may be wrong. setlocale() is not thread-safe and should
not be used this way. Multi-threaded programs should use
uselocale() instead.
*/
static char get_decimal_point() {
char buf[3];
sprintf(buf, "%#.0f", 1.0); // "1." in the current locale
return buf[1];
}
static void to_locale(strbuffer_t *strbuffer) {
const char *point;
char point;
char *pos;
point = localeconv()->decimal_point;
if (*point == '.') {
point = get_decimal_point();
if (point == '.') {
/* No conversion needed */
return;
}
pos = strchr(strbuffer->value, '.');
if (pos)
*pos = *point;
*pos = point;
}
static void from_locale(char *buffer) {
const char *point;
char *pos;
point = localeconv()->decimal_point;
if (*point == '.') {
/* No conversion needed */
return;
}
pos = strchr(buffer, *point);
if (pos)
*pos = '.';
}
#endif
int jsonp_strtod(strbuffer_t *strbuffer, double *out) {
double value;
char *end;
#if JSON_HAVE_LOCALECONV
to_locale(strbuffer);
#endif
errno = 0;
value = strtod(strbuffer->value, &end);
@ -76,6 +61,127 @@ int jsonp_strtod(strbuffer_t *strbuffer, double *out) {
return 0;
}
#if DTOA_ENABLED
/* see dtoa.c */
char *dtoa_r(double dd, int mode, int ndigits, int *decpt, int *sign, char **rve,
char *buf, size_t blen);
int jsonp_dtostr(char *buffer, size_t size, double value, int precision) {
/* adapted from `format_float_short()` in
* https://github.com/python/cpython/blob/2cf18a44303b6d84faa8ecffaecc427b53ae121e/Python/pystrtod.c#L969
*/
char digits[25];
char *digits_end;
int mode = precision == 0 ? 0 : 2;
int decpt, sign, exp_len, exp = 0, use_exp = 0;
int digits_len, vdigits_start, vdigits_end;
char *p;
if (dtoa_r(value, mode, precision, &decpt, &sign, &digits_end, digits, 25) == NULL) {
// digits is too short => should not happen
return -1;
}
digits_len = digits_end - digits;
if (decpt <= -4 || decpt > 16) {
use_exp = 1;
exp = decpt - 1;
decpt = 1;
}
vdigits_start = decpt <= 0 ? decpt - 1 : 0;
vdigits_end = digits_len;
if (!use_exp) {
/* decpt + 1 to add ".0" if value is an integer */
vdigits_end = vdigits_end > decpt ? vdigits_end : decpt + 1;
} else {
vdigits_end = vdigits_end > decpt ? vdigits_end : decpt;
}
if (
/* sign, decimal point and trailing 0 byte */
(size_t)(3 +
/* total digit count (including zero padding on both sides) */
(vdigits_end - vdigits_start) +
/* exponent "e+100", max 3 numerical digits */
(use_exp ? 5 : 0)) > size) {
/* buffer is too short */
return -1;
}
p = buffer;
if (sign == 1) {
*p++ = '-';
}
/* note that exactly one of the three 'if' conditions is true,
so we include exactly one decimal point */
/* Zero padding on left of digit string */
if (decpt <= 0) {
memset(p, '0', decpt - vdigits_start);
p += decpt - vdigits_start;
*p++ = '.';
memset(p, '0', 0 - decpt);
p += 0 - decpt;
} else {
memset(p, '0', 0 - vdigits_start);
p += 0 - vdigits_start;
}
/* Digits, with included decimal point */
if (0 < decpt && decpt <= digits_len) {
strncpy(p, digits, decpt - 0);
p += decpt - 0;
*p++ = '.';
strncpy(p, digits + decpt, digits_len - decpt);
p += digits_len - decpt;
} else {
strncpy(p, digits, digits_len);
p += digits_len;
}
/* And zeros on the right */
if (digits_len < decpt) {
memset(p, '0', decpt - digits_len);
p += decpt - digits_len;
*p++ = '.';
memset(p, '0', vdigits_end - decpt);
p += vdigits_end - decpt;
} else {
memset(p, '0', vdigits_end - digits_len);
p += vdigits_end - digits_len;
}
if (p[-1] == '.')
p--;
if (use_exp) {
*p++ = 'e';
exp_len = sprintf(p, "%d", exp);
p += exp_len;
}
*p = '\0';
return (int)(p - buffer);
}
#else /* DTOA_ENABLED == 0 */
static void from_locale(char *buffer) {
char point;
char *pos;
point = get_decimal_point();
if (point == '.') {
/* No conversion needed */
return;
}
pos = strchr(buffer, point);
if (pos)
*pos = '.';
}
int jsonp_dtostr(char *buffer, size_t size, double value, int precision) {
int ret;
char *start, *end;
@ -92,9 +198,7 @@ int jsonp_dtostr(char *buffer, size_t size, double value, int precision) {
if (length >= size)
return -1;
#if JSON_HAVE_LOCALECONV
from_locale(buffer);
#endif
/* Make sure there's a dot or 'e' in the output. Otherwise
a real is converted to an integer when decoding */
@ -130,3 +234,4 @@ int jsonp_dtostr(char *buffer, size_t size, double value, int precision) {
return (int)length;
}
#endif


@ -44,13 +44,17 @@ static JSON_INLINE void json_init(json_t *json, json_type type) {
json->refcount = 1;
}
int jsonp_loop_check(hashtable_t *parents, const json_t *json, char *key,
size_t key_size) {
snprintf(key, key_size, "%p", json);
if (hashtable_get(parents, key))
int jsonp_loop_check(hashtable_t *parents, const json_t *json, char *key, size_t key_size,
size_t *key_len_out) {
size_t key_len = snprintf(key, key_size, "%p", json);
if (key_len_out)
*key_len_out = key_len;
if (hashtable_get(parents, key, key_len))
return -1;
return hashtable_set(parents, key, json_null());
return hashtable_set(parents, key, key_len, json_null());
}
/*** object ***/
@ -93,16 +97,32 @@ size_t json_object_size(const json_t *json) {
}
json_t *json_object_get(const json_t *json, const char *key) {
if (!key)
return NULL;
return json_object_getn(json, key, strlen(key));
}
json_t *json_object_getn(const json_t *json, const char *key, size_t key_len) {
json_object_t *object;
if (!key || !json_is_object(json))
return NULL;
object = json_to_object(json);
return hashtable_get(&object->hashtable, key);
return hashtable_get(&object->hashtable, key, key_len);
}
int json_object_set_new_nocheck(json_t *json, const char *key, json_t *value) {
if (!key) {
json_decref(value);
return -1;
}
return json_object_setn_new_nocheck(json, key, strlen(key), value);
}
int json_object_setn_new_nocheck(json_t *json, const char *key, size_t key_len,
json_t *value) {
json_object_t *object;
if (!value)
@ -114,7 +134,7 @@ int json_object_set_new_nocheck(json_t *json, const char *key, json_t *value) {
}
object = json_to_object(json);
if (hashtable_set(&object->hashtable, key, value)) {
if (hashtable_set(&object->hashtable, key, key_len, value)) {
json_decref(value);
return -1;
}
@ -123,22 +143,38 @@ int json_object_set_new_nocheck(json_t *json, const char *key, json_t *value) {
}
int json_object_set_new(json_t *json, const char *key, json_t *value) {
if (!key || !utf8_check_string(key, strlen(key))) {
if (!key) {
json_decref(value);
return -1;
}
return json_object_set_new_nocheck(json, key, value);
return json_object_setn_new(json, key, strlen(key), value);
}
int json_object_setn_new(json_t *json, const char *key, size_t key_len, json_t *value) {
if (!key || !utf8_check_string(key, key_len)) {
json_decref(value);
return -1;
}
return json_object_setn_new_nocheck(json, key, key_len, value);
}
int json_object_del(json_t *json, const char *key) {
if (!key)
return -1;
return json_object_deln(json, key, strlen(key));
}
int json_object_deln(json_t *json, const char *key, size_t key_len) {
json_object_t *object;
if (!key || !json_is_object(json))
return -1;
object = json_to_object(json);
return hashtable_del(&object->hashtable, key);
return hashtable_del(&object->hashtable, key, key_len);
}
int json_object_clear(json_t *json) {
@ -155,13 +191,14 @@ int json_object_clear(json_t *json) {
int json_object_update(json_t *object, json_t *other) {
const char *key;
size_t key_len;
json_t *value;
if (!json_is_object(object) || !json_is_object(other))
return -1;
json_object_foreach(other, key, value) {
if (json_object_set_nocheck(object, key, value))
json_object_keylen_foreach(other, key, key_len, value) {
if (json_object_setn_nocheck(object, key, key_len, value))
return -1;
}
@ -170,14 +207,15 @@ int json_object_update(json_t *object, json_t *other) {
int json_object_update_existing(json_t *object, json_t *other) {
const char *key;
size_t key_len;
json_t *value;
if (!json_is_object(object) || !json_is_object(other))
return -1;
json_object_foreach(other, key, value) {
if (json_object_get(object, key))
json_object_set_nocheck(object, key, value);
json_object_keylen_foreach(other, key, key_len, value) {
if (json_object_getn(object, key, key_len))
json_object_setn_nocheck(object, key, key_len, value);
}
return 0;
@ -185,14 +223,15 @@ int json_object_update_existing(json_t *object, json_t *other) {
int json_object_update_missing(json_t *object, json_t *other) {
const char *key;
size_t key_len;
json_t *value;
if (!json_is_object(object) || !json_is_object(other))
return -1;
json_object_foreach(other, key, value) {
if (!json_object_get(object, key))
json_object_set_nocheck(object, key, value);
json_object_keylen_foreach(other, key, key_len, value) {
if (!json_object_getn(object, key, key_len))
json_object_setn_nocheck(object, key, key_len, value);
}
return 0;
@ -200,18 +239,20 @@ int json_object_update_missing(json_t *object, json_t *other) {
int do_object_update_recursive(json_t *object, json_t *other, hashtable_t *parents) {
const char *key;
size_t key_len;
json_t *value;
char loop_key[LOOP_KEY_LEN];
int res = 0;
size_t loop_key_len;
if (!json_is_object(object) || !json_is_object(other))
return -1;
if (jsonp_loop_check(parents, other, loop_key, sizeof(loop_key)))
if (jsonp_loop_check(parents, other, loop_key, sizeof(loop_key), &loop_key_len))
return -1;
json_object_foreach(other, key, value) {
json_t *v = json_object_get(object, key);
json_object_keylen_foreach(other, key, key_len, value) {
json_t *v = json_object_getn(object, key, key_len);
if (json_is_object(v) && json_is_object(value)) {
if (do_object_update_recursive(v, value, parents)) {
@ -219,14 +260,14 @@ int do_object_update_recursive(json_t *object, json_t *other, hashtable_t *paren
break;
}
} else {
if (json_object_set_nocheck(object, key, value)) {
if (json_object_setn_nocheck(object, key, key_len, value)) {
res = -1;
break;
}
}
}
hashtable_del(parents, loop_key);
hashtable_del(parents, loop_key, loop_key_len);
return res;
}
@ -260,7 +301,7 @@ void *json_object_iter_at(json_t *json, const char *key) {
return NULL;
object = json_to_object(json);
return hashtable_iter_at(&object->hashtable, key);
return hashtable_iter_at(&object->hashtable, key, strlen(key));
}
void *json_object_iter_next(json_t *json, void *iter) {
@ -280,6 +321,13 @@ const char *json_object_iter_key(void *iter) {
return hashtable_iter_key(iter);
}
size_t json_object_iter_key_len(void *iter) {
if (!iter)
return 0;
return hashtable_iter_key_len(iter);
}
json_t *json_object_iter_value(void *iter) {
if (!iter)
return NULL;
@ -306,13 +354,14 @@ void *json_object_key_to_iter(const char *key) {
static int json_object_equal(const json_t *object1, const json_t *object2) {
const char *key;
size_t key_len;
const json_t *value1, *value2;
if (json_object_size(object1) != json_object_size(object2))
return 0;
json_object_foreach((json_t *)object1, key, value1) {
value2 = json_object_get(object2, key);
json_object_keylen_foreach((json_t *)object1, key, key_len, value1) {
value2 = json_object_getn(object2, key, key_len);
if (!json_equal(value1, value2))
return 0;
@ -325,13 +374,15 @@ static json_t *json_object_copy(json_t *object) {
json_t *result;
const char *key;
size_t key_len;
json_t *value;
result = json_object();
if (!result)
return NULL;
json_object_foreach(object, key, value) json_object_set_nocheck(result, key, value);
json_object_keylen_foreach(object, key, key_len, value)
json_object_setn_nocheck(result, key, key_len, value);
return result;
}
@ -340,8 +391,9 @@ static json_t *json_object_deep_copy(const json_t *object, hashtable_t *parents)
json_t *result;
void *iter;
char loop_key[LOOP_KEY_LEN];
size_t loop_key_len;
if (jsonp_loop_check(parents, object, loop_key, sizeof(loop_key)))
if (jsonp_loop_check(parents, object, loop_key, sizeof(loop_key), &loop_key_len))
return NULL;
result = json_object();
@ -353,11 +405,14 @@ static json_t *json_object_deep_copy(const json_t *object, hashtable_t *parents)
iter = json_object_iter((json_t *)object);
while (iter) {
const char *key;
size_t key_len;
const json_t *value;
key = json_object_iter_key(iter);
key_len = json_object_iter_key_len(iter);
value = json_object_iter_value(iter);
if (json_object_set_new_nocheck(result, key, do_deep_copy(value, parents))) {
if (json_object_setn_new_nocheck(result, key, key_len,
do_deep_copy(value, parents))) {
json_decref(result);
result = NULL;
break;
@ -366,7 +421,7 @@ static json_t *json_object_deep_copy(const json_t *object, hashtable_t *parents)
}
out:
hashtable_del(parents, loop_key);
hashtable_del(parents, loop_key, loop_key_len);
return result;
}
@ -633,8 +688,9 @@ static json_t *json_array_deep_copy(const json_t *array, hashtable_t *parents) {
json_t *result;
size_t i;
char loop_key[LOOP_KEY_LEN];
size_t loop_key_len;
if (jsonp_loop_check(parents, array, loop_key, sizeof(loop_key)))
if (jsonp_loop_check(parents, array, loop_key, sizeof(loop_key), &loop_key_len))
return NULL;
result = json_array();
@ -651,7 +707,7 @@ static json_t *json_array_deep_copy(const json_t *array, hashtable_t *parents) {
}
out:
hashtable_del(parents, loop_key);
hashtable_del(parents, loop_key, loop_key_len);
return result;
}
@ -797,16 +853,18 @@ json_t *json_vsprintf(const char *fmt, va_list ap) {
va_copy(aq, ap);
length = vsnprintf(NULL, 0, fmt, ap);
if (length < 0)
goto out;
if (length == 0) {
json = json_string("");
goto out;
}
buf = jsonp_malloc(length + 1);
buf = jsonp_malloc((size_t)length + 1);
if (!buf)
goto out;
vsnprintf(buf, length + 1, fmt, aq);
vsnprintf(buf, (size_t)length + 1, fmt, aq);
if (!utf8_check_string(buf, length)) {
jsonp_free(buf);
goto out;
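
The value.c hunks above add length-aware counterparts to the key-based object API (json_object_getn, json_object_setn_new, json_object_setn_new_nocheck, json_object_deln, json_object_iter_key_len, and the json_object_keylen_foreach iterator), so object keys no longer need to be NUL-terminated C strings. A minimal usage sketch, based only on the signatures visible in this diff; the program, buffer, and key names below are illustrative and not part of the change:

#include <jansson.h>
#include <stdio.h>

int main(void) {
    /* Use the first 12 bytes of buf ("content-type") as a key without
       copying it or NUL-terminating it at that position. */
    const char buf[] = "content-type: application/json";
    const size_t key_len = 12;
    json_t *obj = json_object();
    const char *key;
    size_t len;
    json_t *value;

    /* Set, get, and delete by (pointer, length) instead of a C string. */
    if (json_object_setn_new(obj, buf, key_len, json_string("header")) != 0)
        return 1;

    value = json_object_getn(obj, buf, key_len);
    if (value)
        printf("value: %s\n", json_string_value(value));

    /* The keylen iterator also reports each stored key's length. */
    json_object_keylen_foreach(obj, key, len, value) {
        printf("key of %zu bytes\n", len);
    }

    json_object_deln(obj, buf, key_len);
    json_decref(obj);
    return 0;
}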

test/.gitignore vendored

@ -7,6 +7,7 @@ suites/api/test_cpp
suites/api/test_dump
suites/api/test_dump_callback
suites/api/test_equal
suites/api/test_fixed_size
suites/api/test_load
suites/api/test_load_callback
suites/api/test_loadb


@ -35,7 +35,6 @@ struct config {
int ensure_ascii;
int sort_keys;
int strip;
int use_env;
int have_hashseed;
int hashseed;
int precision;
@ -63,7 +62,7 @@ static const char *strip(char *str) {
}
static char *loadfile(FILE *file) {
long fsize, ret;
size_t fsize, ret;
char *buf;
fseek(file, 0, SEEK_END);
@ -81,11 +80,10 @@ static char *loadfile(FILE *file) {
static void read_conf(FILE *conffile) {
char *buffer, *line, *val;
conf.have_hashseed = 0;
buffer = loadfile(conffile);
for (line = strtok(buffer, "\r\n"); line; line = strtok(NULL, "\r\n")) {
if (!strncmp(line, "export ", 7))
continue;
val = strchr(line, '=');
if (!val) {
printf("invalid configuration line\n");
@ -110,8 +108,6 @@ static void read_conf(FILE *conffile) {
if (!strcmp(line, "HASHSEED")) {
conf.have_hashseed = 1;
conf.hashseed = atoi(val);
} else {
conf.have_hashseed = 0;
}
}
@ -138,10 +134,16 @@ static int cmpfile(const char *str, const char *path, const char *fname) {
}
buffer = loadfile(file);
if (strcmp(buffer, str) != 0)
if (strcmp(buffer, str) != 0) {
fprintf(stderr, "=== Expected %s ===\n", fname);
fprintf(stderr, "%s\n", buffer);
fprintf(stderr, "=== Actual %s ===\n", fname);
fprintf(stderr, "%s\n", str);
ret = 1;
else
} else {
ret = 0;
}
free(buffer);
fclose(file);
@ -206,8 +208,9 @@ int use_conf(char *test_path) {
buffer = loadfile(infile);
json = json_loads(strip(buffer), 0, &error);
free(buffer);
} else
} else {
json = json_loadf(infile, 0, &error);
}
fclose(infile);
@ -227,108 +230,6 @@ int use_conf(char *test_path) {
return ret;
}
static int getenv_int(const char *name) {
char *value, *end;
long result;
value = getenv(name);
if (!value)
return 0;
result = strtol(value, &end, 10);
if (*end != '\0')
return 0;
return (int)result;
}
int use_env() {
int indent, precision;
size_t flags = 0;
json_t *json;
json_error_t error;
#ifdef _WIN32
/* On Windows, set stdout and stderr to binary mode to avoid
outputting DOS line terminators */
_setmode(_fileno(stdout), _O_BINARY);
_setmode(_fileno(stderr), _O_BINARY);
#endif
indent = getenv_int("JSON_INDENT");
if (indent < 0 || indent > 31) {
fprintf(stderr, "invalid value for JSON_INDENT: %d\n", indent);
return 2;
}
if (indent > 0)
flags |= JSON_INDENT(indent);
if (getenv_int("JSON_COMPACT") > 0)
flags |= JSON_COMPACT;
if (getenv_int("JSON_ENSURE_ASCII"))
flags |= JSON_ENSURE_ASCII;
if (getenv_int("JSON_PRESERVE_ORDER"))
flags |= JSON_PRESERVE_ORDER;
if (getenv_int("JSON_SORT_KEYS"))
flags |= JSON_SORT_KEYS;
precision = getenv_int("JSON_REAL_PRECISION");
if (precision < 0 || precision > 31) {
fprintf(stderr, "invalid value for JSON_REAL_PRECISION: %d\n", precision);
return 2;
}
if (getenv("HASHSEED"))
json_object_seed(getenv_int("HASHSEED"));
if (precision > 0)
flags |= JSON_REAL_PRECISION(precision);
if (getenv_int("STRIP")) {
/* Load to memory, strip leading and trailing whitespace */
size_t size = 0, used = 0;
char *buffer = NULL, *buf_ck = NULL;
while (1) {
size_t count;
size = (size == 0 ? 128 : size * 2);
buf_ck = realloc(buffer, size);
if (!buf_ck) {
fprintf(stderr, "Unable to allocate %d bytes\n", (int)size);
free(buffer);
return 1;
}
buffer = buf_ck;
count = fread(buffer + used, 1, size - used, stdin);
if (count < size - used) {
buffer[used + count] = '\0';
break;
}
used += count;
}
json = json_loads(strip(buffer), 0, &error);
free(buffer);
} else
json = json_loadf(stdin, 0, &error);
if (!json) {
fprintf(stderr, "%d %d %d\n%s\n", error.line, error.column, error.position,
error.text);
return 1;
}
json_dumpf(json, stdout, flags);
json_decref(json);
return 0;
}
int main(int argc, char *argv[]) {
int i;
char *test_path = NULL;
@ -344,23 +245,17 @@ int main(int argc, char *argv[]) {
for (i = 1; i < argc; i++) {
if (!strcmp(argv[i], "--strip"))
conf.strip = 1;
else if (!strcmp(argv[i], "--env"))
conf.use_env = 1;
else
test_path = argv[i];
}
if (conf.use_env)
return use_env();
else {
if (!test_path)
goto usage;
return use_conf(test_path);
if (!test_path) {
goto usage;
}
return use_conf(test_path);
usage:
fprintf(stderr, "argc =%d\n", argc);
fprintf(stderr, "usage: %s [--strip] [--env] test_dir\n", argv[0]);
fprintf(stderr, "usage: %s [--strip] test_dir\n", argv[0]);
return 2;
}


@ -1,38 +0,0 @@
#!/bin/bash
set -ex
PROJECT_NAME=jansson
# Clone the oss-fuzz repository
git clone https://github.com/google/oss-fuzz.git /tmp/ossfuzz
if [[ ! -d /tmp/ossfuzz/projects/${PROJECT_NAME} ]]
then
echo "Could not find the ${PROJECT_NAME} project in ossfuzz"
# Exit with a success code while the jansson project is not expected to exist
# on oss-fuzz.
exit 0
fi
# Work out which repo to clone from, inside Docker
if [[ ${TRAVIS_PULL_REQUEST} != "false" ]]
then
# Pull-request branch
REPO=${TRAVIS_PULL_REQUEST_SLUG}
BRANCH=${TRAVIS_PULL_REQUEST_BRANCH}
else
# Push build.
REPO=${TRAVIS_REPO_SLUG}
BRANCH=${TRAVIS_BRANCH}
fi
# Modify the oss-fuzz Dockerfile so that we're checking out the current branch on travis.
sed -i "s@https://github.com/akheron/jansson.git@-b ${BRANCH} https://github.com/${REPO}.git@" /tmp/ossfuzz/projects/${PROJECT_NAME}/Dockerfile
# Try and build the fuzzers
pushd /tmp/ossfuzz
python infra/helper.py build_image --pull ${PROJECT_NAME}
python infra/helper.py build_fuzzers ${PROJECT_NAME}
popd


@ -7,9 +7,10 @@ check_PROGRAMS = \
test_dump \
test_dump_callback \
test_equal \
test_fixed_size \
test_load \
test_loadb \
test_load_callback \
test_loadb \
test_memory_funcs \
test_number \
test_object \
@ -24,6 +25,7 @@ test_chaos_SOURCES = test_chaos.c util.h
test_copy_SOURCES = test_copy.c util.h
test_dump_SOURCES = test_dump.c util.h
test_dump_callback_SOURCES = test_dump_callback.c util.h
test_fixed_size_SOURCES = test_fixed_size.c util.h
test_load_SOURCES = test_load.c util.h
test_loadb_SOURCES = test_loadb.c util.h
test_memory_funcs_SOURCES = test_memory_funcs.c util.h


@ -7,7 +7,7 @@ SOFILE="../src/.libs/libjansson.so"
# The list of symbols, which the shared object should export, is read
# from the def file, which is used in Windows builds
grep 'json_' $top_srcdir/src/jansson.def \
grep 'json_\|jansson_' $top_srcdir/src/jansson.def \
| sed -e 's/ //g' \
| sort \
>$test_log/exports
@ -15,7 +15,7 @@ grep 'json_' $top_srcdir/src/jansson.def \
nm -D $SOFILE >/dev/null >$test_log/symbols 2>/dev/null \
|| exit 77 # Skip if "nm -D" doesn't seem to work
grep ' [DT] ' $test_log/symbols | cut -d' ' -f3 | grep -v '^_' | sort >$test_log/output
grep ' [DT] ' $test_log/symbols | cut -d' ' -f3 | grep -v '^_' | sed 's/@@libjansson.*//' | sort >$test_log/output
if ! cmp -s $test_log/exports $test_log/output; then
diff -u $test_log/exports $test_log/output >&2


@ -0,0 +1,228 @@
/*
* Copyright (c) 2020 Petri Lehtinen <petri@digip.org>
*
* Jansson is free software; you can redistribute it and/or modify
* it under the terms of the MIT license. See LICENSE for details.
*/
#include "util.h"
#include <jansson.h>
#include <string.h>
static void test_keylen_iterator(json_t *object) {
const char key1[] = {'t', 'e', 's', 't', '1'};
const char key2[] = {'t', 'e', 's', 't'};
const char key3[] = {'t', 'e', 's', '\0', 't'};
const char key4[] = {'t', 'e', 's', 't', '\0'};
const char *reference_keys[] = {key1, key2, key3, key4};
const size_t reference_keys_len[] = {sizeof(key1), sizeof(key2), sizeof(key3),
sizeof(key4)};
size_t index = 0;
json_t *value;
const char *key;
size_t keylen;
json_object_keylen_foreach(object, key, keylen, value) {
if (keylen != reference_keys_len[index])
fail("invalid key len in iterator");
if (memcmp(key, reference_keys[index], reference_keys_len[index]) != 0)
fail("invalid key in iterator");
index++;
}
}
static void test_keylen(void) {
json_t *obj = json_object();
const char key[] = {'t', 'e', 's', 't', '1'};
const char key2[] = {'t', 'e', 's', 't'};
const char key3[] = {'t', 'e', 's', '\0', 't'};
const char key4[] = {'t', 'e', 's', 't', '\0'};
if (json_object_size(obj) != 0)
fail("incorrect json");
json_object_set_new_nocheck(obj, "test1", json_true());
if (json_object_size(obj) != 1)
fail("incorrect json");
if (json_object_getn(obj, key, sizeof(key)) != json_true())
fail("json_object_getn failed");
if (json_object_getn(obj, key2, sizeof(key2)) != NULL)
fail("false positive json_object_getn by key2");
if (json_object_setn_nocheck(obj, key2, sizeof(key2), json_false()))
fail("json_object_setn_nocheck for key2 failed");
if (json_object_size(obj) != 2)
fail("incorrect json");
if (json_object_get(obj, "test") != json_false())
fail("json_object_setn_nocheck for key2 failed");
if (json_object_getn(obj, key2, sizeof(key2)) != json_false())
fail("json_object_getn by key 2 failed");
if (json_object_getn(obj, key3, sizeof(key3)) != NULL)
fail("false positive json_object_getn by key3");
if (json_object_setn_nocheck(obj, key3, sizeof(key3), json_false()))
fail("json_object_setn_nocheck for key3 failed");
if (json_object_size(obj) != 3)
fail("incorrect json");
if (json_object_getn(obj, key3, sizeof(key3)) != json_false())
fail("json_object_getn by key 3 failed");
if (json_object_getn(obj, key4, sizeof(key4)) != NULL)
fail("false positive json_object_getn by key3");
if (json_object_setn_nocheck(obj, key4, sizeof(key4), json_false()))
fail("json_object_setn_nocheck for key3 failed");
if (json_object_size(obj) != 4)
fail("incorrect json");
test_keylen_iterator(obj);
if (json_object_getn(obj, key4, sizeof(key4)) != json_false())
fail("json_object_getn by key 3 failed");
if (json_object_size(obj) != 4)
fail("incorrect json");
if (json_object_deln(obj, key4, sizeof(key4)))
fail("json_object_deln failed");
if (json_object_getn(obj, key4, sizeof(key4)) != NULL)
fail("json_object_deln failed");
if (json_object_size(obj) != 3)
fail("incorrect json");
if (json_object_deln(obj, key3, sizeof(key3)))
fail("json_object_deln failed");
if (json_object_getn(obj, key3, sizeof(key3)) != NULL)
fail("json_object_deln failed");
if (json_object_size(obj) != 2)
fail("incorrect json");
if (json_object_deln(obj, key2, sizeof(key2)))
fail("json_object_deln failed");
if (json_object_getn(obj, key2, sizeof(key2)) != NULL)
fail("json_object_deln failed");
if (json_object_size(obj) != 1)
fail("incorrect json");
if (json_object_deln(obj, key, sizeof(key)))
fail("json_object_deln failed");
if (json_object_getn(obj, key, sizeof(key)) != NULL)
fail("json_object_deln failed");
if (json_object_size(obj) != 0)
fail("incorrect json");
json_decref(obj);
}
static void test_invalid_keylen(void) {
json_t *obj = json_object();
json_t *empty_obj = json_object();
const char key[] = {'t', 'e', 's', 't', '1'};
json_object_set_new_nocheck(obj, "test1", json_true());
if (json_object_getn(NULL, key, sizeof(key)) != NULL)
fail("json_object_getn on NULL object failed");
if (json_object_getn(obj, NULL, sizeof(key)) != NULL)
fail("json_object_getn with NULL key failed");
if (json_object_getn(obj, key, 0) != NULL)
fail("json_object_getn with zero key length failed");
if (!json_object_setn_new(obj, NULL, sizeof(key), json_true()))
fail("json_object_setn_new with NULL key failed");
if (!json_object_setn_new_nocheck(obj, NULL, sizeof(key), json_true()))
fail("json_object_setn_new_nocheck with NULL key failed");
if (!json_object_del(obj, NULL))
fail("json_object_del with NULL failed");
if (!json_object_deln(empty_obj, key, sizeof(key)))
fail("json_object_deln with empty object failed");
if (!json_object_deln(obj, key, sizeof(key) - 1))
fail("json_object_deln with incomplete key failed");
json_decref(obj);
json_decref(empty_obj);
}
static void test_binary_keys(void) {
json_t *obj = json_object();
int key1 = 0;
int key2 = 1;
json_object_setn_nocheck(obj, (const char *)&key1, sizeof(key1), json_true());
json_object_setn_nocheck(obj, (const char *)&key2, sizeof(key2), json_true());
if (!json_is_true(json_object_getn(obj, (const char *)&key1, sizeof(key1))))
fail("cannot get integer key1");
if (!json_is_true(json_object_getn(obj, (const char *)&key1, sizeof(key2))))
fail("cannot get integer key2");
if (json_object_size(obj) != 2)
fail("binary object size missmatch");
if (json_object_deln(obj, (const char *)&key1, sizeof(key1)))
fail("cannot del integer key1");
if (json_object_size(obj) != 1)
fail("binary object size missmatch");
if (json_object_deln(obj, (const char *)&key2, sizeof(key2)))
fail("cannot del integer key2");
if (json_object_size(obj) != 0)
fail("binary object size missmatch");
json_decref(obj);
}
static void test_dump_order(void) {
json_t *obj = json_object();
char key1[] = {'k', '\0', '-', '2'};
char key2[] = {'k', '\0', '-', '1'};
const char expected_sorted_str[] =
"{\"k\\u0000-1\": \"first\", \"k\\u0000-2\": \"second\"}";
const char expected_nonsorted_str[] =
"{\"k\\u0000-2\": \"second\", \"k\\u0000-1\": \"first\"}";
char *out;
json_object_setn_new_nocheck(obj, key1, sizeof(key1), json_string("second"));
json_object_setn_new_nocheck(obj, key2, sizeof(key2), json_string("first"));
out = malloc(512);
json_dumpb(obj, out, 512, 0);
if (memcmp(expected_nonsorted_str, out, sizeof(expected_nonsorted_str) - 1) != 0)
fail("preserve order failed");
json_dumpb(obj, out, 512, JSON_SORT_KEYS);
if (memcmp(expected_sorted_str, out, sizeof(expected_sorted_str) - 1) != 0)
fail("utf-8 sort failed");
free(out);
json_decref(obj);
}
static void run_tests() {
test_keylen();
test_invalid_keylen();
test_binary_keys();
test_dump_order();
}
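
The test above relies on json_dumpb escaping an embedded NUL in a key as \u0000, as the expected strings in test_dump_order show. A standalone sketch of that behavior, assuming only the json_object_setn_new_nocheck and json_dumpb calls already exercised by the test; the buffer size and output handling are illustrative:

#include <jansson.h>
#include <stdio.h>

int main(void) {
    /* A 4-byte key with an embedded NUL, as in test_dump_order above. */
    const char key[] = {'k', '\0', '-', '1'};
    json_t *obj = json_object();
    char out[64];
    size_t len;

    json_object_setn_new_nocheck(obj, key, sizeof(key), json_string("first"));

    /* json_dumpb writes up to the given size and returns the full output length. */
    len = json_dumpb(obj, out, sizeof(out) - 1, 0);
    if (len < sizeof(out)) {
        out[len] = '\0';
        printf("%s\n", out); /* {"k\u0000-1": "first"} */
    }

    json_decref(obj);
    return 0;
}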


@ -1,2 +1 @@
JSON_COMPACT=1
export JSON_COMPACT


@ -1,3 +1,2 @@
JSON_COMPACT=1
HASHSEED=1
export JSON_COMPACT HASHSEED


@ -1,2 +1 @@
JSON_ENSURE_ASCII=1
export JSON_ENSURE_ASCII


@ -1,2 +1 @@
JSON_INDENT=4
export JSON_INDENT


@ -1,3 +1,2 @@
JSON_INDENT=4
JSON_COMPACT=1
export JSON_INDENT JSON_COMPACT


@ -1,4 +1,3 @@
JSON_INDENT=4
JSON_COMPACT=1
HASHSEED=1
export JSON_INDENT JSON_COMPACT HASHSEED


@ -1,3 +1,2 @@
JSON_INDENT=4
HASHSEED=1
export JSON_INDENT HASHSEED


@ -1,2 +1 @@
HASHSEED=1
export HASHSEED


@ -1,2 +1 @@
JSON_PRESERVE_ORDER=1
export JSON_PRESERVE_ORDER


@ -1,2 +1 @@
JSON_REAL_PRECISION=4
export JSON_REAL_PRECISION


@ -1 +1 @@
[1.23456789, 1.0, 1.0000000000000002]
[1.23456789, 1.0, 1.0000000000000002, 1.23456e99, 1.23456e-99, 0.0000000000012345]


@ -1 +1 @@
[1.235, 1.0, 1.0]
[1.235, 1.0, 1.0, 1.235e99, 1.235e-99, 1.235e-12]


@ -10,23 +10,13 @@ is_test() {
}
run_test() {
(
if [ -f $test_path/env ]; then
. $test_path/env
fi
$json_process --env <$test_path/input >$test_log/stdout 2>$test_log/stderr
)
$json_process $test_path >$test_log/stdout 2>$test_log/stderr || return 1
valgrind_check $test_log/stderr || return 1
cmp -s $test_path/output $test_log/stdout
}
show_error() {
valgrind_show_error && return
echo "EXPECTED OUTPUT:"
nl -bn $test_path/output
echo "ACTUAL OUTPUT:"
nl -bn $test_log/stdout
cat $test_log/stderr
}
. $top_srcdir/test/scripts/run-tests.sh


@ -1,2 +1 @@
JSON_SORT_KEYS=1
export JSON_SORT_KEYS


@ -10,18 +10,13 @@ is_test() {
}
run_test() {
$json_process --env <$test_path/input >$test_log/stdout 2>$test_log/stderr
valgrind_check $test_log/stderr || return 1
cmp -s $test_path/error $test_log/stderr
$json_process $test_path >$test_log/stdout 2>$test_log/stderr || return 1
valgrind_check $test_log/stderr || return 1
}
show_error() {
valgrind_show_error && return
echo "EXPECTED ERROR:"
nl -bn $test_path/error
echo "ACTUAL ERROR:"
nl -bn $test_log/stderr
cat $test_log/stderr
}
. $top_srcdir/test/scripts/run-tests.sh


@ -13,24 +13,18 @@ do_run() {
variant=$1
s=".$1"
strip=0
strip=""
if [ "$variant" = "strip" ]; then
# This test should not be stripped
[ -f $test_path/nostrip ] && return
strip=1
strip="--strip"
fi
STRIP=$strip $json_process --env \
<$test_path/input >$test_log/stdout$s 2>$test_log/stderr$s
valgrind_check $test_log/stderr$s || return 1
ref=error
[ -f $test_path/error$s ] && ref=error$s
if ! cmp -s $test_path/$ref $test_log/stderr$s; then
echo $variant > $test_log/variant
if ! $json_process $strip $test_path >$test_log/stdout$s 2>$test_log/stderr$s; then
echo $variant >$test_log/variant
return 1
fi
valgrind_check $test_log/stderr$s || return 1
}
run_test() {
@ -44,14 +38,7 @@ show_error() {
s=".$variant"
echo "VARIANT: $variant"
echo "EXPECTED ERROR:"
ref=error
[ -f $test_path/error$s ] && ref=error$s
nl -bn $test_path/$ref
echo "ACTUAL ERROR:"
nl -bn $test_log/stderr$s
cat $test_log/stderr$s
}
. $top_srcdir/test/scripts/run-tests.sh


@ -0,0 +1 @@
[1.23e47, 0.1, 0.3, 9.99]


@ -0,0 +1 @@
[1.2299999999999999e47, 0.10000000000000001, 0.29999999999999999, 9.9900000000000002]


@ -1 +1 @@
[123e45]
[1.23e47, 0.1, 0.3, 9.99]


@ -1 +1 @@
[1.2299999999999999e47]
[1.23e47, 0.1, 0.3, 9.99]


@ -5,31 +5,33 @@
# Jansson is free software; you can redistribute it and/or modify
# it under the terms of the MIT license. See LICENSE for details.
JSON_SORT_KEYS=1
export JSON_SORT_KEYS
dtoa_enabled() {
grep -q "DTOA_ENABLED 1" $top_builddir/jansson_private_config.h
}
is_test() {
test -d $test_path
}
do_run() {
if [ -f $test_path/skip_unless_dtoa ]; then
dtoa_enabled || return 77
fi
if [ -f $test_path/skip_if_dtoa ]; then
dtoa_enabled && return 77
fi
variant=$1
s=".$1"
strip=0
[ "$variant" = "strip" ] && strip=1
strip=""
[ "$variant" = "strip" ] && strip="--strip"
STRIP=$strip $json_process --env \
<$test_path/input >$test_log/stdout$s 2>$test_log/stderr$s
valgrind_check $test_log/stderr$s || return 1
ref=output
[ -f $test_path/output$s ] && ref=output$s
if ! cmp -s $test_path/$ref $test_log/stdout$s; then
echo $variant > $test_log/variant
if ! $json_process $strip $test_path >$test_log/stdout$s 2>$test_log/stderr$s; then
echo $variant >$test_log/variant
return 1
fi
valgrind_check $test_log/stderr$s || return 1
}
run_test() {
@ -43,14 +45,7 @@ show_error() {
s=".$variant"
echo "VARIANT: $variant"
echo "EXPECTED OUTPUT:"
ref=output
[ -f $test_path/output$s ] && ref=output$s
nl -bn $test_path/$ref
echo "ACTUAL OUTPUT:"
nl -bn $test_log/stdout$s
cat $test_log/stderr$s
}
. $top_srcdir/test/scripts/run-tests.sh