mirror of
https://github.com/akheron/jansson.git
synced 2025-04-15 01:18:12 +00:00
Compare commits
1021 commits
Author | SHA1 | Date | |
---|---|---|---|
|
96d160df90 | ||
|
aef13f87f1 | ||
|
c16ac732e4 | ||
|
05a10aa8af | ||
|
4d7ac97b89 | ||
|
23905f372c | ||
|
ed5cae4ed0 | ||
|
0f9c18dd12 | ||
|
61fc3d0e28 | ||
|
cce8caba26 | ||
|
50953fb1fa | ||
|
c780171cf3 | ||
|
2297a2e320 | ||
|
f5b3ab323c | ||
|
9d3abab610 | ||
|
9699de8600 | ||
|
33a6c95d56 | ||
|
ed06f65412 | ||
|
8b975abca1 | ||
|
8660da0f7c | ||
|
dcbeb58829 | ||
|
53383b9e26 | ||
|
649c9357c6 | ||
|
73dc6960ad | ||
|
88375fb10e | ||
|
0247b5e2e7 | ||
|
842708ac0c | ||
|
2d1c13224f | ||
|
9b9b5e81cf | ||
|
0c9c11a89d | ||
|
f52d79a4d3 | ||
|
2f1777ba80 | ||
|
1e57cadbd3 | ||
|
0db4db1048 | ||
|
73d968feef | ||
|
0154c4af07 | ||
|
60097f0096 | ||
|
bde28463f8 | ||
|
e7c9ef8e52 | ||
|
a22dc95311 | ||
|
a7d04c8554 | ||
|
e23f558007 | ||
|
1894366598 | ||
|
7e04530916 | ||
|
011e625769 | ||
|
de5f2963ab | ||
|
128e9c5f37 | ||
|
fe6e8eec7e | ||
|
addeeef408 | ||
|
d82b436b2f | ||
|
586b4461e6 | ||
|
78418c84f1 | ||
|
eb81670881 | ||
|
0677666f65 | ||
|
684e18c927 | ||
|
e950e57e13 | ||
|
0dffb4284e | ||
|
2de2c3d5fc | ||
|
2cd3126651 | ||
|
bde3cab216 | ||
|
f7aee00928 | ||
|
aa52a60ca8 | ||
|
d8aedd3682 | ||
|
0441ccd3c6 | ||
|
0bc4325222 | ||
|
fd3e9e3051 | ||
|
55ea6d44dd | ||
|
9a0fc069bf | ||
|
cc318fc042 | ||
|
638449c43d | ||
|
38c4b80ab7 | ||
|
ec1b6318e4 | ||
|
1d8201c656 | ||
|
0758caaac0 | ||
|
16a3899a9e | ||
|
ca6775dee4 | ||
|
ba4503804b | ||
|
966f9cc20a | ||
|
a740f15c17 | ||
|
601b568b8d | ||
|
1112580f4a | ||
|
d9c5b1d4df | ||
|
a154389827 | ||
|
c329fd7bc8 | ||
|
94318890c0 | ||
|
798d40c3f3 | ||
|
73ccec0601 | ||
|
ca80d5127e | ||
|
52dfc3dd4a | ||
|
38b001edbd | ||
|
e9ebfa7e77 | ||
|
3f4d948652 | ||
|
a839d3cad6 | ||
|
2d2efde34a | ||
|
bcb6b6f3fd | ||
|
2882ead5bb | ||
|
4c9018cc4c | ||
|
e15fd861e1 | ||
|
ca88677bdd | ||
|
6e211b24fe | ||
|
50c9623e37 | ||
|
3704e1cd78 | ||
|
67900c85d3 | ||
|
d288cc157e | ||
|
5b8ff675b3 | ||
|
78da35682a | ||
|
bb4f99e919 | ||
|
f912430cda | ||
|
a8f5fa5f5a | ||
|
7c0297abe8 | ||
|
1a95a60f80 | ||
|
be0fca914e | ||
|
79fe8c3435 | ||
|
7dc463ee4e | ||
|
79075d5393 | ||
|
c3958a326c | ||
|
f75dc840e4 | ||
|
25e706cce7 | ||
|
8b022dad76 | ||
|
0eec0327be | ||
|
e7bed4c109 | ||
|
355aa42b48 | ||
|
92873e9d27 | ||
|
9e1c37c889 | ||
|
010092c7bd | ||
|
fb602f331b | ||
|
00d2d274bc | ||
|
b403bbba8d | ||
|
cb4727c4a9 | ||
|
a585a2778e | ||
|
4f26548e3a | ||
|
1f889c4b60 | ||
|
8682f2aab3 | ||
|
4f3305c506 | ||
|
78ea35c8e9 | ||
|
c436bec682 | ||
|
f5c0afecdb | ||
|
5371816480 | ||
|
09e455275c | ||
|
3adf3e6a5a | ||
|
672b6df474 | ||
|
53e9dd848f | ||
|
63fb81faa5 | ||
|
a1f297aa83 | ||
|
e68c5ea0b5 | ||
|
af66d3f4c0 | ||
|
a6f6ec1856 | ||
|
287b5acb07 | ||
|
3858607be0 | ||
|
0ed750a7ea | ||
|
7feae084b1 | ||
|
a6f436fefc | ||
|
66dec35c8c | ||
|
1ac79b2558 | ||
|
6e26599980 | ||
|
3bfa3f1946 | ||
|
813c3e2c47 | ||
|
c49fbc7082 | ||
|
24cc9dd078 | ||
|
100e5549b6 | ||
|
b333f3656d | ||
|
76300601d9 | ||
|
f4498d2856 | ||
|
71c4e8ec21 | ||
|
10afd33efb | ||
|
2a31c4f475 | ||
|
f44c137a84 | ||
|
7197810714 | ||
|
4ba5c7cc5d | ||
|
e262ea5fcd | ||
|
6ac0eefed0 | ||
|
226b34d546 | ||
|
d115953347 | ||
|
15f77c8f47 | ||
|
ef080d17b5 | ||
|
4f91b1d072 | ||
|
8d659113d5 | ||
|
5df5fc5b13 | ||
|
e65a490c44 | ||
|
fc591913ac | ||
|
d83d3d9172 | ||
|
020cc26b5c | ||
|
360b1ef5a1 | ||
|
37147b8a23 | ||
|
8a00a56ec4 | ||
|
401ece058d | ||
|
66e4ee795d | ||
|
3e13f514ce | ||
|
81fe13eeed | ||
|
aed855e692 | ||
|
b59ac57617 | ||
|
904f5c28ac | ||
|
80cea73bf9 | ||
|
e28bcfeac8 | ||
|
fda9288b5f | ||
|
9ff08f6312 | ||
|
dd4743a51d | ||
|
50f29f9b1a | ||
|
e5dbe7bb64 | ||
|
15105b66b4 | ||
|
b23025d72b | ||
|
2c98c30a02 | ||
|
bb71db204f | ||
|
92760bb363 | ||
|
fe7873e963 | ||
|
a586c0654f | ||
|
6d7a02beb0 | ||
|
b70364b362 | ||
|
6d1ae86e1c | ||
|
a324d18940 | ||
|
44f6606df8 | ||
|
d8798468c6 | ||
|
749bef0b6a | ||
|
e37e52549f | ||
|
ea664722d4 | ||
|
d098c0ff86 | ||
|
2d494c169f | ||
|
a5af280bac | ||
|
73c22de516 | ||
|
6dddf687d8 | ||
|
744fe5ed44 | ||
|
03620980cf | ||
|
248d62111c | ||
|
46dff2737d | ||
|
fa0b5ece9e | ||
|
a6138a07b6 | ||
|
2863dde053 | ||
|
efe6c7b3f2 | ||
|
3e81f78366 | ||
|
8104ce167a | ||
|
f44921e176 | ||
|
3aee856d7b | ||
|
37e0ee4d48 | ||
|
dc3b313e91 | ||
|
45228cada4 | ||
|
24d45272a7 | ||
|
9e5af7c3b7 | ||
|
6c78910011 | ||
|
89dad8959b | ||
|
9a1d9c88fc | ||
|
02dade46c0 | ||
|
bc5c6826ef | ||
|
217859f849 | ||
|
3951d39b40 | ||
|
bd91753e91 | ||
|
0b04762c94 | ||
|
009ffa3fc8 | ||
|
89f0dde7ff | ||
|
9e7847ed26 | ||
|
112ccbd820 | ||
|
271ffda903 | ||
|
3e5405c39e | ||
|
93e8cd7d68 | ||
|
0abcbce3bb | ||
|
4947f9a193 | ||
|
ad6c1e37ad | ||
|
f52c3da717 | ||
|
28666cead0 | ||
|
74028ff958 | ||
|
fbf720f2c5 | ||
|
1b8bebf0bf | ||
|
f7a70de84a | ||
|
17f77cf2c6 | ||
|
b23201bb1a | ||
|
df454e3cf0 | ||
|
b8bb078cc2 | ||
|
3c51112063 | ||
|
1672bb5a65 | ||
|
b900967f6f | ||
|
746c2c3a99 | ||
|
2af820fb99 | ||
|
bc5741fb1a | ||
|
575f951b3e | ||
|
0cac862bbc | ||
|
4467bf243f | ||
|
ddd1e1f223 | ||
|
d1e97737d6 | ||
|
98be7da3e2 | ||
|
08cb7b6d6f | ||
|
b02db47881 | ||
|
074bb3838f | ||
|
3ba3b23fdc | ||
|
e9fcab08fb | ||
|
bdaf7584db | ||
|
889280c976 | ||
|
f9e7aa5eeb | ||
|
9258671924 | ||
|
a2bbb44d96 | ||
|
16b516f976 | ||
|
86196250b8 | ||
|
ada5372cff | ||
|
f11c1b9466 | ||
|
811a30691e | ||
|
7d1af52ab4 | ||
|
63b9fd0552 | ||
|
b45745118d | ||
|
0ffecdbade | ||
|
ab1ba69027 | ||
|
a5610c8895 | ||
|
012c5f0eca | ||
|
a931aace16 | ||
|
14573dc920 | ||
|
71594af7d5 | ||
|
6a4b3f878d | ||
|
86fdf76f79 | ||
|
835290dfdf | ||
|
7daffabf07 | ||
|
9df267054f | ||
|
8f067962f6 | ||
|
603fdc9154 | ||
|
520340998f | ||
|
2d710d832f | ||
|
a8c834c882 | ||
|
7438cc8ba8 | ||
|
72fd2fec4c | ||
|
762b299e71 | ||
|
013c3892c3 | ||
|
64ce0ad373 | ||
|
087ed94c45 | ||
|
006638a6a2 | ||
|
130c2fff31 | ||
|
cfd817895a | ||
|
ac97e0bf79 | ||
|
1927eeb4b4 | ||
|
c8361441fe | ||
|
107cfe9499 | ||
|
811965b475 | ||
|
e08101704c | ||
|
c17be5870b | ||
|
9d71d006cc | ||
|
b698ca13de | ||
|
0f50bb10b6 | ||
|
e6b60da043 | ||
|
89f4c15e6d | ||
|
1c2e707b34 | ||
|
52015cf35c | ||
|
1d513d063a | ||
|
581d5b899c | ||
|
4f49c07781 | ||
|
245e532934 | ||
|
e44b2231b5 | ||
|
40bd71f064 | ||
|
067dc50efe | ||
|
4876bda857 | ||
|
e89538f685 | ||
|
f7331c7194 | ||
|
02464862ed | ||
|
db0213ae56 | ||
|
8f2298bad8 | ||
|
4c4f692bd6 | ||
|
875b78dc97 | ||
|
fef27e6d3e | ||
|
1dab656dee | ||
|
69678aaa35 | ||
|
d384acd706 | ||
|
5d42e1520a | ||
|
d8753db4ac | ||
|
95dd927857 | ||
|
76760011ff | ||
|
11813f4128 | ||
|
8b1bdcacb7 | ||
|
c242b46016 | ||
|
58c188e1d5 | ||
|
ffc18128f4 | ||
|
6a38d0d431 | ||
|
d0a8ad4c06 | ||
|
c244b1483e | ||
|
fc83f10c85 | ||
|
8d561cd94e | ||
|
970c6988a5 | ||
|
220dcb7be3 | ||
|
48e0488f07 | ||
|
890760b2fb | ||
|
15653c47dd | ||
|
5508ab403d | ||
|
d799ee11b4 | ||
|
abaae7630e | ||
|
5c1d87592a | ||
|
5885035f5f | ||
|
dee4a7c29e | ||
|
82a55ef205 | ||
|
19f33c0e71 | ||
|
3c9e5c9925 | ||
|
1c38ab17f5 | ||
|
d5edfcc6fd | ||
|
9b435df3d4 | ||
|
bc743ad2d9 | ||
|
19cc800ad3 | ||
|
b52e7a69aa | ||
|
1395e4303a | ||
|
d7a6269a17 | ||
|
7fbe7c3960 | ||
|
ee27b7e3dd | ||
|
391d9101a8 | ||
|
2137e0c895 | ||
|
5fc44e10aa | ||
|
b94d767f86 | ||
|
43f17d010a | ||
|
fa20e80860 | ||
|
485c7640a1 | ||
|
d4a7de7c11 | ||
|
a6229a2d3e | ||
|
abd151f5d7 | ||
|
4f1238af71 | ||
|
122a1e2af9 | ||
|
c8d017bd88 | ||
|
23b1b7ba9a | ||
|
88943b64e3 | ||
|
348401e7b8 | ||
|
df248712a5 | ||
|
de1b5db70e | ||
|
4debe8e567 | ||
|
28d362cf28 | ||
|
541cdf5960 | ||
|
fc6e314498 | ||
|
17b5fdd94b | ||
|
5b88cc5ded | ||
|
db285b3764 | ||
|
a425f8c650 | ||
|
96f8916e50 | ||
|
dd2bf2971e | ||
|
34a64cfe53 | ||
|
571617ebfc | ||
|
3bd8a5729d | ||
|
33e4988180 | ||
|
663fbfb7a4 | ||
|
88aa6a9e30 | ||
|
92e2588d6e | ||
|
960ead07f2 | ||
|
78da1de021 | ||
|
946531bd7b | ||
|
10009d61a7 | ||
|
49fc6b1194 | ||
|
6e8b2e161a | ||
|
3922f84a2f | ||
|
56a50e147d | ||
|
7a0b9af662 | ||
|
4fbe44605b | ||
|
ea7a77236c | ||
|
1dc87ed5a1 | ||
|
50a811ca07 | ||
|
4bbb53a25d | ||
|
569295fe30 | ||
|
3f5acaa3fb | ||
|
42016a35c8 | ||
|
17ec22f514 | ||
|
e83ded066a | ||
|
492feb26ce | ||
|
8f80c2d838 | ||
|
b9c588de3d | ||
|
a721d36f41 | ||
|
eee85cd53c | ||
|
7c80778827 | ||
|
4c2cc0e36c | ||
|
a630e226ba | ||
|
0c95a22888 | ||
|
30fdf6067e | ||
|
cffc5df600 | ||
|
3fe8f74e7f | ||
|
913937c98d | ||
|
b21cd65d30 | ||
|
c89638d73c | ||
|
98610bfcec | ||
|
9c8b3c833f | ||
|
dc69aa797b | ||
|
4e8c4bfbd2 | ||
|
316492e4d0 | ||
|
3347679d8f | ||
|
b951baec0d | ||
|
05f7d30e5a | ||
|
34d8b92dce | ||
|
67a7bc7376 | ||
|
30a4c88843 | ||
|
08be94e8e6 | ||
|
d544852ff6 | ||
|
8dc3233f3b | ||
|
c7d479c740 | ||
|
a76dc45512 | ||
|
9bb5a266dd | ||
|
6ebd0bc7a6 | ||
|
f8d8d524cf | ||
|
1bfc33362e | ||
|
5744468c99 | ||
|
4d5aead31c | ||
|
78a80b8899 | ||
|
b961e8101e | ||
|
7876125a22 | ||
|
e2dcf94598 | ||
|
6c1ce27095 | ||
|
46e27ae6d5 | ||
|
0be5c959da | ||
|
face43929d | ||
|
dcaa90d21e | ||
|
9c259c07aa | ||
|
e4d6a9f6f4 | ||
|
641002da37 | ||
|
9f9c9fe410 | ||
|
e39112b29c | ||
|
bf2584703a | ||
|
db8ca3645f | ||
|
0490907fb3 | ||
|
c2e8fdde9d | ||
|
24c67966aa | ||
|
0747d3943f | ||
|
5f9ef108d0 | ||
|
21599b95f8 | ||
|
3196ad48ed | ||
|
49ad5328c7 | ||
|
71676acd0b | ||
|
2489ea90b1 | ||
|
11d45b0090 | ||
|
8490e377c0 | ||
|
ef666519f7 | ||
|
6fe231757e | ||
|
84b5bfe173 | ||
|
d9ee5a7f1b | ||
|
19a19d1952 | ||
|
68e201add8 | ||
|
62531bd012 | ||
|
f639fbd2c3 | ||
|
a38704df58 | ||
|
8d439710cd | ||
|
120a90a155 | ||
|
c3e9725f02 | ||
|
02a3829363 | ||
|
018f7fea31 | ||
|
e6bd0aba9d | ||
|
9e7f11a847 | ||
|
042d4b2a6b | ||
|
ddd10b5e0b | ||
|
2fd66fab1a | ||
|
5cc594c9e8 | ||
|
ffb7ef4b75 | ||
|
b49280be72 | ||
|
7121134abd | ||
|
e8c812b500 | ||
|
6950cd203b | ||
|
d3bd463897 | ||
|
54d86fb4a4 | ||
|
42d398243d | ||
|
f675514815 | ||
|
5793142861 | ||
|
b227f96c45 | ||
|
0687442f5d | ||
|
8390c90a91 | ||
|
3000831365 | ||
|
6b1cba94e3 | ||
|
9591d3a155 | ||
|
83d04ddd68 | ||
|
70232676f7 | ||
|
257a716073 | ||
|
0ccd2c7715 | ||
|
42e307b3c5 | ||
|
61cba2ae50 | ||
|
867079c820 | ||
|
d20751d53d | ||
|
6281d1c549 | ||
|
8fd475bf96 | ||
|
bbd3b0ca4f | ||
|
7c4f323abc | ||
|
1edff47af3 | ||
|
dc2d54d9c6 | ||
|
263fd0c9d3 | ||
|
6db70f0301 | ||
|
70f663db29 | ||
|
59bc1f42e4 | ||
|
b1b4f307d5 | ||
|
a3e6369105 | ||
|
3d0d61fdaf | ||
|
650707fccc | ||
|
33b0855b27 | ||
|
7214a222c7 | ||
|
81ce127048 | ||
|
344d2b00ea | ||
|
52a8072727 | ||
|
741e6915e2 | ||
|
e40d52c550 | ||
|
3d5bea5714 | ||
|
3e03b07831 | ||
|
e00cd4f941 | ||
|
a1882fee02 | ||
|
4a5626695c | ||
|
afa1d37e20 | ||
|
b9967fdbcf | ||
|
ccf6e48a52 | ||
|
93a3c7f663 | ||
|
5dbac70a67 | ||
|
40c2e532b9 | ||
|
02beb90db3 | ||
|
92bf4f6fa8 | ||
|
8ad98c9ad3 | ||
|
7a4ec36f31 | ||
|
3fab956599 | ||
|
a2de27a828 | ||
|
99855b2d63 | ||
|
08c5ec8f01 | ||
|
e9cb9dbf60 | ||
|
8284b7d3da | ||
|
511db446d7 | ||
|
b98be1f18d | ||
|
872f847655 | ||
|
f86bb0377f | ||
|
c82cea9d32 | ||
|
67c002f7c9 | ||
|
e0a7f81b39 | ||
|
7b35a18ac0 | ||
|
54d59c743c | ||
|
6279610ce4 | ||
|
ac0ca9223b | ||
|
4853a3454c | ||
|
cf1074e70e | ||
|
5804e9de9b | ||
|
3279aacdee | ||
|
2158670177 | ||
|
4cff593dd4 | ||
|
597423ea80 | ||
|
c3fc1d7382 | ||
|
c922354076 | ||
|
4118315afa | ||
|
ee13c667f1 | ||
|
23d563434a | ||
|
6142dbd8d0 | ||
|
0dac319bc4 | ||
|
0b871a113c | ||
|
8176527f56 | ||
|
ec7bb71d75 | ||
|
b6a1d8cfd4 | ||
|
52924288b9 | ||
|
7892ecce1c | ||
|
a501a39626 | ||
|
37bc3bbf4b | ||
|
2d46ea069b | ||
|
b217cd6689 | ||
|
a0c262d08b | ||
|
6ce273e2e6 | ||
|
f62b1f5d69 | ||
|
2b87fdcb43 | ||
|
c0139681cd | ||
|
9d6f9511f5 | ||
|
a79f64e155 | ||
|
7ca783c3bc | ||
|
ff0c05b8f1 | ||
|
4601bf71e5 | ||
|
c7f86abf6d | ||
|
f309e30320 | ||
|
233574e8e0 | ||
|
4a6939ef87 | ||
|
42bc7a3c50 | ||
|
abdb8d99d7 | ||
|
ff6e6ee293 | ||
|
8b2bfd5586 | ||
|
e46b912f53 | ||
|
1581f26a7f | ||
|
aabfd493d3 | ||
|
bd72efbd80 | ||
|
e8fd3e3085 | ||
|
873eddaf19 | ||
|
bd2c0c730d | ||
|
17a51a4bf0 | ||
|
09c39adc55 | ||
|
cbb80baf03 | ||
|
040bd7b0fa | ||
|
2637faa450 | ||
|
952e1d4ba9 | ||
|
353b5e08ba | ||
|
d286e7b753 | ||
|
3c6e36ba2d | ||
|
9af64480e1 | ||
|
4ae5736bd0 | ||
|
56039ed596 | ||
|
02b915af54 | ||
|
1eb274c555 | ||
|
6ac6f311b5 | ||
|
f736e705b2 | ||
|
54d88753a6 | ||
|
98a99fb2bd | ||
|
ac97f6f225 | ||
|
60fd7ab781 | ||
|
0d64e8ef89 | ||
|
a894980258 | ||
|
f227483846 | ||
|
6cb14dd337 | ||
|
fa268b5017 | ||
|
a307974731 | ||
|
a2381948bb | ||
|
5eb2c442a9 | ||
|
f471e63bb3 | ||
|
bb24697d9b | ||
|
c4a7bf90cf | ||
|
bef87fc258 | ||
|
68d6410da0 | ||
|
1e36667193 | ||
|
0931d938b0 | ||
|
0f2cdd70ff | ||
|
5ec101ec21 | ||
|
72cd84b92a | ||
|
6799c8e4c9 | ||
|
8484ea3fb2 | ||
|
92d9b89d59 | ||
|
bc98ab6a69 | ||
|
32cd821273 | ||
|
9c6cb42f17 | ||
|
ed06777937 | ||
|
6362032513 | ||
|
84f739036d | ||
|
0f358c8eaa | ||
|
f0d5c04734 | ||
|
fd56deb7dd | ||
|
d7ddbf3661 | ||
|
b6d0191e51 | ||
|
2a70d62251 | ||
|
909874b1b9 | ||
|
ff57dee13d | ||
|
e4cc77ce52 | ||
|
889f295958 | ||
|
68809cd913 | ||
|
910fb92267 | ||
|
f241e14cab | ||
|
7e9c293986 | ||
|
d43464a1ec | ||
|
c7079a25eb | ||
|
54f38d250c | ||
|
c7d543d36c | ||
|
7fab57dcef | ||
|
cd9757512d | ||
|
c0193bfb7f | ||
|
6e1f4bb560 | ||
|
1358d0bfac | ||
|
86d17a8dc2 | ||
|
3988ab2d27 | ||
|
f7f7bf5ab2 | ||
|
a76ba52f34 | ||
|
9febdf333c | ||
|
013b8b3f60 | ||
|
49fc708d4c | ||
|
92f6f0f22c | ||
|
e9e34f430e | ||
|
ab723c7fb5 | ||
|
636d5f60f9 | ||
|
c3492973e1 | ||
|
e20619e071 | ||
|
b44e2be032 | ||
|
76d6d700ad | ||
|
c96763215d | ||
|
4a76900bd7 | ||
|
1c0a3b2a55 | ||
|
056702e541 | ||
|
eab23f05d8 | ||
|
0944ac8d91 | ||
|
a5a43caa9a | ||
|
5456fc59ab | ||
|
279d8bf108 | ||
|
38e35e0973 | ||
|
af18578928 | ||
|
c30e92603c | ||
|
6ecba84817 | ||
|
b90ed1accb | ||
|
1111960120 | ||
|
7f09f48e7e | ||
|
b397711a66 | ||
|
cf9b384bcb | ||
|
42b651ef56 | ||
|
387298d4a6 | ||
|
cd854b5bc2 | ||
|
dd9b4e530c | ||
|
b5dd566c83 | ||
|
ff26dc60d1 | ||
|
53bc9d8a39 | ||
|
a3468c9bd8 | ||
|
58f9d65535 | ||
|
a33c3628da | ||
|
50dc64a7af | ||
|
5df7b79397 | ||
|
e54ea1f7c9 | ||
|
4be9e9e7fe | ||
|
dd7dd414f0 | ||
|
f25698d08f | ||
|
ef13fb9189 | ||
|
7706abcbed | ||
|
7d49fc75d5 | ||
|
908c62f327 | ||
|
a1c185a376 | ||
|
ac96ac13d4 | ||
|
579c291882 | ||
|
2770dca2c0 | ||
|
6825c2c706 | ||
|
6d1f28f050 | ||
|
7f3018a4fb | ||
|
53383860e8 | ||
|
fa7c2ea070 | ||
|
46f91797ec | ||
|
a242381024 | ||
|
3a7512d2b0 | ||
|
269e86b725 | ||
|
bf32f6cd75 | ||
|
c7611e7a0d | ||
|
23bc8e468d | ||
|
5422a862de | ||
|
818baf5fdb | ||
|
bb5d4efb2e | ||
|
198d537be7 | ||
|
1acd1a7b56 | ||
|
23dd078c8d | ||
|
781bda1404 | ||
|
3d5c0f46f1 | ||
|
8567816542 | ||
|
664c88ca97 | ||
|
cbb3855d97 | ||
|
3c4cf31a01 | ||
|
06eb436008 | ||
|
e3654c2245 | ||
|
a112563214 | ||
|
976fc2279f | ||
|
56643d4311 | ||
|
cb8fcc7808 | ||
|
b76c69de1b | ||
|
bfac1972e2 | ||
|
f8d0e01e46 | ||
|
ffbab6fedd | ||
|
145032a57f | ||
|
519d52e2bb | ||
|
94182a5acc | ||
|
f71eb7fe17 | ||
|
7ce70533c9 | ||
|
014c49c285 | ||
|
6e3ca5c45c | ||
|
68f2861e92 | ||
|
b354f8a35a | ||
|
b461c652b4 | ||
|
2caac965d4 | ||
|
1347686dbf | ||
|
8b2b12e05f | ||
|
1a090bbcd3 | ||
|
dec3ad498e | ||
|
978a47e2c5 | ||
|
453e4c0aa2 | ||
|
2630980f49 | ||
|
782acfe378 | ||
|
f9475f9577 | ||
|
8857aeadfd | ||
|
047a1417fb | ||
|
ce42e30b8c | ||
|
4e63fcd55d | ||
|
024106bbfb | ||
|
29ee3832cf | ||
|
c7c2edae8a | ||
|
bb89a5d4d3 | ||
|
f76966b438 | ||
|
49880cbabe | ||
|
f284e3c069 | ||
|
66a69f3f10 | ||
|
7d5982e6fe | ||
|
a2a9107600 | ||
|
42621370c3 | ||
|
8e61b7c0f0 | ||
|
35ddd2de20 | ||
|
f18ef5144a | ||
|
307167fb66 | ||
|
7e8b128740 | ||
|
acec2559a5 | ||
|
286823227c | ||
|
8d75235ff2 | ||
|
79e9dae9a0 | ||
|
f021ba00a2 | ||
|
adb1b58627 | ||
|
b8059a1880 | ||
|
49d40f020b | ||
|
910a2f318b | ||
|
08dc8d9baf | ||
|
c9fc055351 | ||
|
d1a0c3ffc2 | ||
|
b07e69c37a | ||
|
2b43e7dbda | ||
|
5b1a666cf1 | ||
|
b495b96547 | ||
|
72e3948438 | ||
|
f5662a82cd | ||
|
ab2e567685 | ||
|
d8ea2f8c4b | ||
|
aaae37afba | ||
|
04f7e27877 | ||
|
3dd29366b8 | ||
|
8c2ca3fae6 | ||
|
2ae279e0d4 | ||
|
4c6cb6afd1 | ||
|
78594e9bd3 | ||
|
e921e63b54 | ||
|
38950b081c | ||
|
56687e9b56 | ||
|
c9b33e3386 | ||
|
2ad4634de5 | ||
|
e080667729 | ||
|
ef6c35ae1b | ||
|
95bf762eeb | ||
|
f9febb64c5 | ||
|
dd36e4e838 | ||
|
df35adc438 | ||
|
f88a5a0e6b | ||
|
cc06bc334a | ||
|
2dc2b6bab7 | ||
|
49a64a6edf | ||
|
f0be52f9f8 | ||
|
1bc0225441 | ||
|
87df8bb0fe | ||
|
b76ee75aad | ||
|
69437a7183 | ||
|
63f762bc48 | ||
|
5a0efe6536 | ||
|
01759517aa | ||
|
17805e5829 | ||
|
492d95329a | ||
|
7ba18d3f0a | ||
|
7d09af38c1 | ||
|
8d3a9e347c | ||
|
f79a81dad9 | ||
|
b98e9d180c | ||
|
b077d7988e | ||
|
8d5d2a93d5 | ||
|
d77c2e3fb0 | ||
|
7ef3202f83 | ||
|
36085ab49a | ||
|
f743c4ee7f | ||
|
c994eddec4 | ||
|
5a20e2695b | ||
|
cd18aa97f0 | ||
|
bd09127859 | ||
|
6818c117ee | ||
|
39601c183a | ||
|
1e3b41e8ea | ||
|
7f8684828d | ||
|
93ac06c902 | ||
|
b21f07b35c | ||
|
508873de9b | ||
|
aeb5b481c9 | ||
|
9db34dc31a | ||
|
95a468cebb | ||
|
22173c1d8b | ||
|
dd2fe1ebe8 | ||
|
6637b976ed | ||
|
f5202bedef | ||
|
2db2f2cfb6 | ||
|
e7a5dc58e6 | ||
|
3889af476b | ||
|
34fb97998c | ||
|
ec96cbf016 | ||
|
19a606d736 | ||
|
3add1cf361 | ||
|
50031440a3 | ||
|
d67aeb9739 | ||
|
7c707a73a2 | ||
|
330e892ff6 | ||
|
d857fd08a5 | ||
|
e0a88d19d1 | ||
|
842bc2128b | ||
|
ca6d26a1c2 | ||
|
17d913307e | ||
|
f236c14dc5 | ||
|
bf01067a8a | ||
|
d3959a8ce7 | ||
|
f243930b68 | ||
|
d1b07171cc | ||
|
15d992cb6a | ||
|
59c58ea26c | ||
|
f95bb423a3 | ||
|
9448ed3ef7 | ||
|
7c7a1ed01e | ||
|
5ff8ae8052 | ||
|
8d53f447bf | ||
|
325101e525 | ||
|
1b02f3546c | ||
|
cd96049d10 | ||
|
a83cd30c31 | ||
|
a00988f663 | ||
|
86dc1d629b | ||
|
b67e130fda | ||
|
4cd777712b | ||
|
79009e62c1 | ||
|
76999799ed | ||
|
22af193a51 | ||
|
951d091f07 | ||
|
185e107d24 | ||
|
ca7703fbd1 | ||
|
12cd4e8c09 | ||
|
bad16ea52a | ||
|
30015bde90 | ||
|
1e00cd58a5 | ||
|
40bb7bf437 | ||
|
9d648a87cb | ||
|
8de850be95 | ||
|
19588c2d69 | ||
|
9c5a8430db | ||
|
afc9c1a23a | ||
|
cbacac5975 | ||
|
b3e1fe2ec5 | ||
|
7728716759 | ||
|
b7bf96996f | ||
|
6d8c287032 | ||
|
ab3764ed0a | ||
|
5406c2b3d3 | ||
|
743af38e7f | ||
|
9d16ec755c | ||
|
04d550b02e | ||
|
55d2566539 | ||
|
9cc6fbe580 | ||
|
827a01937f | ||
|
0f62dac627 | ||
|
7ee974e91c | ||
|
c288188a0f | ||
|
057ba29a27 | ||
|
234ee47281 | ||
|
98a8c1aebf | ||
|
42e546d065 | ||
|
6ffb04f0d4 | ||
|
fd259ff68c | ||
|
ab2d93b724 | ||
|
9611780907 | ||
|
93c5892bc3 | ||
|
1095ca956b | ||
|
05654bfcba | ||
|
89d09813c3 | ||
|
5ac4914642 |
384 changed files with 23355 additions and 2538 deletions
5
.clang-format
Normal file
5
.clang-format
Normal file
|
@ -0,0 +1,5 @@
|
|||
BasedOnStyle: LLVM
|
||||
AlignConsecutiveMacros: true
|
||||
ColumnLimit: 90
|
||||
IndentCaseLabels: true
|
||||
IndentWidth: 4
|
15
.github/dependabot.yml
vendored
Normal file
15
.github/dependabot.yml
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
# To get started with Dependabot version updates, you'll need to specify which
|
||||
# package ecosystems to update and where the package manifests are located.
|
||||
# Please see the documentation for all configuration options:
|
||||
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
groups:
|
||||
github-actions:
|
||||
patterns:
|
||||
- "*"
|
31
.github/workflows/fuzz.yml
vendored
Normal file
31
.github/workflows/fuzz.yml
vendored
Normal file
|
@ -0,0 +1,31 @@
|
|||
name: oss-fuzz
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [ master ]
|
||||
paths:
|
||||
- '**.c'
|
||||
- '**.h'
|
||||
|
||||
jobs:
|
||||
fuzz:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Build Fuzzers
|
||||
id: build
|
||||
uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master
|
||||
with:
|
||||
oss-fuzz-project-name: 'jansson'
|
||||
dry-run: false
|
||||
- name: Run Fuzzers
|
||||
uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master
|
||||
with:
|
||||
oss-fuzz-project-name: 'jansson'
|
||||
fuzz-seconds: 600
|
||||
dry-run: false
|
||||
- name: Upload Crash
|
||||
uses: actions/upload-artifact@v4
|
||||
if: failure() && steps.build.outcome == 'success'
|
||||
with:
|
||||
name: artifacts
|
||||
path: ./out/artifacts
|
67
.github/workflows/tests.yml
vendored
Normal file
67
.github/workflows/tests.yml
vendored
Normal file
|
@ -0,0 +1,67 @@
|
|||
name: tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
pull_request:
|
||||
branches: [ master ]
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- run: ./scripts/clang-format-check
|
||||
|
||||
autotools:
|
||||
strategy:
|
||||
matrix:
|
||||
os: ["ubuntu-latest", "macos-latest"]
|
||||
cc: ["gcc", "clang"]
|
||||
dtoa: ["yes", "no"]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- if: ${{runner.os == 'macOS'}}
|
||||
run: brew install autoconf automake libtool
|
||||
- uses: actions/checkout@v4
|
||||
- run: autoreconf -fi
|
||||
- env:
|
||||
CC: ${{ matrix.cc }}
|
||||
CFLAGS: -Werror
|
||||
run: ./configure --enable-dtoa=${{ matrix.dtoa }}
|
||||
- run: make check
|
||||
|
||||
cmake:
|
||||
strategy:
|
||||
matrix:
|
||||
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
|
||||
cc: ["gcc", "clang"]
|
||||
exclude:
|
||||
- os: windows-latest
|
||||
cc: gcc
|
||||
- os: windows-latest
|
||||
cc: clang
|
||||
include:
|
||||
- os: windows-latest
|
||||
cc: 'msvc' # Doesn't really matter, MSVC is always used on Windows
|
||||
|
||||
runs-on: ${{matrix.os}}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- env:
|
||||
CC: ${{matrix.cc}}
|
||||
run: cmake .
|
||||
- run: cmake --build .
|
||||
- run: ctest --output-on-failure
|
||||
|
||||
valgrind:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- run: sudo apt update && sudo apt install valgrind
|
||||
- run: cmake -DJANSSON_TEST_WITH_VALGRIND=ON .
|
||||
- run: cmake --build .
|
||||
- run: ctest --output-on-failure
|
14
.gitignore
vendored
14
.gitignore
vendored
|
@ -1,3 +1,4 @@
|
|||
*~
|
||||
*.o
|
||||
*.a
|
||||
.libs
|
||||
|
@ -8,6 +9,7 @@ aclocal.m4
|
|||
autom4te.cache
|
||||
config.guess
|
||||
config.h
|
||||
config.h.in
|
||||
config.log
|
||||
config.status
|
||||
config.sub
|
||||
|
@ -17,7 +19,19 @@ install-sh
|
|||
libtool
|
||||
ltmain.sh
|
||||
missing
|
||||
compile
|
||||
test-driver
|
||||
*.lo
|
||||
*.la
|
||||
stamp-h1
|
||||
*.pyc
|
||||
*.pc
|
||||
/src/jansson_config.h
|
||||
/jansson_private_config.h.in
|
||||
/jansson_private_config.h
|
||||
/build
|
||||
*.exe
|
||||
.idea
|
||||
cmake-build-debug/
|
||||
*.log
|
||||
*.trs
|
30
Android.mk
Normal file
30
Android.mk
Normal file
|
@ -0,0 +1,30 @@
|
|||
LOCAL_PATH:= $(call my-dir)
|
||||
include $(CLEAR_VARS)
|
||||
|
||||
LOCAL_ARM_MODE := arm
|
||||
|
||||
LOCAL_SRC_FILES := \
|
||||
src/dump.c \
|
||||
src/error.c \
|
||||
src/hashtable.c \
|
||||
src/hashtable_seed.c \
|
||||
src/load.c \
|
||||
src/memory.c \
|
||||
src/pack_unpack.c \
|
||||
src/strbuffer.c \
|
||||
src/strconv.c \
|
||||
src/utf.c \
|
||||
src/value.c
|
||||
|
||||
LOCAL_C_INCLUDES += \
|
||||
$(LOCAL_PATH) \
|
||||
$(LOCAL_PATH)/android \
|
||||
$(LOCAL_PATH)/src
|
||||
|
||||
LOCAL_MODULE_TAGS := optional
|
||||
LOCAL_SHARED_LIBRARIES := libc
|
||||
LOCAL_CFLAGS += -O3 -DHAVE_STDINT_H=1
|
||||
|
||||
LOCAL_MODULE:= libjansson
|
||||
|
||||
include $(BUILD_SHARED_LIBRARY)
|
674
CMakeLists.txt
Normal file
674
CMakeLists.txt
Normal file
|
@ -0,0 +1,674 @@
|
|||
cmake_minimum_required (VERSION 3.10)
|
||||
project(jansson C)
|
||||
|
||||
# Options
|
||||
option(JANSSON_BUILD_SHARED_LIBS "Build shared libraries." OFF)
|
||||
option(USE_URANDOM "Use /dev/urandom to seed the hash function." ON)
|
||||
option(USE_WINDOWS_CRYPTOAPI "Use CryptGenRandom to seed the hash function." ON)
|
||||
option(USE_DTOA "Use dtoa for optimal floating-point to string conversions." ON)
|
||||
|
||||
if (MSVC)
|
||||
# This option must match the settings used in your program, in particular if you
|
||||
# are linking statically
|
||||
option(JANSSON_STATIC_CRT "Link the static CRT libraries" OFF )
|
||||
endif ()
|
||||
|
||||
option(JANSSON_EXAMPLES "Compile example applications" ON)
|
||||
|
||||
if (UNIX)
|
||||
option(JANSSON_COVERAGE "(GCC Only! Requires gcov/lcov to be installed). Include target for doing coverage analysis for the test suite. Note that -DCMAKE_BUILD_TYPE=Debug must be set" OFF)
|
||||
endif ()
|
||||
|
||||
# Set some nicer output dirs.
|
||||
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/bin)
|
||||
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib)
|
||||
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib)
|
||||
set(JANSSON_TEMP_DIR ${CMAKE_CURRENT_BINARY_DIR}/tmp)
|
||||
|
||||
# Give the debug version a different postfix for windows,
|
||||
# so both the debug and release version can be built in the
|
||||
# same build-tree on Windows (MSVC).
|
||||
if (WIN32 AND NOT CMAKE_DEBUG_POSTFIX)
|
||||
set(CMAKE_DEBUG_POSTFIX "_d")
|
||||
endif()
|
||||
|
||||
# This is how I thought it should go
|
||||
# set (JANSSON_VERSION "2.3.1")
|
||||
# set (JANSSON_SOVERSION 2)
|
||||
|
||||
set(JANSSON_DISPLAY_VERSION "2.14.1")
|
||||
|
||||
# This is what is required to match the same numbers as automake's
|
||||
set(JANSSON_VERSION "4.14.0")
|
||||
set(JANSSON_SOVERSION 4)
|
||||
|
||||
# for CheckFunctionKeywords
|
||||
set(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
|
||||
|
||||
include (CheckCSourceCompiles)
|
||||
include (CheckFunctionExists)
|
||||
include (CheckFunctionKeywords)
|
||||
include (CheckIncludeFiles)
|
||||
include (CheckTypeSize)
|
||||
|
||||
# suppress format-truncation warning
|
||||
include (CheckCCompilerFlag)
|
||||
check_c_compiler_flag(-Wno-format-truncation HAS_NO_FORMAT_TRUNCATION)
|
||||
if (HAS_NO_FORMAT_TRUNCATION)
|
||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-format-truncation")
|
||||
endif()
|
||||
|
||||
if (MSVC)
|
||||
# Turn off Microsofts "security" warnings.
|
||||
add_definitions( "/W3 /D_CRT_SECURE_NO_WARNINGS /wd4005 /wd4996 /nologo" )
|
||||
|
||||
if (JANSSON_STATIC_CRT)
|
||||
set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} /MT")
|
||||
set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} /MTd")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
message("C compiler: ${CMAKE_C_COMPILER_ID}")
|
||||
|
||||
if (JANSSON_COVERAGE)
|
||||
include(CodeCoverage)
|
||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
|
||||
endif()
|
||||
|
||||
check_include_files (endian.h HAVE_ENDIAN_H)
|
||||
check_include_files (fcntl.h HAVE_FCNTL_H)
|
||||
check_include_files (sched.h HAVE_SCHED_H)
|
||||
check_include_files (unistd.h HAVE_UNISTD_H)
|
||||
check_include_files (sys/param.h HAVE_SYS_PARAM_H)
|
||||
check_include_files (sys/stat.h HAVE_SYS_STAT_H)
|
||||
check_include_files (sys/time.h HAVE_SYS_TIME_H)
|
||||
check_include_files (sys/types.h HAVE_SYS_TYPES_H)
|
||||
|
||||
check_function_exists (close HAVE_CLOSE)
|
||||
check_function_exists (getpid HAVE_GETPID)
|
||||
check_function_exists (gettimeofday HAVE_GETTIMEOFDAY)
|
||||
check_function_exists (open HAVE_OPEN)
|
||||
check_function_exists (read HAVE_READ)
|
||||
check_function_exists (sched_yield HAVE_SCHED_YIELD)
|
||||
|
||||
# Check for the int-type includes
|
||||
check_include_files (stdint.h HAVE_STDINT_H)
|
||||
|
||||
include (TestBigEndian)
|
||||
TEST_BIG_ENDIAN(WORDS_BIGENDIAN)
|
||||
|
||||
# Check our 64 bit integer sizes
|
||||
check_type_size (__int64 __INT64)
|
||||
check_type_size (int64_t INT64_T)
|
||||
check_type_size ("long long" LONG_LONG_INT)
|
||||
|
||||
# Check our 32 bit integer sizes
|
||||
check_type_size (int32_t INT32_T)
|
||||
check_type_size (__int32 __INT32)
|
||||
check_type_size ("long" LONG_INT)
|
||||
check_type_size ("int" INT)
|
||||
if (HAVE_INT32_T)
|
||||
set (JSON_INT32 int32_t)
|
||||
elseif (HAVE___INT32)
|
||||
set (JSON_INT32 __int32)
|
||||
elseif (HAVE_LONG_INT AND (LONG_INT EQUAL 4))
|
||||
set (JSON_INT32 long)
|
||||
elseif (HAVE_INT AND (INT EQUAL 4))
|
||||
set (JSON_INT32 int)
|
||||
else ()
|
||||
message (FATAL_ERROR "Could not detect a valid 32-bit integer type")
|
||||
endif ()
|
||||
|
||||
check_type_size ("unsigned long" UNSIGNED_LONG_INT)
|
||||
check_type_size ("unsigned int" UNSIGNED_INT)
|
||||
check_type_size ("unsigned short" UNSIGNED_SHORT)
|
||||
|
||||
check_type_size (uint32_t UINT32_T)
|
||||
check_type_size (__uint32 __UINT32)
|
||||
if (HAVE_UINT32_T)
|
||||
set (JSON_UINT32 uint32_t)
|
||||
elseif (HAVE___UINT32)
|
||||
set (JSON_UINT32 __uint32)
|
||||
elseif (HAVE_UNSIGNED_LONG_INT AND (UNSIGNED_LONG_INT EQUAL 4))
|
||||
set (JSON_UINT32 "unsigned long")
|
||||
elseif (HAVE_UNSIGNED_INT AND (UNSIGNED_INT EQUAL 4))
|
||||
set (JSON_UINT32 "unsigned int")
|
||||
else ()
|
||||
message (FATAL_ERROR "Could not detect a valid unsigned 32-bit integer type")
|
||||
endif ()
|
||||
|
||||
check_type_size (uint16_t UINT16_T)
|
||||
check_type_size (__uint16 __UINT16)
|
||||
if (HAVE_UINT16_T)
|
||||
set (JSON_UINT16 uint16_t)
|
||||
elseif (HAVE___UINT16)
|
||||
set (JSON_UINT16 __uint16)
|
||||
elseif (HAVE_UNSIGNED_INT AND (UNSIGNED_INT EQUAL 2))
|
||||
set (JSON_UINT16 "unsigned int")
|
||||
elseif (HAVE_UNSIGNED_SHORT AND (UNSIGNED_SHORT EQUAL 2))
|
||||
set (JSON_UINT16 "unsigned short")
|
||||
else ()
|
||||
message (FATAL_ERROR "Could not detect a valid unsigned 16-bit integer type")
|
||||
endif ()
|
||||
|
||||
check_type_size (uint8_t UINT8_T)
|
||||
check_type_size (__uint8 __UINT8)
|
||||
if (HAVE_UINT8_T)
|
||||
set (JSON_UINT8 uint8_t)
|
||||
elseif (HAVE___UINT8)
|
||||
set (JSON_UINT8 __uint8)
|
||||
else ()
|
||||
set (JSON_UINT8 "unsigned char")
|
||||
endif ()
|
||||
|
||||
# Check for ssize_t and SSIZE_T existence.
|
||||
check_type_size(ssize_t SSIZE_T)
|
||||
check_type_size(SSIZE_T UPPERCASE_SSIZE_T)
|
||||
if(NOT HAVE_SSIZE_T)
|
||||
if(HAVE_UPPERCASE_SSIZE_T)
|
||||
set(JSON_SSIZE SSIZE_T)
|
||||
else()
|
||||
set(JSON_SSIZE int)
|
||||
endif()
|
||||
endif()
|
||||
set(CMAKE_EXTRA_INCLUDE_FILES "")
|
||||
|
||||
# Check for all the variants of strtoll
|
||||
check_function_exists (strtoll HAVE_STRTOLL)
|
||||
check_function_exists (strtoq HAVE_STRTOQ)
|
||||
check_function_exists (_strtoi64 HAVE__STRTOI64)
|
||||
|
||||
# Figure out what variant we should use
|
||||
if (HAVE_STRTOLL)
|
||||
set (JSON_STRTOINT strtoll)
|
||||
elseif (HAVE_STRTOQ)
|
||||
set (JSON_STRTOINT strtoq)
|
||||
elseif (HAVE__STRTOI64)
|
||||
set (JSON_STRTOINT _strtoi64)
|
||||
else ()
|
||||
# fallback to strtol (32 bit)
|
||||
# this will set all the required variables
|
||||
set (JSON_STRTOINT strtol)
|
||||
set (JSON_INT_T long)
|
||||
set (JSON_INTEGER_FORMAT "\"ld\"")
|
||||
endif ()
|
||||
|
||||
# if we haven't defined JSON_INT_T, then we have a 64 bit conversion function.
|
||||
# detect what to use for the 64 bit type.
|
||||
# Note: I will prefer long long if I can get it, as that is what the automake system aimed for.
|
||||
if (NOT DEFINED JSON_INT_T)
|
||||
set (JSON_INTEGER_IS_LONG_LONG 1)
|
||||
|
||||
if (HAVE_LONG_LONG_INT AND (LONG_LONG_INT EQUAL 8))
|
||||
set (JSON_INT_T "long long")
|
||||
elseif (HAVE_INT64_T)
|
||||
set (JSON_INT_T int64_t)
|
||||
elseif (HAVE___INT64)
|
||||
set (JSON_INT_T __int64)
|
||||
else ()
|
||||
message (FATAL_ERROR "Could not detect 64 bit type, although I detected the strtoll equivalent")
|
||||
endif ()
|
||||
|
||||
# Apparently, Borland BCC and MSVC wants I64d,
|
||||
# Borland BCC could also accept LD
|
||||
# and gcc wants ldd,
|
||||
# I am not sure what cygwin will want, so I will assume I64d
|
||||
|
||||
if (WIN32) # matches both msvc and cygwin
|
||||
set (JSON_INTEGER_FORMAT "\"I64d\"")
|
||||
else ()
|
||||
set (JSON_INTEGER_FORMAT "\"lld\"")
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
check_include_files (locale.h HAVE_LOCALE_H)
|
||||
check_function_exists(setlocale HAVE_SETLOCALE)
|
||||
|
||||
# Check what the inline keyword is.
|
||||
# Note that the original JSON_INLINE was always set to just 'inline', so this goes further.
|
||||
check_function_keywords("inline")
|
||||
check_function_keywords("__inline")
|
||||
check_function_keywords("__inline__")
|
||||
|
||||
if (HAVE_INLINE)
|
||||
set(JSON_INLINE inline)
|
||||
elseif (HAVE___INLINE)
|
||||
set(JSON_INLINE __inline)
|
||||
elseif (HAVE___INLINE__)
|
||||
set(JSON_INLINE __inline__)
|
||||
else()
|
||||
# no inline on this platform
|
||||
set (JSON_INLINE)
|
||||
endif()
|
||||
|
||||
check_c_source_compiles ("int main() { unsigned long val; __sync_bool_compare_and_swap(&val, 0, 1); __sync_add_and_fetch(&val, 1); __sync_sub_and_fetch(&val, 1); return 0; } " HAVE_SYNC_BUILTINS)
|
||||
check_c_source_compiles ("int main() { char l; unsigned long v; __atomic_test_and_set(&l, __ATOMIC_RELAXED); __atomic_store_n(&v, 1, __ATOMIC_RELEASE); __atomic_load_n(&v, __ATOMIC_ACQUIRE); __atomic_add_fetch(&v, 1, __ATOMIC_ACQUIRE); __atomic_sub_fetch(&v, 1, __ATOMIC_RELEASE); return 0; }" HAVE_ATOMIC_BUILTINS)
|
||||
|
||||
if (HAVE_SYNC_BUILTINS)
|
||||
set(JSON_HAVE_SYNC_BUILTINS 1)
|
||||
else()
|
||||
set(JSON_HAVE_SYNC_BUILTINS 0)
|
||||
endif()
|
||||
|
||||
if (HAVE_ATOMIC_BUILTINS)
|
||||
set(JSON_HAVE_ATOMIC_BUILTINS 1)
|
||||
else()
|
||||
set(JSON_HAVE_ATOMIC_BUILTINS 0)
|
||||
endif()
|
||||
|
||||
set (JANSSON_INITIAL_HASHTABLE_ORDER 3 CACHE STRING "Number of buckets new object hashtables contain is 2 raised to this power. The default is 3, so empty hashtables contain 2^3 = 8 buckets.")
|
||||
|
||||
# configure the public config file
|
||||
configure_file (${CMAKE_CURRENT_SOURCE_DIR}/cmake/jansson_config.h.cmake
|
||||
${CMAKE_CURRENT_BINARY_DIR}/include/jansson_config.h)
|
||||
|
||||
# Copy the jansson.h file to the public include folder
|
||||
file (COPY ${CMAKE_CURRENT_SOURCE_DIR}/src/jansson.h
|
||||
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/include/)
|
||||
|
||||
# configure the private config file
|
||||
configure_file (${CMAKE_CURRENT_SOURCE_DIR}/cmake/jansson_private_config.h.cmake
|
||||
${CMAKE_CURRENT_BINARY_DIR}/private_include/jansson_private_config.h)
|
||||
|
||||
include_directories (${CMAKE_CURRENT_BINARY_DIR}/private_include)
|
||||
|
||||
# Configuration flags will be set on project later once we have defined the target
|
||||
|
||||
|
||||
# Add the lib sources.
|
||||
file(GLOB JANSSON_SRC src/*.c)
|
||||
if (NOT USE_DTOA)
|
||||
list(FILTER JANSSON_SRC EXCLUDE REGEX ".*dtoa\\.c$")
|
||||
endif()
|
||||
|
||||
set(JANSSON_HDR_PRIVATE
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/src/hashtable.h
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/src/jansson_private.h
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/src/strbuffer.h
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/src/utf.h
|
||||
${CMAKE_CURRENT_BINARY_DIR}/private_include/jansson_private_config.h)
|
||||
|
||||
set(JANSSON_HDR_PUBLIC
|
||||
${CMAKE_CURRENT_BINARY_DIR}/include/jansson_config.h
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/src/jansson.h)
|
||||
|
||||
source_group("Library Sources" FILES ${JANSSON_SRC})
|
||||
source_group("Library Private Headers" FILES ${JANSSON_HDR_PRIVATE})
|
||||
source_group("Library Public Headers" FILES ${JANSSON_HDR_PUBLIC})
|
||||
|
||||
if(JANSSON_BUILD_SHARED_LIBS)
|
||||
add_library(jansson SHARED
|
||||
${JANSSON_SRC}
|
||||
${JANSSON_HDR_PRIVATE}
|
||||
${JANSSON_HDR_PUBLIC}
|
||||
src/jansson.def)
|
||||
|
||||
# check if linker support --default-symver
|
||||
list(APPEND CMAKE_REQUIRED_LIBRARIES "-Wl,--default-symver")
|
||||
check_c_source_compiles(
|
||||
"
|
||||
int main (void)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
"
|
||||
DSYMVER_WORKS
|
||||
)
|
||||
list(REMOVE_ITEM CMAKE_REQUIRED_LIBRARIES "-Wl,--default-symver")
|
||||
|
||||
if (SYMVER_WORKS)
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--default-symver")
|
||||
else()
|
||||
# some linkers may only support --version-script
|
||||
file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/jansson.sym" "JANSSON_${JANSSON_SOVERSION} {
|
||||
global:
|
||||
*;
|
||||
};
|
||||
")
|
||||
list(APPEND CMAKE_REQUIRED_LIBRARIES "-Wl,--version-script,${CMAKE_CURRENT_BINARY_DIR}/jansson.sym")
|
||||
check_c_source_compiles(
|
||||
"
|
||||
int main (void)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
"
|
||||
VSCRIPT_WORKS
|
||||
)
|
||||
list(REMOVE_ITEM CMAKE_REQUIRED_LIBRARIES "-Wl,--version-script,${CMAKE_CURRENT_BINARY_DIR}/jansson.sym")
|
||||
if (VSCRIPT_WORKS)
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--version-script,${CMAKE_CURRENT_BINARY_DIR}/jansson.sym")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set_target_properties(jansson PROPERTIES
|
||||
VERSION ${JANSSON_VERSION}
|
||||
SOVERSION ${JANSSON_SOVERSION})
|
||||
else()
|
||||
add_library(jansson STATIC
|
||||
${JANSSON_SRC}
|
||||
${JANSSON_HDR_PRIVATE}
|
||||
${JANSSON_HDR_PUBLIC})
|
||||
set_target_properties(jansson PROPERTIES
|
||||
POSITION_INDEPENDENT_CODE true)
|
||||
endif()
|
||||
|
||||
|
||||
# Now target jansson is declared, set per-target values
|
||||
|
||||
target_compile_definitions(jansson PUBLIC JANSSON_USING_CMAKE)
|
||||
target_compile_definitions(jansson PRIVATE HAVE_CONFIG_H)
|
||||
|
||||
target_include_directories(jansson
|
||||
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_BINARY_DIR}/include>
|
||||
INTERFACE $<INSTALL_INTERFACE:include>
|
||||
)
|
||||
|
||||
add_library( jansson::jansson ALIAS jansson )
|
||||
|
||||
|
||||
if (JANSSON_EXAMPLES)
|
||||
add_executable(simple_parse "${CMAKE_CURRENT_SOURCE_DIR}/examples/simple_parse.c")
|
||||
target_link_libraries(simple_parse jansson)
|
||||
endif()
|
||||
|
||||
# For building Documentation (uses Sphinx)
|
||||
option(JANSSON_BUILD_DOCS "Build documentation (uses python-sphinx)." ON)
|
||||
if (JANSSON_BUILD_DOCS)
|
||||
find_package(Sphinx)
|
||||
|
||||
if (NOT SPHINX_FOUND)
|
||||
message(WARNING "Sphinx not found. Cannot generate documentation!
|
||||
Set -DJANSSON_BUILD_DOCS=OFF to get rid of this message.")
|
||||
else()
|
||||
if (Sphinx_VERSION_STRING VERSION_LESS 1.0)
|
||||
message(WARNING "Your Sphinx version is too old!
|
||||
This project requires Sphinx v1.0 or above to produce
|
||||
proper documentation (you have v${Sphinx_VERSION_STRING}).
|
||||
You will get output but it will have errors.")
|
||||
endif()
|
||||
|
||||
# configured documentation tools and intermediate build results
|
||||
set(BINARY_BUILD_DIR "${CMAKE_CURRENT_BINARY_DIR}/_build")
|
||||
|
||||
# Sphinx cache with pickled ReST documents
|
||||
set(SPHINX_CACHE_DIR "${CMAKE_CURRENT_BINARY_DIR}/_doctrees")
|
||||
|
||||
# CMake could be used to build the conf.py file too,
|
||||
# eg it could automatically write the version of the program or change the theme.
|
||||
# if(NOT DEFINED SPHINX_THEME)
|
||||
# set(SPHINX_THEME default)
|
||||
# endif()
|
||||
#
|
||||
# if(NOT DEFINED SPHINX_THEME_DIR)
|
||||
# set(SPHINX_THEME_DIR)
|
||||
# endif()
|
||||
#
|
||||
# configure_file(
|
||||
# "${CMAKE_CURRENT_SOURCE_DIR}/conf.py.in"
|
||||
# "${BINARY_BUILD_DIR}/conf.py"
|
||||
# @ONLY)
|
||||
|
||||
# TODO: Add support for all sphinx builders: http://sphinx-doc.org/builders.html
|
||||
|
||||
# Add documentation targets.
|
||||
set(DOC_TARGETS html)
|
||||
|
||||
option(JANSSON_BUILD_MAN "Create a target for building man pages." ON)
|
||||
|
||||
if (JANSSON_BUILD_MAN)
|
||||
if (Sphinx_VERSION_STRING VERSION_LESS 1.0)
|
||||
message(WARNING "Sphinx version 1.0 > is required to build man pages. You have v${Sphinx_VERSION_STRING}.")
|
||||
else()
|
||||
list(APPEND DOC_TARGETS man)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
option(JANSSON_BUILD_LATEX "Create a target for building latex docs (to create PDF)." OFF)
|
||||
|
||||
if (JANSSON_BUILD_LATEX)
|
||||
find_package(LATEX)
|
||||
|
||||
if (NOT LATEX_COMPILER)
|
||||
message("Couldn't find Latex, can't build latex docs using Sphinx")
|
||||
else()
|
||||
message("Latex found! If you have problems building, see Sphinx documentation for required Latex packages.")
|
||||
list(APPEND DOC_TARGETS latex)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# The doc target will build all documentation targets.
|
||||
add_custom_target(doc)
|
||||
|
||||
foreach (DOC_TARGET ${DOC_TARGETS})
|
||||
add_custom_target(${DOC_TARGET}
|
||||
${SPHINX_EXECUTABLE}
|
||||
# -q # Enable for quiet mode
|
||||
-b ${DOC_TARGET}
|
||||
-d "${SPHINX_CACHE_DIR}"
|
||||
# -c "${BINARY_BUILD_DIR}" # enable if using cmake-generated conf.py
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/doc"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/doc/${DOC_TARGET}"
|
||||
COMMENT "Building ${DOC_TARGET} documentation with Sphinx")
|
||||
|
||||
add_dependencies(doc ${DOC_TARGET})
|
||||
endforeach()
|
||||
|
||||
message("Building documentation enabled for: ${DOC_TARGETS}")
|
||||
endif()
|
||||
endif ()
|
||||
|
||||
|
||||
option(JANSSON_WITHOUT_TESTS "Don't build tests ('make test' to execute tests)" OFF)
|
||||
|
||||
if (NOT JANSSON_WITHOUT_TESTS)
|
||||
option(JANSSON_TEST_WITH_VALGRIND "Enable valgrind tests." OFF)
|
||||
|
||||
ENABLE_TESTING()
|
||||
|
||||
if (JANSSON_TEST_WITH_VALGRIND)
|
||||
# TODO: Add FindValgrind.cmake instead of having a hardcoded path.
|
||||
|
||||
add_definitions(-DVALGRIND)
|
||||
|
||||
# enable valgrind
|
||||
set(CMAKE_MEMORYCHECK_COMMAND valgrind)
|
||||
set(CMAKE_MEMORYCHECK_COMMAND_OPTIONS
|
||||
"--error-exitcode=1 --leak-check=full --show-reachable=yes --track-origins=yes -q")
|
||||
|
||||
set(MEMCHECK_COMMAND
|
||||
"${CMAKE_MEMORYCHECK_COMMAND} ${CMAKE_MEMORYCHECK_COMMAND_OPTIONS}")
|
||||
separate_arguments(MEMCHECK_COMMAND)
|
||||
endif ()
|
||||
|
||||
#
|
||||
# Test suites.
|
||||
#
|
||||
if (CMAKE_COMPILER_IS_GNUCC)
|
||||
add_definitions(-Wall -Wextra -Wdeclaration-after-statement)
|
||||
endif ()
|
||||
|
||||
set(api_tests
|
||||
test_array
|
||||
test_chaos
|
||||
test_copy
|
||||
test_dump
|
||||
test_dump_callback
|
||||
test_equal
|
||||
test_fixed_size
|
||||
test_load
|
||||
test_load_callback
|
||||
test_loadb
|
||||
test_number
|
||||
test_object
|
||||
test_pack
|
||||
test_simple
|
||||
test_sprintf
|
||||
test_unpack)
|
||||
|
||||
# Doing arithmetic on void pointers is not allowed by Microsofts compiler
|
||||
# such as secure_malloc and secure_free is doing, so exclude it for now.
|
||||
if (NOT MSVC)
|
||||
list(APPEND api_tests test_memory_funcs)
|
||||
endif()
|
||||
|
||||
# Helper macro for building and linking a test program.
|
||||
macro(build_testprog name dir)
|
||||
add_executable(${name} ${dir}/${name}.c)
|
||||
add_dependencies(${name} jansson)
|
||||
target_link_libraries(${name} jansson)
|
||||
endmacro(build_testprog)
|
||||
|
||||
# Create executables and tests/valgrind tests for API tests.
|
||||
foreach (test ${api_tests})
|
||||
build_testprog(${test} ${CMAKE_CURRENT_SOURCE_DIR}/test/suites/api)
|
||||
|
||||
if (JANSSON_TEST_WITH_VALGRIND)
|
||||
add_test(memcheck__${test}
|
||||
${MEMCHECK_COMMAND} ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${test}
|
||||
WORKING_DIRECTORY ${JANSSON_TEMP_DIR})
|
||||
else()
|
||||
add_test(${test}
|
||||
${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${test}
|
||||
WORKING_DIRECTORY ${JANSSON_TEMP_DIR})
|
||||
endif ()
|
||||
endforeach ()
|
||||
|
||||
# Test harness for the suites tests.
|
||||
build_testprog(json_process ${CMAKE_CURRENT_SOURCE_DIR}/test/bin)
|
||||
|
||||
set(SUITE_TEST_CMD ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/json_process)
|
||||
set(SUITES encoding-flags valid invalid invalid-unicode)
|
||||
foreach (SUITE ${SUITES})
|
||||
file(GLOB TESTDIRS test/suites/${SUITE}/*)
|
||||
|
||||
foreach (TESTDIR ${TESTDIRS})
|
||||
if (IS_DIRECTORY ${TESTDIR})
|
||||
get_filename_component(TNAME ${TESTDIR} NAME)
|
||||
|
||||
if ((USE_DTOA AND EXISTS ${TESTDIR}/skip_if_dtoa) OR
|
||||
(NOT USE_DTOA AND EXISTS ${TESTDIR}/skip_unless_dtoa))
|
||||
continue()
|
||||
endif()
|
||||
|
||||
if (JANSSON_TEST_WITH_VALGRIND)
|
||||
add_test(memcheck__${SUITE}__${TNAME}
|
||||
${MEMCHECK_COMMAND} ${SUITE_TEST_CMD} ${TESTDIR})
|
||||
else()
|
||||
add_test(${SUITE}__${TNAME}
|
||||
${SUITE_TEST_CMD} ${TESTDIR})
|
||||
endif()
|
||||
|
||||
if ((${SUITE} STREQUAL "valid" OR ${SUITE} STREQUAL "invalid") AND NOT EXISTS ${TESTDIR}/nostrip)
|
||||
if (JANSSON_TEST_WITH_VALGRIND)
|
||||
add_test(memcheck__${SUITE}__${TNAME}__strip
|
||||
${MEMCHECK_COMMAND} ${SUITE_TEST_CMD} --strip ${TESTDIR})
|
||||
else()
|
||||
add_test(${SUITE}__${TNAME}__strip
|
||||
${SUITE_TEST_CMD} --strip ${TESTDIR})
|
||||
endif()
|
||||
endif ()
|
||||
endif ()
|
||||
endforeach ()
|
||||
endforeach ()
|
||||
|
||||
if (JANSSON_COVERAGE)
|
||||
SETUP_TARGET_FOR_COVERAGE(coverage coverage ctest)
|
||||
endif ()
|
||||
|
||||
# Enable using "make check" just like the autotools project.
|
||||
# By default cmake creates a target "make test"
|
||||
add_custom_target(check COMMAND ${CMAKE_CTEST_COMMAND}
|
||||
DEPENDS json_process ${api_tests})
|
||||
endif ()
|
||||
|
||||
#
|
||||
# Installation preparation.
|
||||
#
|
||||
|
||||
# Allow the user to override installation directories.
|
||||
set(JANSSON_INSTALL_LIB_DIR lib CACHE PATH "Installation directory for libraries")
|
||||
set(JANSSON_INSTALL_BIN_DIR bin CACHE PATH "Installation directory for executables")
|
||||
set(JANSSON_INSTALL_INCLUDE_DIR include CACHE PATH "Installation directory for header files")
|
||||
|
||||
if(WIN32 AND NOT CYGWIN)
|
||||
set(DEF_INSTALL_CMAKE_DIR cmake)
|
||||
else()
|
||||
set(DEF_INSTALL_CMAKE_DIR lib/cmake/jansson)
|
||||
endif()
|
||||
|
||||
set(JANSSON_INSTALL_CMAKE_DIR ${DEF_INSTALL_CMAKE_DIR} CACHE PATH "Installation directory for CMake files")
|
||||
|
||||
# Create pkg-conf file.
|
||||
# (We use the same files as ./configure does, so we
|
||||
# have to defined the same variables used there).
|
||||
set(prefix ${CMAKE_INSTALL_PREFIX})
|
||||
set(exec_prefix "\${prefix}")
|
||||
set(libdir "\${exec_prefix}/${JANSSON_INSTALL_LIB_DIR}")
|
||||
set(includedir "\${prefix}/${JANSSON_INSTALL_INCLUDE_DIR}")
|
||||
set(VERSION ${JANSSON_DISPLAY_VERSION})
|
||||
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/jansson.pc.in
|
||||
${CMAKE_CURRENT_BINARY_DIR}/jansson.pc @ONLY)
|
||||
|
||||
# Make sure the paths are relative.
|
||||
foreach(p LIB BIN INCLUDE CMAKE)
|
||||
set(var JANSSON_INSTALL_${p}_DIR)
|
||||
endforeach()
|
||||
|
||||
# Generate the config file for the build-tree.
|
||||
set(JANSSON__INCLUDE_DIRS "${CMAKE_CURRENT_BINARY_DIR}/include")
|
||||
set(JANSSON_INCLUDE_DIRS ${JANSSON__INCLUDE_DIRS} CACHE PATH "Jansson include directories")
|
||||
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/janssonConfig.cmake.in
|
||||
${CMAKE_CURRENT_BINARY_DIR}/janssonConfig.cmake
|
||||
@ONLY)
|
||||
|
||||
|
||||
# Generate the config file for the installation tree.
|
||||
include(CMakePackageConfigHelpers)
|
||||
|
||||
write_basic_package_version_file(
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/cmake/janssonConfigVersion.cmake"
|
||||
VERSION ${JANSSON_VERSION}
|
||||
COMPATIBILITY ExactVersion
|
||||
)
|
||||
|
||||
configure_package_config_file(
|
||||
"cmake/janssonConfig.cmake.in"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/cmake/janssonConfig.cmake"
|
||||
INSTALL_DESTINATION "${JANSSON_INSTALL_CMAKE_DIR}"
|
||||
)
|
||||
|
||||
#
|
||||
# Install targets.
|
||||
#
|
||||
option(JANSSON_INSTALL "Generate installation target" ON)
|
||||
if (JANSSON_INSTALL)
|
||||
install(TARGETS jansson
|
||||
EXPORT janssonTargets
|
||||
LIBRARY DESTINATION "lib"
|
||||
ARCHIVE DESTINATION "lib"
|
||||
RUNTIME DESTINATION "bin"
|
||||
INCLUDES DESTINATION "include")
|
||||
|
||||
install(FILES ${JANSSON_HDR_PUBLIC}
|
||||
DESTINATION "include")
|
||||
|
||||
# Install the pkg-config.
|
||||
install(FILES
|
||||
${CMAKE_CURRENT_BINARY_DIR}/jansson.pc
|
||||
DESTINATION lib/pkgconfig)
|
||||
|
||||
# Install the configs.
|
||||
install(FILES
|
||||
${CMAKE_CURRENT_BINARY_DIR}/cmake/janssonConfig.cmake
|
||||
${CMAKE_CURRENT_BINARY_DIR}/cmake/janssonConfigVersion.cmake
|
||||
DESTINATION "${JANSSON_INSTALL_CMAKE_DIR}")
|
||||
|
||||
# Install exports for the install-tree.
|
||||
install(EXPORT janssonTargets
|
||||
NAMESPACE jansson::
|
||||
DESTINATION "${JANSSON_INSTALL_CMAKE_DIR}")
|
||||
endif()
|
||||
|
||||
# For use when simply using add_library from a parent project to build jansson.
|
||||
set(JANSSON_LIBRARIES jansson CACHE STRING "jansson libraries")
|
3
CONTRIBUTING.md
Normal file
3
CONTRIBUTING.md
Normal file
|
@ -0,0 +1,3 @@
|
|||
Hi, and thanks for contributing!
|
||||
|
||||
Please remember to add tests and documentation for new functionality. Backwards incompatible changes or features that are not directly related to JSON are likely to be rejected.
|
49
CleanSpec.mk
Normal file
49
CleanSpec.mk
Normal file
|
@ -0,0 +1,49 @@
|
|||
# Copyright (C) 2007 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
# If you don't need to do a full clean build but would like to touch
|
||||
# a file or delete some intermediate files, add a clean step to the end
|
||||
# of the list. These steps will only be run once, if they haven't been
|
||||
# run before.
|
||||
#
|
||||
# E.g.:
|
||||
# $(call add-clean-step, touch -c external/sqlite/sqlite3.h)
|
||||
# $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/STATIC_LIBRARIES/libz_intermediates)
|
||||
#
|
||||
# Always use "touch -c" and "rm -f" or "rm -rf" to gracefully deal with
|
||||
# files that are missing or have been moved.
|
||||
#
|
||||
# Use $(PRODUCT_OUT) to get to the "out/target/product/blah/" directory.
|
||||
# Use $(OUT_DIR) to refer to the "out" directory.
|
||||
#
|
||||
# If you need to re-do something that's already mentioned, just copy
|
||||
# the command and add it to the bottom of the list. E.g., if a change
|
||||
# that you made last week required touching a file and a change you
|
||||
# made today requires touching the same file, just copy the old
|
||||
# touch step and add it to the end of the list.
|
||||
#
|
||||
# ************************************************
|
||||
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
|
||||
# ************************************************
|
||||
|
||||
# For example:
|
||||
#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/AndroidTests_intermediates)
|
||||
#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/core_intermediates)
|
||||
#$(call add-clean-step, find $(OUT_DIR) -type f -name "IGTalkSession*" -print0 | xargs -0 rm -f)
|
||||
#$(call add-clean-step, rm -rf $(PRODUCT_OUT)/data/*)
|
||||
|
||||
# ************************************************
|
||||
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
|
||||
# ************************************************
|
26
LICENSE
26
LICENSE
|
@ -1,4 +1,11 @@
|
|||
Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
# License
|
||||
|
||||
This project is licensed under the MIT license, except where otherwise noted.
|
||||
The full text of the MIT license is included below.
|
||||
|
||||
## MIT License
|
||||
|
||||
Copyright (c) 2009-2024 Petri Lehtinen <petri@digip.org>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
@ -17,3 +24,20 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
## Exceptions
|
||||
|
||||
### `src/dtoa.c`
|
||||
|
||||
Copyright (c) 1991, 2000, 2001 by Lucent Technologies.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose without fee is hereby granted, provided that this entire notice
|
||||
is included in all copies of any software which is or includes a copy
|
||||
or modification of this software and in all copies of the supporting
|
||||
documentation for such software.
|
||||
|
||||
THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR IMPLIED
|
||||
WARRANTY. IN PARTICULAR, NEITHER THE AUTHOR NOR LUCENT MAKES ANY
|
||||
REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE MERCHANTABILITY
|
||||
OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR PURPOSE.
|
||||
|
|
12
Makefile.am
12
Makefile.am
|
@ -1,2 +1,12 @@
|
|||
EXTRA_DIST = LICENSE README.rst
|
||||
EXTRA_DIST = CHANGES LICENSE README.rst CMakeLists.txt cmake android examples scripts
|
||||
SUBDIRS = doc src test
|
||||
|
||||
# "make distcheck" builds the dvi target, so use it to check that the
|
||||
# documentation is built correctly.
|
||||
dvi:
|
||||
$(MAKE) SPHINXOPTS_EXTRA=-W html
|
||||
|
||||
pkgconfigdir = $(libdir)/pkgconfig
|
||||
pkgconfig_DATA = jansson.pc
|
||||
|
||||
TESTS = scripts/clang-format-check
|
||||
|
|
README.rst (57 lines changed)
@@ -1,63 +1,70 @@
Jansson README
==============

.. |tests| image:: https://github.com/akheron/jansson/workflows/tests/badge.svg
.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/lmhkkc4q8cwc65ko

|tests| |appveyor|

Jansson_ is a C library for encoding, decoding and manipulating JSON
data. Its main features and design principles are:

- Simple and intuitive API and data model

- Good documentation
- `Comprehensive documentation`_

- No dependencies on other libraries

- Full Unicode support (UTF-8)

- Extensive test suite

- No dependencies on other libraries

Jansson is licensed under the `MIT license`_; see LICENSE in the
source distribution for details.


Compilation and Installation
----------------------------

If you obtained a source tarball, just use the standard autotools
commands::
If you obtained a ``jansson-X.Y.tar.*`` tarball from GitHub Releases, just use
the standard autotools commands::

    $ ./configure && make && make install

If the source has been checked out from a Git repository, the
./configure script has to be generated fist. The easiest way is to use
autoreconf::

    $ autoreconf -i
    $ ./configure
    $ make
    $ make install

To run the test suite, invoke::

    $ make check

Python_ is required to run the tests.
If the source has been checked out from a Git repository, the ``configure``
script has to be generated first. The easiest way is to use autoreconf::

    $ autoreconf -i


Documentation
-------------

Documentation is in the ``doc/`` subdirectory. It's written in
reStructuredText_ with Sphinx_ annotations, so reading it in plain may
be inconvenient. For this reason, prebuilt HTML documentation is
available at http://www.digip.org/jansson/doc/.
Documentation is available at http://jansson.readthedocs.io/en/latest/.

To generate HTML documentation yourself, invoke::
The documentation source is in the ``doc/`` subdirectory. To generate
HTML documentation, invoke::

    cd doc/
    sphinx-build . .build/html
    $ make html

... and point your browser to ``.build/html/index.html``. Sphinx_ is
required to generate the documentation.
Then, point your browser to ``doc/_build/html/index.html``. Sphinx_
1.0 or newer is required to generate the documentation.


Community
---------

* `Documentation <http://jansson.readthedocs.io/en/latest/>`_
* `Issue tracker <https://github.com/akheron/jansson/issues>`_
* `Mailing list <http://groups.google.com/group/jansson-users>`_
* `Wiki <https://github.com/akheron/jansson/wiki>`_ contains some development documentation

.. _Jansson: http://www.digip.org/jansson/
.. _`Comprehensive documentation`: http://jansson.readthedocs.io/en/latest/
.. _`MIT license`: http://www.opensource.org/licenses/mit-license.php
.. _Python: http://www.python.org/
.. _reStructuredText: http://docutils.sourceforge.net/rst.html
.. _Sphinx: http://sphinx.pocoo.org/

SECURITY.md (new file, 9 lines)
@@ -0,0 +1,9 @@
# Security Policy

## Supported Versions

Latest released version.

## Reporting a Vulnerability

Send an email to petri@digip.org.

39
android/jansson_config.h
Normal file
39
android/jansson_config.h
Normal file
|
@ -0,0 +1,39 @@
|
|||
/*
|
||||
* Copyright (c) 2010-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*
|
||||
*
|
||||
* This file specifies a part of the site-specific configuration for
|
||||
* Jansson, namely those things that affect the public API in
|
||||
* jansson.h.
|
||||
*
|
||||
* The configure script copies this file to jansson_config.h and
|
||||
* replaces @var@ substitutions by values that fit your system. If you
|
||||
* cannot run the configure script, you can do the value substitution
|
||||
* by hand.
|
||||
*/
|
||||
|
||||
#ifndef JANSSON_CONFIG_H
|
||||
#define JANSSON_CONFIG_H
|
||||
|
||||
/* If your compiler supports the inline keyword in C, JSON_INLINE is
|
||||
defined to `inline', otherwise empty. In C++, the inline is always
|
||||
supported. */
|
||||
#ifdef __cplusplus
|
||||
#define JSON_INLINE inline
|
||||
#else
|
||||
#define JSON_INLINE inline
|
||||
#endif
|
||||
|
||||
/* If your compiler supports the `long long` type and the strtoll()
|
||||
library function, JSON_INTEGER_IS_LONG_LONG is defined to 1,
|
||||
otherwise to 0. */
|
||||
#define JSON_INTEGER_IS_LONG_LONG 1
|
||||
|
||||
/* Maximum recursion depth for parsing JSON input.
|
||||
This limits the depth of e.g. array-within-array constructions. */
|
||||
#define JSON_PARSER_MAX_DEPTH 2048
|
||||
|
||||
#endif
|
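
This Android copy of the header hard-codes ``JSON_INTEGER_IS_LONG_LONG`` to 1,
so JSON integers map to ``long long``. A small illustrative sketch (the helper
name is hypothetical) of what that means for callers:

    #include <stdio.h>
    #include <jansson.h>

    static void integer_demo(void) {
        /* With JSON_INTEGER_IS_LONG_LONG == 1, json_int_t is long long,
           so values beyond 32 bits round-trip without truncation. */
        json_t *big = json_integer(4294967296LL); /* 2^32 */
        json_int_t value = json_integer_value(big);
        printf("%lld\n", (long long)value);
        json_decref(big);
    }
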
appveyor.yml (new file, 18 lines)
@@ -0,0 +1,18 @@
environment:
  matrix:
    - VS: Visual Studio 9 2008
    - VS: Visual Studio 10 2010
    - VS: Visual Studio 11 2012
    - VS: Visual Studio 12 2013
    - VS: Visual Studio 14 2015
    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
      VS: Visual Studio 15 2017
    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
      VS: Visual Studio 16 2019

build_script:
  - md build
  - cd build
  - cmake -G "%VS%" ..
  - cmake --build . --config Release
  - ctest --output-on-failure

cmake/CheckFunctionKeywords.cmake (new file, 15 lines)
@@ -0,0 +1,15 @@
include(CheckCSourceCompiles)

macro(check_function_keywords _wordlist)
  set(${_result} "")
  foreach(flag ${_wordlist})
    string(REGEX REPLACE "[-+/ ()]" "_" flagname "${flag}")
    string(TOUPPER "${flagname}" flagname)
    set(have_flag "HAVE_${flagname}")
    check_c_source_compiles("${flag} void func(); void func() { } int main() { func(); return 0; }" ${have_flag})
    if(${have_flag} AND NOT ${_result})
      set(${_result} "${flag}")
      # break()
    endif(${have_flag} AND NOT ${_result})
  endforeach(flag)
endmacro(check_function_keywords)
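
For each keyword in the list, ``check_c_source_compiles`` builds a tiny probe
program from the string shown above. For the keyword ``__inline``, for
instance, the generated translation unit is roughly the following; if it
compiles, ``HAVE___INLINE`` is set:

    /* Probe generated by check_function_keywords for "__inline" */
    __inline void func();
    void func() { }
    int main() { func(); return 0; }
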
163
cmake/CodeCoverage.cmake
Normal file
163
cmake/CodeCoverage.cmake
Normal file
|
@ -0,0 +1,163 @@
|
|||
#
|
||||
# Boost Software License - Version 1.0 - August 17th, 2003
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person or organization
|
||||
# obtaining a copy of the software and accompanying documentation covered by
|
||||
# this license (the "Software") to use, reproduce, display, distribute,
|
||||
# execute, and transmit the Software, and to prepare derivative works of the
|
||||
# Software, and to permit third-parties to whom the Software is furnished to
|
||||
# do so, all subject to the following:
|
||||
#
|
||||
# The copyright notices in the Software and this entire statement, including
|
||||
# the above license grant, this restriction and the following disclaimer,
|
||||
# must be included in all copies of the Software, in whole or in part, and
|
||||
# all derivative works of the Software, unless such copies or derivative
|
||||
# works are solely in the form of machine-executable object code generated by
|
||||
# a source language processor.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
|
||||
# SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
|
||||
# FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
|
||||
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
# DEALINGS IN THE SOFTWARE.
|
||||
#
|
||||
# 2012-01-31, Lars Bilke
|
||||
# - Enable Code Coverage
|
||||
#
|
||||
# 2013-09-17, Joakim Söderberg
|
||||
# - Added support for Clang.
|
||||
# - Some additional usage instructions.
|
||||
#
|
||||
# USAGE:
|
||||
# 1. Copy this file into your cmake modules path.
|
||||
#
|
||||
# 2. Add the following line to your CMakeLists.txt:
|
||||
# INCLUDE(CodeCoverage)
|
||||
#
|
||||
# 3. Set compiler flags to turn off optimization and enable coverage:
|
||||
# SET(CMAKE_CXX_FLAGS "-g -O0 -fprofile-arcs -ftest-coverage")
|
||||
# SET(CMAKE_C_FLAGS "-g -O0 -fprofile-arcs -ftest-coverage")
|
||||
#
|
||||
# 3. Use the function SETUP_TARGET_FOR_COVERAGE to create a custom make target
|
||||
# which runs your test executable and produces a lcov code coverage report:
|
||||
# Example:
|
||||
# SETUP_TARGET_FOR_COVERAGE(
|
||||
# my_coverage_target # Name for custom target.
|
||||
# test_driver # Name of the test driver executable that runs the tests.
|
||||
# # NOTE! This should always have a ZERO as exit code
|
||||
# # otherwise the coverage generation will not complete.
|
||||
# coverage # Name of output directory.
|
||||
# )
|
||||
#
|
||||
# 4. Build a Debug build:
|
||||
# cmake -DCMAKE_BUILD_TYPE=Debug ..
|
||||
# make
|
||||
# make my_coverage_target
|
||||
#
|
||||
#
|
||||
|
||||
# Check prereqs
|
||||
FIND_PROGRAM( GCOV_PATH gcov )
|
||||
FIND_PROGRAM( LCOV_PATH lcov )
|
||||
FIND_PROGRAM( GENHTML_PATH genhtml )
|
||||
FIND_PROGRAM( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/tests)
|
||||
|
||||
IF(NOT GCOV_PATH)
|
||||
MESSAGE(FATAL_ERROR "gcov not found! Aborting...")
|
||||
ENDIF() # NOT GCOV_PATH
|
||||
|
||||
IF(NOT (CMAKE_COMPILER_IS_GNUCXX OR CMAKE_COMPILER_IS_GNUCC))
|
||||
# Clang version 3.0.0 and greater now supports gcov as well.
|
||||
MESSAGE(WARNING "Compiler is not GNU gcc! Clang Version 3.0.0 and greater supports gcov as well, but older versions don't.")
|
||||
|
||||
IF(NOT ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" OR "${CMAKE_C_COMPILER_ID}" STREQUAL "Clang"))
|
||||
MESSAGE(FATAL_ERROR "Compiler is not GNU gcc or Clang! Aborting...")
|
||||
ENDIF()
|
||||
ENDIF() # NOT CMAKE_COMPILER_IS_GNUCXX
|
||||
|
||||
IF ( NOT CMAKE_BUILD_TYPE STREQUAL "Debug" )
|
||||
MESSAGE( WARNING "Code coverage results with an optimized (non-Debug) build may be misleading" )
|
||||
ENDIF() # NOT CMAKE_BUILD_TYPE STREQUAL "Debug"
|
||||
|
||||
|
||||
# Param _targetname The name of new the custom make target
|
||||
# Param _outputname lcov output is generated as _outputname.info
|
||||
# HTML report is generated in _outputname/index.html
|
||||
# Param _testrunner The name of the target which runs the tests.
|
||||
# MUST return ZERO always, even on errors.
|
||||
# If not, no coverage report will be created!
|
||||
# Optional fourth parameter is passed as arguments to _testrunner
|
||||
# Pass them in list form, e.g.: "-j;2" for -j 2
|
||||
FUNCTION(SETUP_TARGET_FOR_COVERAGE _targetname _outputname _testrunner)
|
||||
|
||||
IF(NOT LCOV_PATH)
|
||||
MESSAGE(FATAL_ERROR "lcov not found! Aborting...")
|
||||
ENDIF() # NOT LCOV_PATH
|
||||
|
||||
IF(NOT GENHTML_PATH)
|
||||
MESSAGE(FATAL_ERROR "genhtml not found! Aborting...")
|
||||
ENDIF() # NOT GENHTML_PATH
|
||||
|
||||
# Setup target
|
||||
ADD_CUSTOM_TARGET(${_targetname}
|
||||
|
||||
# Cleanup lcov
|
||||
${LCOV_PATH} --directory . --zerocounters
|
||||
|
||||
# Run tests
|
||||
COMMAND ${_testrunner} ${ARGV3}
|
||||
|
||||
# Capturing lcov counters and generating report
|
||||
COMMAND ${LCOV_PATH} --directory . --capture --output-file ${_outputname}.info --rc lcov_branch_coverage=1
|
||||
COMMAND ${LCOV_PATH} --remove ${_outputname}.info '*/build/include/*' '*/test/*' '/usr/include/*' --output-file ${_outputname}.info --rc lcov_branch_coverage=1
|
||||
# COMMAND ${GENHTML_PATH} --branch-coverage -o ${_outputname} ${_outputname}.info.cleaned
|
||||
# COMMAND ${CMAKE_COMMAND} -E remove ${_outputname}.info ${_outputname}.info.cleaned
|
||||
|
||||
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
|
||||
COMMENT "Resetting code coverage counters to zero.\nProcessing code coverage counters and generating report."
|
||||
)
|
||||
|
||||
# Show info where to find the report
|
||||
ADD_CUSTOM_COMMAND(TARGET ${_targetname} POST_BUILD
|
||||
COMMAND ;
|
||||
COMMENT "Open ./${_outputname}/index.html in your browser to view the coverage report."
|
||||
)
|
||||
|
||||
ENDFUNCTION() # SETUP_TARGET_FOR_COVERAGE
|
||||
|
||||
# Param _targetname The name of new the custom make target
|
||||
# Param _testrunner The name of the target which runs the tests
|
||||
# Param _outputname cobertura output is generated as _outputname.xml
|
||||
# Optional fourth parameter is passed as arguments to _testrunner
|
||||
# Pass them in list form, e.g.: "-j;2" for -j 2
|
||||
FUNCTION(SETUP_TARGET_FOR_COVERAGE_COBERTURA _targetname _testrunner _outputname)
|
||||
|
||||
IF(NOT PYTHON_EXECUTABLE)
|
||||
MESSAGE(FATAL_ERROR "Python not found! Aborting...")
|
||||
ENDIF() # NOT PYTHON_EXECUTABLE
|
||||
|
||||
IF(NOT GCOVR_PATH)
|
||||
MESSAGE(FATAL_ERROR "gcovr not found! Aborting...")
|
||||
ENDIF() # NOT GCOVR_PATH
|
||||
|
||||
ADD_CUSTOM_TARGET(${_targetname}
|
||||
|
||||
# Run tests
|
||||
${_testrunner} ${ARGV3}
|
||||
|
||||
# Running gcovr
|
||||
COMMAND ${GCOVR_PATH} -x -r ${CMAKE_SOURCE_DIR} -e '${CMAKE_SOURCE_DIR}/tests/' -o ${_outputname}.xml
|
||||
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
|
||||
COMMENT "Running gcovr to produce Cobertura code coverage report."
|
||||
)
|
||||
|
||||
# Show info where to find the report
|
||||
ADD_CUSTOM_COMMAND(TARGET ${_targetname} POST_BUILD
|
||||
COMMAND ;
|
||||
COMMENT "Cobertura code coverage report saved in ${_outputname}.xml."
|
||||
)
|
||||
|
||||
ENDFUNCTION() # SETUP_TARGET_FOR_COVERAGE_COBERTURA
|
||||
|
315
cmake/FindSphinx.cmake
Normal file
315
cmake/FindSphinx.cmake
Normal file
|
@ -0,0 +1,315 @@
|
|||
#
|
||||
# PART B. DOWNLOADING AGREEMENT - LICENSE FROM SBIA WITH RIGHT TO SUBLICENSE ("SOFTWARE LICENSE").
|
||||
# ------------------------------------------------------------------------------------------------
|
||||
#
|
||||
# 1. As used in this Software License, "you" means the individual downloading and/or
|
||||
# using, reproducing, modifying, displaying and/or distributing the Software and
|
||||
# the institution or entity which employs or is otherwise affiliated with such
|
||||
# individual in connection therewith. The Section of Biomedical Image Analysis,
|
||||
# Department of Radiology at the Universiy of Pennsylvania ("SBIA") hereby grants
|
||||
# you, with right to sublicense, with respect to SBIA's rights in the software,
|
||||
# and data, if any, which is the subject of this Software License (collectively,
|
||||
# the "Software"), a royalty-free, non-exclusive license to use, reproduce, make
|
||||
# derivative works of, display and distribute the Software, provided that:
|
||||
# (a) you accept and adhere to all of the terms and conditions of this Software
|
||||
# License; (b) in connection with any copy of or sublicense of all or any portion
|
||||
# of the Software, all of the terms and conditions in this Software License shall
|
||||
# appear in and shall apply to such copy and such sublicense, including without
|
||||
# limitation all source and executable forms and on any user documentation,
|
||||
# prefaced with the following words: "All or portions of this licensed product
|
||||
# (such portions are the "Software") have been obtained under license from the
|
||||
# Section of Biomedical Image Analysis, Department of Radiology at the University
|
||||
# of Pennsylvania and are subject to the following terms and conditions:"
|
||||
# (c) you preserve and maintain all applicable attributions, copyright notices
|
||||
# and licenses included in or applicable to the Software; (d) modified versions
|
||||
# of the Software must be clearly identified and marked as such, and must not
|
||||
# be misrepresented as being the original Software; and (e) you consider making,
|
||||
# but are under no obligation to make, the source code of any of your modifications
|
||||
# to the Software freely available to others on an open source basis.
|
||||
#
|
||||
# 2. The license granted in this Software License includes without limitation the
|
||||
# right to (i) incorporate the Software into proprietary programs (subject to
|
||||
# any restrictions applicable to such programs), (ii) add your own copyright
|
||||
# statement to your modifications of the Software, and (iii) provide additional
|
||||
# or different license terms and conditions in your sublicenses of modifications
|
||||
# of the Software; provided that in each case your use, reproduction or
|
||||
# distribution of such modifications otherwise complies with the conditions
|
||||
# stated in this Software License.
|
||||
#
|
||||
# 3. This Software License does not grant any rights with respect to third party
|
||||
# software, except those rights that SBIA has been authorized by a third
|
||||
# party to grant to you, and accordingly you are solely responsible for
|
||||
# (i) obtaining any permissions from third parties that you need to use,
|
||||
# reproduce, make derivative works of, display and distribute the Software,
|
||||
# and (ii) informing your sublicensees, including without limitation your
|
||||
# end-users, of their obligations to secure any such required permissions.
|
||||
#
|
||||
# 4. The Software has been designed for research purposes only and has not been
|
||||
# reviewed or approved by the Food and Drug Administration or by any other
|
||||
# agency. YOU ACKNOWLEDGE AND AGREE THAT CLINICAL APPLICATIONS ARE NEITHER
|
||||
# RECOMMENDED NOR ADVISED. Any commercialization of the Software is at the
|
||||
# sole risk of the party or parties engaged in such commercialization.
|
||||
# You further agree to use, reproduce, make derivative works of, display
|
||||
# and distribute the Software in compliance with all applicable governmental
|
||||
# laws, regulations and orders, including without limitation those relating
|
||||
# to export and import control.
|
||||
#
|
||||
# 5. The Software is provided "AS IS" and neither SBIA nor any contributor to
|
||||
# the software (each a "Contributor") shall have any obligation to provide
|
||||
# maintenance, support, updates, enhancements or modifications thereto.
|
||||
# SBIA AND ALL CONTRIBUTORS SPECIFICALLY DISCLAIM ALL EXPRESS AND IMPLIED
|
||||
# WARRANTIES OF ANY KIND INCLUDING, BUT NOT LIMITED TO, ANY WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
|
||||
# IN NO EVENT SHALL SBIA OR ANY CONTRIBUTOR BE LIABLE TO ANY PARTY FOR
|
||||
# DIRECT, INDIRECT, SPECIAL, INCIDENTAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES
|
||||
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ARISING IN ANY WAY RELATED
|
||||
# TO THE SOFTWARE, EVEN IF SBIA OR ANY CONTRIBUTOR HAS BEEN ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGES. TO THE MAXIMUM EXTENT NOT PROHIBITED BY LAW OR
|
||||
# REGULATION, YOU FURTHER ASSUME ALL LIABILITY FOR YOUR USE, REPRODUCTION,
|
||||
# MAKING OF DERIVATIVE WORKS, DISPLAY, LICENSE OR DISTRIBUTION OF THE SOFTWARE
|
||||
# AND AGREE TO INDEMNIFY AND HOLD HARMLESS SBIA AND ALL CONTRIBUTORS FROM
|
||||
# AND AGAINST ANY AND ALL CLAIMS, SUITS, ACTIONS, DEMANDS AND JUDGMENTS ARISING
|
||||
# THEREFROM.
|
||||
#
|
||||
# 6. None of the names, logos or trademarks of SBIA or any of SBIA's affiliates
|
||||
# or any of the Contributors, or any funding agency, may be used to endorse
|
||||
# or promote products produced in whole or in part by operation of the Software
|
||||
# or derived from or based on the Software without specific prior written
|
||||
# permission from the applicable party.
|
||||
#
|
||||
# 7. Any use, reproduction or distribution of the Software which is not in accordance
|
||||
# with this Software License shall automatically revoke all rights granted to you
|
||||
# under this Software License and render Paragraphs 1 and 2 of this Software
|
||||
# License null and void.
|
||||
#
|
||||
# 8. This Software License does not grant any rights in or to any intellectual
|
||||
# property owned by SBIA or any Contributor except those rights expressly
|
||||
# granted hereunder.
|
||||
#
|
||||
#
|
||||
# PART C. MISCELLANEOUS
|
||||
# ---------------------
|
||||
#
|
||||
# This Agreement shall be governed by and construed in accordance with the laws
|
||||
# of The Commonwealth of Pennsylvania without regard to principles of conflicts
|
||||
# of law. This Agreement shall supercede and replace any license terms that you
|
||||
# may have agreed to previously with respect to Software from SBIA.
|
||||
#
|
||||
##############################################################################
|
||||
# @file FindSphinx.cmake
|
||||
# @brief Find Sphinx documentation build tools.
|
||||
#
|
||||
# @par Input variables:
|
||||
# <table border="0">
|
||||
# <tr>
|
||||
# @tp @b Sphinx_DIR @endtp
|
||||
# <td>Installation directory of Sphinx tools. Can also be set as environment variable.</td>
|
||||
# </tr>
|
||||
# <tr>
|
||||
# @tp @b SPHINX_DIR @endtp
|
||||
# <td>Alternative environment variable for @c Sphinx_DIR.</td>
|
||||
# </tr>
|
||||
# <tr>
|
||||
# @tp @b Sphinx_FIND_COMPONENTS @endtp
|
||||
# <td>Sphinx build tools to look for, i.e., 'apidoc' and/or 'build'.</td>
|
||||
# </tr>
|
||||
# </table>
|
||||
#
|
||||
# @par Output variables:
|
||||
# <table border="0">
|
||||
# <tr>
|
||||
# @tp @b Sphinx_FOUND @endtp
|
||||
# <td>Whether all or only the requested Sphinx build tools were found.</td>
|
||||
# </tr>
|
||||
# <tr>
|
||||
# @tp @b SPHINX_FOUND @endtp
|
||||
# <td>Alias for @c Sphinx_FOUND.<td>
|
||||
# </tr>
|
||||
# <tr>
|
||||
# @tp @b SPHINX_EXECUTABLE @endtp
|
||||
# <td>Non-cached alias for @c Sphinx-build_EXECUTABLE.</td>
|
||||
# </tr>
|
||||
# <tr>
|
||||
# @tp @b Sphinx_PYTHON_EXECUTABLE @endtp
|
||||
# <td>Python executable used to run sphinx-build. This is either the
|
||||
# by default found Python interpreter or a specific version as
|
||||
# specified by the shebang (#!) of the sphinx-build script.</td>
|
||||
# </tr>
|
||||
# <tr>
|
||||
# @tp @b Sphinx_PYTHON_OPTIONS @endtp
|
||||
# <td>A list of Python options extracted from the shebang (#!) of the
|
||||
# sphinx-build script. The -E option is added by this module
|
||||
# if the Python executable is not the system default to avoid
|
||||
# problems with a differing setting of the @c PYTHONHOME.</td>
|
||||
# </tr>
|
||||
# <tr>
|
||||
# @tp @b Sphinx-build_EXECUTABLE @endtp
|
||||
# <td>Absolute path of the found sphinx-build tool.</td>
|
||||
# </tr>
|
||||
# <tr>
|
||||
# @tp @b Sphinx-apidoc_EXECUTABLE @endtp
|
||||
# <td>Absolute path of the found sphinx-apidoc tool.</td>
|
||||
# </tr>
|
||||
# <tr>
|
||||
# @tp @b Sphinx_VERSION_STRING @endtp
|
||||
# <td>Sphinx version found e.g. 1.1.2.</td>
|
||||
# </tr>
|
||||
# <tr>
|
||||
# @tp @b Sphinx_VERSION_MAJOR @endtp
|
||||
# <td>Sphinx major version found e.g. 1.</td>
|
||||
# </tr>
|
||||
# <tr>
|
||||
# @tp @b Sphinx_VERSION_MINOR @endtp
|
||||
# <td>Sphinx minor version found e.g. 1.</td>
|
||||
# </tr>
|
||||
# <tr>
|
||||
# @tp @b Sphinx_VERSION_PATCH @endtp
|
||||
# <td>Sphinx patch version found e.g. 2.</td>
|
||||
# </tr>
|
||||
# </table>
|
||||
#
|
||||
# @ingroup CMakeFindModules
|
||||
##############################################################################
|
||||
|
||||
set (_Sphinx_REQUIRED_VARS)
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# initialize search
|
||||
if (NOT Sphinx_DIR)
|
||||
if (NOT $ENV{Sphinx_DIR} STREQUAL "")
|
||||
set (Sphinx_DIR "$ENV{Sphinx_DIR}" CACHE PATH "Installation prefix of Sphinx (docutils)." FORCE)
|
||||
else ()
|
||||
set (Sphinx_DIR "$ENV{SPHINX_DIR}" CACHE PATH "Installation prefix of Sphinx (docutils)." FORCE)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# default components to look for
|
||||
if (NOT Sphinx_FIND_COMPONENTS)
|
||||
set (Sphinx_FIND_COMPONENTS "build")
|
||||
elseif (NOT Sphinx_FIND_COMPONENTS MATCHES "^(build|apidoc)$")
|
||||
message (FATAL_ERROR "Invalid Sphinx component in: ${Sphinx_FIND_COMPONENTS}")
|
||||
endif ()
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# find components, i.e., build tools
|
||||
foreach (_Sphinx_TOOL IN LISTS Sphinx_FIND_COMPONENTS)
|
||||
if (Sphinx_DIR)
|
||||
find_program (
|
||||
Sphinx-${_Sphinx_TOOL}_EXECUTABLE
|
||||
NAMES sphinx-${_Sphinx_TOOL} sphinx-${_Sphinx_TOOL}.py
|
||||
HINTS "${Sphinx_DIR}"
|
||||
PATH_SUFFIXES bin
|
||||
DOC "The sphinx-${_Sphinx_TOOL} Python script."
|
||||
NO_DEFAULT_PATH
|
||||
)
|
||||
else ()
|
||||
find_program (
|
||||
Sphinx-${_Sphinx_TOOL}_EXECUTABLE
|
||||
NAMES sphinx-${_Sphinx_TOOL} sphinx-${_Sphinx_TOOL}.py
|
||||
DOC "The sphinx-${_Sphinx_TOOL} Python script."
|
||||
)
|
||||
endif ()
|
||||
mark_as_advanced (Sphinx-${_Sphinx_TOOL}_EXECUTABLE)
|
||||
list (APPEND _Sphinx_REQUIRED_VARS Sphinx-${_Sphinx_TOOL}_EXECUTABLE)
|
||||
endforeach ()
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# determine Python executable used by Sphinx
|
||||
if (Sphinx-build_EXECUTABLE)
|
||||
# extract python executable from shebang of sphinx-build
|
||||
find_package (PythonInterp QUIET)
|
||||
set (Sphinx_PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE}")
|
||||
set (Sphinx_PYTHON_OPTIONS)
|
||||
file (STRINGS "${Sphinx-build_EXECUTABLE}" FIRST_LINE LIMIT_COUNT 1)
|
||||
if (FIRST_LINE MATCHES "^#!(.*/python.*)") # does not match "#!/usr/bin/env python" !
|
||||
string (REGEX REPLACE "^ +| +$" "" Sphinx_PYTHON_EXECUTABLE "${CMAKE_MATCH_1}")
|
||||
if (Sphinx_PYTHON_EXECUTABLE MATCHES "([^ ]+) (.*)")
|
||||
set (Sphinx_PYTHON_EXECUTABLE "${CMAKE_MATCH_1}")
|
||||
string (REGEX REPLACE " +" ";" Sphinx_PYTHON_OPTIONS "${CMAKE_MATCH_2}")
|
||||
endif ()
|
||||
endif ()
|
||||
# this is done to avoid problems with multiple Python versions being installed
|
||||
# remember: CMake command if(STR EQUAL STR) is bad and may cause many troubles !
|
||||
string (REGEX REPLACE "([.+*?^$])" "\\\\\\1" _Sphinx_PYTHON_EXECUTABLE_RE "${PYTHON_EXECUTABLE}")
|
||||
list (FIND Sphinx_PYTHON_OPTIONS -E IDX)
|
||||
if (IDX EQUAL -1 AND NOT Sphinx_PYTHON_EXECUTABLE MATCHES "^${_Sphinx_PYTHON_EXECUTABLE_RE}$")
|
||||
list (INSERT Sphinx_PYTHON_OPTIONS 0 -E)
|
||||
endif ()
|
||||
unset (_Sphinx_PYTHON_EXECUTABLE_RE)
|
||||
endif ()
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# determine Sphinx version
|
||||
# some quick experiments by @ploxiln
|
||||
# - sphinx 1.7 and later have the version output format like "sphinx-build 1.7.2"
|
||||
# - sphinx 1.2 through 1.6 have the version output format like "Sphinx (sphinx-build) 1.2.2"
|
||||
# - sphinx 1.1 and before do not have a "--version" flag, but it causes the help output like "-h" does which includes version like "Sphinx v1.0.2"
|
||||
if (Sphinx-build_EXECUTABLE)
|
||||
# intentionally use invalid -h option here as the help that is shown then
|
||||
# will include the Sphinx version information
|
||||
if (Sphinx_PYTHON_EXECUTABLE)
|
||||
execute_process (
|
||||
COMMAND "${Sphinx_PYTHON_EXECUTABLE}" ${Sphinx_PYTHON_OPTIONS} "${Sphinx-build_EXECUTABLE}" --version
|
||||
OUTPUT_VARIABLE _Sphinx_VERSION
|
||||
ERROR_VARIABLE _Sphinx_VERSION
|
||||
)
|
||||
elseif (UNIX)
|
||||
execute_process (
|
||||
COMMAND "${Sphinx-build_EXECUTABLE}" --version
|
||||
OUTPUT_VARIABLE _Sphinx_VERSION
|
||||
ERROR_VARIABLE _Sphinx_VERSION
|
||||
)
|
||||
endif ()
|
||||
|
||||
# The sphinx version can also contain a "b" instead of the last dot.
|
||||
# For example "Sphinx v1.2b1" or "Sphinx 1.7.0b2" so we cannot just split on "."
|
||||
if (_Sphinx_VERSION MATCHES "sphinx-build ([0-9]+\\.[0-9]+(\\.|a?|b?)([0-9]*)(b?)([0-9]*))")
|
||||
set (Sphinx_VERSION_STRING "${CMAKE_MATCH_1}")
|
||||
set (_SPHINX_VERSION_FOUND)
|
||||
elseif (_Sphinx_VERSION MATCHES "Sphinx v([0-9]+\\.[0-9]+(\\.|b?)([0-9]*)(b?)([0-9]*))")
|
||||
set (Sphinx_VERSION_STRING "${CMAKE_MATCH_1}")
|
||||
set (_SPHINX_VERSION_FOUND)
|
||||
elseif (_Sphinx_VERSION MATCHES "Sphinx \\(sphinx-build\\) ([0-9]+\\.[0-9]+(\\.|a?|b?)([0-9]*)(b?)([0-9]*))")
|
||||
set (Sphinx_VERSION_STRING "${CMAKE_MATCH_1}")
|
||||
set (_SPHINX_VERSION_FOUND)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
if(_SPHINX_VERSION_FOUND)
|
||||
string(REGEX REPLACE "([0-9]+)\\.[0-9]+(\\.|b)[0-9]+" "\\1" Sphinx_VERSION_MAJOR ${Sphinx_VERSION_STRING})
|
||||
string(REGEX REPLACE "[0-9]+\\.([0-9]+)(\\.|b)[0-9]+" "\\1" Sphinx_VERSION_MINOR ${Sphinx_VERSION_STRING})
|
||||
string(REGEX REPLACE "[0-9]+\\.[0-9]+(\\.|b)([0-9]+)" "\\1" Sphinx_VERSION_PATCH ${Sphinx_VERSION_STRING})
|
||||
|
||||
# v1.2.0 -> v1.2
|
||||
if (Sphinx_VERSION_PATCH EQUAL 0)
|
||||
string (REGEX REPLACE "\\.0$" "" Sphinx_VERSION_STRING "${Sphinx_VERSION_STRING}")
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# compatibility with FindPythonInterp.cmake and FindPerl.cmake
|
||||
set (SPHINX_EXECUTABLE "${Sphinx-build_EXECUTABLE}")
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# handle the QUIETLY and REQUIRED arguments and set SPHINX_FOUND to TRUE if
|
||||
# all listed variables are TRUE
|
||||
include (FindPackageHandleStandardArgs)
|
||||
FIND_PACKAGE_HANDLE_STANDARD_ARGS (
|
||||
Sphinx
|
||||
REQUIRED_VARS
|
||||
${_Sphinx_REQUIRED_VARS}
|
||||
# VERSION_VAR # This isn't available until CMake 2.8.8 so don't use it.
|
||||
Sphinx_VERSION_STRING
|
||||
)
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# set Sphinx_DIR
|
||||
if (NOT Sphinx_DIR AND Sphinx-build_EXECUTABLE)
|
||||
get_filename_component (Sphinx_DIR "${Sphinx-build_EXECUTABLE}" PATH)
|
||||
string (REGEX REPLACE "/bin/?" "" Sphinx_DIR "${Sphinx_DIR}")
|
||||
set (Sphinx_DIR "${Sphinx_DIR}" CACHE PATH "Installation directory of Sphinx tools." FORCE)
|
||||
endif ()
|
||||
|
||||
unset (_Sphinx_VERSION)
|
||||
unset (_Sphinx_REQUIRED_VARS)
|
cmake/janssonConfig.cmake.in (new file, 4 lines)
@@ -0,0 +1,4 @@
@PACKAGE_INIT@

include("${CMAKE_CURRENT_LIST_DIR}/janssonTargets.cmake")
check_required_components("@PROJECT_NAME@")

72
cmake/jansson_config.h.cmake
Normal file
72
cmake/jansson_config.h.cmake
Normal file
|
@ -0,0 +1,72 @@
|
|||
/*
|
||||
* Copyright (c) 2010-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*
|
||||
*
|
||||
* This file specifies a part of the site-specific configuration for
|
||||
* Jansson, namely those things that affect the public API in
|
||||
* jansson.h.
|
||||
*
|
||||
* The CMake system will generate the jansson_config.h file and
|
||||
* copy it to the build and install directories.
|
||||
*/
|
||||
|
||||
#ifndef JANSSON_CONFIG_H
|
||||
#define JANSSON_CONFIG_H
|
||||
|
||||
/* Define this so that we can disable scattered automake configuration in source files */
|
||||
#ifndef JANSSON_USING_CMAKE
|
||||
#define JANSSON_USING_CMAKE
|
||||
#endif
|
||||
|
||||
/* If your compiler supports the `long long` type and the strtoll()
|
||||
library function, JSON_INTEGER_IS_LONG_LONG is defined to 1,
|
||||
otherwise to 0. */
|
||||
#cmakedefine JSON_INTEGER_IS_LONG_LONG 1
|
||||
|
||||
/* Bring in the cmake-detected defines */
|
||||
#cmakedefine HAVE_STDINT_H 1
|
||||
#cmakedefine HAVE_INTTYPES_H 1
|
||||
#cmakedefine HAVE_SYS_TYPES_H 1
|
||||
|
||||
/* Include our standard type header for the integer typedef */
|
||||
|
||||
#if defined(HAVE_STDINT_H)
|
||||
# include <stdint.h>
|
||||
#elif defined(HAVE_INTTYPES_H)
|
||||
# include <inttypes.h>
|
||||
#elif defined(HAVE_SYS_TYPES_H)
|
||||
# include <sys/types.h>
|
||||
#endif
|
||||
|
||||
|
||||
/* If your compiler supports the inline keyword in C, JSON_INLINE is
|
||||
defined to `inline', otherwise empty. In C++, the inline is always
|
||||
supported. */
|
||||
#ifdef __cplusplus
|
||||
#define JSON_INLINE inline
|
||||
#else
|
||||
#define JSON_INLINE @JSON_INLINE@
|
||||
#endif
|
||||
|
||||
|
||||
#define json_int_t @JSON_INT_T@
|
||||
#define json_strtoint @JSON_STRTOINT@
|
||||
#define JSON_INTEGER_FORMAT @JSON_INTEGER_FORMAT@
|
||||
|
||||
|
||||
/* If __atomic builtins are available they will be used to manage
|
||||
reference counts of json_t. */
|
||||
#define JSON_HAVE_ATOMIC_BUILTINS @JSON_HAVE_ATOMIC_BUILTINS@
|
||||
|
||||
/* If __atomic builtins are not available we try using __sync builtins
|
||||
to manage reference counts of json_t. */
|
||||
#define JSON_HAVE_SYNC_BUILTINS @JSON_HAVE_SYNC_BUILTINS@
|
||||
|
||||
/* Maximum recursion depth for parsing JSON input.
|
||||
This limits the depth of e.g. array-within-array constructions. */
|
||||
#define JSON_PARSER_MAX_DEPTH 2048
|
||||
|
||||
#endif
|
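
The ``json_int_t``/``JSON_INTEGER_FORMAT`` pair substituted above is what
portable code should use when printing JSON integers. A short sketch (the
helper name is hypothetical):

    #include <stdio.h>
    #include <jansson.h>

    static void print_integer(const json_t *value) {
        /* JSON_INTEGER_FORMAT expands to "lld" or "ld" depending on how
           json_int_t was configured, so the format string stays portable. */
        if (json_is_integer(value))
            printf("%" JSON_INTEGER_FORMAT "\n", json_integer_value(value));
    }
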
62
cmake/jansson_private_config.h.cmake
Normal file
62
cmake/jansson_private_config.h.cmake
Normal file
|
@ -0,0 +1,62 @@
|
|||
#cmakedefine HAVE_ENDIAN_H 1
|
||||
#cmakedefine HAVE_FCNTL_H 1
|
||||
#cmakedefine HAVE_SCHED_H 1
|
||||
#cmakedefine HAVE_UNISTD_H 1
|
||||
#cmakedefine HAVE_SYS_PARAM_H 1
|
||||
#cmakedefine HAVE_SYS_STAT_H 1
|
||||
#cmakedefine HAVE_SYS_TIME_H 1
|
||||
#cmakedefine HAVE_SYS_TYPES_H 1
|
||||
#cmakedefine HAVE_STDINT_H 1
|
||||
|
||||
#cmakedefine HAVE_CLOSE 1
|
||||
#cmakedefine HAVE_GETPID 1
|
||||
#cmakedefine HAVE_GETTIMEOFDAY 1
|
||||
#cmakedefine HAVE_OPEN 1
|
||||
#cmakedefine HAVE_READ 1
|
||||
#cmakedefine HAVE_SCHED_YIELD 1
|
||||
|
||||
#cmakedefine HAVE_SYNC_BUILTINS 1
|
||||
#cmakedefine HAVE_ATOMIC_BUILTINS 1
|
||||
|
||||
#cmakedefine HAVE_LOCALE_H 1
|
||||
#cmakedefine HAVE_SETLOCALE 1
|
||||
|
||||
#cmakedefine WORDS_BIGENDIAN 1
|
||||
|
||||
#cmakedefine HAVE_INT32_T 1
|
||||
#ifndef HAVE_INT32_T
|
||||
# define int32_t @JSON_INT32@
|
||||
#endif
|
||||
|
||||
#cmakedefine HAVE_UINT32_T 1
|
||||
#ifndef HAVE_UINT32_T
|
||||
# define uint32_t @JSON_UINT32@
|
||||
#endif
|
||||
|
||||
#cmakedefine HAVE_UINT16_T 1
|
||||
#ifndef HAVE_UINT16_T
|
||||
# define uint16_t @JSON_UINT16@
|
||||
#endif
|
||||
|
||||
#cmakedefine HAVE_UINT8_T 1
|
||||
#ifndef HAVE_UINT8_T
|
||||
# define uint8_t @JSON_UINT8@
|
||||
#endif
|
||||
|
||||
#cmakedefine HAVE_SSIZE_T 1
|
||||
|
||||
#ifndef HAVE_SSIZE_T
|
||||
# define ssize_t @JSON_SSIZE@
|
||||
#endif
|
||||
|
||||
#cmakedefine USE_URANDOM 1
|
||||
#cmakedefine USE_WINDOWS_CRYPTOAPI 1
|
||||
|
||||
#cmakedefine USE_DTOA 1
|
||||
#if USE_DTOA
|
||||
# define DTOA_ENABLED 1
|
||||
#else
|
||||
# define DTOA_ENABLED 0
|
||||
#endif
|
||||
|
||||
#define INITIAL_HASHTABLE_ORDER @JANSSON_INITIAL_HASHTABLE_ORDER@
|
59
config.h.in
59
config.h.in
|
@ -1,59 +0,0 @@
|
|||
/* config.h.in. Generated from configure.ac by autoheader. */
|
||||
|
||||
/* Define to 1 if you have the <dlfcn.h> header file. */
|
||||
#undef HAVE_DLFCN_H
|
||||
|
||||
/* Define to 1 if you have the <inttypes.h> header file. */
|
||||
#undef HAVE_INTTYPES_H
|
||||
|
||||
/* Define to 1 if you have the <memory.h> header file. */
|
||||
#undef HAVE_MEMORY_H
|
||||
|
||||
/* Define to 1 if you have the <stdint.h> header file. */
|
||||
#undef HAVE_STDINT_H
|
||||
|
||||
/* Define to 1 if you have the <stdlib.h> header file. */
|
||||
#undef HAVE_STDLIB_H
|
||||
|
||||
/* Define to 1 if you have the <strings.h> header file. */
|
||||
#undef HAVE_STRINGS_H
|
||||
|
||||
/* Define to 1 if you have the <string.h> header file. */
|
||||
#undef HAVE_STRING_H
|
||||
|
||||
/* Define to 1 if you have the <sys/stat.h> header file. */
|
||||
#undef HAVE_SYS_STAT_H
|
||||
|
||||
/* Define to 1 if you have the <sys/types.h> header file. */
|
||||
#undef HAVE_SYS_TYPES_H
|
||||
|
||||
/* Define to 1 if you have the <unistd.h> header file. */
|
||||
#undef HAVE_UNISTD_H
|
||||
|
||||
/* Define to the sub-directory in which libtool stores uninstalled libraries.
|
||||
*/
|
||||
#undef LT_OBJDIR
|
||||
|
||||
/* Name of package */
|
||||
#undef PACKAGE
|
||||
|
||||
/* Define to the address where bug reports for this package should be sent. */
|
||||
#undef PACKAGE_BUGREPORT
|
||||
|
||||
/* Define to the full name of this package. */
|
||||
#undef PACKAGE_NAME
|
||||
|
||||
/* Define to the full name and version of this package. */
|
||||
#undef PACKAGE_STRING
|
||||
|
||||
/* Define to the one symbol short name of this package. */
|
||||
#undef PACKAGE_TARNAME
|
||||
|
||||
/* Define to the version of this package. */
|
||||
#undef PACKAGE_VERSION
|
||||
|
||||
/* Define to 1 if you have the ANSI C header files. */
|
||||
#undef STDC_HEADERS
|
||||
|
||||
/* Version number of package */
|
||||
#undef VERSION
|
171
configure.ac
171
configure.ac
|
@ -1,29 +1,190 @@
|
|||
AC_PREREQ([2.63])
|
||||
AC_INIT([jansson], [1.0.1], [petri@digip.org])
|
||||
AC_PREREQ([2.60])
|
||||
AC_INIT([jansson], [2.14.1], [https://github.com/akheron/jansson/issues])
|
||||
|
||||
AC_CONFIG_AUX_DIR([.])
|
||||
AM_INIT_AUTOMAKE([1.10 foreign])
|
||||
|
||||
AC_CONFIG_SRCDIR([src/value.c])
|
||||
AC_CONFIG_HEADERS([config.h])
|
||||
AC_CONFIG_HEADERS([jansson_private_config.h])
|
||||
|
||||
# Checks for programs.
|
||||
AC_PROG_CC
|
||||
AC_PROG_CXX
|
||||
AC_PROG_LIBTOOL
|
||||
AM_CONDITIONAL([GCC], [test x$GCC = xyes])
|
||||
|
||||
# Checks for libraries.
|
||||
|
||||
# Checks for header files.
|
||||
AC_CHECK_HEADERS([endian.h fcntl.h locale.h sched.h unistd.h sys/param.h sys/stat.h sys/time.h sys/types.h])
|
||||
|
||||
# Checks for typedefs, structures, and compiler characteristics.
|
||||
AC_TYPE_INT32_T
|
||||
AC_TYPE_UINT32_T
|
||||
AC_TYPE_UINT16_T
|
||||
AC_TYPE_UINT8_T
|
||||
AC_TYPE_LONG_LONG_INT
|
||||
|
||||
AC_C_BIGENDIAN
|
||||
|
||||
AC_C_INLINE
|
||||
case $ac_cv_c_inline in
|
||||
yes) json_inline=inline;;
|
||||
no) json_inline=;;
|
||||
*) json_inline=$ac_cv_c_inline;;
|
||||
esac
|
||||
AC_SUBST([json_inline])
|
||||
|
||||
# Checks for library functions.
|
||||
AC_CHECK_FUNCS([close getpid gettimeofday open read setlocale sched_yield strtoll])
|
||||
|
||||
AC_MSG_CHECKING([for gcc __sync builtins])
|
||||
have_sync_builtins=no
|
||||
AC_TRY_LINK(
|
||||
[], [unsigned long val; __sync_bool_compare_and_swap(&val, 0, 1); __sync_add_and_fetch(&val, 1); __sync_sub_and_fetch(&val, 1);],
|
||||
[have_sync_builtins=yes],
|
||||
)
|
||||
if test "x$have_sync_builtins" = "xyes"; then
|
||||
AC_DEFINE([HAVE_SYNC_BUILTINS], [1],
|
||||
[Define to 1 if gcc's __sync builtins are available])
|
||||
json_have_sync_builtins=1
|
||||
else
|
||||
json_have_sync_builtins=0
|
||||
fi
|
||||
AC_SUBST([json_have_sync_builtins])
|
||||
AC_MSG_RESULT([$have_sync_builtins])
|
||||
|
||||
AC_MSG_CHECKING([for gcc __atomic builtins])
|
||||
have_atomic_builtins=no
|
||||
AC_TRY_LINK(
|
||||
[], [char l; unsigned long v; __atomic_test_and_set(&l, __ATOMIC_RELAXED); __atomic_store_n(&v, 1, __ATOMIC_RELEASE); __atomic_load_n(&v, __ATOMIC_ACQUIRE); __atomic_add_fetch(&v, 1, __ATOMIC_ACQUIRE); __atomic_sub_fetch(&v, 1, __ATOMIC_RELEASE);],
|
||||
[have_atomic_builtins=yes],
|
||||
)
|
||||
if test "x$have_atomic_builtins" = "xyes"; then
|
||||
AC_DEFINE([HAVE_ATOMIC_BUILTINS], [1],
|
||||
[Define to 1 if gcc's __atomic builtins are available])
|
||||
json_have_atomic_builtins=1
|
||||
else
|
||||
json_have_atomic_builtins=0
|
||||
fi
|
||||
AC_SUBST([json_have_atomic_builtins])
|
||||
AC_MSG_RESULT([$have_atomic_builtins])
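
These two checks probe exactly the builtins that can back lock-free reference
counting. A rough sketch of the difference between the two families (an
illustration of the probed builtins, not the library's actual refcount code):

    static void refcount_demo(volatile size_t *refcount) {
    #if defined(HAVE_ATOMIC_BUILTINS)
        __atomic_add_fetch(refcount, 1, __ATOMIC_ACQUIRE);
        __atomic_sub_fetch(refcount, 1, __ATOMIC_RELEASE);
    #elif defined(HAVE_SYNC_BUILTINS)
        __sync_add_and_fetch(refcount, 1);
        __sync_sub_and_fetch(refcount, 1);
    #else
        ++*refcount;   /* non-atomic fallback */
        --*refcount;
    #endif
    }
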
|
||||
|
||||
case "$ac_cv_type_long_long_int$ac_cv_func_strtoll" in
|
||||
yesyes) json_have_long_long=1;;
|
||||
*) json_have_long_long=0;;
|
||||
esac
|
||||
AC_SUBST([json_have_long_long])
|
||||
|
||||
# Features
|
||||
AC_ARG_ENABLE([urandom],
|
||||
[AS_HELP_STRING([--disable-urandom],
|
||||
[Don't use /dev/urandom to seed the hash function])],
|
||||
[use_urandom=$enableval], [use_urandom=yes])
|
||||
|
||||
if test "x$use_urandom" = xyes; then
|
||||
AC_DEFINE([USE_URANDOM], [1],
|
||||
[Define to 1 if /dev/urandom should be used for seeding the hash function])
|
||||
fi
|
||||
|
||||
AC_ARG_ENABLE([windows-cryptoapi],
|
||||
[AS_HELP_STRING([--disable-windows-cryptoapi],
|
||||
[Don't use CryptGenRandom to seed the hash function])],
|
||||
[use_windows_cryptoapi=$enableval], [use_windows_cryptoapi=yes])
|
||||
|
||||
if test "x$use_windows_cryptoapi" = xyes; then
|
||||
AC_DEFINE([USE_WINDOWS_CRYPTOAPI], [1],
|
||||
[Define to 1 if CryptGenRandom should be used for seeding the hash function])
|
||||
fi
|
||||
|
||||
AC_ARG_ENABLE([initial-hashtable-order],
|
||||
[AS_HELP_STRING([--enable-initial-hashtable-order=VAL],
|
||||
[Number of buckets new object hashtables contain is 2 raised to this power. The default is 3, so empty hashtables contain 2^3 = 8 buckets.])],
|
||||
[initial_hashtable_order=$enableval], [initial_hashtable_order=3])
|
||||
AC_DEFINE_UNQUOTED([INITIAL_HASHTABLE_ORDER], [$initial_hashtable_order],
|
||||
[Number of buckets new object hashtables contain is 2 raised to this power. E.g. 3 -> 2^3 = 8.])
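
In other words, the configured value is an exponent, not a bucket count; a
one-line C sketch of the relationship:

    /* The default order 3 gives 1 << 3 = 8 buckets in a new object hashtable. */
    size_t initial_buckets = (size_t)1 << INITIAL_HASHTABLE_ORDER;
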
|
||||
|
||||
AC_ARG_ENABLE([Bsymbolic],
|
||||
[AS_HELP_STRING([--disable-Bsymbolic],
|
||||
[Avoid linking with -Bsymbolic-function])],
|
||||
[], [with_Bsymbolic=check])
|
||||
|
||||
if test "x$with_Bsymbolic" != "xno" ; then
|
||||
AC_MSG_CHECKING([for -Bsymbolic-functions linker flag])
|
||||
saved_LDFLAGS="${LDFLAGS}"
|
||||
LDFLAGS=-Wl,-Bsymbolic-functions
|
||||
AC_TRY_LINK(
|
||||
[], [int main (void) { return 0; }],
|
||||
[AC_MSG_RESULT([yes])
|
||||
have_Bsymbolic=yes],
|
||||
[AC_MSG_RESULT([no])
|
||||
have_Bsymbolic=no]
|
||||
)
|
||||
LDFLAGS="${saved_LDFLAGS}"
|
||||
|
||||
if test "x$with_Bsymbolic" = "xcheck" ; then
|
||||
with_Bsymbolic=$have_Bsymbolic;
|
||||
fi
|
||||
if test "x$with_Bsymbolic:x$have_Bsymbolic" = "xyes:xno" ; then
|
||||
AC_MSG_ERROR([linker support is required for -Bsymbolic])
|
||||
fi
|
||||
fi
|
||||
|
||||
AS_IF([test "x$with_Bsymbolic" = "xyes"], [JSON_BSYMBOLIC_LDFLAGS=-Wl[,]-Bsymbolic-functions])
|
||||
AC_SUBST(JSON_BSYMBOLIC_LDFLAGS)
|
||||
|
||||
# Enable symbol versioning on GNU libc
|
||||
JSON_SYMVER_LDFLAGS=
|
||||
AC_CHECK_DECL([__GLIBC__], [JSON_SYMVER_LDFLAGS=-Wl,--default-symver])
|
||||
AC_SUBST([JSON_SYMVER_LDFLAGS])
|
||||
|
||||
AC_ARG_ENABLE([dtoa],
|
||||
[AS_HELP_STRING([--enable-dtoa], [Use dtoa for optimal floating point to string conversion])],
|
||||
[case "$enableval" in
|
||||
yes) dtoa=yes ;;
|
||||
no) dtoa=no ;;
|
||||
*) AC_MSG_ERROR([bad value ${enableval} for --enable-dtoa]) ;;
|
||||
esac], [dtoa=yes])
|
||||
if test "$dtoa" = "yes"; then
|
||||
AC_DEFINE([DTOA_ENABLED], [1],
|
||||
[Define to 1 to use dtoa to convert floating points to strings])
|
||||
fi
|
||||
AM_CONDITIONAL([DTOA_ENABLED], [test "$dtoa" = "yes"])
|
||||
|
||||
AC_ARG_ENABLE([ossfuzzers],
|
||||
[AS_HELP_STRING([--enable-ossfuzzers],
|
||||
[Whether to generate the fuzzers for OSS-Fuzz])],
|
||||
[have_ossfuzzers=yes], [have_ossfuzzers=no])
|
||||
AM_CONDITIONAL([USE_OSSFUZZERS], [test "x$have_ossfuzzers" = "xyes"])
|
||||
|
||||
|
||||
AC_SUBST([LIB_FUZZING_ENGINE])
|
||||
AM_CONDITIONAL([USE_OSSFUZZ_FLAG], [test "x$LIB_FUZZING_ENGINE" = "x-fsanitize=fuzzer"])
|
||||
AM_CONDITIONAL([USE_OSSFUZZ_STATIC], [test -f "$LIB_FUZZING_ENGINE"])
|
||||
|
||||
|
||||
if test x$GCC = xyes; then
|
||||
AC_MSG_CHECKING(for -Wno-format-truncation)
|
||||
wnoformat_truncation="-Wno-format-truncation"
|
||||
AS_IF([${CC} -Wno-format-truncation -Werror -S -o /dev/null -xc /dev/null > /dev/null 2>&1],
|
||||
[AC_MSG_RESULT(yes)],
|
||||
[AC_MSG_RESULT(no)
|
||||
wnoformat_truncation=""])
|
||||
|
||||
AM_CFLAGS="-Wall -Wextra -Wdeclaration-after-statement -Wshadow ${wnoformat_truncation}"
|
||||
fi
|
||||
AC_SUBST([AM_CFLAGS])
|
||||
|
||||
AC_CONFIG_FILES([
|
||||
jansson.pc
|
||||
Makefile
|
||||
doc/Makefile
|
||||
src/Makefile
|
||||
src/jansson_config.h
|
||||
test/Makefile
|
||||
test/testdata/Makefile
|
||||
test/testprogs/Makefile
|
||||
test/bin/Makefile
|
||||
test/ossfuzz/Makefile
|
||||
test/suites/Makefile
|
||||
test/suites/api/Makefile
|
||||
])
|
||||
AC_OUTPUT
|
||||
|
|
doc/.gitignore (vendored)
@@ -1 +1 @@
.build/
_build/

doc/.readthedocs.yaml (new file, 9 lines)
@@ -0,0 +1,9 @@
version: 2

build:
  os: ubuntu-22.04
  tools:
    python: "3.12"

sphinx:
  configuration: doc/conf.py

doc/Makefile.am
@@ -1,4 +1,20 @@
EXTRA_DIST = conf.py apiref.rst index.rst ext/refcounting.py
EXTRA_DIST = conf.py apiref.rst changes.rst conformance.rst \
	gettingstarted.rst github_commits.c index.rst threadsafety.rst \
	tutorial.rst upgrading.rst ext/refcounting.py

SPHINXBUILD = sphinx-build
SPHINXOPTS = -d _build/doctrees $(SPHINXOPTS_EXTRA)

html-local:
	$(SPHINXBUILD) -b html $(SPHINXOPTS) $(srcdir) _build/html

install-html-local: html
	mkdir -p $(DESTDIR)$(htmldir)
	cp -r _build/html $(DESTDIR)$(htmldir)

uninstall-local:
	rm -rf $(DESTDIR)$(htmldir)

clean-local:
	rm -rf .build
	rm -rf _build
	rm -f ext/refcounting.pyc

doc/README
@@ -1,5 +1,5 @@
To build the documentation, invoke

    sphinx-build . .build/html
    make html

in this directory. Then point your browser to .build/html/index.html.
Then point your browser to _build/html/index.html.

doc/apiref.rst (1986 lines changed; diff suppressed because it is too large)

doc/changes.rst (new file, 5 lines)
@@ -0,0 +1,5 @@
******************
Changes in Jansson
******************

.. include:: ../CHANGES

104
doc/conf.py
104
doc/conf.py
|
@ -1,13 +1,10 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Jansson documentation build configuration file, created by
|
||||
# sphinx-quickstart on Thu Jul 30 11:35:32 2009.
|
||||
# sphinx-quickstart on Sun Sep 5 21:47:20 2010.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its containing dir.
|
||||
#
|
||||
# The contents of this file are pickled, so don't put values in the namespace
|
||||
# that aren't pickleable (module imports are okay, they're removed automatically).
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
|
@ -15,44 +12,45 @@
|
|||
# serve to show the default.
|
||||
|
||||
import sys, os
|
||||
sys.path.insert(0, os.path.abspath('ext'))
|
||||
|
||||
# If your extensions (or modules documented by autodoc) are in another directory,
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#sys.path.append(os.path.abspath('.'))
|
||||
sys.path.insert(0, os.path.abspath('ext'))
|
||||
|
||||
# General configuration
|
||||
# ---------------------
|
||||
# -- General configuration -----------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be extensions
|
||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = ['refcounting']
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = []
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8'
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'Jansson'
|
||||
copyright = u'2009, Petri Lehtinen'
|
||||
copyright = u'2009-2020, Petri Lehtinen'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '1.0'
|
||||
version = '2.14.1'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '1.0.1'
|
||||
release = version
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
|
@ -64,15 +62,13 @@ release = '1.0.1'
|
|||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of documents that shouldn't be included in the build.
|
||||
#unused_docs = []
|
||||
|
||||
# List of directories, relative to source directory, that shouldn't be searched
|
||||
# for source files.
|
||||
exclude_trees = ['.build']
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = ['_build']
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||
#default_role = None
|
||||
default_role = 'c:func'
|
||||
primary_domain = 'c'
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
@ -88,14 +84,23 @@ exclude_trees = ['.build']
|
|||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
|
||||
# Options for HTML output
|
||||
# -----------------------
|
||||
|
||||
# The style sheet to use for HTML and HTML Help pages. A file of that name
|
||||
# must exist either in Sphinx' static/ path, or in one of the custom paths
|
||||
# given in html_static_path.
|
||||
html_style = 'default.css'
|
||||
# -- Options for HTML output ---------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
#html_theme = 'default'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
|
@ -116,7 +121,7 @@ html_style = 'default.css'
|
|||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = []
|
||||
#html_static_path = ['_static']
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
|
@ -134,7 +139,7 @@ html_static_path = []
|
|||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_use_modindex = True
|
||||
#html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
|
@ -142,23 +147,28 @@ html_static_path = []
|
|||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, the reST sources are included in the HTML build as _sources/<name>.
|
||||
#html_copy_source = True
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = ''
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'Janssondoc'
|
||||
|
||||
|
||||
# Options for LaTeX output
|
||||
# ------------------------
|
||||
# -- Options for LaTeX output --------------------------------------------------
|
||||
|
||||
# The paper size ('letter' or 'a4').
|
||||
#latex_paper_size = 'letter'
|
||||
|
@ -167,10 +177,10 @@ htmlhelp_basename = 'Janssondoc'
|
|||
#latex_font_size = '10pt'
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, document class [howto/manual]).
|
||||
# (source start file, target name, title, author, documentclass [howto/manual]).
|
||||
latex_documents = [
|
||||
('index', 'Jansson.tex', ur'Jansson Documentation',
|
||||
ur'Petri Lehtinen', 'manual'),
|
||||
('index', 'Jansson.tex', u'Jansson Documentation',
|
||||
u'Petri Lehtinen', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
|
@ -181,6 +191,12 @@ latex_documents = [
|
|||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#latex_preamble = ''
|
||||
|
||||
|
@ -188,4 +204,14 @@ latex_documents = [
|
|||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_use_modindex = True
|
||||
#latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output --------------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('index', 'jansson', u'Jansson Documentation',
|
||||
[u'Petri Lehtinen'], 1)
|
||||
]
|
||||
|
|
119
doc/conformance.rst
Normal file
119
doc/conformance.rst
Normal file
|
@ -0,0 +1,119 @@
|
|||
.. _rfc-conformance:
|
||||
|
||||
***************
|
||||
RFC Conformance
|
||||
***************
|
||||
|
||||
JSON is specified in :rfc:`4627`, *"The application/json Media Type
|
||||
for JavaScript Object Notation (JSON)"*.
|
||||
|
||||
Character Encoding
|
||||
==================
|
||||
|
||||
Jansson only supports UTF-8 encoded JSON texts. It does not support or
|
||||
auto-detect any of the other encodings mentioned in the RFC, namely
|
||||
UTF-16LE, UTF-16BE, UTF-32LE or UTF-32BE. Pure ASCII is supported, as
|
||||
it's a subset of UTF-8.
|
||||
|
||||
Strings
|
||||
=======
|
||||
|
||||
JSON strings are mapped to C-style null-terminated character arrays,
|
||||
and UTF-8 encoding is used internally.
|
||||
|
||||
All Unicode codepoints U+0000 through U+10FFFF are allowed in string
|
||||
values. However, U+0000 is allowed in object keys only for length-aware functions.
|
||||
|
||||
Unicode normalization or any other transformation is never performed
|
||||
on any strings (string values or object keys). When checking for
|
||||
equivalence of strings or object keys, the comparison is performed
|
||||
byte by byte between the original UTF-8 representations of the
|
||||
strings.
|
||||
|
||||
Numbers
|
||||
=======
|
||||
|
||||
.. _real-vs-integer:
|
||||
|
||||
Real vs. Integer
|
||||
----------------
|
||||
|
||||
JSON makes no distinction between real and integer numbers; Jansson
does. Real numbers are mapped to the ``double`` type and integers to
the ``json_int_t`` type, which is a typedef of ``long long`` or
``long``, depending on whether ``long long`` is supported by your
compiler or not.

A JSON number is considered to be a real number if its lexical
representation includes one of ``e``, ``E``, or ``.``, regardless of
whether its actual numeric value is a true integer (e.g., all of
``1E6``, ``3.0``, ``400E-2``, and ``3.14E3`` are mathematical
integers, but will be treated as real values). With the
``JSON_DECODE_INT_AS_REAL`` decoder flag set, all numbers are
interpreted as real.

All other JSON numbers are considered integers.

When encoding to JSON, real values are always represented
with a fractional part; e.g., the ``double`` value 3.0 will be
represented in JSON as ``3.0``, not ``3``.
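
For example, the classification (and the effect of ``JSON_DECODE_INT_AS_REAL``)
can be observed directly when decoding; a short sketch, with the helper name
being hypothetical::

    #include <jansson.h>

    static void classify_demo(void) {
        json_error_t error;

        /* "1E6" has an exponent, so it decodes as a real; "3" as an integer. */
        json_t *arr = json_loads("[1E6, 3]", 0, &error);
        /* json_is_real(json_array_get(arr, 0))    -> true */
        /* json_is_integer(json_array_get(arr, 1)) -> true */
        json_decref(arr);

        /* With JSON_DECODE_INT_AS_REAL set, both elements decode as reals. */
        arr = json_loads("[1E6, 3]", JSON_DECODE_INT_AS_REAL, &error);
        /* json_is_real(json_array_get(arr, 1)) -> true */
        json_decref(arr);
    }
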
Overflow, Underflow & Precision
-------------------------------

Real numbers whose absolute values are too small to be represented in
a C ``double`` will be silently approximated by 0.0. Thus, depending on
platform, JSON numbers very close to zero such as 1E-999 may result in
0.0.

Real numbers whose absolute values are too large to be represented in
a C ``double`` will result in an overflow error (a JSON decoding
error). Thus, depending on platform, JSON numbers like 1E+999 or
-1E+999 may result in a parsing error.

Likewise, integer numbers whose absolute values are too large to be
represented in the ``json_int_t`` type (see above) will result in an
overflow error (a JSON decoding error). Thus, depending on platform,
JSON numbers like 1000000000000000 may result in a parsing error.

Parsing JSON real numbers may result in a loss of precision. As long
as overflow does not occur (i.e. a total loss of precision), the
rounded approximate value is silently used. Thus the JSON number
1.000000000000000005 may, depending on platform, result in the
``double`` value 1.0.
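
As a concrete, platform-dependent sketch of the cases above (the helper name
is hypothetical)::

    #include <jansson.h>

    static void overflow_demo(void) {
        json_error_t error;

        /* On typical IEEE 754 platforms 1E999 overflows a double, so this
           decode fails: json_loads() returns NULL and error.text explains why. */
        json_t *too_big = json_loads("[1E999]", 0, &error);
        if (too_big)
            json_decref(too_big);

        /* This decode usually succeeds, but the excess digits are silently
           rounded away and the stored double is 1.0. */
        json_t *rounded = json_loads("[1.000000000000000005]", 0, &error);
        if (rounded)
            json_decref(rounded);
    }
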
Signed zeros
|
||||
------------
|
||||
|
||||
JSON makes no statement about what a number means; however Javascript
|
||||
(ECMAscript) does state that +0.0 and -0.0 must be treated as being
|
||||
distinct values, i.e. -0.0 |not-equal| 0.0. Jansson relies on the
|
||||
underlying floating point library in the C environment in which it is
|
||||
compiled. Therefore it is platform-dependent whether 0.0 and -0.0 will
|
||||
be distinct values. Most platforms that use the IEEE 754
|
||||
floating-point standard will support signed zeros.
|
||||
|
||||
Note that this only applies to floating-point; neither JSON, C, nor
IEEE supports the concept of signed integer zeros.
|
||||
|
||||
.. |not-equal| unicode:: U+2260
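
Whether the sign survives encoding can be checked directly; a small,
platform-dependent sketch (the helper name is hypothetical)::

    #include <stdio.h>
    #include <stdlib.h>
    #include <jansson.h>

    static void signed_zero_demo(void) {
        /* On IEEE 754 platforms the two dumps usually differ
           ("0.0" vs "-0.0"); on others they may be identical. */
        json_t *pos = json_real(0.0), *neg = json_real(-0.0);
        char *pos_text = json_dumps(pos, JSON_ENCODE_ANY);
        char *neg_text = json_dumps(neg, JSON_ENCODE_ANY);
        printf("%s %s\n", pos_text, neg_text);
        free(pos_text);
        free(neg_text);
        json_decref(pos);
        json_decref(neg);
    }
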
|
||||
|
||||
Types
|
||||
-----
|
||||
|
||||
No support is provided in Jansson for any C numeric types other than
|
||||
``json_int_t`` and ``double``. This excludes things such as unsigned
|
||||
types, ``long double``, etc. Obviously, shorter types like ``short``,
|
||||
``int``, ``long`` (if ``json_int_t`` is ``long long``) and ``float``
|
||||
are implicitly handled via the ordinary C type coercion rules (subject
|
||||
to overflow semantics). Also, no support or hooks are provided for any
|
||||
supplemental "bignum" type add-on packages.
|
||||
|
||||
Depth of nested values
|
||||
======================
|
||||
|
||||
To avoid stack exhaustion, Jansson currently limits the nesting depth
|
||||
for arrays and objects to a certain value (default: 2048), defined as
|
||||
a macro ``JSON_PARSER_MAX_DEPTH`` within ``jansson_config.h``.
|
||||
|
||||
The limit is allowed to be set by the RFC; there is no recommended value
|
||||
or required minimum depth to be supported.
|
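
A quick way to see the limit in action is to decode an input nested more
deeply than ``JSON_PARSER_MAX_DEPTH`` (a sketch assuming the default limit
of 2048)::

    #include <stdio.h>
    #include <string.h>
    #include <jansson.h>

    int main(void) {
        enum { depth = 3000 };            /* deeper than the default 2048 */
        char text[2 * depth + 1];
        json_error_t error;

        memset(text, '[', depth);         /* "[[[...[" */
        memset(text + depth, ']', depth); /* "]...]]]" */
        text[2 * depth] = '\0';

        if (!json_loads(text, 0, &error))
            fprintf(stderr, "decode error: %s\n", error.text);

        return 0;
    }
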
doc/ext/refcounting.py

@@ -19,13 +19,13 @@
    <description of the json_object function>

    :copyright: Copyright 2009 Petri Lehtinen <petri@digip.org>
    :copyright: Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
    :license: MIT, see LICENSE for details.
"""

from docutils import nodes
from docutils.parsers.rst import Directive

class refcounting(nodes.emphasis): pass

def visit(self, node):
    self.visit_emphasis(node)

@@ -40,20 +40,30 @@ def html_depart(self, node):
    self.body.append('</em>')


def refcounting_directive(name, arguments, options, content, lineno,
                          content_offset, block_text, state, state_machine):
    if arguments[0] == 'borrow':
        text = 'Return value: Borrowed reference.'
    elif arguments[0] == 'new':
        text = 'Return value: New reference.'
    else:
        raise Error('Valid arguments: new, borrow')
class refcounting(nodes.emphasis):
    pass

class refcounting_directive(Directive):
    has_content = False
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = False

    def run(self):
        if self.arguments[0] == 'borrow':
            text = 'Return value: Borrowed reference.'
        elif self.arguments[0] == 'new':
            text = 'Return value: New reference.'
        else:
            raise Error('Valid arguments: new, borrow')

        return [refcounting(text, text)]

    return [refcounting(text, text)]

def setup(app):
    app.add_node(refcounting,
                 html=(html_visit, html_depart),
                 latex=(visit, depart),
                 text=(visit, depart))
    app.add_directive('refcounting', refcounting_directive, 0, (1, 0, 0))
                 text=(visit, depart),
                 man=(visit, depart))
    app.add_directive('refcounting', refcounting_directive)

doc/gettingstarted.rst (new file, 264 lines)

***************
Getting Started
***************

.. highlight:: c

Compiling and Installing Jansson
================================

The Jansson source is available at
http://www.digip.org/jansson/releases/.

Unix-like systems (including MinGW)
-----------------------------------

Unpack the source tarball and change to the source directory:

.. parsed-literal::

    bunzip2 -c jansson-|release|.tar.bz2 | tar xf -
    cd jansson-|release|

The source uses GNU Autotools (autoconf_, automake_, libtool_), so
compiling and installing is extremely simple::

    ./configure
    make
    make check
    make install

To change the destination directory (``/usr/local`` by default), use
the ``--prefix=DIR`` argument to ``./configure``. See ``./configure
--help`` for the list of all possible configuration options.

The command ``make check`` runs the test suite distributed with
Jansson. This step is not strictly necessary, but it may find possible
problems that Jansson has on your platform. If any problems are found,
please report them.

If you obtained the source from a Git repository (or any other source
control system), there's no ``./configure`` script as it's not kept in
version control. To create the script, the build system needs to be
bootstrapped. There are many ways to do this, but the easiest one is
to use ``autoreconf``::

    autoreconf -fi

This command creates the ``./configure`` script, which can then be
used as described above.

.. _autoconf: http://www.gnu.org/software/autoconf/
.. _automake: http://www.gnu.org/software/automake/
.. _libtool: http://www.gnu.org/software/libtool/


.. _build-cmake:

CMake (various platforms, including Windows)
--------------------------------------------

Jansson can be built using CMake_. Create a build directory for an
out-of-tree build, change to that directory, and run ``cmake`` (or ``ccmake``,
``cmake-gui``, or similar) to configure the project.

See the examples below for more detailed information.

.. note:: In the examples below ``..`` is used as an argument for ``cmake``.
   This is simply the path to the jansson project root directory.
   The examples assume you've created a sub-directory ``build`` and are
   using it. You could use any path you want.

.. _build-cmake-unix:

Unix (Make files)
^^^^^^^^^^^^^^^^^
Generating make files on Unix:

.. parsed-literal::

    bunzip2 -c jansson-|release|.tar.bz2 | tar xf -
    cd jansson-|release|

    mkdir build
    cd build
    cmake .. # or ccmake .. for a GUI.

.. note::

   If you don't want to build the docs, or Sphinx is not installed, add
   ``-DJANSSON_BUILD_DOCS=OFF`` to the ``cmake`` command.


Then to build::

    make
    make check
    make install

Windows (Visual Studio)
^^^^^^^^^^^^^^^^^^^^^^^
Creating Visual Studio project files from the command line:

.. parsed-literal::

    <unpack>
    cd jansson-|release|

    md build
    cd build
    cmake -G "Visual Studio 15 2017" ..

.. note::

   Replace the name of the generator (the ``-G`` flag) to match the
   Visual Studio version installed on your system. Currently, the
   following versions are supported:

   - ``Visual Studio 9 2008``
   - ``Visual Studio 10 2010``
   - ``Visual Studio 11 2012``
   - ``Visual Studio 12 2013``
   - ``Visual Studio 14 2015``
   - ``Visual Studio 15 2017``
   - ``Visual Studio 16 2019``

   Any later version should also work.

You will now have a *Visual Studio Solution* in your build directory.
To run the unit tests build the ``RUN_TESTS`` project.

If you prefer a GUI, the ``cmake`` line in the above example can
be replaced with::

    cmake-gui ..

For command line help (including a list of available generators)
for CMake_ simply run::

    cmake

To list available CMake_ settings (and what they are currently set to)
for the project, run::

    cmake -LH ..

Windows (MinGW)
^^^^^^^^^^^^^^^
If you prefer using MinGW on Windows, make sure MinGW is installed and
``{MinGW}/bin`` has been added to ``PATH``, then run the following
commands:

.. parsed-literal::

    <unpack>
    cd jansson-|release|

    md build
    cd build
    cmake -G "MinGW Makefiles" ..
    mingw32-make


Mac OSX (Xcode)
^^^^^^^^^^^^^^^
If you prefer using Xcode instead of make files on OSX,
do the following. (Use the same steps as
for :ref:`Unix <build-cmake-unix>`)::

    ...
    cmake -G "Xcode" ..

Additional CMake settings
^^^^^^^^^^^^^^^^^^^^^^^^^

Shared library
""""""""""""""
By default the CMake_ project will generate build files for building the
static library. To build the shared version use::

    ...
    cmake -DJANSSON_BUILD_SHARED_LIBS=1 ..

Changing install directory (same as autoconf --prefix)
"""""""""""""""""""""""""""""""""""""""""""""""""""""""
Just as with the autoconf_ project you can change the destination directory
for ``make install``. The equivalent of autoconf's ``./configure --prefix``
in CMake_ is::

    ...
    cmake -DCMAKE_INSTALL_PREFIX:PATH=/some/other/path ..
    make install

.. _CMake: http://www.cmake.org


Android
-------

Jansson can be built for Android platforms. Android.mk is in the
source root directory. The configuration header file is located in the
``android`` directory in the source distribution.


Other Systems
-------------

On non-Unix-like systems, you may be unable to run the ``./configure``
script. In this case, follow these steps. All the files mentioned can
be found in the ``src/`` directory.

1. Create ``jansson_config.h`` (which has some platform-specific
   parameters that are normally filled in by the ``./configure``
   script). Edit ``jansson_config.h.in``, replacing all ``@variable@``
   placeholders, and rename the file to ``jansson_config.h``.

2. Make ``jansson.h`` and ``jansson_config.h`` available to the
   compiler, so that they can be found when compiling programs that
   use Jansson.

3. Compile all the ``.c`` files (in the ``src/`` directory) into a
   library file. Make the library available to the compiler, as in
   step 2.


Building the Documentation
--------------------------

(This subsection describes how to build the HTML documentation you are
currently reading, so it can be safely skipped.)

Documentation is in the ``doc/`` subdirectory. It's written in
reStructuredText_ with Sphinx_ annotations. To generate the HTML
documentation, invoke::

    make html

and point your browser to ``doc/_build/html/index.html``. Sphinx_ 1.0
or newer is required to generate the documentation.

.. _reStructuredText: http://docutils.sourceforge.net/rst.html
.. _Sphinx: http://sphinx.pocoo.org/


Compiling Programs that Use Jansson
===================================

Jansson involves one C header file, :file:`jansson.h`, so it's enough
to put the line

::

    #include <jansson.h>

in the beginning of every source file that uses Jansson.

There's also just one library to link with, ``libjansson``. Compile and
link the program as follows::

    cc -o prog prog.c -ljansson
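
For instance, a minimal ``prog.c`` for checking that the header and
library are found could be (just a sketch; any program that includes
``jansson.h`` will do)::

    #include <stdio.h>
    #include <jansson.h>

    int main(void) {
        /* JANSSON_VERSION is a string macro defined in jansson.h */
        printf("Compiled against Jansson %s\n", JANSSON_VERSION);
        return 0;
    }
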
Starting from version 1.2, there's also support for pkg-config_:

.. code-block:: shell

    cc -o prog prog.c `pkg-config --cflags --libs jansson`

.. _pkg-config: http://pkg-config.freedesktop.org/

doc/github_commits.c (new file, 181 lines)

/*
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
#include <curl/curl.h>
|
||||
#include <jansson.h>
|
||||
|
||||
#define BUFFER_SIZE (256 * 1024) /* 256 KB */
|
||||
|
||||
#define URL_FORMAT "https://api.github.com/repos/%s/%s/commits"
|
||||
#define URL_SIZE 256
|
||||
|
||||
/* Return the offset of the first newline in text or the length of
|
||||
text if there's no newline */
|
||||
static int newline_offset(const char *text) {
|
||||
const char *newline = strchr(text, '\n');
|
||||
if (!newline)
|
||||
return strlen(text);
|
||||
else
|
||||
return (int)(newline - text);
|
||||
}
|
||||
|
||||
struct write_result {
|
||||
char *data;
|
||||
int pos;
|
||||
};
|
||||
|
||||
static size_t write_response(void *ptr, size_t size, size_t nmemb, void *stream) {
|
||||
struct write_result *result = (struct write_result *)stream;
|
||||
|
||||
if (result->pos + size * nmemb >= BUFFER_SIZE - 1) {
|
||||
fprintf(stderr, "error: too small buffer\n");
|
||||
return 0;
|
||||
}
|
||||
|
||||
memcpy(result->data + result->pos, ptr, size * nmemb);
|
||||
result->pos += size * nmemb;
|
||||
|
||||
return size * nmemb;
|
||||
}
|
||||
|
||||
static char *request(const char *url) {
|
||||
CURL *curl = NULL;
|
||||
CURLcode status;
|
||||
struct curl_slist *headers = NULL;
|
||||
char *data = NULL;
|
||||
long code;
|
||||
|
||||
curl_global_init(CURL_GLOBAL_ALL);
|
||||
curl = curl_easy_init();
|
||||
if (!curl)
|
||||
goto error;
|
||||
|
||||
data = malloc(BUFFER_SIZE);
|
||||
if (!data)
|
||||
goto error;
|
||||
|
||||
struct write_result write_result = {.data = data, .pos = 0};
|
||||
|
||||
curl_easy_setopt(curl, CURLOPT_URL, url);
|
||||
|
||||
/* GitHub commits API v3 requires a User-Agent header */
|
||||
headers = curl_slist_append(headers, "User-Agent: Jansson-Tutorial");
|
||||
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
|
||||
|
||||
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_response);
|
||||
curl_easy_setopt(curl, CURLOPT_WRITEDATA, &write_result);
|
||||
|
||||
status = curl_easy_perform(curl);
|
||||
if (status != 0) {
|
||||
fprintf(stderr, "error: unable to request data from %s:\n", url);
|
||||
fprintf(stderr, "%s\n", curl_easy_strerror(status));
|
||||
goto error;
|
||||
}
|
||||
|
||||
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &code);
|
||||
if (code != 200) {
|
||||
fprintf(stderr, "error: server responded with code %ld\n", code);
|
||||
goto error;
|
||||
}
|
||||
|
||||
curl_easy_cleanup(curl);
|
||||
curl_slist_free_all(headers);
|
||||
curl_global_cleanup();
|
||||
|
||||
/* zero-terminate the result */
|
||||
data[write_result.pos] = '\0';
|
||||
|
||||
return data;
|
||||
|
||||
error:
|
||||
if (data)
|
||||
free(data);
|
||||
if (curl)
|
||||
curl_easy_cleanup(curl);
|
||||
if (headers)
|
||||
curl_slist_free_all(headers);
|
||||
curl_global_cleanup();
|
||||
return NULL;
|
||||
}
|
||||
|
||||
int main(int argc, char *argv[]) {
|
||||
size_t i;
|
||||
char *text;
|
||||
char url[URL_SIZE];
|
||||
|
||||
json_t *root;
|
||||
json_error_t error;
|
||||
|
||||
if (argc != 3) {
|
||||
fprintf(stderr, "usage: %s USER REPOSITORY\n\n", argv[0]);
|
||||
fprintf(stderr, "List commits at USER's REPOSITORY.\n\n");
|
||||
return 2;
|
||||
}
|
||||
|
||||
snprintf(url, URL_SIZE, URL_FORMAT, argv[1], argv[2]);
|
||||
|
||||
text = request(url);
|
||||
if (!text)
|
||||
return 1;
|
||||
|
||||
root = json_loads(text, 0, &error);
|
||||
free(text);
|
||||
|
||||
if (!root) {
|
||||
fprintf(stderr, "error: on line %d: %s\n", error.line, error.text);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (!json_is_array(root)) {
|
||||
fprintf(stderr, "error: root is not an array\n");
|
||||
json_decref(root);
|
||||
return 1;
|
||||
}
|
||||
|
||||
for (i = 0; i < json_array_size(root); i++) {
|
||||
json_t *data, *sha, *commit, *message;
|
||||
const char *message_text;
|
||||
|
||||
data = json_array_get(root, i);
|
||||
if (!json_is_object(data)) {
|
||||
fprintf(stderr, "error: commit data %d is not an object\n", (int)(i + 1));
|
||||
json_decref(root);
|
||||
return 1;
|
||||
}
|
||||
|
||||
sha = json_object_get(data, "sha");
|
||||
if (!json_is_string(sha)) {
|
||||
fprintf(stderr, "error: commit %d: sha is not a string\n", (int)(i + 1));
|
||||
json_decref(root);
|
||||
return 1;
|
||||
}
|
||||
|
||||
commit = json_object_get(data, "commit");
|
||||
if (!json_is_object(commit)) {
|
||||
fprintf(stderr, "error: commit %d: commit is not an object\n", (int)(i + 1));
|
||||
json_decref(root);
|
||||
return 1;
|
||||
}
|
||||
|
||||
message = json_object_get(commit, "message");
|
||||
if (!json_is_string(message)) {
|
||||
fprintf(stderr, "error: commit %d: message is not a string\n", (int)(i + 1));
|
||||
json_decref(root);
|
||||
return 1;
|
||||
}
|
||||
|
||||
message_text = json_string_value(message);
|
||||
printf("%.8s %.*s\n", json_string_value(sha), newline_offset(message_text),
|
||||
message_text);
|
||||
}
|
||||
|
||||
json_decref(root);
|
||||
return 0;
|
||||
}
|
|
@ -1,16 +1,49 @@
|
|||
Overview
|
||||
========
|
||||
Jansson Documentation
|
||||
=====================
|
||||
|
||||
This is the documentation for Jansson_ |release|, last updated |today|.
|
||||
|
||||
Introduction
|
||||
------------
|
||||
|
||||
Jansson_ is a C library for encoding, decoding and manipulating JSON
|
||||
data. Its main features and design principles are:
|
||||
|
||||
- Simple and intuitive API and data model
|
||||
|
||||
- Comprehensive documentation
|
||||
|
||||
- No dependencies on other libraries
|
||||
|
||||
- Full Unicode support (UTF-8)
|
||||
|
||||
- Extensive test suite
|
||||
|
||||
Jansson is licensed under the `MIT license`_; see LICENSE in the
|
||||
source distribution for details.
|
||||
|
||||
Jansson is used in production and its API is stable. It works on
|
||||
numerous platforms, including numerous Unix like systems and Windows.
|
||||
It's suitable for use on any system, including desktop, server, and
|
||||
small embedded systems.
|
||||
|
||||
|
||||
.. _`MIT license`: http://www.opensource.org/licenses/mit-license.php
|
||||
.. _Jansson: http://www.digip.org/jansson/
|
||||
|
||||
**Contents:**
|
||||
Contents
|
||||
--------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
gettingstarted
|
||||
upgrading
|
||||
tutorial
|
||||
conformance
|
||||
threadsafety
|
||||
apiref
|
||||
changes
|
||||
|
||||
|
||||
Indices and Tables
|
||||
|
|

doc/threadsafety.rst (new file, 82 lines)

.. _thread-safety:

*************
Thread safety
*************

Jansson as a library is thread safe and has no mutable global state.
The only exceptions are the hash function seed and memory allocation
functions, see below.

There's no locking performed inside Jansson's code. **Read-only**
access to JSON values shared by multiple threads is safe, but
**mutating** a JSON value that's shared by multiple threads is not. A
multithreaded program must perform its own locking if JSON values
shared by multiple threads are mutated.

However, **reference count manipulation** (:func:`json_incref()`,
:func:`json_decref()`) is usually thread-safe, and can be performed on
JSON values that are shared among threads. The thread-safety of
reference counting can be checked with the
``JANSSON_THREAD_SAFE_REFCOUNT`` preprocessor constant. Thread-safe
reference count manipulation is achieved using compiler built-in
atomic functions, which are available in most modern compilers.

If compiler support is not available (``JANSSON_THREAD_SAFE_REFCOUNT``
is not defined), it may be very difficult to ensure thread safety of
reference counting. It's possible to have a reference to a value
that's also stored inside an array or object in another thread.
Modifying the container (adding or removing values) may trigger
concurrent access to such values, as containers manage the reference
count of their contained values.
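
A conservative pattern is to make the build fail when atomic reference
counting is not available, instead of risking races at run time (a
sketch; the fallback is to add your own locking)::

    #include <jansson.h>

    #ifndef JANSSON_THREAD_SAFE_REFCOUNT
    #error "refcounting is not atomic with this compiler; add external locking"
    #endif

    /* With the constant defined, taking an extra reference for another
       thread needs no additional locking. */
    static void retain_for_worker(json_t *value) {
        json_incref(value);
    }
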
Hash function seed
==================

To prevent an attacker from intentionally causing large JSON objects
with specially crafted keys to perform very slowly, the hash function
used by Jansson is randomized using a seed value. The seed is
automatically generated on the first explicit or implicit call to
:func:`json_object()`, if :func:`json_object_seed()` has not been
called beforehand.

The seed is generated using the operating system's entropy sources if
they are available (``/dev/urandom``, ``CryptGenRandom()``). The
initialization is done in as thread-safe a manner as possible, by
using architecture-specific lockless operations if the platform or the
compiler provides them.

If you're using threads, it's recommended to autoseed the hashtable
explicitly before spawning any threads by calling
``json_object_seed(0)``, especially if you're unsure whether the
initialization is thread safe on your platform.
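
In a threaded program the explicit seeding call goes into ``main()``,
before any threads are started (a sketch)::

    #include <jansson.h>

    int main(void) {
        /* Generate the hashtable seed now, in a single thread, instead
           of lazily on the first json_object() call. */
        json_object_seed(0);

        /* ... start worker threads and use Jansson normally ... */
        return 0;
    }
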
Memory allocation functions
===========================

Memory allocation functions should be set at most once, and only on
program startup. See :ref:`apiref-custom-memory-allocation`.


Locale
======

Jansson works fine under any locale.

However, if the host program is multithreaded and uses ``setlocale()``
to switch the locale in one thread while Jansson is currently encoding
or decoding JSON data in another thread, the result may be wrong or
the program may even crash.

Jansson uses locale-specific functions for certain string conversions
in the encoder and decoder, and then converts the locale-specific
values to/from the JSON representation. This fails if the locale
changes between the string conversion and the locale-to-JSON
conversion. This can only happen in multithreaded programs that use
``setlocale()``, because ``setlocale()`` switches the locale for all
running threads, not only the thread that calls ``setlocale()``.

If your program uses ``setlocale()`` as described above, consider
using the thread-safe ``uselocale()`` instead.
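
A sketch of that approach, switching only the calling thread's locale
(the ``de_DE.UTF-8`` locale name is an assumption and may not exist on
every system)::

    #define _GNU_SOURCE
    #include <locale.h>
    #include <stdio.h>

    static void print_localized(double value) {
        locale_t de = newlocale(LC_ALL_MASK, "de_DE.UTF-8", (locale_t)0);
        if (de) {
            locale_t old = uselocale(de);   /* affects only this thread */
            printf("%f\n", value);
            uselocale(old);
            freelocale(de);
        }
    }
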

doc/tutorial.rst (new file, 288 lines)

.. _tutorial:
|
||||
|
||||
********
|
||||
Tutorial
|
||||
********
|
||||
|
||||
.. highlight:: c
|
||||
|
||||
In this tutorial, we create a program that fetches the latest commits
|
||||
of a repository in GitHub_ over the web. `GitHub API`_ uses JSON, so
|
||||
the result can be parsed using Jansson.
|
||||
|
||||
To stick to the scope of this tutorial, we will only cover the
|
||||
parts of the program related to handling JSON data. For the best user
|
||||
experience, the full source code is available:
|
||||
:download:`github_commits.c`. To compile it (on Unix-like systems with
|
||||
gcc), use the following command::
|
||||
|
||||
gcc -o github_commits github_commits.c -ljansson -lcurl
|
||||
|
||||
libcurl_ is used to communicate over the web, so it is required to
|
||||
compile the program.
|
||||
|
||||
The command line syntax is::
|
||||
|
||||
github_commits USER REPOSITORY
|
||||
|
||||
``USER`` is a GitHub user ID and ``REPOSITORY`` is the repository
|
||||
name. Please note that the GitHub API is rate limited, so if you run
|
||||
the program too many times within a short period of time, the server
|
||||
starts to respond with an error.
|
||||
|
||||
.. _GitHub: https://github.com/
|
||||
.. _GitHub API: http://developer.github.com/
|
||||
.. _libcurl: http://curl.haxx.se/
|
||||
|
||||
|
||||
.. _tutorial-github-commits-api:
|
||||
|
||||
The GitHub Repo Commits API
|
||||
===========================
|
||||
|
||||
The `GitHub Repo Commits API`_ is used by sending HTTP requests to
|
||||
URLs like ``https://api.github.com/repos/USER/REPOSITORY/commits``,
|
||||
where ``USER`` and ``REPOSITORY`` are the GitHub user ID and the name
|
||||
of the repository whose commits are to be listed, respectively.
|
||||
|
||||
GitHub responds with a JSON array of the following form:
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
[
|
||||
{
|
||||
"sha": "<the commit ID>",
|
||||
"commit": {
|
||||
"message": "<the commit message>",
|
||||
<more fields, not important to this tutorial...>
|
||||
},
|
||||
<more fields...>
|
||||
},
|
||||
{
|
||||
"sha": "<the commit ID>",
|
||||
"commit": {
|
||||
"message": "<the commit message>",
|
||||
<more fields...>
|
||||
},
|
||||
<more fields...>
|
||||
},
|
||||
<more commits...>
|
||||
]
|
||||
|
||||
In our program, the HTTP request is sent using the following
|
||||
function::
|
||||
|
||||
static char *request(const char *url);
|
||||
|
||||
It takes the URL as a parameter, performs an HTTP GET request, and
|
||||
returns a newly allocated string that contains the response body. If
|
||||
the request fails, an error message is printed to stderr and the
|
||||
return value is *NULL*. For full details, refer to :download:`the code
|
||||
<github_commits.c>`, as the actual implementation is not important
|
||||
here.
|
||||
|
||||
.. _GitHub Repo Commits API: http://developer.github.com/v3/repos/commits/
|
||||
|
||||
.. _tutorial-the-program:
|
||||
|
||||
The Program
|
||||
===========
|
||||
|
||||
First the includes::
|
||||
|
||||
#include <string.h>
|
||||
#include <jansson.h>
|
||||
|
||||
Like all the programs using Jansson, we need to include
|
||||
:file:`jansson.h`.
|
||||
|
||||
The following definitions are used to build the GitHub API request
|
||||
URL::
|
||||
|
||||
#define URL_FORMAT "https://api.github.com/repos/%s/%s/commits"
|
||||
#define URL_SIZE 256
|
||||
|
||||
The following function is used when formatting the result to find the
|
||||
first newline in the commit message::
|
||||
|
||||
/* Return the offset of the first newline in text or the length of
|
||||
text if there's no newline */
|
||||
static int newline_offset(const char *text)
|
||||
{
|
||||
const char *newline = strchr(text, '\n');
|
||||
if(!newline)
|
||||
return strlen(text);
|
||||
else
|
||||
return (int)(newline - text);
|
||||
}
|
||||
|
||||
The main function follows. In the beginning, we first declare a bunch
|
||||
of variables and check the command line parameters::
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
size_t i;
|
||||
char *text;
|
||||
char url[URL_SIZE];
|
||||
|
||||
json_t *root;
|
||||
json_error_t error;
|
||||
|
||||
if(argc != 3)
|
||||
{
|
||||
fprintf(stderr, "usage: %s USER REPOSITORY\n\n", argv[0]);
|
||||
fprintf(stderr, "List commits at USER's REPOSITORY.\n\n");
|
||||
return 2;
|
||||
}
|
||||
|
||||
Then we build the request URL using the user and repository names
|
||||
given as command line parameters::
|
||||
|
||||
snprintf(url, URL_SIZE, URL_FORMAT, argv[1], argv[2]);
|
||||
|
||||
This uses the ``URL_SIZE`` and ``URL_FORMAT`` constants defined above.
|
||||
Now we're ready to actually request the JSON data over the web::
|
||||
|
||||
text = request(url);
|
||||
if(!text)
|
||||
return 1;
|
||||
|
||||
If an error occurs, our function ``request`` prints the error and
|
||||
returns *NULL*, so it's enough to just return 1 from the main
|
||||
function.
|
||||
|
||||
Next we'll call :func:`json_loads()` to decode the JSON text we got
|
||||
as a response::
|
||||
|
||||
root = json_loads(text, 0, &error);
|
||||
free(text);
|
||||
|
||||
if(!root)
|
||||
{
|
||||
fprintf(stderr, "error: on line %d: %s\n", error.line, error.text);
|
||||
return 1;
|
||||
}
|
||||
|
||||
We don't need the JSON text anymore, so we can free the ``text``
|
||||
variable right after decoding it. If :func:`json_loads()` fails, it
|
||||
returns *NULL* and sets error information to the :type:`json_error_t`
|
||||
structure given as the second parameter. In this case, our program
|
||||
prints the error information out and returns 1 from the main function.
|
||||
|
||||
Now we're ready to extract the data out of the decoded JSON response.
|
||||
The structure of the response JSON was explained in section
|
||||
:ref:`tutorial-github-commits-api`.
|
||||
|
||||
We check that the returned value really is an array::
|
||||
|
||||
if(!json_is_array(root))
|
||||
{
|
||||
fprintf(stderr, "error: root is not an array\n");
|
||||
json_decref(root);
|
||||
return 1;
|
||||
}
|
||||
|
||||
Then we proceed to loop over all the commits in the array::
|
||||
|
||||
for(i = 0; i < json_array_size(root); i++)
|
||||
{
|
||||
json_t *data, *sha, *commit, *message;
|
||||
const char *message_text;
|
||||
|
||||
data = json_array_get(root, i);
|
||||
if(!json_is_object(data))
|
||||
{
|
||||
fprintf(stderr, "error: commit data %d is not an object\n", i + 1);
|
||||
json_decref(root);
|
||||
return 1;
|
||||
}
|
||||
...
|
||||
|
||||
The function :func:`json_array_size()` returns the size of a JSON
|
||||
array. First, we again declare some variables and then extract the
|
||||
i'th element of the ``root`` array using :func:`json_array_get()`.
|
||||
We also check that the resulting value is a JSON object.
|
||||
|
||||
Next we'll extract the commit ID (a hexadecimal SHA-1 sum),
|
||||
intermediate commit info object, and the commit message from that
|
||||
object. We also do proper type checks::
|
||||
|
||||
sha = json_object_get(data, "sha");
|
||||
if(!json_is_string(sha))
|
||||
{
|
||||
fprintf(stderr, "error: commit %d: sha is not a string\n", i + 1);
|
||||
json_decref(root);
|
||||
return 1;
|
||||
}
|
||||
|
||||
commit = json_object_get(data, "commit");
|
||||
if(!json_is_object(commit))
|
||||
{
|
||||
fprintf(stderr, "error: commit %d: commit is not an object\n", i + 1);
|
||||
json_decref(root);
|
||||
return 1;
|
||||
}
|
||||
|
||||
message = json_object_get(commit, "message");
|
||||
if(!json_is_string(message))
|
||||
{
|
||||
fprintf(stderr, "error: commit %d: message is not a string\n", i + 1);
|
||||
json_decref(root);
|
||||
return 1;
|
||||
}
|
||||
...
|
||||
|
||||
And finally, we'll print the first 8 characters of the commit ID and
|
||||
the first line of the commit message. A C-style string is extracted
|
||||
from a JSON string using :func:`json_string_value()`::
|
||||
|
||||
message_text = json_string_value(message);
|
||||
printf("%.8s %.*s\n",
|
||||
json_string_value(sha),
|
||||
newline_offset(message_text),
|
||||
message_text);
|
||||
}
|
||||
|
||||
After sending the HTTP request, we decoded the JSON text using
|
||||
:func:`json_loads()`, remember? It returns a *new reference* to the
|
||||
JSON value it decodes. When we're finished with the value, we'll need
|
||||
to decrease the reference count using :func:`json_decref()`. This way
|
||||
Jansson can release the resources::
|
||||
|
||||
json_decref(root);
|
||||
return 0;
|
||||
|
||||
For a detailed explanation of reference counting in Jansson, see
|
||||
:ref:`apiref-reference-count` in :ref:`apiref`.
|
||||
|
||||
The program's ready, let's test it and view the latest commits in
|
||||
Jansson's repository:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ ./github_commits akheron jansson
|
||||
1581f26a Merge branch '2.3'
|
||||
aabfd493 load: Change buffer_pos to be a size_t
|
||||
bd72efbd load: Avoid unexpected behaviour in macro expansion
|
||||
e8fd3e30 Document and tweak json_load_callback()
|
||||
873eddaf Merge pull request #60 from rogerz/contrib
|
||||
bd2c0c73 Ignore the binary test_load_callback
|
||||
17a51a4b Merge branch '2.3'
|
||||
09c39adc Add json_load_callback to the list of exported symbols
|
||||
cbb80baf Merge pull request #57 from rogerz/contrib
|
||||
040bd7b0 Add json_load_callback()
|
||||
2637faa4 Make test stripping locale independent
|
||||
<...>
|
||||
|
||||
|
||||
Conclusion
|
||||
==========
|
||||
|
||||
In this tutorial, we implemented a program that fetches the latest
|
||||
commits of a GitHub repository using the GitHub Repo Commits API.
|
||||
Jansson was used to decode the JSON response and to extract the commit
|
||||
data.
|
||||
|
||||
This tutorial only covered a small part of Jansson. For example, we
|
||||
did not create or manipulate JSON values at all. Proceed to
|
||||
:ref:`apiref` to explore all features of Jansson.
|

doc/upgrading.rst (new file, 76 lines)

.. highlight:: c

******************
Upgrading from 1.x
******************

This chapter lists the backwards incompatible changes introduced in
Jansson 2.0, and the steps that are needed for upgrading your code.

**The incompatibilities are not dramatic.** The biggest change is that
all decoding functions now require an extra parameter. Most programs
can be modified to work with 2.0 by adding a ``0`` as the second
parameter to all calls of :func:`json_loads()`, :func:`json_loadf()`
and :func:`json_load_file()`.


Compatibility
=============

Jansson 2.0 is backwards incompatible with the Jansson 1.x releases.
It is ABI incompatible, i.e. all programs dynamically linking to the
Jansson library need to be recompiled. It's also API incompatible,
i.e. the source code of programs using Jansson 1.x may need
modifications to make them compile against Jansson 2.0.

All the 2.x releases are guaranteed to be backwards compatible for
both ABI and API, so no recompilation or source changes are needed
when upgrading from 2.x to 2.y.


List of Incompatible Changes
============================

**Decoding flags**
    For future needs, a ``flags`` parameter was added as the second
    parameter to all decoding functions, i.e. :func:`json_loads()`,
    :func:`json_loadf()` and :func:`json_load_file()`. All calls to
    these functions need to be changed by adding a ``0`` as the second
    argument. For example::

        /* old code */
        json_loads(input, &error);

        /* new code */
        json_loads(input, 0, &error);


**Underlying type of JSON integers**
    The underlying C type of JSON integers has been changed from
    ``int`` to the widest available signed integer type, i.e.
    ``long long`` or ``long``, depending on whether
    ``long long`` is supported on your system or not. This makes
    the whole 64-bit integer range available on most modern systems.

    ``jansson.h`` has a typedef :type:`json_int_t` to the underlying
    integer type. ``int`` should still be used in most cases when
    dealing with smallish JSON integers, as the compiler handles
    implicit type coercion. Only when the full 64-bit range is needed
    should :type:`json_int_t` be used explicitly, as in the sketch
    below.
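
    When the full 64-bit range really is needed, the pattern is roughly
    as follows (a sketch; ``JSON_INTEGER_FORMAT`` is the printf format
    macro that ``jansson.h`` provides for :type:`json_int_t`, and the
    literal assumes ``long long`` support)::

        #include <stdio.h>
        #include <jansson.h>

        int main(void) {
            json_t *big = json_integer(4611686018427387904LL); /* 2^62 */
            json_int_t value = json_integer_value(big);

            printf("value = %" JSON_INTEGER_FORMAT "\n", value);

            json_decref(big);
            return 0;
        }
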

**Maximum encoder indentation depth**
    The maximum argument of the ``JSON_INDENT()`` macro has been
    changed from 255 to 31, to free up bits from the ``flags``
    parameter of :func:`json_dumps()`, :func:`json_dumpf()` and
    :func:`json_dump_file()`. If your code uses a bigger indentation
    than 31, it needs to be changed.


**Unsigned integers in API functions**
    Version 2.0 unifies unsigned integer usage in the API. All uses of
    ``unsigned int`` and ``unsigned long`` have been replaced
    with ``size_t``. This includes flags, container sizes, etc.
    This should not require source code changes, as both
    ``unsigned int`` and ``unsigned long`` are usually
    compatible with ``size_t``.

examples/README.rst (new file, 4 lines)

Jansson examples
================

This directory contains simple example programs that use Jansson.

examples/simple_parse.c (new file, 200 lines)

/*
|
||||
* Simple example of parsing and printing JSON using jansson.
|
||||
*
|
||||
* SYNOPSIS:
|
||||
* $ examples/simple_parse
|
||||
* Type some JSON > [true, false, null, 1, 0.0, -0.0, "", {"name": "barney"}]
|
||||
* JSON Array of 8 elements:
|
||||
* JSON True
|
||||
* JSON False
|
||||
* JSON Null
|
||||
* JSON Integer: "1"
|
||||
* JSON Real: 0.000000
|
||||
* JSON Real: -0.000000
|
||||
* JSON String: ""
|
||||
* JSON Object of 1 pair:
|
||||
* JSON Key: "name"
|
||||
* JSON String: "barney"
|
||||
*
|
||||
* Copyright (c) 2014 Robert Poor <rdpoor@gmail.com>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include <jansson.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
/* forward refs */
|
||||
void print_json(json_t *root);
|
||||
void print_json_aux(json_t *element, int indent);
|
||||
void print_json_indent(int indent);
|
||||
const char *json_plural(size_t count);
|
||||
void print_json_object(json_t *element, int indent);
|
||||
void print_json_array(json_t *element, int indent);
|
||||
void print_json_string(json_t *element, int indent);
|
||||
void print_json_integer(json_t *element, int indent);
|
||||
void print_json_real(json_t *element, int indent);
|
||||
void print_json_true(json_t *element, int indent);
|
||||
void print_json_false(json_t *element, int indent);
|
||||
void print_json_null(json_t *element, int indent);
|
||||
|
||||
void print_json(json_t *root) { print_json_aux(root, 0); }
|
||||
|
||||
void print_json_aux(json_t *element, int indent) {
|
||||
switch (json_typeof(element)) {
|
||||
case JSON_OBJECT:
|
||||
print_json_object(element, indent);
|
||||
break;
|
||||
case JSON_ARRAY:
|
||||
print_json_array(element, indent);
|
||||
break;
|
||||
case JSON_STRING:
|
||||
print_json_string(element, indent);
|
||||
break;
|
||||
case JSON_INTEGER:
|
||||
print_json_integer(element, indent);
|
||||
break;
|
||||
case JSON_REAL:
|
||||
print_json_real(element, indent);
|
||||
break;
|
||||
case JSON_TRUE:
|
||||
print_json_true(element, indent);
|
||||
break;
|
||||
case JSON_FALSE:
|
||||
print_json_false(element, indent);
|
||||
break;
|
||||
case JSON_NULL:
|
||||
print_json_null(element, indent);
|
||||
break;
|
||||
default:
|
||||
fprintf(stderr, "unrecognized JSON type %d\n", json_typeof(element));
|
||||
}
|
||||
}
|
||||
|
||||
void print_json_indent(int indent) {
|
||||
int i;
|
||||
for (i = 0; i < indent; i++) {
|
||||
putchar(' ');
|
||||
}
|
||||
}
|
||||
|
||||
const char *json_plural(size_t count) { return count == 1 ? "" : "s"; }
|
||||
|
||||
void print_json_object(json_t *element, int indent) {
|
||||
size_t size;
|
||||
const char *key;
|
||||
json_t *value;
|
||||
|
||||
print_json_indent(indent);
|
||||
size = json_object_size(element);
|
||||
|
||||
printf("JSON Object of %lld pair%s:\n", (long long)size, json_plural(size));
|
||||
json_object_foreach(element, key, value) {
|
||||
print_json_indent(indent + 2);
|
||||
printf("JSON Key: \"%s\"\n", key);
|
||||
print_json_aux(value, indent + 2);
|
||||
}
|
||||
}
|
||||
|
||||
void print_json_array(json_t *element, int indent) {
|
||||
size_t i;
|
||||
size_t size = json_array_size(element);
|
||||
print_json_indent(indent);
|
||||
|
||||
printf("JSON Array of %lld element%s:\n", (long long)size, json_plural(size));
|
||||
for (i = 0; i < size; i++) {
|
||||
print_json_aux(json_array_get(element, i), indent + 2);
|
||||
}
|
||||
}
|
||||
|
||||
void print_json_string(json_t *element, int indent) {
|
||||
print_json_indent(indent);
|
||||
printf("JSON String: \"%s\"\n", json_string_value(element));
|
||||
}
|
||||
|
||||
void print_json_integer(json_t *element, int indent) {
|
||||
print_json_indent(indent);
|
||||
printf("JSON Integer: \"%" JSON_INTEGER_FORMAT "\"\n", json_integer_value(element));
|
||||
}
|
||||
|
||||
void print_json_real(json_t *element, int indent) {
|
||||
print_json_indent(indent);
|
||||
printf("JSON Real: %f\n", json_real_value(element));
|
||||
}
|
||||
|
||||
void print_json_true(json_t *element, int indent) {
|
||||
(void)element;
|
||||
print_json_indent(indent);
|
||||
printf("JSON True\n");
|
||||
}
|
||||
|
||||
void print_json_false(json_t *element, int indent) {
|
||||
(void)element;
|
||||
print_json_indent(indent);
|
||||
printf("JSON False\n");
|
||||
}
|
||||
|
||||
void print_json_null(json_t *element, int indent) {
|
||||
(void)element;
|
||||
print_json_indent(indent);
|
||||
printf("JSON Null\n");
|
||||
}
|
||||
|
||||
/*
|
||||
* Parse text into a JSON object. If text is valid JSON, returns a
|
||||
* json_t structure, otherwise prints an error and returns NULL.
|
||||
*/
|
||||
json_t *load_json(const char *text) {
|
||||
json_t *root;
|
||||
json_error_t error;
|
||||
|
||||
root = json_loads(text, 0, &error);
|
||||
|
||||
if (root) {
|
||||
return root;
|
||||
} else {
|
||||
fprintf(stderr, "json error on line %d: %s\n", error.line, error.text);
|
||||
return (json_t *)0;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Print a prompt and return (by reference) a null-terminated line of
|
||||
* text. Returns NULL on eof or some error.
|
||||
*/
|
||||
char *read_line(char *line, int max_chars) {
|
||||
printf("Type some JSON > ");
|
||||
fflush(stdout);
|
||||
return fgets(line, max_chars, stdin);
|
||||
}
|
||||
|
||||
/* ================================================================
|
||||
* main
|
||||
*/
|
||||
|
||||
#define MAX_CHARS 4096
|
||||
|
||||
int main(int argc, char *argv[]) {
|
||||
char line[MAX_CHARS];
|
||||
|
||||
if (argc != 1) {
|
||||
fprintf(stderr, "Usage: %s\n", argv[0]);
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
while (read_line(line, MAX_CHARS) != (char *)NULL) {
|
||||
|
||||
/* parse text into JSON structure */
|
||||
json_t *root = load_json(line);
|
||||
|
||||
if (root) {
|
||||
/* print and release the JSON structure */
|
||||
print_json(root);
|
||||
json_decref(root);
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|

jansson.pc.in (new file, 10 lines)

prefix=@prefix@
exec_prefix=@exec_prefix@
libdir=@libdir@
includedir=@includedir@

Name: Jansson
Description: Library for encoding, decoding and manipulating JSON data
Version: @VERSION@
Libs: -L${libdir} -ljansson
Cflags: -I${includedir}

release.sh (new executable file, 70 lines)

#!/bin/sh
#
# Use this script to easily make releases of Jansson. It configures
# the source tree, and builds and signs all tarballs.

die() {
    echo $1 >&2
    exit 1
}

confirm() {
    local answer
    read -p "$1 [yN]: " answer
    [ "$answer" = "Y" -o "$answer" = "y" ] || exit 0
}

set -e
[ -f configure.ac ] || die "Must be run at project root directory"

# Determine version
v=$(grep AC_INIT configure.ac | sed -r 's/.*, \[(.+?)\],.*/\1/')
[ -n "$v" ] || die "Unable to determine version"
confirm "Version is $v, proceed?"

# Sanity checks
vi=$(grep version-info src/Makefile.am | sed 's/^[ \t]*//g' | cut -d" " -f2)
confirm "Libtool version-info is $vi, proceed?"

r=$(grep 'Released ' CHANGES | head -n 1)
confirm "Last CHANGES entry says \"$r\", proceed??"

dv=$(grep ^version doc/conf.py | sed -r "s/.*'(.*)'.*/\1/")
if [ "$dv" != "$v" ]; then
    die "Documentation version ($dv) doesn't match library version"
fi

[ -f Makefile ] && make distclean || true
rm -f jansson-$v.tar.*
rm -rf jansson-$v-doc
rm -f jansson-$v-doc.tar.*

autoreconf -fi
./configure

# Run tests and make gz source tarball
: ${VALGRIND:=1}
export VALGRIND
make distcheck

# Make bzip2 source tarball
make dist-bzip2

# Sign source tarballs
for s in gz bz2; do
    gpg --detach-sign --armor jansson-$v.tar.$s
done

# Build documentation
make html
mv doc/_build/html jansson-$v-doc

# Make and sign documentation tarballs
for s in gz bz2; do
    [ $s = gz ] && compress=gzip
    [ $s = bz2 ] && compress=bzip2
    tar cf - jansson-$v-doc | $compress -9 -c > jansson-$v-doc.tar.$s
    gpg --detach-sign --armor jansson-$v-doc.tar.$s
done

echo "All done"

scripts/clang-format (new executable file, 3 lines)

#!/bin/bash

git ls-files | grep '\.[ch]$' | xargs clang-format -i

scripts/clang-format-check (new executable file, 30 lines)

#!/bin/bash

CLANG_FORMAT=${CLANG_FORMAT:-clang-format}
CLANG_FORMAT_VERSION=${CLANG_FORMAT_VERSION:-}

if ! type $CLANG_FORMAT >/dev/null || \
   ! $CLANG_FORMAT --version | grep -q "version ${CLANG_FORMAT_VERSION}"; then
    # If running tests, mark this test as skipped.
    exit 77
fi

errors=0
paths=$(git ls-files | grep '\.[ch]$')
for path in $paths; do
    echo "Checking $path"
    $CLANG_FORMAT $path > $path.formatted
    in=$(cat $path)
    out=$(cat $path.formatted)

    if [ "$in" != "$out" ]; then
        diff -u $path $path.formatted
        errors=1
    fi
    rm $path.formatted
done

if [ $errors -ne 0 ]; then
    echo "Formatting errors detected, run ./scripts/clang-format to fix!"
    exit 1
fi

src/Makefile.am

@@ -1,18 +1,35 @@
EXTRA_DIST = jansson.def dtoa.c

include_HEADERS = jansson.h
nodist_include_HEADERS = jansson_config.h

lib_LTLIBRARIES = libjansson.la
libjansson_la_SOURCES = \
    dump.c \
    error.c \
    hashtable.c \
    hashtable.h \
    hashtable_seed.c \
    jansson_private.h \
    load.c \
    lookup3.h \
    memory.c \
    pack_unpack.c \
    strbuffer.c \
    strbuffer.h \
    strconv.c \
    utf.c \
    utf.h \
    util.h \
    value.c
libjansson_la_LDFLAGS = -version-info 0:1:0
    value.c \
    version.c

AM_CFLAGS = -Wall -Wextra -Werror -std=c99
if DTOA_ENABLED
libjansson_la_SOURCES += dtoa.c
endif

libjansson_la_LDFLAGS = \
    -no-undefined \
    -export-symbols-regex '^json_|^jansson_' \
    -version-info 18:1:14 \
    @JSON_SYMVER_LDFLAGS@ \
    @JSON_BSYMBOLIC_LDFLAGS@

src/dtoa.c (new file, 6265 lines): file diff suppressed because it is too large

src/dump.c (508 lines changed)

@ -1,120 +1,227 @@
|
|||
/*
|
||||
* Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#ifndef _GNU_SOURCE
|
||||
#define _GNU_SOURCE
|
||||
#endif
|
||||
|
||||
#include "jansson_private.h"
|
||||
|
||||
#include <assert.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#ifdef HAVE_UNISTD_H
|
||||
#include <unistd.h>
|
||||
#endif
|
||||
|
||||
#include <jansson.h>
|
||||
#include "jansson.h"
|
||||
#include "strbuffer.h"
|
||||
#include "utf.h"
|
||||
|
||||
typedef int (*dump_func)(const char *buffer, int size, void *data);
|
||||
#define MAX_INTEGER_STR_LENGTH 25
|
||||
#define MAX_REAL_STR_LENGTH 25
|
||||
|
||||
struct string
|
||||
{
|
||||
char *buffer;
|
||||
int length;
|
||||
int size;
|
||||
#define FLAGS_TO_INDENT(f) ((f) & 0x1F)
|
||||
#define FLAGS_TO_PRECISION(f) (((f) >> 11) & 0x1F)
|
||||
|
||||
struct buffer {
|
||||
const size_t size;
|
||||
size_t used;
|
||||
char *data;
|
||||
};
|
||||
|
||||
static int dump_to_strbuffer(const char *buffer, int size, void *data)
|
||||
{
|
||||
static int dump_to_strbuffer(const char *buffer, size_t size, void *data) {
|
||||
return strbuffer_append_bytes((strbuffer_t *)data, buffer, size);
|
||||
}
|
||||
|
||||
static int dump_to_file(const char *buffer, int size, void *data)
|
||||
{
|
||||
static int dump_to_buffer(const char *buffer, size_t size, void *data) {
|
||||
struct buffer *buf = (struct buffer *)data;
|
||||
|
||||
if (buf->used + size <= buf->size)
|
||||
memcpy(&buf->data[buf->used], buffer, size);
|
||||
|
||||
buf->used += size;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int dump_to_file(const char *buffer, size_t size, void *data) {
|
||||
FILE *dest = (FILE *)data;
|
||||
if(fwrite(buffer, size, 1, dest) != 1)
|
||||
if (fwrite(buffer, size, 1, dest) != 1)
|
||||
return -1;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int dump_indent(uint32_t flags, int depth, dump_func dump, void *data)
|
||||
{
|
||||
if(JSON_INDENT(flags) > 0)
|
||||
{
|
||||
char *ws_buffer;
|
||||
int ws_count = JSON_INDENT(flags) * depth;
|
||||
static int dump_to_fd(const char *buffer, size_t size, void *data) {
|
||||
#ifdef HAVE_UNISTD_H
|
||||
int *dest = (int *)data;
|
||||
if (write(*dest, buffer, size) == (ssize_t)size)
|
||||
return 0;
|
||||
#endif
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(dump("\n", 1, data))
|
||||
/* 32 spaces (the maximum indentation size) */
|
||||
static const char whitespace[] = " ";
|
||||
|
||||
static int dump_indent(size_t flags, int depth, int space, json_dump_callback_t dump,
|
||||
void *data) {
|
||||
if (FLAGS_TO_INDENT(flags) > 0) {
|
||||
unsigned int ws_count = FLAGS_TO_INDENT(flags), n_spaces = depth * ws_count;
|
||||
|
||||
if (dump("\n", 1, data))
|
||||
return -1;
|
||||
|
||||
if(ws_count == 0)
|
||||
return 0;
|
||||
while (n_spaces > 0) {
|
||||
int cur_n =
|
||||
n_spaces < sizeof whitespace - 1 ? n_spaces : sizeof whitespace - 1;
|
||||
|
||||
ws_buffer = alloca(ws_count);
|
||||
memset(ws_buffer, ' ', ws_count);
|
||||
return dump(ws_buffer, ws_count, data);
|
||||
if (dump(whitespace, cur_n, data))
|
||||
return -1;
|
||||
|
||||
n_spaces -= cur_n;
|
||||
}
|
||||
} else if (space && !(flags & JSON_COMPACT)) {
|
||||
return dump(" ", 1, data);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int dump_string(const char *str, dump_func dump, void *data)
|
||||
{
|
||||
const char *end;
|
||||
static int dump_string(const char *str, size_t len, json_dump_callback_t dump, void *data,
|
||||
size_t flags) {
|
||||
const char *pos, *end, *lim;
|
||||
int32_t codepoint = 0;
|
||||
|
||||
if(dump("\"", 1, data))
|
||||
if (dump("\"", 1, data))
|
||||
return -1;
|
||||
|
||||
end = str;
|
||||
while(1)
|
||||
{
|
||||
end = pos = str;
|
||||
lim = str + len;
|
||||
while (1) {
|
||||
const char *text;
|
||||
char seq[7];
|
||||
char seq[13];
|
||||
int length;
|
||||
|
||||
while(*end && *end != '\\' && *end != '"' && (*end < 0 || *end > 0x1F))
|
||||
end++;
|
||||
while (end < lim) {
|
||||
end = utf8_iterate(pos, lim - pos, &codepoint);
|
||||
if (!end)
|
||||
return -1;
|
||||
|
||||
if(end != str) {
|
||||
if(dump(str, end - str, data))
|
||||
/* mandatory escape or control char */
|
||||
if (codepoint == '\\' || codepoint == '"' || codepoint < 0x20)
|
||||
break;
|
||||
|
||||
/* slash */
|
||||
if ((flags & JSON_ESCAPE_SLASH) && codepoint == '/')
|
||||
break;
|
||||
|
||||
/* non-ASCII */
|
||||
if ((flags & JSON_ENSURE_ASCII) && codepoint > 0x7F)
|
||||
break;
|
||||
|
||||
pos = end;
|
||||
}
|
||||
|
||||
if (pos != str) {
|
||||
if (dump(str, pos - str, data))
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(!*end)
|
||||
if (end == pos)
|
||||
break;
|
||||
|
||||
/* handle \, ", and control codes */
|
||||
/* handle \, /, ", and control codes */
|
||||
length = 2;
|
||||
switch(*end)
|
||||
{
|
||||
case '\\': text = "\\\\"; break;
|
||||
case '\"': text = "\\\""; break;
|
||||
case '\b': text = "\\b"; break;
|
||||
case '\f': text = "\\f"; break;
|
||||
case '\n': text = "\\n"; break;
|
||||
case '\r': text = "\\r"; break;
|
||||
case '\t': text = "\\t"; break;
|
||||
default:
|
||||
{
|
||||
sprintf(seq, "\\u00%02x", *end);
|
||||
switch (codepoint) {
|
||||
case '\\':
|
||||
text = "\\\\";
|
||||
break;
|
||||
case '\"':
|
||||
text = "\\\"";
|
||||
break;
|
||||
case '\b':
|
||||
text = "\\b";
|
||||
break;
|
||||
case '\f':
|
||||
text = "\\f";
|
||||
break;
|
||||
case '\n':
|
||||
text = "\\n";
|
||||
break;
|
||||
case '\r':
|
||||
text = "\\r";
|
||||
break;
|
||||
case '\t':
|
||||
text = "\\t";
|
||||
break;
|
||||
case '/':
|
||||
text = "\\/";
|
||||
break;
|
||||
default: {
|
||||
/* codepoint is in BMP */
|
||||
if (codepoint < 0x10000) {
|
||||
snprintf(seq, sizeof(seq), "\\u%04X", (unsigned int)codepoint);
|
||||
length = 6;
|
||||
}
|
||||
|
||||
/* not in BMP -> construct a UTF-16 surrogate pair */
|
||||
else {
|
||||
int32_t first, last;
|
||||
|
||||
codepoint -= 0x10000;
|
||||
first = 0xD800 | ((codepoint & 0xffc00) >> 10);
|
||||
last = 0xDC00 | (codepoint & 0x003ff);
|
||||
|
||||
snprintf(seq, sizeof(seq), "\\u%04X\\u%04X", (unsigned int)first,
|
||||
(unsigned int)last);
|
||||
length = 12;
|
||||
}
|
||||
|
||||
text = seq;
|
||||
length = 6;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if(dump(text, length, data))
|
||||
if (dump(text, length, data))
|
||||
return -1;
|
||||
|
||||
end++;
|
||||
str = end;
|
||||
str = pos = end;
|
||||
}
|
||||
|
||||
return dump("\"", 1, data);
|
||||
}
|
||||
|
||||
static int do_dump(const json_t *json, uint32_t flags, int depth,
|
||||
dump_func dump, void *data)
|
||||
{
|
||||
switch(json_typeof(json)) {
|
||||
struct key_len {
|
||||
const char *key;
|
||||
int len;
|
||||
};
|
||||
|
||||
static int compare_keys(const void *key1, const void *key2) {
|
||||
const struct key_len *k1 = key1;
|
||||
const struct key_len *k2 = key2;
|
||||
const size_t min_size = k1->len < k2->len ? k1->len : k2->len;
|
||||
int res = memcmp(k1->key, k2->key, min_size);
|
||||
|
||||
if (res)
|
||||
return res;
|
||||
|
||||
return k1->len - k2->len;
|
||||
}
|
||||
|
||||
static int do_dump(const json_t *json, size_t flags, int depth, hashtable_t *parents,
|
||||
json_dump_callback_t dump, void *data) {
|
||||
int embed = flags & JSON_EMBED;
|
||||
|
||||
flags &= ~JSON_EMBED;
|
||||
|
||||
if (!json)
|
||||
return -1;
|
||||
|
||||
switch (json_typeof(json)) {
|
||||
case JSON_NULL:
|
||||
return dump("null", 4, data);
|
||||
|
||||
|
@ -124,105 +231,190 @@ static int do_dump(const json_t *json, uint32_t flags, int depth,
|
|||
case JSON_FALSE:
|
||||
return dump("false", 5, data);
|
||||
|
||||
case JSON_INTEGER:
|
||||
{
|
||||
char *buffer;
|
||||
int size, ret;
|
||||
case JSON_INTEGER: {
|
||||
char buffer[MAX_INTEGER_STR_LENGTH];
|
||||
int size;
|
||||
|
||||
size = asprintf(&buffer, "%d", json_integer_value(json));
|
||||
if(size == -1)
|
||||
size = snprintf(buffer, MAX_INTEGER_STR_LENGTH, "%" JSON_INTEGER_FORMAT,
|
||||
json_integer_value(json));
|
||||
if (size < 0 || size >= MAX_INTEGER_STR_LENGTH)
|
||||
return -1;
|
||||
|
||||
ret = dump(buffer, size, data);
|
||||
free(buffer);
|
||||
return ret;
|
||||
return dump(buffer, size, data);
|
||||
}
|
||||
|
||||
case JSON_REAL:
|
||||
{
|
||||
char *buffer;
|
||||
int size, ret;
|
||||
case JSON_REAL: {
|
||||
char buffer[MAX_REAL_STR_LENGTH];
|
||||
int size;
|
||||
double value = json_real_value(json);
|
||||
|
||||
size = asprintf(&buffer, "%.17f", json_real_value(json));
|
||||
if(size == -1)
|
||||
size = jsonp_dtostr(buffer, MAX_REAL_STR_LENGTH, value,
|
||||
FLAGS_TO_PRECISION(flags));
|
||||
if (size < 0)
|
||||
return -1;
|
||||
|
||||
ret = dump(buffer, size, data);
|
||||
free(buffer);
|
||||
return ret;
|
||||
return dump(buffer, size, data);
|
||||
}
|
||||
|
||||
case JSON_STRING:
|
||||
return dump_string(json_string_value(json), dump, data);
|
||||
return dump_string(json_string_value(json), json_string_length(json), dump,
|
||||
data, flags);
|
||||
|
||||
case JSON_ARRAY:
|
||||
{
|
||||
int i;
|
||||
int n = json_array_size(json);
|
||||
case JSON_ARRAY: {
|
||||
size_t n;
|
||||
size_t i;
|
||||
/* Space for "0x", double the sizeof a pointer for the hex and a
|
||||
* terminator. */
|
||||
char key[2 + (sizeof(json) * 2) + 1];
|
||||
size_t key_len;
|
||||
|
||||
if(dump("[", 1, data))
|
||||
return -1;
|
||||
if(n == 0)
|
||||
return dump("]", 1, data);
|
||||
if(dump_indent(flags, depth + 1, dump, data))
|
||||
/* detect circular references */
|
||||
if (jsonp_loop_check(parents, json, key, sizeof(key), &key_len))
|
||||
return -1;
|
||||
|
||||
for(i = 0; i < n; ++i) {
|
||||
if(do_dump(json_array_get(json, i), flags, depth + 1,
|
||||
dump, data))
|
||||
n = json_array_size(json);
|
||||
|
||||
if (!embed && dump("[", 1, data))
|
||||
return -1;
|
||||
if (n == 0) {
|
||||
hashtable_del(parents, key, key_len);
|
||||
return embed ? 0 : dump("]", 1, data);
|
||||
}
|
||||
if (dump_indent(flags, depth + 1, 0, dump, data))
|
||||
return -1;
|
||||
|
||||
for (i = 0; i < n; ++i) {
|
||||
if (do_dump(json_array_get(json, i), flags, depth + 1, parents, dump,
|
||||
data))
|
||||
return -1;
|
||||
|
||||
if(i < n - 1)
|
||||
{
|
||||
if(dump(",", 1, data) ||
|
||||
dump_indent(flags, depth + 1, dump, data))
|
||||
if (i < n - 1) {
|
||||
if (dump(",", 1, data) ||
|
||||
dump_indent(flags, depth + 1, 1, dump, data))
|
||||
return -1;
|
||||
}
|
||||
else
|
||||
{
|
||||
if(dump_indent(flags, depth, dump, data))
|
||||
} else {
|
||||
if (dump_indent(flags, depth, 0, dump, data))
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
return dump("]", 1, data);
|
||||
|
||||
hashtable_del(parents, key, key_len);
|
||||
return embed ? 0 : dump("]", 1, data);
|
||||
}
|
||||
|
||||
case JSON_OBJECT:
|
||||
{
|
||||
void *iter = json_object_iter((json_t *)json);
|
||||
case JSON_OBJECT: {
|
||||
void *iter;
|
||||
const char *separator;
|
||||
int separator_length;
|
||||
char loop_key[LOOP_KEY_LEN];
|
||||
size_t loop_key_len;
|
||||
|
||||
if(dump("{", 1, data))
|
||||
return -1;
|
||||
if(!iter)
|
||||
return dump("}", 1, data);
|
||||
if(dump_indent(flags, depth + 1, dump, data))
|
||||
if (flags & JSON_COMPACT) {
|
||||
separator = ":";
|
||||
separator_length = 1;
|
||||
} else {
|
||||
separator = ": ";
|
||||
separator_length = 2;
|
||||
}
|
||||
|
||||
/* detect circular references */
|
||||
if (jsonp_loop_check(parents, json, loop_key, sizeof(loop_key),
|
||||
&loop_key_len))
|
||||
return -1;
|
||||
|
||||
while(iter)
|
||||
{
|
||||
void *next = json_object_iter_next((json_t *)json, iter);
|
||||
iter = json_object_iter((json_t *)json);
|
||||
|
||||
dump_string(json_object_iter_key(iter), dump, data);
|
||||
if(dump(": ", 2, data) ||
|
||||
do_dump(json_object_iter_value(iter), flags, depth + 1,
|
||||
dump, data))
|
||||
if (!embed && dump("{", 1, data))
|
||||
return -1;
|
||||
if (!iter) {
|
||||
hashtable_del(parents, loop_key, loop_key_len);
|
||||
return embed ? 0 : dump("}", 1, data);
|
||||
}
|
||||
if (dump_indent(flags, depth + 1, 0, dump, data))
|
||||
return -1;
|
||||
|
||||
if (flags & JSON_SORT_KEYS) {
|
||||
struct key_len *keys;
|
||||
size_t size, i;
|
||||
|
||||
size = json_object_size(json);
|
||||
keys = jsonp_malloc(size * sizeof(struct key_len));
|
||||
if (!keys)
|
||||
return -1;
|
||||
|
||||
if(next)
|
||||
{
|
||||
if(dump(",", 1, data) ||
|
||||
dump_indent(flags, depth + 1, dump, data))
|
||||
return -1;
|
||||
i = 0;
|
||||
while (iter) {
|
||||
struct key_len *keylen = &keys[i];
|
||||
|
||||
keylen->key = json_object_iter_key(iter);
|
||||
keylen->len = json_object_iter_key_len(iter);
|
||||
|
||||
iter = json_object_iter_next((json_t *)json, iter);
|
||||
i++;
|
||||
}
|
||||
else
|
||||
{
|
||||
if(dump_indent(flags, depth, dump, data))
|
||||
assert(i == size);
|
||||
|
||||
qsort(keys, size, sizeof(struct key_len), compare_keys);
|
||||
|
||||
for (i = 0; i < size; i++) {
|
||||
const struct key_len *key;
|
||||
json_t *value;
|
||||
|
||||
key = &keys[i];
|
||||
value = json_object_getn(json, key->key, key->len);
|
||||
assert(value);
|
||||
|
||||
dump_string(key->key, key->len, dump, data, flags);
|
||||
if (dump(separator, separator_length, data) ||
|
||||
do_dump(value, flags, depth + 1, parents, dump, data)) {
|
||||
jsonp_free(keys);
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (i < size - 1) {
|
||||
if (dump(",", 1, data) ||
|
||||
dump_indent(flags, depth + 1, 1, dump, data)) {
|
||||
jsonp_free(keys);
|
||||
return -1;
|
||||
}
|
||||
} else {
|
||||
if (dump_indent(flags, depth, 0, dump, data)) {
|
||||
jsonp_free(keys);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
iter = next;
|
||||
jsonp_free(keys);
|
||||
} else {
|
||||
/* Don't sort keys */
|
||||
|
||||
while (iter) {
|
||||
void *next = json_object_iter_next((json_t *)json, iter);
|
||||
const char *key = json_object_iter_key(iter);
|
||||
const size_t key_len = json_object_iter_key_len(iter);
|
||||
|
||||
dump_string(key, key_len, dump, data, flags);
|
||||
if (dump(separator, separator_length, data) ||
|
||||
do_dump(json_object_iter_value(iter), flags, depth + 1, parents,
|
||||
dump, data))
|
||||
return -1;
|
||||
|
||||
if (next) {
|
||||
if (dump(",", 1, data) ||
|
||||
dump_indent(flags, depth + 1, 1, dump, data))
|
||||
return -1;
|
||||
} else {
|
||||
if (dump_indent(flags, depth, 0, dump, data))
|
||||
return -1;
|
||||
}
|
||||
|
||||
iter = next;
|
||||
}
|
||||
}
|
||||
return dump("}", 1, data);
|
||||
|
||||
hashtable_del(parents, loop_key, loop_key_len);
|
||||
return embed ? 0 : dump("}", 1, data);
|
||||
}
|
||||
|
||||
default:
|
||||
|
@ -231,50 +423,68 @@ static int do_dump(const json_t *json, uint32_t flags, int depth,
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
char *json_dumps(const json_t *json, uint32_t flags)
|
||||
{
|
||||
char *json_dumps(const json_t *json, size_t flags) {
|
||||
strbuffer_t strbuff;
|
||||
char *result;
|
||||
|
||||
if(!json_is_array(json) && !json_is_object(json))
|
||||
if (strbuffer_init(&strbuff))
|
||||
return NULL;
|
||||
|
||||
if(strbuffer_init(&strbuff))
|
||||
return NULL;
|
||||
if (json_dump_callback(json, dump_to_strbuffer, (void *)&strbuff, flags))
|
||||
result = NULL;
|
||||
else
|
||||
result = jsonp_strdup(strbuffer_value(&strbuff));
|
||||
|
||||
if(do_dump(json, flags, 0, dump_to_strbuffer, (void *)&strbuff))
|
||||
return NULL;
|
||||
|
||||
if(dump_to_strbuffer("\n", 1, (void *)&strbuff))
|
||||
return NULL;
|
||||
|
||||
result = strdup(strbuffer_value(&strbuff));
|
||||
strbuffer_close(&strbuff);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
int json_dumpf(const json_t *json, FILE *output, uint32_t flags)
|
||||
{
|
||||
if(!json_is_array(json) && !json_is_object(json))
|
||||
return -1;
|
||||
size_t json_dumpb(const json_t *json, char *buffer, size_t size, size_t flags) {
|
||||
struct buffer buf = {size, 0, buffer};
|
||||
|
||||
if(do_dump(json, flags, 0, dump_to_file, (void *)output))
|
||||
return -1;
|
||||
return dump_to_file("\n", 1, (void *)output);
|
||||
if (json_dump_callback(json, dump_to_buffer, (void *)&buf, flags))
|
||||
return 0;
|
||||
|
||||
return buf.used;
|
||||
}
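/* A minimal usage sketch, assuming the json_dumpb() defined above: a first
   call with no buffer reports the required length, a second call fills a
   caller-allocated buffer. The helper name dump_to_heap is made up for
   illustration, and error handling is kept deliberately short. */
#include <stdlib.h>
#include <jansson.h>

static char *dump_to_heap(const json_t *value, size_t flags, size_t *len_out) {
    size_t needed = json_dumpb(value, NULL, 0, flags); /* size query only */
    char *buf;

    if (needed == 0)
        return NULL; /* error, or nothing to dump */

    buf = malloc(needed);
    if (!buf)
        return NULL;

    /* note: json_dumpb() does not NUL-terminate the output */
    *len_out = json_dumpb(value, buf, needed, flags);
    return buf;
}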
|
||||
|
||||
int json_dump_file(const json_t *json, const char *path, uint32_t flags)
|
||||
{
|
||||
int json_dumpf(const json_t *json, FILE *output, size_t flags) {
|
||||
return json_dump_callback(json, dump_to_file, (void *)output, flags);
|
||||
}
|
||||
|
||||
int json_dumpfd(const json_t *json, int output, size_t flags) {
|
||||
return json_dump_callback(json, dump_to_fd, (void *)&output, flags);
|
||||
}
|
||||
|
||||
int json_dump_file(const json_t *json, const char *path, size_t flags) {
|
||||
int result;
|
||||
|
||||
FILE *output = fopen(path, "w");
|
||||
if(!output)
|
||||
if (!output)
|
||||
return -1;
|
||||
|
||||
result = json_dumpf(json, output, flags);
|
||||
|
||||
fclose(output);
|
||||
if (fclose(output) != 0)
|
||||
return -1;
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
int json_dump_callback(const json_t *json, json_dump_callback_t callback, void *data,
|
||||
size_t flags) {
|
||||
int res;
|
||||
hashtable_t parents_set;
|
||||
|
||||
if (!(flags & JSON_ENCODE_ANY)) {
|
||||
if (!json_is_array(json) && !json_is_object(json))
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (hashtable_init(&parents_set))
|
||||
return -1;
|
||||
res = do_dump(json, flags, 0, &parents_set, callback, data);
|
||||
hashtable_close(&parents_set);
|
||||
|
||||
return res;
|
||||
}
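/* A small sketch of driving json_dump_callback() with a custom sink; the
   counting_sink struct and write_chunk() are illustrative names only. The
   callback returns 0 to keep going and -1 to abort the dump. */
#include <stdio.h>
#include <jansson.h>

struct counting_sink {
    FILE *fp;
    size_t written;
};

static int write_chunk(const char *buffer, size_t size, void *data) {
    struct counting_sink *sink = data;

    if (fwrite(buffer, 1, size, sink->fp) != size)
        return -1; /* abort: json_dump_callback() then returns -1 as well */

    sink->written += size;
    return 0;
}

/* e.g.: struct counting_sink s = {stdout, 0};
         json_dump_callback(root, write_chunk, &s, JSON_INDENT(2) | JSON_SORT_KEYS); */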
|
||||
|
|
src/error.c | 59 (new file)

@@ -0,0 +1,59 @@
#include "jansson_private.h"
#include <string.h>

void jsonp_error_init(json_error_t *error, const char *source) {
    if (error) {
        error->text[0] = '\0';
        error->line = -1;
        error->column = -1;
        error->position = 0;
        if (source)
            jsonp_error_set_source(error, source);
        else
            error->source[0] = '\0';
    }
}

void jsonp_error_set_source(json_error_t *error, const char *source) {
    size_t length;

    if (!error || !source)
        return;

    length = strlen(source);
    if (length < JSON_ERROR_SOURCE_LENGTH)
        strncpy(error->source, source, length + 1);
    else {
        size_t extra = length - JSON_ERROR_SOURCE_LENGTH + 4;
        memcpy(error->source, "...", 3);
        strncpy(error->source + 3, source + extra, length - extra + 1);
    }
}

void jsonp_error_set(json_error_t *error, int line, int column, size_t position,
                     enum json_error_code code, const char *msg, ...) {
    va_list ap;

    va_start(ap, msg);
    jsonp_error_vset(error, line, column, position, code, msg, ap);
    va_end(ap);
}

void jsonp_error_vset(json_error_t *error, int line, int column, size_t position,
                      enum json_error_code code, const char *msg, va_list ap) {
    if (!error)
        return;

    if (error->text[0] != '\0') {
        /* error already set */
        return;
    }

    error->line = line;
    error->column = column;
    error->position = (int)position;

    vsnprintf(error->text, JSON_ERROR_TEXT_LENGTH - 1, msg, ap);
    error->text[JSON_ERROR_TEXT_LENGTH - 2] = '\0';
    error->text[JSON_ERROR_TEXT_LENGTH - 1] = code;
}
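/* Consumer-side sketch of the json_error_t that jsonp_error_vset() fills in,
   using the 2.x json_loads() signature; the malformed input string is made up.
   json_error_code() (declared in jansson.h) recovers the code byte stored at
   the end of the text buffer. */
#include <stdio.h>
#include <jansson.h>

static void parse_example(void) {
    json_error_t error;
    json_t *root = json_loads("{\"truncated\": ", 0, &error);

    if (!root) {
        fprintf(stderr, "parse failed at line %d, column %d: %s\n",
                error.line, error.column, error.text);
        if (json_error_code(&error) == json_error_premature_end_of_input)
            fprintf(stderr, "input was cut short\n");
        return;
    }
    json_decref(root);
}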
src/hashtable.c | 364

@@ -1,93 +1,89 @@
|
|||
/*
|
||||
* Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* This library is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
#include <jansson_private_config.h>
|
||||
#endif
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
#ifdef HAVE_STDINT_H
|
||||
#include <stdint.h>
|
||||
#endif
|
||||
|
||||
#include "hashtable.h"
|
||||
#include "jansson_private.h" /* for container_of() */
|
||||
#include <jansson_config.h> /* for JSON_INLINE */
|
||||
|
||||
#ifndef INITIAL_HASHTABLE_ORDER
|
||||
#define INITIAL_HASHTABLE_ORDER 3
|
||||
#endif
|
||||
|
||||
typedef struct hashtable_list list_t;
|
||||
typedef struct hashtable_pair pair_t;
|
||||
typedef struct hashtable_bucket bucket_t;
|
||||
|
||||
#define container_of(ptr_, type_, member_) \
|
||||
((type_ *)((char *)ptr_ - (size_t)&((type_ *)0)->member_))
|
||||
extern volatile uint32_t hashtable_seed;
|
||||
|
||||
#define list_to_pair(list_) container_of(list_, pair_t, list)
|
||||
/* Implementation of the hash function */
|
||||
#include "lookup3.h"
|
||||
|
||||
static inline void list_init(list_t *list)
|
||||
{
|
||||
#define list_to_pair(list_) container_of(list_, pair_t, list)
|
||||
#define ordered_list_to_pair(list_) container_of(list_, pair_t, ordered_list)
|
||||
#define hash_str(key, len) ((size_t)hashlittle((key), len, hashtable_seed))
|
||||
|
||||
static JSON_INLINE void list_init(list_t *list) {
|
||||
list->next = list;
|
||||
list->prev = list;
|
||||
}
|
||||
|
||||
static inline void list_insert(list_t *list, list_t *node)
|
||||
{
|
||||
static JSON_INLINE void list_insert(list_t *list, list_t *node) {
|
||||
node->next = list;
|
||||
node->prev = list->prev;
|
||||
list->prev->next = node;
|
||||
list->prev = node;
|
||||
}
|
||||
|
||||
static inline void list_remove(list_t *list)
|
||||
{
|
||||
static JSON_INLINE void list_remove(list_t *list) {
|
||||
list->prev->next = list->next;
|
||||
list->next->prev = list->prev;
|
||||
}
|
||||
|
||||
static inline int bucket_is_empty(hashtable_t *hashtable, bucket_t *bucket)
|
||||
{
|
||||
static JSON_INLINE int bucket_is_empty(hashtable_t *hashtable, bucket_t *bucket) {
|
||||
return bucket->first == &hashtable->list && bucket->first == bucket->last;
|
||||
}
|
||||
|
||||
static void insert_to_bucket(hashtable_t *hashtable, bucket_t *bucket,
|
||||
list_t *list)
|
||||
{
|
||||
if(bucket_is_empty(hashtable, bucket))
|
||||
{
|
||||
static void insert_to_bucket(hashtable_t *hashtable, bucket_t *bucket, list_t *list) {
|
||||
if (bucket_is_empty(hashtable, bucket)) {
|
||||
list_insert(&hashtable->list, list);
|
||||
bucket->first = bucket->last = list;
|
||||
}
|
||||
else
|
||||
{
|
||||
} else {
|
||||
list_insert(bucket->first, list);
|
||||
bucket->first = list;
|
||||
}
|
||||
}
|
||||
|
||||
static unsigned int primes[] = {
|
||||
5, 13, 23, 53, 97, 193, 389, 769, 1543, 3079, 6151, 12289, 24593,
|
||||
49157, 98317, 196613, 393241, 786433, 1572869, 3145739, 6291469,
|
||||
12582917, 25165843, 50331653, 100663319, 201326611, 402653189,
|
||||
805306457, 1610612741
|
||||
};
|
||||
static const unsigned int num_primes = sizeof(primes) / sizeof(unsigned int);
|
||||
|
||||
static inline unsigned int num_buckets(hashtable_t *hashtable)
|
||||
{
|
||||
return primes[hashtable->num_buckets];
|
||||
}
|
||||
|
||||
|
||||
static pair_t *hashtable_find_pair(hashtable_t *hashtable, bucket_t *bucket,
|
||||
const void *key, unsigned int hash)
|
||||
{
|
||||
const char *key, size_t key_len, size_t hash) {
|
||||
list_t *list;
|
||||
pair_t *pair;
|
||||
|
||||
if(bucket_is_empty(hashtable, bucket))
|
||||
if (bucket_is_empty(hashtable, bucket))
|
||||
return NULL;
|
||||
|
||||
list = bucket->first;
|
||||
while(1)
|
||||
{
|
||||
while (1) {
|
||||
pair = list_to_pair(list);
|
||||
if(pair->hash == hash && hashtable->cmp_keys(pair->key, key))
|
||||
if (pair->hash == hash && pair->key_len == key_len &&
|
||||
memcmp(pair->key, key, key_len) == 0)
|
||||
return pair;
|
||||
|
||||
if(list == bucket->last)
|
||||
if (list == bucket->last)
|
||||
break;
|
||||
|
||||
list = list->next;
|
||||
|
@ -97,67 +93,75 @@ static pair_t *hashtable_find_pair(hashtable_t *hashtable, bucket_t *bucket,
|
|||
}
|
||||
|
||||
/* returns 0 on success, -1 if key was not found */
|
||||
static int hashtable_do_del(hashtable_t *hashtable,
|
||||
const void *key, unsigned int hash)
|
||||
{
|
||||
static int hashtable_do_del(hashtable_t *hashtable, const char *key, size_t key_len,
|
||||
size_t hash) {
|
||||
pair_t *pair;
|
||||
bucket_t *bucket;
|
||||
unsigned int index;
|
||||
size_t index;
|
||||
|
||||
index = hash % num_buckets(hashtable);
|
||||
index = hash & hashmask(hashtable->order);
|
||||
bucket = &hashtable->buckets[index];
|
||||
|
||||
pair = hashtable_find_pair(hashtable, bucket, key, hash);
|
||||
if(!pair)
|
||||
pair = hashtable_find_pair(hashtable, bucket, key, key_len, hash);
|
||||
if (!pair)
|
||||
return -1;
|
||||
|
||||
if(&pair->list == bucket->first && &pair->list == bucket->last)
|
||||
if (&pair->list == bucket->first && &pair->list == bucket->last)
|
||||
bucket->first = bucket->last = &hashtable->list;
|
||||
|
||||
else if(&pair->list == bucket->first)
|
||||
else if (&pair->list == bucket->first)
|
||||
bucket->first = pair->list.next;
|
||||
|
||||
else if(&pair->list == bucket->last)
|
||||
else if (&pair->list == bucket->last)
|
||||
bucket->last = pair->list.prev;
|
||||
|
||||
list_remove(&pair->list);
|
||||
list_remove(&pair->ordered_list);
|
||||
json_decref(pair->value);
|
||||
|
||||
if(hashtable->free_key)
|
||||
hashtable->free_key(pair->key);
|
||||
if(hashtable->free_value)
|
||||
hashtable->free_value(pair->value);
|
||||
|
||||
free(pair);
|
||||
jsonp_free(pair);
|
||||
hashtable->size--;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int hashtable_do_rehash(hashtable_t *hashtable)
|
||||
{
|
||||
static void hashtable_do_clear(hashtable_t *hashtable) {
|
||||
list_t *list, *next;
|
||||
pair_t *pair;
|
||||
unsigned int i, index, new_size;
|
||||
|
||||
free(hashtable->buckets);
|
||||
for (list = hashtable->list.next; list != &hashtable->list; list = next) {
|
||||
next = list->next;
|
||||
pair = list_to_pair(list);
|
||||
json_decref(pair->value);
|
||||
jsonp_free(pair);
|
||||
}
|
||||
}
|
||||
|
||||
hashtable->num_buckets++;
|
||||
new_size = num_buckets(hashtable);
|
||||
static int hashtable_do_rehash(hashtable_t *hashtable) {
|
||||
list_t *list, *next;
|
||||
pair_t *pair;
|
||||
size_t i, index, new_size, new_order;
|
||||
struct hashtable_bucket *new_buckets;
|
||||
|
||||
hashtable->buckets = malloc(new_size * sizeof(bucket_t));
|
||||
if(!hashtable->buckets)
|
||||
new_order = hashtable->order + 1;
|
||||
new_size = hashsize(new_order);
|
||||
|
||||
new_buckets = jsonp_malloc(new_size * sizeof(bucket_t));
|
||||
if (!new_buckets)
|
||||
return -1;
|
||||
|
||||
for(i = 0; i < num_buckets(hashtable); i++)
|
||||
{
|
||||
hashtable->buckets[i].first = hashtable->buckets[i].last =
|
||||
&hashtable->list;
|
||||
jsonp_free(hashtable->buckets);
|
||||
hashtable->buckets = new_buckets;
|
||||
hashtable->order = new_order;
|
||||
|
||||
for (i = 0; i < hashsize(hashtable->order); i++) {
|
||||
hashtable->buckets[i].first = hashtable->buckets[i].last = &hashtable->list;
|
||||
}
|
||||
|
||||
list = hashtable->list.next;
|
||||
list_init(&hashtable->list);
|
||||
|
||||
for(; list != &hashtable->list; list = next) {
|
||||
for (; list != &hashtable->list; list = next) {
|
||||
next = list->next;
|
||||
pair = list_to_pair(list);
|
||||
index = pair->hash % new_size;
|
||||
|
@ -167,152 +171,170 @@ static int hashtable_do_rehash(hashtable_t *hashtable)
|
|||
return 0;
|
||||
}
|
||||
|
||||
|
||||
hashtable_t *hashtable_create(key_hash_fn hash_key, key_cmp_fn cmp_keys,
|
||||
free_fn free_key, free_fn free_value)
|
||||
{
|
||||
hashtable_t *hashtable = malloc(sizeof(hashtable_t));
|
||||
if(!hashtable)
|
||||
return NULL;
|
||||
|
||||
if(hashtable_init(hashtable, hash_key, cmp_keys, free_key, free_value))
|
||||
{
|
||||
free(hashtable);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return hashtable;
|
||||
}
|
||||
|
||||
void hashtable_destroy(hashtable_t *hashtable)
|
||||
{
|
||||
hashtable_close(hashtable);
|
||||
free(hashtable);
|
||||
}
|
||||
|
||||
int hashtable_init(hashtable_t *hashtable,
|
||||
key_hash_fn hash_key, key_cmp_fn cmp_keys,
|
||||
free_fn free_key, free_fn free_value)
|
||||
{
|
||||
unsigned int i;
|
||||
int hashtable_init(hashtable_t *hashtable) {
|
||||
size_t i;
|
||||
|
||||
hashtable->size = 0;
|
||||
hashtable->num_buckets = 0; /* index to primes[] */
|
||||
hashtable->buckets = malloc(num_buckets(hashtable) * sizeof(bucket_t));
|
||||
if(!hashtable->buckets)
|
||||
hashtable->order = INITIAL_HASHTABLE_ORDER;
|
||||
hashtable->buckets = jsonp_malloc(hashsize(hashtable->order) * sizeof(bucket_t));
|
||||
if (!hashtable->buckets)
|
||||
return -1;
|
||||
|
||||
list_init(&hashtable->list);
|
||||
list_init(&hashtable->ordered_list);
|
||||
|
||||
hashtable->hash_key = hash_key;
|
||||
hashtable->cmp_keys = cmp_keys;
|
||||
hashtable->free_key = free_key;
|
||||
hashtable->free_value = free_value;
|
||||
|
||||
for(i = 0; i < num_buckets(hashtable); i++)
|
||||
{
|
||||
hashtable->buckets[i].first = hashtable->buckets[i].last =
|
||||
&hashtable->list;
|
||||
for (i = 0; i < hashsize(hashtable->order); i++) {
|
||||
hashtable->buckets[i].first = hashtable->buckets[i].last = &hashtable->list;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
void hashtable_close(hashtable_t *hashtable)
|
||||
{
|
||||
list_t *list, *next;
|
||||
pair_t *pair;
|
||||
for(list = hashtable->list.next; list != &hashtable->list; list = next)
|
||||
{
|
||||
next = list->next;
|
||||
pair = list_to_pair(list);
|
||||
if(hashtable->free_key)
|
||||
hashtable->free_key(pair->key);
|
||||
if(hashtable->free_value)
|
||||
hashtable->free_value(pair->value);
|
||||
free(pair);
|
||||
}
|
||||
|
||||
free(hashtable->buckets);
|
||||
void hashtable_close(hashtable_t *hashtable) {
|
||||
hashtable_do_clear(hashtable);
|
||||
jsonp_free(hashtable->buckets);
|
||||
}
|
||||
|
||||
int hashtable_set(hashtable_t *hashtable, void *key, void *value)
|
||||
{
|
||||
static pair_t *init_pair(json_t *value, const char *key, size_t key_len, size_t hash) {
|
||||
pair_t *pair;
|
||||
|
||||
/* offsetof(...) returns the size of pair_t without the last,
|
||||
flexible member. This way, the correct amount is
|
||||
allocated. */
|
||||
|
||||
if (key_len >= (size_t)-1 - offsetof(pair_t, key)) {
|
||||
/* Avoid an overflow if the key is very long */
|
||||
return NULL;
|
||||
}
|
||||
|
||||
pair = jsonp_malloc(offsetof(pair_t, key) + key_len + 1);
|
||||
|
||||
if (!pair)
|
||||
return NULL;
|
||||
|
||||
pair->hash = hash;
|
||||
memcpy(pair->key, key, key_len);
|
||||
pair->key[key_len] = '\0';
|
||||
pair->key_len = key_len;
|
||||
pair->value = value;
|
||||
|
||||
list_init(&pair->list);
|
||||
list_init(&pair->ordered_list);
|
||||
|
||||
return pair;
|
||||
}
|
||||
|
||||
int hashtable_set(hashtable_t *hashtable, const char *key, size_t key_len,
|
||||
json_t *value) {
|
||||
pair_t *pair;
|
||||
bucket_t *bucket;
|
||||
unsigned int hash, index;
|
||||
|
||||
hash = hashtable->hash_key(key);
|
||||
|
||||
/* if the key already exists, delete it */
|
||||
hashtable_do_del(hashtable, key, hash);
|
||||
size_t hash, index;
|
||||
|
||||
/* rehash if the load ratio exceeds 1 */
|
||||
if(hashtable->size >= num_buckets(hashtable))
|
||||
if(hashtable_do_rehash(hashtable))
|
||||
if (hashtable->size >= hashsize(hashtable->order))
|
||||
if (hashtable_do_rehash(hashtable))
|
||||
return -1;
|
||||
|
||||
pair = malloc(sizeof(pair_t));
|
||||
if(!pair)
|
||||
return -1;
|
||||
|
||||
pair->key = key;
|
||||
pair->value = value;
|
||||
pair->hash = hash;
|
||||
list_init(&pair->list);
|
||||
|
||||
index = hash % num_buckets(hashtable);
|
||||
hash = hash_str(key, key_len);
|
||||
index = hash & hashmask(hashtable->order);
|
||||
bucket = &hashtable->buckets[index];
|
||||
pair = hashtable_find_pair(hashtable, bucket, key, key_len, hash);
|
||||
|
||||
insert_to_bucket(hashtable, bucket, &pair->list);
|
||||
if (pair) {
|
||||
json_decref(pair->value);
|
||||
pair->value = value;
|
||||
} else {
|
||||
pair = init_pair(value, key, key_len, hash);
|
||||
|
||||
hashtable->size++;
|
||||
if (!pair)
|
||||
return -1;
|
||||
|
||||
insert_to_bucket(hashtable, bucket, &pair->list);
|
||||
list_insert(&hashtable->ordered_list, &pair->ordered_list);
|
||||
|
||||
hashtable->size++;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
void *hashtable_get(hashtable_t *hashtable, const void *key)
|
||||
{
|
||||
void *hashtable_get(hashtable_t *hashtable, const char *key, size_t key_len) {
|
||||
pair_t *pair;
|
||||
unsigned int hash;
|
||||
size_t hash;
|
||||
bucket_t *bucket;
|
||||
|
||||
hash = hashtable->hash_key(key);
|
||||
bucket = &hashtable->buckets[hash % num_buckets(hashtable)];
|
||||
hash = hash_str(key, key_len);
|
||||
bucket = &hashtable->buckets[hash & hashmask(hashtable->order)];
|
||||
|
||||
pair = hashtable_find_pair(hashtable, bucket, key, hash);
|
||||
if(!pair)
|
||||
pair = hashtable_find_pair(hashtable, bucket, key, key_len, hash);
|
||||
if (!pair)
|
||||
return NULL;
|
||||
|
||||
return pair->value;
|
||||
}
|
||||
|
||||
int hashtable_del(hashtable_t *hashtable, const void *key)
|
||||
{
|
||||
unsigned int hash = hashtable->hash_key(key);
|
||||
return hashtable_do_del(hashtable, key, hash);
|
||||
int hashtable_del(hashtable_t *hashtable, const char *key, size_t key_len) {
|
||||
size_t hash = hash_str(key, key_len);
|
||||
return hashtable_do_del(hashtable, key, key_len, hash);
|
||||
}
|
||||
|
||||
void *hashtable_iter(hashtable_t *hashtable)
|
||||
{
|
||||
return hashtable_iter_next(hashtable, &hashtable->list);
|
||||
void hashtable_clear(hashtable_t *hashtable) {
|
||||
size_t i;
|
||||
|
||||
hashtable_do_clear(hashtable);
|
||||
|
||||
for (i = 0; i < hashsize(hashtable->order); i++) {
|
||||
hashtable->buckets[i].first = hashtable->buckets[i].last = &hashtable->list;
|
||||
}
|
||||
|
||||
list_init(&hashtable->list);
|
||||
list_init(&hashtable->ordered_list);
|
||||
hashtable->size = 0;
|
||||
}
|
||||
|
||||
void *hashtable_iter_next(hashtable_t *hashtable, void *iter)
|
||||
{
|
||||
void *hashtable_iter(hashtable_t *hashtable) {
|
||||
return hashtable_iter_next(hashtable, &hashtable->ordered_list);
|
||||
}
|
||||
|
||||
void *hashtable_iter_at(hashtable_t *hashtable, const char *key, size_t key_len) {
|
||||
pair_t *pair;
|
||||
size_t hash;
|
||||
bucket_t *bucket;
|
||||
|
||||
hash = hash_str(key, key_len);
|
||||
bucket = &hashtable->buckets[hash & hashmask(hashtable->order)];
|
||||
|
||||
pair = hashtable_find_pair(hashtable, bucket, key, key_len, hash);
|
||||
if (!pair)
|
||||
return NULL;
|
||||
|
||||
return &pair->ordered_list;
|
||||
}
|
||||
|
||||
void *hashtable_iter_next(hashtable_t *hashtable, void *iter) {
|
||||
list_t *list = (list_t *)iter;
|
||||
if(list->next == &hashtable->list)
|
||||
if (list->next == &hashtable->ordered_list)
|
||||
return NULL;
|
||||
return list->next;
|
||||
}
|
||||
|
||||
void *hashtable_iter_key(void *iter)
|
||||
{
|
||||
pair_t *pair = list_to_pair((list_t *)iter);
|
||||
void *hashtable_iter_key(void *iter) {
|
||||
pair_t *pair = ordered_list_to_pair((list_t *)iter);
|
||||
return pair->key;
|
||||
}
|
||||
|
||||
void *hashtable_iter_value(void *iter)
|
||||
{
|
||||
pair_t *pair = list_to_pair((list_t *)iter);
|
||||
size_t hashtable_iter_key_len(void *iter) {
|
||||
pair_t *pair = ordered_list_to_pair((list_t *)iter);
|
||||
return pair->key_len;
|
||||
}
|
||||
|
||||
void *hashtable_iter_value(void *iter) {
|
||||
pair_t *pair = ordered_list_to_pair((list_t *)iter);
|
||||
return pair->value;
|
||||
}
|
||||
|
||||
void hashtable_iter_set(void *iter, json_t *value) {
|
||||
pair_t *pair = ordered_list_to_pair((list_t *)iter);
|
||||
|
||||
json_decref(pair->value);
|
||||
pair->value = value;
|
||||
}
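/* The per-pair ordered_list maintained above is what gives objects their
   insertion-order iteration; a short sketch through the public wrappers built
   on these iterators (obj is assumed to be an existing JSON object): */
#include <stdio.h>
#include <jansson.h>

static void print_keys(json_t *obj) {
    const char *key;
    json_t *value;

    json_object_foreach(obj, key, value) {
        /* key and value are borrowed references; no json_decref() needed */
        printf("%s\n", key);
    }
}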
|
||||
|
|
src/hashtable.h | 123

@@ -1,5 +1,5 @@
|
|||
/*
|
||||
* Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* This library is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
|
@ -8,20 +8,24 @@
|
|||
#ifndef HASHTABLE_H
|
||||
#define HASHTABLE_H
|
||||
|
||||
typedef unsigned int (*key_hash_fn)(const void *key);
|
||||
typedef int (*key_cmp_fn)(const void *key1, const void *key2);
|
||||
typedef void (*free_fn)(void *key);
|
||||
#include "jansson.h"
|
||||
#include <stdlib.h>
|
||||
|
||||
struct hashtable_list {
|
||||
struct hashtable_list *prev;
|
||||
struct hashtable_list *next;
|
||||
struct hashtable_list *prev;
|
||||
struct hashtable_list *next;
|
||||
};
|
||||
|
||||
/* "pair" may be a bit confusing a name, but think of it as a
|
||||
key-value pair. In this case, it just encodes some extra data,
|
||||
too */
|
||||
struct hashtable_pair {
|
||||
void *key;
|
||||
void *value;
|
||||
unsigned int hash;
|
||||
struct hashtable_list list;
|
||||
struct hashtable_list ordered_list;
|
||||
size_t hash;
|
||||
json_t *value;
|
||||
size_t key_len;
|
||||
char key[1];
|
||||
};
|
||||
|
||||
struct hashtable_bucket {
|
||||
|
@ -30,60 +34,27 @@ struct hashtable_bucket {
|
|||
};
|
||||
|
||||
typedef struct hashtable {
|
||||
unsigned int size;
|
||||
size_t size;
|
||||
struct hashtable_bucket *buckets;
|
||||
unsigned int num_buckets; /* index to primes[] */
|
||||
size_t order; /* hashtable has pow(2, order) buckets */
|
||||
struct hashtable_list list;
|
||||
|
||||
key_hash_fn hash_key;
|
||||
key_cmp_fn cmp_keys; /* returns non-zero for equal keys */
|
||||
free_fn free_key;
|
||||
free_fn free_value;
|
||||
struct hashtable_list ordered_list;
|
||||
} hashtable_t;
|
||||
|
||||
/**
|
||||
* hashtable_create - Create a hashtable object
|
||||
*
|
||||
* @hash_key: The key hashing function
|
||||
* @cmp_keys: The key compare function. Returns non-zero for equal and
|
||||
* zero for unequal unequal keys
|
||||
* @free_key: If non-NULL, called for a key that is no longer referenced.
|
||||
* @free_value: If non-NULL, called for a value that is no longer referenced.
|
||||
*
|
||||
* Returns a new hashtable object that should be freed with
|
||||
* hashtable_destroy when it's no longer used, or NULL on failure (out
|
||||
* of memory).
|
||||
*/
|
||||
hashtable_t *hashtable_create(key_hash_fn hash_key, key_cmp_fn cmp_keys,
|
||||
free_fn free_key, free_fn free_value);
|
||||
|
||||
/**
|
||||
* hashtable_destroy - Destroy a hashtable object
|
||||
*
|
||||
* @hashtable: The hashtable
|
||||
*
|
||||
* Destroys a hashtable created with hashtable_create().
|
||||
*/
|
||||
void hashtable_destroy(hashtable_t *hashtable);
|
||||
#define hashtable_key_to_iter(key_) \
|
||||
(&(container_of(key_, struct hashtable_pair, key)->ordered_list))
|
||||
|
||||
/**
|
||||
* hashtable_init - Initialize a hashtable object
|
||||
*
|
||||
* @hashtable: The (statically allocated) hashtable object
|
||||
* @hash_key: The key hashing function
|
||||
* @cmp_keys: The key compare function. Returns non-zero for equal and
|
||||
* zero for unequal unequal keys
|
||||
* @free_key: If non-NULL, called for a key that is no longer referenced.
|
||||
* @free_value: If non-NULL, called for a value that is no longer referenced.
|
||||
*
|
||||
* Initializes a statically allocated hashtable object. The object
|
||||
* should be cleared with hashtable_close when it's no longer used.
|
||||
*
|
||||
* Returns 0 on success, -1 on error (out of memory).
|
||||
*/
|
||||
int hashtable_init(hashtable_t *hashtable,
|
||||
key_hash_fn hash_key, key_cmp_fn cmp_keys,
|
||||
free_fn free_key, free_fn free_value);
|
||||
int hashtable_init(hashtable_t *hashtable) JANSSON_ATTRS((warn_unused_result));
|
||||
|
||||
/**
|
||||
* hashtable_close - Release all resources used by a hashtable object
|
||||
|
@ -99,40 +70,49 @@ void hashtable_close(hashtable_t *hashtable);
|
|||
*
|
||||
* @hashtable: The hashtable object
|
||||
* @key: The key
|
||||
* @key_len: The length of key
|
||||
* @serial: For addition order of keys
|
||||
* @value: The value
|
||||
*
|
||||
* If a value with the given key already exists, its value is replaced
|
||||
* with the new value.
|
||||
*
|
||||
* Key and value are "stealed" in the sense that hashtable frees them
|
||||
* automatically when they are no longer used. The freeing is
|
||||
* accomplished by calling free_key and free_value functions that were
|
||||
* supplied to hashtable_new. In case one or both of the free
|
||||
* functions is NULL, the corresponding item is not "stealed".
|
||||
* with the new value. Value is "stolen" in the sense that hashtable
|
||||
* doesn't increment its refcount but decreases the refcount when the
|
||||
* value is no longer needed.
|
||||
*
|
||||
* Returns 0 on success, -1 on failure (out of memory).
|
||||
*/
|
||||
int hashtable_set(hashtable_t *hashtable, void *key, void *value);
|
||||
int hashtable_set(hashtable_t *hashtable, const char *key, size_t key_len, json_t *value);
|
||||
|
||||
/**
|
||||
* hashtable_get - Get a value associated with a key
|
||||
*
|
||||
* @hashtable: The hashtable object
|
||||
* @key: The key
|
||||
* @key_len: The length of key
|
||||
*
|
||||
* Returns value if it is found, or NULL otherwise.
|
||||
*/
|
||||
void *hashtable_get(hashtable_t *hashtable, const void *key);
|
||||
void *hashtable_get(hashtable_t *hashtable, const char *key, size_t key_len);
|
||||
|
||||
/**
|
||||
* hashtable_del - Remove a value from the hashtable
|
||||
*
|
||||
* @hashtable: The hashtable object
|
||||
* @key: The key
|
||||
* @key_len: The length of key
|
||||
*
|
||||
* Returns 0 on success, or -1 if the key was not found.
|
||||
*/
|
||||
int hashtable_del(hashtable_t *hashtable, const void *key);
|
||||
int hashtable_del(hashtable_t *hashtable, const char *key, size_t key_len);
|
||||
|
||||
/**
|
||||
* hashtable_clear - Clear hashtable
|
||||
*
|
||||
* @hashtable: The hashtable object
|
||||
*
|
||||
* Removes all items from the hashtable.
|
||||
*/
|
||||
void hashtable_clear(hashtable_t *hashtable);
|
||||
|
||||
/**
|
||||
* hashtable_iter - Iterate over hashtable
|
||||
|
@ -151,6 +131,18 @@ int hashtable_del(hashtable_t *hashtable, const void *key);
|
|||
*/
|
||||
void *hashtable_iter(hashtable_t *hashtable);
|
||||
|
||||
/**
|
||||
* hashtable_iter_at - Return an iterator at a specific key
|
||||
*
|
||||
* @hashtable: The hashtable object
|
||||
* @key: The key that the iterator should point to
|
||||
* @key_len: The length of key
|
||||
*
|
||||
* Like hashtable_iter() but returns an iterator pointing to a
|
||||
* specific key.
|
||||
*/
|
||||
void *hashtable_iter_at(hashtable_t *hashtable, const char *key, size_t key_len);
|
||||
|
||||
/**
|
||||
* hashtable_iter_next - Advance an iterator
|
||||
*
|
||||
|
@ -169,6 +161,13 @@ void *hashtable_iter_next(hashtable_t *hashtable, void *iter);
|
|||
*/
|
||||
void *hashtable_iter_key(void *iter);
|
||||
|
||||
/**
|
||||
* hashtable_iter_key_len - Retrieve the key length pointed by an iterator
|
||||
*
|
||||
* @iter: The iterator
|
||||
*/
|
||||
size_t hashtable_iter_key_len(void *iter);
|
||||
|
||||
/**
|
||||
* hashtable_iter_value - Retrieve the value pointed by an iterator
|
||||
*
|
||||
|
@ -176,4 +175,12 @@ void *hashtable_iter_key(void *iter);
|
|||
*/
|
||||
void *hashtable_iter_value(void *iter);
|
||||
|
||||
/**
|
||||
* hashtable_iter_set - Set the value pointed by an iterator
|
||||
*
|
||||
* @iter: The iterator
|
||||
* @value: The value to set
|
||||
*/
|
||||
void hashtable_iter_set(void *iter, json_t *value);
|
||||
|
||||
#endif
|
||||
|
|
src/hashtable_seed.c | 277 (new file)

@@ -0,0 +1,277 @@
|
|||
/* Generate sizeof(uint32_t) bytes of as random data as possible to seed
|
||||
the hash function.
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
#include <jansson_private_config.h>
|
||||
#endif
|
||||
|
||||
#include <stdio.h>
|
||||
#include <time.h>
|
||||
|
||||
#ifdef HAVE_STDINT_H
|
||||
#include <stdint.h>
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_FCNTL_H
|
||||
#include <fcntl.h>
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_SCHED_H
|
||||
#include <sched.h>
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_UNISTD_H
|
||||
#include <unistd.h>
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_SYS_STAT_H
|
||||
#include <sys/stat.h>
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_SYS_TIME_H
|
||||
#include <sys/time.h>
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_SYS_TYPES_H
|
||||
#include <sys/types.h>
|
||||
#endif
|
||||
|
||||
#if defined(_WIN32)
|
||||
/* For GetModuleHandle(), GetProcAddress() and GetCurrentProcessId() */
|
||||
#include <windows.h>
|
||||
#endif
|
||||
|
||||
#include "jansson.h"
|
||||
|
||||
static uint32_t buf_to_uint32(char *data) {
|
||||
size_t i;
|
||||
uint32_t result = 0;
|
||||
|
||||
for (i = 0; i < sizeof(uint32_t); i++)
|
||||
result = (result << 8) | (unsigned char)data[i];
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/* /dev/urandom */
|
||||
#if !defined(_WIN32) && defined(USE_URANDOM)
|
||||
static int seed_from_urandom(uint32_t *seed) {
|
||||
/* Use unbuffered I/O if we have open(), close() and read(). Otherwise
|
||||
fall back to fopen() */
|
||||
|
||||
char data[sizeof(uint32_t)];
|
||||
int ok;
|
||||
|
||||
#if defined(HAVE_OPEN) && defined(HAVE_CLOSE) && defined(HAVE_READ)
|
||||
int urandom;
|
||||
urandom = open("/dev/urandom", O_RDONLY);
|
||||
if (urandom == -1)
|
||||
return 1;
|
||||
|
||||
ok = read(urandom, data, sizeof(uint32_t)) == sizeof(uint32_t);
|
||||
close(urandom);
|
||||
#else
|
||||
FILE *urandom;
|
||||
|
||||
urandom = fopen("/dev/urandom", "rb");
|
||||
if (!urandom)
|
||||
return 1;
|
||||
|
||||
ok = fread(data, 1, sizeof(uint32_t), urandom) == sizeof(uint32_t);
|
||||
fclose(urandom);
|
||||
#endif
|
||||
|
||||
if (!ok)
|
||||
return 1;
|
||||
|
||||
*seed = buf_to_uint32(data);
|
||||
return 0;
|
||||
}
|
||||
#endif
|
||||
|
||||
/* Windows Crypto API */
|
||||
#if defined(_WIN32) && defined(USE_WINDOWS_CRYPTOAPI)
|
||||
#include <wincrypt.h>
|
||||
|
||||
typedef BOOL(WINAPI *CRYPTACQUIRECONTEXTA)(HCRYPTPROV *phProv, LPCSTR pszContainer,
|
||||
LPCSTR pszProvider, DWORD dwProvType,
|
||||
DWORD dwFlags);
|
||||
typedef BOOL(WINAPI *CRYPTGENRANDOM)(HCRYPTPROV hProv, DWORD dwLen, BYTE *pbBuffer);
|
||||
typedef BOOL(WINAPI *CRYPTRELEASECONTEXT)(HCRYPTPROV hProv, DWORD dwFlags);
|
||||
|
||||
static int seed_from_windows_cryptoapi(uint32_t *seed) {
|
||||
HINSTANCE hAdvAPI32 = NULL;
|
||||
CRYPTACQUIRECONTEXTA pCryptAcquireContext = NULL;
|
||||
CRYPTGENRANDOM pCryptGenRandom = NULL;
|
||||
CRYPTRELEASECONTEXT pCryptReleaseContext = NULL;
|
||||
HCRYPTPROV hCryptProv = 0;
|
||||
BYTE data[sizeof(uint32_t)];
|
||||
int ok;
|
||||
|
||||
hAdvAPI32 = GetModuleHandle(TEXT("advapi32.dll"));
|
||||
if (hAdvAPI32 == NULL)
|
||||
return 1;
|
||||
|
||||
pCryptAcquireContext =
|
||||
(CRYPTACQUIRECONTEXTA)GetProcAddress(hAdvAPI32, "CryptAcquireContextA");
|
||||
if (!pCryptAcquireContext)
|
||||
return 1;
|
||||
|
||||
pCryptGenRandom = (CRYPTGENRANDOM)GetProcAddress(hAdvAPI32, "CryptGenRandom");
|
||||
if (!pCryptGenRandom)
|
||||
return 1;
|
||||
|
||||
pCryptReleaseContext =
|
||||
(CRYPTRELEASECONTEXT)GetProcAddress(hAdvAPI32, "CryptReleaseContext");
|
||||
if (!pCryptReleaseContext)
|
||||
return 1;
|
||||
|
||||
if (!pCryptAcquireContext(&hCryptProv, NULL, NULL, PROV_RSA_FULL,
|
||||
CRYPT_VERIFYCONTEXT))
|
||||
return 1;
|
||||
|
||||
ok = pCryptGenRandom(hCryptProv, sizeof(uint32_t), data);
|
||||
pCryptReleaseContext(hCryptProv, 0);
|
||||
|
||||
if (!ok)
|
||||
return 1;
|
||||
|
||||
*seed = buf_to_uint32((char *)data);
|
||||
return 0;
|
||||
}
|
||||
#endif
|
||||
|
||||
/* gettimeofday() and getpid() */
|
||||
static int seed_from_timestamp_and_pid(uint32_t *seed) {
|
||||
#ifdef HAVE_GETTIMEOFDAY
|
||||
/* XOR of seconds and microseconds */
|
||||
struct timeval tv;
|
||||
gettimeofday(&tv, NULL);
|
||||
*seed = (uint32_t)tv.tv_sec ^ (uint32_t)tv.tv_usec;
|
||||
#else
|
||||
/* Seconds only */
|
||||
*seed = (uint32_t)time(NULL);
|
||||
#endif
|
||||
|
||||
/* XOR with PID for more randomness */
|
||||
#if defined(_WIN32)
|
||||
*seed ^= (uint32_t)GetCurrentProcessId();
|
||||
#elif defined(HAVE_GETPID)
|
||||
*seed ^= (uint32_t)getpid();
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static uint32_t generate_seed() {
|
||||
uint32_t seed = 0;
|
||||
int done = 0;
|
||||
|
||||
#if !defined(_WIN32) && defined(USE_URANDOM)
|
||||
if (seed_from_urandom(&seed) == 0)
|
||||
done = 1;
|
||||
#endif
|
||||
|
||||
#if defined(_WIN32) && defined(USE_WINDOWS_CRYPTOAPI)
|
||||
if (seed_from_windows_cryptoapi(&seed) == 0)
|
||||
done = 1;
|
||||
#endif
|
||||
|
||||
if (!done) {
|
||||
/* Fall back to timestamp and PID if no better randomness is
|
||||
available */
|
||||
seed_from_timestamp_and_pid(&seed);
|
||||
}
|
||||
|
||||
/* Make sure the seed is never zero */
|
||||
if (seed == 0)
|
||||
seed = 1;
|
||||
|
||||
return seed;
|
||||
}
|
||||
|
||||
volatile uint32_t hashtable_seed = 0;
|
||||
|
||||
#if defined(HAVE_ATOMIC_BUILTINS) && (defined(HAVE_SCHED_YIELD) || !defined(_WIN32))
|
||||
static volatile char seed_initialized = 0;
|
||||
|
||||
void json_object_seed(size_t seed) {
|
||||
uint32_t new_seed = (uint32_t)seed;
|
||||
|
||||
if (hashtable_seed == 0) {
|
||||
if (__atomic_test_and_set(&seed_initialized, __ATOMIC_RELAXED) == 0) {
|
||||
/* Do the seeding ourselves */
|
||||
if (new_seed == 0)
|
||||
new_seed = generate_seed();
|
||||
|
||||
__atomic_store_n(&hashtable_seed, new_seed, __ATOMIC_RELEASE);
|
||||
} else {
|
||||
/* Wait for another thread to do the seeding */
|
||||
do {
|
||||
#ifdef HAVE_SCHED_YIELD
|
||||
sched_yield();
|
||||
#endif
|
||||
} while (__atomic_load_n(&hashtable_seed, __ATOMIC_ACQUIRE) == 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
#elif defined(HAVE_SYNC_BUILTINS) && (defined(HAVE_SCHED_YIELD) || !defined(_WIN32))
|
||||
void json_object_seed(size_t seed) {
|
||||
uint32_t new_seed = (uint32_t)seed;
|
||||
|
||||
if (hashtable_seed == 0) {
|
||||
if (new_seed == 0) {
|
||||
/* Explicit synchronization fences are not supported by the
|
||||
__sync builtins, so every thread getting here has to
|
||||
generate the seed value.
|
||||
*/
|
||||
new_seed = generate_seed();
|
||||
}
|
||||
|
||||
do {
|
||||
if (__sync_bool_compare_and_swap(&hashtable_seed, 0, new_seed)) {
|
||||
/* We were the first to seed */
|
||||
break;
|
||||
} else {
|
||||
/* Wait for another thread to do the seeding */
|
||||
#ifdef HAVE_SCHED_YIELD
|
||||
sched_yield();
|
||||
#endif
|
||||
}
|
||||
} while (hashtable_seed == 0);
|
||||
}
|
||||
}
|
||||
#elif defined(_WIN32)
|
||||
static long seed_initialized = 0;
|
||||
void json_object_seed(size_t seed) {
|
||||
uint32_t new_seed = (uint32_t)seed;
|
||||
|
||||
if (hashtable_seed == 0) {
|
||||
if (InterlockedIncrement(&seed_initialized) == 1) {
|
||||
/* Do the seeding ourselves */
|
||||
if (new_seed == 0)
|
||||
new_seed = generate_seed();
|
||||
|
||||
hashtable_seed = new_seed;
|
||||
} else {
|
||||
/* Wait for another thread to do the seeding */
|
||||
do {
|
||||
SwitchToThread();
|
||||
} while (hashtable_seed == 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
#else
|
||||
/* Fall back to a thread-unsafe version */
|
||||
void json_object_seed(size_t seed) {
|
||||
uint32_t new_seed = (uint32_t)seed;
|
||||
|
||||
if (hashtable_seed == 0) {
|
||||
if (new_seed == 0)
|
||||
new_seed = generate_seed();
|
||||
|
||||
hashtable_seed = new_seed;
|
||||
}
|
||||
}
|
||||
#endif
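/* Seeding sketch: json_object_seed() should run before the first object is
   created. Zero asks the library to pick a seed via the sources above; a
   fixed nonzero value (1 here, arbitrary) makes key hashing reproducible,
   which can help when comparing unsorted dumps in tests. */
#include <jansson.h>

int main(void) {
    json_object_seed(1); /* or json_object_seed(0) for a random seed */
    /* ... build, parse and dump JSON values ... */
    return 0;
}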
|
src/jansson.def | 83 (new file)

@@ -0,0 +1,83 @@
|
|||
EXPORTS
|
||||
json_delete
|
||||
json_true
|
||||
json_false
|
||||
json_null
|
||||
json_sprintf
|
||||
json_vsprintf
|
||||
json_string
|
||||
json_stringn
|
||||
json_string_nocheck
|
||||
json_stringn_nocheck
|
||||
json_string_value
|
||||
json_string_length
|
||||
json_string_set
|
||||
json_string_setn
|
||||
json_string_set_nocheck
|
||||
json_string_setn_nocheck
|
||||
json_integer
|
||||
json_integer_value
|
||||
json_integer_set
|
||||
json_real
|
||||
json_real_value
|
||||
json_real_set
|
||||
json_number_value
|
||||
json_array
|
||||
json_array_size
|
||||
json_array_get
|
||||
json_array_set_new
|
||||
json_array_append_new
|
||||
json_array_insert_new
|
||||
json_array_remove
|
||||
json_array_clear
|
||||
json_array_extend
|
||||
json_object
|
||||
json_object_size
|
||||
json_object_get
|
||||
json_object_getn
|
||||
json_object_set_new
|
||||
json_object_setn_new
|
||||
json_object_set_new_nocheck
|
||||
json_object_setn_new_nocheck
|
||||
json_object_del
|
||||
json_object_deln
|
||||
json_object_clear
|
||||
json_object_update
|
||||
json_object_update_existing
|
||||
json_object_update_missing
|
||||
json_object_update_recursive
|
||||
json_object_iter
|
||||
json_object_iter_at
|
||||
json_object_iter_next
|
||||
json_object_iter_key
|
||||
json_object_iter_key_len
|
||||
json_object_iter_value
|
||||
json_object_iter_set_new
|
||||
json_object_key_to_iter
|
||||
json_object_seed
|
||||
json_dumps
|
||||
json_dumpb
|
||||
json_dumpf
|
||||
json_dumpfd
|
||||
json_dump_file
|
||||
json_dump_callback
|
||||
json_loads
|
||||
json_loadb
|
||||
json_loadf
|
||||
json_loadfd
|
||||
json_load_file
|
||||
json_load_callback
|
||||
json_equal
|
||||
json_copy
|
||||
json_deep_copy
|
||||
json_pack
|
||||
json_pack_ex
|
||||
json_vpack_ex
|
||||
json_unpack
|
||||
json_unpack_ex
|
||||
json_vunpack_ex
|
||||
json_set_alloc_funcs
|
||||
json_get_alloc_funcs
|
||||
jansson_version_str
|
||||
jansson_version_cmp
|
||||
|
src/jansson.h | 397

@@ -1,5 +1,5 @@
|
|||
/*
|
||||
* Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
|
@ -8,8 +8,42 @@
|
|||
#ifndef JANSSON_H
|
||||
#define JANSSON_H
|
||||
|
||||
#include <stdarg.h>
|
||||
#include <stdio.h>
|
||||
#include <stdint.h>
|
||||
#include <stdlib.h> /* for size_t */
|
||||
|
||||
#include "jansson_config.h"
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
/* version */
|
||||
|
||||
#define JANSSON_MAJOR_VERSION 2
|
||||
#define JANSSON_MINOR_VERSION 14
|
||||
#define JANSSON_MICRO_VERSION 1
|
||||
|
||||
/* Micro version is omitted if it's 0 */
|
||||
#define JANSSON_VERSION "2.14.1"
|
||||
|
||||
/* Version as a 3-byte hex number, e.g. 0x010201 == 1.2.1. Use this
|
||||
for numeric comparisons, e.g. #if JANSSON_VERSION_HEX >= ... */
|
||||
#define JANSSON_VERSION_HEX \
|
||||
((JANSSON_MAJOR_VERSION << 16) | (JANSSON_MINOR_VERSION << 8) | \
|
||||
(JANSSON_MICRO_VERSION << 0))
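/* Compile-time gate sketch using the hex form above; 0x020E00 (2.14.0) is an
   assumed minimum, chosen because the getn/keylen object API appears in 2.14.
   The feature macro name is illustrative. */
#if JANSSON_VERSION_HEX >= 0x020E00
#define HAVE_JANSSON_KEYLEN_API 1
#endif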
|
||||
|
||||
/* If __atomic or __sync builtins are available the library is thread
|
||||
* safe for all read-only functions plus reference counting. */
|
||||
#if JSON_HAVE_ATOMIC_BUILTINS || JSON_HAVE_SYNC_BUILTINS
|
||||
#define JANSSON_THREAD_SAFE_REFCOUNT 1
|
||||
#endif
|
||||
|
||||
#if defined(__GNUC__) || defined(__clang__)
|
||||
#define JANSSON_ATTRS(x) __attribute__(x)
|
||||
#else
|
||||
#define JANSSON_ATTRS(x)
|
||||
#endif
|
||||
|
||||
/* types */
|
||||
|
||||
|
@ -24,89 +58,362 @@ typedef enum {
|
|||
JSON_NULL
|
||||
} json_type;
|
||||
|
||||
typedef struct {
|
||||
typedef struct json_t {
|
||||
json_type type;
|
||||
unsigned long refcount;
|
||||
volatile size_t refcount;
|
||||
} json_t;
|
||||
|
||||
#define json_typeof(json) ((json)->type)
|
||||
#define json_is_object(json) (json && json_typeof(json) == JSON_OBJECT)
|
||||
#define json_is_array(json) (json && json_typeof(json) == JSON_ARRAY)
|
||||
#define json_is_string(json) (json && json_typeof(json) == JSON_STRING)
|
||||
#define json_is_integer(json) (json && json_typeof(json) == JSON_INTEGER)
|
||||
#define json_is_real(json) (json && json_typeof(json) == JSON_REAL)
|
||||
#define json_is_number(json) (json_is_integer(json) || json_is_real(json))
|
||||
#define json_is_true(json) (json && json_typeof(json) == JSON_TRUE)
|
||||
#define json_is_false(json) (json && json_typeof(json) == JSON_FALSE)
|
||||
#define json_is_boolean(json) (json_is_true(json) || json_is_false(json))
|
||||
#define json_is_null(json) (json && json_typeof(json) == JSON_NULL)
|
||||
#ifndef JANSSON_USING_CMAKE /* disabled if using cmake */
|
||||
#if JSON_INTEGER_IS_LONG_LONG
|
||||
#ifdef _WIN32
|
||||
#define JSON_INTEGER_FORMAT "I64d"
|
||||
#else
|
||||
#define JSON_INTEGER_FORMAT "lld"
|
||||
#endif
|
||||
typedef long long json_int_t;
|
||||
#else
|
||||
#define JSON_INTEGER_FORMAT "ld"
|
||||
typedef long json_int_t;
|
||||
#endif /* JSON_INTEGER_IS_LONG_LONG */
|
||||
#endif
|
||||
|
||||
#define json_typeof(json) ((json)->type)
|
||||
#define json_is_object(json) ((json) && json_typeof(json) == JSON_OBJECT)
|
||||
#define json_is_array(json) ((json) && json_typeof(json) == JSON_ARRAY)
|
||||
#define json_is_string(json) ((json) && json_typeof(json) == JSON_STRING)
|
||||
#define json_is_integer(json) ((json) && json_typeof(json) == JSON_INTEGER)
|
||||
#define json_is_real(json) ((json) && json_typeof(json) == JSON_REAL)
|
||||
#define json_is_number(json) (json_is_integer(json) || json_is_real(json))
|
||||
#define json_is_true(json) ((json) && json_typeof(json) == JSON_TRUE)
|
||||
#define json_is_false(json) ((json) && json_typeof(json) == JSON_FALSE)
|
||||
#define json_boolean_value json_is_true
|
||||
#define json_is_boolean(json) (json_is_true(json) || json_is_false(json))
|
||||
#define json_is_null(json) ((json) && json_typeof(json) == JSON_NULL)
|
||||
|
||||
/* construction, destruction, reference counting */
|
||||
|
||||
json_t *json_object(void);
|
||||
json_t *json_array(void);
|
||||
json_t *json_string(const char *value);
|
||||
json_t *json_integer(int value);
|
||||
json_t *json_stringn(const char *value, size_t len);
|
||||
json_t *json_string_nocheck(const char *value);
|
||||
json_t *json_stringn_nocheck(const char *value, size_t len);
|
||||
json_t *json_integer(json_int_t value);
|
||||
json_t *json_real(double value);
|
||||
json_t *json_true(void);
|
||||
json_t *json_false(void);
|
||||
#define json_boolean(val) ((val) ? json_true() : json_false())
|
||||
json_t *json_null(void);
|
||||
|
||||
static inline json_t *json_incref(json_t *json)
|
||||
{
|
||||
if(json)
|
||||
++json->refcount;
|
||||
/* do not call JSON_INTERNAL_INCREF or JSON_INTERNAL_DECREF directly */
|
||||
#if JSON_HAVE_ATOMIC_BUILTINS
|
||||
#define JSON_INTERNAL_INCREF(json) \
|
||||
__atomic_add_fetch(&json->refcount, 1, __ATOMIC_ACQUIRE)
|
||||
#define JSON_INTERNAL_DECREF(json) \
|
||||
__atomic_sub_fetch(&json->refcount, 1, __ATOMIC_RELEASE)
|
||||
#elif JSON_HAVE_SYNC_BUILTINS
|
||||
#define JSON_INTERNAL_INCREF(json) __sync_add_and_fetch(&json->refcount, 1)
|
||||
#define JSON_INTERNAL_DECREF(json) __sync_sub_and_fetch(&json->refcount, 1)
|
||||
#else
|
||||
#define JSON_INTERNAL_INCREF(json) (++json->refcount)
|
||||
#define JSON_INTERNAL_DECREF(json) (--json->refcount)
|
||||
#endif
|
||||
|
||||
static JSON_INLINE json_t *json_incref(json_t *json) {
|
||||
if (json && json->refcount != (size_t)-1)
|
||||
JSON_INTERNAL_INCREF(json);
|
||||
return json;
|
||||
}
|
||||
|
||||
/* do not call json_delete directly */
|
||||
void json_delete(json_t *json);
|
||||
|
||||
static inline void json_decref(json_t *json)
|
||||
{
|
||||
if(json && --json->refcount == 0)
|
||||
static JSON_INLINE void json_decref(json_t *json) {
|
||||
if (json && json->refcount != (size_t)-1 && JSON_INTERNAL_DECREF(json) == 0)
|
||||
json_delete(json);
|
||||
}
|
||||
|
||||
#if defined(__GNUC__) || defined(__clang__)
|
||||
static JSON_INLINE void json_decrefp(json_t **json) {
|
||||
if (json) {
|
||||
json_decref(*json);
|
||||
*json = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
#define json_auto_t json_t __attribute__((cleanup(json_decrefp)))
|
||||
#endif
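/* A sketch of json_auto_t in use (GCC/Clang only, per the guard above): the
   reference is dropped automatically when root leaves scope, so the early
   returns need no explicit json_decref(). Function and field names are
   illustrative. */
static int print_name(const char *text) {
    json_auto_t *root = json_loads(text, 0, NULL);
    json_t *name;

    if (!root)
        return -1;

    name = json_object_get(root, "name");
    if (!json_is_string(name))
        return -1; /* root is still released on this path */

    puts(json_string_value(name));
    return 0;
}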
|
||||
|
||||
/* error reporting */
|
||||
|
||||
#define JSON_ERROR_TEXT_LENGTH 160
|
||||
#define JSON_ERROR_SOURCE_LENGTH 80
|
||||
|
||||
typedef struct json_error_t {
|
||||
int line;
|
||||
int column;
|
||||
int position;
|
||||
char source[JSON_ERROR_SOURCE_LENGTH];
|
||||
char text[JSON_ERROR_TEXT_LENGTH];
|
||||
} json_error_t;
|
||||
|
||||
enum json_error_code {
|
||||
json_error_unknown,
|
||||
json_error_out_of_memory,
|
||||
json_error_stack_overflow,
|
||||
json_error_cannot_open_file,
|
||||
json_error_invalid_argument,
|
||||
json_error_invalid_utf8,
|
||||
json_error_premature_end_of_input,
|
||||
json_error_end_of_input_expected,
|
||||
json_error_invalid_syntax,
|
||||
json_error_invalid_format,
|
||||
json_error_wrong_type,
|
||||
json_error_null_character,
|
||||
json_error_null_value,
|
||||
json_error_null_byte_in_key,
|
||||
json_error_duplicate_key,
|
||||
json_error_numeric_overflow,
|
||||
json_error_item_not_found,
|
||||
json_error_index_out_of_range
|
||||
};
|
||||
|
||||
static JSON_INLINE enum json_error_code json_error_code(const json_error_t *e) {
|
||||
return (enum json_error_code)e->text[JSON_ERROR_TEXT_LENGTH - 1];
|
||||
}
|
||||
|
||||
/* getters, setters, manipulation */
|
||||
|
||||
json_t *json_object_get(const json_t *object, const char *key);
|
||||
int json_object_set(json_t *object, const char *key, json_t *value);
|
||||
void json_object_seed(size_t seed);
|
||||
size_t json_object_size(const json_t *object);
|
||||
json_t *json_object_get(const json_t *object, const char *key)
|
||||
JANSSON_ATTRS((warn_unused_result));
|
||||
json_t *json_object_getn(const json_t *object, const char *key, size_t key_len)
|
||||
JANSSON_ATTRS((warn_unused_result));
|
||||
int json_object_set_new(json_t *object, const char *key, json_t *value);
|
||||
int json_object_setn_new(json_t *object, const char *key, size_t key_len, json_t *value);
|
||||
int json_object_set_new_nocheck(json_t *object, const char *key, json_t *value);
|
||||
int json_object_setn_new_nocheck(json_t *object, const char *key, size_t key_len,
|
||||
json_t *value);
|
||||
int json_object_del(json_t *object, const char *key);
|
||||
int json_object_deln(json_t *object, const char *key, size_t key_len);
|
||||
int json_object_clear(json_t *object);
|
||||
int json_object_update(json_t *object, json_t *other);
|
||||
int json_object_update_existing(json_t *object, json_t *other);
|
||||
int json_object_update_missing(json_t *object, json_t *other);
|
||||
int json_object_update_recursive(json_t *object, json_t *other);
|
||||
void *json_object_iter(json_t *object);
|
||||
void *json_object_iter_at(json_t *object, const char *key);
|
||||
void *json_object_key_to_iter(const char *key);
|
||||
void *json_object_iter_next(json_t *object, void *iter);
|
||||
const char *json_object_iter_key(void *iter);
|
||||
size_t json_object_iter_key_len(void *iter);
|
||||
json_t *json_object_iter_value(void *iter);
|
||||
int json_object_iter_set_new(json_t *object, void *iter, json_t *value);
|
||||
|
||||
unsigned int json_array_size(const json_t *array);
|
||||
json_t *json_array_get(const json_t *array, unsigned int index);
|
||||
int json_array_set(json_t *array, unsigned int index, json_t *value);
|
||||
int json_array_append(json_t *array, json_t *value);
|
||||
#define json_object_foreach(object, key, value) \
|
||||
for (key = json_object_iter_key(json_object_iter(object)); \
|
||||
key && (value = json_object_iter_value(json_object_key_to_iter(key))); \
|
||||
key = json_object_iter_key( \
|
||||
json_object_iter_next(object, json_object_key_to_iter(key))))
|
||||
|
||||
const char *json_string_value(const json_t *json);
|
||||
int json_integer_value(const json_t *json);
|
||||
double json_real_value(const json_t *json);
|
||||
#define json_object_keylen_foreach(object, key, key_len, value) \
|
||||
for (key = json_object_iter_key(json_object_iter(object)), \
|
||||
key_len = json_object_iter_key_len(json_object_key_to_iter(key)); \
|
||||
key && (value = json_object_iter_value(json_object_key_to_iter(key))); \
|
||||
key = json_object_iter_key( \
|
||||
json_object_iter_next(object, json_object_key_to_iter(key))), \
|
||||
key_len = json_object_iter_key_len(json_object_key_to_iter(key)))
|
||||
|
||||
#define json_object_foreach_safe(object, n, key, value) \
|
||||
for (key = json_object_iter_key(json_object_iter(object)), \
|
||||
n = json_object_iter_next(object, json_object_key_to_iter(key)); \
|
||||
key && (value = json_object_iter_value(json_object_key_to_iter(key))); \
|
||||
key = json_object_iter_key(n), \
|
||||
n = json_object_iter_next(object, json_object_key_to_iter(key)))
|
||||
|
||||
#define json_object_keylen_foreach_safe(object, n, key, key_len, value) \
|
||||
for (key = json_object_iter_key(json_object_iter(object)), \
|
||||
n = json_object_iter_next(object, json_object_key_to_iter(key)), \
|
||||
key_len = json_object_iter_key_len(json_object_key_to_iter(key)); \
|
||||
key && (value = json_object_iter_value(json_object_key_to_iter(key))); \
|
||||
key = json_object_iter_key(n), key_len = json_object_iter_key_len(n), \
|
||||
n = json_object_iter_next(object, json_object_key_to_iter(key)))
|
||||
|
||||
#define json_array_foreach(array, index, value) \
|
||||
for (index = 0; \
|
||||
index < json_array_size(array) && (value = json_array_get(array, index)); \
|
||||
index++)
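/* Iteration sketch for the macros just defined; object and array are assumed
   to be existing json_t values, and the loop variables must be declared by
   the caller exactly as shown. All references obtained inside the loops are
   borrowed. */
#include <stdio.h>

static void walk(json_t *object, json_t *array) {
    const char *key;
    size_t index, key_len;
    json_t *value;

    json_object_keylen_foreach(object, key, key_len, value)
        printf("%.*s\n", (int)key_len, key); /* handles keys with embedded NULs */

    json_array_foreach(array, index, value)
        printf("[%zu] %s\n", index,
               json_is_string(value) ? json_string_value(value) : "(non-string)");
}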
|
||||
|
||||
static JSON_INLINE int json_object_set(json_t *object, const char *key, json_t *value) {
|
||||
return json_object_set_new(object, key, json_incref(value));
|
||||
}
|
||||
|
||||
static JSON_INLINE int json_object_setn(json_t *object, const char *key, size_t key_len,
|
||||
json_t *value) {
|
||||
return json_object_setn_new(object, key, key_len, json_incref(value));
|
||||
}
|
||||
|
||||
static JSON_INLINE int json_object_set_nocheck(json_t *object, const char *key,
|
||||
json_t *value) {
|
||||
return json_object_set_new_nocheck(object, key, json_incref(value));
|
||||
}
|
||||
|
||||
static JSON_INLINE int json_object_setn_nocheck(json_t *object, const char *key,
|
||||
size_t key_len, json_t *value) {
|
||||
return json_object_setn_new_nocheck(object, key, key_len, json_incref(value));
|
||||
}
|
||||
|
||||
static JSON_INLINE int json_object_iter_set(json_t *object, void *iter, json_t *value) {
|
||||
return json_object_iter_set_new(object, iter, json_incref(value));
|
||||
}
|
||||
|
||||
static JSON_INLINE int json_object_update_new(json_t *object, json_t *other) {
|
||||
int ret = json_object_update(object, other);
|
||||
json_decref(other);
|
||||
return ret;
|
||||
}
|
||||
|
||||
static JSON_INLINE int json_object_update_existing_new(json_t *object, json_t *other) {
|
||||
int ret = json_object_update_existing(object, other);
|
||||
json_decref(other);
|
||||
return ret;
|
||||
}
|
||||
|
||||
static JSON_INLINE int json_object_update_missing_new(json_t *object, json_t *other) {
|
||||
int ret = json_object_update_missing(object, other);
|
||||
json_decref(other);
|
||||
return ret;
|
||||
}
|
||||
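The inline wrappers above make the reference-counting contract explicit: the plain setters borrow the caller's reference (via json_incref()), while the *_new variants steal it. A small illustrative sketch with made-up keys:
json_t *obj = json_object();
json_t *str = json_string("hello");

json_object_set(obj, "borrowed", str);   /* obj takes its own reference */
json_decref(str);                        /* caller drops the one it still owns */

json_object_set_new(obj, "stolen", json_integer(42)); /* reference stolen, no decref */
json_decref(obj);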
|
||||
size_t json_array_size(const json_t *array);
|
||||
json_t *json_array_get(const json_t *array, size_t index)
|
||||
JANSSON_ATTRS((warn_unused_result));
|
||||
int json_array_set_new(json_t *array, size_t index, json_t *value);
|
||||
int json_array_append_new(json_t *array, json_t *value);
|
||||
int json_array_insert_new(json_t *array, size_t index, json_t *value);
|
||||
int json_array_remove(json_t *array, size_t index);
|
||||
int json_array_clear(json_t *array);
|
||||
int json_array_extend(json_t *array, json_t *other);
|
||||
|
||||
static JSON_INLINE int json_array_set(json_t *array, size_t ind, json_t *value) {
|
||||
return json_array_set_new(array, ind, json_incref(value));
|
||||
}
|
||||
|
||||
static JSON_INLINE int json_array_append(json_t *array, json_t *value) {
|
||||
return json_array_append_new(array, json_incref(value));
|
||||
}
|
||||
|
||||
static JSON_INLINE int json_array_insert(json_t *array, size_t ind, json_t *value) {
|
||||
return json_array_insert_new(array, ind, json_incref(value));
|
||||
}
|
||||
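An illustrative sketch of the array helpers above (the values are arbitrary):
json_t *arr = json_array();

json_array_append_new(arr, json_integer(1));    /* steals the new reference */
json_array_insert_new(arr, 0, json_integer(0)); /* shifts existing elements right */
json_array_set_new(arr, 1, json_true());        /* replaces the element at index 1 */
json_array_remove(arr, 0);                      /* drops the first element */

json_decref(arr);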
|
||||
const char *json_string_value(const json_t *string);
|
||||
size_t json_string_length(const json_t *string);
|
||||
json_int_t json_integer_value(const json_t *integer);
|
||||
double json_real_value(const json_t *real);
|
||||
double json_number_value(const json_t *json);
|
||||
|
||||
int json_string_set(json_t *string, const char *value);
|
||||
int json_string_setn(json_t *string, const char *value, size_t len);
|
||||
int json_string_set_nocheck(json_t *string, const char *value);
|
||||
int json_string_setn_nocheck(json_t *string, const char *value, size_t len);
|
||||
int json_integer_set(json_t *integer, json_int_t value);
|
||||
int json_real_set(json_t *real, double value);
|
||||
|
||||
/* loading, printing */
|
||||
/* pack, unpack */
|
||||
|
||||
#define JSON_ERROR_TEXT_LENGTH 160
|
||||
json_t *json_pack(const char *fmt, ...) JANSSON_ATTRS((warn_unused_result));
|
||||
json_t *json_pack_ex(json_error_t *error, size_t flags, const char *fmt, ...)
|
||||
JANSSON_ATTRS((warn_unused_result));
|
||||
json_t *json_vpack_ex(json_error_t *error, size_t flags, const char *fmt, va_list ap)
|
||||
JANSSON_ATTRS((warn_unused_result));
|
||||
|
||||
typedef struct {
|
||||
char text[JSON_ERROR_TEXT_LENGTH];
|
||||
int line;
|
||||
} json_error_t;
|
||||
#define JSON_VALIDATE_ONLY 0x1
|
||||
#define JSON_STRICT 0x2
|
||||
|
||||
json_t *json_loads(const char *input, json_error_t *error);
|
||||
json_t *json_loadf(FILE *input, json_error_t *error);
|
||||
json_t *json_load_file(const char *path, json_error_t *error);
|
||||
int json_unpack(json_t *root, const char *fmt, ...);
|
||||
int json_unpack_ex(json_t *root, json_error_t *error, size_t flags, const char *fmt, ...);
|
||||
int json_vunpack_ex(json_t *root, json_error_t *error, size_t flags, const char *fmt,
|
||||
va_list ap);
|
||||
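An illustrative round trip through the pack/unpack API declared above; the format string and field names are invented for the example:
json_error_t error;
json_t *msg = json_pack("{s:s, s:i}", "name", "jansson", "answer", 42);

const char *name;
int answer;
if (json_unpack_ex(msg, &error, 0, "{s:s, s:i}",
                   "name", &name, "answer", &answer))
    fprintf(stderr, "unpack failed: %s (line %d)\n", error.text, error.line);

json_decref(msg);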
|
||||
#define JSON_INDENT(n) (n & 0xFF)
|
||||
/* sprintf */
|
||||
|
||||
char *json_dumps(const json_t *json, uint32_t flags);
|
||||
int json_dumpf(const json_t *json, FILE *output, uint32_t flags);
|
||||
int json_dump_file(const json_t *json, const char *path, uint32_t flags);
|
||||
json_t *json_sprintf(const char *fmt, ...)
|
||||
JANSSON_ATTRS((warn_unused_result, format(printf, 1, 2)));
|
||||
json_t *json_vsprintf(const char *fmt, va_list ap)
|
||||
JANSSON_ATTRS((warn_unused_result, format(printf, 1, 0)));
|
||||
|
||||
/* equality */
|
||||
|
||||
int json_equal(const json_t *value1, const json_t *value2);
|
||||
|
||||
/* copying */
|
||||
|
||||
json_t *json_copy(json_t *value) JANSSON_ATTRS((warn_unused_result));
|
||||
json_t *json_deep_copy(const json_t *value) JANSSON_ATTRS((warn_unused_result));
|
||||
|
||||
/* decoding */
|
||||
|
||||
#define JSON_REJECT_DUPLICATES 0x1
|
||||
#define JSON_DISABLE_EOF_CHECK 0x2
|
||||
#define JSON_DECODE_ANY 0x4
|
||||
#define JSON_DECODE_INT_AS_REAL 0x8
|
||||
#define JSON_ALLOW_NUL 0x10
|
||||
|
||||
typedef size_t (*json_load_callback_t)(void *buffer, size_t buflen, void *data);
|
||||
|
||||
json_t *json_loads(const char *input, size_t flags, json_error_t *error)
|
||||
JANSSON_ATTRS((warn_unused_result));
|
||||
json_t *json_loadb(const char *buffer, size_t buflen, size_t flags, json_error_t *error)
|
||||
JANSSON_ATTRS((warn_unused_result));
|
||||
json_t *json_loadf(FILE *input, size_t flags, json_error_t *error)
|
||||
JANSSON_ATTRS((warn_unused_result));
|
||||
json_t *json_loadfd(int input, size_t flags, json_error_t *error)
|
||||
JANSSON_ATTRS((warn_unused_result));
|
||||
json_t *json_load_file(const char *path, size_t flags, json_error_t *error)
|
||||
JANSSON_ATTRS((warn_unused_result));
|
||||
json_t *json_load_callback(json_load_callback_t callback, void *data, size_t flags,
|
||||
json_error_t *error) JANSSON_ATTRS((warn_unused_result));
|
||||
|
||||
/* encoding */
|
||||
|
||||
#define JSON_MAX_INDENT 0x1F
|
||||
#define JSON_INDENT(n) ((n) & JSON_MAX_INDENT)
|
||||
#define JSON_COMPACT 0x20
|
||||
#define JSON_ENSURE_ASCII 0x40
|
||||
#define JSON_SORT_KEYS 0x80
|
||||
#define JSON_PRESERVE_ORDER 0x100
|
||||
#define JSON_ENCODE_ANY 0x200
|
||||
#define JSON_ESCAPE_SLASH 0x400
|
||||
#define JSON_REAL_PRECISION(n) (((n) & 0x1F) << 11)
|
||||
#define JSON_EMBED 0x10000
|
||||
|
||||
typedef int (*json_dump_callback_t)(const char *buffer, size_t size, void *data);
|
||||
|
||||
char *json_dumps(const json_t *json, size_t flags) JANSSON_ATTRS((warn_unused_result));
|
||||
size_t json_dumpb(const json_t *json, char *buffer, size_t size, size_t flags);
|
||||
int json_dumpf(const json_t *json, FILE *output, size_t flags);
|
||||
int json_dumpfd(const json_t *json, int output, size_t flags);
|
||||
int json_dump_file(const json_t *json, const char *path, size_t flags);
|
||||
int json_dump_callback(const json_t *json, json_dump_callback_t callback, void *data,
|
||||
size_t flags);
|
||||
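An illustrative decode/encode round trip using the flags above; "input.json" is a hypothetical file name:
json_error_t error;
json_t *root = json_load_file("input.json", JSON_REJECT_DUPLICATES, &error);

if (root) {
    char *text = json_dumps(root, JSON_INDENT(2) | JSON_SORT_KEYS);
    /* ... use text ... */
    free(text);        /* the encoded buffer is owned by the caller */
    json_decref(root);
} else {
    fprintf(stderr, "%s (line %d)\n", error.text, error.line);
}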
|
||||
/* custom memory allocation */
|
||||
|
||||
typedef void *(*json_malloc_t)(size_t);
|
||||
typedef void (*json_free_t)(void *);
|
||||
|
||||
void json_set_alloc_funcs(json_malloc_t malloc_fn, json_free_t free_fn);
|
||||
void json_get_alloc_funcs(json_malloc_t *malloc_fn, json_free_t *free_fn);
|
||||
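An illustrative sketch of installing custom allocators through the hooks above; counting_malloc and counting_free are hypothetical wrappers, and they must be installed before any other Jansson call:
static size_t live_blocks = 0;

static void *counting_malloc(size_t size) {
    live_blocks++;
    return malloc(size);
}

static void counting_free(void *ptr) {
    live_blocks--;
    free(ptr);
}

/* once, at program start */
json_set_alloc_funcs(counting_malloc, counting_free);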
|
||||
/* runtime version checking */
|
||||
|
||||
const char *jansson_version_str(void);
|
||||
int jansson_version_cmp(int major, int minor, int micro);
|
||||
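A short illustrative runtime check using the two functions above:
if (jansson_version_cmp(2, 13, 0) < 0)
    fprintf(stderr, "running against an older jansson: %s\n", jansson_version_str());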
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
|
|
47
src/jansson_config.h.in
Normal file
|
@ -0,0 +1,47 @@
|
|||
/*
|
||||
* Copyright (c) 2010-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*
|
||||
*
|
||||
* This file specifies a part of the site-specific configuration for
|
||||
* Jansson, namely those things that affect the public API in
|
||||
* jansson.h.
|
||||
*
|
||||
* The configure script copies this file to jansson_config.h and
|
||||
* replaces @var@ substitutions by values that fit your system. If you
|
||||
* cannot run the configure script, you can do the value substitution
|
||||
* by hand.
|
||||
*/
|
||||
|
||||
#ifndef JANSSON_CONFIG_H
|
||||
#define JANSSON_CONFIG_H
|
||||
|
||||
/* If your compiler supports the inline keyword in C, JSON_INLINE is
|
||||
defined to `inline', otherwise empty. In C++, the inline is always
|
||||
supported. */
|
||||
#ifdef __cplusplus
|
||||
#define JSON_INLINE inline
|
||||
#else
|
||||
#define JSON_INLINE @json_inline@
|
||||
#endif
|
||||
|
||||
/* If your compiler supports the `long long` type and the strtoll()
|
||||
library function, JSON_INTEGER_IS_LONG_LONG is defined to 1,
|
||||
otherwise to 0. */
|
||||
#define JSON_INTEGER_IS_LONG_LONG @json_have_long_long@
|
||||
|
||||
/* If __atomic builtins are available they will be used to manage
|
||||
reference counts of json_t. */
|
||||
#define JSON_HAVE_ATOMIC_BUILTINS @json_have_atomic_builtins@
|
||||
|
||||
/* If __atomic builtins are not available we try using __sync builtins
|
||||
to manage reference counts of json_t. */
|
||||
#define JSON_HAVE_SYNC_BUILTINS @json_have_sync_builtins@
|
||||
|
||||
/* Maximum recursion depth for parsing JSON input.
|
||||
This limits the depth of e.g. array-within-array constructions. */
|
||||
#define JSON_PARSER_MAX_DEPTH 2048
|
||||
|
||||
#endif
|
|
@ -1,5 +1,5 @@
|
|||
/*
|
||||
* Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
|
@ -8,8 +8,107 @@
|
|||
#ifndef JANSSON_PRIVATE_H
|
||||
#define JANSSON_PRIVATE_H
|
||||
|
||||
int json_object_set_nocheck(json_t *json, const char *key, json_t *value);
|
||||
json_t *json_string_nocheck(const char *value);
|
||||
#include "hashtable.h"
|
||||
#include "jansson.h"
|
||||
#include "jansson_private_config.h"
|
||||
#include "strbuffer.h"
|
||||
#include <stddef.h>
|
||||
|
||||
#define container_of(ptr_, type_, member_) \
|
||||
((type_ *)((char *)ptr_ - offsetof(type_, member_)))
|
||||
|
||||
/* On some platforms, max() may already be defined */
|
||||
#ifndef max
|
||||
#define max(a, b) ((a) > (b) ? (a) : (b))
|
||||
#endif
|
||||
|
||||
/* va_copy is a C99 feature. In C89 implementations, it's sometimes
|
||||
available as __va_copy. If not, memcpy() should do the trick. */
|
||||
#ifndef va_copy
|
||||
#ifdef __va_copy
|
||||
#define va_copy __va_copy
|
||||
#else
|
||||
#define va_copy(a, b) memcpy(&(a), &(b), sizeof(va_list))
|
||||
#endif
|
||||
#endif
|
||||
|
||||
typedef struct {
|
||||
json_t json;
|
||||
hashtable_t hashtable;
|
||||
} json_object_t;
|
||||
|
||||
typedef struct {
|
||||
json_t json;
|
||||
size_t size;
|
||||
size_t entries;
|
||||
json_t **table;
|
||||
} json_array_t;
|
||||
|
||||
typedef struct {
|
||||
json_t json;
|
||||
char *value;
|
||||
size_t length;
|
||||
} json_string_t;
|
||||
|
||||
typedef struct {
|
||||
json_t json;
|
||||
double value;
|
||||
} json_real_t;
|
||||
|
||||
typedef struct {
|
||||
json_t json;
|
||||
json_int_t value;
|
||||
} json_integer_t;
|
||||
|
||||
#define json_to_object(json_) container_of(json_, json_object_t, json)
|
||||
#define json_to_array(json_) container_of(json_, json_array_t, json)
|
||||
#define json_to_string(json_) container_of(json_, json_string_t, json)
|
||||
#define json_to_real(json_) container_of(json_, json_real_t, json)
|
||||
#define json_to_integer(json_) container_of(json_, json_integer_t, json)
|
||||
|
||||
/* Create a string by taking ownership of an existing buffer */
|
||||
json_t *jsonp_stringn_nocheck_own(const char *value, size_t len);
|
||||
|
||||
/* Error message formatting */
|
||||
void jsonp_error_init(json_error_t *error, const char *source);
|
||||
void jsonp_error_set_source(json_error_t *error, const char *source);
|
||||
void jsonp_error_set(json_error_t *error, int line, int column, size_t position,
|
||||
enum json_error_code code, const char *msg, ...);
|
||||
void jsonp_error_vset(json_error_t *error, int line, int column, size_t position,
|
||||
enum json_error_code code, const char *msg, va_list ap);
|
||||
|
||||
/* Locale independent string<->double conversions */
|
||||
int jsonp_strtod(strbuffer_t *strbuffer, double *out);
|
||||
int jsonp_dtostr(char *buffer, size_t size, double value, int prec);
|
||||
|
||||
/* Wrappers for custom memory functions */
|
||||
void *jsonp_malloc(size_t size) JANSSON_ATTRS((warn_unused_result));
|
||||
void jsonp_free(void *ptr);
|
||||
char *jsonp_strndup(const char *str, size_t length) JANSSON_ATTRS((warn_unused_result));
|
||||
char *jsonp_strdup(const char *str) JANSSON_ATTRS((warn_unused_result));
|
||||
|
||||
/* Circular reference check */
|
||||
/* Space for "0x", double the sizeof a pointer for the hex and a terminator. */
|
||||
#define LOOP_KEY_LEN (2 + (sizeof(json_t *) * 2) + 1)
|
||||
int jsonp_loop_check(hashtable_t *parents, const json_t *json, char *key, size_t key_size,
|
||||
size_t *key_len_out);
|
||||
|
||||
/* Windows compatibility */
|
||||
#if defined(_WIN32) || defined(WIN32)
|
||||
#if defined(_MSC_VER) /* MS compiler */
|
||||
#if (_MSC_VER < 1900) && \
|
||||
!defined(snprintf) /* snprintf not defined yet & not introduced */
|
||||
#define snprintf _snprintf
|
||||
#endif
|
||||
#if (_MSC_VER < 1500) && \
|
||||
!defined(vsnprintf) /* vsnprintf not defined yet & not introduced */
|
||||
#define vsnprintf(b, c, f, a) _vsnprintf(b, c, f, a)
|
||||
#endif
|
||||
#else /* Other Windows compiler, old definition */
|
||||
#define snprintf _snprintf
|
||||
#define vsnprintf _vsnprintf
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#endif
|
||||
|
|
1078
src/load.c
File diff suppressed because it is too large
382
src/lookup3.h
Normal file
|
@ -0,0 +1,382 @@
|
|||
// clang-format off
|
||||
/*
|
||||
-------------------------------------------------------------------------------
|
||||
lookup3.c, by Bob Jenkins, May 2006, Public Domain.
|
||||
|
||||
These are functions for producing 32-bit hashes for hash table lookup.
|
||||
hashword(), hashlittle(), hashlittle2(), hashbig(), mix(), and final()
|
||||
are externally useful functions. Routines to test the hash are included
|
||||
if SELF_TEST is defined. You can use this free for any purpose. It's in
|
||||
the public domain. It has no warranty.
|
||||
|
||||
You probably want to use hashlittle(). hashlittle() and hashbig()
|
||||
hash byte arrays.  hashlittle() is faster than hashbig() on
|
||||
little-endian machines. Intel and AMD are little-endian machines.
|
||||
On second thought, you probably want hashlittle2(), which is identical to
|
||||
hashlittle() except it returns two 32-bit hashes for the price of one.
|
||||
You could implement hashbig2() if you wanted but I haven't bothered here.
|
||||
|
||||
If you want to find a hash of, say, exactly 7 integers, do
|
||||
a = i1; b = i2; c = i3;
|
||||
mix(a,b,c);
|
||||
a += i4; b += i5; c += i6;
|
||||
mix(a,b,c);
|
||||
a += i7;
|
||||
final(a,b,c);
|
||||
then use c as the hash value. If you have a variable length array of
|
||||
4-byte integers to hash, use hashword(). If you have a byte array (like
|
||||
a character string), use hashlittle(). If you have several byte arrays, or
|
||||
a mix of things, see the comments above hashlittle().
|
||||
|
||||
Why is this so big? I read 12 bytes at a time into 3 4-byte integers,
|
||||
then mix those integers. This is fast (you can do a lot more thorough
|
||||
mixing with 12*3 instructions on 3 integers than you can with 3 instructions
|
||||
on 1 byte), but shoehorning those bytes into integers efficiently is messy.
|
||||
-------------------------------------------------------------------------------
|
||||
*/
|
||||
|
||||
#include <stdlib.h>
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
#include <jansson_private_config.h>
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_STDINT_H
|
||||
#include <stdint.h> /* defines uint32_t etc */
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_SYS_PARAM_H
|
||||
#include <sys/param.h> /* attempt to define endianness */
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_ENDIAN_H
|
||||
# include <endian.h> /* attempt to define endianness */
|
||||
#endif
|
||||
|
||||
/*
|
||||
* My best guess at if you are big-endian or little-endian. This may
|
||||
* need adjustment.
|
||||
*/
|
||||
#if (defined(__BYTE_ORDER) && defined(__LITTLE_ENDIAN) && \
|
||||
__BYTE_ORDER == __LITTLE_ENDIAN) || \
|
||||
(defined(i386) || defined(__i386__) || defined(__i486__) || \
|
||||
defined(__i586__) || defined(__i686__) || defined(vax) || defined(MIPSEL))
|
||||
# define HASH_LITTLE_ENDIAN 1
|
||||
# define HASH_BIG_ENDIAN 0
|
||||
#elif (defined(__BYTE_ORDER) && defined(__BIG_ENDIAN) && \
|
||||
__BYTE_ORDER == __BIG_ENDIAN) || \
|
||||
(defined(sparc) || defined(POWERPC) || defined(mc68000) || defined(sel))
|
||||
# define HASH_LITTLE_ENDIAN 0
|
||||
# define HASH_BIG_ENDIAN 1
|
||||
#else
|
||||
# define HASH_LITTLE_ENDIAN 0
|
||||
# define HASH_BIG_ENDIAN 0
|
||||
#endif
|
||||
|
||||
#define hashsize(n) ((size_t)1<<(n))
|
||||
#define hashmask(n) (hashsize(n)-1)
|
||||
#define rot(x,k) (((x)<<(k)) | ((x)>>(32-(k))))
|
||||
|
||||
/*
|
||||
-------------------------------------------------------------------------------
|
||||
mix -- mix 3 32-bit values reversibly.
|
||||
|
||||
This is reversible, so any information in (a,b,c) before mix() is
|
||||
still in (a,b,c) after mix().
|
||||
|
||||
If four pairs of (a,b,c) inputs are run through mix(), or through
|
||||
mix() in reverse, there are at least 32 bits of the output that
|
||||
are sometimes the same for one pair and different for another pair.
|
||||
This was tested for:
|
||||
* pairs that differed by one bit, by two bits, in any combination
|
||||
of top bits of (a,b,c), or in any combination of bottom bits of
|
||||
(a,b,c).
|
||||
* "differ" is defined as +, -, ^, or ~^. For + and -, I transformed
|
||||
the output delta to a Gray code (a^(a>>1)) so a string of 1's (as
|
||||
is commonly produced by subtraction) look like a single 1-bit
|
||||
difference.
|
||||
* the base values were pseudorandom, all zero but one bit set, or
|
||||
all zero plus a counter that starts at zero.
|
||||
|
||||
Some k values for my "a-=c; a^=rot(c,k); c+=b;" arrangement that
|
||||
satisfy this are
|
||||
4 6 8 16 19 4
|
||||
9 15 3 18 27 15
|
||||
14 9 3 7 17 3
|
||||
Well, "9 15 3 18 27 15" didn't quite get 32 bits diffing
|
||||
for "differ" defined as + with a one-bit base and a two-bit delta. I
|
||||
used http://burtleburtle.net/bob/hash/avalanche.html to choose
|
||||
the operations, constants, and arrangements of the variables.
|
||||
|
||||
This does not achieve avalanche. There are input bits of (a,b,c)
|
||||
that fail to affect some output bits of (a,b,c), especially of a. The
|
||||
most thoroughly mixed value is c, but it doesn't really even achieve
|
||||
avalanche in c.
|
||||
|
||||
This allows some parallelism. Read-after-writes are good at doubling
|
||||
the number of bits affected, so the goal of mixing pulls in the opposite
|
||||
direction as the goal of parallelism. I did what I could. Rotates
|
||||
seem to cost as much as shifts on every machine I could lay my hands
|
||||
on, and rotates are much kinder to the top and bottom bits, so I used
|
||||
rotates.
|
||||
-------------------------------------------------------------------------------
|
||||
*/
|
||||
#define mix(a,b,c) \
|
||||
{ \
|
||||
a -= c; a ^= rot(c, 4); c += b; \
|
||||
b -= a; b ^= rot(a, 6); a += c; \
|
||||
c -= b; c ^= rot(b, 8); b += a; \
|
||||
a -= c; a ^= rot(c,16); c += b; \
|
||||
b -= a; b ^= rot(a,19); a += c; \
|
||||
c -= b; c ^= rot(b, 4); b += a; \
|
||||
}
|
||||
|
||||
/*
|
||||
-------------------------------------------------------------------------------
|
||||
final -- final mixing of 3 32-bit values (a,b,c) into c
|
||||
|
||||
Pairs of (a,b,c) values differing in only a few bits will usually
|
||||
produce values of c that look totally different. This was tested for
|
||||
* pairs that differed by one bit, by two bits, in any combination
|
||||
of top bits of (a,b,c), or in any combination of bottom bits of
|
||||
(a,b,c).
|
||||
* "differ" is defined as +, -, ^, or ~^. For + and -, I transformed
|
||||
the output delta to a Gray code (a^(a>>1)) so a string of 1's (as
|
||||
is commonly produced by subtraction) look like a single 1-bit
|
||||
difference.
|
||||
* the base values were pseudorandom, all zero but one bit set, or
|
||||
all zero plus a counter that starts at zero.
|
||||
|
||||
These constants passed:
|
||||
14 11 25 16 4 14 24
|
||||
12 14 25 16 4 14 24
|
||||
and these came close:
|
||||
4 8 15 26 3 22 24
|
||||
10 8 15 26 3 22 24
|
||||
11 8 15 26 3 22 24
|
||||
-------------------------------------------------------------------------------
|
||||
*/
|
||||
#define final(a,b,c) \
|
||||
{ \
|
||||
c ^= b; c -= rot(b,14); \
|
||||
a ^= c; a -= rot(c,11); \
|
||||
b ^= a; b -= rot(a,25); \
|
||||
c ^= b; c -= rot(b,16); \
|
||||
a ^= c; a -= rot(c,4); \
|
||||
b ^= a; b -= rot(a,14); \
|
||||
c ^= b; c -= rot(b,24); \
|
||||
}
|
||||
|
||||
/*
|
||||
-------------------------------------------------------------------------------
|
||||
hashlittle() -- hash a variable-length key into a 32-bit value
|
||||
k : the key (the unaligned variable-length array of bytes)
|
||||
length : the length of the key, counting by bytes
|
||||
initval : can be any 4-byte value
|
||||
Returns a 32-bit value. Every bit of the key affects every bit of
|
||||
the return value. Two keys differing by one or two bits will have
|
||||
totally different hash values.
|
||||
|
||||
The best hash table sizes are powers of 2. There is no need to do
|
||||
mod a prime (mod is sooo slow!). If you need less than 32 bits,
|
||||
use a bitmask. For example, if you need only 10 bits, do
|
||||
h = (h & hashmask(10));
|
||||
In which case, the hash table should have hashsize(10) elements.
|
||||
|
||||
If you are hashing n strings (uint8_t **)k, do it like this:
|
||||
for (i=0, h=0; i<n; ++i) h = hashlittle( k[i], len[i], h);
|
||||
|
||||
By Bob Jenkins, 2006. bob_jenkins@burtleburtle.net. You may use this
|
||||
code any way you wish, private, educational, or commercial. It's free.
|
||||
|
||||
Use for hash table lookup, or anything where one collision in 2^^32 is
|
||||
acceptable. Do NOT use for cryptographic purposes.
|
||||
-------------------------------------------------------------------------------
|
||||
*/
|
||||
|
||||
static uint32_t hashlittle(const void *key, size_t length, uint32_t initval)
|
||||
{
|
||||
uint32_t a,b,c; /* internal state */
|
||||
union { const void *ptr; size_t i; } u; /* needed for Mac Powerbook G4 */
|
||||
|
||||
/* Set up the internal state */
|
||||
a = b = c = 0xdeadbeef + ((uint32_t)length) + initval;
|
||||
|
||||
u.ptr = key;
|
||||
if (HASH_LITTLE_ENDIAN && ((u.i & 0x3) == 0)) {
|
||||
const uint32_t *k = (const uint32_t *)key; /* read 32-bit chunks */
|
||||
|
||||
/* Detect Valgrind or AddressSanitizer */
|
||||
#ifdef VALGRIND
|
||||
# define NO_MASKING_TRICK 1
|
||||
#else
|
||||
# if defined(__has_feature) /* Clang */
|
||||
# if __has_feature(address_sanitizer) /* is ASAN enabled? */
|
||||
# define NO_MASKING_TRICK 1
|
||||
# endif
|
||||
# else
|
||||
# if defined(__SANITIZE_ADDRESS__) /* GCC 4.8.x, is ASAN enabled? */
|
||||
# define NO_MASKING_TRICK 1
|
||||
# endif
|
||||
# endif
|
||||
#endif
|
||||
|
||||
#ifdef NO_MASKING_TRICK
|
||||
const uint8_t *k8;
|
||||
#endif
|
||||
|
||||
/*------ all but last block: aligned reads and affect 32 bits of (a,b,c) */
|
||||
while (length > 12)
|
||||
{
|
||||
a += k[0];
|
||||
b += k[1];
|
||||
c += k[2];
|
||||
mix(a,b,c);
|
||||
length -= 12;
|
||||
k += 3;
|
||||
}
|
||||
|
||||
/*----------------------------- handle the last (probably partial) block */
|
||||
/*
|
||||
* "k[2]&0xffffff" actually reads beyond the end of the string, but
|
||||
* then masks off the part it's not allowed to read. Because the
|
||||
* string is aligned, the masked-off tail is in the same word as the
|
||||
* rest of the string. Every machine with memory protection I've seen
|
||||
* does it on word boundaries, so is OK with this. But VALGRIND will
|
||||
* still catch it and complain. The masking trick does make the hash
|
||||
* noticeably faster for short strings (like English words).
|
||||
*/
|
||||
#ifndef NO_MASKING_TRICK
|
||||
|
||||
switch(length)
|
||||
{
|
||||
case 12: c+=k[2]; b+=k[1]; a+=k[0]; break;
|
||||
case 11: c+=k[2]&0xffffff; b+=k[1]; a+=k[0]; break;
|
||||
case 10: c+=k[2]&0xffff; b+=k[1]; a+=k[0]; break;
|
||||
case 9 : c+=k[2]&0xff; b+=k[1]; a+=k[0]; break;
|
||||
case 8 : b+=k[1]; a+=k[0]; break;
|
||||
case 7 : b+=k[1]&0xffffff; a+=k[0]; break;
|
||||
case 6 : b+=k[1]&0xffff; a+=k[0]; break;
|
||||
case 5 : b+=k[1]&0xff; a+=k[0]; break;
|
||||
case 4 : a+=k[0]; break;
|
||||
case 3 : a+=k[0]&0xffffff; break;
|
||||
case 2 : a+=k[0]&0xffff; break;
|
||||
case 1 : a+=k[0]&0xff; break;
|
||||
case 0 : return c; /* zero length strings require no mixing */
|
||||
}
|
||||
|
||||
#else /* make valgrind happy */
|
||||
|
||||
k8 = (const uint8_t *)k;
|
||||
switch(length)
|
||||
{
|
||||
case 12: c+=k[2]; b+=k[1]; a+=k[0]; break;
|
||||
case 11: c+=((uint32_t)k8[10])<<16; /* fall through */
|
||||
case 10: c+=((uint32_t)k8[9])<<8; /* fall through */
|
||||
case 9 : c+=k8[8]; /* fall through */
|
||||
case 8 : b+=k[1]; a+=k[0]; break;
|
||||
case 7 : b+=((uint32_t)k8[6])<<16; /* fall through */
|
||||
case 6 : b+=((uint32_t)k8[5])<<8; /* fall through */
|
||||
case 5 : b+=k8[4]; /* fall through */
|
||||
case 4 : a+=k[0]; break;
|
||||
case 3 : a+=((uint32_t)k8[2])<<16; /* fall through */
|
||||
case 2 : a+=((uint32_t)k8[1])<<8; /* fall through */
|
||||
case 1 : a+=k8[0]; break;
|
||||
case 0 : return c;
|
||||
}
|
||||
|
||||
#endif /* !valgrind */
|
||||
|
||||
} else if (HASH_LITTLE_ENDIAN && ((u.i & 0x1) == 0)) {
|
||||
const uint16_t *k = (const uint16_t *)key; /* read 16-bit chunks */
|
||||
const uint8_t *k8;
|
||||
|
||||
/*--------------- all but last block: aligned reads and different mixing */
|
||||
while (length > 12)
|
||||
{
|
||||
a += k[0] + (((uint32_t)k[1])<<16);
|
||||
b += k[2] + (((uint32_t)k[3])<<16);
|
||||
c += k[4] + (((uint32_t)k[5])<<16);
|
||||
mix(a,b,c);
|
||||
length -= 12;
|
||||
k += 6;
|
||||
}
|
||||
|
||||
/*----------------------------- handle the last (probably partial) block */
|
||||
k8 = (const uint8_t *)k;
|
||||
switch(length)
|
||||
{
|
||||
case 12: c+=k[4]+(((uint32_t)k[5])<<16);
|
||||
b+=k[2]+(((uint32_t)k[3])<<16);
|
||||
a+=k[0]+(((uint32_t)k[1])<<16);
|
||||
break;
|
||||
case 11: c+=((uint32_t)k8[10])<<16; /* fall through */
|
||||
case 10: c+=k[4];
|
||||
b+=k[2]+(((uint32_t)k[3])<<16);
|
||||
a+=k[0]+(((uint32_t)k[1])<<16);
|
||||
break;
|
||||
case 9 : c+=k8[8]; /* fall through */
|
||||
case 8 : b+=k[2]+(((uint32_t)k[3])<<16);
|
||||
a+=k[0]+(((uint32_t)k[1])<<16);
|
||||
break;
|
||||
case 7 : b+=((uint32_t)k8[6])<<16; /* fall through */
|
||||
case 6 : b+=k[2];
|
||||
a+=k[0]+(((uint32_t)k[1])<<16);
|
||||
break;
|
||||
case 5 : b+=k8[4]; /* fall through */
|
||||
case 4 : a+=k[0]+(((uint32_t)k[1])<<16);
|
||||
break;
|
||||
case 3 : a+=((uint32_t)k8[2])<<16; /* fall through */
|
||||
case 2 : a+=k[0];
|
||||
break;
|
||||
case 1 : a+=k8[0];
|
||||
break;
|
||||
case 0 : return c; /* zero length requires no mixing */
|
||||
}
|
||||
|
||||
} else { /* need to read the key one byte at a time */
|
||||
const uint8_t *k = (const uint8_t *)key;
|
||||
|
||||
/*--------------- all but the last block: affect some 32 bits of (a,b,c) */
|
||||
while (length > 12)
|
||||
{
|
||||
a += k[0];
|
||||
a += ((uint32_t)k[1])<<8;
|
||||
a += ((uint32_t)k[2])<<16;
|
||||
a += ((uint32_t)k[3])<<24;
|
||||
b += k[4];
|
||||
b += ((uint32_t)k[5])<<8;
|
||||
b += ((uint32_t)k[6])<<16;
|
||||
b += ((uint32_t)k[7])<<24;
|
||||
c += k[8];
|
||||
c += ((uint32_t)k[9])<<8;
|
||||
c += ((uint32_t)k[10])<<16;
|
||||
c += ((uint32_t)k[11])<<24;
|
||||
mix(a,b,c);
|
||||
length -= 12;
|
||||
k += 12;
|
||||
}
|
||||
|
||||
/*-------------------------------- last block: affect all 32 bits of (c) */
|
||||
switch(length) /* all the case statements fall through */
|
||||
{
|
||||
case 12: c+=((uint32_t)k[11])<<24; /* fall through */
|
||||
case 11: c+=((uint32_t)k[10])<<16; /* fall through */
|
||||
case 10: c+=((uint32_t)k[9])<<8; /* fall through */
|
||||
case 9 : c+=k[8]; /* fall through */
|
||||
case 8 : b+=((uint32_t)k[7])<<24; /* fall through */
|
||||
case 7 : b+=((uint32_t)k[6])<<16; /* fall through */
|
||||
case 6 : b+=((uint32_t)k[5])<<8; /* fall through */
|
||||
case 5 : b+=k[4]; /* fall through */
|
||||
case 4 : a+=((uint32_t)k[3])<<24; /* fall through */
|
||||
case 3 : a+=((uint32_t)k[2])<<16; /* fall through */
|
||||
case 2 : a+=((uint32_t)k[1])<<8; /* fall through */
|
||||
case 1 : a+=k[0];
|
||||
break;
|
||||
case 0 : return c;
|
||||
}
|
||||
}
|
||||
|
||||
final(a,b,c);
|
||||
return c;
|
||||
}
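An illustrative call matching the usage described in the comments above: hash a string and fold the result into a power-of-two table with hashmask() (assumes <string.h> for strlen()):
const char *key = "example";
uint32_t h = hashlittle(key, strlen(key), 0 /* arbitrary seed */);
size_t bucket = h & hashmask(10);   /* table of hashsize(10) == 1024 slots */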
|
61
src/memory.c
Normal file
|
@ -0,0 +1,61 @@
|
|||
/*
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
* Copyright (c) 2011-2012 Basile Starynkevitch <basile@starynkevitch.net>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
#include "jansson.h"
|
||||
#include "jansson_private.h"
|
||||
|
||||
/* C89 allows these to be macros */
|
||||
#undef malloc
|
||||
#undef free
|
||||
|
||||
/* memory function pointers */
|
||||
static json_malloc_t do_malloc = malloc;
|
||||
static json_free_t do_free = free;
|
||||
|
||||
void *jsonp_malloc(size_t size) {
|
||||
if (!size)
|
||||
return NULL;
|
||||
|
||||
return (*do_malloc)(size);
|
||||
}
|
||||
|
||||
void jsonp_free(void *ptr) {
|
||||
if (!ptr)
|
||||
return;
|
||||
|
||||
(*do_free)(ptr);
|
||||
}
|
||||
|
||||
char *jsonp_strdup(const char *str) { return jsonp_strndup(str, strlen(str)); }
|
||||
|
||||
char *jsonp_strndup(const char *str, size_t len) {
|
||||
char *new_str;
|
||||
|
||||
new_str = jsonp_malloc(len + 1);
|
||||
if (!new_str)
|
||||
return NULL;
|
||||
|
||||
memcpy(new_str, str, len);
|
||||
new_str[len] = '\0';
|
||||
return new_str;
|
||||
}
|
||||
|
||||
void json_set_alloc_funcs(json_malloc_t malloc_fn, json_free_t free_fn) {
|
||||
do_malloc = malloc_fn;
|
||||
do_free = free_fn;
|
||||
}
|
||||
|
||||
void json_get_alloc_funcs(json_malloc_t *malloc_fn, json_free_t *free_fn) {
|
||||
if (malloc_fn)
|
||||
*malloc_fn = do_malloc;
|
||||
if (free_fn)
|
||||
*free_fn = do_free;
|
||||
}
|
936
src/pack_unpack.c
Normal file
|
@ -0,0 +1,936 @@
|
|||
/*
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
* Copyright (c) 2011-2012 Graeme Smecher <graeme.smecher@mail.mcgill.ca>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include "jansson.h"
|
||||
#include "jansson_private.h"
|
||||
#include "utf.h"
|
||||
#include <string.h>
|
||||
|
||||
typedef struct {
|
||||
int line;
|
||||
int column;
|
||||
size_t pos;
|
||||
char token;
|
||||
} token_t;
|
||||
|
||||
typedef struct {
|
||||
const char *start;
|
||||
const char *fmt;
|
||||
token_t prev_token;
|
||||
token_t token;
|
||||
token_t next_token;
|
||||
json_error_t *error;
|
||||
size_t flags;
|
||||
int line;
|
||||
int column;
|
||||
size_t pos;
|
||||
int has_error;
|
||||
} scanner_t;
|
||||
|
||||
#define token(scanner) ((scanner)->token.token)
|
||||
|
||||
static const char *const type_names[] = {"object", "array", "string", "integer",
|
||||
"real", "true", "false", "null"};
|
||||
|
||||
#define type_name(x) type_names[json_typeof(x)]
|
||||
|
||||
static const char unpack_value_starters[] = "{[siIbfFOon";
|
||||
|
||||
static void scanner_init(scanner_t *s, json_error_t *error, size_t flags,
|
||||
const char *fmt) {
|
||||
s->error = error;
|
||||
s->flags = flags;
|
||||
s->fmt = s->start = fmt;
|
||||
memset(&s->prev_token, 0, sizeof(token_t));
|
||||
memset(&s->token, 0, sizeof(token_t));
|
||||
memset(&s->next_token, 0, sizeof(token_t));
|
||||
s->line = 1;
|
||||
s->column = 0;
|
||||
s->pos = 0;
|
||||
s->has_error = 0;
|
||||
}
|
||||
|
||||
static void next_token(scanner_t *s) {
|
||||
const char *t;
|
||||
s->prev_token = s->token;
|
||||
|
||||
if (s->next_token.line) {
|
||||
s->token = s->next_token;
|
||||
s->next_token.line = 0;
|
||||
return;
|
||||
}
|
||||
|
||||
if (!token(s) && !*s->fmt)
|
||||
return;
|
||||
|
||||
t = s->fmt;
|
||||
s->column++;
|
||||
s->pos++;
|
||||
|
||||
/* skip space and ignored chars */
|
||||
while (*t == ' ' || *t == '\t' || *t == '\n' || *t == ',' || *t == ':') {
|
||||
if (*t == '\n') {
|
||||
s->line++;
|
||||
s->column = 1;
|
||||
} else
|
||||
s->column++;
|
||||
|
||||
s->pos++;
|
||||
t++;
|
||||
}
|
||||
|
||||
s->token.token = *t;
|
||||
s->token.line = s->line;
|
||||
s->token.column = s->column;
|
||||
s->token.pos = s->pos;
|
||||
|
||||
if (*t)
|
||||
t++;
|
||||
s->fmt = t;
|
||||
}
|
||||
|
||||
static void prev_token(scanner_t *s) {
|
||||
s->next_token = s->token;
|
||||
s->token = s->prev_token;
|
||||
}
|
||||
|
||||
static void set_error(scanner_t *s, const char *source, enum json_error_code code,
|
||||
const char *fmt, ...) {
|
||||
va_list ap;
|
||||
va_start(ap, fmt);
|
||||
|
||||
jsonp_error_vset(s->error, s->token.line, s->token.column, s->token.pos, code, fmt,
|
||||
ap);
|
||||
|
||||
jsonp_error_set_source(s->error, source);
|
||||
|
||||
va_end(ap);
|
||||
}
|
||||
|
||||
static json_t *pack(scanner_t *s, va_list *ap);
|
||||
|
||||
/* ours will be set to 1 if jsonp_free() must be called for the result
|
||||
afterwards */
|
||||
static char *read_string(scanner_t *s, va_list *ap, const char *purpose, size_t *out_len,
|
||||
int *ours, int optional) {
|
||||
char t;
|
||||
strbuffer_t strbuff;
|
||||
const char *str;
|
||||
size_t length;
|
||||
|
||||
next_token(s);
|
||||
t = token(s);
|
||||
prev_token(s);
|
||||
|
||||
*ours = 0;
|
||||
if (t != '#' && t != '%' && t != '+') {
|
||||
/* Optimize the simple case */
|
||||
str = va_arg(*ap, const char *);
|
||||
|
||||
if (!str) {
|
||||
if (!optional) {
|
||||
set_error(s, "<args>", json_error_null_value, "NULL %s", purpose);
|
||||
s->has_error = 1;
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
length = strlen(str);
|
||||
|
||||
if (!utf8_check_string(str, length)) {
|
||||
set_error(s, "<args>", json_error_invalid_utf8, "Invalid UTF-8 %s", purpose);
|
||||
s->has_error = 1;
|
||||
return NULL;
|
||||
}
|
||||
|
||||
*out_len = length;
|
||||
return (char *)str;
|
||||
} else if (optional) {
|
||||
set_error(s, "<format>", json_error_invalid_format,
|
||||
"Cannot use '%c' on optional strings", t);
|
||||
s->has_error = 1;
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (strbuffer_init(&strbuff)) {
|
||||
set_error(s, "<internal>", json_error_out_of_memory, "Out of memory");
|
||||
s->has_error = 1;
|
||||
}
|
||||
|
||||
while (1) {
|
||||
str = va_arg(*ap, const char *);
|
||||
if (!str) {
|
||||
set_error(s, "<args>", json_error_null_value, "NULL %s", purpose);
|
||||
s->has_error = 1;
|
||||
}
|
||||
|
||||
next_token(s);
|
||||
|
||||
if (token(s) == '#') {
|
||||
length = va_arg(*ap, int);
|
||||
} else if (token(s) == '%') {
|
||||
length = va_arg(*ap, size_t);
|
||||
} else {
|
||||
prev_token(s);
|
||||
length = s->has_error ? 0 : strlen(str);
|
||||
}
|
||||
|
||||
if (!s->has_error && strbuffer_append_bytes(&strbuff, str, length) == -1) {
|
||||
set_error(s, "<internal>", json_error_out_of_memory, "Out of memory");
|
||||
s->has_error = 1;
|
||||
}
|
||||
|
||||
next_token(s);
|
||||
if (token(s) != '+') {
|
||||
prev_token(s);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (s->has_error) {
|
||||
strbuffer_close(&strbuff);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (!utf8_check_string(strbuff.value, strbuff.length)) {
|
||||
set_error(s, "<args>", json_error_invalid_utf8, "Invalid UTF-8 %s", purpose);
|
||||
strbuffer_close(&strbuff);
|
||||
s->has_error = 1;
|
||||
return NULL;
|
||||
}
|
||||
|
||||
*out_len = strbuff.length;
|
||||
*ours = 1;
|
||||
return strbuffer_steal_value(&strbuff);
|
||||
}
|
||||
|
||||
static json_t *pack_object(scanner_t *s, va_list *ap) {
|
||||
json_t *object = json_object();
|
||||
next_token(s);
|
||||
|
||||
while (token(s) != '}') {
|
||||
char *key;
|
||||
size_t len;
|
||||
int ours;
|
||||
json_t *value;
|
||||
char valueOptional;
|
||||
|
||||
if (!token(s)) {
|
||||
set_error(s, "<format>", json_error_invalid_format,
|
||||
"Unexpected end of format string");
|
||||
goto error;
|
||||
}
|
||||
|
||||
if (token(s) != 's') {
|
||||
set_error(s, "<format>", json_error_invalid_format,
|
||||
"Expected format 's', got '%c'", token(s));
|
||||
goto error;
|
||||
}
|
||||
|
||||
key = read_string(s, ap, "object key", &len, &ours, 0);
|
||||
|
||||
next_token(s);
|
||||
|
||||
next_token(s);
|
||||
valueOptional = token(s);
|
||||
prev_token(s);
|
||||
|
||||
value = pack(s, ap);
|
||||
if (!value) {
|
||||
if (ours)
|
||||
jsonp_free(key);
|
||||
|
||||
if (valueOptional != '*') {
|
||||
set_error(s, "<args>", json_error_null_value, "NULL object value");
|
||||
s->has_error = 1;
|
||||
}
|
||||
|
||||
next_token(s);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (s->has_error)
|
||||
json_decref(value);
|
||||
|
||||
if (!s->has_error && json_object_set_new_nocheck(object, key, value)) {
|
||||
set_error(s, "<internal>", json_error_out_of_memory,
|
||||
"Unable to add key \"%s\"", key);
|
||||
s->has_error = 1;
|
||||
}
|
||||
|
||||
if (ours)
|
||||
jsonp_free(key);
|
||||
|
||||
next_token(s);
|
||||
}
|
||||
|
||||
if (!s->has_error)
|
||||
return object;
|
||||
|
||||
error:
|
||||
json_decref(object);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static json_t *pack_array(scanner_t *s, va_list *ap) {
|
||||
json_t *array = json_array();
|
||||
next_token(s);
|
||||
|
||||
while (token(s) != ']') {
|
||||
json_t *value;
|
||||
char valueOptional;
|
||||
|
||||
if (!token(s)) {
|
||||
set_error(s, "<format>", json_error_invalid_format,
|
||||
"Unexpected end of format string");
|
||||
/* Format string errors are unrecoverable. */
|
||||
goto error;
|
||||
}
|
||||
|
||||
next_token(s);
|
||||
valueOptional = token(s);
|
||||
prev_token(s);
|
||||
|
||||
value = pack(s, ap);
|
||||
if (!value) {
|
||||
if (valueOptional != '*') {
|
||||
s->has_error = 1;
|
||||
}
|
||||
|
||||
next_token(s);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (s->has_error)
|
||||
json_decref(value);
|
||||
|
||||
if (!s->has_error && json_array_append_new(array, value)) {
|
||||
set_error(s, "<internal>", json_error_out_of_memory,
|
||||
"Unable to append to array");
|
||||
s->has_error = 1;
|
||||
}
|
||||
|
||||
next_token(s);
|
||||
}
|
||||
|
||||
if (!s->has_error)
|
||||
return array;
|
||||
|
||||
error:
|
||||
json_decref(array);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static json_t *pack_string(scanner_t *s, va_list *ap) {
|
||||
char *str;
|
||||
char t;
|
||||
size_t len;
|
||||
int ours;
|
||||
int optional;
|
||||
|
||||
next_token(s);
|
||||
t = token(s);
|
||||
optional = t == '?' || t == '*';
|
||||
if (!optional)
|
||||
prev_token(s);
|
||||
|
||||
str = read_string(s, ap, "string", &len, &ours, optional);
|
||||
|
||||
if (!str)
|
||||
return t == '?' && !s->has_error ? json_null() : NULL;
|
||||
|
||||
if (s->has_error) {
|
||||
/* It's impossible to reach this point if ours != 0, do not free str. */
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (ours)
|
||||
return jsonp_stringn_nocheck_own(str, len);
|
||||
|
||||
return json_stringn_nocheck(str, len);
|
||||
}
|
||||
|
||||
static json_t *pack_object_inter(scanner_t *s, va_list *ap, int need_incref) {
|
||||
json_t *json;
|
||||
char ntoken;
|
||||
|
||||
next_token(s);
|
||||
ntoken = token(s);
|
||||
|
||||
if (ntoken != '?' && ntoken != '*')
|
||||
prev_token(s);
|
||||
|
||||
json = va_arg(*ap, json_t *);
|
||||
|
||||
if (json)
|
||||
return need_incref ? json_incref(json) : json;
|
||||
|
||||
switch (ntoken) {
|
||||
case '?':
|
||||
return json_null();
|
||||
case '*':
|
||||
return NULL;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
set_error(s, "<args>", json_error_null_value, "NULL object");
|
||||
s->has_error = 1;
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static json_t *pack_integer(scanner_t *s, json_int_t value) {
|
||||
json_t *json = json_integer(value);
|
||||
|
||||
if (!json) {
|
||||
set_error(s, "<internal>", json_error_out_of_memory, "Out of memory");
|
||||
s->has_error = 1;
|
||||
}
|
||||
|
||||
return json;
|
||||
}
|
||||
|
||||
static json_t *pack_real(scanner_t *s, double value) {
|
||||
/* Allocate without setting value so we can identify OOM error. */
|
||||
json_t *json = json_real(0.0);
|
||||
|
||||
if (!json) {
|
||||
set_error(s, "<internal>", json_error_out_of_memory, "Out of memory");
|
||||
s->has_error = 1;
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (json_real_set(json, value)) {
|
||||
json_decref(json);
|
||||
|
||||
set_error(s, "<args>", json_error_numeric_overflow,
|
||||
"Invalid floating point value");
|
||||
s->has_error = 1;
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return json;
|
||||
}
|
||||
|
||||
static json_t *pack(scanner_t *s, va_list *ap) {
|
||||
switch (token(s)) {
|
||||
case '{':
|
||||
return pack_object(s, ap);
|
||||
|
||||
case '[':
|
||||
return pack_array(s, ap);
|
||||
|
||||
case 's': /* string */
|
||||
return pack_string(s, ap);
|
||||
|
||||
case 'n': /* null */
|
||||
return json_null();
|
||||
|
||||
case 'b': /* boolean */
|
||||
return va_arg(*ap, int) ? json_true() : json_false();
|
||||
|
||||
case 'i': /* integer from int */
|
||||
return pack_integer(s, va_arg(*ap, int));
|
||||
|
||||
case 'I': /* integer from json_int_t */
|
||||
return pack_integer(s, va_arg(*ap, json_int_t));
|
||||
|
||||
case 'f': /* real */
|
||||
return pack_real(s, va_arg(*ap, double));
|
||||
|
||||
case 'O': /* a json_t object; increments refcount */
|
||||
return pack_object_inter(s, ap, 1);
|
||||
|
||||
case 'o': /* a json_t object; doesn't increment refcount */
|
||||
return pack_object_inter(s, ap, 0);
|
||||
|
||||
default:
|
||||
set_error(s, "<format>", json_error_invalid_format,
|
||||
"Unexpected format character '%c'", token(s));
|
||||
s->has_error = 1;
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
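A few illustrative format strings exercising the cases handled by pack() above (the values are made up):
json_t *a = json_pack("[i, I, f, b, n]", 1, (json_int_t)2, 3.0, 1);
json_t *o = json_pack("{s:o, s:O}",
                      "steal", json_string("x"), /* 'o' steals the reference */
                      "share", a);               /* 'O' takes a new reference */
json_t *opt = json_pack("{s:s*}", "maybe", NULL); /* '*' lets a NULL value be skipped */

json_decref(a);   /* o still holds its own reference to a */
json_decref(o);
json_decref(opt);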
|
||||
static int unpack(scanner_t *s, json_t *root, va_list *ap);
|
||||
|
||||
static int unpack_object(scanner_t *s, json_t *root, va_list *ap) {
|
||||
int ret = -1;
|
||||
int strict = 0;
|
||||
int gotopt = 0;
|
||||
|
||||
/* Use a set (emulated by a hashtable) to check that all object
|
||||
keys are accessed. Checking that the correct number of keys
|
||||
were accessed is not enough, as the same key can be unpacked
|
||||
multiple times.
|
||||
*/
|
||||
hashtable_t key_set;
|
||||
|
||||
if (hashtable_init(&key_set)) {
|
||||
set_error(s, "<internal>", json_error_out_of_memory, "Out of memory");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (root && !json_is_object(root)) {
|
||||
set_error(s, "<validation>", json_error_wrong_type, "Expected object, got %s",
|
||||
type_name(root));
|
||||
goto out;
|
||||
}
|
||||
next_token(s);
|
||||
|
||||
while (token(s) != '}') {
|
||||
const char *key;
|
||||
json_t *value;
|
||||
int opt = 0;
|
||||
|
||||
if (strict != 0) {
|
||||
set_error(s, "<format>", json_error_invalid_format,
|
||||
"Expected '}' after '%c', got '%c'", (strict == 1 ? '!' : '*'),
|
||||
token(s));
|
||||
goto out;
|
||||
}
|
||||
|
||||
if (!token(s)) {
|
||||
set_error(s, "<format>", json_error_invalid_format,
|
||||
"Unexpected end of format string");
|
||||
goto out;
|
||||
}
|
||||
|
||||
if (token(s) == '!' || token(s) == '*') {
|
||||
strict = (token(s) == '!' ? 1 : -1);
|
||||
next_token(s);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (token(s) != 's') {
|
||||
set_error(s, "<format>", json_error_invalid_format,
|
||||
"Expected format 's', got '%c'", token(s));
|
||||
goto out;
|
||||
}
|
||||
|
||||
key = va_arg(*ap, const char *);
|
||||
if (!key) {
|
||||
set_error(s, "<args>", json_error_null_value, "NULL object key");
|
||||
goto out;
|
||||
}
|
||||
|
||||
next_token(s);
|
||||
|
||||
if (token(s) == '?') {
|
||||
opt = gotopt = 1;
|
||||
next_token(s);
|
||||
}
|
||||
|
||||
if (!root) {
|
||||
/* skipping */
|
||||
value = NULL;
|
||||
} else {
|
||||
value = json_object_get(root, key);
|
||||
if (!value && !opt) {
|
||||
set_error(s, "<validation>", json_error_item_not_found,
|
||||
"Object item not found: %s", key);
|
||||
goto out;
|
||||
}
|
||||
}
|
||||
|
||||
if (unpack(s, value, ap))
|
||||
goto out;
|
||||
|
||||
hashtable_set(&key_set, key, strlen(key), json_null());
|
||||
next_token(s);
|
||||
}
|
||||
|
||||
if (strict == 0 && (s->flags & JSON_STRICT))
|
||||
strict = 1;
|
||||
|
||||
if (root && strict == 1) {
|
||||
/* We need to check that all non optional items have been parsed */
|
||||
const char *key;
|
||||
size_t key_len;
|
||||
/* keys_res is 1 for uninitialized, 0 for success, -1 for error. */
|
||||
int keys_res = 1;
|
||||
strbuffer_t unrecognized_keys;
|
||||
json_t *value;
|
||||
long unpacked = 0;
|
||||
|
||||
if (gotopt || json_object_size(root) != key_set.size) {
|
||||
json_object_keylen_foreach(root, key, key_len, value) {
|
||||
if (!hashtable_get(&key_set, key, key_len)) {
|
||||
unpacked++;
|
||||
|
||||
/* Save unrecognized keys for the error message */
|
||||
if (keys_res == 1) {
|
||||
keys_res = strbuffer_init(&unrecognized_keys);
|
||||
} else if (!keys_res) {
|
||||
keys_res = strbuffer_append_bytes(&unrecognized_keys, ", ", 2);
|
||||
}
|
||||
|
||||
if (!keys_res)
|
||||
keys_res =
|
||||
strbuffer_append_bytes(&unrecognized_keys, key, key_len);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (unpacked) {
|
||||
set_error(s, "<validation>", json_error_end_of_input_expected,
|
||||
"%li object item(s) left unpacked: %s", unpacked,
|
||||
keys_res ? "<unknown>" : strbuffer_value(&unrecognized_keys));
|
||||
strbuffer_close(&unrecognized_keys);
|
||||
goto out;
|
||||
}
|
||||
}
|
||||
|
||||
ret = 0;
|
||||
|
||||
out:
|
||||
hashtable_close(&key_set);
|
||||
return ret;
|
||||
}
|
||||
|
||||
static int unpack_array(scanner_t *s, json_t *root, va_list *ap) {
|
||||
size_t i = 0;
|
||||
int strict = 0;
|
||||
|
||||
if (root && !json_is_array(root)) {
|
||||
set_error(s, "<validation>", json_error_wrong_type, "Expected array, got %s",
|
||||
type_name(root));
|
||||
return -1;
|
||||
}
|
||||
next_token(s);
|
||||
|
||||
while (token(s) != ']') {
|
||||
json_t *value;
|
||||
|
||||
if (strict != 0) {
|
||||
set_error(s, "<format>", json_error_invalid_format,
|
||||
"Expected ']' after '%c', got '%c'", (strict == 1 ? '!' : '*'),
|
||||
token(s));
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!token(s)) {
|
||||
set_error(s, "<format>", json_error_invalid_format,
|
||||
"Unexpected end of format string");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (token(s) == '!' || token(s) == '*') {
|
||||
strict = (token(s) == '!' ? 1 : -1);
|
||||
next_token(s);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!strchr(unpack_value_starters, token(s))) {
|
||||
set_error(s, "<format>", json_error_invalid_format,
|
||||
"Unexpected format character '%c'", token(s));
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!root) {
|
||||
/* skipping */
|
||||
value = NULL;
|
||||
} else {
|
||||
value = json_array_get(root, i);
|
||||
if (!value) {
|
||||
set_error(s, "<validation>", json_error_index_out_of_range,
|
||||
"Array index %lu out of range", (unsigned long)i);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
if (unpack(s, value, ap))
|
||||
return -1;
|
||||
|
||||
next_token(s);
|
||||
i++;
|
||||
}
|
||||
|
||||
if (strict == 0 && (s->flags & JSON_STRICT))
|
||||
strict = 1;
|
||||
|
||||
if (root && strict == 1 && i != json_array_size(root)) {
|
||||
long diff = (long)json_array_size(root) - (long)i;
|
||||
set_error(s, "<validation>", json_error_end_of_input_expected,
|
||||
"%li array item(s) left unpacked", diff);
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int unpack(scanner_t *s, json_t *root, va_list *ap) {
|
||||
switch (token(s)) {
|
||||
case '{':
|
||||
return unpack_object(s, root, ap);
|
||||
|
||||
case '[':
|
||||
return unpack_array(s, root, ap);
|
||||
|
||||
case 's':
|
||||
if (root && !json_is_string(root)) {
|
||||
set_error(s, "<validation>", json_error_wrong_type,
|
||||
"Expected string, got %s", type_name(root));
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!(s->flags & JSON_VALIDATE_ONLY)) {
|
||||
const char **str_target;
|
||||
size_t *len_target = NULL;
|
||||
|
||||
str_target = va_arg(*ap, const char **);
|
||||
if (!str_target) {
|
||||
set_error(s, "<args>", json_error_null_value, "NULL string argument");
|
||||
return -1;
|
||||
}
|
||||
|
||||
next_token(s);
|
||||
|
||||
if (token(s) == '%') {
|
||||
len_target = va_arg(*ap, size_t *);
|
||||
if (!len_target) {
|
||||
set_error(s, "<args>", json_error_null_value,
|
||||
"NULL string length argument");
|
||||
return -1;
|
||||
}
|
||||
} else
|
||||
prev_token(s);
|
||||
|
||||
if (root) {
|
||||
*str_target = json_string_value(root);
|
||||
if (len_target)
|
||||
*len_target = json_string_length(root);
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
|
||||
case 'i':
|
||||
if (root && !json_is_integer(root)) {
|
||||
set_error(s, "<validation>", json_error_wrong_type,
|
||||
"Expected integer, got %s", type_name(root));
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!(s->flags & JSON_VALIDATE_ONLY)) {
|
||||
int *target = va_arg(*ap, int *);
|
||||
if (root)
|
||||
*target = (int)json_integer_value(root);
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
case 'I':
|
||||
if (root && !json_is_integer(root)) {
|
||||
set_error(s, "<validation>", json_error_wrong_type,
|
||||
"Expected integer, got %s", type_name(root));
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!(s->flags & JSON_VALIDATE_ONLY)) {
|
||||
json_int_t *target = va_arg(*ap, json_int_t *);
|
||||
if (root)
|
||||
*target = json_integer_value(root);
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
case 'b':
|
||||
if (root && !json_is_boolean(root)) {
|
||||
set_error(s, "<validation>", json_error_wrong_type,
|
||||
"Expected true or false, got %s", type_name(root));
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!(s->flags & JSON_VALIDATE_ONLY)) {
|
||||
int *target = va_arg(*ap, int *);
|
||||
if (root)
|
||||
*target = json_is_true(root);
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
case 'f':
|
||||
if (root && !json_is_real(root)) {
|
||||
set_error(s, "<validation>", json_error_wrong_type,
|
||||
"Expected real, got %s", type_name(root));
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!(s->flags & JSON_VALIDATE_ONLY)) {
|
||||
double *target = va_arg(*ap, double *);
|
||||
if (root)
|
||||
*target = json_real_value(root);
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
case 'F':
|
||||
if (root && !json_is_number(root)) {
|
||||
set_error(s, "<validation>", json_error_wrong_type,
|
||||
"Expected real or integer, got %s", type_name(root));
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!(s->flags & JSON_VALIDATE_ONLY)) {
|
||||
double *target = va_arg(*ap, double *);
|
||||
if (root)
|
||||
*target = json_number_value(root);
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
case 'O':
|
||||
if (root && !(s->flags & JSON_VALIDATE_ONLY))
|
||||
json_incref(root);
|
||||
/* Fall through */
|
||||
|
||||
case 'o':
|
||||
if (!(s->flags & JSON_VALIDATE_ONLY)) {
|
||||
json_t **target = va_arg(*ap, json_t **);
|
||||
if (root)
|
||||
*target = root;
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
case 'n':
|
||||
/* Never assign, just validate */
|
||||
if (root && !json_is_null(root)) {
|
||||
set_error(s, "<validation>", json_error_wrong_type,
|
||||
"Expected null, got %s", type_name(root));
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
|
||||
default:
|
||||
set_error(s, "<format>", json_error_invalid_format,
|
||||
"Unexpected format character '%c'", token(s));
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
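An illustrative sketch of the strict-unpacking behaviour implemented above ('!' or JSON_STRICT rejects leftover object keys, JSON_VALIDATE_ONLY assigns nothing); the data is made up:
json_error_t error;
json_t *conf = json_pack("{s:i, s:s}", "port", 8080, "host", "localhost");

int port;
/* Fails: "host" is left unpacked and '!' demands that every key is consumed. */
if (json_unpack_ex(conf, &error, 0, "{s:i !}", "port", &port))
    fprintf(stderr, "%s\n", error.text);

/* Succeeds: the shape is checked, but no output arguments are consumed. */
json_unpack_ex(conf, &error, JSON_VALIDATE_ONLY, "{s:i, s:s}", "port", "host");

json_decref(conf);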
|
||||
json_t *json_vpack_ex(json_error_t *error, size_t flags, const char *fmt, va_list ap) {
|
||||
scanner_t s;
|
||||
va_list ap_copy;
|
||||
json_t *value;
|
||||
|
||||
if (!fmt || !*fmt) {
|
||||
jsonp_error_init(error, "<format>");
|
||||
jsonp_error_set(error, -1, -1, 0, json_error_invalid_argument,
|
||||
"NULL or empty format string");
|
||||
return NULL;
|
||||
}
|
||||
jsonp_error_init(error, NULL);
|
||||
|
||||
scanner_init(&s, error, flags, fmt);
|
||||
next_token(&s);
|
||||
|
||||
va_copy(ap_copy, ap);
|
||||
value = pack(&s, &ap_copy);
|
||||
va_end(ap_copy);
|
||||
|
||||
/* This will cover all situations where s.has_error is true */
|
||||
if (!value)
|
||||
return NULL;
|
||||
|
||||
next_token(&s);
|
||||
if (token(&s)) {
|
||||
json_decref(value);
|
||||
set_error(&s, "<format>", json_error_invalid_format,
|
||||
"Garbage after format string");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
json_t *json_pack_ex(json_error_t *error, size_t flags, const char *fmt, ...) {
|
||||
json_t *value;
|
||||
va_list ap;
|
||||
|
||||
va_start(ap, fmt);
|
||||
value = json_vpack_ex(error, flags, fmt, ap);
|
||||
va_end(ap);
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
json_t *json_pack(const char *fmt, ...) {
|
||||
json_t *value;
|
||||
va_list ap;
|
||||
|
||||
va_start(ap, fmt);
|
||||
value = json_vpack_ex(NULL, 0, fmt, ap);
|
||||
va_end(ap);
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
int json_vunpack_ex(json_t *root, json_error_t *error, size_t flags, const char *fmt,
|
||||
va_list ap) {
|
||||
scanner_t s;
|
||||
va_list ap_copy;
|
||||
|
||||
if (!root) {
|
||||
jsonp_error_init(error, "<root>");
|
||||
jsonp_error_set(error, -1, -1, 0, json_error_null_value, "NULL root value");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!fmt || !*fmt) {
|
||||
jsonp_error_init(error, "<format>");
|
||||
jsonp_error_set(error, -1, -1, 0, json_error_invalid_argument,
|
||||
"NULL or empty format string");
|
||||
return -1;
|
||||
}
|
||||
jsonp_error_init(error, NULL);
|
||||
|
||||
scanner_init(&s, error, flags, fmt);
|
||||
next_token(&s);
|
||||
|
||||
va_copy(ap_copy, ap);
|
||||
if (unpack(&s, root, &ap_copy)) {
|
||||
va_end(ap_copy);
|
||||
return -1;
|
||||
}
|
||||
va_end(ap_copy);
|
||||
|
||||
next_token(&s);
|
||||
if (token(&s)) {
|
||||
set_error(&s, "<format>", json_error_invalid_format,
|
||||
"Garbage after format string");
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
int json_unpack_ex(json_t *root, json_error_t *error, size_t flags, const char *fmt,
|
||||
...) {
|
||||
int ret;
|
||||
va_list ap;
|
||||
|
||||
va_start(ap, fmt);
|
||||
ret = json_vunpack_ex(root, error, flags, fmt, ap);
|
||||
va_end(ap);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int json_unpack(json_t *root, const char *fmt, ...) {
|
||||
int ret;
|
||||
va_list ap;
|
||||
|
||||
va_start(ap, fmt);
|
||||
ret = json_vunpack_ex(root, NULL, 0, fmt, ap);
|
||||
va_end(ap);
|
||||
|
||||
return ret;
|
||||
}
|
|
@ -1,26 +1,29 @@
|
|||
/*
|
||||
* Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#ifndef _GNU_SOURCE
|
||||
#define _GNU_SOURCE
|
||||
#endif
|
||||
|
||||
#include "strbuffer.h"
|
||||
#include "jansson_private.h"
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include "strbuffer.h"
|
||||
#include "util.h"
|
||||
|
||||
#define STRBUFFER_MIN_SIZE 16
|
||||
#define STRBUFFER_FACTOR 2
|
||||
#define STRBUFFER_MIN_SIZE 16
|
||||
#define STRBUFFER_FACTOR 2
|
||||
#define STRBUFFER_SIZE_MAX ((size_t)(-1))
|
||||
|
||||
int strbuffer_init(strbuffer_t *strbuff)
|
||||
{
|
||||
int strbuffer_init(strbuffer_t *strbuff) {
|
||||
strbuff->size = STRBUFFER_MIN_SIZE;
|
||||
strbuff->length = 0;
|
||||
|
||||
strbuff->value = malloc(strbuff->size);
|
||||
if(!strbuff->value)
|
||||
strbuff->value = jsonp_malloc(strbuff->size);
|
||||
if (!strbuff->value)
|
||||
return -1;
|
||||
|
||||
/* initialize to empty */
|
||||
|
@ -28,52 +31,54 @@ int strbuffer_init(strbuffer_t *strbuff)
|
|||
return 0;
|
||||
}
|
||||
|
||||
void strbuffer_close(strbuffer_t *strbuff)
|
||||
{
|
||||
free(strbuff->value);
|
||||
void strbuffer_close(strbuffer_t *strbuff) {
|
||||
if (strbuff->value)
|
||||
jsonp_free(strbuff->value);
|
||||
|
||||
strbuff->size = 0;
|
||||
strbuff->length = 0;
|
||||
strbuff->value = NULL;
|
||||
}
|
||||
|
||||
void strbuffer_clear(strbuffer_t *strbuff)
|
||||
{
|
||||
void strbuffer_clear(strbuffer_t *strbuff) {
|
||||
strbuff->length = 0;
|
||||
strbuff->value[0] = '\0';
|
||||
}
|
||||
|
||||
const char *strbuffer_value(const strbuffer_t *strbuff)
|
||||
{
|
||||
return strbuff->value;
|
||||
}
|
||||
const char *strbuffer_value(const strbuffer_t *strbuff) { return strbuff->value; }
|
||||
|
||||
char *strbuffer_steal_value(strbuffer_t *strbuff)
|
||||
{
|
||||
char *strbuffer_steal_value(strbuffer_t *strbuff) {
|
||||
char *result = strbuff->value;
|
||||
strbuffer_init(strbuff);
|
||||
strbuff->value = NULL;
|
||||
return result;
|
||||
}
|
||||
|
||||
int strbuffer_append(strbuffer_t *strbuff, const char *string)
|
||||
{
|
||||
return strbuffer_append_bytes(strbuff, string, strlen(string));
|
||||
}
|
||||
|
||||
int strbuffer_append_byte(strbuffer_t *strbuff, char byte)
|
||||
{
|
||||
int strbuffer_append_byte(strbuffer_t *strbuff, char byte) {
|
||||
return strbuffer_append_bytes(strbuff, &byte, 1);
|
||||
}
|
||||
|
||||
int strbuffer_append_bytes(strbuffer_t *strbuff, const char *data, int size)
|
||||
{
|
||||
if(strbuff->length + size >= strbuff->size)
|
||||
{
|
||||
strbuff->size = max(strbuff->size * STRBUFFER_FACTOR,
|
||||
strbuff->length + size + 1);
|
||||
int strbuffer_append_bytes(strbuffer_t *strbuff, const char *data, size_t size) {
|
||||
if (size >= strbuff->size - strbuff->length) {
|
||||
size_t new_size;
|
||||
char *new_value;
|
||||
|
||||
strbuff->value = realloc(strbuff->value, strbuff->size);
|
||||
if(!strbuff->value)
|
||||
/* avoid integer overflow */
|
||||
if (strbuff->size > STRBUFFER_SIZE_MAX / STRBUFFER_FACTOR ||
|
||||
size > STRBUFFER_SIZE_MAX - 1 ||
|
||||
strbuff->length > STRBUFFER_SIZE_MAX - 1 - size)
|
||||
return -1;
|
||||
|
||||
new_size = max(strbuff->size * STRBUFFER_FACTOR, strbuff->length + size + 1);
|
||||
|
||||
new_value = jsonp_malloc(new_size);
|
||||
if (!new_value)
|
||||
return -1;
|
||||
|
||||
memcpy(new_value, strbuff->value, strbuff->length);
|
||||
|
||||
jsonp_free(strbuff->value);
|
||||
strbuff->value = new_value;
|
||||
strbuff->size = new_size;
|
||||
}
|
||||
|
||||
memcpy(strbuff->value + strbuff->length, data, size);
|
||||
|
@ -83,13 +88,11 @@ int strbuffer_append_bytes(strbuffer_t *strbuff, const char *data, int size)
|
|||
return 0;
|
||||
}
|
||||
|
||||
char strbuffer_pop(strbuffer_t *strbuff)
|
||||
{
|
||||
if(strbuff->length > 0) {
|
||||
char strbuffer_pop(strbuffer_t *strbuff) {
|
||||
if (strbuff->length > 0) {
|
||||
char c = strbuff->value[--strbuff->length];
|
||||
strbuff->value[strbuff->length] = '\0';
|
||||
return c;
|
||||
}
|
||||
else
|
||||
} else
|
||||
return '\0';
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
/*
|
||||
* Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
|
@ -8,23 +8,27 @@
|
|||
#ifndef STRBUFFER_H
|
||||
#define STRBUFFER_H
|
||||
|
||||
#include "jansson.h"
|
||||
#include <stdlib.h>
|
||||
|
||||
typedef struct {
|
||||
char *value;
|
||||
int length; /* bytes used */
|
||||
int size; /* bytes allocated */
|
||||
size_t length; /* bytes used */
|
||||
size_t size; /* bytes allocated */
|
||||
} strbuffer_t;
|
||||
|
||||
int strbuffer_init(strbuffer_t *strbuff);
|
||||
int strbuffer_init(strbuffer_t *strbuff) JANSSON_ATTRS((warn_unused_result));
|
||||
void strbuffer_close(strbuffer_t *strbuff);
|
||||
|
||||
void strbuffer_clear(strbuffer_t *strbuff);
|
||||
|
||||
const char *strbuffer_value(const strbuffer_t *strbuff);
|
||||
|
||||
/* Steal the value and close the strbuffer */
|
||||
char *strbuffer_steal_value(strbuffer_t *strbuff);
|
||||
|
||||
int strbuffer_append(strbuffer_t *strbuff, const char *string);
|
||||
int strbuffer_append_byte(strbuffer_t *strbuff, char byte);
|
||||
int strbuffer_append_bytes(strbuffer_t *strbuff, const char *data, int size);
|
||||
int strbuffer_append_bytes(strbuffer_t *strbuff, const char *data, size_t size);
|
||||
|
||||
char strbuffer_pop(strbuffer_t *strbuff);
|
||||
|
||||
|
|
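The strbuffer_append_bytes() rewrite above replaces the old realloc() call with an explicit overflow check before the buffer is grown. A standalone sketch of that same check, using a hypothetical safe_grow() helper that is not part of Jansson:

    #include <stdint.h>
    #include <stddef.h>

    #define FACTOR 2

    /* Hypothetical helper mirroring the growth logic above: refuse to grow
       when doubling the buffer, or when length + size + 1, would wrap around
       size_t; otherwise pick the larger of the doubled size and the exact
       space needed (including the terminating NUL). */
    static int safe_grow(size_t cur_size, size_t length, size_t size, size_t *new_size) {
        if (cur_size > SIZE_MAX / FACTOR || size > SIZE_MAX - 1 ||
            length > SIZE_MAX - 1 - size)
            return -1; /* growing would overflow */

        *new_size = cur_size * FACTOR;
        if (*new_size < length + size + 1)
            *new_size = length + size + 1;
        return 0;
    }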
src/strconv.c (new file, 237 lines)
@ -0,0 +1,237 @@
|
|||
#include "jansson_private.h"
|
||||
#include "strbuffer.h"
|
||||
#include <assert.h>
|
||||
#include <errno.h>
|
||||
#include <math.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
|
||||
/* need jansson_private_config.h to get the correct snprintf */
|
||||
#ifdef HAVE_CONFIG_H
|
||||
#include <jansson_private_config.h>
|
||||
#endif
|
||||
|
||||
/*
|
||||
- This code assumes that the decimal separator is exactly one
|
||||
character.
|
||||
|
||||
- If setlocale() is called by another thread between the call to
|
||||
get_decimal_point() and the call to sprintf() or strtod(), the
|
||||
result may be wrong. setlocale() is not thread-safe and should
|
||||
not be used this way. Multi-threaded programs should use
|
||||
uselocale() instead.
|
||||
*/
|
||||
static char get_decimal_point() {
|
||||
char buf[3];
|
||||
sprintf(buf, "%#.0f", 1.0); // "1." in the current locale
|
||||
return buf[1];
|
||||
}
|
||||
|
||||
static void to_locale(strbuffer_t *strbuffer) {
|
||||
char point;
|
||||
char *pos;
|
||||
|
||||
point = get_decimal_point();
|
||||
if (point == '.') {
|
||||
/* No conversion needed */
|
||||
return;
|
||||
}
|
||||
|
||||
pos = strchr(strbuffer->value, '.');
|
||||
if (pos)
|
||||
*pos = point;
|
||||
}
|
||||
|
||||
int jsonp_strtod(strbuffer_t *strbuffer, double *out) {
|
||||
double value;
|
||||
char *end;
|
||||
|
||||
to_locale(strbuffer);
|
||||
|
||||
errno = 0;
|
||||
value = strtod(strbuffer->value, &end);
|
||||
assert(end == strbuffer->value + strbuffer->length);
|
||||
|
||||
if ((value == HUGE_VAL || value == -HUGE_VAL) && errno == ERANGE) {
|
||||
/* Overflow */
|
||||
return -1;
|
||||
}
|
||||
|
||||
*out = value;
|
||||
return 0;
|
||||
}
|
||||
|
||||
#if DTOA_ENABLED
|
||||
/* see dtoa.c */
|
||||
char *dtoa_r(double dd, int mode, int ndigits, int *decpt, int *sign, char **rve,
|
||||
char *buf, size_t blen);
|
||||
|
||||
int jsonp_dtostr(char *buffer, size_t size, double value, int precision) {
|
||||
/* adapted from `format_float_short()` in
|
||||
* https://github.com/python/cpython/blob/2cf18a44303b6d84faa8ecffaecc427b53ae121e/Python/pystrtod.c#L969
|
||||
*/
|
||||
char digits[25];
|
||||
char *digits_end;
|
||||
int mode = precision == 0 ? 0 : 2;
|
||||
int decpt, sign, exp_len, exp = 0, use_exp = 0;
|
||||
int digits_len, vdigits_start, vdigits_end;
|
||||
char *p;
|
||||
|
||||
if (dtoa_r(value, mode, precision, &decpt, &sign, &digits_end, digits, 25) == NULL) {
|
||||
// digits is too short => should not happen
|
||||
return -1;
|
||||
}
|
||||
|
||||
digits_len = digits_end - digits;
|
||||
if (decpt <= -4 || decpt > 16) {
|
||||
use_exp = 1;
|
||||
exp = decpt - 1;
|
||||
decpt = 1;
|
||||
}
|
||||
|
||||
vdigits_start = decpt <= 0 ? decpt - 1 : 0;
|
||||
vdigits_end = digits_len;
|
||||
if (!use_exp) {
|
||||
/* decpt + 1 to add ".0" if value is an integer */
|
||||
vdigits_end = vdigits_end > decpt ? vdigits_end : decpt + 1;
|
||||
} else {
|
||||
vdigits_end = vdigits_end > decpt ? vdigits_end : decpt;
|
||||
}
|
||||
|
||||
if (
|
||||
/* sign, decimal point and trailing 0 byte */
|
||||
(size_t)(3 +
|
||||
|
||||
/* total digit count (including zero padding on both sides) */
|
||||
(vdigits_end - vdigits_start) +
|
||||
|
||||
/* exponent "e+100", max 3 numerical digits */
|
||||
(use_exp ? 5 : 0)) > size) {
|
||||
/* buffer is too short */
|
||||
return -1;
|
||||
}
|
||||
|
||||
p = buffer;
|
||||
if (sign == 1) {
|
||||
*p++ = '-';
|
||||
}
|
||||
|
||||
/* note that exactly one of the three 'if' conditions is true,
|
||||
so we include exactly one decimal point */
|
||||
/* Zero padding on left of digit string */
|
||||
if (decpt <= 0) {
|
||||
memset(p, '0', decpt - vdigits_start);
|
||||
p += decpt - vdigits_start;
|
||||
*p++ = '.';
|
||||
memset(p, '0', 0 - decpt);
|
||||
p += 0 - decpt;
|
||||
} else {
|
||||
memset(p, '0', 0 - vdigits_start);
|
||||
p += 0 - vdigits_start;
|
||||
}
|
||||
|
||||
/* Digits, with included decimal point */
|
||||
if (0 < decpt && decpt <= digits_len) {
|
||||
strncpy(p, digits, decpt - 0);
|
||||
p += decpt - 0;
|
||||
*p++ = '.';
|
||||
strncpy(p, digits + decpt, digits_len - decpt);
|
||||
p += digits_len - decpt;
|
||||
} else {
|
||||
strncpy(p, digits, digits_len);
|
||||
p += digits_len;
|
||||
}
|
||||
|
||||
/* And zeros on the right */
|
||||
if (digits_len < decpt) {
|
||||
memset(p, '0', decpt - digits_len);
|
||||
p += decpt - digits_len;
|
||||
*p++ = '.';
|
||||
memset(p, '0', vdigits_end - decpt);
|
||||
p += vdigits_end - decpt;
|
||||
} else {
|
||||
memset(p, '0', vdigits_end - digits_len);
|
||||
p += vdigits_end - digits_len;
|
||||
}
|
||||
|
||||
if (p[-1] == '.')
|
||||
p--;
|
||||
|
||||
if (use_exp) {
|
||||
*p++ = 'e';
|
||||
exp_len = sprintf(p, "%d", exp);
|
||||
p += exp_len;
|
||||
}
|
||||
*p = '\0';
|
||||
|
||||
return (int)(p - buffer);
|
||||
}
|
||||
#else /* DTOA_ENABLED == 0 */
|
||||
static void from_locale(char *buffer) {
|
||||
char point;
|
||||
char *pos;
|
||||
|
||||
point = get_decimal_point();
|
||||
if (point == '.') {
|
||||
/* No conversion needed */
|
||||
return;
|
||||
}
|
||||
|
||||
pos = strchr(buffer, point);
|
||||
if (pos)
|
||||
*pos = '.';
|
||||
}
|
||||
|
||||
int jsonp_dtostr(char *buffer, size_t size, double value, int precision) {
|
||||
int ret;
|
||||
char *start, *end;
|
||||
size_t length;
|
||||
|
||||
if (precision == 0)
|
||||
precision = 17;
|
||||
|
||||
ret = snprintf(buffer, size, "%.*g", precision, value);
|
||||
if (ret < 0)
|
||||
return -1;
|
||||
|
||||
length = (size_t)ret;
|
||||
if (length >= size)
|
||||
return -1;
|
||||
|
||||
from_locale(buffer);
|
||||
|
||||
/* Make sure there's a dot or 'e' in the output. Otherwise
|
||||
a real is converted to an integer when decoding */
|
||||
if (strchr(buffer, '.') == NULL && strchr(buffer, 'e') == NULL) {
|
||||
if (length + 3 >= size) {
|
||||
/* No space to append ".0" */
|
||||
return -1;
|
||||
}
|
||||
buffer[length] = '.';
|
||||
buffer[length + 1] = '0';
|
||||
buffer[length + 2] = '\0';
|
||||
length += 2;
|
||||
}
|
||||
|
||||
/* Remove leading '+' from positive exponent. Also remove leading
|
||||
zeros from exponents (added by some printf() implementations) */
|
||||
start = strchr(buffer, 'e');
|
||||
if (start) {
|
||||
start++;
|
||||
end = start + 1;
|
||||
|
||||
if (*start == '-')
|
||||
start++;
|
||||
|
||||
while (*end == '0')
|
||||
end++;
|
||||
|
||||
if (end != start) {
|
||||
memmove(start, end, length - (size_t)(end - buffer));
|
||||
length -= (size_t)(end - start);
|
||||
}
|
||||
}
|
||||
|
||||
return (int)length;
|
||||
}
|
||||
#endif
|
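When DTOA_ENABLED is 0, the snprintf() path above has to guarantee that the output still reads back as a JSON real. A compact sketch of that ".0"-appending step, written as a hypothetical format_real() helper rather than the internal jsonp_dtostr():

    #include <stdio.h>
    #include <string.h>

    /* Hypothetical helper: format with up to 17 significant digits and append
       ".0" when neither '.' nor 'e' is present, so the value round-trips as a
       real instead of an integer. Assumes the C locale decimal point '.'. */
    static int format_real(char *buffer, size_t size, double value) {
        int ret = snprintf(buffer, size, "%.17g", value);
        size_t length;

        if (ret < 0 || (size_t)ret >= size)
            return -1;
        length = (size_t)ret;

        if (!strchr(buffer, '.') && !strchr(buffer, 'e')) {
            if (length + 3 >= size)
                return -1; /* no room for ".0" */
            buffer[length] = '.';
            buffer[length + 1] = '0';
            buffer[length + 2] = '\0';
            length += 2;
        }
        return (int)length;
    }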
src/utf.c (128 changed lines)
@ -1,109 +1,89 @@
|
|||
/*
|
||||
* Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include "utf.h"
|
||||
#include <string.h>
|
||||
|
||||
int utf8_encode(int codepoint, char *buffer, int *size)
|
||||
{
|
||||
if(codepoint < 0)
|
||||
int utf8_encode(int32_t codepoint, char *buffer, size_t *size) {
|
||||
if (codepoint < 0)
|
||||
return -1;
|
||||
else if(codepoint < 0x80)
|
||||
{
|
||||
else if (codepoint < 0x80) {
|
||||
buffer[0] = (char)codepoint;
|
||||
*size = 1;
|
||||
}
|
||||
else if(codepoint < 0x800)
|
||||
{
|
||||
} else if (codepoint < 0x800) {
|
||||
buffer[0] = 0xC0 + ((codepoint & 0x7C0) >> 6);
|
||||
buffer[1] = 0x80 + ((codepoint & 0x03F));
|
||||
*size = 2;
|
||||
}
|
||||
else if(codepoint < 0x10000)
|
||||
{
|
||||
} else if (codepoint < 0x10000) {
|
||||
buffer[0] = 0xE0 + ((codepoint & 0xF000) >> 12);
|
||||
buffer[1] = 0x80 + ((codepoint & 0x0FC0) >> 6);
|
||||
buffer[2] = 0x80 + ((codepoint & 0x003F));
|
||||
*size = 3;
|
||||
}
|
||||
else if(codepoint <= 0x10FFFF)
|
||||
{
|
||||
} else if (codepoint <= 0x10FFFF) {
|
||||
buffer[0] = 0xF0 + ((codepoint & 0x1C0000) >> 18);
|
||||
buffer[1] = 0x80 + ((codepoint & 0x03F000) >> 12);
|
||||
buffer[2] = 0x80 + ((codepoint & 0x000FC0) >> 6);
|
||||
buffer[3] = 0x80 + ((codepoint & 0x00003F));
|
||||
*size = 4;
|
||||
}
|
||||
else
|
||||
} else
|
||||
return -1;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
int utf8_check_first(char byte)
|
||||
{
|
||||
size_t utf8_check_first(char byte) {
|
||||
unsigned char u = (unsigned char)byte;
|
||||
|
||||
if(u < 0x80)
|
||||
if (u < 0x80)
|
||||
return 1;
|
||||
|
||||
if(0x80 <= u && u <= 0xBF) {
|
||||
if (0x80 <= u && u <= 0xBF) {
|
||||
/* second, third or fourth byte of a multi-byte
|
||||
sequence, i.e. a "continuation byte" */
|
||||
return 0;
|
||||
}
|
||||
else if(u == 0xC0 || u == 0xC1) {
|
||||
} else if (u == 0xC0 || u == 0xC1) {
|
||||
/* overlong encoding of an ASCII byte */
|
||||
return 0;
|
||||
}
|
||||
else if(0xC2 <= u && u <= 0xDF) {
|
||||
} else if (0xC2 <= u && u <= 0xDF) {
|
||||
/* 2-byte sequence */
|
||||
return 2;
|
||||
}
|
||||
|
||||
else if(0xE0 <= u && u <= 0xEF) {
|
||||
else if (0xE0 <= u && u <= 0xEF) {
|
||||
/* 3-byte sequence */
|
||||
return 3;
|
||||
}
|
||||
else if(0xF0 <= u && u <= 0xF4) {
|
||||
} else if (0xF0 <= u && u <= 0xF4) {
|
||||
/* 4-byte sequence */
|
||||
return 4;
|
||||
}
|
||||
else { /* u >= 0xF5 */
|
||||
} else { /* u >= 0xF5 */
|
||||
/* Restricted (start of 4-, 5- or 6-byte sequence) or invalid
|
||||
UTF-8 */
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
int utf8_check_full(const char *buffer, int size)
|
||||
{
|
||||
int i, value = 0;
|
||||
size_t utf8_check_full(const char *buffer, size_t size, int32_t *codepoint) {
|
||||
size_t i;
|
||||
int32_t value = 0;
|
||||
unsigned char u = (unsigned char)buffer[0];
|
||||
|
||||
if(size == 2)
|
||||
{
|
||||
if (size == 2) {
|
||||
value = u & 0x1F;
|
||||
}
|
||||
else if(size == 3)
|
||||
{
|
||||
} else if (size == 3) {
|
||||
value = u & 0xF;
|
||||
}
|
||||
else if(size == 4)
|
||||
{
|
||||
} else if (size == 4) {
|
||||
value = u & 0x7;
|
||||
}
|
||||
else
|
||||
} else
|
||||
return 0;
|
||||
|
||||
for(i = 1; i < size; i++)
|
||||
{
|
||||
for (i = 1; i < size; i++) {
|
||||
u = (unsigned char)buffer[i];
|
||||
|
||||
if(u < 0x80 || u > 0xBF) {
|
||||
if (u < 0x80 || u > 0xBF) {
|
||||
/* not a continuation byte */
|
||||
return 0;
|
||||
}
|
||||
|
@ -111,44 +91,64 @@ int utf8_check_full(const char *buffer, int size)
|
|||
value = (value << 6) + (u & 0x3F);
|
||||
}
|
||||
|
||||
if(value > 0x10FFFF) {
|
||||
if (value > 0x10FFFF) {
|
||||
/* not in Unicode range */
|
||||
return 0;
|
||||
}
|
||||
|
||||
else if(0xD800 <= value && value <= 0xDFFF) {
|
||||
else if (0xD800 <= value && value <= 0xDFFF) {
|
||||
/* invalid code point (UTF-16 surrogate halves) */
|
||||
return 0;
|
||||
}
|
||||
|
||||
else if((size == 2 && value < 0x80) ||
|
||||
(size == 3 && value < 0x800) ||
|
||||
(size == 4 && value < 0x10000)) {
|
||||
else if ((size == 2 && value < 0x80) || (size == 3 && value < 0x800) ||
|
||||
(size == 4 && value < 0x10000)) {
|
||||
/* overlong encoding */
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (codepoint)
|
||||
*codepoint = value;
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
int utf8_check_string(const char *string, int length)
|
||||
{
|
||||
int i;
|
||||
const char *utf8_iterate(const char *buffer, size_t bufsize, int32_t *codepoint) {
|
||||
size_t count;
|
||||
int32_t value;
|
||||
|
||||
if(length == -1)
|
||||
length = strlen(string);
|
||||
if (!bufsize)
|
||||
return buffer;
|
||||
|
||||
for(i = 0; i < length; i++)
|
||||
{
|
||||
int count = utf8_check_first(string[i]);
|
||||
if(count == 0)
|
||||
count = utf8_check_first(buffer[0]);
|
||||
if (count <= 0)
|
||||
return NULL;
|
||||
|
||||
if (count == 1)
|
||||
value = (unsigned char)buffer[0];
|
||||
else {
|
||||
if (count > bufsize || !utf8_check_full(buffer, count, &value))
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (codepoint)
|
||||
*codepoint = value;
|
||||
|
||||
return buffer + count;
|
||||
}
|
||||
|
||||
int utf8_check_string(const char *string, size_t length) {
|
||||
size_t i;
|
||||
|
||||
for (i = 0; i < length; i++) {
|
||||
size_t count = utf8_check_first(string[i]);
|
||||
if (count == 0)
|
||||
return 0;
|
||||
else if(count > 1)
|
||||
{
|
||||
if(i + count > length)
|
||||
else if (count > 1) {
|
||||
if (count > length - i)
|
||||
return 0;
|
||||
|
||||
if(!utf8_check_full(&string[i], count))
|
||||
if (!utf8_check_full(&string[i], count, NULL))
|
||||
return 0;
|
||||
|
||||
i += count - 1;
|
||||
|
|
src/utf.h (20 changed lines)
@ -1,5 +1,5 @@
|
|||
/*
|
||||
* Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
|
@ -8,11 +8,21 @@
|
|||
#ifndef UTF_H
|
||||
#define UTF_H
|
||||
|
||||
int utf8_encode(int codepoint, char *buffer, int *size);
|
||||
#ifdef HAVE_CONFIG_H
|
||||
#include <jansson_private_config.h>
|
||||
#endif
|
||||
|
||||
int utf8_check_first(char byte);
|
||||
int utf8_check_full(const char *buffer, int size);
|
||||
#include <stddef.h>
|
||||
#ifdef HAVE_STDINT_H
|
||||
#include <stdint.h>
|
||||
#endif
|
||||
|
||||
int utf8_check_string(const char *string, int length);
|
||||
int utf8_encode(int32_t codepoint, char *buffer, size_t *size);
|
||||
|
||||
size_t utf8_check_first(char byte);
|
||||
size_t utf8_check_full(const char *buffer, size_t size, int32_t *codepoint);
|
||||
const char *utf8_iterate(const char *buffer, size_t size, int32_t *codepoint);
|
||||
|
||||
int utf8_check_string(const char *string, size_t length);
|
||||
|
||||
#endif
|
||||
|
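utf8_iterate(), declared above, is an internal helper, but its contract is easy to see from a sketch; the count_codepoints() function below is illustrative and assumes it is compiled inside the source tree where "utf.h" is visible:

    #include <stdint.h>
    #include <stddef.h>
    #include "utf.h" /* internal header, not installed */

    /* Walk a buffer one code point at a time; utf8_iterate() returns the
       position just past the current sequence, or NULL on invalid UTF-8. */
    static int count_codepoints(const char *buffer, size_t size) {
        int count = 0;
        int32_t codepoint;

        while (size) {
            const char *next = utf8_iterate(buffer, size, &codepoint);
            if (!next)
                return -1; /* invalid byte sequence */
            size -= (size_t)(next - buffer);
            buffer = next;
            count++;
        }
        return count;
    }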
|
src/util.h (deleted, 13 lines)

/*
 * Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
 *
 * Jansson is free software; you can redistribute it and/or modify
 * it under the terms of the MIT license. See LICENSE for details.
 */

#ifndef UTIL_H
#define UTIL_H

#define max(a, b) ((a) > (b) ? (a) : (b))

#endif

src/value.c (1146 changed lines; file diff suppressed because it is too large)
src/version.c (new file, 28 lines)

/*
 * Copyright (c) 2019 Sean Bright <sean.bright@gmail.com>
 *
 * Jansson is free software; you can redistribute it and/or modify
 * it under the terms of the MIT license. See LICENSE for details.
 */

#ifndef _GNU_SOURCE
#define _GNU_SOURCE
#endif

#include "jansson.h"

const char *jansson_version_str(void) { return JANSSON_VERSION; }

int jansson_version_cmp(int major, int minor, int micro) {
    int diff;

    if ((diff = JANSSON_MAJOR_VERSION - major)) {
        return diff;
    }

    if ((diff = JANSSON_MINOR_VERSION - minor)) {
        return diff;
    }

    return JANSSON_MICRO_VERSION - micro;
}
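The two functions added in src/version.c above expose both a human-readable version string and a numeric comparison. A minimal run-time check could look like this; the 2.13.0 threshold is only an example:

    #include <jansson.h>
    #include <stdio.h>

    int main(void) {
        printf("linked against Jansson %s\n", jansson_version_str());

        /* A negative result means the linked library is older than 2.13.0. */
        if (jansson_version_cmp(2, 13, 0) < 0) {
            fprintf(stderr, "Jansson >= 2.13.0 required\n");
            return 1;
        }
        return 0;
    }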
test/.gitignore (28 changed lines)
@@ -1,7 +1,21 @@
loadf_dumpf
loads_dumps
load_file_dump_file
testlogs
testprogs/test_array
testprogs/test_number
testprogs/test_object
logs
bin/json_process
suites/api/test_array
suites/api/test_chaos
suites/api/test_copy
suites/api/test_cpp
suites/api/test_dump
suites/api/test_dump_callback
suites/api/test_equal
suites/api/test_fixed_size
suites/api/test_load
suites/api/test_load_callback
suites/api/test_loadb
suites/api/test_memory_funcs
suites/api/test_number
suites/api/test_object
suites/api/test_pack
suites/api/test_simple
suites/api/test_sprintf
suites/api/test_unpack
suites/api/test_version
||||
|
|
|
@ -1,22 +1,10 @@
|
|||
DIST_SUBDIRS = testprogs testdata
|
||||
SUBDIRS = testprogs
|
||||
SUBDIRS = bin suites ossfuzz
|
||||
EXTRA_DIST = scripts run-suites
|
||||
|
||||
check_PROGRAMS = loadf_dumpf loads_dumps load_file_dump_file
|
||||
|
||||
AM_CPPFLAGS = -I$(top_srcdir)/src
|
||||
AM_CFLAGS = -Wall -Werror
|
||||
LDFLAGS = -static # for speed and Valgrind
|
||||
LDADD = ../src/libjansson.la
|
||||
|
||||
TESTS = test-api test-invalid test-valid
|
||||
|
||||
EXTRA_DIST = \
|
||||
test-api \
|
||||
test-invalid \
|
||||
test-valid \
|
||||
run-test \
|
||||
json-compare.py \
|
||||
split-testfile.py
|
||||
TESTS = run-suites
|
||||
TESTS_ENVIRONMENT = \
|
||||
top_srcdir=$(top_srcdir) \
|
||||
top_builddir=$(top_builddir)
|
||||
|
||||
clean-local:
|
||||
rm -rf testlogs
|
||||
rm -rf logs
|
||||
|
|
5
test/bin/Makefile.am
Normal file
5
test/bin/Makefile.am
Normal file
|
@ -0,0 +1,5 @@
|
|||
check_PROGRAMS = json_process
|
||||
|
||||
AM_CPPFLAGS = -I$(top_builddir)/src -I$(top_srcdir)/src
|
||||
LDFLAGS = -static # for speed and Valgrind
|
||||
LDADD = $(top_builddir)/src/libjansson.la
|
261
test/bin/json_process.c
Normal file
261
test/bin/json_process.c
Normal file
|
@ -0,0 +1,261 @@
|
|||
/*
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
#include <jansson_private_config.h>
|
||||
#endif
|
||||
|
||||
#include <ctype.h>
|
||||
#include <jansson.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
#ifdef HAVE_LOCALE_H
|
||||
#include <locale.h>
|
||||
#endif
|
||||
|
||||
#if _WIN32
|
||||
#include <fcntl.h> /* for _O_BINARY */
|
||||
#include <io.h> /* for _setmode() */
|
||||
|
||||
static const char dir_sep = '\\';
|
||||
#else
|
||||
static const char dir_sep = '/';
|
||||
#endif
|
||||
|
||||
struct config {
|
||||
int indent;
|
||||
int compact;
|
||||
int preserve_order;
|
||||
int ensure_ascii;
|
||||
int sort_keys;
|
||||
int strip;
|
||||
int have_hashseed;
|
||||
int hashseed;
|
||||
int precision;
|
||||
} conf;
|
||||
|
||||
#define l_isspace(c) ((c) == ' ' || (c) == '\n' || (c) == '\r' || (c) == '\t')
|
||||
|
||||
/* Return a pointer to the first non-whitespace character of str.
|
||||
Modifies str so that all trailing whitespace characters are
|
||||
replaced by '\0'. */
|
||||
static const char *strip(char *str) {
|
||||
size_t length;
|
||||
char *result = str;
|
||||
while (*result && l_isspace(*result))
|
||||
result++;
|
||||
|
||||
length = strlen(result);
|
||||
if (length == 0)
|
||||
return result;
|
||||
|
||||
while (l_isspace(result[length - 1]))
|
||||
result[--length] = '\0';
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
static char *loadfile(FILE *file) {
|
||||
size_t fsize, ret;
|
||||
char *buf;
|
||||
|
||||
fseek(file, 0, SEEK_END);
|
||||
fsize = ftell(file);
|
||||
fseek(file, 0, SEEK_SET);
|
||||
|
||||
buf = malloc(fsize + 1);
|
||||
ret = fread(buf, 1, fsize, file);
|
||||
if (ret != fsize)
|
||||
exit(1);
|
||||
buf[fsize] = '\0';
|
||||
|
||||
return buf;
|
||||
}
|
||||
|
||||
static void read_conf(FILE *conffile) {
|
||||
char *buffer, *line, *val;
|
||||
conf.have_hashseed = 0;
|
||||
|
||||
buffer = loadfile(conffile);
|
||||
for (line = strtok(buffer, "\r\n"); line; line = strtok(NULL, "\r\n")) {
|
||||
val = strchr(line, '=');
|
||||
if (!val) {
|
||||
printf("invalid configuration line\n");
|
||||
break;
|
||||
}
|
||||
*val++ = '\0';
|
||||
|
||||
if (!strcmp(line, "JSON_INDENT"))
|
||||
conf.indent = atoi(val);
|
||||
if (!strcmp(line, "JSON_COMPACT"))
|
||||
conf.compact = atoi(val);
|
||||
if (!strcmp(line, "JSON_ENSURE_ASCII"))
|
||||
conf.ensure_ascii = atoi(val);
|
||||
if (!strcmp(line, "JSON_PRESERVE_ORDER"))
|
||||
conf.preserve_order = atoi(val);
|
||||
if (!strcmp(line, "JSON_SORT_KEYS"))
|
||||
conf.sort_keys = atoi(val);
|
||||
if (!strcmp(line, "JSON_REAL_PRECISION"))
|
||||
conf.precision = atoi(val);
|
||||
if (!strcmp(line, "STRIP"))
|
||||
conf.strip = atoi(val);
|
||||
if (!strcmp(line, "HASHSEED")) {
|
||||
conf.have_hashseed = 1;
|
||||
conf.hashseed = atoi(val);
|
||||
}
|
||||
}
|
||||
|
||||
free(buffer);
|
||||
}
|
||||
|
||||
static int cmpfile(const char *str, const char *path, const char *fname) {
|
||||
char filename[1024], *buffer;
|
||||
int ret;
|
||||
FILE *file;
|
||||
|
||||
sprintf(filename, "%s%c%s", path, dir_sep, fname);
|
||||
file = fopen(filename, "rb");
|
||||
if (!file) {
|
||||
if (conf.strip)
|
||||
strcat(filename, ".strip");
|
||||
else
|
||||
strcat(filename, ".normal");
|
||||
file = fopen(filename, "rb");
|
||||
}
|
||||
if (!file) {
|
||||
printf("Error: test result file could not be opened.\n");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
buffer = loadfile(file);
|
||||
if (strcmp(buffer, str) != 0) {
|
||||
fprintf(stderr, "=== Expected %s ===\n", fname);
|
||||
fprintf(stderr, "%s\n", buffer);
|
||||
fprintf(stderr, "=== Actual %s ===\n", fname);
|
||||
fprintf(stderr, "%s\n", str);
|
||||
ret = 1;
|
||||
} else {
|
||||
ret = 0;
|
||||
}
|
||||
|
||||
free(buffer);
|
||||
fclose(file);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int use_conf(char *test_path) {
|
||||
int ret;
|
||||
size_t flags = 0;
|
||||
char filename[1024], errstr[1024];
|
||||
char *buffer;
|
||||
FILE *infile, *conffile;
|
||||
json_t *json;
|
||||
json_error_t error;
|
||||
|
||||
sprintf(filename, "%s%cinput", test_path, dir_sep);
|
||||
if (!(infile = fopen(filename, "rb"))) {
|
||||
fprintf(stderr, "Could not open \"%s\"\n", filename);
|
||||
return 2;
|
||||
}
|
||||
|
||||
sprintf(filename, "%s%cenv", test_path, dir_sep);
|
||||
conffile = fopen(filename, "rb");
|
||||
if (conffile) {
|
||||
read_conf(conffile);
|
||||
fclose(conffile);
|
||||
}
|
||||
|
||||
if (conf.indent < 0 || conf.indent > 31) {
|
||||
fprintf(stderr, "invalid value for JSON_INDENT: %d\n", conf.indent);
|
||||
fclose(infile);
|
||||
return 2;
|
||||
}
|
||||
if (conf.indent)
|
||||
flags |= JSON_INDENT(conf.indent);
|
||||
|
||||
if (conf.compact)
|
||||
flags |= JSON_COMPACT;
|
||||
|
||||
if (conf.ensure_ascii)
|
||||
flags |= JSON_ENSURE_ASCII;
|
||||
|
||||
if (conf.preserve_order)
|
||||
flags |= JSON_PRESERVE_ORDER;
|
||||
|
||||
if (conf.sort_keys)
|
||||
flags |= JSON_SORT_KEYS;
|
||||
|
||||
if (conf.precision < 0 || conf.precision > 31) {
|
||||
fprintf(stderr, "invalid value for JSON_REAL_PRECISION: %d\n", conf.precision);
|
||||
fclose(infile);
|
||||
return 2;
|
||||
}
|
||||
if (conf.precision)
|
||||
flags |= JSON_REAL_PRECISION(conf.precision);
|
||||
|
||||
if (conf.have_hashseed)
|
||||
json_object_seed(conf.hashseed);
|
||||
|
||||
if (conf.strip) {
|
||||
/* Load to memory, strip leading and trailing whitespace */
|
||||
buffer = loadfile(infile);
|
||||
json = json_loads(strip(buffer), 0, &error);
|
||||
free(buffer);
|
||||
} else {
|
||||
json = json_loadf(infile, 0, &error);
|
||||
}
|
||||
|
||||
fclose(infile);
|
||||
|
||||
if (!json) {
|
||||
sprintf(errstr, "%d %d %d\n%s\n", error.line, error.column, error.position,
|
||||
error.text);
|
||||
|
||||
ret = cmpfile(errstr, test_path, "error");
|
||||
return ret;
|
||||
}
|
||||
|
||||
buffer = json_dumps(json, flags);
|
||||
ret = cmpfile(buffer, test_path, "output");
|
||||
free(buffer);
|
||||
json_decref(json);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int main(int argc, char *argv[]) {
|
||||
int i;
|
||||
char *test_path = NULL;
|
||||
|
||||
#ifdef HAVE_SETLOCALE
|
||||
setlocale(LC_ALL, "");
|
||||
#endif
|
||||
|
||||
if (argc < 2) {
|
||||
goto usage;
|
||||
}
|
||||
|
||||
for (i = 1; i < argc; i++) {
|
||||
if (!strcmp(argv[i], "--strip"))
|
||||
conf.strip = 1;
|
||||
else
|
||||
test_path = argv[i];
|
||||
}
|
||||
|
||||
if (!test_path) {
|
||||
goto usage;
|
||||
}
|
||||
|
||||
return use_conf(test_path);
|
||||
|
||||
usage:
|
||||
fprintf(stderr, "usage: %s [--strip] test_dir\n", argv[0]);
|
||||
return 2;
|
||||
}
|
|
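The env file parsed by read_conf() above is a plain KEY=value list, one setting per line. A hypothetical test configuration using the keys recognised above might be:

    JSON_INDENT=4
    JSON_SORT_KEYS=1
    JSON_REAL_PRECISION=17
    STRIP=1
    HASHSEED=42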
@ -1,42 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
#
|
||||
# Jansson is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the MIT license. See LICENSE for details.
|
||||
|
||||
import simplejson
|
||||
import sys
|
||||
|
||||
def load(filename):
|
||||
try:
|
||||
jsonfile = open(filename)
|
||||
except IOError, err:
|
||||
print >>sys.stderr, "unable to load %s: %s" % \
|
||||
(filename, err.strerror)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
json = simplejson.load(jsonfile)
|
||||
except ValueError, err:
|
||||
print "%s is malformed: %s" % (filename, err)
|
||||
sys.exit(1)
|
||||
finally:
|
||||
jsonfile.close()
|
||||
|
||||
return json
|
||||
|
||||
def main():
|
||||
if len(sys.argv) != 3:
|
||||
print >>sys.stderr, "usage: %s json1 json2" % sys.argv[0]
|
||||
return 2
|
||||
|
||||
json1 = load(sys.argv[1])
|
||||
json2 = load(sys.argv[2])
|
||||
if json1 == json2:
|
||||
return 0
|
||||
else:
|
||||
return 1
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main() or 0)
|
|
@ -1,31 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include <stdio.h>
|
||||
#include <jansson.h>
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
json_t *json;
|
||||
json_error_t error;
|
||||
|
||||
if(argc != 3) {
|
||||
fprintf(stderr, "usage: %s infile outfile\n", argv[0]);
|
||||
return 2;
|
||||
}
|
||||
|
||||
json = json_load_file(argv[1], &error);
|
||||
if(!json) {
|
||||
fprintf(stderr, "%d\n%s\n", error.line, error.text);
|
||||
return 1;
|
||||
}
|
||||
|
||||
json_dump_file(json, argv[2], 0);
|
||||
json_decref(json);
|
||||
|
||||
return 0;
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include <stdio.h>
|
||||
#include <jansson.h>
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
json_t *json;
|
||||
json_error_t error;
|
||||
|
||||
if(argc != 1) {
|
||||
fprintf(stderr, "usage: %s\n", argv[0]);
|
||||
return 2;
|
||||
}
|
||||
|
||||
json = json_loadf(stdin, &error);
|
||||
if(!json) {
|
||||
fprintf(stderr, "%d\n%s\n", error.line, error.text);
|
||||
return 1;
|
||||
}
|
||||
|
||||
/* loadf_dumpf indents, others don't, so dumping with and without
|
||||
indenting is tested */
|
||||
json_dumpf(json, stdout, JSON_INDENT(4));
|
||||
json_decref(json);
|
||||
|
||||
return 0;
|
||||
}
|
|
@ -1,47 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <jansson.h>
|
||||
|
||||
#define BUFFER_SIZE (256 * 1024)
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
json_t *json;
|
||||
json_error_t error;
|
||||
int count;
|
||||
char buffer[BUFFER_SIZE];
|
||||
char *result;
|
||||
|
||||
if(argc != 1) {
|
||||
fprintf(stderr, "usage: %s\n", argv[0]);
|
||||
return 2;
|
||||
}
|
||||
|
||||
count = fread(buffer, 1, BUFFER_SIZE, stdin);
|
||||
if(count < 0 || count >= BUFFER_SIZE) {
|
||||
fprintf(stderr, "unable to read input\n");
|
||||
return 1;
|
||||
}
|
||||
buffer[count] = '\0';
|
||||
|
||||
json = json_loads(buffer, &error);
|
||||
if(!json) {
|
||||
fprintf(stderr, "%d\n%s\n", error.line, error.text);
|
||||
return 1;
|
||||
}
|
||||
|
||||
result = json_dumps(json, 0);
|
||||
json_decref(json);
|
||||
|
||||
puts(result);
|
||||
free(result);
|
||||
|
||||
return 0;
|
||||
}
|
test/ossfuzz/.gitignore (new file, 1 line)

json_load_dump_fuzzer
test/ossfuzz/Makefile.am (new file, 32 lines)
@ -0,0 +1,32 @@
|
|||
AM_CPPFLAGS = -I$(top_builddir)/src -I$(top_srcdir)/src
|
||||
LDADD = $(top_builddir)/src/libjansson.la
|
||||
|
||||
if USE_OSSFUZZ_FLAG
|
||||
FUZZ_FLAG = $(LIB_FUZZING_ENGINE)
|
||||
else
|
||||
if USE_OSSFUZZ_STATIC
|
||||
LDADD += $(LIB_FUZZING_ENGINE)
|
||||
FUZZ_FLAG =
|
||||
else
|
||||
LDADD += libstandaloneengine.a
|
||||
FUZZ_FLAG =
|
||||
endif
|
||||
endif
|
||||
|
||||
noinst_PROGRAMS =
|
||||
noinst_LIBRARIES =
|
||||
|
||||
if USE_OSSFUZZERS
|
||||
noinst_PROGRAMS += \
|
||||
json_load_dump_fuzzer
|
||||
|
||||
noinst_LIBRARIES += \
|
||||
libstandaloneengine.a
|
||||
endif
|
||||
|
||||
json_load_dump_fuzzer_SOURCES = json_load_dump_fuzzer.cc testinput.h
|
||||
json_load_dump_fuzzer_CXXFLAGS = $(AM_CXXFLAGS) $(FUZZ_FLAG)
|
||||
json_load_dump_fuzzer_LDFLAGS = $(AM_LDFLAGS) -static
|
||||
|
||||
libstandaloneengine_a_SOURCES = standaloneengine.cc
|
||||
libstandaloneengine_a_CXXFLAGS = $(AM_CXXFLAGS)
|
test/ossfuzz/json_load_dump_fuzzer.cc (new file, 132 lines)
@ -0,0 +1,132 @@
|
|||
#include <stdint.h>
|
||||
#include <stdlib.h>
|
||||
#include <sys/types.h>
|
||||
#include <inttypes.h>
|
||||
|
||||
#include "jansson.h"
|
||||
|
||||
static int enable_diags;
|
||||
|
||||
#define FUZZ_DEBUG(FMT, ...) \
|
||||
if (enable_diags) \
|
||||
{ \
|
||||
fprintf(stderr, FMT, ##__VA_ARGS__); \
|
||||
fprintf(stderr, "\n"); \
|
||||
}
|
||||
|
||||
|
||||
static int json_dump_counter(const char *buffer, size_t size, void *data)
|
||||
{
|
||||
uint64_t *counter = reinterpret_cast<uint64_t *>(data);
|
||||
*counter += size;
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
#define NUM_COMMAND_BYTES (sizeof(size_t) + sizeof(size_t) + 1)
|
||||
|
||||
#define FUZZ_DUMP_CALLBACK 0x00
|
||||
#define FUZZ_DUMP_STRING 0x01
|
||||
|
||||
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size)
|
||||
{
|
||||
json_error_t error;
|
||||
unsigned char dump_mode;
|
||||
|
||||
// Enable or disable diagnostics based on the FUZZ_VERBOSE environment flag.
|
||||
enable_diags = (getenv("FUZZ_VERBOSE") != NULL);
|
||||
|
||||
FUZZ_DEBUG("Input data length: %zd", size);
|
||||
|
||||
if (size < NUM_COMMAND_BYTES)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Use the first sizeof(size_t) bytes as load flags.
|
||||
size_t load_flags = *(const size_t*)data;
|
||||
data += sizeof(size_t);
|
||||
|
||||
FUZZ_DEBUG("load_flags: 0x%zx\n"
|
||||
"& JSON_REJECT_DUPLICATES = 0x%zx\n"
|
||||
"& JSON_DECODE_ANY = 0x%zx\n"
|
||||
"& JSON_DISABLE_EOF_CHECK = 0x%zx\n"
|
||||
"& JSON_DECODE_INT_AS_REAL = 0x%zx\n"
|
||||
"& JSON_ALLOW_NUL = 0x%zx\n",
|
||||
load_flags,
|
||||
load_flags & JSON_REJECT_DUPLICATES,
|
||||
load_flags & JSON_DECODE_ANY,
|
||||
load_flags & JSON_DISABLE_EOF_CHECK,
|
||||
load_flags & JSON_DECODE_INT_AS_REAL,
|
||||
load_flags & JSON_ALLOW_NUL);
|
||||
|
||||
// Use the next sizeof(size_t) bytes as dump flags.
|
||||
size_t dump_flags = *(const size_t*)data;
|
||||
data += sizeof(size_t);
|
||||
|
||||
FUZZ_DEBUG("dump_flags: 0x%zx\n"
|
||||
"& JSON_MAX_INDENT = 0x%zx\n"
|
||||
"& JSON_COMPACT = 0x%zx\n"
|
||||
"& JSON_ENSURE_ASCII = 0x%zx\n"
|
||||
"& JSON_SORT_KEYS = 0x%zx\n"
|
||||
"& JSON_PRESERVE_ORDER = 0x%zx\n"
|
||||
"& JSON_ENCODE_ANY = 0x%zx\n"
|
||||
"& JSON_ESCAPE_SLASH = 0x%zx\n"
|
||||
"& JSON_REAL_PRECISION = 0x%zx\n"
|
||||
"& JSON_EMBED = 0x%zx\n",
|
||||
dump_flags,
|
||||
dump_flags & JSON_MAX_INDENT,
|
||||
dump_flags & JSON_COMPACT,
|
||||
dump_flags & JSON_ENSURE_ASCII,
|
||||
dump_flags & JSON_SORT_KEYS,
|
||||
dump_flags & JSON_PRESERVE_ORDER,
|
||||
dump_flags & JSON_ENCODE_ANY,
|
||||
dump_flags & JSON_ESCAPE_SLASH,
|
||||
((dump_flags >> 11) & 0x1F) << 11,
|
||||
dump_flags & JSON_EMBED);
|
||||
|
||||
// Use the next byte as the dump mode.
|
||||
dump_mode = data[0];
|
||||
data++;
|
||||
|
||||
FUZZ_DEBUG("dump_mode: 0x%x", (unsigned int)dump_mode);
|
||||
|
||||
// Remove the command bytes from the size total.
|
||||
size -= NUM_COMMAND_BYTES;
|
||||
|
||||
// Attempt to load the remainder of the data with the given load flags.
|
||||
const char* text = reinterpret_cast<const char *>(data);
|
||||
json_t* jobj = json_loadb(text, size, load_flags, &error);
|
||||
|
||||
if (jobj == NULL)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (dump_mode & FUZZ_DUMP_STRING)
|
||||
{
|
||||
// Dump as a string. Remove indents so that we don't run out of memory.
|
||||
char *out = json_dumps(jobj, dump_flags & ~JSON_MAX_INDENT);
|
||||
if (out != NULL)
|
||||
{
|
||||
free(out);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Default is callback mode.
|
||||
//
|
||||
// Attempt to dump the loaded json object with the given dump flags.
|
||||
uint64_t counter = 0;
|
||||
|
||||
json_dump_callback(jobj, json_dump_counter, &counter, dump_flags);
|
||||
FUZZ_DEBUG("Counter function counted %" PRIu64 " bytes.", counter);
|
||||
}
|
||||
|
||||
if (jobj)
|
||||
{
|
||||
json_decref(jobj);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
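The fuzzer above consumes NUM_COMMAND_BYTES of control data before the JSON text: the load flags, the dump flags, then one mode byte. A small sketch that writes such a seed input by hand (host-endian, assuming a 64-bit size_t; the file name seed.bin is arbitrary):

    #include <stdio.h>
    #include <string.h>

    int main(void) {
        size_t load_flags = 0;     /* no JSON_* load flags */
        size_t dump_flags = 0;     /* no JSON_* dump flags */
        unsigned char mode = 0x01; /* FUZZ_DUMP_STRING */
        const char *json = "{\"a\": [1, 2, 3]}";
        FILE *f = fopen("seed.bin", "wb");

        if (!f)
            return 1;
        fwrite(&load_flags, sizeof load_flags, 1, f);
        fwrite(&dump_flags, sizeof dump_flags, 1, f);
        fwrite(&mode, 1, 1, f);
        fwrite(json, 1, strlen(json), f);
        fclose(f);
        return 0;
    }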
test/ossfuzz/ossfuzz.sh (new executable file, 30 lines)
@ -0,0 +1,30 @@
|
|||
#!/bin/bash -eu
|
||||
|
||||
# This script is called by the oss-fuzz main project when compiling the fuzz
|
||||
# targets. This script is regression tested by travisoss.sh.
|
||||
|
||||
# Save off the current folder as the build root.
|
||||
export BUILD_ROOT=$PWD
|
||||
|
||||
echo "CC: $CC"
|
||||
echo "CXX: $CXX"
|
||||
echo "LIB_FUZZING_ENGINE: $LIB_FUZZING_ENGINE"
|
||||
echo "CFLAGS: $CFLAGS"
|
||||
echo "CXXFLAGS: $CXXFLAGS"
|
||||
echo "OUT: $OUT"
|
||||
|
||||
export MAKEFLAGS+="-j$(nproc)"
|
||||
|
||||
# Install dependencies
|
||||
apt-get -y install automake libtool
|
||||
|
||||
# Compile the fuzzer.
|
||||
autoreconf -i
|
||||
./configure --enable-ossfuzzers
|
||||
make
|
||||
|
||||
# Copy the fuzzer to the output directory.
|
||||
cp -v test/ossfuzz/json_load_dump_fuzzer $OUT/
|
||||
|
||||
# Zip up all input files to use as a test corpus
|
||||
find test/suites -name "input" -print | zip $OUT/json_load_dump_fuzzer_seed_corpus.zip -@
|
test/ossfuzz/standaloneengine.cc (new file, 74 lines)
@ -0,0 +1,74 @@
|
|||
#include <stdint.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#include "testinput.h"
|
||||
|
||||
/**
|
||||
* Main procedure for standalone fuzzing engine.
|
||||
*
|
||||
* Reads filenames from the argument array. For each filename, read the file
|
||||
* into memory and then call the fuzzing interface with the data.
|
||||
*/
|
||||
int main(int argc, char **argv)
|
||||
{
|
||||
int ii;
|
||||
for(ii = 1; ii < argc; ii++)
|
||||
{
|
||||
FILE *infile;
|
||||
printf("[%s] ", argv[ii]);
|
||||
|
||||
/* Try and open the file. */
|
||||
infile = fopen(argv[ii], "rb");
|
||||
if(infile)
|
||||
{
|
||||
uint8_t *buffer = NULL;
|
||||
size_t buffer_len;
|
||||
|
||||
printf("Opened.. ");
|
||||
|
||||
/* Get the length of the file. */
|
||||
fseek(infile, 0L, SEEK_END);
|
||||
buffer_len = ftell(infile);
|
||||
|
||||
/* Reset the file indicator to the beginning of the file. */
|
||||
fseek(infile, 0L, SEEK_SET);
|
||||
|
||||
/* Allocate a buffer for the file contents. */
|
||||
buffer = (uint8_t *)calloc(buffer_len, sizeof(uint8_t));
|
||||
if(buffer)
|
||||
{
|
||||
/* Read all the text from the file into the buffer. */
|
||||
fread(buffer, sizeof(uint8_t), buffer_len, infile);
|
||||
printf("Read %zu bytes, fuzzing.. ", buffer_len);
|
||||
|
||||
/* Call the fuzzer with the data. */
|
||||
LLVMFuzzerTestOneInput(buffer, buffer_len);
|
||||
|
||||
printf("complete !!");
|
||||
|
||||
/* Free the buffer as it's no longer needed. */
|
||||
free(buffer);
|
||||
buffer = NULL;
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr,
|
||||
"[%s] Failed to allocate %zu bytes \n",
|
||||
argv[ii],
|
||||
buffer_len);
|
||||
}
|
||||
|
||||
/* Close the file as it's no longer needed. */
|
||||
fclose(infile);
|
||||
infile = NULL;
|
||||
}
|
||||
else
|
||||
{
|
||||
/* Failed to open the file. Maybe wrong name or wrong permissions? */
|
||||
fprintf(stderr, "[%s] Open failed. \n", argv[ii]);
|
||||
}
|
||||
|
||||
printf("\n");
|
||||
}
|
||||
}
|
test/ossfuzz/testinput.h (new file, 3 lines)

#include <inttypes.h>

extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size);
test/run-suites (new executable file, 50 lines)
@ -0,0 +1,50 @@
|
|||
#!/bin/sh
|
||||
|
||||
while [ -n "$1" ]; do
|
||||
suite=$1
|
||||
if [ -x $top_srcdir/test/suites/$suite/run ]; then
|
||||
SUITES="$SUITES $suite"
|
||||
else
|
||||
echo "No such suite: $suite"
|
||||
exit 1
|
||||
fi
|
||||
shift
|
||||
done
|
||||
|
||||
if [ -z "$SUITES" ]; then
|
||||
suitedirs=$top_srcdir/test/suites/*
|
||||
for suitedir in $suitedirs; do
|
||||
if [ -d $suitedir ]; then
|
||||
SUITES="$SUITES `basename $suitedir`"
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
[ -z "$STOP" ] && STOP=0
|
||||
|
||||
suites_srcdir=$top_srcdir/test/suites
|
||||
suites_builddir=suites
|
||||
scriptdir=$top_srcdir/test/scripts
|
||||
logdir=logs
|
||||
bindir=bin
|
||||
export suites_srcdir suites_builddir scriptdir logdir bindir
|
||||
|
||||
passed=0
|
||||
failed=0
|
||||
for suite in $SUITES; do
|
||||
echo "Suite: $suite"
|
||||
if $suites_srcdir/$suite/run $suite; then
|
||||
passed=`expr $passed + 1`
|
||||
else
|
||||
failed=`expr $failed + 1`
|
||||
[ $STOP -eq 1 ] && break
|
||||
fi
|
||||
done
|
||||
|
||||
if [ $failed -gt 0 ]; then
|
||||
echo "$failed of `expr $passed + $failed` test suites failed"
|
||||
exit 1
|
||||
else
|
||||
echo "$passed test suites passed"
|
||||
rm -rf $logdir
|
||||
fi
|
|
@ -1,54 +0,0 @@
|
|||
# Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
#
|
||||
# Jansson is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the MIT license. See LICENSE for details.
|
||||
|
||||
VALGRIND_CMDLINE="valgrind --leak-check=full --show-reachable=yes --track-origins=yes -q"
|
||||
|
||||
run_testprog() {
|
||||
local prog=$1
|
||||
local prefix=$2
|
||||
if [ -n "$VALGRIND" ]; then
|
||||
local runner="$VALGRIND_CMDLINE "
|
||||
fi
|
||||
|
||||
case "$prog" in
|
||||
load_file_dump_file)
|
||||
$runner./$prog \
|
||||
$prefix.in \
|
||||
$prefix.$prog.stdout \
|
||||
2>$prefix.$prog.stderr
|
||||
;;
|
||||
*)
|
||||
$runner./$prog \
|
||||
<$prefix.in \
|
||||
>$prefix.$prog.stdout \
|
||||
2>$prefix.$prog.stderr
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -n "$VALGRIND" ]; then
|
||||
# Check for Valgrind error output. The valgrind option
|
||||
# --error-exitcode is not enough because Valgrind doesn't
|
||||
# think unfreed allocs are errors.
|
||||
if grep -E -q '^==[0-9]+== ' $prefix.$prog.stderr; then
|
||||
echo "### $prefix ($prog) failed:" >&2
|
||||
echo "valgrind detected an error" >&2
|
||||
echo "for details, see test/$prefix.$prog.stderr" >&2
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
for testfile in $TESTFILES; do
|
||||
tmpdir="testlogs/`basename $testfile`"
|
||||
rm -rf $tmpdir
|
||||
mkdir -p $tmpdir
|
||||
${srcdir}/split-testfile.py $testfile $tmpdir | while read name; do
|
||||
run_test loadf_dumpf $tmpdir/$name
|
||||
run_test loads_dumps $tmpdir/$name
|
||||
run_test load_file_dump_file $tmpdir/$name
|
||||
echo -n '.'
|
||||
done || exit 1
|
||||
echo
|
||||
done
|
test/scripts/run-tests.sh (new file, 100 lines)
@ -0,0 +1,100 @@
|
|||
# Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
#
|
||||
# Jansson is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the MIT license. See LICENSE for details.
|
||||
|
||||
die() {
|
||||
echo "$1" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
[ -n "$1" ] || die "Usage: $0 suite-name"
|
||||
[ -n "$bindir" ] || die "Set bindir"
|
||||
[ -n "$logdir" ] || die "Set logdir"
|
||||
[ -n "$scriptdir" ] || die "Set scriptdir"
|
||||
[ -n "$suites_srcdir" ] || die "Set suites_srcdir"
|
||||
[ -n "$suites_builddir" ] || die "Set suites_builddir"
|
||||
|
||||
json_process=$bindir/json_process
|
||||
|
||||
suite_name=$1
|
||||
suite_srcdir=$suites_srcdir/$suite_name
|
||||
suite_builddir=$suites_builddir/$suite_name
|
||||
suite_log=$logdir/$suite_name
|
||||
|
||||
[ -z "$VERBOSE" ] && VERBOSE=0
|
||||
[ -z "$STOP" ] && STOP=0
|
||||
|
||||
. $scriptdir/valgrind.sh
|
||||
|
||||
rm -rf $suite_log
|
||||
mkdir -p $suite_log
|
||||
|
||||
for test_path in $suite_srcdir/*; do
|
||||
test_name=$(basename $test_path)
|
||||
test_builddir=$suite_builddir/$test_name
|
||||
test_log=$suite_log/$test_name
|
||||
|
||||
[ "$test_name" = "run" ] && continue
|
||||
is_test || continue
|
||||
|
||||
rm -rf $test_log
|
||||
mkdir -p $test_log
|
||||
if [ $VERBOSE -eq 1 ]; then
|
||||
printf '%s... ' "$test_name"
|
||||
fi
|
||||
|
||||
run_test
|
||||
case $? in
|
||||
0)
|
||||
# Success
|
||||
if [ $VERBOSE -eq 1 ]; then
|
||||
printf 'ok\n'
|
||||
else
|
||||
printf '.'
|
||||
fi
|
||||
rm -rf $test_log
|
||||
;;
|
||||
|
||||
77)
|
||||
# Skip
|
||||
if [ $VERBOSE -eq 1 ]; then
|
||||
printf 'skipped\n'
|
||||
else
|
||||
printf 'S'
|
||||
fi
|
||||
rm -rf $test_log
|
||||
;;
|
||||
|
||||
*)
|
||||
# Failure
|
||||
if [ $VERBOSE -eq 1 ]; then
|
||||
printf 'FAILED\n'
|
||||
else
|
||||
printf 'F'
|
||||
fi
|
||||
|
||||
[ $STOP -eq 1 ] && break
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ $VERBOSE -eq 0 ]; then
|
||||
printf '\n'
|
||||
fi
|
||||
|
||||
if [ -n "$(ls -A $suite_log)" ]; then
|
||||
for test_log in $suite_log/*; do
|
||||
test_name=$(basename $test_log)
|
||||
test_path=$suite_srcdir/$test_name
|
||||
echo "================================================================="
|
||||
echo "$suite_name/$test_name"
|
||||
echo "================================================================="
|
||||
show_error
|
||||
echo
|
||||
done
|
||||
echo "================================================================="
|
||||
exit 1
|
||||
else
|
||||
rm -rf $suite_log
|
||||
fi
|
test/scripts/valgrind.sh (new file, 35 lines)
@ -0,0 +1,35 @@
|
|||
# Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
#
|
||||
# Jansson is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the MIT license. See LICENSE for details.
|
||||
|
||||
[ -z "$VALGRIND" ] && VALGRIND=0
|
||||
|
||||
VALGRIND_CMDLINE="valgrind --leak-check=full --show-reachable=yes --track-origins=yes -q"
|
||||
|
||||
if [ $VALGRIND -eq 1 ]; then
|
||||
test_runner="$VALGRIND_CMDLINE"
|
||||
json_process="$VALGRIND_CMDLINE $json_process"
|
||||
else
|
||||
test_runner=""
|
||||
fi
|
||||
|
||||
valgrind_check() {
|
||||
if [ $VALGRIND -eq 1 ]; then
|
||||
# Check for Valgrind error output. The valgrind option
|
||||
# --error-exitcode is not enough because Valgrind doesn't
|
||||
# think unfreed allocs are errors.
|
||||
if grep -E -q '^==[0-9]+== ' $1; then
|
||||
touch $test_log/valgrind_error
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
valgrind_show_error() {
|
||||
if [ $VALGRIND -eq 1 -a -f $test_log/valgrind_error ]; then
|
||||
echo "valgrind detected an error"
|
||||
return 0
|
||||
fi
|
||||
return 1
|
||||
}
|
|
@ -1,54 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright (c) 2009 Petri Lehtinen <petri@digip.org>
|
||||
#
|
||||
# Jansson is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the MIT license. See LICENSE for details.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
def open_files(outdir, i, name):
|
||||
basename = '%02d_%s' % (i, name)
|
||||
print basename
|
||||
input_path = os.path.join(outdir, basename + '.in')
|
||||
output_path = os.path.join(outdir, basename + '.out')
|
||||
return open(input_path, 'w'), open(output_path, 'w')
|
||||
|
||||
def main():
|
||||
if len(sys.argv) != 3:
|
||||
print 'usage: %s input-file output-directory' % sys.argv[0]
|
||||
return 2
|
||||
|
||||
infile = os.path.normpath(sys.argv[1])
|
||||
outdir = os.path.normpath(sys.argv[2])
|
||||
|
||||
if not os.path.exists(outdir):
|
||||
print >>sys.stderr, 'output directory %r does not exist!' % outdir
|
||||
return 1
|
||||
|
||||
n = 0
|
||||
current = None
|
||||
input, output = None, None
|
||||
|
||||
for line in open(infile):
|
||||
if line.startswith('==== '):
|
||||
n += 1
|
||||
if input is not None and output is not None:
|
||||
input.close()
|
||||
output.close()
|
||||
input, output = open_files(outdir, n, line[5:line.find(' ====\n')])
|
||||
current = input
|
||||
elif line == '====\n':
|
||||
current = output
|
||||
else:
|
||||
current.write(line)
|
||||
|
||||
if input is not None and output is not None:
|
||||
input.close()
|
||||
output.close()
|
||||
|
||||
print >>sys.stderr, "%s: %d test cases" % (infile, n)
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main() or 0)
|
test/suites/.gitattributes (new file, 2 lines)

api/ text=auto
* text eol=lf

@@ -1 +1,2 @@
SUBDIRS = api
EXTRA_DIST = invalid invalid-unicode valid
test/suites/api/Makefile.am (new file, 42 lines)
@ -0,0 +1,42 @@
|
|||
EXTRA_DIST = run check-exports
|
||||
|
||||
check_PROGRAMS = \
|
||||
test_array \
|
||||
test_chaos \
|
||||
test_copy \
|
||||
test_dump \
|
||||
test_dump_callback \
|
||||
test_equal \
|
||||
test_fixed_size \
|
||||
test_load \
|
||||
test_load_callback \
|
||||
test_loadb \
|
||||
test_memory_funcs \
|
||||
test_number \
|
||||
test_object \
|
||||
test_pack \
|
||||
test_simple \
|
||||
test_sprintf \
|
||||
test_unpack \
|
||||
test_version
|
||||
|
||||
test_array_SOURCES = test_array.c util.h
|
||||
test_chaos_SOURCES = test_chaos.c util.h
|
||||
test_copy_SOURCES = test_copy.c util.h
|
||||
test_dump_SOURCES = test_dump.c util.h
|
||||
test_dump_callback_SOURCES = test_dump_callback.c util.h
|
||||
test_fixed_size_SOURCES = test_fixed_size.c util.h
|
||||
test_load_SOURCES = test_load.c util.h
|
||||
test_loadb_SOURCES = test_loadb.c util.h
|
||||
test_memory_funcs_SOURCES = test_memory_funcs.c util.h
|
||||
test_number_SOURCES = test_number.c util.h
|
||||
test_object_SOURCES = test_object.c util.h
|
||||
test_pack_SOURCES = test_pack.c util.h
|
||||
test_simple_SOURCES = test_simple.c util.h
|
||||
test_sprintf_SOURCES = test_sprintf.c util.h
|
||||
test_unpack_SOURCES = test_unpack.c util.h
|
||||
test_version_SOURCES = test_version.c util.h
|
||||
|
||||
AM_CPPFLAGS = -I$(top_builddir)/src -I$(top_srcdir)/src
|
||||
LDFLAGS = -static # for speed and Valgrind
|
||||
LDADD = $(top_builddir)/src/libjansson.la
|
test/suites/api/check-exports (new executable file, 23 lines)
@ -0,0 +1,23 @@
|
|||
#!/bin/sh
|
||||
#
|
||||
# This test checks that libjansson.so exports the correct symbols.
|
||||
#
|
||||
|
||||
SOFILE="../src/.libs/libjansson.so"
|
||||
|
||||
# The list of symbols, which the shared object should export, is read
|
||||
# from the def file, which is used in Windows builds
|
||||
grep 'json_\|jansson_' $top_srcdir/src/jansson.def \
|
||||
| sed -e 's/ //g' \
|
||||
| sort \
|
||||
>$test_log/exports
|
||||
|
||||
nm -D $SOFILE >/dev/null >$test_log/symbols 2>/dev/null \
|
||||
|| exit 77 # Skip if "nm -D" doesn't seem to work
|
||||
|
||||
grep ' [DT] ' $test_log/symbols | cut -d' ' -f3 | grep -v '^_' | sed 's/@@libjansson.*//' | sort >$test_log/output
|
||||
|
||||
if ! cmp -s $test_log/exports $test_log/output; then
|
||||
diff -u $test_log/exports $test_log/output >&2
|
||||
exit 1
|
||||
fi
|
test/suites/api/run (new executable file, 36 lines)
@ -0,0 +1,36 @@
|
|||
#!/bin/sh
|
||||
#
|
||||
# Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
#
|
||||
# Jansson is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the MIT license. See LICENSE for details.
|
||||
|
||||
is_test() {
|
||||
case "$test_name" in
|
||||
*.c|check-exports)
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
run_test() {
|
||||
if [ "$test_name" = "check-exports" ]; then
|
||||
test_log=$test_log $test_path >$test_log/stdout 2>$test_log/stderr
|
||||
else
|
||||
$test_runner $suite_builddir/${test_name%.c} \
|
||||
>$test_log/stdout \
|
||||
2>$test_log/stderr \
|
||||
|| return 1
|
||||
valgrind_check $test_log/stderr || return 1
|
||||
fi
|
||||
}
|
||||
|
||||
show_error() {
|
||||
valgrind_show_error && return
|
||||
cat $test_log/stderr
|
||||
}
|
||||
|
||||
. $top_srcdir/test/scripts/run-tests.sh
|
test/suites/api/test_array.c (new file, 484 lines)
@ -0,0 +1,484 @@
|
|||
/*
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include "util.h"
|
||||
#include <jansson.h>
|
||||
|
||||
static void test_misc(void) {
|
||||
json_t *array, *five, *seven, *value;
|
||||
size_t i;
|
||||
|
||||
array = json_array();
|
||||
five = json_integer(5);
|
||||
seven = json_integer(7);
|
||||
|
||||
if (!array)
|
||||
fail("unable to create array");
|
||||
if (!five || !seven)
|
||||
fail("unable to create integer");
|
||||
|
||||
if (json_array_size(array) != 0)
|
||||
fail("empty array has nonzero size");
|
||||
|
||||
if (!json_array_append(array, NULL))
|
||||
fail("able to append NULL");
|
||||
|
||||
if (json_array_append(array, five))
|
||||
fail("unable to append");
|
||||
|
||||
if (json_array_size(array) != 1)
|
||||
fail("wrong array size");
|
||||
|
||||
value = json_array_get(array, 0);
|
||||
if (!value)
|
||||
fail("unable to get item");
|
||||
if (value != five)
|
||||
fail("got wrong value");
|
||||
|
||||
if (json_array_append(array, seven))
|
||||
fail("unable to append value");
|
||||
|
||||
if (json_array_size(array) != 2)
|
||||
fail("wrong array size");
|
||||
|
||||
value = json_array_get(array, 1);
|
||||
if (!value)
|
||||
fail("unable to get item");
|
||||
if (value != seven)
|
||||
fail("got wrong value");
|
||||
|
||||
if (json_array_set(array, 0, seven))
|
||||
fail("unable to set value");
|
||||
|
||||
if (!json_array_set(array, 0, NULL))
|
||||
fail("able to set NULL");
|
||||
|
||||
if (json_array_size(array) != 2)
|
||||
fail("wrong array size");
|
||||
|
||||
value = json_array_get(array, 0);
|
||||
if (!value)
|
||||
fail("unable to get item");
|
||||
if (value != seven)
|
||||
fail("got wrong value");
|
||||
|
||||
if (json_array_get(array, 2) != NULL)
|
||||
fail("able to get value out of bounds");
|
||||
|
||||
if (!json_array_set(array, 2, seven))
|
||||
fail("able to set value out of bounds");
|
||||
|
||||
for (i = 2; i < 30; i++) {
|
||||
if (json_array_append(array, seven))
|
||||
fail("unable to append value");
|
||||
|
||||
if (json_array_size(array) != i + 1)
|
||||
fail("wrong array size");
|
||||
}
|
||||
|
||||
for (i = 0; i < 30; i++) {
|
||||
value = json_array_get(array, i);
|
||||
if (!value)
|
||||
fail("unable to get item");
|
||||
if (value != seven)
|
||||
fail("got wrong value");
|
||||
}
|
||||
|
||||
if (json_array_set_new(array, 15, json_integer(123)))
|
||||
fail("unable to set new value");
|
||||
|
||||
value = json_array_get(array, 15);
|
||||
if (!json_is_integer(value) || json_integer_value(value) != 123)
|
||||
fail("json_array_set_new works incorrectly");
|
||||
|
||||
if (!json_array_set_new(array, 15, NULL))
|
||||
fail("able to set_new NULL value");
|
||||
|
||||
if (json_array_append_new(array, json_integer(321)))
|
||||
fail("unable to append new value");
|
||||
|
||||
value = json_array_get(array, json_array_size(array) - 1);
|
||||
if (!json_is_integer(value) || json_integer_value(value) != 321)
|
||||
fail("json_array_append_new works incorrectly");
|
||||
|
||||
if (!json_array_append_new(array, NULL))
|
||||
fail("able to append_new NULL value");
|
||||
|
||||
json_decref(five);
|
||||
json_decref(seven);
|
||||
json_decref(array);
|
||||
}
|
||||
|
||||
static void test_insert(void) {
|
||||
json_t *array, *five, *seven, *eleven, *value;
|
||||
int i;
|
||||
|
||||
array = json_array();
|
||||
five = json_integer(5);
|
||||
seven = json_integer(7);
|
||||
eleven = json_integer(11);
|
||||
|
||||
if (!array)
|
||||
fail("unable to create array");
|
||||
if (!five || !seven || !eleven)
|
||||
fail("unable to create integer");
|
||||
|
||||
if (!json_array_insert(array, 1, five))
|
||||
fail("able to insert value out of bounds");
|
||||
|
||||
if (json_array_insert(array, 0, five))
|
||||
fail("unable to insert value in an empty array");
|
||||
|
||||
if (json_array_get(array, 0) != five)
|
||||
fail("json_array_insert works incorrectly");
|
||||
|
||||
if (json_array_size(array) != 1)
|
||||
fail("array size is invalid after insertion");
|
||||
|
||||
if (json_array_insert(array, 1, seven))
|
||||
fail("unable to insert value at the end of an array");
|
||||
|
||||
if (json_array_get(array, 0) != five)
|
||||
fail("json_array_insert works incorrectly");
|
||||
|
||||
if (json_array_get(array, 1) != seven)
|
||||
fail("json_array_insert works incorrectly");
|
||||
|
||||
if (json_array_size(array) != 2)
|
||||
fail("array size is invalid after insertion");
|
||||
|
||||
if (json_array_insert(array, 1, eleven))
|
||||
fail("unable to insert value in the middle of an array");
|
||||
|
||||
if (json_array_get(array, 0) != five)
|
||||
fail("json_array_insert works incorrectly");
|
||||
|
||||
if (json_array_get(array, 1) != eleven)
|
||||
fail("json_array_insert works incorrectly");
|
||||
|
||||
if (json_array_get(array, 2) != seven)
|
||||
fail("json_array_insert works incorrectly");
|
||||
|
||||
if (json_array_size(array) != 3)
|
||||
fail("array size is invalid after insertion");
|
||||
|
||||
if (json_array_insert_new(array, 2, json_integer(123)))
|
||||
fail("unable to insert value in the middle of an array");
|
||||
|
||||
value = json_array_get(array, 2);
|
||||
if (!json_is_integer(value) || json_integer_value(value) != 123)
|
||||
fail("json_array_insert_new works incorrectly");
|
||||
|
||||
if (json_array_size(array) != 4)
|
||||
fail("array size is invalid after insertion");
|
||||
|
||||
for (i = 0; i < 20; i++) {
|
||||
if (json_array_insert(array, 0, seven))
|
||||
fail("unable to insert value at the beginning of an array");
|
||||
}
|
||||
|
||||
for (i = 0; i < 20; i++) {
|
||||
if (json_array_get(array, i) != seven)
|
||||
fail("json_aray_insert works incorrectly");
|
||||
}
|
||||
|
||||
if (json_array_size(array) != 24)
|
||||
fail("array size is invalid after loop insertion");
|
||||
|
||||
json_decref(five);
|
||||
json_decref(seven);
|
||||
json_decref(eleven);
|
||||
json_decref(array);
|
||||
}
|
||||
|
||||
static void test_remove(void) {
|
||||
json_t *array, *five, *seven;
|
||||
int i;
|
||||
|
||||
array = json_array();
|
||||
five = json_integer(5);
|
||||
seven = json_integer(7);
|
||||
|
||||
if (!array)
|
||||
fail("unable to create array");
|
||||
if (!five)
|
||||
fail("unable to create integer");
|
||||
if (!seven)
|
||||
fail("unable to create integer");
|
||||
|
||||
if (!json_array_remove(array, 0))
|
||||
fail("able to remove an unexisting index");
|
||||
|
||||
if (json_array_append(array, five))
|
||||
fail("unable to append");
|
||||
|
||||
if (!json_array_remove(array, 1))
|
||||
fail("able to remove an unexisting index");
|
||||
|
||||
if (json_array_remove(array, 0))
|
||||
fail("unable to remove");
|
||||
|
||||
if (json_array_size(array) != 0)
|
||||
fail("array size is invalid after removing");
|
||||
|
||||
if (json_array_append(array, five) || json_array_append(array, seven) ||
|
||||
json_array_append(array, five) || json_array_append(array, seven))
|
||||
fail("unable to append");
|
||||
|
||||
if (json_array_remove(array, 2))
|
||||
fail("unable to remove");
|
||||
|
||||
if (json_array_size(array) != 3)
|
||||
fail("array size is invalid after removing");
|
||||
|
||||
if (json_array_get(array, 0) != five || json_array_get(array, 1) != seven ||
|
||||
json_array_get(array, 2) != seven)
|
||||
fail("remove works incorrectly");
|
||||
|
||||
json_decref(array);
|
||||
|
||||
array = json_array();
|
||||
for (i = 0; i < 4; i++) {
|
||||
json_array_append(array, five);
|
||||
json_array_append(array, seven);
|
||||
}
|
||||
if (json_array_size(array) != 8)
|
||||
fail("unable to append 8 items to array");
|
||||
|
||||
/* Remove an element from a "full" array. */
|
||||
json_array_remove(array, 5);
|
||||
|
||||
json_decref(five);
|
||||
json_decref(seven);
|
||||
json_decref(array);
|
||||
}
|
||||
|
||||
static void test_clear(void) {
|
||||
json_t *array, *five, *seven;
|
||||
int i;
|
||||
|
||||
array = json_array();
|
||||
five = json_integer(5);
|
||||
seven = json_integer(7);
|
||||
|
||||
if (!array)
|
||||
fail("unable to create array");
|
||||
if (!five || !seven)
|
||||
fail("unable to create integer");
|
||||
|
||||
for (i = 0; i < 10; i++) {
|
||||
if (json_array_append(array, five))
|
||||
fail("unable to append");
|
||||
}
|
||||
for (i = 0; i < 10; i++) {
|
||||
if (json_array_append(array, seven))
|
||||
fail("unable to append");
|
||||
}
|
||||
|
||||
if (json_array_size(array) != 20)
|
||||
fail("array size is invalid after appending");
|
||||
|
||||
if (json_array_clear(array))
|
||||
fail("unable to clear");
|
||||
|
||||
if (json_array_size(array) != 0)
|
||||
fail("array size is invalid after clearing");
|
||||
|
||||
json_decref(five);
|
||||
json_decref(seven);
|
||||
json_decref(array);
|
||||
}
|
||||
|
||||
static void test_extend(void) {
|
||||
json_t *array1, *array2, *five, *seven;
|
||||
int i;
|
||||
|
||||
array1 = json_array();
|
||||
array2 = json_array();
|
||||
five = json_integer(5);
|
||||
seven = json_integer(7);
|
||||
|
||||
if (!array1 || !array2)
|
||||
fail("unable to create array");
|
||||
if (!five || !seven)
|
||||
fail("unable to create integer");
|
||||
|
||||
for (i = 0; i < 10; i++) {
|
||||
if (json_array_append(array1, five))
|
||||
fail("unable to append");
|
||||
}
|
||||
for (i = 0; i < 10; i++) {
|
||||
if (json_array_append(array2, seven))
|
||||
fail("unable to append");
|
||||
}
|
||||
|
||||
if (json_array_size(array1) != 10 || json_array_size(array2) != 10)
|
||||
fail("array size is invalid after appending");
|
||||
|
||||
if (json_array_extend(array1, array2))
|
||||
fail("unable to extend");
|
||||
|
||||
for (i = 0; i < 10; i++) {
|
||||
if (json_array_get(array1, i) != five)
|
||||
fail("invalid array contents after extending");
|
||||
}
|
||||
for (i = 10; i < 20; i++) {
|
||||
if (json_array_get(array1, i) != seven)
|
||||
fail("invalid array contents after extending");
|
||||
}
|
||||
|
||||
json_decref(five);
|
||||
json_decref(seven);
|
||||
json_decref(array1);
|
||||
json_decref(array2);
|
||||
}
|
||||
|
||||
static void test_circular() {
|
||||
json_t *array1, *array2;
|
||||
|
||||
/* simple cases: an array must not be appended, inserted or set into itself */
|
||||
|
||||
array1 = json_array();
|
||||
if (!array1)
|
||||
fail("unable to create array");
|
||||
|
||||
if (json_array_append(array1, array1) == 0)
|
||||
fail("able to append self");
|
||||
|
||||
if (json_array_insert(array1, 0, array1) == 0)
|
||||
fail("able to insert self");
|
||||
|
||||
if (json_array_append_new(array1, json_true()))
|
||||
fail("failed to append true");
|
||||
|
||||
if (json_array_set(array1, 0, array1) == 0)
|
||||
fail("able to set self");
|
||||
|
||||
json_decref(array1);
|
||||
|
||||
/* create circular references */
|
||||
|
||||
array1 = json_array();
|
||||
array2 = json_array();
|
||||
if (!array1 || !array2)
|
||||
fail("unable to create array");
|
||||
|
||||
if (json_array_append(array1, array2) || json_array_append(array2, array1))
|
||||
fail("unable to append");
|
||||
|
||||
/* circularity is detected when dumping */
|
||||
if (json_dumps(array1, 0) != NULL)
|
||||
fail("able to dump circulars");
|
||||
|
||||
/* decref twice to deal with the circular references */
|
||||
json_decref(array1);
|
||||
json_decref(array2);
|
||||
json_decref(array1);
|
||||
}
|
||||
|
||||
static void test_array_foreach() {
|
||||
size_t index;
|
||||
json_t *array1, *array2, *value;
|
||||
|
||||
array1 = json_pack("[sisisi]", "foo", 1, "bar", 2, "baz", 3);
|
||||
array2 = json_array();
|
||||
|
||||
json_array_foreach(array1, index, value) { json_array_append(array2, value); }
|
||||
|
||||
if (!json_equal(array1, array2))
|
||||
fail("json_array_foreach failed to iterate all elements");
|
||||
|
||||
json_decref(array1);
|
||||
json_decref(array2);
|
||||
}
|
||||
|
||||
static void test_bad_args(void) {
|
||||
json_t *arr = json_array();
|
||||
json_t *num = json_integer(1);
|
||||
|
||||
if (!arr || !num)
|
||||
fail("failed to create required objects");
|
||||
|
||||
if (json_array_size(NULL) != 0)
|
||||
fail("NULL array has nonzero size");
|
||||
if (json_array_size(num) != 0)
|
||||
fail("non-array has nonzero array size");
|
||||
|
||||
if (json_array_get(NULL, 0))
|
||||
fail("json_array_get did not return NULL for non-array");
|
||||
if (json_array_get(num, 0))
|
||||
fail("json_array_get did not return NULL for non-array");
|
||||
|
||||
if (!json_array_set_new(NULL, 0, json_incref(num)))
|
||||
fail("json_array_set_new did not return error for non-array");
|
||||
if (!json_array_set_new(num, 0, json_incref(num)))
|
||||
fail("json_array_set_new did not return error for non-array");
|
||||
if (!json_array_set_new(arr, 0, NULL))
|
||||
fail("json_array_set_new did not return error for NULL value");
|
||||
if (!json_array_set_new(arr, 0, json_incref(arr)))
|
||||
fail("json_array_set_new did not return error for value == array");
|
||||
|
||||
if (!json_array_remove(NULL, 0))
|
||||
fail("json_array_remove did not return error for non-array");
|
||||
if (!json_array_remove(num, 0))
|
||||
fail("json_array_remove did not return error for non-array");
|
||||
|
||||
if (!json_array_clear(NULL))
|
||||
fail("json_array_clear did not return error for non-array");
|
||||
if (!json_array_clear(num))
|
||||
fail("json_array_clear did not return error for non-array");
|
||||
|
||||
if (!json_array_append_new(NULL, json_incref(num)))
|
||||
fail("json_array_append_new did not return error for non-array");
|
||||
if (!json_array_append_new(num, json_incref(num)))
|
||||
fail("json_array_append_new did not return error for non-array");
|
||||
if (!json_array_append_new(arr, NULL))
|
||||
fail("json_array_append_new did not return error for NULL value");
|
||||
if (!json_array_append_new(arr, json_incref(arr)))
|
||||
fail("json_array_append_new did not return error for value == array");
|
||||
|
||||
if (!json_array_insert_new(NULL, 0, json_incref(num)))
|
||||
fail("json_array_insert_new did not return error for non-array");
|
||||
if (!json_array_insert_new(num, 0, json_incref(num)))
|
||||
fail("json_array_insert_new did not return error for non-array");
|
||||
if (!json_array_insert_new(arr, 0, NULL))
|
||||
fail("json_array_insert_new did not return error for NULL value");
|
||||
if (!json_array_insert_new(arr, 0, json_incref(arr)))
|
||||
fail("json_array_insert_new did not return error for value == array");
|
||||
|
||||
if (!json_array_extend(NULL, arr))
|
||||
fail("json_array_extend did not return error for first argument "
|
||||
"non-array");
|
||||
if (!json_array_extend(num, arr))
|
||||
fail("json_array_extend did not return error for first argument "
|
||||
"non-array");
|
||||
if (!json_array_extend(arr, NULL))
|
||||
fail("json_array_extend did not return error for second argument "
|
||||
"non-array");
|
||||
if (!json_array_extend(arr, num))
|
||||
fail("json_array_extend did not return error for second argument "
|
||||
"non-array");
|
||||
|
||||
if (num->refcount != 1)
|
||||
fail("unexpected reference count on num");
|
||||
if (arr->refcount != 1)
|
||||
fail("unexpected reference count on arr");
|
||||
|
||||
json_decref(num);
|
||||
json_decref(arr);
|
||||
}
|
||||
|
||||
static void run_tests() {
|
||||
test_misc();
|
||||
test_insert();
|
||||
test_remove();
|
||||
test_clear();
|
||||
test_extend();
|
||||
test_circular();
|
||||
test_array_foreach();
|
||||
test_bad_args();
|
||||
}
|
test/suites/api/test_chaos.c (new file, 168 lines)
@@ -0,0 +1,168 @@
#ifndef _GNU_SOURCE
|
||||
#define _GNU_SOURCE
|
||||
#endif
|
||||
|
||||
#include "util.h"
|
||||
#include <jansson.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
|
||||
static int chaos_pos = 0;
|
||||
static int chaos_fail = 0;
|
||||
#define CHAOS_MAX_FAILURE 100
|
||||
|
||||
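/* Failure-injecting allocator: the chaos_fail'th allocation (counted by
   chaos_pos) returns NULL, every other call falls through to malloc(). */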
void *chaos_malloc(size_t size) {
|
||||
if (chaos_pos == chaos_fail)
|
||||
return NULL;
|
||||
|
||||
chaos_pos++;
|
||||
|
||||
return malloc(size);
|
||||
}
|
||||
|
||||
void chaos_free(void *obj) { free(obj); }
|
||||
|
||||
/* Test all potential allocation failures. */
|
||||
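/* Re-runs `code` with the failure point (chaos_fail) moved one allocation
   further on each pass, until `condition` turns false (i.e. the operation
   finally succeeds); `cleanup` runs once after the loop. */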
#define chaos_loop(condition, code, cleanup) \
|
||||
{ \
|
||||
chaos_pos = chaos_fail = 0; \
|
||||
while (condition) { \
|
||||
if (chaos_fail > CHAOS_MAX_FAILURE) \
|
||||
fail("too many chaos failures"); \
|
||||
code chaos_pos = 0; \
|
||||
chaos_fail++; \
|
||||
} \
|
||||
cleanup \
|
||||
}
|
||||
|
||||
#define chaos_loop_new_value(json, initcall) \
|
||||
chaos_loop(!json, json = initcall;, json_decref(json); json = NULL;)
|
||||
|
||||
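/* Strict unpack exercised under the chaos loop: returns 0 only once the
   expected "items left unpacked" error is produced; out-of-memory results
   return -1 so the chaos loop keeps running. */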
int test_unpack() {
|
||||
int ret = -1;
|
||||
int v1;
|
||||
int v2;
|
||||
json_error_t error;
|
||||
json_t *root = json_pack("{s:i, s:i, s:i, s:i}", "n1", 1, "n2", 2, "n3", 3, "n4", 4);
|
||||
|
||||
if (!root)
|
||||
return -1;
|
||||
|
||||
if (!json_unpack_ex(root, &error, JSON_STRICT, "{s:i, s:i}", "n1", &v1, "n2", &v2))
|
||||
fail("Unexpected success");
|
||||
|
||||
if (json_error_code(&error) != json_error_end_of_input_expected) {
|
||||
if (json_error_code(&error) != json_error_out_of_memory)
|
||||
fail("Unexpected error code");
|
||||
|
||||
goto out;
|
||||
}
|
||||
|
||||
if (strcmp(error.text, "2 object item(s) left unpacked: n3, n4"))
|
||||
goto out;
|
||||
|
||||
ret = 0;
|
||||
|
||||
out:
|
||||
json_decref(root);
|
||||
return ret;
|
||||
}
|
||||
|
||||
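/* Dump callback that performs an allocation of its own, so an injected
   malloc failure also propagates as a callback error (-1). */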
int dump_chaos_callback(const char *buffer, size_t size, void *data) {
|
||||
json_t *obj = json_object();
|
||||
|
||||
(void)buffer;
|
||||
(void)size;
|
||||
(void)data;
|
||||
|
||||
if (!obj)
|
||||
return -1;
|
||||
|
||||
json_decref(obj);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void test_chaos() {
|
||||
json_malloc_t orig_malloc;
|
||||
json_free_t orig_free;
|
||||
json_t *json = NULL;
|
||||
json_t *obj = json_object();
|
||||
json_t *arr1 = json_array();
|
||||
json_t *arr2 = json_array();
|
||||
json_t *txt = json_string("test");
|
||||
json_t *intnum = json_integer(1);
|
||||
json_t *dblnum = json_real(0.5);
|
||||
char *dumptxt = NULL;
|
||||
json_t *dumpobj = json_pack("{s:[iiis], s:s}", "key1", 1, 2, 3, "txt", "key2", "v2");
|
||||
int keyno;
|
||||
|
||||
if (!obj || !arr1 || !arr2 || !txt || !intnum || !dblnum || !dumpobj)
|
||||
fail("failed to allocate basic objects");
|
||||
|
||||
json_get_alloc_funcs(&orig_malloc, &orig_free);
|
||||
json_set_alloc_funcs(chaos_malloc, chaos_free);
|
||||
|
||||
chaos_loop_new_value(json, json_pack("{s:s}", "key", "value"));
|
||||
chaos_loop_new_value(json, json_pack("{s:[]}", "key"));
|
||||
chaos_loop_new_value(json, json_pack("[biIf]", 1, 1, (json_int_t)1, 1.0));
|
||||
chaos_loop_new_value(json, json_pack("[s*,s*]", "v1", "v2"));
|
||||
chaos_loop_new_value(json, json_pack("o", json_incref(txt)));
|
||||
chaos_loop_new_value(json, json_pack("O", txt));
|
||||
chaos_loop_new_value(json, json_pack("s++", "a", "long string to force realloc",
|
||||
"another long string to force yet another "
|
||||
"reallocation of the string because "
|
||||
"that's what we are testing."));
|
||||
|
||||
chaos_loop(test_unpack(), , );
|
||||
|
||||
chaos_loop(json_dump_callback(dumpobj, dump_chaos_callback, NULL, JSON_INDENT(1)),
|
||||
, );
|
||||
chaos_loop(json_dump_callback(dumpobj, dump_chaos_callback, NULL,
|
||||
JSON_INDENT(1) | JSON_SORT_KEYS),
|
||||
, );
|
||||
chaos_loop(!dumptxt, dumptxt = json_dumps(dumpobj, JSON_COMPACT);, free(dumptxt);
|
||||
dumptxt = NULL;);
|
||||
|
||||
chaos_loop_new_value(json, json_copy(obj));
|
||||
chaos_loop_new_value(json, json_deep_copy(obj));
|
||||
|
||||
chaos_loop_new_value(json, json_copy(arr1));
|
||||
chaos_loop_new_value(json, json_deep_copy(arr1));
|
||||
|
||||
chaos_loop_new_value(json, json_copy(txt));
|
||||
chaos_loop_new_value(json, json_copy(intnum));
|
||||
chaos_loop_new_value(json, json_copy(dblnum));
|
||||
|
||||
#define JSON_LOAD_TXT "{\"n\":[1,2,3,4,5,6,7,8,9,10]}"
|
||||
chaos_loop_new_value(json, json_loads(JSON_LOAD_TXT, 0, NULL));
|
||||
chaos_loop_new_value(json, json_loadb(JSON_LOAD_TXT, strlen(JSON_LOAD_TXT), 0, NULL));
|
||||
|
||||
chaos_loop_new_value(json, json_sprintf("%s", "string"));
|
||||
|
||||
for (keyno = 0; keyno < 100; ++keyno) {
|
||||
#if !defined(_MSC_VER) || _MSC_VER >= 1900
|
||||
/* Skip this test on old Windows compilers. */
|
||||
char testkey[10];
|
||||
|
||||
snprintf(testkey, sizeof(testkey), "test%d", keyno);
|
||||
chaos_loop(json_object_set_new_nocheck(obj, testkey, json_object()), , );
|
||||
#endif
|
||||
chaos_loop(json_array_append_new(arr1, json_null()), , );
|
||||
chaos_loop(json_array_insert_new(arr2, 0, json_null()), , );
|
||||
}
|
||||
|
||||
chaos_loop(json_array_extend(arr1, arr2), , );
|
||||
chaos_loop(json_string_set_nocheck(txt, "test"), , );
|
||||
|
||||
json_set_alloc_funcs(orig_malloc, orig_free);
|
||||
json_decref(obj);
|
||||
json_decref(arr1);
|
||||
json_decref(arr2);
|
||||
json_decref(txt);
|
||||
json_decref(intnum);
|
||||
json_decref(dblnum);
|
||||
json_decref(dumpobj);
|
||||
}
|
||||
|
||||
static void run_tests() { test_chaos(); }
|
test/suites/api/test_copy.c (new file, 375 lines)
@@ -0,0 +1,375 @@
/*
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include "util.h"
|
||||
#include <jansson.h>
|
||||
#include <string.h>
|
||||
|
||||
static void test_copy_simple(void) {
|
||||
json_t *value, *copy;
|
||||
|
||||
if (json_copy(NULL))
|
||||
fail("copying NULL doesn't return NULL");
|
||||
|
||||
/* true */
|
||||
value = json_true();
|
||||
copy = json_copy(value);
|
||||
if (value != copy)
|
||||
fail("copying true failed");
|
||||
json_decref(value);
|
||||
json_decref(copy);
|
||||
|
||||
/* false */
|
||||
value = json_false();
|
||||
copy = json_copy(value);
|
||||
if (value != copy)
|
||||
fail("copying false failed");
|
||||
json_decref(value);
|
||||
json_decref(copy);
|
||||
|
||||
/* null */
|
||||
value = json_null();
|
||||
copy = json_copy(value);
|
||||
if (value != copy)
|
||||
fail("copying null failed");
|
||||
json_decref(value);
|
||||
json_decref(copy);
|
||||
|
||||
/* string */
|
||||
value = json_string("foo");
|
||||
if (!value)
|
||||
fail("unable to create a string");
|
||||
copy = json_copy(value);
|
||||
if (!copy)
|
||||
fail("unable to copy a string");
|
||||
if (copy == value)
|
||||
fail("copying a string doesn't copy");
|
||||
if (!json_equal(copy, value))
|
||||
fail("copying a string produces an inequal copy");
|
||||
if (value->refcount != 1 || copy->refcount != 1)
|
||||
fail("invalid refcounts");
|
||||
json_decref(value);
|
||||
json_decref(copy);
|
||||
|
||||
/* integer */
|
||||
value = json_integer(543);
|
||||
if (!value)
|
||||
fail("unable to create an integer");
|
||||
copy = json_copy(value);
|
||||
if (!copy)
|
||||
fail("unable to copy an integer");
|
||||
if (copy == value)
|
||||
fail("copying an integer doesn't copy");
|
||||
if (!json_equal(copy, value))
|
||||
fail("copying an integer produces an inequal copy");
|
||||
if (value->refcount != 1 || copy->refcount != 1)
|
||||
fail("invalid refcounts");
|
||||
json_decref(value);
|
||||
json_decref(copy);
|
||||
|
||||
/* real */
|
||||
value = json_real(123e9);
|
||||
if (!value)
|
||||
fail("unable to create a real");
|
||||
copy = json_copy(value);
|
||||
if (!copy)
|
||||
fail("unable to copy a real");
|
||||
if (copy == value)
|
||||
fail("copying a real doesn't copy");
|
||||
if (!json_equal(copy, value))
|
||||
fail("copying a real produces an inequal copy");
|
||||
if (value->refcount != 1 || copy->refcount != 1)
|
||||
fail("invalid refcounts");
|
||||
json_decref(value);
|
||||
json_decref(copy);
|
||||
}
|
||||
|
||||
static void test_deep_copy_simple(void) {
|
||||
json_t *value, *copy;
|
||||
|
||||
if (json_deep_copy(NULL))
|
||||
fail("deep copying NULL doesn't return NULL");
|
||||
|
||||
/* true */
|
||||
value = json_true();
|
||||
copy = json_deep_copy(value);
|
||||
if (value != copy)
|
||||
fail("deep copying true failed");
|
||||
json_decref(value);
|
||||
json_decref(copy);
|
||||
|
||||
/* false */
|
||||
value = json_false();
|
||||
copy = json_deep_copy(value);
|
||||
if (value != copy)
|
||||
fail("deep copying false failed");
|
||||
json_decref(value);
|
||||
json_decref(copy);
|
||||
|
||||
/* null */
|
||||
value = json_null();
|
||||
copy = json_deep_copy(value);
|
||||
if (value != copy)
|
||||
fail("deep copying null failed");
|
||||
json_decref(value);
|
||||
json_decref(copy);
|
||||
|
||||
/* string */
|
||||
value = json_string("foo");
|
||||
if (!value)
|
||||
fail("unable to create a string");
|
||||
copy = json_deep_copy(value);
|
||||
if (!copy)
|
||||
fail("unable to deep copy a string");
|
||||
if (copy == value)
|
||||
fail("deep copying a string doesn't copy");
|
||||
if (!json_equal(copy, value))
|
||||
fail("deep copying a string produces an inequal copy");
|
||||
if (value->refcount != 1 || copy->refcount != 1)
|
||||
fail("invalid refcounts");
|
||||
json_decref(value);
|
||||
json_decref(copy);
|
||||
|
||||
/* integer */
|
||||
value = json_integer(543);
|
||||
if (!value)
|
||||
fail("unable to create an integer");
|
||||
copy = json_deep_copy(value);
|
||||
if (!copy)
|
||||
fail("unable to deep copy an integer");
|
||||
if (copy == value)
|
||||
fail("deep copying an integer doesn't copy");
|
||||
if (!json_equal(copy, value))
|
||||
fail("deep copying an integer produces an inequal copy");
|
||||
if (value->refcount != 1 || copy->refcount != 1)
|
||||
fail("invalid refcounts");
|
||||
json_decref(value);
|
||||
json_decref(copy);
|
||||
|
||||
/* real */
|
||||
value = json_real(123e9);
|
||||
if (!value)
|
||||
fail("unable to create a real");
|
||||
copy = json_deep_copy(value);
|
||||
if (!copy)
|
||||
fail("unable to deep copy a real");
|
||||
if (copy == value)
|
||||
fail("deep copying a real doesn't copy");
|
||||
if (!json_equal(copy, value))
|
||||
fail("deep copying a real produces an inequal copy");
|
||||
if (value->refcount != 1 || copy->refcount != 1)
|
||||
fail("invalid refcounts");
|
||||
json_decref(value);
|
||||
json_decref(copy);
|
||||
}
|
||||
|
||||
static void test_copy_array(void) {
|
||||
const char *json_array_text = "[1, \"foo\", 3.141592, {\"foo\": \"bar\"}]";
|
||||
|
||||
json_t *array, *copy;
|
||||
size_t i;
|
||||
|
||||
array = json_loads(json_array_text, 0, NULL);
|
||||
if (!array)
|
||||
fail("unable to parse an array");
|
||||
|
||||
copy = json_copy(array);
|
||||
if (!copy)
|
||||
fail("unable to copy an array");
|
||||
if (copy == array)
|
||||
fail("copying an array doesn't copy");
|
||||
if (!json_equal(copy, array))
|
||||
fail("copying an array produces an inequal copy");
|
||||
|
||||
for (i = 0; i < json_array_size(copy); i++) {
|
||||
if (json_array_get(array, i) != json_array_get(copy, i))
|
||||
fail("copying an array modifies its elements");
|
||||
}
|
||||
|
||||
json_decref(array);
|
||||
json_decref(copy);
|
||||
}
|
||||
|
||||
static void test_deep_copy_array(void) {
|
||||
const char *json_array_text = "[1, \"foo\", 3.141592, {\"foo\": \"bar\"}]";
|
||||
|
||||
json_t *array, *copy;
|
||||
size_t i;
|
||||
|
||||
array = json_loads(json_array_text, 0, NULL);
|
||||
if (!array)
|
||||
fail("unable to parse an array");
|
||||
|
||||
copy = json_deep_copy(array);
|
||||
if (!copy)
|
||||
fail("unable to deep copy an array");
|
||||
if (copy == array)
|
||||
fail("deep copying an array doesn't copy");
|
||||
if (!json_equal(copy, array))
|
||||
fail("deep copying an array produces an inequal copy");
|
||||
|
||||
for (i = 0; i < json_array_size(copy); i++) {
|
||||
if (json_array_get(array, i) == json_array_get(copy, i))
|
||||
fail("deep copying an array doesn't copy its elements");
|
||||
}
|
||||
|
||||
json_decref(array);
|
||||
json_decref(copy);
|
||||
}
|
||||
|
||||
static void test_copy_object(void) {
|
||||
const char *json_object_text =
|
||||
"{\"foo\": \"bar\", \"a\": 1, \"b\": 3.141592, \"c\": [1,2,3,4]}";
|
||||
|
||||
const char *keys[] = {"foo", "a", "b", "c"};
|
||||
int i;
|
||||
|
||||
json_t *object, *copy;
|
||||
void *iter;
|
||||
|
||||
object = json_loads(json_object_text, 0, NULL);
|
||||
if (!object)
|
||||
fail("unable to parse an object");
|
||||
|
||||
copy = json_copy(object);
|
||||
if (!copy)
|
||||
fail("unable to copy an object");
|
||||
if (copy == object)
|
||||
fail("copying an object doesn't copy");
|
||||
if (!json_equal(copy, object))
|
||||
fail("copying an object produces an inequal copy");
|
||||
|
||||
i = 0;
|
||||
iter = json_object_iter(object);
|
||||
while (iter) {
|
||||
const char *key;
|
||||
json_t *value1, *value2;
|
||||
|
||||
key = json_object_iter_key(iter);
|
||||
value1 = json_object_iter_value(iter);
|
||||
value2 = json_object_get(copy, key);
|
||||
|
||||
if (value1 != value2)
|
||||
fail("copying an object modifies its items");
|
||||
|
||||
if (strcmp(key, keys[i]) != 0)
|
||||
fail("copying an object doesn't preserve key order");
|
||||
|
||||
iter = json_object_iter_next(object, iter);
|
||||
i++;
|
||||
}
|
||||
|
||||
json_decref(object);
|
||||
json_decref(copy);
|
||||
}
|
||||
|
||||
static void test_deep_copy_object(void) {
|
||||
const char *json_object_text =
|
||||
"{\"foo\": \"bar\", \"a\": 1, \"b\": 3.141592, \"c\": [1,2,3,4]}";
|
||||
|
||||
const char *keys[] = {"foo", "a", "b", "c"};
|
||||
int i;
|
||||
|
||||
json_t *object, *copy;
|
||||
void *iter;
|
||||
|
||||
object = json_loads(json_object_text, 0, NULL);
|
||||
if (!object)
|
||||
fail("unable to parse an object");
|
||||
|
||||
copy = json_deep_copy(object);
|
||||
if (!copy)
|
||||
fail("unable to deep copy an object");
|
||||
if (copy == object)
|
||||
fail("deep copying an object doesn't copy");
|
||||
if (!json_equal(copy, object))
|
||||
fail("deep copying an object produces an inequal copy");
|
||||
|
||||
i = 0;
|
||||
iter = json_object_iter(object);
|
||||
while (iter) {
|
||||
const char *key;
|
||||
json_t *value1, *value2;
|
||||
|
||||
key = json_object_iter_key(iter);
|
||||
value1 = json_object_iter_value(iter);
|
||||
value2 = json_object_get(copy, key);
|
||||
|
||||
if (value1 == value2)
|
||||
fail("deep copying an object doesn't copy its items");
|
||||
|
||||
if (strcmp(key, keys[i]) != 0)
|
||||
fail("deep copying an object doesn't preserve key order");
|
||||
|
||||
iter = json_object_iter_next(object, iter);
|
||||
i++;
|
||||
}
|
||||
|
||||
json_decref(object);
|
||||
json_decref(copy);
|
||||
}
|
||||
|
||||
static void test_deep_copy_circular_references(void) {
|
||||
/* Construct a JSON object/array with a circular reference:
|
||||
|
||||
object: {"a": {"b": {"c": <circular reference to $.a>}}}
|
||||
array: [[[<circular reference to the $[0] array>]]]
|
||||
|
||||
Deep copy it, remove the circular reference and deep copy again.
|
||||
*/
|
||||
|
||||
json_t *json;
|
||||
json_t *copy;
|
||||
|
||||
json = json_object();
|
||||
json_object_set_new(json, "a", json_object());
|
||||
json_object_set_new(json_object_get(json, "a"), "b", json_object());
|
||||
json_object_set(json_object_get(json_object_get(json, "a"), "b"), "c",
|
||||
json_object_get(json, "a"));
|
||||
|
||||
copy = json_deep_copy(json);
|
||||
if (copy)
|
||||
fail("json_deep_copy copied a circular reference!");
|
||||
|
||||
json_object_del(json_object_get(json_object_get(json, "a"), "b"), "c");
|
||||
|
||||
copy = json_deep_copy(json);
|
||||
if (!copy)
|
||||
fail("json_deep_copy failed!");
|
||||
|
||||
json_decref(copy);
|
||||
json_decref(json);
|
||||
|
||||
json = json_array();
|
||||
json_array_append_new(json, json_array());
|
||||
json_array_append_new(json_array_get(json, 0), json_array());
|
||||
json_array_append(json_array_get(json_array_get(json, 0), 0),
|
||||
json_array_get(json, 0));
|
||||
|
||||
copy = json_deep_copy(json);
|
||||
if (copy)
|
||||
fail("json_deep_copy copied a circular reference!");
|
||||
|
||||
json_array_remove(json_array_get(json_array_get(json, 0), 0), 0);
|
||||
|
||||
copy = json_deep_copy(json);
|
||||
if (!copy)
|
||||
fail("json_deep_copy failed!");
|
||||
|
||||
json_decref(copy);
|
||||
json_decref(json);
|
||||
}
|
||||
|
||||
static void run_tests() {
|
||||
test_copy_simple();
|
||||
test_deep_copy_simple();
|
||||
test_copy_array();
|
||||
test_deep_copy_array();
|
||||
test_copy_object();
|
||||
test_deep_copy_object();
|
||||
test_deep_copy_circular_references();
|
||||
}
|
test/suites/api/test_dump.c (new file, 311 lines)
@@ -0,0 +1,311 @@
/*
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include "jansson_private_config.h"
|
||||
|
||||
#include <jansson.h>
|
||||
#include <string.h>
|
||||
#ifdef HAVE_UNISTD_H
|
||||
#include <unistd.h>
|
||||
#endif
|
||||
#include "util.h"
|
||||
#ifdef __MINGW32__
|
||||
#include <fcntl.h>
|
||||
#define pipe(fds) _pipe(fds, 1024, _O_BINARY)
|
||||
#endif
|
||||
|
||||
static int encode_null_callback(const char *buffer, size_t size, void *data) {
|
||||
(void)buffer;
|
||||
(void)size;
|
||||
(void)data;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void encode_null() {
|
||||
if (json_dumps(NULL, JSON_ENCODE_ANY) != NULL)
|
||||
fail("json_dumps didn't fail for NULL");
|
||||
|
||||
if (json_dumpb(NULL, NULL, 0, JSON_ENCODE_ANY) != 0)
|
||||
fail("json_dumpb didn't fail for NULL");
|
||||
|
||||
if (json_dumpf(NULL, stderr, JSON_ENCODE_ANY) != -1)
|
||||
fail("json_dumpf didn't fail for NULL");
|
||||
|
||||
#ifdef HAVE_UNISTD_H
|
||||
if (json_dumpfd(NULL, STDERR_FILENO, JSON_ENCODE_ANY) != -1)
|
||||
fail("json_dumpfd didn't fail for NULL");
|
||||
#endif
|
||||
|
||||
/* Don't test json_dump_file to avoid creating a file */
|
||||
|
||||
if (json_dump_callback(NULL, encode_null_callback, NULL, JSON_ENCODE_ANY) != -1)
|
||||
fail("json_dump_callback didn't fail for NULL");
|
||||
}
|
||||
|
||||
static void encode_twice() {
|
||||
/* Encode an empty object/array, add an item, encode again */
|
||||
|
||||
json_t *json;
|
||||
char *result;
|
||||
|
||||
json = json_object();
|
||||
result = json_dumps(json, 0);
|
||||
if (!result || strcmp(result, "{}"))
|
||||
fail("json_dumps failed");
|
||||
free(result);
|
||||
|
||||
json_object_set_new(json, "foo", json_integer(5));
|
||||
result = json_dumps(json, 0);
|
||||
if (!result || strcmp(result, "{\"foo\": 5}"))
|
||||
fail("json_dumps failed");
|
||||
free(result);
|
||||
|
||||
json_decref(json);
|
||||
|
||||
json = json_array();
|
||||
result = json_dumps(json, 0);
|
||||
if (!result || strcmp(result, "[]"))
|
||||
fail("json_dumps failed");
|
||||
free(result);
|
||||
|
||||
json_array_append_new(json, json_integer(5));
|
||||
result = json_dumps(json, 0);
|
||||
if (!result || strcmp(result, "[5]"))
|
||||
fail("json_dumps failed");
|
||||
free(result);
|
||||
|
||||
json_decref(json);
|
||||
}
|
||||
|
||||
static void circular_references() {
|
||||
/* Construct a JSON object/array with a circular reference:
|
||||
|
||||
object: {"a": {"b": {"c": <circular reference to $.a>}}}
|
||||
array: [[[<circular reference to the $[0] array>]]]
|
||||
|
||||
Encode it, remove the circular reference and encode again.
|
||||
*/
|
||||
|
||||
json_t *json;
|
||||
char *result;
|
||||
|
||||
json = json_object();
|
||||
json_object_set_new(json, "a", json_object());
|
||||
json_object_set_new(json_object_get(json, "a"), "b", json_object());
|
||||
json_object_set(json_object_get(json_object_get(json, "a"), "b"), "c",
|
||||
json_object_get(json, "a"));
|
||||
|
||||
if (json_dumps(json, 0))
|
||||
fail("json_dumps encoded a circular reference!");
|
||||
|
||||
json_object_del(json_object_get(json_object_get(json, "a"), "b"), "c");
|
||||
|
||||
result = json_dumps(json, 0);
|
||||
if (!result || strcmp(result, "{\"a\": {\"b\": {}}}"))
|
||||
fail("json_dumps failed!");
|
||||
free(result);
|
||||
|
||||
json_decref(json);
|
||||
|
||||
json = json_array();
|
||||
json_array_append_new(json, json_array());
|
||||
json_array_append_new(json_array_get(json, 0), json_array());
|
||||
json_array_append(json_array_get(json_array_get(json, 0), 0),
|
||||
json_array_get(json, 0));
|
||||
|
||||
if (json_dumps(json, 0))
|
||||
fail("json_dumps encoded a circular reference!");
|
||||
|
||||
json_array_remove(json_array_get(json_array_get(json, 0), 0), 0);
|
||||
|
||||
result = json_dumps(json, 0);
|
||||
if (!result || strcmp(result, "[[[]]]"))
|
||||
fail("json_dumps failed!");
|
||||
free(result);
|
||||
|
||||
json_decref(json);
|
||||
}
|
||||
|
||||
static void encode_other_than_array_or_object() {
|
||||
/* Encoding anything other than array or object should only
|
||||
* succeed if the JSON_ENCODE_ANY flag is used */
|
||||
|
||||
json_t *json;
|
||||
char *result;
|
||||
|
||||
json = json_string("foo");
|
||||
if (json_dumps(json, 0) != NULL)
|
||||
fail("json_dumps encoded a string!");
|
||||
if (json_dumpf(json, NULL, 0) == 0)
|
||||
fail("json_dumpf encoded a string!");
|
||||
if (json_dumpfd(json, -1, 0) == 0)
|
||||
fail("json_dumpfd encoded a string!");
|
||||
|
||||
result = json_dumps(json, JSON_ENCODE_ANY);
|
||||
if (!result || strcmp(result, "\"foo\"") != 0)
|
||||
fail("json_dumps failed to encode a string with JSON_ENCODE_ANY");
|
||||
|
||||
free(result);
|
||||
json_decref(json);
|
||||
|
||||
json = json_integer(42);
|
||||
if (json_dumps(json, 0) != NULL)
|
||||
fail("json_dumps encoded an integer!");
|
||||
if (json_dumpf(json, NULL, 0) == 0)
|
||||
fail("json_dumpf encoded an integer!");
|
||||
if (json_dumpfd(json, -1, 0) == 0)
|
||||
fail("json_dumpfd encoded an integer!");
|
||||
|
||||
result = json_dumps(json, JSON_ENCODE_ANY);
|
||||
if (!result || strcmp(result, "42") != 0)
|
||||
fail("json_dumps failed to encode an integer with JSON_ENCODE_ANY");
|
||||
|
||||
free(result);
|
||||
json_decref(json);
|
||||
}
|
||||
|
||||
static void escape_slashes() {
|
||||
/* Test dump escaping slashes */
|
||||
|
||||
json_t *json;
|
||||
char *result;
|
||||
|
||||
json = json_object();
|
||||
json_object_set_new(json, "url", json_string("https://github.com/akheron/jansson"));
|
||||
|
||||
result = json_dumps(json, 0);
|
||||
if (!result || strcmp(result, "{\"url\": \"https://github.com/akheron/jansson\"}"))
|
||||
fail("json_dumps failed to not escape slashes");
|
||||
|
||||
free(result);
|
||||
|
||||
result = json_dumps(json, JSON_ESCAPE_SLASH);
|
||||
if (!result ||
|
||||
strcmp(result, "{\"url\": \"https:\\/\\/github.com\\/akheron\\/jansson\"}"))
|
||||
fail("json_dumps failed to escape slashes");
|
||||
|
||||
free(result);
|
||||
json_decref(json);
|
||||
}
|
||||
|
||||
static void encode_nul_byte() {
|
||||
json_t *json;
|
||||
char *result;
|
||||
|
||||
json = json_stringn("nul byte \0 in string", 20);
|
||||
result = json_dumps(json, JSON_ENCODE_ANY);
|
||||
if (!result || memcmp(result, "\"nul byte \\u0000 in string\"", 27))
|
||||
fail("json_dumps failed to dump an embedded NUL byte");
|
||||
|
||||
free(result);
|
||||
json_decref(json);
|
||||
}
|
||||
|
||||
static void dump_file() {
|
||||
json_t *json;
|
||||
int result;
|
||||
|
||||
result = json_dump_file(NULL, "", 0);
|
||||
if (result != -1)
|
||||
fail("json_dump_file succeeded with invalid args");
|
||||
|
||||
json = json_object();
|
||||
result = json_dump_file(json, "json_dump_file.json", 0);
|
||||
if (result != 0)
|
||||
fail("json_dump_file failed");
|
||||
|
||||
json_decref(json);
|
||||
remove("json_dump_file.json");
|
||||
}
|
||||
|
||||
static void dumpb() {
|
||||
char buf[2];
|
||||
json_t *obj;
|
||||
size_t size;
|
||||
|
||||
obj = json_object();
|
||||
|
||||
size = json_dumpb(obj, buf, sizeof(buf), 0);
|
||||
if (size != 2 || strncmp(buf, "{}", 2))
|
||||
fail("json_dumpb failed");
|
||||
|
||||
json_decref(obj);
|
||||
obj = json_pack("{s:s}", "foo", "bar");
|
||||
|
||||
size = json_dumpb(obj, buf, sizeof(buf), JSON_COMPACT);
|
||||
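/* buf (2 bytes) is far too small, but json_dumpb still returns the size
   that would have been needed: 13 bytes for the compact form {"foo":"bar"}. */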
if (size != 13)
|
||||
fail("json_dumpb size check failed");
|
||||
|
||||
json_decref(obj);
|
||||
}
|
||||
|
||||
static void dumpfd() {
|
||||
#ifdef HAVE_UNISTD_H
|
||||
int fds[2] = {-1, -1};
|
||||
json_t *a, *b;
|
||||
|
||||
if (pipe(fds))
|
||||
fail("pipe() failed");
|
||||
|
||||
a = json_pack("{s:s}", "foo", "bar");
|
||||
|
||||
if (json_dumpfd(a, fds[1], 0))
|
||||
fail("json_dumpfd() failed");
|
||||
close(fds[1]);
|
||||
|
||||
b = json_loadfd(fds[0], 0, NULL);
|
||||
if (!b)
|
||||
fail("json_loadfd() failed");
|
||||
close(fds[0]);
|
||||
|
||||
if (!json_equal(a, b))
|
||||
fail("json_equal() failed for fd test");
|
||||
|
||||
json_decref(a);
|
||||
json_decref(b);
|
||||
#endif
|
||||
}
|
||||
|
||||
static void embed() {
|
||||
static const char *plains[] = {"{\"bar\":[],\"foo\":{}}", "[[],{}]", "{}", "[]",
|
||||
NULL};
|
||||
|
||||
size_t i;
|
||||
|
||||
for (i = 0; plains[i]; i++) {
|
||||
const char *plain = plains[i];
|
||||
json_t *parse = NULL;
|
||||
char *embed = NULL;
|
||||
size_t psize = 0;
|
||||
size_t esize = 0;
|
||||
|
||||
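/* JSON_EMBED omits the enclosing '{}' or '[]', so the embedded dump is
   expected to be exactly two bytes shorter than the plain text. */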
psize = strlen(plain) - 2;
|
||||
embed = calloc(1, psize);
|
||||
parse = json_loads(plain, 0, NULL);
|
||||
esize =
|
||||
json_dumpb(parse, embed, psize, JSON_COMPACT | JSON_SORT_KEYS | JSON_EMBED);
|
||||
json_decref(parse);
|
||||
if (esize != psize)
|
||||
fail("json_dumpb(JSON_EMBED) returned an invalid size");
|
||||
if (strncmp(plain + 1, embed, esize) != 0)
|
||||
fail("json_dumps(JSON_EMBED) returned an invalid value");
|
||||
free(embed);
|
||||
}
|
||||
}
|
||||
|
||||
static void run_tests() {
|
||||
encode_null();
|
||||
encode_twice();
|
||||
circular_references();
|
||||
encode_other_than_array_or_object();
|
||||
escape_slashes();
|
||||
encode_nul_byte();
|
||||
dump_file();
|
||||
dumpb();
|
||||
dumpfd();
|
||||
embed();
|
||||
}
|
test/suites/api/test_dump_callback.c (new file, 82 lines)
@@ -0,0 +1,82 @@
/*
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include "util.h"
|
||||
#include <jansson.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
struct my_sink {
|
||||
char *buf;
|
||||
size_t off;
|
||||
size_t cap;
|
||||
};
|
||||
|
||||
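/* Fixed-capacity sink: appends to s->buf and reports failure (-1) as soon
   as a write would overflow the remaining capacity. */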
static int my_writer(const char *buffer, size_t len, void *data) {
|
||||
struct my_sink *s = data;
|
||||
if (len > s->cap - s->off) {
|
||||
return -1;
|
||||
}
|
||||
memcpy(s->buf + s->off, buffer, len);
|
||||
s->off += len;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void run_tests() {
|
||||
struct my_sink s;
|
||||
json_t *json;
|
||||
const char str[] = "[\"A\", {\"B\": \"C\", \"e\": false}, 1, null, \"foo\"]";
|
||||
char *dumped_to_string;
|
||||
|
||||
json = json_loads(str, 0, NULL);
|
||||
if (!json) {
|
||||
fail("json_loads failed");
|
||||
}
|
||||
|
||||
dumped_to_string = json_dumps(json, 0);
|
||||
if (!dumped_to_string) {
|
||||
json_decref(json);
|
||||
fail("json_dumps failed");
|
||||
}
|
||||
|
||||
s.off = 0;
|
||||
s.cap = strlen(dumped_to_string);
|
||||
s.buf = malloc(s.cap);
|
||||
if (!s.buf) {
|
||||
json_decref(json);
|
||||
free(dumped_to_string);
|
||||
fail("malloc failed");
|
||||
}
|
||||
|
||||
if (json_dump_callback(json, my_writer, &s, 0) == -1) {
|
||||
json_decref(json);
|
||||
free(dumped_to_string);
|
||||
free(s.buf);
|
||||
fail("json_dump_callback failed on an exact-length sink buffer");
|
||||
}
|
||||
|
||||
if (strncmp(dumped_to_string, s.buf, s.off) != 0) {
|
||||
json_decref(json);
|
||||
free(dumped_to_string);
|
||||
free(s.buf);
|
||||
fail("json_dump_callback and json_dumps did not produce identical "
|
||||
"output");
|
||||
}
|
||||
|
||||
s.off = 1;
|
||||
if (json_dump_callback(json, my_writer, &s, 0) != -1) {
|
||||
json_decref(json);
|
||||
free(dumped_to_string);
|
||||
free(s.buf);
|
||||
fail("json_dump_callback succeeded on a short buffer when it should "
|
||||
"have failed");
|
||||
}
|
||||
|
||||
json_decref(json);
|
||||
free(dumped_to_string);
|
||||
free(s.buf);
|
||||
}
|
test/suites/api/test_equal.c (new file, 202 lines)
@@ -0,0 +1,202 @@
/*
|
||||
* Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include "util.h"
|
||||
#include <jansson.h>
|
||||
|
||||
static void test_equal_simple() {
|
||||
json_t *value1, *value2;
|
||||
|
||||
if (json_equal(NULL, NULL))
|
||||
fail("json_equal fails for two NULLs");
|
||||
|
||||
value1 = json_true();
|
||||
if (json_equal(value1, NULL) || json_equal(NULL, value1))
|
||||
fail("json_equal fails for NULL");
|
||||
|
||||
/* this covers true, false and null as they are singletons */
|
||||
if (!json_equal(value1, value1))
|
||||
fail("identical objects are not equal");
|
||||
json_decref(value1);
|
||||
|
||||
/* integer */
|
||||
value1 = json_integer(1);
|
||||
value2 = json_integer(1);
|
||||
if (!value1 || !value2)
|
||||
fail("unable to create integers");
|
||||
if (!json_equal(value1, value2))
|
||||
fail("json_equal fails for two equal integers");
|
||||
json_decref(value2);
|
||||
|
||||
value2 = json_integer(2);
|
||||
if (!value2)
|
||||
fail("unable to create an integer");
|
||||
if (json_equal(value1, value2))
|
||||
fail("json_equal fails for two inequal integers");
|
||||
|
||||
json_decref(value1);
|
||||
json_decref(value2);
|
||||
|
||||
/* real */
|
||||
value1 = json_real(1.2);
|
||||
value2 = json_real(1.2);
|
||||
if (!value1 || !value2)
|
||||
fail("unable to create reals");
|
||||
if (!json_equal(value1, value2))
|
||||
fail("json_equal fails for two equal reals");
|
||||
json_decref(value2);
|
||||
|
||||
value2 = json_real(3.141592);
|
||||
if (!value2)
|
||||
fail("unable to create an real");
|
||||
if (json_equal(value1, value2))
|
||||
fail("json_equal fails for two inequal reals");
|
||||
|
||||
json_decref(value1);
|
||||
json_decref(value2);
|
||||
|
||||
/* string */
|
||||
value1 = json_string("foo");
|
||||
value2 = json_string("foo");
|
||||
if (!value1 || !value2)
|
||||
fail("unable to create strings");
|
||||
if (!json_equal(value1, value2))
|
||||
fail("json_equal fails for two equal strings");
|
||||
json_decref(value2);
|
||||
|
||||
value2 = json_string("bar");
|
||||
if (!value2)
|
||||
fail("unable to create an string");
|
||||
if (json_equal(value1, value2))
|
||||
fail("json_equal fails for two inequal strings");
|
||||
json_decref(value2);
|
||||
|
||||
value2 = json_string("bar2");
|
||||
if (!value2)
|
||||
fail("unable to create an string");
|
||||
if (json_equal(value1, value2))
|
||||
fail("json_equal fails for two inequal length strings");
|
||||
|
||||
json_decref(value1);
|
||||
json_decref(value2);
|
||||
}
|
||||
|
||||
static void test_equal_array() {
|
||||
json_t *array1, *array2;
|
||||
|
||||
array1 = json_array();
|
||||
array2 = json_array();
|
||||
if (!array1 || !array2)
|
||||
fail("unable to create arrays");
|
||||
|
||||
if (!json_equal(array1, array2))
|
||||
fail("json_equal fails for two empty arrays");
|
||||
|
||||
json_array_append_new(array1, json_integer(1));
|
||||
json_array_append_new(array2, json_integer(1));
|
||||
json_array_append_new(array1, json_string("foo"));
|
||||
json_array_append_new(array2, json_string("foo"));
|
||||
json_array_append_new(array1, json_integer(2));
|
||||
json_array_append_new(array2, json_integer(2));
|
||||
if (!json_equal(array1, array2))
|
||||
fail("json_equal fails for two equal arrays");
|
||||
|
||||
json_array_remove(array2, 2);
|
||||
if (json_equal(array1, array2))
|
||||
fail("json_equal fails for two inequal arrays");
|
||||
|
||||
json_array_append_new(array2, json_integer(3));
|
||||
if (json_equal(array1, array2))
|
||||
fail("json_equal fails for two inequal arrays");
|
||||
|
||||
json_decref(array1);
|
||||
json_decref(array2);
|
||||
}
|
||||
|
||||
static void test_equal_object() {
|
||||
json_t *object1, *object2;
|
||||
|
||||
object1 = json_object();
|
||||
object2 = json_object();
|
||||
if (!object1 || !object2)
|
||||
fail("unable to create objects");
|
||||
|
||||
if (!json_equal(object1, object2))
|
||||
fail("json_equal fails for two empty objects");
|
||||
|
||||
json_object_set_new(object1, "a", json_integer(1));
|
||||
json_object_set_new(object2, "a", json_integer(1));
|
||||
json_object_set_new(object1, "b", json_string("foo"));
|
||||
json_object_set_new(object2, "b", json_string("foo"));
|
||||
json_object_set_new(object1, "c", json_integer(2));
|
||||
json_object_set_new(object2, "c", json_integer(2));
|
||||
if (!json_equal(object1, object2))
|
||||
fail("json_equal fails for two equal objects");
|
||||
|
||||
json_object_del(object2, "c");
|
||||
if (json_equal(object1, object2))
|
||||
fail("json_equal fails for two inequal objects");
|
||||
|
||||
json_object_set_new(object2, "c", json_integer(3));
|
||||
if (json_equal(object1, object2))
|
||||
fail("json_equal fails for two inequal objects");
|
||||
|
||||
json_object_del(object2, "c");
|
||||
json_object_set_new(object2, "d", json_integer(2));
|
||||
if (json_equal(object1, object2))
|
||||
fail("json_equal fails for two inequal objects");
|
||||
|
||||
json_decref(object1);
|
||||
json_decref(object2);
|
||||
}
|
||||
|
||||
static void test_equal_complex() {
|
||||
json_t *value1, *value2, *value3;
|
||||
|
||||
const char *complex_json = "{"
|
||||
" \"integer\": 1, "
|
||||
" \"real\": 3.141592, "
|
||||
" \"string\": \"foobar\", "
|
||||
" \"true\": true, "
|
||||
" \"object\": {"
|
||||
" \"array-in-object\": [1,true,\"foo\",{}],"
|
||||
" \"object-in-object\": {\"foo\": \"bar\"}"
|
||||
" },"
|
||||
" \"array\": [\"foo\", false, null, 1.234]"
|
||||
"}";
|
||||
|
||||
value1 = json_loads(complex_json, 0, NULL);
|
||||
value2 = json_loads(complex_json, 0, NULL);
|
||||
value3 = json_loads(complex_json, 0, NULL);
|
||||
if (!value1 || !value2)
|
||||
fail("unable to parse JSON");
|
||||
if (!json_equal(value1, value2))
|
||||
fail("json_equal fails for two equal objects");
|
||||
|
||||
json_array_set_new(
|
||||
json_object_get(json_object_get(value2, "object"), "array-in-object"), 1,
|
||||
json_false());
|
||||
if (json_equal(value1, value2))
|
||||
fail("json_equal fails for two inequal objects");
|
||||
|
||||
json_object_set_new(
|
||||
json_object_get(json_object_get(value3, "object"), "object-in-object"), "foo",
|
||||
json_string("baz"));
|
||||
if (json_equal(value1, value3))
|
||||
fail("json_equal fails for two inequal objects");
|
||||
|
||||
json_decref(value1);
|
||||
json_decref(value2);
|
||||
json_decref(value3);
|
||||
}
|
||||
|
||||
static void run_tests() {
|
||||
test_equal_simple();
|
||||
test_equal_array();
|
||||
test_equal_object();
|
||||
test_equal_complex();
|
||||
}
|
test/suites/api/test_fixed_size.c (new file, 228 lines)
@@ -0,0 +1,228 @@
/*
|
||||
* Copyright (c) 2020 Petri Lehtinen <petri@digip.org>
|
||||
*
|
||||
* Jansson is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the MIT license. See LICENSE for details.
|
||||
*/
|
||||
|
||||
#include "util.h"
|
||||
#include <jansson.h>
|
||||
#include <string.h>
|
||||
|
||||
static void test_keylen_iterator(json_t *object) {
|
||||
const char key1[] = {'t', 'e', 's', 't', '1'};
|
||||
const char key2[] = {'t', 'e', 's', 't'};
|
||||
const char key3[] = {'t', 'e', 's', '\0', 't'};
|
||||
const char key4[] = {'t', 'e', 's', 't', '\0'};
|
||||
const char *reference_keys[] = {key1, key2, key3, key4};
|
||||
const size_t reference_keys_len[] = {sizeof(key1), sizeof(key2), sizeof(key3),
|
||||
sizeof(key4)};
|
||||
size_t index = 0;
|
||||
json_t *value;
|
||||
const char *key;
|
||||
size_t keylen;
|
||||
|
||||
json_object_keylen_foreach(object, key, keylen, value) {
|
||||
if (keylen != reference_keys_len[index])
|
||||
fail("invalid key len in iterator");
|
||||
if (memcmp(key, reference_keys[index], reference_keys_len[index]) != 0)
|
||||
fail("invalid key in iterator");
|
||||
|
||||
index++;
|
||||
}
|
||||
}
|
||||
|
||||
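/* Keys below are raw byte arrays (some with an embedded or trailing NUL
   byte), so they can only be told apart through the length-aware
   json_object_getn/setn/deln API. */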
static void test_keylen(void) {
|
||||
json_t *obj = json_object();
|
||||
const char key[] = {'t', 'e', 's', 't', '1'};
|
||||
const char key2[] = {'t', 'e', 's', 't'};
|
||||
const char key3[] = {'t', 'e', 's', '\0', 't'};
|
||||
const char key4[] = {'t', 'e', 's', 't', '\0'};
|
||||
|
||||
if (json_object_size(obj) != 0)
|
||||
fail("incorrect json");
|
||||
|
||||
json_object_set_new_nocheck(obj, "test1", json_true());
|
||||
|
||||
if (json_object_size(obj) != 1)
|
||||
fail("incorrect json");
|
||||
|
||||
if (json_object_getn(obj, key, sizeof(key)) != json_true())
|
||||
fail("json_object_getn failed");
|
||||
|
||||
if (json_object_getn(obj, key2, sizeof(key2)) != NULL)
|
||||
fail("false positive json_object_getn by key2");
|
||||
|
||||
if (json_object_setn_nocheck(obj, key2, sizeof(key2), json_false()))
|
||||
fail("json_object_setn_nocheck for key2 failed");
|
||||
|
||||
if (json_object_size(obj) != 2)
|
||||
fail("incorrect json");
|
||||
|
||||
if (json_object_get(obj, "test") != json_false())
|
||||
fail("json_object_setn_nocheck for key2 failed");
|
||||
|
||||
if (json_object_getn(obj, key2, sizeof(key2)) != json_false())
|
||||
fail("json_object_getn by key 2 failed");
|
||||
|
||||
if (json_object_getn(obj, key3, sizeof(key3)) != NULL)
|
||||
fail("false positive json_object_getn by key3");
|
||||
|
||||
if (json_object_setn_nocheck(obj, key3, sizeof(key3), json_false()))
|
||||
fail("json_object_setn_nocheck for key3 failed");
|
||||
|
||||
if (json_object_size(obj) != 3)
|
||||
fail("incorrect json");
|
||||
|
||||
if (json_object_getn(obj, key3, sizeof(key3)) != json_false())
|
||||
fail("json_object_getn by key 3 failed");
|
||||
|
||||
if (json_object_getn(obj, key4, sizeof(key4)) != NULL)
|
||||
fail("false positive json_object_getn by key3");
|
||||
|
||||
if (json_object_setn_nocheck(obj, key4, sizeof(key4), json_false()))
|
||||
fail("json_object_setn_nocheck for key3 failed");
|
||||
|
||||
if (json_object_size(obj) != 4)
|
||||
fail("incorrect json");
|
||||
|
||||
test_keylen_iterator(obj);
|
||||
|
||||
if (json_object_getn(obj, key4, sizeof(key4)) != json_false())
|
||||
fail("json_object_getn by key 3 failed");
|
||||
|
||||
if (json_object_size(obj) != 4)
|
||||
fail("incorrect json");
|
||||
|
||||
if (json_object_deln(obj, key4, sizeof(key4)))
|
||||
fail("json_object_deln failed");
|
||||
if (json_object_getn(obj, key4, sizeof(key4)) != NULL)
|
||||
fail("json_object_deln failed");
|
||||
if (json_object_size(obj) != 3)
|
||||
fail("incorrect json");
|
||||
|
||||
if (json_object_deln(obj, key3, sizeof(key3)))
|
||||
fail("json_object_deln failed");
|
||||
if (json_object_getn(obj, key3, sizeof(key3)) != NULL)
|
||||
fail("json_object_deln failed");
|
||||
if (json_object_size(obj) != 2)
|
||||
fail("incorrect json");
|
||||
|
||||
if (json_object_deln(obj, key2, sizeof(key2)))
|
||||
fail("json_object_deln failed");
|
||||
if (json_object_getn(obj, key2, sizeof(key2)) != NULL)
|
||||
fail("json_object_deln failed");
|
||||
if (json_object_size(obj) != 1)
|
||||
fail("incorrect json");
|
||||
|
||||
if (json_object_deln(obj, key, sizeof(key)))
|
||||
fail("json_object_deln failed");
|
||||
if (json_object_getn(obj, key, sizeof(key)) != NULL)
|
||||
fail("json_object_deln failed");
|
||||
if (json_object_size(obj) != 0)
|
||||
fail("incorrect json");
|
||||
|
||||
json_decref(obj);
|
||||
}
|
||||
|
||||
static void test_invalid_keylen(void) {
|
||||
json_t *obj = json_object();
|
||||
json_t *empty_obj = json_object();
|
||||
const char key[] = {'t', 'e', 's', 't', '1'};
|
||||
|
||||
json_object_set_new_nocheck(obj, "test1", json_true());
|
||||
|
||||
if (json_object_getn(NULL, key, sizeof(key)) != NULL)
|
||||
fail("json_object_getn on NULL failed");
|
||||
|
||||
if (json_object_getn(obj, NULL, sizeof(key)) != NULL)
|
||||
fail("json_object_getn on NULL failed");
|
||||
|
||||
if (json_object_getn(obj, key, 0) != NULL)
|
||||
fail("json_object_getn on NULL failed");
|
||||
|
||||
if (!json_object_setn_new(obj, NULL, sizeof(key), json_true()))
|
||||
fail("json_object_setn_new with NULL key failed");
|
||||
|
||||
if (!json_object_setn_new_nocheck(obj, NULL, sizeof(key), json_true()))
|
||||
fail("json_object_setn_new_nocheck with NULL key failed");
|
||||
|
||||
if (!json_object_del(obj, NULL))
|
||||
fail("json_object_del with NULL failed");
|
||||
|
||||
if (!json_object_deln(empty_obj, key, sizeof(key)))
|
||||
fail("json_object_deln with empty object failed");
|
||||
|
||||
if (!json_object_deln(obj, key, sizeof(key) - 1))
|
||||
fail("json_object_deln with incomplete key failed");
|
||||
|
||||
json_decref(obj);
|
||||
json_decref(empty_obj);
|
||||
}
|
||||
|
||||
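/* Uses the raw bytes of two ints as object keys; key1 contains only zero
   bytes, so a plain NUL-terminated key lookup could not address it. */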
static void test_binary_keys(void) {
|
||||
json_t *obj = json_object();
|
||||
int key1 = 0;
|
||||
int key2 = 1;
|
||||
|
||||
json_object_setn_nocheck(obj, (const char *)&key1, sizeof(key1), json_true());
|
||||
json_object_setn_nocheck(obj, (const char *)&key2, sizeof(key2), json_true());
|
||||
|
||||
if (!json_is_true(json_object_getn(obj, (const char *)&key1, sizeof(key1))))
|
||||
fail("cannot get integer key1");
|
||||
|
||||
if (!json_is_true(json_object_getn(obj, (const char *)&key1, sizeof(key2))))
|
||||
fail("cannot get integer key2");
|
||||
|
||||
if (json_object_size(obj) != 2)
|
||||
fail("binary object size missmatch");
|
||||
|
||||
if (json_object_deln(obj, (const char *)&key1, sizeof(key1)))
|
||||
fail("cannot del integer key1");
|
||||
|
||||
if (json_object_size(obj) != 1)
|
||||
fail("binary object size missmatch");
|
||||
|
||||
if (json_object_deln(obj, (const char *)&key2, sizeof(key2)))
|
||||
fail("cannot del integer key2");
|
||||
|
||||
if (json_object_size(obj) != 0)
|
||||
fail("binary object size missmatch");
|
||||
|
||||
json_decref(obj);
|
||||
}
|
||||
|
||||
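/* Keys with an embedded NUL are dumped as \u0000; verifies that insertion
   order is preserved by default and that JSON_SORT_KEYS sorts on the full
   binary key. */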
static void test_dump_order(void) {
|
||||
json_t *obj = json_object();
|
||||
char key1[] = {'k', '\0', '-', '2'};
|
||||
char key2[] = {'k', '\0', '-', '1'};
|
||||
const char expected_sorted_str[] =
|
||||
"{\"k\\u0000-1\": \"first\", \"k\\u0000-2\": \"second\"}";
|
||||
const char expected_nonsorted_str[] =
|
||||
"{\"k\\u0000-2\": \"second\", \"k\\u0000-1\": \"first\"}";
|
||||
char *out;
|
||||
|
||||
json_object_setn_new_nocheck(obj, key1, sizeof(key1), json_string("second"));
|
||||
json_object_setn_new_nocheck(obj, key2, sizeof(key2), json_string("first"));
|
||||
|
||||
out = malloc(512);
|
||||
|
||||
json_dumpb(obj, out, 512, 0);
|
||||
|
||||
if (memcmp(expected_nonsorted_str, out, sizeof(expected_nonsorted_str) - 1) != 0)
|
||||
fail("preserve order failed");
|
||||
|
||||
json_dumpb(obj, out, 512, JSON_SORT_KEYS);
|
||||
if (memcmp(expected_sorted_str, out, sizeof(expected_sorted_str) - 1) != 0)
|
||||
fail("utf-8 sort failed");
|
||||
|
||||
free(out);
|
||||
json_decref(obj);
|
||||
}
|
||||
|
||||
static void run_tests() {
|
||||
test_keylen();
|
||||
test_invalid_keylen();
|
||||
test_binary_keys();
|
||||
test_dump_order();
|
||||
}
|
test/suites/api/test_load.c (new file, 238 lines)
@@ -0,0 +1,238 @@
/*
 * Copyright (c) 2009-2016 Petri Lehtinen <petri@digip.org>
 *
 * Jansson is free software; you can redistribute it and/or modify
 * it under the terms of the MIT license. See LICENSE for details.
 */

#include "util.h"
#include <jansson.h>
#include <string.h>

static void file_not_found() {
    json_t *json;
    json_error_t error;
    char *pos;

    json = json_load_file("/path/to/nonexistent/file.json", 0, &error);
    if (json)
        fail("json_load_file returned non-NULL for a nonexistent file");
    if (error.line != -1)
        fail("json_load_file returned an invalid line number");

    /* The error message is locale specific, only check the beginning
       of the error message. */

    pos = strchr(error.text, ':');
    if (!pos)
        fail("json_load_file returned an invalid error message");

    *pos = '\0';

    if (strcmp(error.text, "unable to open /path/to/nonexistent/file.json") != 0)
        fail("json_load_file returned an invalid error message");
    if (json_error_code(&error) != json_error_cannot_open_file)
        fail("json_load_file returned an invalid error code");
}

static void very_long_file_name() {
    json_t *json;
    json_error_t error;

    json = json_load_file("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
                          "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                          0, &error);
    if (json)
        fail("json_load_file returned non-NULL for a nonexistent file");
    if (error.line != -1)
        fail("json_load_file returned an invalid line number");

    if (strncmp(error.source, "...aaa", 6) != 0)
        fail("error source was set incorrectly");
    if (json_error_code(&error) != json_error_cannot_open_file)
        fail("error code was set incorrectly");
}

static void reject_duplicates() {
    json_error_t error;

    if (json_loads("{\"foo\": 1, \"foo\": 2}", JSON_REJECT_DUPLICATES, &error))
        fail("json_loads did not detect a duplicate key");
    check_error(json_error_duplicate_key, "duplicate object key near '\"foo\"'",
                "<string>", 1, 16, 16);
}

static void disable_eof_check() {
    json_error_t error;
    json_t *json;

    const char *text = "{\"foo\": 1} garbage";

    if (json_loads(text, 0, &error))
        fail("json_loads did not detect garbage after JSON text");
    check_error(json_error_end_of_input_expected, "end of file expected near 'garbage'",
                "<string>", 1, 18, 18);

    json = json_loads(text, JSON_DISABLE_EOF_CHECK, &error);
    if (!json)
        fail("json_loads failed with JSON_DISABLE_EOF_CHECK");

    json_decref(json);
}

static void decode_any() {
    json_t *json;
    json_error_t error;

    json = json_loads("\"foo\"", JSON_DECODE_ANY, &error);
    if (!json || !json_is_string(json))
        fail("json_load decoded any failed - string");
    json_decref(json);

    json = json_loads("42", JSON_DECODE_ANY, &error);
    if (!json || !json_is_integer(json))
        fail("json_load decoded any failed - integer");
    json_decref(json);

    json = json_loads("true", JSON_DECODE_ANY, &error);
    if (!json || !json_is_true(json))
        fail("json_load decoded any failed - boolean");
    json_decref(json);

    json = json_loads("null", JSON_DECODE_ANY, &error);
    if (!json || !json_is_null(json))
        fail("json_load decoded any failed - null");
    json_decref(json);
}

static void decode_int_as_real() {
    json_t *json;
    json_error_t error;

#if JSON_INTEGER_IS_LONG_LONG
    const char *imprecise;
    json_int_t expected;
#endif

    char big[311];

    json = json_loads("42", JSON_DECODE_INT_AS_REAL | JSON_DECODE_ANY, &error);
    if (!json || !json_is_real(json) || json_real_value(json) != 42.0)
        fail("json_load decode int as real failed - int");
    json_decref(json);

#if JSON_INTEGER_IS_LONG_LONG
    /* This number cannot be represented exactly by a double */
    imprecise = "9007199254740993";
    expected = 9007199254740992ll;

    json = json_loads(imprecise, JSON_DECODE_INT_AS_REAL | JSON_DECODE_ANY, &error);
    if (!json || !json_is_real(json) || expected != (json_int_t)json_real_value(json))
        fail("json_load decode int as real failed - expected imprecision");
    json_decref(json);
#endif

    /* 1E309 overflows. Here we create 1E309 as a decimal number, i.e.
       1000...(309 zeroes)...0. */
    big[0] = '1';
    memset(big + 1, '0', 309);
    big[310] = '\0';

    json = json_loads(big, JSON_DECODE_INT_AS_REAL | JSON_DECODE_ANY, &error);
    if (json || strcmp(error.text, "real number overflow") != 0 ||
        json_error_code(&error) != json_error_numeric_overflow)
        fail("json_load decode int as real failed - expected overflow");
    json_decref(json);
}

static void allow_nul() {
    const char *text = "\"nul byte \\u0000 in string\"";
    const char *expected = "nul byte \0 in string";
    size_t len = 20;
    json_t *json;

    json = json_loads(text, JSON_ALLOW_NUL | JSON_DECODE_ANY, NULL);
    if (!json || !json_is_string(json))
        fail("unable to decode embedded NUL byte");

    if (json_string_length(json) != len)
        fail("decoder returned wrong string length");

    if (memcmp(json_string_value(json), expected, len + 1))
        fail("decoder returned wrong string content");

    json_decref(json);
}

static void load_wrong_args() {
    json_t *json;
    json_error_t error;

    json = json_loads(NULL, 0, &error);
    if (json)
        fail("json_loads should return NULL if the first argument is NULL");

    json = json_loadb(NULL, 0, 0, &error);
    if (json)
        fail("json_loadb should return NULL if the first argument is NULL");

    json = json_loadf(NULL, 0, &error);
    if (json)
        fail("json_loadf should return NULL if the first argument is NULL");

    json = json_loadfd(-1, 0, &error);
    if (json)
        fail("json_loadfd should return NULL if the first argument is < 0");

    json = json_load_file(NULL, 0, &error);
    if (json)
        fail("json_load_file should return NULL if the first argument is NULL");
}

static void position() {
    json_t *json;
    size_t flags = JSON_DISABLE_EOF_CHECK;
    json_error_t error;

    json = json_loads("{\"foo\": \"bar\"}", 0, &error);
    if (error.position != 14)
        fail("json_loads returned a wrong position");
    json_decref(json);

    json = json_loads("{\"foo\": \"bar\"} baz quux", flags, &error);
    if (error.position != 14)
        fail("json_loads returned a wrong position");
    json_decref(json);
}

static void error_code() {
    json_error_t error;
    json_t *json = json_loads("[123] garbage", 0, &error);
    if (json != NULL)
        fail("json_loads returned not NULL");
    if (strlen(error.text) >= JSON_ERROR_TEXT_LENGTH)
        fail("error.text longer than expected");
    if (json_error_code(&error) != json_error_end_of_input_expected)
        fail("json_loads returned incorrect error code");

    json = json_loads("{\"foo\": ", 0, &error);
    if (json != NULL)
        fail("json_loads returned not NULL");
    if (strlen(error.text) >= JSON_ERROR_TEXT_LENGTH)
        fail("error.text longer than expected");
    if (json_error_code(&error) != json_error_premature_end_of_input)
        fail("json_loads returned incorrect error code");
}

static void run_tests() {
    file_not_found();
    very_long_file_name();
    reject_duplicates();
    disable_eof_check();
    decode_any();
    decode_int_as_real();
    allow_nul();
    load_wrong_args();
    position();
    error_code();
}

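These load tests check both the message in error.text and the machine-readable value returned by json_error_code(). Below is a minimal sketch (illustrative only, not part of the diff) of how a caller might branch on the error code instead of string-matching error.text, using only the json_error_t fields (line, column, position, text) validated above:

#include <jansson.h>
#include <stdio.h>

int main(void) {
    json_error_t error;
    json_t *json = json_loads("{\"foo\": ", 0, &error);

    if (!json) {
        /* Branch on the numeric error code rather than the text message. */
        switch (json_error_code(&error)) {
            case json_error_premature_end_of_input:
                fprintf(stderr, "truncated input at line %d, column %d (byte %d): %s\n",
                        error.line, error.column, error.position, error.text);
                break;
            default:
                fprintf(stderr, "parse error: %s\n", error.text);
        }
        return 1;
    }

    json_decref(json);
    return 0;
}

Branching on json_error_code() keeps error handling stable even if the wording of error.text changes between releases.
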
Some files were not shown because too many files have changed in this diff.