Merge commit '4f9b885a2e8b7a10653653fee3bb91cf5102b0ef' as 'src/deps/src/lua-cjson'

This commit is contained in:
Théophile Diot 2023-06-30 15:37:58 -04:00
commit 47ee3884fb
46 changed files with 12530 additions and 0 deletions

5
src/deps/src/lua-cjson/.gitattributes vendored Normal file
View File

@ -0,0 +1,5 @@
.gitattributes export-ignore
.gitignore export-ignore
build-packages.sh export-ignore
TODO export-ignore
devel export-ignore

View File

@ -0,0 +1,61 @@
name: test
on: [push, pull_request]
jobs:
test:
strategy:
fail-fast: false
matrix:
cc: ["gcc", "clang"]
luaVersion: ["5.1", "5.2", "5.3", "5.4", "luajit", "luajit-openresty"]
include:
- luaVersion: "luajit"
runtestArgs: "LUA_INCLUDE_DIR=.lua/include/luajit-2.1"
runtestEnv: "SKIP_CMAKE=1"
- luaVersion: "luajit-openresty"
runtestArgs: "LUA_INCLUDE_DIR=.lua/include/luajit-2.1"
runtestEnv: "SKIP_CMAKE=1"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: leafo/gh-actions-lua@master
env:
CC: ${{ matrix.cc }}
with:
luaVersion: ${{ matrix.luaVersion }}
luaCompileFlags: CC=${{ matrix.cc }}
- uses: leafo/gh-actions-luarocks@master
- name: runtests.sh
env:
CC: ${{ matrix.cc }}
run: |
LUA_DIR=.lua ${{ matrix.runtestEnv }} ./runtests.sh PREFIX=.lua ${{ matrix.runtestArgs }}
- name: setup prove
env:
CC: ${{ matrix.cc }}
run: |
make PREFIX=.lua ${{ matrix.runtestArgs }}
sudo apt-get update
sudo apt-get install -q valgrind libipc-run3-perl cppcheck cpanminus
sudo cpanm --notest Test::Base Test::LongString
- name: cppcheck
run: cppcheck -i .lua/ -i .install/ -i dtoa.c --force --error-exitcode=1 --enable=warning .
- name: prove
run: LUA_BIN=lua prove -Itests tests
- name: prove (valgrind)
run: LUA_BIN=lua TEST_LUA_USE_VALGRIND=1 prove -Itests tests
- name: build
run: |
luarocks make

11
src/deps/src/lua-cjson/.gitignore vendored Normal file
View File

@ -0,0 +1,11 @@
*.html
*.o
*.so
notes
packages
tags
tests/utf8.dat
*~
*.swp
go
test_case.lua

View File

@ -0,0 +1,63 @@
sudo: required
dist: focal
os: linux
language: c
compiler:
- gcc
addons:
apt:
packages:
- cppcheck
- valgrind
- cpanminus
- libipc-run3-perl
- lua5.1
- lua5.1-dev
- cmake
cache:
apt: true
env:
global:
- JOBS=3
- LUAROCKS_VER=2.4.2
matrix:
#- LUA=1 LUA_DIR=/usr LUA_INCLUDE_DIR=$LUA_DIR/include/lua5.1
- LUAJIT=1 LUA_DIR=/usr/local LUA_INCLUDE_DIR=$LUA_DIR/include/luajit-2.1 LUA_SUFFIX=--lua-suffix=jit
install:
- sudo ln -s /usr/bin/cmake /usr/local/bin/cmake
- if [ -n "$LUAJIT" ]; then git clone -b v2.1-agentzh https://github.com/openresty/luajit2.git; fi
- if [ -n "$LUAJIT" ]; then cd ./luajit2; fi
- if [ -n "$LUAJIT" ]; then make -j$JOBS CCDEBUG=-g Q= PREFIX=$LUAJIT_PREFIX CC=$CC XCFLAGS='-DLUA_USE_APICHECK -DLUA_USE_ASSERT' > build.log 2>&1 || (cat build.log && exit 1); fi
- if [ -n "$LUAJIT" ]; then sudo make install > build.log 2>&1 || (cat build.log && exit 1); fi
- if [ -n "$LUAJIT" ]; then cd ..; fi
- if [ -n "$LUAJIT" ]; then sudo ln -s $LUA_DIR/bin/luajit $LUA_DIR/bin/lua; fi
- sudo cpanm --notest Test::Base Test::LongString > build.log 2>&1 || (cat build.log && exit 1)
- wget https://luarocks.github.io/luarocks/releases/luarocks-$LUAROCKS_VER.tar.gz
- tar -zxf luarocks-$LUAROCKS_VER.tar.gz
- cd luarocks-$LUAROCKS_VER
- ./configure --with-lua=$LUA_DIR --with-lua-include=$LUA_INCLUDE_DIR $LUA_SUFFIX
- make build
- sudo make install
- cd ..
script:
- cppcheck -i ./luajit2 --force --error-exitcode=1 --enable=warning . > build.log 2>&1 || (cat build.log && exit 1)
- bash runtests.sh
- make
- prove -Itests tests
- TEST_LUA_USE_VALGRIND=1 prove -Itests tests > build.log 2>&1; export e=$?
- cat build.log
- grep -E '^==[0-9]+==' build.log; if [ "$?" == 0 ]; then exit 1; else exit $e; fi
- cmake -DUSE_INTERNAL_FPCONV=1 .
- make
- prove -Itests tests
- TEST_LUA_USE_VALGRIND=1 prove -Itests tests > build.log 2>&1; export e=$?
- cat build.log
- grep -E '^==[0-9]+==' build.log; if [ "$?" == 0 ]; then exit 1; else exit $e; fi

View File

@ -0,0 +1,76 @@
# If Lua is installed in a non-standard location, please set the LUA_DIR
# environment variable to point to prefix for the install. Eg:
# Unix: export LUA_DIR=/home/user/pkg
# Windows: set LUA_DIR=c:\lua51
project(lua-cjson C)
cmake_minimum_required(VERSION 2.8.12)
option(USE_INTERNAL_FPCONV "Use internal strtod() / g_fmt() code for performance")
option(MULTIPLE_THREADS "Support multi-threaded apps with internal fpconv - recommended" ON)
if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE Release CACHE STRING
"Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel."
FORCE)
endif()
find_package(Lua REQUIRED)
include_directories(${LUA_INCLUDE_DIR})
if(NOT USE_INTERNAL_FPCONV)
# Use libc number conversion routines (strtod(), sprintf())
set(FPCONV_SOURCES fpconv.c)
else()
# Use internal number conversion routines
add_definitions(-DUSE_INTERNAL_FPCONV)
set(FPCONV_SOURCES g_fmt.c dtoa.c)
include(TestBigEndian)
TEST_BIG_ENDIAN(IEEE_BIG_ENDIAN)
if(IEEE_BIG_ENDIAN)
add_definitions(-DIEEE_BIG_ENDIAN)
endif()
if(MULTIPLE_THREADS)
set(CMAKE_THREAD_PREFER_PTHREAD TRUE)
find_package(Threads REQUIRED)
if(NOT CMAKE_USE_PTHREADS_INIT)
message(FATAL_ERROR
"Pthreads not found - required by MULTIPLE_THREADS option")
endif()
add_definitions(-DMULTIPLE_THREADS)
endif()
endif()
# Handle platforms missing isinf() macro (Eg, some Solaris systems).
include(CheckSymbolExists)
CHECK_SYMBOL_EXISTS(isinf math.h HAVE_ISINF)
if(NOT HAVE_ISINF)
add_definitions(-DUSE_INTERNAL_ISINF)
endif()
set(_MODULE_LINK "${CMAKE_THREAD_LIBS_INIT}")
get_filename_component(_lua_lib_dir ${LUA_LIBRARY} PATH)
if(APPLE)
set(CMAKE_SHARED_MODULE_CREATE_C_FLAGS
"${CMAKE_SHARED_MODULE_CREATE_C_FLAGS} -undefined dynamic_lookup")
endif()
if(WIN32)
# Win32 modules need to be linked to the Lua library.
set(_MODULE_LINK ${LUA_LIBRARY} ${_MODULE_LINK})
set(_lua_module_dir "${_lua_lib_dir}")
# Windows sprintf()/strtod() handle NaN/inf differently. Not supported.
add_definitions(-DDISABLE_INVALID_NUMBERS)
else()
set(_lua_module_dir "${_lua_lib_dir}/lua/5.1")
endif()
add_library(cjson MODULE lua_cjson.c strbuf.c ${FPCONV_SOURCES})
set_target_properties(cjson PROPERTIES PREFIX "")
target_link_libraries(cjson ${_MODULE_LINK})
install(TARGETS cjson DESTINATION "${_lua_module_dir}")
# vi:ai et sw=4 ts=4:

View File

@ -0,0 +1,20 @@
Copyright (c) 2010-2012 Mark Pulford <mark@kyne.com.au>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,121 @@
##### Available defines for CJSON_CFLAGS #####
##
## USE_INTERNAL_ISINF: Workaround for Solaris platforms missing isinf().
## DISABLE_INVALID_NUMBERS: Permanently disable invalid JSON numbers:
## NaN, Infinity, hex.
##
## Optional built-in number conversion uses the following defines:
## USE_INTERNAL_FPCONV: Use builtin strtod/dtoa for numeric conversions.
## IEEE_BIG_ENDIAN: Required on big endian architectures.
## MULTIPLE_THREADS: Must be set when Lua CJSON may be used in a
## multi-threaded application. Requires _pthreads_.
##### Build defaults #####
LUA_VERSION = 5.1
TARGET = cjson.so
PREFIX = /usr/local
#CFLAGS = -g -Wall -pedantic -fno-inline
CFLAGS = -O3 -Wall -pedantic -DNDEBUG -g
CJSON_CFLAGS = -fpic
CJSON_LDFLAGS = -shared
LUA_INCLUDE_DIR ?= $(PREFIX)/include
LUA_CMODULE_DIR ?= $(PREFIX)/lib/lua/$(LUA_VERSION)
LUA_MODULE_DIR ?= $(PREFIX)/share/lua/$(LUA_VERSION)
LUA_BIN_DIR ?= $(PREFIX)/bin
##### Platform overrides #####
##
## Tweak one of the platform sections below to suit your situation.
##
## See http://lua-users.org/wiki/BuildingModules for further platform
## specific details.
## Linux
## FreeBSD
#LUA_INCLUDE_DIR = $(PREFIX)/include/lua51
## MacOSX (Macports)
#PREFIX = /opt/local
#CJSON_LDFLAGS = -bundle -undefined dynamic_lookup
## Solaris
#PREFIX = /home/user/opt
#CC = gcc
#CJSON_CFLAGS = -fpic -DUSE_INTERNAL_ISINF
## Windows (MinGW)
#TARGET = cjson.dll
#PREFIX = /home/user/opt
#CJSON_CFLAGS = -DDISABLE_INVALID_NUMBERS
#CJSON_LDFLAGS = -shared -L$(PREFIX)/lib -llua51
#LUA_BIN_SUFFIX = .lua
##### Number conversion configuration #####
## Use Libc support for number conversion (default)
FPCONV_OBJS = fpconv.o
## Use built in number conversion
#FPCONV_OBJS = g_fmt.o dtoa.o
#CJSON_CFLAGS += -DUSE_INTERNAL_FPCONV
## Compile built in number conversion for big endian architectures
#CJSON_CFLAGS += -DIEEE_BIG_ENDIAN
## Compile built in number conversion to support multi-threaded
## applications (recommended)
#CJSON_CFLAGS += -pthread -DMULTIPLE_THREADS
#CJSON_LDFLAGS += -pthread
##### End customisable sections #####
TEST_FILES = README bench.lua genutf8.pl test.lua octets-escaped.dat \
example1.json example2.json example3.json example4.json \
example5.json numbers.json rfc-example1.json \
rfc-example2.json types.json
DATAPERM = 644
EXECPERM = 755
ASCIIDOC = asciidoc
BUILD_CFLAGS = -I$(LUA_INCLUDE_DIR) $(CJSON_CFLAGS)
OBJS = lua_cjson.o strbuf.o $(FPCONV_OBJS)
.PHONY: all clean install install-extra doc
.SUFFIXES: .html .txt
.c.o:
$(CC) -c $(CFLAGS) $(CPPFLAGS) $(BUILD_CFLAGS) -o $@ $<
.txt.html:
$(ASCIIDOC) -n -a toc $<
all: $(TARGET)
doc: manual.html performance.html
$(TARGET): $(OBJS)
$(CC) $(LDFLAGS) $(CJSON_LDFLAGS) -o $@ $(OBJS)
install: $(TARGET)
mkdir -p $(DESTDIR)$(LUA_CMODULE_DIR)
rm -f $(DESTDIR)$(LUA_CMODULE_DIR)/$(TARGET)
cp $(TARGET) $(DESTDIR)$(LUA_CMODULE_DIR)
chmod $(EXECPERM) $(DESTDIR)$(LUA_CMODULE_DIR)/$(TARGET)
install-extra:
mkdir -p $(DESTDIR)$(LUA_MODULE_DIR)/cjson/tests \
$(DESTDIR)$(LUA_BIN_DIR)
cp lua/cjson/util.lua $(DESTDIR)$(LUA_MODULE_DIR)/cjson
chmod $(DATAPERM) $(DESTDIR)$(LUA_MODULE_DIR)/cjson/util.lua
cp lua/lua2json.lua $(DESTDIR)$(LUA_BIN_DIR)/lua2json$(LUA_BIN_SUFFIX)
chmod $(EXECPERM) $(DESTDIR)$(LUA_BIN_DIR)/lua2json$(LUA_BIN_SUFFIX)
cp lua/json2lua.lua $(DESTDIR)$(LUA_BIN_DIR)/json2lua$(LUA_BIN_SUFFIX)
chmod $(EXECPERM) $(DESTDIR)$(LUA_BIN_DIR)/json2lua$(LUA_BIN_SUFFIX)
cd tests; cp $(TEST_FILES) $(DESTDIR)$(LUA_MODULE_DIR)/cjson/tests
cd tests; chmod $(DATAPERM) $(TEST_FILES); chmod $(EXECPERM) *.lua *.pl
clean:
rm -f *.o $(TARGET)

View File

@ -0,0 +1,44 @@
Version 2.1.0 (Mar 1 2012)
* Added cjson.safe module interface which returns nil after an error
* Improved Makefile compatibility with Solaris make
Version 2.0.0 (Jan 22 2012)
* Improved platform compatibility for strtod/sprintf locale workaround
* Added option to build with David Gay's dtoa.c for improved performance
* Added support for Lua 5.2
* Added option to encode infinity/NaN as JSON null
* Fixed encode bug with a raised default limit and deeply nested tables
* Updated Makefile for compatibility with non-GNU make implementations
* Added CMake build support
* Added HTML manual
* Increased default nesting limit to 1000
* Added support for re-entrant use of encode and decode
* Added support for installing lua2json and json2lua utilities
* Added encode_invalid_numbers() and decode_invalid_numbers()
* Added decode_max_depth()
* Removed registration of global cjson module table
* Removed refuse_invalid_numbers()
Version 1.0.4 (Nov 30 2011)
* Fixed numeric conversion under locales with a comma decimal separator
Version 1.0.3 (Sep 15 2011)
* Fixed detection of objects with numeric string keys
* Provided work around for missing isinf() on Solaris
Version 1.0.2 (May 30 2011)
* Portability improvements for Windows
- No longer links with -lm
- Use "socket" instead of "posix" for sub-second timing
* Removed UTF-8 test dependency on Perl Text::Iconv
* Added simple CLI commands for testing Lua <-> JSON conversions
* Added cjson.encode_number_precision()
Version 1.0.1 (May 10 2011)
* Added build support for OSX
* Removed unnecessary whitespace from JSON output
* Added cjson.encode_keep_buffer()
* Fixed memory leak on Lua stack overflow exception
Version 1.0 (May 9 2011)
* Initial release

View File

@ -0,0 +1,208 @@
Name
====
lua-cjson - Fast JSON encoding/parsing
Table of Contents
=================
* [Name](#name)
* [Description](#description)
* [Additions to mpx/lua](#additions)
* [encode_empty_table_as_object](#encode_empty_table_as_object)
* [empty_array](#empty_array)
* [array_mt](#array_mt)
* [empty_array_mt](#empty_array_mt)
* [encode_number_precision](#encode_number_precision)
* [encode_escape_forward_slash](#encode_escape_forward_slash)
* [decode_array_with_array_mt](#decode_array_with_array_mt)
Description
===========
This fork of [mpx/lua-cjson](https://github.com/mpx/lua-cjson) is included in
the [OpenResty](https://openresty.org/) bundle and includes a few bugfixes and
improvements, especially to facilitate the encoding of empty tables as JSON Arrays.
Please refer to the [lua-cjson documentation](http://www.kyne.com.au/~mark/software/lua-cjson.php)
for standard usage; this README only provides information regarding this fork's additions.
See [`mpx/master..openresty/master`](https://github.com/mpx/lua-cjson/compare/master...openresty:master)
for the complete history of changes.
[Back to TOC](#table-of-contents)
Additions
=========
encode_empty_table_as_object
----------------------------
**syntax:** `cjson.encode_empty_table_as_object(true|false|"on"|"off")`
Change the default behavior when encoding an empty Lua table.
By default, empty Lua tables are encoded as empty JSON Objects (`{}`). If this is set to false,
empty Lua tables will be encoded as empty JSON Arrays instead (`[]`).
This method accepts either a boolean or a string (`"on"`, `"off"`).
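A minimal sketch of the effect (assuming this fork of lua-cjson is loaded as `cjson`):
```lua
local cjson = require "cjson"

print(cjson.encode({}))                     -- default: {}
cjson.encode_empty_table_as_object(false)
print(cjson.encode({}))                     -- now: []
```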
[Back to TOC](#table-of-contents)
empty_array
-----------
**syntax:** `cjson.empty_array`
A lightuserdata, similar to `cjson.null`, which will be encoded as an empty JSON Array by
`cjson.encode()`.
For example, since `encode_empty_table_as_object` is `true` by default:
```lua
local cjson = require "cjson"
local json = cjson.encode({
foo = "bar",
some_object = {},
some_array = cjson.empty_array
})
```
This will generate:
```json
{
"foo": "bar",
"some_object": {},
"some_array": []
}
```
[Back to TOC](#table-of-contents)
array_mt
--------
**syntax:** `setmetatable({}, cjson.array_mt)`
When lua-cjson encodes a table with this metatable, it will systematically
encode it as a JSON Array. The resulting, encoded Array will contain the array
part of the table, and will be of the same length as the `#` operator on that
table. Holes in the table will be encoded with the `null` JSON value.
Example:
```lua
local t = { "hello", "world" }
setmetatable(t, cjson.array_mt)
cjson.encode(t) -- ["hello","world"]
```
Or:
```lua
local t = {}
t[1] = "one"
t[2] = "two"
t[4] = "three"
t.foo = "bar"
setmetatable(t, cjson.array_mt)
cjson.encode(t) -- ["one","two",null,"three"]
```
This value was introduced in the `2.1.0.5` release of this module.
[Back to TOC](#table-of-contents)
empty_array_mt
--------------
**syntax:** `setmetatable({}, cjson.empty_array_mt)`
A metatable which can "tag" a table as a JSON Array in case it is empty (that is, if the
table has no elements, `cjson.encode()` will encode it as an empty JSON Array).
Instead of:
```lua
local function serialize(arr)
if #arr < 1 then
arr = cjson.empty_array
end
return cjson.encode({some_array = arr})
end
```
This is more concise:
```lua
local function serialize(arr)
setmetatable(arr, cjson.empty_array_mt)
return cjson.encode({some_array = arr})
end
```
Both will generate:
```json
{
"some_array": []
}
```
[Back to TOC](#table-of-contents)
encode_number_precision
-----------------------
**syntax:** `cjson.encode_number_precision(precision)`
This fork allows encoding numbers with a `precision` of up to 16 significant digits (vs. 14 in mpx/lua-cjson).
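A minimal sketch (encoded output shown in comments):
```lua
local cjson = require "cjson"

print(cjson.encode(1/3))            -- default, 14 significant digits: 0.33333333333333
cjson.encode_number_precision(16)
print(cjson.encode(1/3))            -- 16 significant digits: 0.3333333333333333
```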
[Back to TOC](#table-of-contents)
encode_escape_forward_slash
---------------------------
**syntax:** `cjson.encode_escape_forward_slash(enabled)`
**default:** true
If enabled, forward slash '/' will be encoded as '\\/'.
If disabled, forward slash '/' will be encoded as '/' (no escape is applied).
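For example (encoded strings shown in comments):
```lua
local cjson = require "cjson"

print(cjson.encode("a/b"))                  -- default: "a\/b"
cjson.encode_escape_forward_slash(false)
print(cjson.encode("a/b"))                  -- "a/b"
```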
[Back to TOC](#table-of-contents)
decode_array_with_array_mt
--------------------------
**syntax:** `cjson.decode_array_with_array_mt(enabled)`
**default:** false
If enabled, JSON Arrays decoded by `cjson.decode` will result in Lua
tables with the [`array_mt`](#array_mt) metatable. This preserves a
1-to-1 relationship between JSON Arrays and Lua tables across repeated
decoding and re-encoding with this module.
If disabled, JSON Arrays will be decoded to plain Lua tables, without
the `array_mt` metatable.
The `enabled` argument is a boolean.
Example:
```lua
local cjson = require "cjson"
-- default behavior
local my_json = [[{"my_array":[]}]]
local t = cjson.decode(my_json)
cjson.encode(t) -- {"my_array":{}} back to an object
-- now, if this behavior is enabled
cjson.decode_array_with_array_mt(true)
local my_json = [[{"my_array":[]}]]
local t = cjson.decode(my_json)
cjson.encode(t) -- {"my_array":[]} properly re-encoded as an array
```
[Back to TOC](#table-of-contents)

View File

@ -0,0 +1,9 @@
The following people have helped with bug reports, testing and/or
suggestions:
- Louis-Philippe Perron (@loopole)
- Ondřej Jirman
- Steve Donovan <steve.j.donovan@gmail.com>
- Zhang "agentzh" Yichun <agentzh@gmail.com>
Thanks!

View File

@ -0,0 +1,36 @@
#!/bin/sh
# build-packages.sh [ REF ]
# Build packages. Use current checked out version, or a specific tag/commit.
# Files requiring a version bump
VERSION_FILES="lua-cjson-2.1devel-1.rockspec lua-cjson.spec lua_cjson.c manual.txt runtests.sh tests/test.lua"
[ "$1" ] && BRANCH="$1" || BRANCH="`git describe --match '[1-3].[0-9]*'`"
VERSION="`git describe --match '[1-3].[0-9]*' $BRANCH`"
VERSION="${VERSION//-/.}"
PREFIX="lua-cjson-$VERSION"
set -x
set -e
DESTDIR="`pwd`/packages"
mkdir -p "$DESTDIR"
BUILDROOT="`mktemp -d`"
trap "rm -rf '$BUILDROOT'" 0
git archive --prefix="$PREFIX/" "$BRANCH" | tar xf - -C "$BUILDROOT"
cd "$BUILDROOT"
cd "$PREFIX"
rename 2.1devel "$VERSION" $VERSION_FILES
perl -pi -e "s/\\b2.1devel\\b/$VERSION/g" ${VERSION_FILES/2.1devel/$VERSION};
cd ..
make -C "$PREFIX" doc
tar cf - "$PREFIX" | gzip -9 > "$DESTDIR/$PREFIX.tar.gz"
zip -9rq "$DESTDIR/$PREFIX.zip" "$PREFIX"
# vi:ai et sw=4 ts=4:

View File

@ -0,0 +1,50 @@
parser:
- call parse_value
- next_token
? <EOF> nop.
parse_value:
- next_token
? <OBJ_BEGIN> call parse_object.
? <ARR_BEGIN> call parse_array.
? <STRING> push. return.
? <BOOLEAN> push. return.
? <NULL> push. return.
? <NUMBER> push. return.
parse_object:
- push table
- next_token
? <STRING> push.
- next_token
? <COLON> nop.
- call parse_value
- set table
- next_token
? <OBJ_END> return.
? <COMMA> loop parse_object.
parse_array:
- push table
- call parse_value
- table append
- next_token
? <COMMA> loop parse_array.
? ] return.
next_token:
- check next character
? { return <OBJ_BEGIN>
? } return <OBJ_END>
? [ return <ARR_BEGIN>
? ] return <ARR_END>
? , return <COMMA>
? : return <COLON>
? [-0-9] gobble number. return <NUMBER>
? " gobble string. return <STRING>
? [ \t\n] eat whitespace.
? n Check "null". return <NULL> or <UNKNOWN>
? t Check "true". return <BOOLEAN> or <UNKNOWN>
? f Check "false". return <BOOLEAN> or <UNKNOWN>
? . return <UNKNOWN>
? \0 return <END>
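As a rough illustration only, the next_token step above might look like the
following Lua sketch (the real scanner is C code in lua_cjson.c and also
handles strings, escapes, booleans, null and error reporting):

    local function next_token(text, pos)
        -- Skip whitespace, then classify the next character.
        pos = text:find("[^ \t\r\n]", pos) or #text + 1
        local ch = text:sub(pos, pos)
        if ch == ""  then return "END", pos end
        if ch == "{" then return "OBJ_BEGIN", pos + 1 end
        if ch == "}" then return "OBJ_END", pos + 1 end
        if ch == "[" then return "ARR_BEGIN", pos + 1 end
        if ch == "]" then return "ARR_END", pos + 1 end
        if ch == "," then return "COMMA", pos + 1 end
        if ch == ":" then return "COLON", pos + 1 end
        if ch:match("[%-0-9]") then
            -- Gobble a number: sign, digits, exponent or decimal point.
            local num, rest = text:match("^([%-+0-9eE.]+)()", pos)
            return "NUMBER", rest, tonumber(num)
        end
        return "UNKNOWN", pos + 1
    end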

File diff suppressed because it is too large

View File

@ -0,0 +1,78 @@
#ifndef _DTOA_CONFIG_H
#define _DTOA_CONFIG_H
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
/* Ensure dtoa.c does not USE_LOCALE. Lua CJSON must not use locale
* aware conversion routines. */
#undef USE_LOCALE
/* dtoa.c should not touch errno, Lua CJSON does not use it, and it
* may not be threadsafe */
#define NO_ERRNO
#define Long int32_t
#define ULong uint32_t
#define Llong int64_t
#define ULLong uint64_t
#ifdef IEEE_BIG_ENDIAN
#define IEEE_MC68k
#else
#define IEEE_8087
#endif
#define MALLOC xmalloc
static void *xmalloc(size_t size)
{
void *p;
p = malloc(size);
if (!p) {
fprintf(stderr, "Out of memory");
abort();
}
return p;
}
#ifdef MULTIPLE_THREADS
/* Enable locking to support multi-threaded applications */
#include <pthread.h>
static pthread_mutex_t private_dtoa_lock[2] = {
PTHREAD_MUTEX_INITIALIZER,
PTHREAD_MUTEX_INITIALIZER
};
#define dtoa_get_threadno pthread_self
void
set_max_dtoa_threads(unsigned int n);
#define ACQUIRE_DTOA_LOCK(n) do { \
int r = pthread_mutex_lock(&private_dtoa_lock[n]); \
if (r) { \
fprintf(stderr, "pthread_mutex_lock failed with %d\n", r); \
abort(); \
} \
} while (0)
#define FREE_DTOA_LOCK(n) do { \
int r = pthread_mutex_unlock(&private_dtoa_lock[n]); \
if (r) { \
fprintf(stderr, "pthread_mutex_unlock failed with %d\n", r);\
abort(); \
} \
} while (0)
#endif /* MULTIPLE_THREADS */
#endif /* _DTOA_CONFIG_H */
/* vi:ai et sw=4 ts=4:
*/

View File

@ -0,0 +1,211 @@
/* fpconv - Floating point conversion routines
*
* Copyright (c) 2011-2012 Mark Pulford <mark@kyne.com.au>
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/* JSON uses a '.' decimal separator. strtod() / sprintf() under C libraries
* with locale support will break when the decimal separator is a comma.
*
* fpconv_* will work around these issues with a translation buffer if required.
*/
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include <string.h>
#include "fpconv.h"
/* Workaround for MSVC */
#ifdef _MSC_VER
#define inline __inline
#define snprintf sprintf_s
#endif
/* Lua CJSON assumes the locale is the same for all threads within a
* process and doesn't change after initialisation.
*
* This avoids the need for per thread storage or expensive checks
* per call. */
static char locale_decimal_point = '.';
/* In theory multibyte decimal_points are possible, but
* Lua CJSON only supports UTF-8 and known locales only have
* single byte decimal points ([.,]).
*
* localconv() may not be thread safe (=>crash), and nl_langinfo() is
* not supported on some platforms. Use sprintf() instead - if the
* locale does change, at least Lua CJSON won't crash. */
static void fpconv_update_locale(void)
{
char buf[8];
snprintf(buf, sizeof(buf), "%g", 0.5);
/* Failing this test might imply the platform has a buggy dtoa
* implementation or wide characters */
if (buf[0] != '0' || buf[2] != '5' || buf[3] != 0) {
fprintf(stderr, "Error: wide characters found or printf() bug.");
abort();
}
locale_decimal_point = buf[1];
}
/* Check for a valid number character: [-+0-9a-yA-Y.]
* Eg: -0.6e+5, infinity, 0xF0.F0pF0
*
* Used to find the probable end of a number. It doesn't matter if
* invalid characters are counted - strtod() will find the valid
* number if it exists. The risk is that slightly more memory might
* be allocated before a parse error occurs. */
static inline int valid_number_character(char ch)
{
char lower_ch;
if ('0' <= ch && ch <= '9')
return 1;
if (ch == '-' || ch == '+' || ch == '.')
return 1;
/* Hex digits, exponent (e), base (p), "infinity",.. */
lower_ch = ch | 0x20;
if ('a' <= lower_ch && lower_ch <= 'y')
return 1;
return 0;
}
/* Calculate the size of the buffer required for a strtod locale
* conversion. */
static int strtod_buffer_size(const char *s)
{
const char *p = s;
while (valid_number_character(*p))
p++;
return p - s;
}
/* Similar to strtod(), but must be passed the current locale's decimal point
* character. Guaranteed to be called at the start of any valid number in a string */
double fpconv_strtod(const char *nptr, char **endptr)
{
char localbuf[FPCONV_G_FMT_BUFSIZE];
char *buf, *endbuf, *dp;
int buflen;
double value;
/* System strtod() is fine when decimal point is '.' */
if (locale_decimal_point == '.')
return strtod(nptr, endptr);
buflen = strtod_buffer_size(nptr);
if (!buflen) {
/* No valid characters found, standard strtod() return */
*endptr = (char *)nptr;
return 0;
}
/* Duplicate number into buffer */
if (buflen >= FPCONV_G_FMT_BUFSIZE) {
/* Handle unusually large numbers */
buf = malloc(buflen + 1);
if (!buf) {
fprintf(stderr, "Out of memory");
abort();
}
} else {
/* This is the common case.. */
buf = localbuf;
}
memcpy(buf, nptr, buflen);
buf[buflen] = 0;
/* Update decimal point character if found */
dp = strchr(buf, '.');
if (dp)
*dp = locale_decimal_point;
value = strtod(buf, &endbuf);
*endptr = (char *)&nptr[endbuf - buf];
if (buflen >= FPCONV_G_FMT_BUFSIZE)
free(buf);
return value;
}
/* "fmt" must point to a buffer of at least 6 characters */
static void set_number_format(char *fmt, int precision)
{
int d1, d2, i;
assert(1 <= precision && precision <= 16);
/* Create printf format (%.14g) from precision */
d1 = precision / 10;
d2 = precision % 10;
fmt[0] = '%';
fmt[1] = '.';
i = 2;
if (d1) {
fmt[i++] = '0' + d1;
}
fmt[i++] = '0' + d2;
fmt[i++] = 'g';
fmt[i] = 0;
}
/* Assumes there is always at least 32 characters available in the target buffer */
int fpconv_g_fmt(char *str, double num, int precision)
{
char buf[FPCONV_G_FMT_BUFSIZE];
char fmt[6];
int len;
char *b;
set_number_format(fmt, precision);
/* Pass through when decimal point character is dot. */
if (locale_decimal_point == '.')
return snprintf(str, FPCONV_G_FMT_BUFSIZE, fmt, num);
/* snprintf() to a buffer then translate for other decimal point characters */
len = snprintf(buf, FPCONV_G_FMT_BUFSIZE, fmt, num);
/* Copy into target location. Translate decimal point if required */
b = buf;
do {
*str++ = (*b == locale_decimal_point ? '.' : *b);
} while(*b++);
return len;
}
void fpconv_init(void)
{
fpconv_update_locale();
}
/* vi:ai et sw=4 ts=4:
*/

View File

@ -0,0 +1,32 @@
/* Lua CJSON floating point conversion routines */
/* Buffer required to store the largest string representation of a double.
*
* Longest double printed with %.14g is 21 characters long:
* -1.7976931348623e+308 */
# define FPCONV_G_FMT_BUFSIZE 32
#ifdef USE_INTERNAL_FPCONV
#ifdef MULTIPLE_THREADS
#include "dtoa_config.h"
#include <unistd.h>
static inline void fpconv_init()
{
// Add one to try and avoid core id multiplier alignment
set_max_dtoa_threads((sysconf(_SC_NPROCESSORS_CONF) + 1) * 3);
}
#else
static inline void fpconv_init()
{
/* Do nothing - not required */
}
#endif
#else
extern void fpconv_init(void);
#endif
extern int fpconv_g_fmt(char*, double, int);
extern double fpconv_strtod(const char*, char**);
/* vi:ai et sw=4 ts=4:
*/

View File

@ -0,0 +1,111 @@
/****************************************************************
*
* The author of this software is David M. Gay.
*
* Copyright (c) 1991, 1996 by Lucent Technologies.
*
* Permission to use, copy, modify, and distribute this software for any
* purpose without fee is hereby granted, provided that this entire notice
* is included in all copies of any software which is or includes a copy
* or modification of this software and in all copies of the supporting
* documentation for such software.
*
* THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR IMPLIED
* WARRANTY. IN PARTICULAR, NEITHER THE AUTHOR NOR LUCENT MAKES ANY
* REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE MERCHANTABILITY
* OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR PURPOSE.
*
***************************************************************/
/* g_fmt(buf,x) stores the closest decimal approximation to x in buf;
* it suffices to declare buf
* char buf[32];
*/
#ifdef __cplusplus
extern "C" {
#endif
extern char *dtoa(double, int, int, int *, int *, char **);
extern int g_fmt(char *, double, int);
extern void freedtoa(char*);
#ifdef __cplusplus
}
#endif
int
fpconv_g_fmt(char *b, double x, int precision)
{
register int i, k;
register char *s;
int decpt, j, sign;
char *b0, *s0, *se;
b0 = b;
#ifdef IGNORE_ZERO_SIGN
if (!x) {
*b++ = '0';
*b = 0;
goto done;
}
#endif
s = s0 = dtoa(x, 2, precision, &decpt, &sign, &se);
if (sign)
*b++ = '-';
if (decpt == 9999) /* Infinity or NaN */ {
while((*b++ = *s++));
/* "b" is used to calculate the return length. Decrement to exclude the
* Null terminator from the length */
b--;
goto done0;
}
if (decpt <= -4 || decpt > precision) {
*b++ = *s++;
if (*s) {
*b++ = '.';
while((*b = *s++))
b++;
}
*b++ = 'e';
/* sprintf(b, "%+.2d", decpt - 1); */
if (--decpt < 0) {
*b++ = '-';
decpt = -decpt;
}
else
*b++ = '+';
for(j = 2, k = 10; 10*k <= decpt; j++, k *= 10);
for(;;) {
i = decpt / k;
*b++ = i + '0';
if (--j <= 0)
break;
decpt -= i*k;
decpt *= 10;
}
*b = 0;
}
else if (decpt <= 0) {
*b++ = '0';
*b++ = '.';
for(; decpt < 0; decpt++)
*b++ = '0';
while((*b++ = *s++));
b--;
}
else {
while((*b = *s++)) {
b++;
if (--decpt == 0 && *s)
*b++ = '.';
}
for(; decpt > 0; decpt--)
*b++ = '0';
*b = 0;
}
done0:
freedtoa(s0);
#ifdef IGNORE_ZERO_SIGN
done:
#endif
return b - b0;
}

View File

@ -0,0 +1,60 @@
package = "lua-cjson"
version = "2.1.0.11-1"
source = {
url = "git+https://github.com/openresty/lua-cjson",
tag = "2.1.0.11",
}
description = {
summary = "A fast JSON encoding/parsing module",
detailed = [[
The Lua CJSON module provides JSON support for Lua. It features:
- Fast, standards compliant encoding/parsing routines
- Full support for JSON with UTF-8, including decoding surrogate pairs
- Optional run-time support for common exceptions to the JSON specification
(infinity, NaN,..)
- No dependencies on other libraries
]],
homepage = "http://www.kyne.com.au/~mark/software/lua-cjson.php",
license = "MIT"
}
dependencies = {
"lua >= 5.1"
}
build = {
type = "builtin",
modules = {
cjson = {
sources = { "lua_cjson.c", "strbuf.c", "fpconv.c" },
defines = {
-- LuaRocks does not support platform specific configuration for Solaris.
-- Uncomment the line below on Solaris platforms if required.
-- "USE_INTERNAL_ISINF"
}
},
["cjson.safe"] = {
sources = { "lua_cjson.c", "strbuf.c", "fpconv.c" }
}
},
install = {
lua = {
["cjson.util"] = "lua/cjson/util.lua"
},
bin = {
json2lua = "lua/json2lua.lua",
lua2json = "lua/lua2json.lua"
}
},
-- Override default build options (per platform)
platforms = {
win32 = { modules = { cjson = { defines = {
"DISABLE_INVALID_NUMBERS", "USE_INTERNAL_ISINF"
} } } }
},
copy_directories = { "tests" }
}
-- vi:ai et sw=4 ts=4:

View File

@ -0,0 +1,80 @@
%define luaver 5.1
%define lualibdir %{_libdir}/lua/%{luaver}
%define luadatadir %{_datadir}/lua/%{luaver}
Name: lua-cjson
Version: 2.1devel
Release: 1%{?dist}
Summary: A fast JSON encoding/parsing module for Lua
Group: Development/Libraries
License: MIT
URL: http://www.kyne.com.au/~mark/software/lua-cjson/
Source0: http://www.kyne.com.au/~mark/software/lua-cjson/download/lua-cjson-%{version}.tar.gz
BuildRoot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
BuildRequires: lua >= %{luaver}, lua-devel >= %{luaver}
Requires: lua >= %{luaver}
%description
The Lua CJSON module provides JSON support for Lua. It features:
- Fast, standards compliant encoding/parsing routines
- Full support for JSON with UTF-8, including decoding surrogate pairs
- Optional run-time support for common exceptions to the JSON specification
(infinity, NaN,..)
- No dependencies on other libraries
%prep
%setup -q
%build
make %{?_smp_mflags} CFLAGS="%{optflags}" LUA_INCLUDE_DIR="%{_includedir}"
%install
rm -rf "$RPM_BUILD_ROOT"
make install DESTDIR="$RPM_BUILD_ROOT" LUA_CMODULE_DIR="%{lualibdir}"
make install-extra DESTDIR="$RPM_BUILD_ROOT" LUA_MODULE_DIR="%{luadatadir}" \
LUA_BIN_DIR="%{_bindir}"
%clean
rm -rf "$RPM_BUILD_ROOT"
%preun
/bin/rm -f "%{luadatadir}/cjson/tests/utf8.dat"
%files
%defattr(-,root,root,-)
%doc LICENSE NEWS performance.html performance.txt manual.html manual.txt rfc4627.txt THANKS
%{lualibdir}/*
%{luadatadir}/*
%{_bindir}/*
%changelog
* Thu Mar 1 2012 Mark Pulford <mark@kyne.com.au> - 2.1.0-1
- Update for 2.1.0
* Sun Jan 22 2012 Mark Pulford <mark@kyne.com.au> - 2.0.0-1
- Update for 2.0.0
- Install lua2json / json2lua utilities
* Wed Nov 27 2011 Mark Pulford <mark@kyne.com.au> - 1.0.4-1
- Update for 1.0.4
* Wed Sep 15 2011 Mark Pulford <mark@kyne.com.au> - 1.0.3-1
- Update for 1.0.3
* Sun May 29 2011 Mark Pulford <mark@kyne.com.au> - 1.0.2-1
- Update for 1.0.2
* Sun May 10 2011 Mark Pulford <mark@kyne.com.au> - 1.0.1-1
- Update for 1.0.1
* Sun May 1 2011 Mark Pulford <mark@kyne.com.au> - 1.0-1
- Initial package

View File

@ -0,0 +1,312 @@
local json = require "cjson"
local unpack = unpack or table.unpack
local maxn = table.maxn or function(t)
local max = 0
for k,v in pairs(t) do
if type(k) == "number" and k > max then
max = k
end
end
return max
end
local _one_of_mt = {}
local function one_of(t)
setmetatable(t, _one_of_mt)
return t
end
local function is_one_of(t)
return type(t) == "table" and getmetatable(t) == _one_of_mt
end
-- Various common routines used by the Lua CJSON package
--
-- Mark Pulford <mark@kyne.com.au>
-- Determine whether a Lua table can be treated as an array.
-- Explicitly returns "not an array" for very sparse arrays.
-- Returns:
-- -1 Not an array
-- 0 Empty table
-- >0 Highest index in the array
local function is_array(table)
local max = 0
local count = 0
for k, v in pairs(table) do
if type(k) == "number" then
if k > max then max = k end
count = count + 1
else
return -1
end
end
if max > count * 2 then
return -1
end
return max
end
local serialise_value
local function serialise_table(value, indent, depth)
local spacing, spacing2, indent2
if indent then
spacing = "\n" .. indent
spacing2 = spacing .. " "
indent2 = indent .. " "
else
spacing, spacing2, indent2 = " ", " ", false
end
depth = depth + 1
if depth > 50 then
return "Cannot serialise any further: too many nested tables"
end
local max = is_array(value)
local comma = false
local prefix = "{"
if is_one_of(value) then
prefix = "ONE_OF{"
end
local fragment = { prefix .. spacing2 }
if max > 0 then
-- Serialise array
for i = 1, max do
if comma then
table.insert(fragment, "," .. spacing2)
end
table.insert(fragment, serialise_value(value[i], indent2, depth))
comma = true
end
elseif max < 0 then
-- Serialise table
for k, v in pairs(value) do
if comma then
table.insert(fragment, "," .. spacing2)
end
table.insert(fragment,
("[%s] = %s"):format(serialise_value(k, indent2, depth),
serialise_value(v, indent2, depth)))
comma = true
end
end
table.insert(fragment, spacing .. "}")
return table.concat(fragment)
end
function serialise_value(value, indent, depth)
if indent == nil then indent = "" end
if depth == nil then depth = 0 end
if value == json.null then
return "json.null"
elseif type(value) == "string" then
return ("%q"):format(value)
elseif type(value) == "nil" or type(value) == "number" or
type(value) == "boolean" then
return tostring(value)
elseif type(value) == "table" then
return serialise_table(value, indent, depth)
else
return "\"<" .. type(value) .. ">\""
end
end
local function file_load(filename)
local file
if filename == nil then
file = io.stdin
else
local err
file, err = io.open(filename, "rb")
if file == nil then
error(("Unable to read '%s': %s"):format(filename, err))
end
end
local data = file:read("*a")
if filename ~= nil then
file:close()
end
if data == nil then
error("Failed to read " .. filename)
end
return data
end
local function file_save(filename, data)
local file
if filename == nil then
file = io.stdout
else
local err
file, err = io.open(filename, "wb")
if file == nil then
error(("Unable to write '%s': %s"):format(filename, err))
end
end
file:write(data)
if filename ~= nil then
file:close()
end
end
local function compare_values(val1, val2)
if is_one_of(val2) then
for _, option in ipairs(val2) do
if compare_values(val1, option) then
return true
end
end
return false
end
local type1 = type(val1)
local type2 = type(val2)
if type1 ~= type2 then
return false
end
-- Check for NaN
if type1 == "number" and val1 ~= val1 and val2 ~= val2 then
return true
end
if type1 ~= "table" then
return val1 == val2
end
-- check_keys stores all the keys that must be checked in val2
local check_keys = {}
for k, _ in pairs(val1) do
check_keys[k] = true
end
for k, v in pairs(val2) do
if not check_keys[k] then
return false
end
if not compare_values(val1[k], val2[k]) then
return false
end
check_keys[k] = nil
end
for k, _ in pairs(check_keys) do
-- Not the same if any keys from val1 were not found in val2
return false
end
return true
end
local test_count_pass = 0
local test_count_total = 0
local function run_test_summary()
return test_count_pass, test_count_total
end
local function run_test(testname, func, input, should_work, output)
local function status_line(name, status, value)
local statusmap = { [true] = ":success", [false] = ":error" }
if status ~= nil then
name = name .. statusmap[status]
end
print(("[%s] %s"):format(name, serialise_value(value, false)))
end
local result = {}
local tmp = { pcall(func, unpack(input)) }
local success = tmp[1]
for i = 2, maxn(tmp) do
result[i - 1] = tmp[i]
end
local correct = false
if success == should_work and compare_values(result, output) then
correct = true
test_count_pass = test_count_pass + 1
end
test_count_total = test_count_total + 1
local teststatus = { [true] = "PASS", [false] = "FAIL" }
print(("==> Test [%d] %s: %s"):format(test_count_total, testname,
teststatus[correct]))
status_line("Input", nil, input)
if not correct then
status_line("Expected", should_work, output)
end
status_line("Received", success, result)
print()
return correct, result
end
local function run_test_group(tests)
local function run_helper(name, func, input)
if type(name) == "string" and #name > 0 then
print("==> " .. name)
end
-- Not a protected call, these functions should never generate errors.
func(unpack(input or {}))
print()
end
for _, v in ipairs(tests) do
-- Run the helper if "should_work" is missing
if v[4] == nil then
run_helper(unpack(v))
else
run_test(unpack(v))
end
end
end
-- Run a Lua script in a separate environment
local function run_script(script, env)
local env = env or {}
local func
-- Use setfenv() if it exists, otherwise assume Lua 5.2 load() exists
if _G.setfenv then
func = loadstring(script)
if func then
setfenv(func, env)
end
else
func = load(script, nil, nil, env)
end
if func == nil then
error("Invalid syntax.")
end
func()
return env
end
-- Export functions
return {
serialise_value = serialise_value,
file_load = file_load,
file_save = file_save,
compare_values = compare_values,
run_test_summary = run_test_summary,
run_test = run_test,
run_test_group = run_test_group,
run_script = run_script,
one_of = one_of
}
-- vi:ai et sw=4 ts=4:

View File

@ -0,0 +1,14 @@
#!/usr/bin/env lua
-- usage: json2lua.lua [json_file]
--
-- Eg:
-- echo '[ "testing" ]' | ./json2lua.lua
-- ./json2lua.lua test.json
local json = require "cjson"
local util = require "cjson.util"
local json_text = util.file_load(arg[1])
local t = json.decode(json_text)
print(util.serialise_value(t))

View File

@ -0,0 +1,20 @@
#!/usr/bin/env lua
-- usage: lua2json.lua [lua_file]
--
-- Eg:
-- echo '{ "testing" }' | ./lua2json.lua
-- ./lua2json.lua test.lua
local json = require "cjson"
local util = require "cjson.util"
local env = {
json = { null = json.null },
null = json.null
}
local t = util.run_script("data = " .. util.file_load(arg[1]), env)
print(json.encode(t.data))
-- vi:ai et sw=4 ts=4:

File diff suppressed because it is too large

View File

@ -0,0 +1,612 @@
= Lua CJSON 2.1devel Manual =
Mark Pulford <mark@kyne.com.au>
:revdate: 1st March 2012
Overview
--------
The Lua CJSON module provides JSON support for Lua.
*Features*::
- Fast, standards compliant encoding/parsing routines
- Full support for JSON with UTF-8, including decoding surrogate pairs
- Optional run-time support for common exceptions to the JSON
specification (infinity, NaN,..)
- No dependencies on other libraries
*Caveats*::
- UTF-16 and UTF-32 are not supported
Lua CJSON is covered by the MIT license. Review the file +LICENSE+ for
details.
The latest version of this software is available from the
http://www.kyne.com.au/%7Emark/software/lua-cjson.php[Lua CJSON website].
Feel free to email me if you have any patches, suggestions, or comments.
Installation
------------
Lua CJSON requires either http://www.lua.org[Lua] 5.1, Lua 5.2, or
http://www.luajit.org[LuaJIT] to build.
The build method can be selected from 4 options:
Make:: Unix (including Linux, BSD, Mac OSX & Solaris), Windows
CMake:: Unix, Windows
RPM:: Linux
LuaRocks:: Unix, Windows
Make
~~~~
The included +Makefile+ has generic settings.
First, review and update the included makefile to suit your platform (if
required).
Next, build and install the module:
[source,sh]
make install
Or install manually into your Lua module directory:
[source,sh]
make
cp cjson.so $LUA_MODULE_DIRECTORY
CMake
~~~~~
http://www.cmake.org[CMake] can generate build configuration for many
different platforms (including Unix and Windows).
First, generate the makefile for your platform using CMake. If CMake is
unable to find Lua, manually set the +LUA_DIR+ environment variable to
the base prefix of your Lua 5.1 installation.
While +cmake+ is used in the example below, +ccmake+ or +cmake-gui+ may
be used to present an interface for changing the default build options.
[source,sh]
mkdir build
cd build
# Optional: export LUA_DIR=$LUA51_PREFIX
cmake ..
Next, build and install the module:
[source,sh]
make install
# Or:
make
cp cjson.so $LUA_MODULE_DIRECTORY
Review the
http://www.cmake.org/cmake/help/documentation.html[CMake documentation]
for further details.
RPM
~~~
Linux distributions using http://rpm.org[RPM] can create a package via
the included RPM spec file. Ensure the +rpm-build+ package (or similar)
has been installed.
Build and install the module via RPM:
[source,sh]
rpmbuild -tb lua-cjson-2.1devel.tar.gz
rpm -Uvh $LUA_CJSON_RPM
LuaRocks
~~~~~~~~
http://luarocks.org[LuaRocks] can be used to install and manage Lua
modules on a wide range of platforms (including Windows).
First, extract the Lua CJSON source package.
Next, install the module:
[source,sh]
cd lua-cjson-2.1devel
luarocks make
[NOTE]
LuaRocks does not support platform specific configuration for Solaris.
On Solaris, you may need to manually uncomment +USE_INTERNAL_ISINF+ in
the rockspec before building this module.
Review the http://luarocks.org/en/Documentation[LuaRocks documentation]
for further details.
[[build_options]]
Build Options (#define)
~~~~~~~~~~~~~~~~~~~~~~~
Lua CJSON offers several +#define+ build options to address portability
issues, and enable non-default features. Some build methods may
automatically set platform specific options if required. Other features
should be enabled manually.
USE_INTERNAL_ISINF:: Workaround for Solaris platforms missing +isinf+.
DISABLE_INVALID_NUMBERS:: Recommended on platforms where +strtod+ /
+sprintf+ are not POSIX compliant (eg, Windows MinGW). Prevents
+cjson.encode_invalid_numbers+ and +cjson.decode_invalid_numbers+ from
being enabled. However, +cjson.encode_invalid_numbers+ may still be
set to +"null"+. When using the Lua CJSON built-in floating point
conversion this option is unnecessary and is ignored.
Built-in floating point conversion
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Lua CJSON may be built with David Gay's
http://www.netlib.org/fp/[floating point conversion routines]. This can
increase overall performance by up to 50% on some platforms when
converting a large amount of numeric data. However, this option reduces
portability and is disabled by default.
USE_INTERNAL_FPCONV:: Enable internal number conversion routines.
IEEE_BIG_ENDIAN:: Must be set on big endian architectures.
MULTIPLE_THREADS:: Must be set if Lua CJSON may be used in a
multi-threaded application. Requires the _pthreads_ library.
API (Functions)
---------------
Synopsis
~~~~~~~~
[source,lua]
------------
-- Module instantiation
local cjson = require "cjson"
local cjson2 = cjson.new()
local cjson_safe = require "cjson.safe"
-- Translate Lua value to/from JSON
text = cjson.encode(value)
value = cjson.decode(text)
-- Get and/or set Lua CJSON configuration
setting = cjson.decode_invalid_numbers([setting])
setting = cjson.encode_invalid_numbers([setting])
keep = cjson.encode_keep_buffer([keep])
depth = cjson.encode_max_depth([depth])
depth = cjson.decode_max_depth([depth])
convert, ratio, safe = cjson.encode_sparse_array([convert[, ratio[, safe]]])
------------
Module Instantiation
~~~~~~~~~~~~~~~~~~~~
[source,lua]
------------
local cjson = require "cjson"
local cjson2 = cjson.new()
local cjson_safe = require "cjson.safe"
------------
Import Lua CJSON via the Lua +require+ function. Lua CJSON does not
register a global module table.
The +cjson+ module will throw an error during JSON conversion if any
invalid data is encountered. Refer to <<cjson_encode,+cjson.encode+>>
and <<cjson_decode,+cjson.decode+>> for details.
The +cjson.safe+ module behaves identically to the +cjson+ module,
except when errors are encountered during JSON conversion. On error, the
+cjson_safe.encode+ and +cjson_safe.decode+ functions will return
+nil+ followed by the error message.
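A minimal sketch of the difference (behaviour shown in comments):
[source,lua]
------------
local cjson      = require "cjson"
local cjson_safe = require "cjson.safe"

-- cjson raises a Lua error on invalid input
local ok, err = pcall(cjson.decode, "not valid json")
-- ok == false, err holds the error message

-- cjson.safe returns nil plus the error message instead
local value, err2 = cjson_safe.decode("not valid json")
-- value == nil, err2 holds the error message
------------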
+cjson.new+ can be used to instantiate an independent copy of the Lua
CJSON module. The new module has a separate persistent encoding buffer,
and default settings.
Lua CJSON can support Lua implementations using multiple preemptive
threads within a single Lua state provided the persistent encoding
buffer is not shared. This can be achieved by one of the following
methods:
- Disabling the persistent encoding buffer with
<<encode_keep_buffer,+cjson.encode_keep_buffer+>>
- Ensuring each thread calls <<encode,+cjson.encode+>> separately (ie,
treat +cjson.encode+ as non-reentrant).
- Using a separate +cjson+ module table per preemptive thread
(+cjson.new+)
[NOTE]
Lua CJSON uses +strtod+ and +snprintf+ to perform numeric conversion as
they are usually well supported, fast and bug free. However, these
functions require a workaround for JSON encoding/parsing under locales
using a comma decimal separator. Lua CJSON detects the current locale
during instantiation to determine and automatically implement the
workaround if required. Lua CJSON should be reinitialised via
+cjson.new+ if the locale of the current process changes. Using a
different locale per thread is not supported.
decode
~~~~~~
[source,lua]
------------
value = cjson.decode(json_text)
------------
+cjson.decode+ will deserialise any UTF-8 JSON string into a Lua value
or table.
UTF-16 and UTF-32 JSON strings are not supported.
+cjson.decode+ requires that any NULL (ASCII 0) and double quote (ASCII
34) characters are escaped within strings. All escape codes will be
decoded and other bytes will be passed transparently. UTF-8 characters
are not validated during decoding and should be checked elsewhere if
required.
JSON +null+ will be converted to a NULL +lightuserdata+ value. This can
be compared with +cjson.null+ for convenience.
By default, numbers incompatible with the JSON specification (infinity,
NaN, hexadecimal) can be decoded. This default can be changed with
<<decode_invalid_numbers,+cjson.decode_invalid_numbers+>>.
.Example: Decoding
[source,lua]
json_text = '[ true, { "foo": "bar" } ]'
value = cjson.decode(json_text)
-- Returns: { true, { foo = "bar" } }
[CAUTION]
Care must be taken after decoding JSON objects with numeric keys. Each
numeric key will be stored as a Lua +string+. Any subsequent code
assuming type +number+ may break.
[[decode_invalid_numbers]]
decode_invalid_numbers
~~~~~~~~~~~~~~~~~~~~~~
[source,lua]
------------
setting = cjson.decode_invalid_numbers([setting])
-- "setting" must be a boolean. Default: true.
------------
Lua CJSON may generate an error when trying to decode numbers not
supported by the JSON specification. _Invalid numbers_ are defined as:
- infinity
- NaN
- hexadecimal
Available settings:
+true+:: Accept and decode _invalid numbers_. This is the default
setting.
+false+:: Throw an error when _invalid numbers_ are encountered.
The current setting is always returned, and is only updated when an
argument is provided.
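A short sketch of both settings:
[source,lua]
------------
local cjson = require "cjson"

cjson.decode("[ NaN, Infinity ]")     -- accepted with the default setting
cjson.decode_invalid_numbers(false)
cjson.decode("[ NaN, Infinity ]")     -- now raises an error
------------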
[[decode_max_depth]]
decode_max_depth
~~~~~~~~~~~~~~~~
[source,lua]
------------
depth = cjson.decode_max_depth([depth])
-- "depth" must be a positive integer. Default: 1000.
------------
Lua CJSON will generate an error when parsing deeply nested JSON once
the maximum array/object depth has been exceeded. This check prevents
unnecessarily complicated JSON from slowing down the application, or
crashing the application due to lack of process stack space.
An error may be generated before the depth limit is hit if Lua is unable
to allocate more objects on the Lua stack.
By default, Lua CJSON will reject JSON with arrays and/or objects nested
more than 1000 levels deep.
The current setting is always returned, and is only updated when an
argument is provided.
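For example, with a deliberately small limit:
[source,lua]
------------
local cjson = require "cjson"

cjson.decode_max_depth(3)
cjson.decode('[ [ [ "three levels deep" ] ] ]')      -- within the limit
cjson.decode('[ [ [ [ "four levels deep" ] ] ] ]')   -- raises an error
------------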
[[encode]]
encode
~~~~~~
[source,lua]
------------
json_text = cjson.encode(value)
------------
+cjson.encode+ will serialise a Lua value into a string containing the
JSON representation.
+cjson.encode+ supports the following types:
- +boolean+
- +lightuserdata+ (NULL value only)
- +nil+
- +number+
- +string+
- +table+
The remaining Lua types will generate an error:
- +function+
- +lightuserdata+ (non-NULL values)
- +thread+
- +userdata+
By default, numbers are encoded with 14 significant digits. Refer to
<<encode_number_precision,+cjson.encode_number_precision+>> for details.
Lua CJSON will escape the following characters within each UTF-8 string:
- Control characters (ASCII 0 - 31)
- Double quote (ASCII 34)
- Forward slash (ASCII 47)
- Backslash (ASCII 92)
- Delete (ASCII 127)
All other bytes are passed transparently.
[CAUTION]
=========
Lua CJSON will successfully encode/decode binary strings, but this is
technically not supported by JSON and may not be compatible with other
JSON libraries. To ensure the output is valid JSON, applications should
ensure all Lua strings passed to +cjson.encode+ are UTF-8.
Base64 is commonly used to encode binary data as the most efficient
encoding under UTF-8 can only reduce the encoded size by a further
&#126;8%. Lua Base64 routines can be found in the
http://w3.impa.br/%7Ediego/software/luasocket/[LuaSocket] and
http://www.tecgraf.puc-rio.br/%7Elhf/ftp/lua/#lbase64[lbase64] packages.
=========
Lua CJSON uses a heuristic to determine whether to encode a Lua table as
a JSON array or an object. A Lua table with only positive integer keys
of type +number+ will be encoded as a JSON array. All other tables will
be encoded as a JSON object.
Lua CJSON does not use metamethods when serialising tables.
- +rawget+ is used to iterate over Lua arrays
- +next+ is used to iterate over Lua objects
Lua arrays with missing entries (_sparse arrays_) may optionally be
encoded in several different ways. Refer to
<<encode_sparse_array,+cjson.encode_sparse_array+>> for details.
JSON object keys are always strings. Hence +cjson.encode+ only supports
table keys which are type +number+ or +string+. All other types will
generate an error.
[NOTE]
Standards compliant JSON must be encapsulated in either an object (+{}+)
or an array (+[]+). If strictly standards compliant JSON is desired, a
table must be passed to +cjson.encode+.
By default, encoding the following Lua values will generate errors:
- Numbers incompatible with the JSON specification (infinity, NaN)
- Tables nested more than 1000 levels deep
- Excessively sparse Lua arrays
These defaults can be changed with:
- <<encode_invalid_numbers,+cjson.encode_invalid_numbers+>>
- <<encode_max_depth,+cjson.encode_max_depth+>>
- <<encode_sparse_array,+cjson.encode_sparse_array+>>
.Example: Encoding
[source,lua]
value = { true, { foo = "bar" } }
json_text = cjson.encode(value)
-- Returns: '[true,{"foo":"bar"}]'
[[encode_invalid_numbers]]
encode_invalid_numbers
~~~~~~~~~~~~~~~~~~~~~~
[source,lua]
------------
setting = cjson.encode_invalid_numbers([setting])
-- "setting" must be a boolean or "null". Default: false.
------------
Lua CJSON may generate an error when encoding floating point numbers not
supported by the JSON specification (_invalid numbers_):
- infinity
- NaN
Available settings:
+true+:: Allow _invalid numbers_ to be encoded using the Javascript
compatible values +NaN+ and +Infinity+. This will generate
non-standard JSON, but these values are supported by some libraries.
+"null"+:: Encode _invalid numbers_ as a JSON +null+ value. This allows
infinity and NaN to be encoded into valid JSON.
+false+:: Throw an error when attempting to encode _invalid numbers_.
This is the default setting.
The current setting is always returned, and is only updated when an
argument is provided.
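A short sketch of the +"null"+ setting:
[source,lua]
------------
local cjson = require "cjson"

-- With the default setting the following line would raise an error:
-- cjson.encode({ 0/0, 1/0 })

cjson.encode_invalid_numbers("null")
cjson.encode({ 0/0, 1/0 })   -- Returns: '[null,null]'
------------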
[[encode_keep_buffer]]
encode_keep_buffer
~~~~~~~~~~~~~~~~~~
[source,lua]
------------
keep = cjson.encode_keep_buffer([keep])
-- "keep" must be a boolean. Default: true.
------------
Lua CJSON can reuse the JSON encoding buffer to improve performance.
Available settings:
+true+:: The buffer will grow to the largest size required and is not
freed until the Lua CJSON module is garbage collected. This is the
default setting.
+false+:: Free the encode buffer after each call to +cjson.encode+.
The current setting is always returned, and is only updated when an
argument is provided.
[[encode_max_depth]]
encode_max_depth
~~~~~~~~~~~~~~~~
[source,lua]
------------
depth = cjson.encode_max_depth([depth])
-- "depth" must be a positive integer. Default: 1000.
------------
Once the maximum table depth has been exceeded Lua CJSON will generate
an error. This prevents a deeply nested or recursive data structure from
crashing the application.
By default, Lua CJSON will generate an error when trying to encode data
structures with more than 1000 nested tables.
The current setting is always returned, and is only updated when an
argument is provided.
.Example: Recursive Lua table
[source,lua]
a = {}; a[1] = a
[[encode_number_precision]]
encode_number_precision
~~~~~~~~~~~~~~~~~~~~~~~
[source,lua]
------------
precision = cjson.encode_number_precision([precision])
-- "precision" must be an integer between 1 and 14. Default: 14.
------------
The amount of significant digits returned by Lua CJSON when encoding
numbers can be changed to balance accuracy versus performance. For data
structures containing many numbers, setting
+cjson.encode_number_precision+ to a smaller integer, for example +3+,
can improve encoding performance by up to 50%.
By default, Lua CJSON will output 14 significant digits when converting
a number to text.
The current setting is always returned, and is only updated when an
argument is provided.
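For example, trading accuracy for output size and speed:
[source,lua]
------------
local cjson = require "cjson"

cjson.encode(math.pi)             -- Returns: '3.1415926535898'
cjson.encode_number_precision(3)
cjson.encode(math.pi)             -- Returns: '3.14'
------------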
[[encode_sparse_array]]
encode_sparse_array
~~~~~~~~~~~~~~~~~~~
[source,lua]
------------
convert, ratio, safe = cjson.encode_sparse_array([convert[, ratio[, safe]]])
-- "convert" must be a boolean. Default: false.
-- "ratio" must be a positive integer. Default: 2.
-- "safe" must be a positive integer. Default: 10.
------------
Lua CJSON classifies a Lua table into one of three kinds when encoding a
JSON array. This is determined by the number of values missing from the
Lua array as follows:
Normal:: All values are available.
Sparse:: At least 1 value is missing.
Excessively sparse:: The number of values missing exceeds the configured
ratio.
Lua CJSON encodes sparse Lua arrays as JSON arrays using JSON +null+ for
the missing entries.
An array is excessively sparse when all the following conditions are
met:
- +ratio+ > +0+
- _maximum_index_ > +safe+
- _maximum_index_ > _item_count_ * +ratio+
Lua CJSON will never consider an array to be _excessively sparse_ when
+ratio+ = +0+. The +safe+ limit ensures that small Lua arrays are always
encoded as sparse arrays.
By default, attempting to encode an _excessively sparse_ array will
generate an error. If +convert+ is set to +true+, _excessively sparse_
arrays will be converted to a JSON object.
The current settings are always returned. A particular setting is only
changed when the argument is provided (non-++nil++).
.Example: Encoding a sparse array
[source,lua]
cjson.encode({ [3] = "data" })
-- Returns: '[null,null,"data"]'
.Example: Enabling conversion to a JSON object
[source,lua]
cjson.encode_sparse_array(true)
cjson.encode({ [1000] = "excessively sparse" })
-- Returns: '{"1000":"excessively sparse"}'
API (Variables)
---------------
_NAME
~~~~~
The name of the Lua CJSON module (+"cjson"+).
_VERSION
~~~~~~~~
The version number of the Lua CJSON module (+"2.1devel"+).
null
~~~~
Lua CJSON decodes JSON +null+ as a Lua +lightuserdata+ NULL pointer.
+cjson.null+ is provided for comparison.
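A minimal sketch of the comparison:
.Example: Detecting a decoded null
[source,lua]
local value = cjson.decode("null")
print(value == cjson.null)
-- Prints: true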
[sect1]
References
----------
- http://tools.ietf.org/html/rfc4627[RFC 4627]
- http://www.json.org/[JSON website]
// vi:ft=asciidoc tw=72:

View File

@ -0,0 +1,89 @@
JSON module performance comparison under Lua
============================================
Mark Pulford <mark@kyne.com.au>
:revdate: January 22, 2012
This performance comparison aims to provide a guide to the relative
performance of several fast and popular JSON modules.
The examples used in this comparison were mostly sourced from the
http://json.org[JSON website] and
http://tools.ietf.org/html/rfc4627[RFC 4627].
Performance will vary widely between platforms and data sets. These
results should only be used as an approximation.
Modules
-------
The following JSON modules for Lua were tested:
http://chiselapp.com/user/dhkolf/repository/dkjson/[DKJSON 2.1]::
- Lua implementation with no dependencies on other libraries
- Supports LPeg to improve decode performance
https://github.com/brimworks/lua-yajl[Lua YAJL 2.0]::
- C wrapper for the YAJL library
http://www.kyne.com.au/%7Emark/software/lua-cjson.php[Lua CJSON 2.0.0]::
- C implementation with no dependencies on other libraries
Summary
-------
All modules were built and tested as follows:
DKJSON:: Tested with and without LPeg 0.10.2.
Lua YAJL:: Tested with YAJL 2.0.4.
Lua CJSON:: Tested with Libc and internal floating point conversion
routines.
The following Lua implementations were used for this comparison:
- http://www.lua.org[Lua 5.1.4] (_Lua_)
- http://www.luajit.org[LuaJIT 2.0.0-beta9] (_JIT_)
These results show the number of JSON operations per second sustained by
each module. All results have been normalised against the pure Lua
DKJSON implementation.
.Decoding performance
............................................................................
| DKJSON | Lua YAJL | Lua CJSON
| No LPeg With LPeg | | Libc Internal
| Lua JIT Lua JIT | Lua JIT | Lua JIT Lua JIT
example1 | 1x 2x 2.6x 3.4x | 7.1x 10x | 14x 20x 14x 20x
example2 | 1x 2.2x 2.9x 4.4x | 6.7x 9.9x | 14x 22x 14x 22x
example3 | 1x 2.1x 3x 4.3x | 6.9x 9.3x | 14x 21x 15x 22x
example4 | 1x 2x 2.5x 3.7x | 7.3x 10x | 12x 19x 12x 20x
example5 | 1x 2.2x 3x 4.5x | 7.8x 11x | 16x 24x 16x 24x
numbers | 1x 2.2x 2.3x 4x | 4.6x 5.5x | 8.9x 10x 13x 17x
rfc-example1 | 1x 2.1x 2.8x 4.3x | 6.1x 8.1x | 13x 19x 14x 21x
rfc-example2 | 1x 2.1x 3.1x 4.2x | 7.1x 9.2x | 15x 21x 17x 24x
types | 1x 2.2x 2.6x 4.3x | 5.3x 7.4x | 12x 20x 13x 21x
-------------|-------------------------|------------|-----------------------
= Average => | 1x 2.1x 2.7x 4.1x | 6.5x 9x | 13x 20x 14x 21x
............................................................................
.Encoding performance
.............................................................................
| DKJSON | Lua YAJL | Lua CJSON
| No LPeg With LPeg | | Libc Internal
| Lua JIT Lua JIT | Lua JIT | Lua JIT Lua JIT
example1 | 1x 1.8x 0.97x 1.6x | 3.1x 5.2x | 23x 29x 23x 29x
example2 | 1x 2x 0.97x 1.7x | 2.6x 4.3x | 22x 28x 22x 28x
example3 | 1x 1.9x 0.98x 1.6x | 2.8x 4.3x | 13x 15x 16x 18x
example4 | 1x 1.7x 0.96x 1.3x | 3.9x 6.1x | 15x 19x 17x 21x
example5 | 1x 2x 0.98x 1.7x | 2.7x 4.5x | 20x 23x 20x 23x
numbers | 1x 2.3x 1x 2.2x | 1.3x 1.9x | 3.8x 4.1x 4.2x 4.6x
rfc-example1 | 1x 1.9x 0.97x 1.6x | 2.2x 3.2x | 8.5x 9.3x 11x 12x
rfc-example2 | 1x 1.9x 0.98x 1.6x | 2.6x 3.9x | 10x 11x 17x 19x
types | 1x 2.2x 0.97x 2x | 1.2x 1.9x | 11x 13x 12x 14x
-------------|-------------------------|------------|-----------------------
= Average => | 1x 1.9x 0.98x 1.7x | 2.5x 3.9x | 14x 17x 16x 19x
.............................................................................
// vi:ft=asciidoc tw=72:

View File

@ -0,0 +1,563 @@
Network Working Group D. Crockford
Request for Comments: 4627 JSON.org
Category: Informational July 2006
The application/json Media Type for JavaScript Object Notation (JSON)
Status of This Memo
This memo provides information for the Internet community. It does
not specify an Internet standard of any kind. Distribution of this
memo is unlimited.
Copyright Notice
Copyright (C) The Internet Society (2006).
Abstract
JavaScript Object Notation (JSON) is a lightweight, text-based,
language-independent data interchange format. It was derived from
the ECMAScript Programming Language Standard. JSON defines a small
set of formatting rules for the portable representation of structured
data.
1. Introduction
JavaScript Object Notation (JSON) is a text format for the
serialization of structured data. It is derived from the object
literals of JavaScript, as defined in the ECMAScript Programming
Language Standard, Third Edition [ECMA].
JSON can represent four primitive types (strings, numbers, booleans,
and null) and two structured types (objects and arrays).
A string is a sequence of zero or more Unicode characters [UNICODE].
An object is an unordered collection of zero or more name/value
pairs, where a name is a string and a value is a string, number,
boolean, null, object, or array.
An array is an ordered sequence of zero or more values.
The terms "object" and "array" come from the conventions of
JavaScript.
JSON's design goals were for it to be minimal, portable, textual, and
a subset of JavaScript.
Crockford Informational [Page 1]
RFC 4627 JSON July 2006
1.1. Conventions Used in This Document
The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT",
"SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this
document are to be interpreted as described in [RFC2119].
The grammatical rules in this document are to be interpreted as
described in [RFC4234].
2. JSON Grammar
A JSON text is a sequence of tokens. The set of tokens includes six
structural characters, strings, numbers, and three literal names.
A JSON text is a serialized object or array.
JSON-text = object / array
These are the six structural characters:
begin-array = ws %x5B ws ; [ left square bracket
begin-object = ws %x7B ws ; { left curly bracket
end-array = ws %x5D ws ; ] right square bracket
end-object = ws %x7D ws ; } right curly bracket
name-separator = ws %x3A ws ; : colon
value-separator = ws %x2C ws ; , comma
Insignificant whitespace is allowed before or after any of the six
structural characters.
ws = *(
%x20 / ; Space
%x09 / ; Horizontal tab
%x0A / ; Line feed or New line
%x0D ; Carriage return
)
2.1. Values
A JSON value MUST be an object, array, number, or string, or one of
the following three literal names:
false null true
Crockford Informational [Page 2]
RFC 4627 JSON July 2006
The literal names MUST be lowercase. No other literal names are
allowed.
value = false / null / true / object / array / number / string
false = %x66.61.6c.73.65 ; false
null = %x6e.75.6c.6c ; null
true = %x74.72.75.65 ; true
2.2. Objects
An object structure is represented as a pair of curly brackets
surrounding zero or more name/value pairs (or members). A name is a
string. A single colon comes after each name, separating the name
from the value. A single comma separates a value from a following
name. The names within an object SHOULD be unique.
object = begin-object [ member *( value-separator member ) ]
end-object
member = string name-separator value
2.3. Arrays
An array structure is represented as square brackets surrounding zero
or more values (or elements). Elements are separated by commas.
array = begin-array [ value *( value-separator value ) ] end-array
2.4. Numbers
The representation of numbers is similar to that used in most
programming languages. A number contains an integer component that
may be prefixed with an optional minus sign, which may be followed by
a fraction part and/or an exponent part.
Octal and hex forms are not allowed. Leading zeros are not allowed.
A fraction part is a decimal point followed by one or more digits.
An exponent part begins with the letter E in upper or lowercase,
which may be followed by a plus or minus sign. The E and optional
sign are followed by one or more digits.
Numeric values that cannot be represented as sequences of digits
(such as Infinity and NaN) are not permitted.
Crockford Informational [Page 3]
RFC 4627 JSON July 2006
number = [ minus ] int [ frac ] [ exp ]
decimal-point = %x2E ; .
digit1-9 = %x31-39 ; 1-9
e = %x65 / %x45 ; e E
exp = e [ minus / plus ] 1*DIGIT
frac = decimal-point 1*DIGIT
int = zero / ( digit1-9 *DIGIT )
minus = %x2D ; -
plus = %x2B ; +
zero = %x30 ; 0
2.5. Strings
The representation of strings is similar to conventions used in the C
family of programming languages. A string begins and ends with
quotation marks. All Unicode characters may be placed within the
quotation marks except for the characters that must be escaped:
quotation mark, reverse solidus, and the control characters (U+0000
through U+001F).
Any character may be escaped. If the character is in the Basic
Multilingual Plane (U+0000 through U+FFFF), then it may be
represented as a six-character sequence: a reverse solidus, followed
by the lowercase letter u, followed by four hexadecimal digits that
encode the character's code point. The hexadecimal letters A through
F can be upper or lowercase. So, for example, a string containing
only a single reverse solidus character may be represented as
"\u005C".
Alternatively, there are two-character sequence escape
representations of some popular characters. So, for example, a
string containing only a single reverse solidus character may be
represented more compactly as "\\".
To escape an extended character that is not in the Basic Multilingual
Plane, the character is represented as a twelve-character sequence,
encoding the UTF-16 surrogate pair. So, for example, a string
containing only the G clef character (U+1D11E) may be represented as
"\uD834\uDD1E".
Crockford Informational [Page 4]
RFC 4627 JSON July 2006
string = quotation-mark *char quotation-mark
char = unescaped /
escape (
%x22 / ; " quotation mark U+0022
%x5C / ; \ reverse solidus U+005C
%x2F / ; / solidus U+002F
%x62 / ; b backspace U+0008
%x66 / ; f form feed U+000C
%x6E / ; n line feed U+000A
%x72 / ; r carriage return U+000D
%x74 / ; t tab U+0009
%x75 4HEXDIG ) ; uXXXX U+XXXX
escape = %x5C ; \
quotation-mark = %x22 ; "
unescaped = %x20-21 / %x23-5B / %x5D-10FFFF
3. Encoding
JSON text SHALL be encoded in Unicode. The default encoding is
UTF-8.
Since the first two characters of a JSON text will always be ASCII
characters [RFC0020], it is possible to determine whether an octet
stream is UTF-8, UTF-16 (BE or LE), or UTF-32 (BE or LE) by looking
at the pattern of nulls in the first four octets.
00 00 00 xx UTF-32BE
00 xx 00 xx UTF-16BE
xx 00 00 00 UTF-32LE
xx 00 xx 00 UTF-16LE
xx xx xx xx UTF-8
4. Parsers
A JSON parser transforms a JSON text into another representation. A
JSON parser MUST accept all texts that conform to the JSON grammar.
A JSON parser MAY accept non-JSON forms or extensions.
An implementation may set limits on the size of texts that it
accepts. An implementation may set limits on the maximum depth of
nesting. An implementation may set limits on the range of numbers.
An implementation may set limits on the length and character contents
of strings.
Crockford Informational [Page 5]
RFC 4627 JSON July 2006
5. Generators
A JSON generator produces JSON text. The resulting text MUST
strictly conform to the JSON grammar.
6. IANA Considerations
The MIME media type for JSON text is application/json.
Type name: application
Subtype name: json
Required parameters: n/a
Optional parameters: n/a
Encoding considerations: 8bit if UTF-8; binary if UTF-16 or UTF-32
JSON may be represented using UTF-8, UTF-16, or UTF-32. When JSON
is written in UTF-8, JSON is 8bit compatible. When JSON is
written in UTF-16 or UTF-32, the binary content-transfer-encoding
must be used.
Security considerations:
Generally there are security issues with scripting languages. JSON
is a subset of JavaScript, but it is a safe subset that excludes
assignment and invocation.
A JSON text can be safely passed into JavaScript's eval() function
(which compiles and executes a string) if all the characters not
enclosed in strings are in the set of characters that form JSON
tokens. This can be quickly determined in JavaScript with two
regular expressions and calls to the test and replace methods.
var my_JSON_object = !(/[^,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]/.test(
text.replace(/"(\\.|[^"\\])*"/g, ''))) &&
eval('(' + text + ')');
Interoperability considerations: n/a
Published specification: RFC 4627
Crockford Informational [Page 6]
RFC 4627 JSON July 2006
Applications that use this media type:
JSON has been used to exchange data between applications written
in all of these programming languages: ActionScript, C, C#,
ColdFusion, Common Lisp, E, Erlang, Java, JavaScript, Lua,
Objective CAML, Perl, PHP, Python, Rebol, Ruby, and Scheme.
Additional information:
Magic number(s): n/a
File extension(s): .json
Macintosh file type code(s): TEXT
Person & email address to contact for further information:
Douglas Crockford
douglas@crockford.com
Intended usage: COMMON
Restrictions on usage: none
Author:
Douglas Crockford
douglas@crockford.com
Change controller:
Douglas Crockford
douglas@crockford.com
7. Security Considerations
See Security Considerations in Section 6.
8. Examples
This is a JSON object:
{
"Image": {
"Width": 800,
"Height": 600,
"Title": "View from 15th Floor",
"Thumbnail": {
"Url": "http://www.example.com/image/481989943",
"Height": 125,
"Width": "100"
},
"IDs": [116, 943, 234, 38793]
Crockford Informational [Page 7]
RFC 4627 JSON July 2006
}
}
Its Image member is an object whose Thumbnail member is an object
and whose IDs member is an array of numbers.
This is a JSON array containing two objects:
[
{
"precision": "zip",
"Latitude": 37.7668,
"Longitude": -122.3959,
"Address": "",
"City": "SAN FRANCISCO",
"State": "CA",
"Zip": "94107",
"Country": "US"
},
{
"precision": "zip",
"Latitude": 37.371991,
"Longitude": -122.026020,
"Address": "",
"City": "SUNNYVALE",
"State": "CA",
"Zip": "94085",
"Country": "US"
}
]
9. References
9.1. Normative References
[ECMA] European Computer Manufacturers Association, "ECMAScript
Language Specification 3rd Edition", December 1999,
<http://www.ecma-international.org/publications/files/
ecma-st/ECMA-262.pdf>.
[RFC0020] Cerf, V., "ASCII format for network interchange", RFC 20,
October 1969.
[RFC2119] Bradner, S., "Key words for use in RFCs to Indicate
Requirement Levels", BCP 14, RFC 2119, March 1997.
[RFC4234] Crocker, D. and P. Overell, "Augmented BNF for Syntax
Specifications: ABNF", RFC 4234, October 2005.
Crockford Informational [Page 8]
RFC 4627 JSON July 2006
[UNICODE] The Unicode Consortium, "The Unicode Standard Version 4.0",
2003, <http://www.unicode.org/versions/Unicode4.1.0/>.
Author's Address
Douglas Crockford
JSON.org
EMail: douglas@crockford.com
Crockford Informational [Page 9]
RFC 4627 JSON July 2006
Full Copyright Statement
Copyright (C) The Internet Society (2006).
This document is subject to the rights, licenses and restrictions
contained in BCP 78, and except as set forth therein, the authors
retain all their rights.
This document and the information contained herein are provided on an
"AS IS" basis and THE CONTRIBUTOR, THE ORGANIZATION HE/SHE REPRESENTS
OR IS SPONSORED BY (IF ANY), THE INTERNET SOCIETY AND THE INTERNET
ENGINEERING TASK FORCE DISCLAIM ALL WARRANTIES, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO ANY WARRANTY THAT THE USE OF THE
INFORMATION HEREIN WILL NOT INFRINGE ANY RIGHTS OR ANY IMPLIED
WARRANTIES OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
Intellectual Property
The IETF takes no position regarding the validity or scope of any
Intellectual Property Rights or other rights that might be claimed to
pertain to the implementation or use of the technology described in
this document or the extent to which any license under such rights
might or might not be available; nor does it represent that it has
made any independent effort to identify any such rights. Information
on the procedures with respect to rights in RFC documents can be
found in BCP 78 and BCP 79.
Copies of IPR disclosures made to the IETF Secretariat and any
assurances of licenses to be made available, or the result of an
attempt made to obtain a general license or permission for the use of
such proprietary rights by implementers or users of this
specification can be obtained from the IETF on-line IPR repository at
http://www.ietf.org/ipr.
The IETF invites any interested party to bring to its attention any
copyrights, patents or patent applications, or other proprietary
rights that may cover technology that may be required to implement
this standard. Please address the information to the IETF at
ietf-ipr@ietf.org.
Acknowledgement
Funding for the RFC Editor function is provided by the IETF
Administrative Support Activity (IASA).
Crockford Informational [Page 10]

View File

@ -0,0 +1,85 @@
#!/bin/bash
set -eo pipefail
PLATFORM="`uname -s`"
[ "$1" ] && VERSION="$1" || VERSION="2.1devel"
# Portable "ggrep -A" replacement.
# Work around Solaris awk record limit of 2559 bytes.
# contextgrep PATTERN POST_MATCH_LINES
contextgrep() {
cut -c -2500 | awk "/$1/ { count = ($2 + 1) } count > 0 { count--; print }"
}
do_tests() {
echo
cd tests
lua -e 'print("Testing Lua CJSON version " .. require("cjson")._VERSION)'
./test.lua | contextgrep 'FAIL|Summary' 3 | grep -v PASS | cut -c -150
cd ..
}
echo "===== Setting LuaRocks PATH ====="
eval "`luarocks path`"
echo "===== Building UTF-8 test data ====="
( cd tests && ./genutf8.pl; )
echo "===== Cleaning old build data ====="
make clean
rm -f tests/cjson.so
echo "===== Verifying cjson.so is not installed ====="
cd tests
if lua -e 'require "cjson"' 2>/dev/null
then
cat <<EOT
Please ensure you do not have the Lua CJSON module installed before
running these tests.
EOT
exit 1
fi
cd ..
echo "===== Testing LuaRocks build ====="
luarocks make --local
do_tests
luarocks remove --local lua-cjson
make clean
echo "===== Testing Makefile build ====="
make "$@"
cp -r lua/cjson cjson.so tests
do_tests
make clean
rm -rf tests/cjson{,.so}
if [ -z "$SKIP_CMAKE" ]; then
echo "===== Testing Cmake build ====="
mkdir build
cd build
cmake ..
make
cd ..
cp -r lua/cjson build/cjson.so tests
do_tests
rm -rf build tests/cjson{,.so}
echo "===== Testing Cmake fpconv build ====="
mkdir build
cd build
cmake -DUSE_INTERNAL_FPCONV=1 ..
make
cd ..
cp -r lua/cjson build/cjson.so tests
do_tests
rm -rf build tests/cjson{,.so}
else
echo "===== Skipping Cmake build ====="
fi
# vi:ai et sw=4 ts=4:

View File

@ -0,0 +1,251 @@
/* strbuf - String buffer routines
*
* Copyright (c) 2010-2012 Mark Pulford <mark@kyne.com.au>
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
#include <string.h>
#include "strbuf.h"
static void die(const char *fmt, ...)
{
va_list arg;
va_start(arg, fmt);
vfprintf(stderr, fmt, arg);
va_end(arg);
fprintf(stderr, "\n");
exit(-1);
}
void strbuf_init(strbuf_t *s, int len)
{
int size;
if (len <= 0)
size = STRBUF_DEFAULT_SIZE;
else
size = len + 1; /* \0 terminator */
s->buf = NULL;
s->size = size;
s->length = 0;
s->increment = STRBUF_DEFAULT_INCREMENT;
s->dynamic = 0;
s->reallocs = 0;
s->debug = 0;
s->buf = malloc(size);
if (!s->buf)
die("Out of memory");
strbuf_ensure_null(s);
}
strbuf_t *strbuf_new(int len)
{
strbuf_t *s;
s = malloc(sizeof(strbuf_t));
if (!s)
die("Out of memory");
strbuf_init(s, len);
/* Dynamic strbuf allocation / deallocation */
s->dynamic = 1;
return s;
}
void strbuf_set_increment(strbuf_t *s, int increment)
{
/* Increment > 0: Linear buffer growth rate
* Increment < -1: Exponential buffer growth rate */
if (increment == 0 || increment == -1)
die("BUG: Invalid string increment");
s->increment = increment;
}
static inline void debug_stats(strbuf_t *s)
{
if (s->debug) {
fprintf(stderr, "strbuf(%lx) reallocs: %d, length: %d, size: %d\n",
(long)s, s->reallocs, s->length, s->size);
}
}
/* If strbuf_t has not been dynamically allocated, strbuf_free() can
* be called any number of times after strbuf_init() */
void strbuf_free(strbuf_t *s)
{
debug_stats(s);
if (s->buf) {
free(s->buf);
s->buf = NULL;
}
if (s->dynamic)
free(s);
}
char *strbuf_free_to_string(strbuf_t *s, int *len)
{
char *buf;
debug_stats(s);
strbuf_ensure_null(s);
buf = s->buf;
if (len)
*len = s->length;
if (s->dynamic)
free(s);
return buf;
}
static int calculate_new_size(strbuf_t *s, int len)
{
int reqsize, newsize;
if (len <= 0)
die("BUG: Invalid strbuf length requested");
/* Ensure there is room for optional NULL termination */
reqsize = len + 1;
/* If the user has requested to shrink the buffer, do it exactly */
if (s->size > reqsize)
return reqsize;
newsize = s->size;
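/* Growth policy: a negative increment multiplies the current size
* (e.g. the default -2 doubles it) until the request fits; a positive
* increment rounds the current size up to the next multiple of the
* increment. */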
if (s->increment < 0) {
/* Exponential sizing */
while (newsize < reqsize)
newsize *= -s->increment;
} else if (s->increment != 0) {
/* Linear sizing */
newsize = ((newsize + s->increment - 1) / s->increment) * s->increment;
}
return newsize;
}
/* Ensure strbuf can handle a string of the requested length (ignoring
* the optional NULL termination). */
void strbuf_resize(strbuf_t *s, int len)
{
int newsize;
newsize = calculate_new_size(s, len);
if (s->debug > 1) {
fprintf(stderr, "strbuf(%lx) resize: %d => %d\n",
(long)s, s->size, newsize);
}
s->size = newsize;
s->buf = realloc(s->buf, s->size);
if (!s->buf)
die("Out of memory");
s->reallocs++;
}
void strbuf_append_string(strbuf_t *s, const char *str)
{
int space, i;
space = strbuf_empty_length(s);
for (i = 0; str[i]; i++) {
if (space < 1) {
strbuf_resize(s, s->length + 1);
space = strbuf_empty_length(s);
}
s->buf[s->length] = str[i];
s->length++;
space--;
}
}
/* strbuf_append_fmt() should only be used when an upper bound
* is known for the output string. */
void strbuf_append_fmt(strbuf_t *s, int len, const char *fmt, ...)
{
va_list arg;
int fmt_len;
strbuf_ensure_empty_length(s, len);
va_start(arg, fmt);
fmt_len = vsnprintf(s->buf + s->length, len, fmt, arg);
va_end(arg);
if (fmt_len < 0)
die("BUG: Unable to convert number"); /* This should never happen.. */
s->length += fmt_len;
}
/* strbuf_append_fmt_retry() can be used when there is no known
* upper bound for the output string. */
void strbuf_append_fmt_retry(strbuf_t *s, const char *fmt, ...)
{
va_list arg;
int fmt_len, try;
int empty_len;
/* If the first attempt to append fails, resize the buffer appropriately
* and try again */
for (try = 0; ; try++) {
va_start(arg, fmt);
/* Append the new formatted string */
/* fmt_len is the length of the string required, excluding the
* trailing NULL */
empty_len = strbuf_empty_length(s);
/* Add 1 since there is also space to store the terminating NULL. */
fmt_len = vsnprintf(s->buf + s->length, empty_len + 1, fmt, arg);
va_end(arg);
if (fmt_len <= empty_len)
break; /* SUCCESS */
if (try > 0)
die("BUG: length of formatted string changed");
strbuf_resize(s, s->length + fmt_len);
}
s->length += fmt_len;
}
/* vi:ai et sw=4 ts=4:
*/

View File

@ -0,0 +1,165 @@
/* strbuf - String buffer routines
*
* Copyright (c) 2010-2012 Mark Pulford <mark@kyne.com.au>
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#include <stdlib.h>
#include <stdarg.h>
/* Workaround for MSVC */
#ifdef _MSC_VER
#define inline __inline
#endif
/* Size: Total bytes allocated to *buf
* Length: String length, excluding optional NULL terminator.
* Increment: Allocation increments when resizing the string buffer.
* Dynamic: True if created via strbuf_new()
*/
typedef struct {
char *buf;
int size;
int length;
int increment;
int dynamic;
int reallocs;
int debug;
} strbuf_t;
#ifndef STRBUF_DEFAULT_SIZE
#define STRBUF_DEFAULT_SIZE 1023
#endif
#ifndef STRBUF_DEFAULT_INCREMENT
#define STRBUF_DEFAULT_INCREMENT -2
#endif
/* Initialise */
extern strbuf_t *strbuf_new(int len);
extern void strbuf_init(strbuf_t *s, int len);
extern void strbuf_set_increment(strbuf_t *s, int increment);
/* Release */
extern void strbuf_free(strbuf_t *s);
extern char *strbuf_free_to_string(strbuf_t *s, int *len);
/* Management */
extern void strbuf_resize(strbuf_t *s, int len);
static int strbuf_empty_length(strbuf_t *s);
static int strbuf_length(strbuf_t *s);
static char *strbuf_string(strbuf_t *s, int *len);
static void strbuf_ensure_empty_length(strbuf_t *s, int len);
static char *strbuf_empty_ptr(strbuf_t *s);
static void strbuf_extend_length(strbuf_t *s, int len);
static void strbuf_set_length(strbuf_t *s, int len);
/* Update */
extern void strbuf_append_fmt(strbuf_t *s, int len, const char *fmt, ...);
extern void strbuf_append_fmt_retry(strbuf_t *s, const char *format, ...);
static void strbuf_append_mem(strbuf_t *s, const char *c, int len);
extern void strbuf_append_string(strbuf_t *s, const char *str);
static void strbuf_append_char(strbuf_t *s, const char c);
static void strbuf_ensure_null(strbuf_t *s);
/* Reset string before reuse */
static inline void strbuf_reset(strbuf_t *s)
{
s->length = 0;
}
static inline int strbuf_allocated(strbuf_t *s)
{
return s->buf != NULL;
}
/* Return bytes remaining in the string buffer
* Ensure there is space for a NULL terminator. */
static inline int strbuf_empty_length(strbuf_t *s)
{
return s->size - s->length - 1;
}
static inline void strbuf_ensure_empty_length(strbuf_t *s, int len)
{
if (len > strbuf_empty_length(s))
strbuf_resize(s, s->length + len);
}
static inline char *strbuf_empty_ptr(strbuf_t *s)
{
return s->buf + s->length;
}
static inline void strbuf_set_length(strbuf_t *s, int len)
{
s->length = len;
}
static inline void strbuf_extend_length(strbuf_t *s, int len)
{
s->length += len;
}
static inline int strbuf_length(strbuf_t *s)
{
return s->length;
}
static inline void strbuf_append_char(strbuf_t *s, const char c)
{
strbuf_ensure_empty_length(s, 1);
s->buf[s->length++] = c;
}
static inline void strbuf_append_char_unsafe(strbuf_t *s, const char c)
{
s->buf[s->length++] = c;
}
static inline void strbuf_append_mem(strbuf_t *s, const char *c, int len)
{
strbuf_ensure_empty_length(s, len);
memcpy(s->buf + s->length, c, len);
s->length += len;
}
static inline void strbuf_append_mem_unsafe(strbuf_t *s, const char *c, int len)
{
memcpy(s->buf + s->length, c, len);
s->length += len;
}
static inline void strbuf_ensure_null(strbuf_t *s)
{
s->buf[s->length] = 0;
}
static inline char *strbuf_string(strbuf_t *s, int *len)
{
if (len)
*len = s->length;
return s->buf;
}
/* vi:ai et sw=4 ts=4:
*/

View File

@ -0,0 +1,4 @@
These JSON examples were taken from the JSON website
(http://json.org/example.html) and RFC 4627.
Used with permission.

View File

@ -0,0 +1,72 @@
package TestLua;
use Test::Base -Base;
use IPC::Run3;
use Cwd;
use Test::LongString;
our @EXPORT = qw( run_tests );
$ENV{LUA_CPATH} = "../?.so;;";
$ENV{LUA_PATH} = "../lua/?.lua;;";
#$ENV{LUA_PATH} = ($ENV{LUA_PATH} || "" ) . ';' . getcwd . "/runtime/?.lua" . ';;';
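# Each test block supplies a --- lua section (the script to run) and an
# --- out section (expected stdout); optional --- err and --- exit sections
# check the error output and exit status respectively.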
sub run_test ($) {
my $block = shift;
#print $json_xs->pretty->encode(\@new_rows);
#my $res = #print $json_xs->pretty->encode($res);
my $name = $block->name;
my $lua = $block->lua or
die "No --- lua specified for test $name\n";
my $luafile = "test_case.lua";
open my $fh, ">$luafile" or
die "Cannot open $luafile for writing: $!\n";
print $fh $lua;
close $fh;
my ($res, $err);
my @cmd;
my $lua_bin = $ENV{LUA_BIN} || "luajit";
if ($ENV{TEST_LUA_USE_VALGRIND}) {
warn "$name\n";
@cmd = ('valgrind', '-q', '--leak-check=full', $lua_bin, 'test_case.lua');
} else {
@cmd = ($lua_bin, 'test_case.lua');
}
run3 \@cmd, undef, \$res, \$err;
my $rc = $?;
#warn "res:$res\nerr:$err\n";
if (defined $block->err) {
$err =~ /.*:.*:.*: (.*\s)?/;
$err = $1;
is $err, $block->err, "$name - err expected";
} elsif ($rc) {
die "Failed to execute --- lua for test $name: $err\n";
} else {
#is $res, $block->out, "$name - output ok";
is $res, $block->out, "$name - output ok";
}
is $rc, ($block->exit || 0), "$name - exit code ok";
#unlink 'test_case.lua' or warn "could not delete \'test_case.lua\':$!";
}
sub run_tests () {
for my $block (blocks()) {
run_test($block);
}
}
1;

View File

@ -0,0 +1,334 @@
# vim:ft=
use lib '.';
use TestLua;
plan tests => 2 * blocks();
run_tests();
__DATA__
=== TEST 1: empty tables as objects
--- lua
local cjson = require "cjson"
print(cjson.encode({}))
print(cjson.encode({dogs = {}}))
--- out
{}
{"dogs":{}}
=== TEST 2: empty tables as arrays
--- lua
local cjson = require "cjson"
cjson.encode_empty_table_as_object(false)
print(cjson.encode({}))
print(cjson.encode({dogs = {}}))
--- out
[]
{"dogs":[]}
=== TEST 3: empty tables as objects (explicit)
--- lua
local cjson = require "cjson"
cjson.encode_empty_table_as_object(true)
print(cjson.encode({}))
print(cjson.encode({dogs = {}}))
--- out
{}
{"dogs":{}}
=== TEST 4: empty_array userdata
--- lua
local cjson = require "cjson"
print(cjson.encode({arr = cjson.empty_array}))
--- out
{"arr":[]}
=== TEST 5: empty_array_mt
--- lua
local cjson = require "cjson"
local empty_arr = setmetatable({}, cjson.empty_array_mt)
print(cjson.encode({arr = empty_arr}))
--- out
{"arr":[]}
=== TEST 6: empty_array_mt and empty tables as objects (explicit)
--- lua
local cjson = require "cjson"
local sort_json = require "tests.sort_json"
local empty_arr = setmetatable({}, cjson.empty_array_mt)
print(sort_json(cjson.encode({obj = {}, arr = empty_arr})))
--- out
{"arr":[],"obj":{}}
=== TEST 7: empty_array_mt and empty tables as objects (explicit)
--- lua
local cjson = require "cjson"
local sort_json = require "tests.sort_json"
cjson.encode_empty_table_as_object(true)
local empty_arr = setmetatable({}, cjson.empty_array_mt)
local data = {
arr = empty_arr,
foo = {
obj = {},
foobar = {
arr = cjson.empty_array,
obj = {}
}
}
}
print(sort_json(cjson.encode(data)))
--- out
{"arr":[],"foo":{"foobar":{"arr":[],"obj":{}},"obj":{}}}
=== TEST 8: empty_array_mt on non-empty tables
--- lua
local cjson = require "cjson"
local sort_json = require "tests.sort_json"
cjson.encode_empty_table_as_object(true)
local array = {"hello", "world", "lua"}
setmetatable(array, cjson.empty_array_mt)
local data = {
arr = array,
foo = {
obj = {},
foobar = {
arr = cjson.empty_array,
obj = {}
}
}
}
print(sort_json(cjson.encode(data)))
--- out
{"arr":["hello","world","lua"],"foo":{"foobar":{"arr":[],"obj":{}},"obj":{}}}
=== TEST 9: array_mt on empty tables
--- lua
local cjson = require "cjson"
local data = {}
setmetatable(data, cjson.array_mt)
print(cjson.encode(data))
--- out
[]
=== TEST 10: array_mt on non-empty tables
--- lua
local cjson = require "cjson"
local data = { "foo", "bar" }
setmetatable(data, cjson.array_mt)
print(cjson.encode(data))
--- out
["foo","bar"]
=== TEST 11: array_mt on non-empty tables with holes
--- lua
local cjson = require "cjson"
local data = {}
data[1] = "foo"
data[2] = "bar"
data[4] = "last"
data[9] = "none"
setmetatable(data, cjson.array_mt)
print(cjson.encode(data))
--- out
["foo","bar",null,"last"]
=== TEST 12: decode() by default does not set array_mt on empty arrays
--- lua
local cjson = require "cjson"
local json = [[{"my_array":[]}]]
local t = cjson.decode(json)
local has_metatable = getmetatable(t.my_array) == cjson.array_mt
print("decoded JSON array has metatable: " .. tostring(has_metatable))
print(cjson.encode(t))
--- out
decoded JSON array has metatable: false
{"my_array":{}}
=== TEST 13: decode() sets array_mt on non-empty arrays if enabled
--- lua
local cjson = require "cjson"
cjson.decode_array_with_array_mt(true)
local json = [[{"my_array":["hello","world"]}]]
local t = cjson.decode(json)
t.my_array.hash_value = "adding a hash value"
-- emptying the array part
t.my_array[1] = nil
t.my_array[2] = nil
local has_metatable = getmetatable(t.my_array) == cjson.array_mt
print("decoded JSON array has metatable: " .. tostring(has_metatable))
print(cjson.encode(t))
--- out
decoded JSON array has metatable: true
{"my_array":[]}
=== TEST 14: cfg can enable/disable setting array_mt
--- lua
local cjson = require "cjson"
cjson.decode_array_with_array_mt(true)
cjson.decode_array_with_array_mt(false)
local json = [[{"my_array":[]}]]
local t = cjson.decode(json)
local has_metatable = getmetatable(t.my_array) == cjson.array_mt
print("decoded JSON array has metatable: " .. tostring(has_metatable))
print(cjson.encode(t))
--- out
decoded JSON array has metatable: false
{"my_array":{}}
=== TEST 15: array_mt on tables with hash part
--- lua
local cjson = require "cjson"
local data
if jit and string.find(jit.version, "LuaJIT 2.1.0", nil, true) then
local new_tab = require "table.new"
data = new_tab(0, 2) -- allocating hash part only
else
data = {}
end
data.foo = "bar"
data[1] = "hello"
setmetatable(data, cjson.array_mt)
print(cjson.encode(data))
--- out
["hello"]
=== TEST 16: multiple calls to lua_cjson_new (1/3)
--- lua
local cjson = require "cjson"
package.loaded["cjson"] = nil
require "cjson"
local arr = setmetatable({}, cjson.array_mt)
print(cjson.encode(arr))
--- out
[]
=== TEST 17: multiple calls to lua_cjson_new (2/3)
--- lua
local cjson = require "cjson"
package.loaded["cjson"] = nil
require "cjson"
local arr = setmetatable({}, cjson.empty_array_mt)
print(cjson.encode(arr))
--- out
[]
=== TEST 18: multiple calls to lua_cjson_new (3/3)
--- lua
local cjson = require "cjson.safe"
-- load another cjson instance (not in package.loaded)
require "cjson"
local arr = setmetatable({}, cjson.empty_array_mt)
print(cjson.encode(arr))
--- out
[]
=== TEST 19: & in JSON
--- lua
local cjson = require "cjson"
local a="[\"a=1&b=2\"]"
local b=cjson.decode(a)
print(cjson.encode(b))
--- out
["a=1&b=2"]
=== TEST 20: default and max precision
--- lua
local math = require "math"
local cjson = require "cjson"
local double = math.pow(2, 53)
print(cjson.encode(double))
cjson.encode_number_precision(16)
print(cjson.encode(double))
print(string.format("%16.0f", cjson.decode("9007199254740992")))
--- out
9.007199254741e+15
9007199254740992
9007199254740992
=== TEST 21: / in string
--- lua
local cjson = require "cjson"
local a={test = "http://google.com/google"}
local b=cjson.encode(a)
print(b)
cjson.encode_escape_forward_slash(false)
local b=cjson.encode(a)
print(b)
cjson.encode_escape_forward_slash(true)
local b=cjson.encode(a)
print(b)
--- out
{"test":"http:\/\/google.com\/google"}
{"test":"http://google.com/google"}
{"test":"http:\/\/google.com\/google"}
=== TEST 22: disable error on invalid type
--- lua
local cjson = require "cjson"
local f = function (x) return 2*x end
local res, err = pcall(cjson.encode, f)
print(err)
local t = {f = f, valid = "valid"}
local res, err = pcall(cjson.encode, t)
print(err)
local arr = {"one", "two", f, "three"}
local res, err = pcall(cjson.encode, arr)
print(err)
cjson.encode_skip_unsupported_value_types(true)
print(cjson.encode(f))
print(cjson.encode(t))
print(cjson.encode(arr))
--- out
Cannot serialise function: type not supported
Cannot serialise function: type not supported
Cannot serialise function: type not supported
{"valid":"valid"}
["one","two","three"]

View File

@ -0,0 +1,131 @@
#!/usr/bin/env lua
-- This benchmark script measures wall clock time and should be
-- run on an unloaded system.
--
-- Your Mileage May Vary.
--
-- Mark Pulford <mark@kyne.com.au>
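-- Usage (illustrative): JSON_MODULE=cjson lua bench.lua example1.json ...
-- Each JSON file named on the command line is decoded once, then encode and
-- decode rates (operations per second) are printed per file.
-- Requires LuaSocket for microsecond wall-clock timing (socket.gettime).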
local json_module = os.getenv("JSON_MODULE") or "cjson"
require "socket"
local json = require(json_module)
local util = require "cjson.util"
local function find_func(mod, funcnames)
for _, v in ipairs(funcnames) do
if mod[v] then
return mod[v]
end
end
return nil
end
local json_encode = find_func(json, { "encode", "Encode", "to_string", "stringify", "json" })
local json_decode = find_func(json, { "decode", "Decode", "to_value", "parse" })
local function average(t)
local total = 0
for _, v in ipairs(t) do
total = total + v
end
return total / #t
end
function benchmark(tests, seconds, rep)
local function bench(func, iter)
-- Use socket.gettime() to measure microsecond resolution
-- wall clock time.
local t = socket.gettime()
for i = 1, iter do
func(i)
end
t = socket.gettime() - t
-- Don't trust any results when the run lasted for less than a
-- millisecond - return nil.
if t < 0.001 then
return nil
end
return (iter / t)
end
-- Roughly calculate the number of iterations required
-- to obtain a particular time period.
local function calc_iter(func, seconds)
local iter = 1
local rate
-- Warm up the bench function first.
func()
while not rate do
rate = bench(func, iter)
iter = iter * 10
end
return math.ceil(seconds * rate)
end
local test_results = {}
for name, func in pairs(tests) do
-- k(number), v(string)
-- k(string), v(function)
-- k(number), v(function)
if type(func) == "string" then
name = func
func = _G[name]
end
local iter = calc_iter(func, seconds)
local result = {}
for i = 1, rep do
result[i] = bench(func, iter)
end
-- Remove the slowest half (round down) of the result set
table.sort(result)
for i = 1, math.floor(#result / 2) do
table.remove(result, 1)
end
test_results[name] = average(result)
end
return test_results
end
function bench_file(filename)
local data_json = util.file_load(filename)
local data_obj = json_decode(data_json)
local function test_encode()
json_encode(data_obj)
end
local function test_decode()
json_decode(data_json)
end
local tests = {}
if json_encode then tests.encode = test_encode end
if json_decode then tests.decode = test_decode end
return benchmark(tests, 0.1, 5)
end
-- Optionally load any custom configuration required for this module
local success, data = pcall(util.file_load, ("bench-%s.lua"):format(json_module))
if success then
util.run_script(data, _G)
configure(json)
end
for i = 1, #arg do
local results = bench_file(arg[i])
for k, v in pairs(results) do
print(("%s\t%s\t%d"):format(arg[i], k, v))
end
end
-- vi:ai et sw=4 ts=4:

View File

@ -0,0 +1,22 @@
{
"glossary": {
"title": "example glossary",
"GlossDiv": {
"title": "S",
"GlossList": {
"GlossEntry": {
"ID": "SGML",
"SortAs": "SGML",
"GlossTerm": "Standard Generalized Mark up Language",
"Acronym": "SGML",
"Abbrev": "ISO 8879:1986",
"GlossDef": {
"para": "A meta-markup language, used to create markup languages such as DocBook.",
"GlossSeeAlso": ["GML", "XML"]
},
"GlossSee": "markup"
}
}
}
}
}

View File

@ -0,0 +1,11 @@
{"menu": {
"id": "file",
"value": "File",
"popup": {
"menuitem": [
{"value": "New", "onclick": "CreateNewDoc()"},
{"value": "Open", "onclick": "OpenDoc()"},
{"value": "Close", "onclick": "CloseDoc()"}
]
}
}}

View File

@ -0,0 +1,26 @@
{"widget": {
"debug": "on",
"window": {
"title": "Sample Konfabulator Widget",
"name": "main_window",
"width": 500,
"height": 500
},
"image": {
"src": "Images/Sun.png",
"name": "sun1",
"hOffset": 250,
"vOffset": 250,
"alignment": "center"
},
"text": {
"data": "Click Here",
"size": 36,
"style": "bold",
"name": "text1",
"hOffset": 250,
"vOffset": 100,
"alignment": "center",
"onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;"
}
}}

View File

@ -0,0 +1,88 @@
{"web-app": {
"servlet": [
{
"servlet-name": "cofaxCDS",
"servlet-class": "org.cofax.cds.CDSServlet",
"init-param": {
"configGlossary:installationAt": "Philadelphia, PA",
"configGlossary:adminEmail": "ksm@pobox.com",
"configGlossary:poweredBy": "Cofax",
"configGlossary:poweredByIcon": "/images/cofax.gif",
"configGlossary:staticPath": "/content/static",
"templateProcessorClass": "org.cofax.WysiwygTemplate",
"templateLoaderClass": "org.cofax.FilesTemplateLoader",
"templatePath": "templates",
"templateOverridePath": "",
"defaultListTemplate": "listTemplate.htm",
"defaultFileTemplate": "articleTemplate.htm",
"useJSP": false,
"jspListTemplate": "listTemplate.jsp",
"jspFileTemplate": "articleTemplate.jsp",
"cachePackageTagsTrack": 200,
"cachePackageTagsStore": 200,
"cachePackageTagsRefresh": 60,
"cacheTemplatesTrack": 100,
"cacheTemplatesStore": 50,
"cacheTemplatesRefresh": 15,
"cachePagesTrack": 200,
"cachePagesStore": 100,
"cachePagesRefresh": 10,
"cachePagesDirtyRead": 10,
"searchEngineListTemplate": "forSearchEnginesList.htm",
"searchEngineFileTemplate": "forSearchEngines.htm",
"searchEngineRobotsDb": "WEB-INF/robots.db",
"useDataStore": true,
"dataStoreClass": "org.cofax.SqlDataStore",
"redirectionClass": "org.cofax.SqlRedirection",
"dataStoreName": "cofax",
"dataStoreDriver": "com.microsoft.jdbc.sqlserver.SQLServerDriver",
"dataStoreUrl": "jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon",
"dataStoreUser": "sa",
"dataStorePassword": "dataStoreTestQuery",
"dataStoreTestQuery": "SET NOCOUNT ON;select test='test';",
"dataStoreLogFile": "/usr/local/tomcat/logs/datastore.log",
"dataStoreInitConns": 10,
"dataStoreMaxConns": 100,
"dataStoreConnUsageLimit": 100,
"dataStoreLogLevel": "debug",
"maxUrlLength": 500}},
{
"servlet-name": "cofaxEmail",
"servlet-class": "org.cofax.cds.EmailServlet",
"init-param": {
"mailHost": "mail1",
"mailHostOverride": "mail2"}},
{
"servlet-name": "cofaxAdmin",
"servlet-class": "org.cofax.cds.AdminServlet"},
{
"servlet-name": "fileServlet",
"servlet-class": "org.cofax.cds.FileServlet"},
{
"servlet-name": "cofaxTools",
"servlet-class": "org.cofax.cms.CofaxToolsServlet",
"init-param": {
"templatePath": "toolstemplates/",
"log": 1,
"logLocation": "/usr/local/tomcat/logs/CofaxTools.log",
"logMaxSize": "",
"dataLog": 1,
"dataLogLocation": "/usr/local/tomcat/logs/dataLog.log",
"dataLogMaxSize": "",
"removePageCache": "/content/admin/remove?cache=pages&id=",
"removeTemplateCache": "/content/admin/remove?cache=templates&id=",
"fileTransferFolder": "/usr/local/tomcat/webapps/content/fileTransferFolder",
"lookInContext": 1,
"adminGroupID": 4,
"betaServer": true}}],
"servlet-mapping": {
"cofaxCDS": "/",
"cofaxEmail": "/cofaxutil/aemail/*",
"cofaxAdmin": "/admin/*",
"fileServlet": "/static/*",
"cofaxTools": "/tools/*"},
"taglib": {
"taglib-uri": "cofax.tld",
"taglib-location": "/WEB-INF/tlds/cofax.tld"}}}

View File

@ -0,0 +1,27 @@
{"menu": {
"header": "SVG Viewer",
"items": [
{"id": "Open"},
{"id": "OpenNew", "label": "Open New"},
null,
{"id": "ZoomIn", "label": "Zoom In"},
{"id": "ZoomOut", "label": "Zoom Out"},
{"id": "OriginalView", "label": "Original View"},
null,
{"id": "Quality"},
{"id": "Pause"},
{"id": "Mute"},
null,
{"id": "Find", "label": "Find..."},
{"id": "FindAgain", "label": "Find Again"},
{"id": "Copy"},
{"id": "CopyAgain", "label": "Copy Again"},
{"id": "CopySVG", "label": "Copy SVG"},
{"id": "ViewSVG", "label": "View SVG"},
{"id": "ViewSource", "label": "View Source"},
{"id": "SaveAs", "label": "Save As"},
null,
{"id": "Help"},
{"id": "About", "label": "About Adobe CVG Viewer..."}
]
}}

View File

@ -0,0 +1,23 @@
#!/usr/bin/env perl
# Create test comparison data using a different UTF-8 implementation.
# The generated utf8.dat file must have the following MD5 sum:
# cff03b039d850f370a7362f3313e5268
use strict;
# 0xD800 - 0xDFFF are used to encode supplementary codepoints
# 0x10000 - 0x10FFFF are supplementary codepoints
my (@codepoints) = (0 .. 0xD7FF, 0xE000 .. 0x10FFFF);
my $utf8 = pack("U*", @codepoints);
defined($utf8) or die "Unable to create UTF-8 string\n";
open(FH, ">:utf8", "utf8.dat")
or die "Unable to open utf8.dat: $!\n";
print FH $utf8
or die "Unable to write utf8.dat\n";
close(FH);
# vi:ai et sw=4 ts=4:

View File

@ -0,0 +1,7 @@
[ 0.110001,
0.12345678910111,
0.412454033640,
2.6651441426902,
2.718281828459,
3.1415926535898,
2.1406926327793 ]

View File

@ -0,0 +1 @@
"\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-.\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f€亗儎厗噲墛媽崕彁憭摂晼棙櫄洔潪煚、¥ウЖ┆<D096><E29486><EFBFBD>辈炒刀犯购患骄坷谅媚牌侨墒颂臀闲岩釉罩棕仝圮蒉哙徕沅彐玷殛腱眍镳耱篝貊鼬<E8B28A><E9BCAC><EFBFBD><EFBFBD>"

View File

@ -0,0 +1,13 @@
{
"Image": {
"Width": 800,
"Height": 600,
"Title": "View from 15th Floor",
"Thumbnail": {
"Url": "http://www.example.com/image/481989943",
"Height": 125,
"Width": "100"
},
"IDs": [116, 943, 234, 38793]
}
}

View File

@ -0,0 +1,22 @@
[
{
"precision": "zip",
"Latitude": 37.7668,
"Longitude": -122.3959,
"Address": "",
"City": "SAN FRANCISCO",
"State": "CA",
"Zip": "94107",
"Country": "US"
},
{
"precision": "zip",
"Latitude": 37.371991,
"Longitude": -122.026020,
"Address": "",
"City": "SUNNYVALE",
"State": "CA",
"Zip": "94085",
"Country": "US"
}
]

View File

@ -0,0 +1,61 @@
-- NOTE: This will only work for simple tests. It doesn't parse strings, so if
-- you put any symbols like { } [ ] , inside of a string literal then it will break
-- The point of this function is to test basic structures, and not test JSON
-- strings
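-- Illustrative example: sort_json('{"b":1,"a":{"d":2,"c":3}}')
--   returns '{"a":{"c":3,"d":2},"b":1}'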
local function sort_callback(str)
local inside = str:sub(2, -2)
local parts = {}
local buffer = ""
local pos = 1
while true do
if pos > #inside then
break
end
local append
local parens = inside:match("^%b{}", pos)
if parens then
pos = pos + #parens
append = sort_callback(parens)
else
local array = inside:match("^%b[]", pos)
if array then
pos = pos + #array
append = array
else
local front = inside:sub(pos, pos)
pos = pos + 1
if front == "," then
table.insert(parts, buffer)
buffer = ""
else
append = front
end
end
end
if append then
buffer = buffer .. append
end
end
if buffer ~= "" then
table.insert(parts, buffer)
end
table.sort(parts)
return "{" .. table.concat(parts, ",") .. "}"
end
local function sort_json(str)
return (str:gsub("%b{}", sort_callback))
end
return sort_json

View File

@ -0,0 +1,474 @@
#!/usr/bin/env lua
-- Lua CJSON tests
--
-- Mark Pulford <mark@kyne.com.au>
--
-- Note: The output of this script is easier to read with "less -S"
local json = require "cjson"
local json_safe = require "cjson.safe"
local util = require "cjson.util"
local function gen_raw_octets()
local chars = {}
for i = 0, 255 do chars[i + 1] = string.char(i) end
return table.concat(chars)
end
-- Generate every UTF-16 codepoint, including supplementary codes
local function gen_utf16_escaped()
-- Create raw table escapes
local utf16_escaped = {}
local count = 0
local function append_escape(code)
local esc = ('\\u%04X'):format(code)
table.insert(utf16_escaped, esc)
end
table.insert(utf16_escaped, '"')
for i = 0, 0xD7FF do
append_escape(i)
end
-- Skip 0xD800 - 0xDFFF since they are used to encode supplementary
-- codepoints
for i = 0xE000, 0xFFFF do
append_escape(i)
end
-- Append surrogate pair for each supplementary codepoint
for high = 0xD800, 0xDBFF do
for low = 0xDC00, 0xDFFF do
append_escape(high)
append_escape(low)
end
end
table.insert(utf16_escaped, '"')
return table.concat(utf16_escaped)
end
function load_testdata()
local data = {}
-- Data for 8bit raw <-> escaped octets tests
data.octets_raw = gen_raw_octets()
data.octets_escaped = util.file_load("octets-escaped.dat")
-- Data for \uXXXX -> UTF-8 test
data.utf16_escaped = gen_utf16_escaped()
-- Load matching data for utf16_escaped
local utf8_loaded
utf8_loaded, data.utf8_raw = pcall(util.file_load, "utf8.dat")
if not utf8_loaded then
data.utf8_raw = "Failed to load utf8.dat - please run genutf8.pl"
end
data.table_cycle = {}
data.table_cycle[1] = data.table_cycle
local big = {}
for i = 1, 1100 do
big = { { 10, false, true, json.null }, "string", a = big }
end
data.deeply_nested_data = big
return data
end
function test_decode_cycle(filename)
local obj1 = json.decode(util.file_load(filename))
local obj2 = json.decode(json.encode(obj1))
return util.compare_values(obj1, obj2)
end
-- Set up data used in tests
local Inf = math.huge;
local NaN = math.huge * 0;
local testdata = load_testdata()
local cjson_tests = {
-- Test API variables
{ "Check module name, version",
function () return json._NAME, json._VERSION end, { },
true, { "cjson", "2.1.0.11" } },
-- Test decoding simple types
{ "Decode string",
json.decode, { '"test string"' }, true, { "test string" } },
{ "Decode numbers",
json.decode, { '[ 0.0, -5e3, -1, 0.3e-3, 1023.2, 0e10 ]' },
true, { { 0.0, -5000, -1, 0.0003, 1023.2, 0 } } },
{ "Decode null",
json.decode, { 'null' }, true, { json.null } },
{ "Decode true",
json.decode, { 'true' }, true, { true } },
{ "Decode false",
json.decode, { 'false' }, true, { false } },
{ "Decode object with numeric keys",
json.decode, { '{ "1": "one", "3": "three" }' },
true, { { ["1"] = "one", ["3"] = "three" } } },
{ "Decode object with string keys",
json.decode, { '{ "a": "a", "b": "b" }' },
true, { { a = "a", b = "b" } } },
{ "Decode array",
json.decode, { '[ "one", null, "three" ]' },
true, { { "one", json.null, "three" } } },
-- Test decoding errors
{ "Decode UTF-16BE [throw error]",
json.decode, { '\0"\0"' },
false, { "JSON parser does not support UTF-16 or UTF-32" } },
{ "Decode UTF-16LE [throw error]",
json.decode, { '"\0"\0' },
false, { "JSON parser does not support UTF-16 or UTF-32" } },
{ "Decode UTF-32BE [throw error]",
json.decode, { '\0\0\0"' },
false, { "JSON parser does not support UTF-16 or UTF-32" } },
{ "Decode UTF-32LE [throw error]",
json.decode, { '"\0\0\0' },
false, { "JSON parser does not support UTF-16 or UTF-32" } },
{ "Decode partial JSON [throw error]",
json.decode, { '{ "unexpected eof": ' },
false, { "Expected value but found T_END at character 21" } },
{ "Decode with extra comma [throw error]",
json.decode, { '{ "extra data": true }, false' },
false, { "Expected the end but found T_COMMA at character 23" } },
{ "Decode invalid escape code [throw error]",
json.decode, { [[ { "bad escape \q code" } ]] },
false, { "Expected object key string but found invalid escape code at character 16" } },
{ "Decode invalid unicode escape [throw error]",
json.decode, { [[ { "bad unicode \u0f6 escape" } ]] },
false, { "Expected object key string but found invalid unicode escape code at character 17" } },
{ "Decode invalid keyword [throw error]",
json.decode, { ' [ "bad barewood", test ] ' },
false, { "Expected value but found invalid token at character 20" } },
{ "Decode invalid number #1 [throw error]",
json.decode, { '[ -+12 ]' },
false, { "Expected value but found invalid number at character 3" } },
{ "Decode invalid number #2 [throw error]",
json.decode, { '-v' },
false, { "Expected value but found invalid number at character 1" } },
{ "Decode invalid number exponent [throw error]",
json.decode, { '[ 0.4eg10 ]' },
false, { "Expected comma or array end but found invalid token at character 6" } },
-- Test decoding nested arrays / objects
{ "Set decode_max_depth(5)",
json.decode_max_depth, { 5 }, true, { 5 } },
{ "Decode array at nested limit",
json.decode, { '[[[[[ "nested" ]]]]]' },
true, { {{{{{ "nested" }}}}} } },
{ "Decode array over nested limit [throw error]",
json.decode, { '[[[[[[ "nested" ]]]]]]' },
false, { "Found too many nested data structures (6) at character 6" } },
{ "Decode object at nested limit",
json.decode, { '{"a":{"b":{"c":{"d":{"e":"nested"}}}}}' },
true, { {a={b={c={d={e="nested"}}}}} } },
{ "Decode object over nested limit [throw error]",
json.decode, { '{"a":{"b":{"c":{"d":{"e":{"f":"nested"}}}}}}' },
false, { "Found too many nested data structures (6) at character 26" } },
{ "Set decode_max_depth(1000)",
json.decode_max_depth, { 1000 }, true, { 1000 } },
{ "Decode deeply nested array [throw error]",
json.decode, { string.rep("[", 1100) .. '1100' .. string.rep("]", 1100)},
false, { "Found too many nested data structures (1001) at character 1001" } },
-- Test encoding nested tables
{ "Set encode_max_depth(5)",
json.encode_max_depth, { 5 }, true, { 5 } },
{ "Encode nested table as array at nested limit",
json.encode, { {{{{{"nested"}}}}} }, true, { '[[[[["nested"]]]]]' } },
{ "Encode nested table as array after nested limit [throw error]",
json.encode, { { {{{{{"nested"}}}}} } },
false, { "Cannot serialise, excessive nesting (6)" } },
{ "Encode nested table as object at nested limit",
json.encode, { {a={b={c={d={e="nested"}}}}} },
true, { '{"a":{"b":{"c":{"d":{"e":"nested"}}}}}' } },
{ "Encode nested table as object over nested limit [throw error]",
json.encode, { {a={b={c={d={e={f="nested"}}}}}} },
false, { "Cannot serialise, excessive nesting (6)" } },
{ "Encode table with cycle [throw error]",
json.encode, { testdata.table_cycle },
false, { "Cannot serialise, excessive nesting (6)" } },
{ "Set encode_max_depth(1000)",
json.encode_max_depth, { 1000 }, true, { 1000 } },
{ "Encode deeply nested data [throw error]",
json.encode, { testdata.deeply_nested_data },
false, { "Cannot serialise, excessive nesting (1001)" } },
-- Test encoding simple types
{ "Encode null",
json.encode, { json.null }, true, { 'null' } },
{ "Encode true",
json.encode, { true }, true, { 'true' } },
{ "Encode false",
json.encode, { false }, true, { 'false' } },
{ "Encode empty object",
json.encode, { { } }, true, { '{}' } },
{ "Encode integer",
json.encode, { 10 }, true, { '10' } },
{ "Encode string",
json.encode, { "hello" }, true, { '"hello"' } },
{ "Encode Lua function [throw error]",
json.encode, { function () end },
false, { "Cannot serialise function: type not supported" } },
-- Test decoding invalid numbers
{ "Set decode_invalid_numbers(true)",
json.decode_invalid_numbers, { true }, true, { true } },
{ "Decode hexadecimal",
json.decode, { '0x6.ffp1' }, true, { 13.9921875 } },
{ "Decode numbers with leading zero",
json.decode, { '[ 0123, 00.33 ]' }, true, { { 123, 0.33 } } },
{ "Decode +-Inf",
json.decode, { '[ +Inf, Inf, -Inf ]' }, true, { { Inf, Inf, -Inf } } },
{ "Decode +-Infinity",
json.decode, { '[ +Infinity, Infinity, -Infinity ]' },
true, { { Inf, Inf, -Inf } } },
{ "Decode +-NaN",
json.decode, { '[ +NaN, NaN, -NaN ]' }, true, { { NaN, NaN, NaN } } },
{ "Decode Infrared (not infinity) [throw error]",
json.decode, { 'Infrared' },
false, { "Expected the end but found invalid token at character 4" } },
{ "Decode Noodle (not NaN) [throw error]",
json.decode, { 'Noodle' },
false, { "Expected value but found invalid token at character 1" } },
{ "Set decode_invalid_numbers(false)",
json.decode_invalid_numbers, { false }, true, { false } },
{ "Decode hexadecimal [throw error]",
json.decode, { '0x6' },
false, { "Expected value but found invalid number at character 1" } },
{ "Decode numbers with leading zero [throw error]",
json.decode, { '[ 0123, 00.33 ]' },
false, { "Expected value but found invalid number at character 3" } },
{ "Decode +-Inf [throw error]",
json.decode, { '[ +Inf, Inf, -Inf ]' },
false, { "Expected value but found invalid token at character 3" } },
{ "Decode +-Infinity [throw error]",
json.decode, { '[ +Infinity, Infinity, -Infinity ]' },
false, { "Expected value but found invalid token at character 3" } },
{ "Decode +-NaN [throw error]",
json.decode, { '[ +NaN, NaN, -NaN ]' },
false, { "Expected value but found invalid token at character 3" } },
{ 'Set decode_invalid_numbers("on")',
json.decode_invalid_numbers, { "on" }, true, { true } },
-- Test encoding invalid numbers
{ "Set encode_invalid_numbers(false)",
json.encode_invalid_numbers, { false }, true, { false } },
{ "Encode NaN [throw error]",
json.encode, { NaN },
false, { "Cannot serialise number: must not be NaN or Infinity" } },
{ "Encode Infinity [throw error]",
json.encode, { Inf },
false, { "Cannot serialise number: must not be NaN or Infinity" } },
{ "Set encode_invalid_numbers(\"null\")",
json.encode_invalid_numbers, { "null" }, true, { "null" } },
{ "Encode NaN as null",
json.encode, { NaN }, true, { "null" } },
{ "Encode Infinity as null",
json.encode, { Inf }, true, { "null" } },
{ "Set encode_invalid_numbers(true)",
json.encode_invalid_numbers, { true }, true, { true } },
{ "Encode NaN",
json.encode, { NaN }, true, { "NaN" } },
{ "Encode +Infinity",
json.encode, { Inf }, true, { "Infinity" } },
{ "Encode -Infinity",
json.encode, { -Inf }, true, { "-Infinity" } },
{ 'Set encode_invalid_numbers("off")',
json.encode_invalid_numbers, { "off" }, true, { false } },
-- Test encoding tables
{ "Set encode_sparse_array(true, 2, 3)",
json.encode_sparse_array, { true, 2, 3 }, true, { true, 2, 3 } },
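-- Roughly how the (convert, ratio, safe) arguments interact in the three
-- cases below: an array whose highest index is <= 3 (the "safe" limit) is
-- always kept as a JSON array; above that it stays an array, padded with
-- nulls, while the highest index is <= ratio (2) x element count; anything
-- sparser is converted to an object because the "convert" flag is true.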
{ "Encode sparse table as array #1",
json.encode, { { [3] = "sparse test" } },
true, { '[null,null,"sparse test"]' } },
{ "Encode sparse table as array #2",
json.encode, { { [1] = "one", [4] = "sparse test" } },
true, { '["one",null,null,"sparse test"]' } },
{ "Encode sparse array as object",
json.encode, { { [1] = "one", [5] = "sparse test" } },
true, {
util.one_of {
'{"5":"sparse test","1":"one"}',
'{"1":"one","5":"sparse test"}'
}
} },
{ "Encode table with numeric string key as object",
json.encode, { { ["2"] = "numeric string key test" } },
true, { '{"2":"numeric string key test"}' } },
{ "Set encode_sparse_array(false)",
json.encode_sparse_array, { false }, true, { false, 2, 3 } },
{ "Encode table with incompatible key [throw error]",
json.encode, { { [false] = "wrong" } },
false, { "Cannot serialise boolean: table key must be a number or string" } },
-- Test escaping
{ "Encode all octets (8-bit clean)",
json.encode, { testdata.octets_raw }, true, { testdata.octets_escaped } },
{ "Decode all escaped octets",
json.decode, { testdata.octets_escaped }, true, { testdata.octets_raw } },
{ "Decode single UTF-16 escape",
json.decode, { [["\uF800"]] }, true, { "\239\160\128" } },
{ "Decode all UTF-16 escapes (including surrogate combinations)",
json.decode, { testdata.utf16_escaped }, true, { testdata.utf8_raw } },
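-- A surrogate pair is combined into one code point before UTF-8 encoding.
-- For example (illustrative, not part of testdata): "\uD834\uDD1E" should
-- decode to U+1D11E, i.e. the four UTF-8 bytes "\240\157\132\158", whereas a
-- single escape such as "\uF800" above maps straight to its three byte UTF-8
-- form "\239\160\128".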
{ "Decode swapped surrogate pair [throw error]",
json.decode, { [["\uDC00\uD800"]] },
false, { "Expected value but found invalid unicode escape code at character 2" } },
{ "Decode duplicate high surrogate [throw error]",
json.decode, { [["\uDB00\uDB00"]] },
false, { "Expected value but found invalid unicode escape code at character 2" } },
{ "Decode duplicate low surrogate [throw error]",
json.decode, { [["\uDB00\uDB00"]] },
false, { "Expected value but found invalid unicode escape code at character 2" } },
{ "Decode missing low surrogate [throw error]",
json.decode, { [["\uDB00"]] },
false, { "Expected value but found invalid unicode escape code at character 2" } },
{ "Decode invalid low surrogate [throw error]",
json.decode, { [["\uDB00\uD"]] },
false, { "Expected value but found invalid unicode escape code at character 2" } },
-- Test locale support
--
-- The standard Lua interpreter is ANSI C only and doesn't support locales
-- by default. Force a known problematic locale to test strtod()/sprintf().
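-- For example, under cs_CZ a plain string.format("%g", 1.5) may produce
-- "1,5"; the two cases below check that cjson still emits and accepts "1.5"
-- regardless of the active locale. Note that os.setlocale("cs_CZ") can only
-- take effect if that locale is installed on the test machine.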
{ "Set locale to cs_CZ (comma separator)", function ()
os.setlocale("cs_CZ")
json.new()
end },
{ "Encode number under comma locale",
json.encode, { 1.5 }, true, { '1.5' } },
{ "Decode number in array under comma locale",
json.decode, { '[ 10, "test" ]' }, true, { { 10, "test" } } },
{ "Revert locale to POSIX", function ()
os.setlocale("C")
json.new()
end },
-- Test encode_keep_buffer() and encode_number_precision()
{ "Set encode_keep_buffer(false)",
json.encode_keep_buffer, { false }, true, { false } },
{ "Set encode_number_precision(3)",
json.encode_number_precision, { 3 }, true, { 3 } },
{ "Encode number with precision 3",
json.encode, { 1/3 }, true, { "0.333" } },
{ "Set encode_number_precision(14)",
json.encode_number_precision, { 14 }, true, { 14 } },
{ "Set encode_keep_buffer(true)",
json.encode_keep_buffer, { true }, true, { true } },
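-- encode_number_precision(n) sets the number of significant digits used when
-- formatting numbers (effectively a "%.ng" conversion), which is why 1/3 is
-- rendered as "0.333" at precision 3; 14 restores the default precision.
-- encode_keep_buffer() only controls whether the encode buffer is reused
-- between calls, so toggling it should not change any encoded output.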
-- Test config API errors
-- Older Lua versions report the function name as '?' in these error messages because the calls go through pcall
{ "Set encode_number_precision(0) [throw error]",
json.encode_number_precision, { 0 },
false, {
util.one_of {
"bad argument #1 to '?' (expected integer between 1 and 16)",
"bad argument #1 to 'cjson.encode_number_precision' (expected integer between 1 and 16)"
}
} },
{ "Set encode_number_precision(\"five\") [throw error]",
json.encode_number_precision, { "five" },
false, {
util.one_of {
"bad argument #1 to '?' (number expected, got string)",
"bad argument #1 to 'cjson.encode_number_precision' (number expected, got string)"
}
} },
{ "Set encode_keep_buffer(nil, true) [throw error]",
json.encode_keep_buffer, { nil, true },
false, {
util.one_of {
"bad argument #2 to '?' (found too many arguments)",
"bad argument #2 to 'cjson.encode_keep_buffer' (found too many arguments)"
}
} },
{ "Set encode_max_depth(\"wrong\") [throw error]",
json.encode_max_depth, { "wrong" },
false, {
util.one_of {
"bad argument #1 to '?' (number expected, got string)",
"bad argument #1 to 'cjson.encode_max_depth' (number expected, got string)"
}
} },
{ "Set decode_max_depth(0) [throw error]",
json.decode_max_depth, { "0" },
false, {
util.one_of {
"bad argument #1 to '?' (expected integer between 1 and 2147483647)",
"bad argument #1 to 'cjson.decode_max_depth' (expected integer between 1 and 2147483647)"
}
} },
{ "Set encode_invalid_numbers(-2) [throw error]",
json.encode_invalid_numbers, { -2 },
false, {
util.one_of {
"bad argument #1 to '?' (invalid option '-2')",
"bad argument #1 to 'cjson.encode_invalid_numbers' (invalid option '-2')"
}
} },
{ "Set decode_invalid_numbers(true, false) [throw error]",
json.decode_invalid_numbers, { true, false },
false, {
util.one_of {
"bad argument #2 to '?' (found too many arguments)",
"bad argument #2 to 'cjson.decode_invalid_numbers' (found too many arguments)"
}
} },
{ "Set encode_sparse_array(\"not quite on\") [throw error]",
json.encode_sparse_array, { "not quite on" },
false, {
util.one_of {
"bad argument #1 to '?' (invalid option 'not quite on')",
"bad argument #1 to 'cjson.encode_sparse_array' (invalid option 'not quite on')"
}
} },
{ "Reset Lua CJSON configuration", function () json = json.new() end },
-- Wrap in a function to ensure the table returned by json.new() is used
{ "Check encode_sparse_array()",
function (...) return json.encode_sparse_array(...) end, { },
true, { false, 2, 10 } },
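-- Passing json.encode_sparse_array directly would capture the function from
-- whichever module table "json" referred to when this list was built; the
-- wrapper above looks "json" up at call time, so the instance created by
-- json.new() in the previous step is the one being checked (its defaults
-- being false, 2, 10).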
{ "Encode (safe) simple value",
json_safe.encode, { true },
true, { "true" } },
{ "Encode (safe) argument validation [throw error]",
json_safe.encode, { "arg1", "arg2" },
false, {
util.one_of {
"bad argument #1 to '?' (expected 1 argument)",
"bad argument #1 to 'cjson.safe.encode' (expected 1 argument)"
}
} },
{ "Decode (safe) error generation",
json_safe.decode, { "Oops" },
true, { nil, "Expected value but found invalid token at character 1" } },
{ "Decode (safe) error generation after new()",
function(...) return json_safe.new().decode(...) end, { "Oops" },
true, { nil, "Expected value but found invalid token at character 1" } },
}
print(("==> Testing Lua CJSON version %s\n"):format(json._VERSION))
util.run_test_group(cjson_tests)
for _, filename in ipairs(arg) do
util.run_test("Decode cycle " .. filename, test_decode_cycle, { filename },
true, { true })
end
local pass, total = util.run_test_summary()
if pass == total then
print("==> Summary: all tests succeeded")
else
print(("==> Summary: %d/%d tests failed"):format(total - pass, total))
os.exit(1)
end
-- vi:ai et sw=4 ts=4:

View File

@ -0,0 +1 @@
{ "array": [ 10, true, null ] }