Add support for streaming JSON encoder/decoder (#1755)
Replaces the problematic cjson module.
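For context, a minimal usage sketch of the difference this change makes. The streaming API shown here (an sjson module with decoder(), write() and result()) is assumed from NodeMCU's later sjson documentation, not taken from this diff:

    -- Old one-shot style (cjson): the whole JSON text must fit in RAM at once.
    -- local t = cjson.decode('{"name":"nodemcu","ok":true}')

    -- Streaming style (assumed sjson API): feed the document in chunks.
    local decoder = sjson.decoder()
    decoder:write('{"name":"nodemcu",')
    decoder:write('"ok":true}')
    local t = decoder:result()
    print(t.name, t.ok)  -- nodemcu  true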
This commit is contained in:
parent b6ef1ffee7
commit b09cac058a
@@ -38,7 +38,6 @@ SUBDIRS= \
  smart \
  modules \
  spiffs \
- cjson \
  crypto \
  dhtlib \
  tsl2561 \
@@ -87,7 +86,6 @@ COMPONENTS_eagle.app.v6 = \
  smart/smart.a \
  spiffs/spiffs.a \
  fatfs/libfatfs.a \
- cjson/libcjson.a \
  crypto/libcrypto.a \
  dhtlib/libdhtlib.a \
  tsl2561/tsl2561lib.a \
@@ -1,76 +0,0 @@
|
|||
# If Lua is installed in a non-standard location, please set the LUA_DIR
|
||||
# environment variable to point to prefix for the install. Eg:
|
||||
# Unix: export LUA_DIR=/home/user/pkg
|
||||
# Windows: set LUA_DIR=c:\lua51
|
||||
|
||||
project(lua-cjson C)
|
||||
cmake_minimum_required(VERSION 2.6)
|
||||
|
||||
option(USE_INTERNAL_FPCONV "Use internal strtod() / g_fmt() code for performance")
|
||||
option(MULTIPLE_THREADS "Support multi-threaded apps with internal fpconv - recommended" ON)
|
||||
|
||||
if(NOT CMAKE_BUILD_TYPE)
|
||||
set(CMAKE_BUILD_TYPE Release CACHE STRING
|
||||
"Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel."
|
||||
FORCE)
|
||||
endif()
|
||||
|
||||
find_package(Lua51 REQUIRED)
|
||||
include_directories(${LUA_INCLUDE_DIR})
|
||||
|
||||
if(NOT USE_INTERNAL_FPCONV)
|
||||
# Use libc number conversion routines (strtod(), sprintf())
|
||||
set(FPCONV_SOURCES fpconv.c)
|
||||
else()
|
||||
# Use internal number conversion routines
|
||||
add_definitions(-DUSE_INTERNAL_FPCONV)
|
||||
set(FPCONV_SOURCES g_fmt.c dtoa.c)
|
||||
|
||||
include(TestBigEndian)
|
||||
TEST_BIG_ENDIAN(IEEE_BIG_ENDIAN)
|
||||
if(IEEE_BIG_ENDIAN)
|
||||
add_definitions(-DIEEE_BIG_ENDIAN)
|
||||
endif()
|
||||
|
||||
if(MULTIPLE_THREADS)
|
||||
set(CMAKE_THREAD_PREFER_PTHREAD TRUE)
|
||||
find_package(Threads REQUIRED)
|
||||
if(NOT CMAKE_USE_PTHREADS_INIT)
|
||||
message(FATAL_ERROR
|
||||
"Pthreads not found - required by MULTIPLE_THREADS option")
|
||||
endif()
|
||||
add_definitions(-DMULTIPLE_THREADS)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Handle platforms missing isinf() macro (Eg, some Solaris systems).
|
||||
include(CheckSymbolExists)
|
||||
CHECK_SYMBOL_EXISTS(isinf math.h HAVE_ISINF)
|
||||
if(NOT HAVE_ISINF)
|
||||
add_definitions(-DUSE_INTERNAL_ISINF)
|
||||
endif()
|
||||
|
||||
set(_MODULE_LINK "${CMAKE_THREAD_LIBS_INIT}")
|
||||
get_filename_component(_lua_lib_dir ${LUA_LIBRARY} PATH)
|
||||
|
||||
if(APPLE)
|
||||
set(CMAKE_SHARED_MODULE_CREATE_C_FLAGS
|
||||
"${CMAKE_SHARED_MODULE_CREATE_C_FLAGS} -undefined dynamic_lookup")
|
||||
endif()
|
||||
|
||||
if(WIN32)
|
||||
# Win32 modules need to be linked to the Lua library.
|
||||
set(_MODULE_LINK ${LUA_LIBRARY} ${_MODULE_LINK})
|
||||
set(_lua_module_dir "${_lua_lib_dir}")
|
||||
# Windows sprintf()/strtod() handle NaN/inf differently. Not supported.
|
||||
add_definitions(-DDISABLE_INVALID_NUMBERS)
|
||||
else()
|
||||
set(_lua_module_dir "${_lua_lib_dir}/lua/5.1")
|
||||
endif()
|
||||
|
||||
add_library(cjson MODULE lua_cjson.c strbuf.c ${FPCONV_SOURCES})
|
||||
set_target_properties(cjson PROPERTIES PREFIX "")
|
||||
target_link_libraries(cjson ${_MODULE_LINK})
|
||||
install(TARGETS cjson DESTINATION "${_lua_module_dir}")
|
||||
|
||||
# vi:ai et sw=4 ts=4:
|
|
@@ -1,21 +0,0 @@
Copyright (c) 2010-2012 Mark Pulford <mark@kyne.com.au>
              2015 Zeroday Hong <zeroday@nodemcu.com> nodemcu.com

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -1,47 +0,0 @@
|
|||
|
||||
#############################################################
|
||||
# Required variables for each makefile
|
||||
# Discard this section from all parent makefiles
|
||||
# Expected variables (with automatic defaults):
|
||||
# CSRCS (all "C" files in the dir)
|
||||
# SUBDIRS (all subdirs with a Makefile)
|
||||
# GEN_LIBS - list of libs to be generated ()
|
||||
# GEN_IMAGES - list of images to be generated ()
|
||||
# COMPONENTS_xxx - a list of libs/objs in the form
|
||||
# subdir/lib to be extracted and rolled up into
|
||||
# a generated lib/image xxx.a ()
|
||||
#
|
||||
ifndef PDIR
|
||||
|
||||
GEN_LIBS = libcjson.a
|
||||
|
||||
endif
|
||||
|
||||
|
||||
#############################################################
|
||||
# Configuration i.e. compile options etc.
|
||||
# Target specific stuff (defines etc.) goes in here!
|
||||
# Generally values applying to a tree are captured in the
|
||||
# makefile at its root level - these are then overridden
|
||||
# for a subtree within the makefile rooted therein
|
||||
#
|
||||
#DEFINES +=
|
||||
|
||||
#############################################################
|
||||
# Recursion Magic - Don't touch this!!
|
||||
#
|
||||
# Each subtree potentially has an include directory
|
||||
# corresponding to the common APIs applicable to modules
|
||||
# rooted at that subtree. Accordingly, the INCLUDE PATH
|
||||
# of a module can only contain the include directories up
|
||||
# its parent path, and not its siblings
|
||||
#
|
||||
# Required for each makefile to inherit from the parent
|
||||
#
|
||||
|
||||
INCLUDES := $(INCLUDES) -I $(PDIR)include
|
||||
INCLUDES += -I ./
|
||||
INCLUDES += -I ../libc
|
||||
PDIR := ../$(PDIR)
|
||||
sinclude $(PDIR)Makefile
|
||||
|
|
@@ -1,9 +0,0 @@
The following people have helped with bug reports, testing and/or
suggestions:

- Louis-Philippe Perron (@loopole)
- Ondřej Jirman
- Steve Donovan <steve.j.donovan@gmail.com>
- Zhang "agentzh" Yichun <agentzh@gmail.com>

Thanks!
@@ -1,24 +0,0 @@
#include "cjson_mem.h"
#include "../lua/lauxlib.h"
#include <c_stdlib.h>

static const char errfmt[] = "cjson %salloc: out of mem (%d bytes)";

void *cjson_mem_malloc (uint32_t sz)
{
  void *p = (void*)c_malloc (sz);
  lua_State *L = lua_getstate();
  if (!p)
    luaL_error (L, errfmt, "m", sz);
  return p;
}


void *cjson_mem_realloc (void *o, uint32_t sz)
{
  void *p = (void*)c_realloc (o, sz);
  lua_State *L = lua_getstate();
  if (!p)
    luaL_error (L, errfmt, "re", sz);
  return p;
}
@@ -1,9 +0,0 @@
#ifndef _CJSON_MEM_H_
#define _CJSON_MEM_H_

#include "../lua/lua.h"

void *cjson_mem_malloc (uint32_t sz);
void *cjson_mem_realloc (void *p, uint32_t sz);

#endif
@@ -1,50 +0,0 @@
parser:
    - call parse_value
    - next_token
      ? <EOF> nop.

parse_value:
    - next_token
      ? <OBJ_BEGIN> call parse_object.
      ? <ARR_BEGIN> call parse_array.
      ? <STRING> push. return.
      ? <BOOLEAN> push. return.
      ? <NULL> push. return.
      ? <NUMBER> push. return.

parse_object:
    - push table
    - next_token
      ? <STRING> push.
    - next_token
      ? <COLON> nop.
    - call parse_value
    - set table
    - next_token
      ? <OBJ_END> return.
      ? <COMMA> loop parse_object.

parse_array:
    - push table
    - call parse_value
    - table append
    - next_token
      ? <COMMA> loop parse_array.
      ? ] return.

next_token:
    - check next character
      ? { return <OBJ_BEGIN>
      ? } return <OBJ_END>
      ? [ return <ARR_BEGIN>
      ? ] return <ARR_END>
      ? , return <COMMA>
      ? : return <COLON>
      ? [-0-9] gobble number. return <NUMBER>
      ? " gobble string. return <STRING>
      ? [ \t\n] eat whitespace.
      ? n Check "null". return <NULL> or <UNKNOWN>
      ? t Check "true". return <BOOLEAN> or <UNKNOWN>
      ? f Check "false". return <BOOLEAN> or <UNKNOWN>
      ? . return <UNKNOWN>
      ? \0 return <END>
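The notes above describe a single-pass, token-driven recursive-descent reader. As a rough illustration only, here is a minimal Lua sketch of the same control flow; all helper names are hypothetical and this is not the module's C implementation (strings are read without escape handling, and empty arrays are not handled):

    local function next_token(s, i)
      i = s:find("[^ \t\r\n]", i) or #s + 1            -- eat whitespace
      local c = s:sub(i, i)
      if c == "" then return "END", nil, i end
      if c == "{" then return "OBJ_BEGIN", nil, i + 1 end
      if c == "}" then return "OBJ_END", nil, i + 1 end
      if c == "[" then return "ARR_BEGIN", nil, i + 1 end
      if c == "]" then return "ARR_END", nil, i + 1 end
      if c == "," then return "COMMA", nil, i + 1 end
      if c == ":" then return "COLON", nil, i + 1 end
      if c == '"' then                                 -- gobble string (no escapes)
        local str, j = s:match('^"([^"]*)"()', i)
        return "STRING", str, j
      end
      if c:match("[%-%d]") then                        -- gobble number
        local num, j = s:match("^([%-%d%.eE%+]+)()", i)
        return "NUMBER", tonumber(num), j
      end
      if s:sub(i, i + 3) == "true" then return "BOOLEAN", true, i + 4 end
      if s:sub(i, i + 4) == "false" then return "BOOLEAN", false, i + 5 end
      if s:sub(i, i + 3) == "null" then return "NULL", nil, i + 4 end
      return "UNKNOWN", nil, i + 1
    end

    local parse_value

    local function parse_object(s, i)
      local t = {}
      repeat
        local tok, key, val, _
        tok, key, i = next_token(s, i)       -- expect <STRING> key or <OBJ_END>
        if tok == "OBJ_END" then return t, i end
        tok, _, i = next_token(s, i)         -- expect <COLON>
        val, i = parse_value(s, i)
        t[key] = val                         -- set table
        tok, _, i = next_token(s, i)         -- <COMMA> loops, <OBJ_END> returns
      until tok ~= "COMMA"
      return t, i
    end

    local function parse_array(s, i)
      local t = {}
      repeat
        local tok, val, _
        val, i = parse_value(s, i)
        t[#t + 1] = val                      -- table append
        tok, _, i = next_token(s, i)         -- <COMMA> loops, <ARR_END> returns
      until tok ~= "COMMA"
      return t, i
    end

    parse_value = function(s, i)
      local tok, val
      tok, val, i = next_token(s, i)
      if tok == "OBJ_BEGIN" then return parse_object(s, i) end
      if tok == "ARR_BEGIN" then return parse_array(s, i) end
      return val, i                          -- STRING, NUMBER, BOOLEAN, NULL
    end

    print(parse_value('{"a": [1, 2, true], "b": "x"}', 1).a[3])  -- true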
app/cjson/dtoa.c (4359 lines)
File diff suppressed because it is too large.
|
@@ -1,74 +0,0 @@
|
|||
#ifndef _DTOA_CONFIG_H
|
||||
#define _DTOA_CONFIG_H
|
||||
#if 0
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <stdint.h>
|
||||
|
||||
/* Ensure dtoa.c does not USE_LOCALE. Lua CJSON must not use locale
|
||||
* aware conversion routines. */
|
||||
#undef USE_LOCALE
|
||||
|
||||
/* dtoa.c should not touch errno, Lua CJSON does not use it, and it
|
||||
* may not be threadsafe */
|
||||
#define NO_ERRNO
|
||||
|
||||
#define Long int32_t
|
||||
#define ULong uint32_t
|
||||
#define Llong int64_t
|
||||
#define ULLong uint64_t
|
||||
|
||||
#ifdef IEEE_BIG_ENDIAN
|
||||
#define IEEE_MC68k
|
||||
#else
|
||||
#define IEEE_8087
|
||||
#endif
|
||||
|
||||
#define MALLOC(n) xmalloc(n)
|
||||
|
||||
static void *xmalloc(size_t size)
|
||||
{
|
||||
void *p;
|
||||
|
||||
p = malloc(size);
|
||||
if (!p) {
|
||||
fprintf(stderr, "Out of memory");
|
||||
abort();
|
||||
}
|
||||
|
||||
return p;
|
||||
}
|
||||
|
||||
#ifdef MULTIPLE_THREADS
|
||||
|
||||
/* Enable locking to support multi-threaded applications */
|
||||
|
||||
#include <pthread.h>
|
||||
|
||||
static pthread_mutex_t private_dtoa_lock[2] = {
|
||||
PTHREAD_MUTEX_INITIALIZER,
|
||||
PTHREAD_MUTEX_INITIALIZER
|
||||
};
|
||||
|
||||
#define ACQUIRE_DTOA_LOCK(n) do { \
|
||||
int r = pthread_mutex_lock(&private_dtoa_lock[n]); \
|
||||
if (r) { \
|
||||
fprintf(stderr, "pthread_mutex_lock failed with %d\n", r); \
|
||||
abort(); \
|
||||
} \
|
||||
} while (0)
|
||||
|
||||
#define FREE_DTOA_LOCK(n) do { \
|
||||
int r = pthread_mutex_unlock(&private_dtoa_lock[n]); \
|
||||
if (r) { \
|
||||
fprintf(stderr, "pthread_mutex_unlock failed with %d\n", r);\
|
||||
abort(); \
|
||||
} \
|
||||
} while (0)
|
||||
|
||||
#endif /* MULTIPLE_THREADS */
|
||||
#endif
|
||||
#endif /* _DTOA_CONFIG_H */
|
||||
|
||||
/* vi:ai et sw=4 ts=4:
|
||||
*/
|
|
@@ -1,209 +0,0 @@
|
|||
/* fpconv - Floating point conversion routines
|
||||
*
|
||||
* Copyright (c) 2011-2012 Mark Pulford <mark@kyne.com.au>
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining
|
||||
* a copy of this software and associated documentation files (the
|
||||
* "Software"), to deal in the Software without restriction, including
|
||||
* without limitation the rights to use, copy, modify, merge, publish,
|
||||
* distribute, sublicense, and/or sell copies of the Software, and to
|
||||
* permit persons to whom the Software is furnished to do so, subject to
|
||||
* the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be
|
||||
* included in all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
/* JSON uses a '.' decimal separator. strtod() / sprintf() under C libraries
|
||||
* with locale support will break when the decimal separator is a comma.
|
||||
*
|
||||
* fpconv_* will work around these issues with a translation buffer if required.
|
||||
*/
|
||||
|
||||
#include "c_stdio.h"
|
||||
#include "c_stdlib.h"
|
||||
// #include <assert.h>
|
||||
#include "c_string.h"
|
||||
|
||||
#include "fpconv.h"
|
||||
|
||||
#if 0
|
||||
/* Lua CJSON assumes the locale is the same for all threads within a
|
||||
* process and doesn't change after initialisation.
|
||||
*
|
||||
* This avoids the need for per thread storage or expensive checks
|
||||
* for call. */
|
||||
static char locale_decimal_point = '.';
|
||||
|
||||
/* In theory multibyte decimal_points are possible, but
|
||||
* Lua CJSON only supports UTF-8 and known locales only have
|
||||
* single byte decimal points ([.,]).
|
||||
*
|
||||
* localconv() may not be thread safe (=>crash), and nl_langinfo() is
|
||||
* not supported on some platforms. Use sprintf() instead - if the
|
||||
* locale does change, at least Lua CJSON won't crash. */
|
||||
static void fpconv_update_locale()
|
||||
{
|
||||
char buf[8];
|
||||
|
||||
c_sprintf(buf, "%g", 0.5);
|
||||
|
||||
/* Failing this test might imply the platform has a buggy dtoa
|
||||
* implementation or wide characters */
|
||||
if (buf[0] != '0' || buf[2] != '5' || buf[3] != 0) {
|
||||
NODE_ERR("Error: wide characters found or printf() bug.");
|
||||
return;
|
||||
}
|
||||
|
||||
locale_decimal_point = buf[1];
|
||||
}
|
||||
|
||||
/* Check for a valid number character: [-+0-9a-yA-Y.]
|
||||
* Eg: -0.6e+5, infinity, 0xF0.F0pF0
|
||||
*
|
||||
* Used to find the probable end of a number. It doesn't matter if
|
||||
* invalid characters are counted - strtod() will find the valid
|
||||
* number if it exists. The risk is that slightly more memory might
|
||||
* be allocated before a parse error occurs. */
|
||||
static inline int valid_number_character(char ch)
|
||||
{
|
||||
char lower_ch;
|
||||
|
||||
if ('0' <= ch && ch <= '9')
|
||||
return 1;
|
||||
if (ch == '-' || ch == '+' || ch == '.')
|
||||
return 1;
|
||||
|
||||
/* Hex digits, exponent (e), base (p), "infinity",.. */
|
||||
lower_ch = ch | 0x20;
|
||||
if ('a' <= lower_ch && lower_ch <= 'y')
|
||||
return 1;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Calculate the size of the buffer required for a strtod locale
|
||||
* conversion. */
|
||||
static int strtod_buffer_size(const char *s)
|
||||
{
|
||||
const char *p = s;
|
||||
|
||||
while (valid_number_character(*p))
|
||||
p++;
|
||||
|
||||
return p - s;
|
||||
}
|
||||
|
||||
/* Similar to strtod(), but must be passed the current locale's decimal point
|
||||
* character. Guaranteed to be called at the start of any valid number in a string */
|
||||
double fpconv_strtod(const char *nptr, char **endptr)
|
||||
{
|
||||
char localbuf[FPCONV_G_FMT_BUFSIZE];
|
||||
char *buf, *endbuf, *dp;
|
||||
int buflen;
|
||||
double value;
|
||||
|
||||
/* System strtod() is fine when decimal point is '.' */
|
||||
if (locale_decimal_point == '.')
|
||||
return c_strtod(nptr, endptr);
|
||||
|
||||
buflen = strtod_buffer_size(nptr);
|
||||
if (!buflen) {
|
||||
/* No valid characters found, standard strtod() return */
|
||||
*endptr = (char *)nptr;
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Duplicate number into buffer */
|
||||
if (buflen >= FPCONV_G_FMT_BUFSIZE) {
|
||||
/* Handle unusually large numbers */
|
||||
buf = c_malloc(buflen + 1);
|
||||
if (!buf) {
|
||||
NODE_ERR("not enough memory\n");
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
/* This is the common case.. */
|
||||
buf = localbuf;
|
||||
}
|
||||
c_memcpy(buf, nptr, buflen);
|
||||
buf[buflen] = 0;
|
||||
|
||||
/* Update decimal point character if found */
|
||||
dp = c_strchr(buf, '.');
|
||||
if (dp)
|
||||
*dp = locale_decimal_point;
|
||||
|
||||
value = c_strtod(buf, &endbuf);
|
||||
*endptr = (char *)&nptr[endbuf - buf];
|
||||
if (buflen >= FPCONV_G_FMT_BUFSIZE)
|
||||
c_free(buf);
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
/* "fmt" must point to a buffer of at least 6 characters */
|
||||
static void set_number_format(char *fmt, int precision)
|
||||
{
|
||||
int d1, d2, i;
|
||||
|
||||
if(!(1 <= precision && precision <= 14)) return;
|
||||
|
||||
/* Create printf format (%.14g) from precision */
|
||||
d1 = precision / 10;
|
||||
d2 = precision % 10;
|
||||
fmt[0] = '%';
|
||||
fmt[1] = '.';
|
||||
i = 2;
|
||||
if (d1) {
|
||||
fmt[i++] = '0' + d1;
|
||||
}
|
||||
fmt[i++] = '0' + d2;
|
||||
fmt[i++] = 'g';
|
||||
fmt[i] = 0;
|
||||
}
|
||||
|
||||
/* Assumes there is always at least 32 characters available in the target buffer */
|
||||
int fpconv_g_fmt(char *str, double num, int precision)
|
||||
{
|
||||
char buf[FPCONV_G_FMT_BUFSIZE];
|
||||
char fmt[6];
|
||||
int len;
|
||||
char *b;
|
||||
|
||||
set_number_format(fmt, precision);
|
||||
|
||||
/* Pass through when decimal point character is dot. */
|
||||
if (locale_decimal_point == '.'){
|
||||
c_sprintf(str, fmt, num);
|
||||
return c_strlen(str);
|
||||
}
|
||||
|
||||
/* snprintf() to a buffer then translate for other decimal point characters */
|
||||
c_sprintf(buf, fmt, num);
|
||||
len = c_strlen(buf);
|
||||
|
||||
/* Copy into target location. Translate decimal point if required */
|
||||
b = buf;
|
||||
do {
|
||||
*str++ = (*b == locale_decimal_point ? '.' : *b);
|
||||
} while(*b++);
|
||||
|
||||
return len;
|
||||
}
|
||||
|
||||
void fpconv_init()
|
||||
{
|
||||
fpconv_update_locale();
|
||||
}
|
||||
#endif
|
||||
/* vi:ai et sw=4 ts=4:
|
||||
*/
|
|
@@ -1,22 +0,0 @@
/* Lua CJSON floating point conversion routines */

/* Buffer required to store the largest string representation of a double.
 *
 * Longest double printed with %.14g is 21 characters long:
 * -1.7976931348623e+308 */
# define FPCONV_G_FMT_BUFSIZE 32

#ifdef USE_INTERNAL_FPCONV
static inline void fpconv_init()
{
    /* Do nothing - not required */
}
#else
extern inline void fpconv_init();
#endif

extern int fpconv_g_fmt(char*, double, int);
extern double fpconv_strtod(const char*, char**);

/* vi:ai et sw=4 ts=4:
 */
@@ -1,112 +0,0 @@
|
|||
/****************************************************************
|
||||
*
|
||||
* The author of this software is David M. Gay.
|
||||
*
|
||||
* Copyright (c) 1991, 1996 by Lucent Technologies.
|
||||
*
|
||||
* Permission to use, copy, modify, and distribute this software for any
|
||||
* purpose without fee is hereby granted, provided that this entire notice
|
||||
* is included in all copies of any software which is or includes a copy
|
||||
* or modification of this software and in all copies of the supporting
|
||||
* documentation for such software.
|
||||
*
|
||||
* THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR IMPLIED
|
||||
* WARRANTY. IN PARTICULAR, NEITHER THE AUTHOR NOR LUCENT MAKES ANY
|
||||
* REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE MERCHANTABILITY
|
||||
* OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR PURPOSE.
|
||||
*
|
||||
***************************************************************/
|
||||
|
||||
/* g_fmt(buf,x) stores the closest decimal approximation to x in buf;
|
||||
* it suffices to declare buf
|
||||
* char buf[32];
|
||||
*/
|
||||
#if 0
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
extern char *dtoa(double, int, int, int *, int *, char **);
|
||||
extern int g_fmt(char *, double, int);
|
||||
extern void freedtoa(char*);
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
int
|
||||
fpconv_g_fmt(char *b, double x, int precision)
|
||||
{
|
||||
register int i, k;
|
||||
register char *s;
|
||||
int decpt, j, sign;
|
||||
char *b0, *s0, *se;
|
||||
|
||||
b0 = b;
|
||||
#ifdef IGNORE_ZERO_SIGN
|
||||
if (!x) {
|
||||
*b++ = '0';
|
||||
*b = 0;
|
||||
goto done;
|
||||
}
|
||||
#endif
|
||||
s = s0 = dtoa(x, 2, precision, &decpt, &sign, &se);
|
||||
if (sign)
|
||||
*b++ = '-';
|
||||
if (decpt == 9999) /* Infinity or Nan */ {
|
||||
while((*b++ = *s++));
|
||||
/* "b" is used to calculate the return length. Decrement to exclude the
|
||||
* Null terminator from the length */
|
||||
b--;
|
||||
goto done0;
|
||||
}
|
||||
if (decpt <= -4 || decpt > precision) {
|
||||
*b++ = *s++;
|
||||
if (*s) {
|
||||
*b++ = '.';
|
||||
while((*b = *s++))
|
||||
b++;
|
||||
}
|
||||
*b++ = 'e';
|
||||
/* sprintf(b, "%+.2d", decpt - 1); */
|
||||
if (--decpt < 0) {
|
||||
*b++ = '-';
|
||||
decpt = -decpt;
|
||||
}
|
||||
else
|
||||
*b++ = '+';
|
||||
for(j = 2, k = 10; 10*k <= decpt; j++, k *= 10);
|
||||
for(;;) {
|
||||
i = decpt / k;
|
||||
*b++ = i + '0';
|
||||
if (--j <= 0)
|
||||
break;
|
||||
decpt -= i*k;
|
||||
decpt *= 10;
|
||||
}
|
||||
*b = 0;
|
||||
}
|
||||
else if (decpt <= 0) {
|
||||
*b++ = '0';
|
||||
*b++ = '.';
|
||||
for(; decpt < 0; decpt++)
|
||||
*b++ = '0';
|
||||
while((*b++ = *s++));
|
||||
b--;
|
||||
}
|
||||
else {
|
||||
while((*b = *s++)) {
|
||||
b++;
|
||||
if (--decpt == 0 && *s)
|
||||
*b++ = '.';
|
||||
}
|
||||
for(; decpt > 0; decpt--)
|
||||
*b++ = '0';
|
||||
*b = 0;
|
||||
}
|
||||
done0:
|
||||
freedtoa(s0);
|
||||
#ifdef IGNORE_ZERO_SIGN
|
||||
done:
|
||||
#endif
|
||||
return b - b0;
|
||||
}
|
||||
#endif
|
|
@@ -1,271 +0,0 @@
|
|||
local json = require "cjson"
|
||||
|
||||
-- Various common routines used by the Lua CJSON package
|
||||
--
|
||||
-- Mark Pulford <mark@kyne.com.au>
|
||||
|
||||
-- Determine whether a Lua table can be treated as an array.
|
||||
-- Explicitly returns "not an array" for very sparse arrays.
|
||||
-- Returns:
|
||||
-- -1 Not an array
|
||||
-- 0 Empty table
|
||||
-- >0 Highest index in the array
|
||||
local function is_array(table)
|
||||
local max = 0
|
||||
local count = 0
|
||||
for k, v in pairs(table) do
|
||||
if type(k) == "number" then
|
||||
if k > max then max = k end
|
||||
count = count + 1
|
||||
else
|
||||
return -1
|
||||
end
|
||||
end
|
||||
if max > count * 2 then
|
||||
return -1
|
||||
end
|
||||
|
||||
return max
|
||||
end
|
||||
|
||||
local serialise_value
|
||||
|
||||
local function serialise_table(value, indent, depth)
|
||||
local spacing, spacing2, indent2
|
||||
if indent then
|
||||
spacing = "\n" .. indent
|
||||
spacing2 = spacing .. " "
|
||||
indent2 = indent .. " "
|
||||
else
|
||||
spacing, spacing2, indent2 = " ", " ", false
|
||||
end
|
||||
depth = depth + 1
|
||||
if depth > 50 then
|
||||
return "Cannot serialise any further: too many nested tables"
|
||||
end
|
||||
|
||||
local max = is_array(value)
|
||||
|
||||
local comma = false
|
||||
local fragment = { "{" .. spacing2 }
|
||||
if max > 0 then
|
||||
-- Serialise array
|
||||
for i = 1, max do
|
||||
if comma then
|
||||
table.insert(fragment, "," .. spacing2)
|
||||
end
|
||||
table.insert(fragment, serialise_value(value[i], indent2, depth))
|
||||
comma = true
|
||||
end
|
||||
elseif max < 0 then
|
||||
-- Serialise table
|
||||
for k, v in pairs(value) do
|
||||
if comma then
|
||||
table.insert(fragment, "," .. spacing2)
|
||||
end
|
||||
table.insert(fragment,
|
||||
("[%s] = %s"):format(serialise_value(k, indent2, depth),
|
||||
serialise_value(v, indent2, depth)))
|
||||
comma = true
|
||||
end
|
||||
end
|
||||
table.insert(fragment, spacing .. "}")
|
||||
|
||||
return table.concat(fragment)
|
||||
end
|
||||
|
||||
function serialise_value(value, indent, depth)
|
||||
if indent == nil then indent = "" end
|
||||
if depth == nil then depth = 0 end
|
||||
|
||||
if value == json.null then
|
||||
return "json.null"
|
||||
elseif type(value) == "string" then
|
||||
return ("%q"):format(value)
|
||||
elseif type(value) == "nil" or type(value) == "number" or
|
||||
type(value) == "boolean" then
|
||||
return tostring(value)
|
||||
elseif type(value) == "table" then
|
||||
return serialise_table(value, indent, depth)
|
||||
else
|
||||
return "\"<" .. type(value) .. ">\""
|
||||
end
|
||||
end
|
||||
|
||||
local function file_load(filename)
|
||||
local file
|
||||
if filename == nil then
|
||||
file = io.stdin
|
||||
else
|
||||
local err
|
||||
file, err = io.open(filename, "rb")
|
||||
if file == nil then
|
||||
error(("Unable to read '%s': %s"):format(filename, err))
|
||||
end
|
||||
end
|
||||
local data = file:read("*a")
|
||||
|
||||
if filename ~= nil then
|
||||
file:close()
|
||||
end
|
||||
|
||||
if data == nil then
|
||||
error("Failed to read " .. filename)
|
||||
end
|
||||
|
||||
return data
|
||||
end
|
||||
|
||||
local function file_save(filename, data)
|
||||
local file
|
||||
if filename == nil then
|
||||
file = io.stdout
|
||||
else
|
||||
local err
|
||||
file, err = io.open(filename, "wb")
|
||||
if file == nil then
|
||||
error(("Unable to write '%s': %s"):format(filename, err))
|
||||
end
|
||||
end
|
||||
file:write(data)
|
||||
if filename ~= nil then
|
||||
file:close()
|
||||
end
|
||||
end
|
||||
|
||||
local function compare_values(val1, val2)
|
||||
local type1 = type(val1)
|
||||
local type2 = type(val2)
|
||||
if type1 ~= type2 then
|
||||
return false
|
||||
end
|
||||
|
||||
-- Check for NaN
|
||||
if type1 == "number" and val1 ~= val1 and val2 ~= val2 then
|
||||
return true
|
||||
end
|
||||
|
||||
if type1 ~= "table" then
|
||||
return val1 == val2
|
||||
end
|
||||
|
||||
-- check_keys stores all the keys that must be checked in val2
|
||||
local check_keys = {}
|
||||
for k, _ in pairs(val1) do
|
||||
check_keys[k] = true
|
||||
end
|
||||
|
||||
for k, v in pairs(val2) do
|
||||
if not check_keys[k] then
|
||||
return false
|
||||
end
|
||||
|
||||
if not compare_values(val1[k], val2[k]) then
|
||||
return false
|
||||
end
|
||||
|
||||
check_keys[k] = nil
|
||||
end
|
||||
for k, _ in pairs(check_keys) do
|
||||
-- Not the same if any keys from val1 were not found in val2
|
||||
return false
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
local test_count_pass = 0
|
||||
local test_count_total = 0
|
||||
|
||||
local function run_test_summary()
|
||||
return test_count_pass, test_count_total
|
||||
end
|
||||
|
||||
local function run_test(testname, func, input, should_work, output)
|
||||
local function status_line(name, status, value)
|
||||
local statusmap = { [true] = ":success", [false] = ":error" }
|
||||
if status ~= nil then
|
||||
name = name .. statusmap[status]
|
||||
end
|
||||
print(("[%s] %s"):format(name, serialise_value(value, false)))
|
||||
end
|
||||
|
||||
local result = { pcall(func, unpack(input)) }
|
||||
local success = table.remove(result, 1)
|
||||
|
||||
local correct = false
|
||||
if success == should_work and compare_values(result, output) then
|
||||
correct = true
|
||||
test_count_pass = test_count_pass + 1
|
||||
end
|
||||
test_count_total = test_count_total + 1
|
||||
|
||||
local teststatus = { [true] = "PASS", [false] = "FAIL" }
|
||||
print(("==> Test [%d] %s: %s"):format(test_count_total, testname,
|
||||
teststatus[correct]))
|
||||
|
||||
status_line("Input", nil, input)
|
||||
if not correct then
|
||||
status_line("Expected", should_work, output)
|
||||
end
|
||||
status_line("Received", success, result)
|
||||
print()
|
||||
|
||||
return correct, result
|
||||
end
|
||||
|
||||
local function run_test_group(tests)
|
||||
local function run_helper(name, func, input)
|
||||
if type(name) == "string" and #name > 0 then
|
||||
print("==> " .. name)
|
||||
end
|
||||
-- Not a protected call, these functions should never generate errors.
|
||||
func(unpack(input or {}))
|
||||
print()
|
||||
end
|
||||
|
||||
for _, v in ipairs(tests) do
|
||||
-- Run the helper if "should_work" is missing
|
||||
if v[4] == nil then
|
||||
run_helper(unpack(v))
|
||||
else
|
||||
run_test(unpack(v))
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Run a Lua script in a separate environment
|
||||
local function run_script(script, env)
|
||||
local env = env or {}
|
||||
local func
|
||||
|
||||
-- Use setfenv() if it exists, otherwise assume Lua 5.2 load() exists
|
||||
if _G.setfenv then
|
||||
func = loadstring(script)
|
||||
if func then
|
||||
setfenv(func, env)
|
||||
end
|
||||
else
|
||||
func = load(script, nil, nil, env)
|
||||
end
|
||||
|
||||
if func == nil then
|
||||
error("Invalid syntax.")
|
||||
end
|
||||
func()
|
||||
|
||||
return env
|
||||
end
|
||||
|
||||
-- Export functions
|
||||
return {
|
||||
serialise_value = serialise_value,
|
||||
file_load = file_load,
|
||||
file_save = file_save,
|
||||
compare_values = compare_values,
|
||||
run_test_summary = run_test_summary,
|
||||
run_test = run_test,
|
||||
run_test_group = run_test_group,
|
||||
run_script = run_script
|
||||
}
|
||||
|
||||
-- vi:ai et sw=4 ts=4:
|
|
@@ -1,14 +0,0 @@
#!/usr/bin/env lua

-- usage: json2lua.lua [json_file]
--
-- Eg:
-- echo '[ "testing" ]' | ./json2lua.lua
-- ./json2lua.lua test.json

local json = require "cjson"
local util = require "cjson.util"

local json_text = util.file_load(arg[1])
local t = json.decode(json_text)
print(util.serialise_value(t))
@@ -1,20 +0,0 @@
#!/usr/bin/env lua

-- usage: lua2json.lua [lua_file]
--
-- Eg:
-- echo '{ "testing" }' | ./lua2json.lua
-- ./lua2json.lua test.lua

local json = require "cjson"
local util = require "cjson.util"

local env = {
    json = { null = json.null },
    null = json.null
}

local t = util.run_script("data = " .. util.file_load(arg[1]), env)
print(json.encode(t.data))

-- vi:ai et sw=4 ts=4:
@@ -1,168 +0,0 @@
|
|||
= Lua CJSON 2.1devel Manual =
|
||||
Mark Pulford <mark@kyne.com.au>
|
||||
:revdate: 1st March 2012
|
||||
|
||||
Overview
|
||||
--------
|
||||
|
||||
The Lua CJSON module provides JSON support for Lua.
|
||||
|
||||
*Features*::
|
||||
- Fast, standards compliant encoding/parsing routines
|
||||
- Full support for JSON with UTF-8, including decoding surrogate pairs
|
||||
- Optional run-time support for common exceptions to the JSON
|
||||
specification (infinity, NaN,..)
|
||||
- No dependencies on other libraries
|
||||
|
||||
*Caveats*::
|
||||
- UTF-16 and UTF-32 are not supported
|
||||
|
||||
Lua CJSON is covered by the MIT license. Review the file +LICENSE+ for
|
||||
details.
|
||||
|
||||
API (Functions)
|
||||
---------------
|
||||
|
||||
Synopsis
|
||||
~~~~~~~~
|
||||
|
||||
[source,lua]
|
||||
------------
|
||||
|
||||
-- Translate Lua value to/from JSON
|
||||
text = cjson.encode(value)
|
||||
value = cjson.decode(text)
|
||||
|
||||
|
||||
Module Instantiation
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
decode
|
||||
~~~~~~
|
||||
|
||||
[source,lua]
|
||||
------------
|
||||
value = cjson.decode(json_text)
|
||||
------------
|
||||
|
||||
+cjson.decode+ will deserialise any UTF-8 JSON string into a Lua value
|
||||
or table.
|
||||
|
||||
UTF-16 and UTF-32 JSON strings are not supported.
|
||||
|
||||
+cjson.decode+ requires that any NULL (ASCII 0) and double quote (ASCII
|
||||
34) characters are escaped within strings. All escape codes will be
|
||||
decoded and other bytes will be passed transparently. UTF-8 characters
|
||||
are not validated during decoding and should be checked elsewhere if
|
||||
required.
|
||||
|
||||
JSON +null+ will be converted to a NULL +lightuserdata+ value. This can
|
||||
be compared with +cjson.null+ for convenience.
|
||||
|
||||
By default, numbers incompatible with the JSON specification (infinity,
|
||||
NaN, hexadecimal) can be decoded. This default can be changed with
|
||||
<<decode_invalid_numbers,+cjson.decode_invalid_numbers+>>.
|
||||
|
||||
.Example: Decoding
|
||||
[source,lua]
|
||||
json_text = '[ true, { "foo": "bar" } ]'
|
||||
value = cjson.decode(json_text)
|
||||
-- Returns: { true, { foo = "bar" } }
|
||||
|
||||
[CAUTION]
|
||||
Care must be taken after decoding JSON objects with numeric keys. Each
|
||||
numeric key will be stored as a Lua +string+. Any subsequent code
|
||||
assuming type +number+ may break.
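An illustrative snippet of that caveat (not part of the original manual):

[source,lua]
t = cjson.decode('{ "1": "one", "2": "two" }')
-- t["1"] == "one", but t[1] == nil: object keys always decode to Lua strings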
|
||||
|
||||
|
||||
[[encode]]
|
||||
encode
|
||||
~~~~~~
|
||||
|
||||
[source,lua]
|
||||
------------
|
||||
json_text = cjson.encode(value)
|
||||
------------
|
||||
|
||||
+cjson.encode+ will serialise a Lua value into a string containing the
|
||||
JSON representation.
|
||||
|
||||
+cjson.encode+ supports the following types:
|
||||
|
||||
- +boolean+
|
||||
- +lightuserdata+ (NULL value only)
|
||||
- +nil+
|
||||
- +number+
|
||||
- +string+
|
||||
- +table+
|
||||
|
||||
The remaining Lua types will generate an error:
|
||||
|
||||
- +function+
|
||||
- +lightuserdata+ (non-NULL values)
|
||||
- +thread+
|
||||
- +userdata+
|
||||
|
||||
By default, numbers are encoded with 14 significant digits. Refer to
|
||||
<<encode_number_precision,+cjson.encode_number_precision+>> for details.
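For example (illustrative, based on the documented default):

[source,lua]
print(cjson.encode(1/3))
-- "0.33333333333333"   (14 significant digits)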
|
||||
|
||||
Lua CJSON will escape the following characters within each UTF-8 string:
|
||||
|
||||
- Control characters (ASCII 0 - 31)
|
||||
- Double quote (ASCII 34)
|
||||
- Forward slash (ASCII 47)
|
||||
- Backslash (ASCII 92)
|
||||
- Delete (ASCII 127)
|
||||
|
||||
All other bytes are passed transparently.
|
||||
|
||||
[CAUTION]
|
||||
=========
|
||||
Lua CJSON will successfully encode/decode binary strings, but this is
|
||||
technically not supported by JSON and may not be compatible with other
|
||||
JSON libraries. To ensure the output is valid JSON, applications should
|
||||
ensure all Lua strings passed to +cjson.encode+ are UTF-8.
|
||||
|
||||
Base64 is commonly used to encode binary data as the most efficient
|
||||
encoding under UTF-8 can only reduce the encoded size by a further
|
||||
~8%. Lua Base64 routines can be found in the
|
||||
http://w3.impa.br/%7Ediego/software/luasocket/[LuaSocket] and
|
||||
http://www.tecgraf.puc-rio.br/%7Elhf/ftp/lua/#lbase64[lbase64] packages.
|
||||
=========
|
||||
|
||||
Lua CJSON uses a heuristic to determine whether to encode a Lua table as
|
||||
a JSON array or an object. A Lua table with only positive integer keys
|
||||
of type +number+ will be encoded as a JSON array. All other tables will
|
||||
be encoded as a JSON object.
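Illustrative examples of this heuristic (not from the original manual; key order in object output may vary):

[source,lua]
print(cjson.encode({ 10, 20, 30 }))           -- [10,20,30]
print(cjson.encode({ x = 1, y = 2 }))         -- {"x":1,"y":2}
print(cjson.encode({ 10, 20, label = "p" }))  -- {"1":10,"2":20,"label":"p"}  (mixed keys encode as an object)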
|
||||
|
||||
Lua CJSON does not use metamethods when serialising tables.
|
||||
|
||||
- +rawget+ is used to iterate over Lua arrays
|
||||
- +next+ is used to iterate over Lua objects
|
||||
|
||||
Lua arrays with missing entries (_sparse arrays_) may optionally be
|
||||
encoded in several different ways. Refer to
|
||||
<<encode_sparse_array,+cjson.encode_sparse_array+>> for details.
|
||||
|
||||
JSON object keys are always strings. Hence +cjson.encode+ only supports
|
||||
table keys which are type +number+ or +string+. All other types will
|
||||
generate an error.
|
||||
|
||||
[NOTE]
|
||||
Standards compliant JSON must be encapsulated in either an object (+{}+)
|
||||
or an array (+[]+). If strictly standards compliant JSON is desired, a
|
||||
table must be passed to +cjson.encode+.
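Illustrative example (not from the original manual):

[source,lua]
print(cjson.encode("hello"))      -- "hello"    (a bare value, not strictly compliant on its own)
print(cjson.encode({ "hello" }))  -- ["hello"]  (wrapped in a table, compliant)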
|
||||
|
||||
By default, encoding the following Lua values will generate errors:
|
||||
|
||||
- Numbers incompatible with the JSON specification (infinity, NaN)
|
||||
- Tables nested more than 1000 levels deep
|
||||
- Excessively sparse Lua arrays
|
||||
|
||||
.Example: Encoding
|
||||
[source,lua]
|
||||
value = { true, { foo = "bar" } }
|
||||
json_text = cjson.encode(value)
|
||||
-- Returns: '[true,{"foo":"bar"}]'
|
||||
|
||||
// vi:ft=asciidoc tw=72:
|
|
@@ -1,563 +0,0 @@
|
|||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Network Working Group D. Crockford
|
||||
Request for Comments: 4627 JSON.org
|
||||
Category: Informational July 2006
|
||||
|
||||
|
||||
The application/json Media Type for JavaScript Object Notation (JSON)
|
||||
|
||||
Status of This Memo
|
||||
|
||||
This memo provides information for the Internet community. It does
|
||||
not specify an Internet standard of any kind. Distribution of this
|
||||
memo is unlimited.
|
||||
|
||||
Copyright Notice
|
||||
|
||||
Copyright (C) The Internet Society (2006).
|
||||
|
||||
Abstract
|
||||
|
||||
JavaScript Object Notation (JSON) is a lightweight, text-based,
|
||||
language-independent data interchange format. It was derived from
|
||||
the ECMAScript Programming Language Standard. JSON defines a small
|
||||
set of formatting rules for the portable representation of structured
|
||||
data.
|
||||
|
||||
1. Introduction
|
||||
|
||||
JavaScript Object Notation (JSON) is a text format for the
|
||||
serialization of structured data. It is derived from the object
|
||||
literals of JavaScript, as defined in the ECMAScript Programming
|
||||
Language Standard, Third Edition [ECMA].
|
||||
|
||||
JSON can represent four primitive types (strings, numbers, booleans,
|
||||
and null) and two structured types (objects and arrays).
|
||||
|
||||
A string is a sequence of zero or more Unicode characters [UNICODE].
|
||||
|
||||
An object is an unordered collection of zero or more name/value
|
||||
pairs, where a name is a string and a value is a string, number,
|
||||
boolean, null, object, or array.
|
||||
|
||||
An array is an ordered sequence of zero or more values.
|
||||
|
||||
The terms "object" and "array" come from the conventions of
|
||||
JavaScript.
|
||||
|
||||
JSON's design goals were for it to be minimal, portable, textual, and
|
||||
a subset of JavaScript.
|
||||
|
||||
|
||||
|
||||
Crockford Informational [Page 1]
|
||||
|
||||
RFC 4627 JSON July 2006
|
||||
|
||||
|
||||
1.1. Conventions Used in This Document
|
||||
|
||||
The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT",
|
||||
"SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this
|
||||
document are to be interpreted as described in [RFC2119].
|
||||
|
||||
The grammatical rules in this document are to be interpreted as
|
||||
described in [RFC4234].
|
||||
|
||||
2. JSON Grammar
|
||||
|
||||
A JSON text is a sequence of tokens. The set of tokens includes six
|
||||
structural characters, strings, numbers, and three literal names.
|
||||
|
||||
A JSON text is a serialized object or array.
|
||||
|
||||
JSON-text = object / array
|
||||
|
||||
These are the six structural characters:
|
||||
|
||||
begin-array = ws %x5B ws ; [ left square bracket
|
||||
|
||||
begin-object = ws %x7B ws ; { left curly bracket
|
||||
|
||||
end-array = ws %x5D ws ; ] right square bracket
|
||||
|
||||
end-object = ws %x7D ws ; } right curly bracket
|
||||
|
||||
name-separator = ws %x3A ws ; : colon
|
||||
|
||||
value-separator = ws %x2C ws ; , comma
|
||||
|
||||
Insignificant whitespace is allowed before or after any of the six
|
||||
structural characters.
|
||||
|
||||
ws = *(
|
||||
%x20 / ; Space
|
||||
%x09 / ; Horizontal tab
|
||||
%x0A / ; Line feed or New line
|
||||
%x0D ; Carriage return
|
||||
)
|
||||
|
||||
2.1. Values
|
||||
|
||||
A JSON value MUST be an object, array, number, or string, or one of
|
||||
the following three literal names:
|
||||
|
||||
false null true
|
||||
|
||||
|
||||
|
||||
Crockford Informational [Page 2]
|
||||
|
||||
RFC 4627 JSON July 2006
|
||||
|
||||
|
||||
The literal names MUST be lowercase. No other literal names are
|
||||
allowed.
|
||||
|
||||
value = false / null / true / object / array / number / string
|
||||
|
||||
false = %x66.61.6c.73.65 ; false
|
||||
|
||||
null = %x6e.75.6c.6c ; null
|
||||
|
||||
true = %x74.72.75.65 ; true
|
||||
|
||||
2.2. Objects
|
||||
|
||||
An object structure is represented as a pair of curly brackets
|
||||
surrounding zero or more name/value pairs (or members). A name is a
|
||||
string. A single colon comes after each name, separating the name
|
||||
from the value. A single comma separates a value from a following
|
||||
name. The names within an object SHOULD be unique.
|
||||
|
||||
object = begin-object [ member *( value-separator member ) ]
|
||||
end-object
|
||||
|
||||
member = string name-separator value
|
||||
|
||||
2.3. Arrays
|
||||
|
||||
An array structure is represented as square brackets surrounding zero
|
||||
or more values (or elements). Elements are separated by commas.
|
||||
|
||||
array = begin-array [ value *( value-separator value ) ] end-array
|
||||
|
||||
2.4. Numbers
|
||||
|
||||
The representation of numbers is similar to that used in most
|
||||
programming languages. A number contains an integer component that
|
||||
may be prefixed with an optional minus sign, which may be followed by
|
||||
a fraction part and/or an exponent part.
|
||||
|
||||
Octal and hex forms are not allowed. Leading zeros are not allowed.
|
||||
|
||||
A fraction part is a decimal point followed by one or more digits.
|
||||
|
||||
An exponent part begins with the letter E in upper or lowercase,
|
||||
which may be followed by a plus or minus sign. The E and optional
|
||||
sign are followed by one or more digits.
|
||||
|
||||
Numeric values that cannot be represented as sequences of digits
|
||||
(such as Infinity and NaN) are not permitted.
|
||||
|
||||
|
||||
|
||||
Crockford Informational [Page 3]
|
||||
|
||||
RFC 4627 JSON July 2006
|
||||
|
||||
|
||||
number = [ minus ] int [ frac ] [ exp ]
|
||||
|
||||
decimal-point = %x2E ; .
|
||||
|
||||
digit1-9 = %x31-39 ; 1-9
|
||||
|
||||
e = %x65 / %x45 ; e E
|
||||
|
||||
exp = e [ minus / plus ] 1*DIGIT
|
||||
|
||||
frac = decimal-point 1*DIGIT
|
||||
|
||||
int = zero / ( digit1-9 *DIGIT )
|
||||
|
||||
minus = %x2D ; -
|
||||
|
||||
plus = %x2B ; +
|
||||
|
||||
zero = %x30 ; 0
|
||||
|
||||
2.5. Strings
|
||||
|
||||
The representation of strings is similar to conventions used in the C
|
||||
family of programming languages. A string begins and ends with
|
||||
quotation marks. All Unicode characters may be placed within the
|
||||
quotation marks except for the characters that must be escaped:
|
||||
quotation mark, reverse solidus, and the control characters (U+0000
|
||||
through U+001F).
|
||||
|
||||
Any character may be escaped. If the character is in the Basic
|
||||
Multilingual Plane (U+0000 through U+FFFF), then it may be
|
||||
represented as a six-character sequence: a reverse solidus, followed
|
||||
by the lowercase letter u, followed by four hexadecimal digits that
|
||||
encode the character's code point. The hexadecimal letters A though
|
||||
F can be upper or lowercase. So, for example, a string containing
|
||||
only a single reverse solidus character may be represented as
|
||||
"\u005C".
|
||||
|
||||
Alternatively, there are two-character sequence escape
|
||||
representations of some popular characters. So, for example, a
|
||||
string containing only a single reverse solidus character may be
|
||||
represented more compactly as "\\".
|
||||
|
||||
To escape an extended character that is not in the Basic Multilingual
|
||||
Plane, the character is represented as a twelve-character sequence,
|
||||
encoding the UTF-16 surrogate pair. So, for example, a string
|
||||
containing only the G clef character (U+1D11E) may be represented as
|
||||
"\uD834\uDD1E".
|
||||
|
||||
|
||||
|
||||
Crockford Informational [Page 4]
|
||||
|
||||
RFC 4627 JSON July 2006
|
||||
|
||||
|
||||
string = quotation-mark *char quotation-mark
|
||||
|
||||
char = unescaped /
|
||||
escape (
|
||||
%x22 / ; " quotation mark U+0022
|
||||
%x5C / ; \ reverse solidus U+005C
|
||||
%x2F / ; / solidus U+002F
|
||||
%x62 / ; b backspace U+0008
|
||||
%x66 / ; f form feed U+000C
|
||||
%x6E / ; n line feed U+000A
|
||||
%x72 / ; r carriage return U+000D
|
||||
%x74 / ; t tab U+0009
|
||||
%x75 4HEXDIG ) ; uXXXX U+XXXX
|
||||
|
||||
escape = %x5C ; \
|
||||
|
||||
quotation-mark = %x22 ; "
|
||||
|
||||
unescaped = %x20-21 / %x23-5B / %x5D-10FFFF
|
||||
|
||||
3. Encoding
|
||||
|
||||
JSON text SHALL be encoded in Unicode. The default encoding is
|
||||
UTF-8.
|
||||
|
||||
Since the first two characters of a JSON text will always be ASCII
|
||||
characters [RFC0020], it is possible to determine whether an octet
|
||||
stream is UTF-8, UTF-16 (BE or LE), or UTF-32 (BE or LE) by looking
|
||||
at the pattern of nulls in the first four octets.
|
||||
|
||||
00 00 00 xx UTF-32BE
|
||||
00 xx 00 xx UTF-16BE
|
||||
xx 00 00 00 UTF-32LE
|
||||
xx 00 xx 00 UTF-16LE
|
||||
xx xx xx xx UTF-8
|
||||
|
||||
4. Parsers
|
||||
|
||||
A JSON parser transforms a JSON text into another representation. A
|
||||
JSON parser MUST accept all texts that conform to the JSON grammar.
|
||||
A JSON parser MAY accept non-JSON forms or extensions.
|
||||
|
||||
An implementation may set limits on the size of texts that it
|
||||
accepts. An implementation may set limits on the maximum depth of
|
||||
nesting. An implementation may set limits on the range of numbers.
|
||||
An implementation may set limits on the length and character contents
|
||||
of strings.
|
||||
|
||||
|
||||
|
||||
|
||||
Crockford Informational [Page 5]
|
||||
|
||||
RFC 4627 JSON July 2006
|
||||
|
||||
|
||||
5. Generators
|
||||
|
||||
A JSON generator produces JSON text. The resulting text MUST
|
||||
strictly conform to the JSON grammar.
|
||||
|
||||
6. IANA Considerations
|
||||
|
||||
The MIME media type for JSON text is application/json.
|
||||
|
||||
Type name: application
|
||||
|
||||
Subtype name: json
|
||||
|
||||
Required parameters: n/a
|
||||
|
||||
Optional parameters: n/a
|
||||
|
||||
Encoding considerations: 8bit if UTF-8; binary if UTF-16 or UTF-32
|
||||
|
||||
JSON may be represented using UTF-8, UTF-16, or UTF-32. When JSON
|
||||
is written in UTF-8, JSON is 8bit compatible. When JSON is
|
||||
written in UTF-16 or UTF-32, the binary content-transfer-encoding
|
||||
must be used.
|
||||
|
||||
Security considerations:
|
||||
|
||||
Generally there are security issues with scripting languages. JSON
|
||||
is a subset of JavaScript, but it is a safe subset that excludes
|
||||
assignment and invocation.
|
||||
|
||||
A JSON text can be safely passed into JavaScript's eval() function
|
||||
(which compiles and executes a string) if all the characters not
|
||||
enclosed in strings are in the set of characters that form JSON
|
||||
tokens. This can be quickly determined in JavaScript with two
|
||||
regular expressions and calls to the test and replace methods.
|
||||
|
||||
var my_JSON_object = !(/[^,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]/.test(
|
||||
text.replace(/"(\\.|[^"\\])*"/g, ''))) &&
|
||||
eval('(' + text + ')');
|
||||
|
||||
Interoperability considerations: n/a
|
||||
|
||||
Published specification: RFC 4627
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Crockford Informational [Page 6]
|
||||
|
||||
RFC 4627 JSON July 2006
|
||||
|
||||
|
||||
Applications that use this media type:
|
||||
|
||||
JSON has been used to exchange data between applications written
|
||||
in all of these programming languages: ActionScript, C, C#,
|
||||
ColdFusion, Common Lisp, E, Erlang, Java, JavaScript, Lua,
|
||||
Objective CAML, Perl, PHP, Python, Rebol, Ruby, and Scheme.
|
||||
|
||||
Additional information:
|
||||
|
||||
Magic number(s): n/a
|
||||
File extension(s): .json
|
||||
Macintosh file type code(s): TEXT
|
||||
|
||||
Person & email address to contact for further information:
|
||||
Douglas Crockford
|
||||
douglas@crockford.com
|
||||
|
||||
Intended usage: COMMON
|
||||
|
||||
Restrictions on usage: none
|
||||
|
||||
Author:
|
||||
Douglas Crockford
|
||||
douglas@crockford.com
|
||||
|
||||
Change controller:
|
||||
Douglas Crockford
|
||||
douglas@crockford.com
|
||||
|
||||
7. Security Considerations
|
||||
|
||||
See Security Considerations in Section 6.
|
||||
|
||||
8. Examples
|
||||
|
||||
This is a JSON object:
|
||||
|
||||
{
|
||||
"Image": {
|
||||
"Width": 800,
|
||||
"Height": 600,
|
||||
"Title": "View from 15th Floor",
|
||||
"Thumbnail": {
|
||||
"Url": "http://www.example.com/image/481989943",
|
||||
"Height": 125,
|
||||
"Width": "100"
|
||||
},
|
||||
"IDs": [116, 943, 234, 38793]
|
||||
|
||||
|
||||
|
||||
Crockford Informational [Page 7]
|
||||
|
||||
RFC 4627 JSON July 2006
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
Its Image member is an object whose Thumbnail member is an object
|
||||
and whose IDs member is an array of numbers.
|
||||
|
||||
This is a JSON array containing two objects:
|
||||
|
||||
[
|
||||
{
|
||||
"precision": "zip",
|
||||
"Latitude": 37.7668,
|
||||
"Longitude": -122.3959,
|
||||
"Address": "",
|
||||
"City": "SAN FRANCISCO",
|
||||
"State": "CA",
|
||||
"Zip": "94107",
|
||||
"Country": "US"
|
||||
},
|
||||
{
|
||||
"precision": "zip",
|
||||
"Latitude": 37.371991,
|
||||
"Longitude": -122.026020,
|
||||
"Address": "",
|
||||
"City": "SUNNYVALE",
|
||||
"State": "CA",
|
||||
"Zip": "94085",
|
||||
"Country": "US"
|
||||
}
|
||||
]
|
||||
|
||||
9. References
|
||||
|
||||
9.1. Normative References
|
||||
|
||||
[ECMA] European Computer Manufacturers Association, "ECMAScript
|
||||
Language Specification 3rd Edition", December 1999,
|
||||
<http://www.ecma-international.org/publications/files/
|
||||
ecma-st/ECMA-262.pdf>.
|
||||
|
||||
[RFC0020] Cerf, V., "ASCII format for network interchange", RFC 20,
|
||||
October 1969.
|
||||
|
||||
[RFC2119] Bradner, S., "Key words for use in RFCs to Indicate
|
||||
Requirement Levels", BCP 14, RFC 2119, March 1997.
|
||||
|
||||
[RFC4234] Crocker, D. and P. Overell, "Augmented BNF for Syntax
|
||||
Specifications: ABNF", RFC 4234, October 2005.
|
||||
|
||||
|
||||
|
||||
Crockford Informational [Page 8]
|
||||
|
||||
RFC 4627 JSON July 2006
|
||||
|
||||
|
||||
[UNICODE] The Unicode Consortium, "The Unicode Standard Version 4.0",
|
||||
2003, <http://www.unicode.org/versions/Unicode4.1.0/>.
|
||||
|
||||
Author's Address
|
||||
|
||||
Douglas Crockford
|
||||
JSON.org
|
||||
EMail: douglas@crockford.com
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Crockford Informational [Page 9]
|
||||
|
||||
RFC 4627 JSON July 2006
|
||||
|
||||
|
||||
Full Copyright Statement
|
||||
|
||||
Copyright (C) The Internet Society (2006).
|
||||
|
||||
This document is subject to the rights, licenses and restrictions
|
||||
contained in BCP 78, and except as set forth therein, the authors
|
||||
retain all their rights.
|
||||
|
||||
This document and the information contained herein are provided on an
|
||||
"AS IS" basis and THE CONTRIBUTOR, THE ORGANIZATION HE/SHE REPRESENTS
|
||||
OR IS SPONSORED BY (IF ANY), THE INTERNET SOCIETY AND THE INTERNET
|
||||
ENGINEERING TASK FORCE DISCLAIM ALL WARRANTIES, EXPRESS OR IMPLIED,
|
||||
INCLUDING BUT NOT LIMITED TO ANY WARRANTY THAT THE USE OF THE
|
||||
INFORMATION HEREIN WILL NOT INFRINGE ANY RIGHTS OR ANY IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
|
||||
|
||||
Intellectual Property
|
||||
|
||||
The IETF takes no position regarding the validity or scope of any
|
||||
Intellectual Property Rights or other rights that might be claimed to
|
||||
pertain to the implementation or use of the technology described in
|
||||
this document or the extent to which any license under such rights
|
||||
might or might not be available; nor does it represent that it has
|
||||
made any independent effort to identify any such rights. Information
|
||||
on the procedures with respect to rights in RFC documents can be
|
||||
found in BCP 78 and BCP 79.
|
||||
|
||||
Copies of IPR disclosures made to the IETF Secretariat and any
|
||||
assurances of licenses to be made available, or the result of an
|
||||
attempt made to obtain a general license or permission for the use of
|
||||
such proprietary rights by implementers or users of this
|
||||
specification can be obtained from the IETF on-line IPR repository at
|
||||
http://www.ietf.org/ipr.
|
||||
|
||||
The IETF invites any interested party to bring to its attention any
|
||||
copyrights, patents or patent applications, or other proprietary
|
||||
rights that may cover technology that may be required to implement
|
||||
this standard. Please address the information to the IETF at
|
||||
ietf-ipr@ietf.org.
|
||||
|
||||
Acknowledgement
|
||||
|
||||
Funding for the RFC Editor function is provided by the IETF
|
||||
Administrative Support Activity (IASA).
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Crockford Informational [Page 10]
|
||||
|
|
@@ -1,253 +0,0 @@
|
|||
/* strbuf - String buffer routines
|
||||
*
|
||||
* Copyright (c) 2010-2012 Mark Pulford <mark@kyne.com.au>
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining
|
||||
* a copy of this software and associated documentation files (the
|
||||
* "Software"), to deal in the Software without restriction, including
|
||||
* without limitation the rights to use, copy, modify, merge, publish,
|
||||
* distribute, sublicense, and/or sell copies of the Software, and to
|
||||
* permit persons to whom the Software is furnished to do so, subject to
|
||||
* the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be
|
||||
* included in all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "c_stdio.h"
|
||||
#include "c_stdlib.h"
|
||||
#include "c_stdarg.h"
|
||||
#include "c_string.h"
|
||||
|
||||
#include "strbuf.h"
|
||||
#include "cjson_mem.h"
|
||||
|
||||
int strbuf_init(strbuf_t *s, int len)
|
||||
{
|
||||
int size;
|
||||
|
||||
if (len <= 0)
|
||||
size = STRBUF_DEFAULT_SIZE;
|
||||
else
|
||||
size = len + 1; /* \0 terminator */
|
||||
|
||||
s->buf = NULL;
|
||||
s->size = size;
|
||||
s->length = 0;
|
||||
s->increment = STRBUF_DEFAULT_INCREMENT;
|
||||
s->dynamic = 0;
|
||||
s->reallocs = 0;
|
||||
s->debug = 0;
|
||||
|
||||
s->buf = (char *)cjson_mem_malloc(size);
|
||||
if (!s->buf){
|
||||
NODE_ERR("not enough memory\n");
|
||||
return -1;
|
||||
}
|
||||
|
||||
strbuf_ensure_null(s);
|
||||
return 0;
|
||||
}
|
||||
|
||||
strbuf_t *strbuf_new(int len)
|
||||
{
|
||||
strbuf_t *s;
|
||||
|
||||
s = (strbuf_t *)cjson_mem_malloc(sizeof(strbuf_t));
|
||||
if (!s){
|
||||
NODE_ERR("not enough memory\n");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
strbuf_init(s, len);
|
||||
|
||||
/* Dynamic strbuf allocation / deallocation */
|
||||
s->dynamic = 1;
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
int strbuf_set_increment(strbuf_t *s, int increment)
|
||||
{
|
||||
/* Increment > 0: Linear buffer growth rate
|
||||
* Increment < -1: Exponential buffer growth rate */
|
||||
if (increment == 0 || increment == -1){
|
||||
NODE_ERR("BUG: Invalid string increment");
|
||||
return -1;
|
||||
}
|
||||
|
||||
s->increment = increment;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static inline void debug_stats(strbuf_t *s)
|
||||
{
|
||||
if (s->debug) {
|
||||
NODE_ERR("strbuf(%lx) reallocs: %d, length: %d, size: %d\n",
|
||||
(long)s, s->reallocs, s->length, s->size);
|
||||
}
|
||||
}
|
||||
|
||||
/* If strbuf_t has not been dynamically allocated, strbuf_free() can
|
||||
* be called any number of times after strbuf_init() */
|
||||
void strbuf_free(strbuf_t *s)
|
||||
{
|
||||
debug_stats(s);
|
||||
|
||||
if (s->buf) {
|
||||
c_free(s->buf);
|
||||
s->buf = NULL;
|
||||
}
|
||||
if (s->dynamic)
|
||||
c_free(s);
|
||||
}
|
||||
|
||||
char *strbuf_free_to_string(strbuf_t *s, int *len)
|
||||
{
|
||||
char *buf;
|
||||
|
||||
debug_stats(s);
|
||||
|
||||
strbuf_ensure_null(s);
|
||||
|
||||
buf = s->buf;
|
||||
if (len)
|
||||
*len = s->length;
|
||||
|
||||
if (s->dynamic)
|
||||
c_free(s);
|
||||
|
||||
return buf;
|
||||
}
|
||||
|
||||
static int calculate_new_size(strbuf_t *s, int len)
|
||||
{
|
||||
int reqsize, newsize;
|
||||
|
||||
if (len <= 0){
|
||||
NODE_ERR("BUG: Invalid strbuf length requested");
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Ensure there is room for optional NULL termination */
|
||||
reqsize = len + 1;
|
||||
|
||||
/* If the user has requested to shrink the buffer, do it exactly */
|
||||
if (s->size > reqsize)
|
||||
return reqsize;
|
||||
|
||||
newsize = s->size;
|
||||
if (s->increment < 0) {
|
||||
/* Exponential sizing */
|
||||
while (newsize < reqsize)
|
||||
newsize *= -s->increment;
|
||||
} else {
|
||||
/* Linear sizing */
|
||||
newsize = (((reqsize -1) / s->increment) + 1) * s->increment;
|
||||
}
|
||||
|
||||
return newsize;
|
||||
}
|
||||
|
||||
|
||||
/* Ensure the strbuf can hold a string 'len' bytes long (ignoring the
 * optional NULL termination). */
|
||||
int strbuf_resize(strbuf_t *s, int len)
|
||||
{
|
||||
int newsize;
|
||||
|
||||
newsize = calculate_new_size(s, len);
|
||||
|
||||
if (s->debug > 1) {
|
||||
NODE_ERR("strbuf(%lx) resize: %d => %d\n",
|
||||
(long)s, s->size, newsize);
|
||||
}
|
||||
|
||||
s->buf = (char *)cjson_mem_realloc(s->buf, newsize);
|
||||
if (!s->buf){
|
||||
NODE_ERR("not enough memory");
|
||||
return -1;
|
||||
}
|
||||
s->size = newsize;
|
||||
s->reallocs++;
|
||||
return 0;
|
||||
}
|
||||
|
||||
void strbuf_append_string(strbuf_t *s, const char *str)
|
||||
{
|
||||
int space, i;
|
||||
|
||||
space = strbuf_empty_length(s);
|
||||
|
||||
for (i = 0; str[i]; i++) {
|
||||
if (space < 1) {
|
||||
strbuf_resize(s, s->length + 1);
|
||||
space = strbuf_empty_length(s);
|
||||
}
|
||||
|
||||
s->buf[s->length] = str[i];
|
||||
s->length++;
|
||||
space--;
|
||||
}
|
||||
}
|
||||
#if 0
|
||||
/* strbuf_append_fmt() should only be used when an upper bound
|
||||
* is known for the output string. */
|
||||
void strbuf_append_fmt(strbuf_t *s, int len, const char *fmt, ...)
|
||||
{
|
||||
va_list arg;
|
||||
int fmt_len;
|
||||
|
||||
strbuf_ensure_empty_length(s, len);
|
||||
|
||||
va_start(arg, fmt);
|
||||
fmt_len = vsnprintf(s->buf + s->length, len, fmt, arg);
|
||||
va_end(arg);
|
||||
|
||||
if (fmt_len < 0)
|
||||
die("BUG: Unable to convert number"); /* This should never happen.. */
|
||||
|
||||
s->length += fmt_len;
|
||||
}
|
||||
|
||||
/* strbuf_append_fmt_retry() can be used when there is no known
 * upper bound for the output string. */
|
||||
void strbuf_append_fmt_retry(strbuf_t *s, const char *fmt, ...)
|
||||
{
|
||||
va_list arg;
|
||||
int fmt_len, try;
|
||||
int empty_len;
|
||||
|
||||
/* If the first attempt to append fails, resize the buffer appropriately
|
||||
* and try again */
|
||||
for (try = 0; ; try++) {
|
||||
va_start(arg, fmt);
|
||||
/* Append the new formatted string */
|
||||
/* fmt_len is the length of the string required, excluding the
|
||||
* trailing NULL */
|
||||
empty_len = strbuf_empty_length(s);
|
||||
/* Add 1 since there is also space to store the terminating NULL. */
|
||||
fmt_len = vsnprintf(s->buf + s->length, empty_len + 1, fmt, arg);
|
||||
va_end(arg);
|
||||
|
||||
if (fmt_len <= empty_len)
|
||||
break; /* SUCCESS */
|
||||
if (try > 0)
|
||||
die("BUG: length of formatted string changed");
|
||||
|
||||
strbuf_resize(s, s->length + fmt_len);
|
||||
}
|
||||
|
||||
s->length += fmt_len;
|
||||
}
|
||||
#endif
|
||||
/* vi:ai et sw=4 ts=4:
|
||||
*/
|
|
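The growth rule in calculate_new_size() above is compact but easy to misread. The following standalone sketch (not part of the tree; the function name grow() is illustrative only) reproduces the same arithmetic so the two growth modes can be tried on a desktop compiler.

#include <stdio.h>

/* Same arithmetic as calculate_new_size(): negative increments grow the
 * buffer geometrically, positive increments round up to a multiple. */
static int grow(int size, int increment, int len)
{
    int reqsize = len + 1;                 /* room for the optional '\0' */
    if (size > reqsize)
        return reqsize;                    /* shrink requests are honoured exactly */
    if (increment < 0) {
        while (size < reqsize)
            size *= -increment;            /* e.g. -2 doubles the buffer each pass */
        return size;
    }
    return (((reqsize - 1) / increment) + 1) * increment;
}

int main(void)
{
    printf("%d\n", grow(1023, -2, 1500));  /* 2046: one doubling of the default size */
    printf("%d\n", grow(1023, 256, 1500)); /* 1536: next multiple of 256 >= 1501 */
    return 0;
}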
@ -1,155 +0,0 @@
|
|||
/* strbuf - String buffer routines
|
||||
*
|
||||
* Copyright (c) 2010-2012 Mark Pulford <mark@kyne.com.au>
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining
|
||||
* a copy of this software and associated documentation files (the
|
||||
* "Software"), to deal in the Software without restriction, including
|
||||
* without limitation the rights to use, copy, modify, merge, publish,
|
||||
* distribute, sublicense, and/or sell copies of the Software, and to
|
||||
* permit persons to whom the Software is furnished to do so, subject to
|
||||
* the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be
|
||||
* included in all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "c_stdlib.h"
|
||||
#include "c_stdarg.h"
|
||||
#include "user_config.h"
|
||||
|
||||
/* Size: Total bytes allocated to *buf
|
||||
* Length: String length, excluding optional NULL terminator.
|
||||
* Increment: Allocation increments when resizing the string buffer.
|
||||
* Dynamic: True if created via strbuf_new()
|
||||
*/
|
||||
|
||||
typedef struct {
|
||||
char *buf;
|
||||
int size;
|
||||
int length;
|
||||
int increment;
|
||||
int dynamic;
|
||||
int reallocs;
|
||||
int debug;
|
||||
} strbuf_t;
|
||||
|
||||
#ifndef STRBUF_DEFAULT_SIZE
|
||||
#define STRBUF_DEFAULT_SIZE 1023
|
||||
#endif
|
||||
#ifndef STRBUF_DEFAULT_INCREMENT
|
||||
#define STRBUF_DEFAULT_INCREMENT -2
|
||||
#endif
|
||||
|
||||
/* Initialise */
|
||||
extern strbuf_t *strbuf_new(int len);
|
||||
extern int strbuf_init(strbuf_t *s, int len);
|
||||
extern int strbuf_set_increment(strbuf_t *s, int increment);
|
||||
|
||||
/* Release */
|
||||
extern void strbuf_free(strbuf_t *s);
|
||||
extern char *strbuf_free_to_string(strbuf_t *s, int *len);
|
||||
|
||||
/* Management */
|
||||
extern int strbuf_resize(strbuf_t *s, int len);
|
||||
static int strbuf_empty_length(strbuf_t *s);
|
||||
static int strbuf_length(strbuf_t *s);
|
||||
static char *strbuf_string(strbuf_t *s, int *len);
|
||||
static void strbuf_ensure_empty_length(strbuf_t *s, int len);
|
||||
static char *strbuf_empty_ptr(strbuf_t *s);
|
||||
static void strbuf_extend_length(strbuf_t *s, int len);
|
||||
|
||||
/* Update */
|
||||
extern void strbuf_append_fmt(strbuf_t *s, int len, const char *fmt, ...);
|
||||
extern void strbuf_append_fmt_retry(strbuf_t *s, const char *format, ...);
|
||||
static void strbuf_append_mem(strbuf_t *s, const char *c, int len);
|
||||
extern void strbuf_append_string(strbuf_t *s, const char *str);
|
||||
static void strbuf_append_char(strbuf_t *s, const char c);
|
||||
static void strbuf_ensure_null(strbuf_t *s);
|
||||
|
||||
/* Reset the string before reuse */
|
||||
static inline void strbuf_reset(strbuf_t *s)
|
||||
{
|
||||
s->length = 0;
|
||||
}
|
||||
|
||||
static inline int strbuf_allocated(strbuf_t *s)
|
||||
{
|
||||
return s->buf != NULL;
|
||||
}
|
||||
|
||||
/* Return bytes remaining in the string buffer
|
||||
* Ensure there is space for a NULL terminator. */
|
||||
static inline int strbuf_empty_length(strbuf_t *s)
|
||||
{
|
||||
return s->size - s->length - 1;
|
||||
}
|
||||
|
||||
static inline void strbuf_ensure_empty_length(strbuf_t *s, int len)
|
||||
{
|
||||
if (len > strbuf_empty_length(s))
|
||||
strbuf_resize(s, s->length + len);
|
||||
}
|
||||
|
||||
static inline char *strbuf_empty_ptr(strbuf_t *s)
|
||||
{
|
||||
return s->buf + s->length;
|
||||
}
|
||||
|
||||
static inline void strbuf_extend_length(strbuf_t *s, int len)
|
||||
{
|
||||
s->length += len;
|
||||
}
|
||||
|
||||
static inline int strbuf_length(strbuf_t *s)
|
||||
{
|
||||
return s->length;
|
||||
}
|
||||
|
||||
static inline void strbuf_append_char(strbuf_t *s, const char c)
|
||||
{
|
||||
strbuf_ensure_empty_length(s, 1);
|
||||
s->buf[s->length++] = c;
|
||||
}
|
||||
|
||||
static inline void strbuf_append_char_unsafe(strbuf_t *s, const char c)
|
||||
{
|
||||
s->buf[s->length++] = c;
|
||||
}
|
||||
|
||||
static inline void strbuf_append_mem(strbuf_t *s, const char *c, int len)
|
||||
{
|
||||
strbuf_ensure_empty_length(s, len);
|
||||
c_memcpy(s->buf + s->length, c, len);
|
||||
s->length += len;
|
||||
}
|
||||
|
||||
static inline void strbuf_append_mem_unsafe(strbuf_t *s, const char *c, int len)
|
||||
{
|
||||
c_memcpy(s->buf + s->length, c, len);
|
||||
s->length += len;
|
||||
}
|
||||
|
||||
static inline void strbuf_ensure_null(strbuf_t *s)
|
||||
{
|
||||
s->buf[s->length] = 0;
|
||||
}
|
||||
|
||||
static inline char *strbuf_string(strbuf_t *s, int *len)
|
||||
{
|
||||
if (len)
|
||||
*len = s->length;
|
||||
|
||||
return s->buf;
|
||||
}
|
||||
|
||||
/* vi:ai et sw=4 ts=4:
|
||||
*/
|
|
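As a usage sketch of the API declared above (assuming the NodeMCU build environment so that cjson_mem_malloc()/c_free() back the allocations; join_three() is an illustrative name, not part of the module):

#include "strbuf.h"

/* Build "a,b,c" in a dynamically allocated strbuf and hand the raw
 * buffer back to the caller, who later releases it with c_free(). */
static char *join_three(const char *a, const char *b, const char *c, int *len)
{
    strbuf_t *s = strbuf_new(0);            /* 0 selects STRBUF_DEFAULT_SIZE */
    if (!s)
        return NULL;
    strbuf_append_string(s, a);
    strbuf_append_char(s, ',');
    strbuf_append_string(s, b);
    strbuf_append_char(s, ',');
    strbuf_append_string(s, c);
    return strbuf_free_to_string(s, len);   /* frees the strbuf_t, keeps the buffer */
}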
@ -1,4 +0,0 @@
|
|||
These JSON examples were taken from the JSON website
|
||||
(http://json.org/example.html) and RFC 4627.
|
||||
|
||||
Used with permission.
|
|
@ -1,131 +0,0 @@
|
|||
#!/usr/bin/env lua
|
||||
|
||||
-- This benchmark script measures wall clock time and should be
|
||||
-- run on an unloaded system.
|
||||
--
|
||||
-- Your Mileage May Vary.
|
||||
--
|
||||
-- Mark Pulford <mark@kyne.com.au>
|
||||
|
||||
local json_module = os.getenv("JSON_MODULE") or "cjson"
|
||||
|
||||
require "socket"
|
||||
local json = require(json_module)
|
||||
local util = require "cjson.util"
|
||||
|
||||
local function find_func(mod, funcnames)
|
||||
for _, v in ipairs(funcnames) do
|
||||
if mod[v] then
|
||||
return mod[v]
|
||||
end
|
||||
end
|
||||
|
||||
return nil
|
||||
end
|
||||
|
||||
local json_encode = find_func(json, { "encode", "Encode", "to_string", "stringify", "json" })
|
||||
local json_decode = find_func(json, { "decode", "Decode", "to_value", "parse" })
|
||||
|
||||
local function average(t)
|
||||
local total = 0
|
||||
for _, v in ipairs(t) do
|
||||
total = total + v
|
||||
end
|
||||
return total / #t
|
||||
end
|
||||
|
||||
function benchmark(tests, seconds, rep)
|
||||
local function bench(func, iter)
|
||||
-- Use socket.gettime() to measure microsecond resolution
|
||||
-- wall clock time.
|
||||
local t = socket.gettime()
|
||||
for i = 1, iter do
|
||||
func(i)
|
||||
end
|
||||
t = socket.gettime() - t
|
||||
|
||||
-- Don't trust any results when the run lasted for less than a
|
||||
-- millisecond - return nil.
|
||||
if t < 0.001 then
|
||||
return nil
|
||||
end
|
||||
|
||||
return (iter / t)
|
||||
end
|
||||
|
||||
-- Roughly calculate the number of iterations required
|
||||
-- to obtain a particular time period.
|
||||
local function calc_iter(func, seconds)
|
||||
local iter = 1
|
||||
local rate
|
||||
-- Warm up the bench function first.
|
||||
func()
|
||||
while not rate do
|
||||
rate = bench(func, iter)
|
||||
iter = iter * 10
|
||||
end
|
||||
return math.ceil(seconds * rate)
|
||||
end
|
||||
|
||||
local test_results = {}
|
||||
for name, func in pairs(tests) do
|
||||
-- k(number), v(string)
|
||||
-- k(string), v(function)
|
||||
-- k(number), v(function)
|
||||
if type(func) == "string" then
|
||||
name = func
|
||||
func = _G[name]
|
||||
end
|
||||
|
||||
local iter = calc_iter(func, seconds)
|
||||
|
||||
local result = {}
|
||||
for i = 1, rep do
|
||||
result[i] = bench(func, iter)
|
||||
end
|
||||
|
||||
-- Remove the slowest half (round down) of the result set
|
||||
table.sort(result)
|
||||
for i = 1, math.floor(#result / 2) do
|
||||
table.remove(result, 1)
|
||||
end
|
||||
|
||||
test_results[name] = average(result)
|
||||
end
|
||||
|
||||
return test_results
|
||||
end
|
||||
|
||||
function bench_file(filename)
|
||||
local data_json = util.file_load(filename)
|
||||
local data_obj = json_decode(data_json)
|
||||
|
||||
local function test_encode()
|
||||
json_encode(data_obj)
|
||||
end
|
||||
local function test_decode()
|
||||
json_decode(data_json)
|
||||
end
|
||||
|
||||
local tests = {}
|
||||
if json_encode then tests.encode = test_encode end
|
||||
if json_decode then tests.decode = test_decode end
|
||||
|
||||
return benchmark(tests, 0.1, 5)
|
||||
end
|
||||
|
||||
-- Optionally load any custom configuration required for this module
|
||||
local success, data = pcall(util.file_load, ("bench-%s.lua"):format(json_module))
|
||||
if success then
|
||||
util.run_script(data, _G)
|
||||
configure(json)
|
||||
end
|
||||
|
||||
for i = 1, #arg do
|
||||
local results = bench_file(arg[i])
|
||||
for k, v in pairs(results) do
|
||||
print(("%s\t%s\t%d"):format(arg[i], k, v))
|
||||
end
|
||||
end
|
||||
|
||||
-- vi:ai et sw=4 ts=4:
|
|
@ -1,22 +0,0 @@
|
|||
{
|
||||
"glossary": {
|
||||
"title": "example glossary",
|
||||
"GlossDiv": {
|
||||
"title": "S",
|
||||
"GlossList": {
|
||||
"GlossEntry": {
|
||||
"ID": "SGML",
|
||||
"SortAs": "SGML",
|
||||
"GlossTerm": "Standard Generalized Mark up Language",
|
||||
"Acronym": "SGML",
|
||||
"Abbrev": "ISO 8879:1986",
|
||||
"GlossDef": {
|
||||
"para": "A meta-markup language, used to create markup languages such as DocBook.",
|
||||
"GlossSeeAlso": ["GML", "XML"]
|
||||
},
|
||||
"GlossSee": "markup"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,11 +0,0 @@
|
|||
{"menu": {
|
||||
"id": "file",
|
||||
"value": "File",
|
||||
"popup": {
|
||||
"menuitem": [
|
||||
{"value": "New", "onclick": "CreateNewDoc()"},
|
||||
{"value": "Open", "onclick": "OpenDoc()"},
|
||||
{"value": "Close", "onclick": "CloseDoc()"}
|
||||
]
|
||||
}
|
||||
}}
|
|
@ -1,26 +0,0 @@
|
|||
{"widget": {
|
||||
"debug": "on",
|
||||
"window": {
|
||||
"title": "Sample Konfabulator Widget",
|
||||
"name": "main_window",
|
||||
"width": 500,
|
||||
"height": 500
|
||||
},
|
||||
"image": {
|
||||
"src": "Images/Sun.png",
|
||||
"name": "sun1",
|
||||
"hOffset": 250,
|
||||
"vOffset": 250,
|
||||
"alignment": "center"
|
||||
},
|
||||
"text": {
|
||||
"data": "Click Here",
|
||||
"size": 36,
|
||||
"style": "bold",
|
||||
"name": "text1",
|
||||
"hOffset": 250,
|
||||
"vOffset": 100,
|
||||
"alignment": "center",
|
||||
"onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;"
|
||||
}
|
||||
}}
|
|
@ -1,88 +0,0 @@
|
|||
{"web-app": {
|
||||
"servlet": [
|
||||
{
|
||||
"servlet-name": "cofaxCDS",
|
||||
"servlet-class": "org.cofax.cds.CDSServlet",
|
||||
"init-param": {
|
||||
"configGlossary:installationAt": "Philadelphia, PA",
|
||||
"configGlossary:adminEmail": "ksm@pobox.com",
|
||||
"configGlossary:poweredBy": "Cofax",
|
||||
"configGlossary:poweredByIcon": "/images/cofax.gif",
|
||||
"configGlossary:staticPath": "/content/static",
|
||||
"templateProcessorClass": "org.cofax.WysiwygTemplate",
|
||||
"templateLoaderClass": "org.cofax.FilesTemplateLoader",
|
||||
"templatePath": "templates",
|
||||
"templateOverridePath": "",
|
||||
"defaultListTemplate": "listTemplate.htm",
|
||||
"defaultFileTemplate": "articleTemplate.htm",
|
||||
"useJSP": false,
|
||||
"jspListTemplate": "listTemplate.jsp",
|
||||
"jspFileTemplate": "articleTemplate.jsp",
|
||||
"cachePackageTagsTrack": 200,
|
||||
"cachePackageTagsStore": 200,
|
||||
"cachePackageTagsRefresh": 60,
|
||||
"cacheTemplatesTrack": 100,
|
||||
"cacheTemplatesStore": 50,
|
||||
"cacheTemplatesRefresh": 15,
|
||||
"cachePagesTrack": 200,
|
||||
"cachePagesStore": 100,
|
||||
"cachePagesRefresh": 10,
|
||||
"cachePagesDirtyRead": 10,
|
||||
"searchEngineListTemplate": "forSearchEnginesList.htm",
|
||||
"searchEngineFileTemplate": "forSearchEngines.htm",
|
||||
"searchEngineRobotsDb": "WEB-INF/robots.db",
|
||||
"useDataStore": true,
|
||||
"dataStoreClass": "org.cofax.SqlDataStore",
|
||||
"redirectionClass": "org.cofax.SqlRedirection",
|
||||
"dataStoreName": "cofax",
|
||||
"dataStoreDriver": "com.microsoft.jdbc.sqlserver.SQLServerDriver",
|
||||
"dataStoreUrl": "jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon",
|
||||
"dataStoreUser": "sa",
|
||||
"dataStorePassword": "dataStoreTestQuery",
|
||||
"dataStoreTestQuery": "SET NOCOUNT ON;select test='test';",
|
||||
"dataStoreLogFile": "/usr/local/tomcat/logs/datastore.log",
|
||||
"dataStoreInitConns": 10,
|
||||
"dataStoreMaxConns": 100,
|
||||
"dataStoreConnUsageLimit": 100,
|
||||
"dataStoreLogLevel": "debug",
|
||||
"maxUrlLength": 500}},
|
||||
{
|
||||
"servlet-name": "cofaxEmail",
|
||||
"servlet-class": "org.cofax.cds.EmailServlet",
|
||||
"init-param": {
|
||||
"mailHost": "mail1",
|
||||
"mailHostOverride": "mail2"}},
|
||||
{
|
||||
"servlet-name": "cofaxAdmin",
|
||||
"servlet-class": "org.cofax.cds.AdminServlet"},
|
||||
|
||||
{
|
||||
"servlet-name": "fileServlet",
|
||||
"servlet-class": "org.cofax.cds.FileServlet"},
|
||||
{
|
||||
"servlet-name": "cofaxTools",
|
||||
"servlet-class": "org.cofax.cms.CofaxToolsServlet",
|
||||
"init-param": {
|
||||
"templatePath": "toolstemplates/",
|
||||
"log": 1,
|
||||
"logLocation": "/usr/local/tomcat/logs/CofaxTools.log",
|
||||
"logMaxSize": "",
|
||||
"dataLog": 1,
|
||||
"dataLogLocation": "/usr/local/tomcat/logs/dataLog.log",
|
||||
"dataLogMaxSize": "",
|
||||
"removePageCache": "/content/admin/remove?cache=pages&id=",
|
||||
"removeTemplateCache": "/content/admin/remove?cache=templates&id=",
|
||||
"fileTransferFolder": "/usr/local/tomcat/webapps/content/fileTransferFolder",
|
||||
"lookInContext": 1,
|
||||
"adminGroupID": 4,
|
||||
"betaServer": true}}],
|
||||
"servlet-mapping": {
|
||||
"cofaxCDS": "/",
|
||||
"cofaxEmail": "/cofaxutil/aemail/*",
|
||||
"cofaxAdmin": "/admin/*",
|
||||
"fileServlet": "/static/*",
|
||||
"cofaxTools": "/tools/*"},
|
||||
|
||||
"taglib": {
|
||||
"taglib-uri": "cofax.tld",
|
||||
"taglib-location": "/WEB-INF/tlds/cofax.tld"}}}
|
|
@ -1,27 +0,0 @@
|
|||
{"menu": {
|
||||
"header": "SVG Viewer",
|
||||
"items": [
|
||||
{"id": "Open"},
|
||||
{"id": "OpenNew", "label": "Open New"},
|
||||
null,
|
||||
{"id": "ZoomIn", "label": "Zoom In"},
|
||||
{"id": "ZoomOut", "label": "Zoom Out"},
|
||||
{"id": "OriginalView", "label": "Original View"},
|
||||
null,
|
||||
{"id": "Quality"},
|
||||
{"id": "Pause"},
|
||||
{"id": "Mute"},
|
||||
null,
|
||||
{"id": "Find", "label": "Find..."},
|
||||
{"id": "FindAgain", "label": "Find Again"},
|
||||
{"id": "Copy"},
|
||||
{"id": "CopyAgain", "label": "Copy Again"},
|
||||
{"id": "CopySVG", "label": "Copy SVG"},
|
||||
{"id": "ViewSVG", "label": "View SVG"},
|
||||
{"id": "ViewSource", "label": "View Source"},
|
||||
{"id": "SaveAs", "label": "Save As"},
|
||||
null,
|
||||
{"id": "Help"},
|
||||
{"id": "About", "label": "About Adobe CVG Viewer..."}
|
||||
]
|
||||
}}
|
|
@ -1,23 +0,0 @@
|
|||
#!/usr/bin/env perl
|
||||
|
||||
# Create test comparison data using a different UTF-8 implementation.
|
||||
|
||||
# The generated utf8.dat file must have the following MD5 sum:
|
||||
# cff03b039d850f370a7362f3313e5268
|
||||
|
||||
use strict;
|
||||
|
||||
# 0xD800 - 0xDFFF are used to encode supplementary codepoints
|
||||
# 0x10000 - 0x10FFFF are supplementary codepoints
|
||||
my (@codepoints) = (0 .. 0xD7FF, 0xE000 .. 0x10FFFF);
|
||||
|
||||
my $utf8 = pack("U*", @codepoints);
|
||||
defined($utf8) or die "Unable to create UTF-8 string\n";
|
||||
|
||||
open(FH, ">:utf8", "utf8.dat")
|
||||
or die "Unable to open utf8.dat: $!\n";
|
||||
print FH $utf8
|
||||
or die "Unable to write utf8.dat\n";
|
||||
close(FH);
|
||||
|
||||
# vi:ai et sw=4 ts=4:
|
|
@ -1,7 +0,0 @@
|
|||
[ 0.110001,
|
||||
0.12345678910111,
|
||||
0.412454033640,
|
||||
2.6651441426902,
|
||||
2.718281828459,
|
||||
3.1415926535898,
|
||||
2.1406926327793 ]
|
|
@ -1 +0,0 @@
|
|||
"\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-.\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f€亗儎厗噲墛媽崕彁憭摂晼棙櫄洔潪煚、¥ウЖ┆<D096><E29486><EFBFBD>辈炒刀犯购患骄坷谅媚牌侨墒颂臀闲岩釉罩棕仝圮蒉哙徕沅彐玷殛腱眍镳耱篝貊鼬<E8B28A><E9BCAC><EFBFBD><EFBFBD>"
|
|
@ -1,13 +0,0 @@
|
|||
{
|
||||
"Image": {
|
||||
"Width": 800,
|
||||
"Height": 600,
|
||||
"Title": "View from 15th Floor",
|
||||
"Thumbnail": {
|
||||
"Url": "http://www.example.com/image/481989943",
|
||||
"Height": 125,
|
||||
"Width": "100"
|
||||
},
|
||||
"IDs": [116, 943, 234, 38793]
|
||||
}
|
||||
}
|
|
@ -1,22 +0,0 @@
|
|||
[
|
||||
{
|
||||
"precision": "zip",
|
||||
"Latitude": 37.7668,
|
||||
"Longitude": -122.3959,
|
||||
"Address": "",
|
||||
"City": "SAN FRANCISCO",
|
||||
"State": "CA",
|
||||
"Zip": "94107",
|
||||
"Country": "US"
|
||||
},
|
||||
{
|
||||
"precision": "zip",
|
||||
"Latitude": 37.371991,
|
||||
"Longitude": -122.026020,
|
||||
"Address": "",
|
||||
"City": "SUNNYVALE",
|
||||
"State": "CA",
|
||||
"Zip": "94085",
|
||||
"Country": "US"
|
||||
}
|
||||
]
|
|
@ -1,425 +0,0 @@
|
|||
#!/usr/bin/env lua
|
||||
|
||||
-- Lua CJSON tests
|
||||
--
|
||||
-- Mark Pulford <mark@kyne.com.au>
|
||||
--
|
||||
-- Note: The output of this script is easier to read with "less -S"
|
||||
|
||||
local json = require "cjson"
|
||||
local json_safe = require "cjson.safe"
|
||||
local util = require "cjson.util"
|
||||
|
||||
local function gen_raw_octets()
|
||||
local chars = {}
|
||||
for i = 0, 255 do chars[i + 1] = string.char(i) end
|
||||
return table.concat(chars)
|
||||
end
|
||||
|
||||
-- Generate every UTF-16 codepoint, including supplementary codes
|
||||
local function gen_utf16_escaped()
|
||||
-- Create raw table escapes
|
||||
local utf16_escaped = {}
|
||||
local count = 0
|
||||
|
||||
local function append_escape(code)
|
||||
local esc = ('\\u%04X'):format(code)
|
||||
table.insert(utf16_escaped, esc)
|
||||
end
|
||||
|
||||
table.insert(utf16_escaped, '"')
|
||||
for i = 0, 0xD7FF do
|
||||
append_escape(i)
|
||||
end
|
||||
-- Skip 0xD800 - 0xDFFF since they are used to encode supplementary
|
||||
-- codepoints
|
||||
for i = 0xE000, 0xFFFF do
|
||||
append_escape(i)
|
||||
end
|
||||
-- Append surrogate pair for each supplementary codepoint
|
||||
for high = 0xD800, 0xDBFF do
|
||||
for low = 0xDC00, 0xDFFF do
|
||||
append_escape(high)
|
||||
append_escape(low)
|
||||
end
|
||||
end
|
||||
table.insert(utf16_escaped, '"')
|
||||
|
||||
return table.concat(utf16_escaped)
|
||||
end
|
||||
|
||||
function load_testdata()
|
||||
local data = {}
|
||||
|
||||
-- Data for 8bit raw <-> escaped octets tests
|
||||
data.octets_raw = gen_raw_octets()
|
||||
data.octets_escaped = util.file_load("octets-escaped.dat")
|
||||
|
||||
-- Data for \uXXXX -> UTF-8 test
|
||||
data.utf16_escaped = gen_utf16_escaped()
|
||||
|
||||
-- Load matching data for utf16_escaped
|
||||
local utf8_loaded
|
||||
utf8_loaded, data.utf8_raw = pcall(util.file_load, "utf8.dat")
|
||||
if not utf8_loaded then
|
||||
data.utf8_raw = "Failed to load utf8.dat - please run genutf8.pl"
|
||||
end
|
||||
|
||||
data.table_cycle = {}
|
||||
data.table_cycle[1] = data.table_cycle
|
||||
|
||||
local big = {}
|
||||
for i = 1, 1100 do
|
||||
big = { { 10, false, true, json.null }, "string", a = big }
|
||||
end
|
||||
data.deeply_nested_data = big
|
||||
|
||||
return data
|
||||
end
|
||||
|
||||
function test_decode_cycle(filename)
|
||||
local obj1 = json.decode(util.file_load(filename))
|
||||
local obj2 = json.decode(json.encode(obj1))
|
||||
return util.compare_values(obj1, obj2)
|
||||
end
|
||||
|
||||
-- Set up data used in tests
|
||||
local Inf = math.huge;
|
||||
local NaN = math.huge * 0;
|
||||
|
||||
local testdata = load_testdata()
|
||||
|
||||
local cjson_tests = {
|
||||
-- Test API variables
|
||||
{ "Check module name, version",
|
||||
function () return json._NAME, json._VERSION end, { },
|
||||
true, { "cjson", "2.1devel" } },
|
||||
|
||||
-- Test decoding simple types
|
||||
{ "Decode string",
|
||||
json.decode, { '"test string"' }, true, { "test string" } },
|
||||
{ "Decode numbers",
|
||||
json.decode, { '[ 0.0, -5e3, -1, 0.3e-3, 1023.2, 0e10 ]' },
|
||||
true, { { 0.0, -5000, -1, 0.0003, 1023.2, 0 } } },
|
||||
{ "Decode null",
|
||||
json.decode, { 'null' }, true, { json.null } },
|
||||
{ "Decode true",
|
||||
json.decode, { 'true' }, true, { true } },
|
||||
{ "Decode false",
|
||||
json.decode, { 'false' }, true, { false } },
|
||||
{ "Decode object with numeric keys",
|
||||
json.decode, { '{ "1": "one", "3": "three" }' },
|
||||
true, { { ["1"] = "one", ["3"] = "three" } } },
|
||||
{ "Decode object with string keys",
|
||||
json.decode, { '{ "a": "a", "b": "b" }' },
|
||||
true, { { a = "a", b = "b" } } },
|
||||
{ "Decode array",
|
||||
json.decode, { '[ "one", null, "three" ]' },
|
||||
true, { { "one", json.null, "three" } } },
|
||||
|
||||
-- Test decoding errors
|
||||
{ "Decode UTF-16BE [throw error]",
|
||||
json.decode, { '\0"\0"' },
|
||||
false, { "JSON parser does not support UTF-16 or UTF-32" } },
|
||||
{ "Decode UTF-16LE [throw error]",
|
||||
json.decode, { '"\0"\0' },
|
||||
false, { "JSON parser does not support UTF-16 or UTF-32" } },
|
||||
{ "Decode UTF-32BE [throw error]",
|
||||
json.decode, { '\0\0\0"' },
|
||||
false, { "JSON parser does not support UTF-16 or UTF-32" } },
|
||||
{ "Decode UTF-32LE [throw error]",
|
||||
json.decode, { '"\0\0\0' },
|
||||
false, { "JSON parser does not support UTF-16 or UTF-32" } },
|
||||
{ "Decode partial JSON [throw error]",
|
||||
json.decode, { '{ "unexpected eof": ' },
|
||||
false, { "Expected value but found T_END at character 21" } },
|
||||
{ "Decode with extra comma [throw error]",
|
||||
json.decode, { '{ "extra data": true }, false' },
|
||||
false, { "Expected the end but found T_COMMA at character 23" } },
|
||||
{ "Decode invalid escape code [throw error]",
|
||||
json.decode, { [[ { "bad escape \q code" } ]] },
|
||||
false, { "Expected object key string but found invalid escape code at character 16" } },
|
||||
{ "Decode invalid unicode escape [throw error]",
|
||||
json.decode, { [[ { "bad unicode \u0f6 escape" } ]] },
|
||||
false, { "Expected object key string but found invalid unicode escape code at character 17" } },
|
||||
{ "Decode invalid keyword [throw error]",
|
||||
json.decode, { ' [ "bad barewood", test ] ' },
|
||||
false, { "Expected value but found invalid token at character 20" } },
|
||||
{ "Decode invalid number #1 [throw error]",
|
||||
json.decode, { '[ -+12 ]' },
|
||||
false, { "Expected value but found invalid number at character 3" } },
|
||||
{ "Decode invalid number #2 [throw error]",
|
||||
json.decode, { '-v' },
|
||||
false, { "Expected value but found invalid number at character 1" } },
|
||||
{ "Decode invalid number exponent [throw error]",
|
||||
json.decode, { '[ 0.4eg10 ]' },
|
||||
false, { "Expected comma or array end but found invalid token at character 6" } },
|
||||
|
||||
-- Test decoding nested arrays / objects
|
||||
{ "Set decode_max_depth(5)",
|
||||
json.decode_max_depth, { 5 }, true, { 5 } },
|
||||
{ "Decode array at nested limit",
|
||||
json.decode, { '[[[[[ "nested" ]]]]]' },
|
||||
true, { {{{{{ "nested" }}}}} } },
|
||||
{ "Decode array over nested limit [throw error]",
|
||||
json.decode, { '[[[[[[ "nested" ]]]]]]' },
|
||||
false, { "Found too many nested data structures (6) at character 6" } },
|
||||
{ "Decode object at nested limit",
|
||||
json.decode, { '{"a":{"b":{"c":{"d":{"e":"nested"}}}}}' },
|
||||
true, { {a={b={c={d={e="nested"}}}}} } },
|
||||
{ "Decode object over nested limit [throw error]",
|
||||
json.decode, { '{"a":{"b":{"c":{"d":{"e":{"f":"nested"}}}}}}' },
|
||||
false, { "Found too many nested data structures (6) at character 26" } },
|
||||
{ "Set decode_max_depth(1000)",
|
||||
json.decode_max_depth, { 1000 }, true, { 1000 } },
|
||||
{ "Decode deeply nested array [throw error]",
|
||||
json.decode, { string.rep("[", 1100) .. '1100' .. string.rep("]", 1100)},
|
||||
false, { "Found too many nested data structures (1001) at character 1001" } },
|
||||
|
||||
-- Test encoding nested tables
|
||||
{ "Set encode_max_depth(5)",
|
||||
json.encode_max_depth, { 5 }, true, { 5 } },
|
||||
{ "Encode nested table as array at nested limit",
|
||||
json.encode, { {{{{{"nested"}}}}} }, true, { '[[[[["nested"]]]]]' } },
|
||||
{ "Encode nested table as array after nested limit [throw error]",
|
||||
json.encode, { { {{{{{"nested"}}}}} } },
|
||||
false, { "Cannot serialise, excessive nesting (6)" } },
|
||||
{ "Encode nested table as object at nested limit",
|
||||
json.encode, { {a={b={c={d={e="nested"}}}}} },
|
||||
true, { '{"a":{"b":{"c":{"d":{"e":"nested"}}}}}' } },
|
||||
{ "Encode nested table as object over nested limit [throw error]",
|
||||
json.encode, { {a={b={c={d={e={f="nested"}}}}}} },
|
||||
false, { "Cannot serialise, excessive nesting (6)" } },
|
||||
{ "Encode table with cycle [throw error]",
|
||||
json.encode, { testdata.table_cycle },
|
||||
false, { "Cannot serialise, excessive nesting (6)" } },
|
||||
{ "Set encode_max_depth(1000)",
|
||||
json.encode_max_depth, { 1000 }, true, { 1000 } },
|
||||
{ "Encode deeply nested data [throw error]",
|
||||
json.encode, { testdata.deeply_nested_data },
|
||||
false, { "Cannot serialise, excessive nesting (1001)" } },
|
||||
|
||||
-- Test encoding simple types
|
||||
{ "Encode null",
|
||||
json.encode, { json.null }, true, { 'null' } },
|
||||
{ "Encode true",
|
||||
json.encode, { true }, true, { 'true' } },
|
||||
{ "Encode false",
|
||||
json.encode, { false }, true, { 'false' } },
|
||||
{ "Encode empty object",
|
||||
json.encode, { { } }, true, { '{}' } },
|
||||
{ "Encode integer",
|
||||
json.encode, { 10 }, true, { '10' } },
|
||||
{ "Encode string",
|
||||
json.encode, { "hello" }, true, { '"hello"' } },
|
||||
{ "Encode Lua function [throw error]",
|
||||
json.encode, { function () end },
|
||||
false, { "Cannot serialise function: type not supported" } },
|
||||
|
||||
-- Test decoding invalid numbers
|
||||
{ "Set decode_invalid_numbers(true)",
|
||||
json.decode_invalid_numbers, { true }, true, { true } },
|
||||
{ "Decode hexadecimal",
|
||||
json.decode, { '0x6.ffp1' }, true, { 13.9921875 } },
|
||||
{ "Decode numbers with leading zero",
|
||||
json.decode, { '[ 0123, 00.33 ]' }, true, { { 123, 0.33 } } },
|
||||
{ "Decode +-Inf",
|
||||
json.decode, { '[ +Inf, Inf, -Inf ]' }, true, { { Inf, Inf, -Inf } } },
|
||||
{ "Decode +-Infinity",
|
||||
json.decode, { '[ +Infinity, Infinity, -Infinity ]' },
|
||||
true, { { Inf, Inf, -Inf } } },
|
||||
{ "Decode +-NaN",
|
||||
json.decode, { '[ +NaN, NaN, -NaN ]' }, true, { { NaN, NaN, NaN } } },
|
||||
{ "Decode Infrared (not infinity) [throw error]",
|
||||
json.decode, { 'Infrared' },
|
||||
false, { "Expected the end but found invalid token at character 4" } },
|
||||
{ "Decode Noodle (not NaN) [throw error]",
|
||||
json.decode, { 'Noodle' },
|
||||
false, { "Expected value but found invalid token at character 1" } },
|
||||
{ "Set decode_invalid_numbers(false)",
|
||||
json.decode_invalid_numbers, { false }, true, { false } },
|
||||
{ "Decode hexadecimal [throw error]",
|
||||
json.decode, { '0x6' },
|
||||
false, { "Expected value but found invalid number at character 1" } },
|
||||
{ "Decode numbers with leading zero [throw error]",
|
||||
json.decode, { '[ 0123, 00.33 ]' },
|
||||
false, { "Expected value but found invalid number at character 3" } },
|
||||
{ "Decode +-Inf [throw error]",
|
||||
json.decode, { '[ +Inf, Inf, -Inf ]' },
|
||||
false, { "Expected value but found invalid token at character 3" } },
|
||||
{ "Decode +-Infinity [throw error]",
|
||||
json.decode, { '[ +Infinity, Infinity, -Infinity ]' },
|
||||
false, { "Expected value but found invalid token at character 3" } },
|
||||
{ "Decode +-NaN [throw error]",
|
||||
json.decode, { '[ +NaN, NaN, -NaN ]' },
|
||||
false, { "Expected value but found invalid token at character 3" } },
|
||||
{ 'Set decode_invalid_numbers("on")',
|
||||
json.decode_invalid_numbers, { "on" }, true, { true } },
|
||||
|
||||
-- Test encoding invalid numbers
|
||||
{ "Set encode_invalid_numbers(false)",
|
||||
json.encode_invalid_numbers, { false }, true, { false } },
|
||||
{ "Encode NaN [throw error]",
|
||||
json.encode, { NaN },
|
||||
false, { "Cannot serialise number: must not be NaN or Infinity" } },
|
||||
{ "Encode Infinity [throw error]",
|
||||
json.encode, { Inf },
|
||||
false, { "Cannot serialise number: must not be NaN or Infinity" } },
|
||||
{ "Set encode_invalid_numbers(\"null\")",
|
||||
json.encode_invalid_numbers, { "null" }, true, { "null" } },
|
||||
{ "Encode NaN as null",
|
||||
json.encode, { NaN }, true, { "null" } },
|
||||
{ "Encode Infinity as null",
|
||||
json.encode, { Inf }, true, { "null" } },
|
||||
{ "Set encode_invalid_numbers(true)",
|
||||
json.encode_invalid_numbers, { true }, true, { true } },
|
||||
{ "Encode NaN",
|
||||
json.encode, { NaN }, true, { "NaN" } },
|
||||
{ "Encode +Infinity",
|
||||
json.encode, { Inf }, true, { "Infinity" } },
|
||||
{ "Encode -Infinity",
|
||||
json.encode, { -Inf }, true, { "-Infinity" } },
|
||||
{ 'Set encode_invalid_numbers("off")',
|
||||
json.encode_invalid_numbers, { "off" }, true, { false } },
|
||||
|
||||
-- Test encoding tables
|
||||
{ "Set encode_sparse_array(true, 2, 3)",
|
||||
json.encode_sparse_array, { true, 2, 3 }, true, { true, 2, 3 } },
|
||||
{ "Encode sparse table as array #1",
|
||||
json.encode, { { [3] = "sparse test" } },
|
||||
true, { '[null,null,"sparse test"]' } },
|
||||
{ "Encode sparse table as array #2",
|
||||
json.encode, { { [1] = "one", [4] = "sparse test" } },
|
||||
true, { '["one",null,null,"sparse test"]' } },
|
||||
{ "Encode sparse array as object",
|
||||
json.encode, { { [1] = "one", [5] = "sparse test" } },
|
||||
true, { '{"1":"one","5":"sparse test"}' } },
|
||||
{ "Encode table with numeric string key as object",
|
||||
json.encode, { { ["2"] = "numeric string key test" } },
|
||||
true, { '{"2":"numeric string key test"}' } },
|
||||
{ "Set encode_sparse_array(false)",
|
||||
json.encode_sparse_array, { false }, true, { false, 2, 3 } },
|
||||
{ "Encode table with incompatible key [throw error]",
|
||||
json.encode, { { [false] = "wrong" } },
|
||||
false, { "Cannot serialise boolean: table key must be a number or string" } },
|
||||
|
||||
-- Test escaping
|
||||
{ "Encode all octets (8-bit clean)",
|
||||
json.encode, { testdata.octets_raw }, true, { testdata.octets_escaped } },
|
||||
{ "Decode all escaped octets",
|
||||
json.decode, { testdata.octets_escaped }, true, { testdata.octets_raw } },
|
||||
{ "Decode single UTF-16 escape",
|
||||
json.decode, { [["\uF800"]] }, true, { "\239\160\128" } },
|
||||
{ "Decode all UTF-16 escapes (including surrogate combinations)",
|
||||
json.decode, { testdata.utf16_escaped }, true, { testdata.utf8_raw } },
|
||||
{ "Decode swapped surrogate pair [throw error]",
|
||||
json.decode, { [["\uDC00\uD800"]] },
|
||||
false, { "Expected value but found invalid unicode escape code at character 2" } },
|
||||
{ "Decode duplicate high surrogate [throw error]",
|
||||
json.decode, { [["\uDB00\uDB00"]] },
|
||||
false, { "Expected value but found invalid unicode escape code at character 2" } },
|
||||
{ "Decode duplicate low surrogate [throw error]",
|
||||
json.decode, { [["\uDB00\uDB00"]] },
|
||||
false, { "Expected value but found invalid unicode escape code at character 2" } },
|
||||
{ "Decode missing low surrogate [throw error]",
|
||||
json.decode, { [["\uDB00"]] },
|
||||
false, { "Expected value but found invalid unicode escape code at character 2" } },
|
||||
{ "Decode invalid low surrogate [throw error]",
|
||||
json.decode, { [["\uDB00\uD"]] },
|
||||
false, { "Expected value but found invalid unicode escape code at character 2" } },
|
||||
|
||||
-- Test locale support
|
||||
--
|
||||
-- The standard Lua interpreter is ANSI C only and doesn't support locales
|
||||
-- by default. Force a known problematic locale to test strtod()/sprintf().
|
||||
{ "Set locale to cs_CZ (comma separator)", function ()
|
||||
os.setlocale("cs_CZ")
|
||||
json.new()
|
||||
end },
|
||||
{ "Encode number under comma locale",
|
||||
json.encode, { 1.5 }, true, { '1.5' } },
|
||||
{ "Decode number in array under comma locale",
|
||||
json.decode, { '[ 10, "test" ]' }, true, { { 10, "test" } } },
|
||||
{ "Revert locale to POSIX", function ()
|
||||
os.setlocale("C")
|
||||
json.new()
|
||||
end },
|
||||
|
||||
-- Test encode_keep_buffer() and encode_number_precision()
|
||||
{ "Set encode_keep_buffer(false)",
|
||||
json.encode_keep_buffer, { false }, true, { false } },
|
||||
{ "Set encode_number_precision(3)",
|
||||
json.encode_number_precision, { 3 }, true, { 3 } },
|
||||
{ "Encode number with precision 3",
|
||||
json.encode, { 1/3 }, true, { "0.333" } },
|
||||
{ "Set encode_number_precision(14)",
|
||||
json.encode_number_precision, { 14 }, true, { 14 } },
|
||||
{ "Set encode_keep_buffer(true)",
|
||||
json.encode_keep_buffer, { true }, true, { true } },
|
||||
|
||||
-- Test config API errors
|
||||
-- Function is listed as '?' due to pcall
|
||||
{ "Set encode_number_precision(0) [throw error]",
|
||||
json.encode_number_precision, { 0 },
|
||||
false, { "bad argument #1 to '?' (expected integer between 1 and 14)" } },
|
||||
{ "Set encode_number_precision(\"five\") [throw error]",
|
||||
json.encode_number_precision, { "five" },
|
||||
false, { "bad argument #1 to '?' (number expected, got string)" } },
|
||||
{ "Set encode_keep_buffer(nil, true) [throw error]",
|
||||
json.encode_keep_buffer, { nil, true },
|
||||
false, { "bad argument #2 to '?' (found too many arguments)" } },
|
||||
{ "Set encode_max_depth(\"wrong\") [throw error]",
|
||||
json.encode_max_depth, { "wrong" },
|
||||
false, { "bad argument #1 to '?' (number expected, got string)" } },
|
||||
{ "Set decode_max_depth(0) [throw error]",
|
||||
json.decode_max_depth, { "0" },
|
||||
false, { "bad argument #1 to '?' (expected integer between 1 and 2147483647)" } },
|
||||
{ "Set encode_invalid_numbers(-2) [throw error]",
|
||||
json.encode_invalid_numbers, { -2 },
|
||||
false, { "bad argument #1 to '?' (invalid option '-2')" } },
|
||||
{ "Set decode_invalid_numbers(true, false) [throw error]",
|
||||
json.decode_invalid_numbers, { true, false },
|
||||
false, { "bad argument #2 to '?' (found too many arguments)" } },
|
||||
{ "Set encode_sparse_array(\"not quite on\") [throw error]",
|
||||
json.encode_sparse_array, { "not quite on" },
|
||||
false, { "bad argument #1 to '?' (invalid option 'not quite on')" } },
|
||||
|
||||
{ "Reset Lua CJSON configuration", function () json = json.new() end },
|
||||
-- Wrap in a function to ensure the table returned by json.new() is used
|
||||
{ "Check encode_sparse_array()",
|
||||
function (...) return json.encode_sparse_array(...) end, { },
|
||||
true, { false, 2, 10 } },
|
||||
|
||||
{ "Encode (safe) simple value",
|
||||
json_safe.encode, { true },
|
||||
true, { "true" } },
|
||||
{ "Encode (safe) argument validation [throw error]",
|
||||
json_safe.encode, { "arg1", "arg2" },
|
||||
false, { "bad argument #1 to '?' (expected 1 argument)" } },
|
||||
{ "Decode (safe) error generation",
|
||||
json_safe.decode, { "Oops" },
|
||||
true, { nil, "Expected value but found invalid token at character 1" } },
|
||||
{ "Decode (safe) error generation after new()",
|
||||
function(...) return json_safe.new().decode(...) end, { "Oops" },
|
||||
true, { nil, "Expected value but found invalid token at character 1" } },
|
||||
}
|
||||
|
||||
print(("==> Testing Lua CJSON version %s\n"):format(json._VERSION))
|
||||
|
||||
util.run_test_group(cjson_tests)
|
||||
|
||||
for _, filename in ipairs(arg) do
|
||||
util.run_test("Decode cycle " .. filename, test_decode_cycle, { filename },
|
||||
true, { true })
|
||||
end
|
||||
|
||||
local pass, total = util.run_test_summary()
|
||||
|
||||
if pass == total then
|
||||
print("==> Summary: all tests succeeded")
|
||||
else
|
||||
print(("==> Summary: %d/%d tests failed"):format(total - pass, total))
|
||||
os.exit(1)
|
||||
end
|
||||
|
||||
-- vi:ai et sw=4 ts=4:
|
|
@ -1 +0,0 @@
|
|||
{ "array": [ 10, true, null ] }
|
|
@ -24,7 +24,6 @@
|
|||
#define LUA_USE_MODULES_BIT
|
||||
//#define LUA_USE_MODULES_BMP085
|
||||
//#define LUA_USE_MODULES_BME280
|
||||
//#define LUA_USE_MODULES_CJSON
|
||||
//#define LUA_USE_MODULES_COAP
|
||||
//#define LUA_USE_MODULES_CRON
|
||||
//#define LUA_USE_MODULES_CRYPTO
|
||||
|
@ -54,6 +53,7 @@
|
|||
//#define LUA_USE_MODULES_RTCMEM
|
||||
//#define LUA_USE_MODULES_RTCTIME
|
||||
//#define LUA_USE_MODULES_SIGMA_DELTA
|
||||
//#define LUA_USE_MODULES_SJSON
|
||||
//#define LUA_USE_MODULES_SNTP
|
||||
//#define LUA_USE_MODULES_SOMFY
|
||||
#define LUA_USE_MODULES_SPI
|
||||
|
|
|
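For reference, the replacement streaming module is selected the same way as the one it removes; presumably one uncomments the corresponding define in user_modules.h before building:

/* In user_modules.h, enable the new streaming JSON module at build time */
#define LUA_USE_MODULES_SJSON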
@ -50,10 +50,10 @@ INCLUDES += -I ../pcm
|
|||
INCLUDES += -I ../platform
|
||||
INCLUDES += -I ../spiffs
|
||||
INCLUDES += -I ../smart
|
||||
INCLUDES += -I ../cjson
|
||||
INCLUDES += -I ../dhtlib
|
||||
INCLUDES += -I ../fatfs
|
||||
INCLUDES += -I ../http
|
||||
INCLUDES += -I ../sjson
|
||||
INCLUDES += -I ../websocket
|
||||
PDIR := ../$(PDIR)
|
||||
sinclude $(PDIR)Makefile
|
||||
|
|
1630 app/modules/cjson.c
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
|
@ -0,0 +1,995 @@
|
|||
/**
|
||||
* JSON Simple/Stacked/Stateful Lexer.
|
||||
* - Does not buffer data
|
||||
* - Maintains state
|
||||
* - Callback oriented
|
||||
* - Lightweight and fast. One source file and one header file
|
||||
*
|
||||
* Copyright (C) 2012-2015 Mark Nunberg
|
||||
* See included LICENSE file for license details.
|
||||
*/
|
||||
|
||||
#ifndef JSONSL_H_
|
||||
#define JSONSL_H_
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <stddef.h>
|
||||
#include <string.h>
|
||||
#include <sys/types.h>
|
||||
#include <wchar.h>
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif /* __cplusplus */
|
||||
|
||||
#ifdef JSONSL_USE_WCHAR
|
||||
typedef jsonsl_char_t wchar_t;
|
||||
typedef jsonsl_uchar_t unsigned wchar_t;
|
||||
#else
|
||||
typedef char jsonsl_char_t;
|
||||
typedef unsigned char jsonsl_uchar_t;
|
||||
#endif /* JSONSL_USE_WCHAR */
|
||||
|
||||
/* Stolen from http-parser.h, and possibly others */
|
||||
#if defined(_WIN32) && !defined(__MINGW32__) && (!defined(_MSC_VER) || _MSC_VER<1600)
|
||||
typedef __int8 int8_t;
|
||||
typedef unsigned __int8 uint8_t;
|
||||
typedef __int16 int16_t;
|
||||
typedef unsigned __int16 uint16_t;
|
||||
typedef __int32 int32_t;
|
||||
typedef unsigned __int32 uint32_t;
|
||||
typedef __int64 int64_t;
|
||||
typedef unsigned __int64 uint64_t;
|
||||
#if !defined(_MSC_VER) || _MSC_VER<1400
|
||||
typedef unsigned int size_t;
|
||||
typedef int ssize_t;
|
||||
#endif
|
||||
#else
|
||||
#include <stdint.h>
|
||||
#endif
|
||||
|
||||
|
||||
#if (!defined(JSONSL_STATE_GENERIC)) && (!defined(JSONSL_STATE_USER_FIELDS))
|
||||
#define JSONSL_STATE_GENERIC
|
||||
#endif /* !defined JSONSL_STATE_GENERIC */
|
||||
|
||||
#ifdef JSONSL_STATE_GENERIC
|
||||
#define JSONSL_STATE_USER_FIELDS
|
||||
#endif /* JSONSL_STATE_GENERIC */
|
||||
|
||||
/* Additional fields for component object */
|
||||
#ifndef JSONSL_JPR_COMPONENT_USER_FIELDS
|
||||
#define JSONSL_JPR_COMPONENT_USER_FIELDS
|
||||
#endif
|
||||
|
||||
#ifndef JSONSL_API
|
||||
/**
|
||||
* We require a /DJSONSL_DLL so that users already using this as a static
|
||||
* or embedded library don't get confused
|
||||
*/
|
||||
#if defined(_WIN32) && defined(JSONSL_DLL)
|
||||
#define JSONSL_API __declspec(dllexport)
|
||||
#else
|
||||
#define JSONSL_API
|
||||
#endif /* _WIN32 */
|
||||
|
||||
#endif /* !JSONSL_API */
|
||||
|
||||
#ifndef JSONSL_INLINE
|
||||
#if defined(_MSC_VER)
|
||||
#define JSONSL_INLINE __inline
|
||||
#elif defined(__GNUC__)
|
||||
#define JSONSL_INLINE __inline__
|
||||
#else
|
||||
#define JSONSL_INLINE inline
|
||||
#endif /* _MSC_VER or __GNUC__ */
|
||||
#endif /* JSONSL_INLINE */
|
||||
|
||||
#define JSONSL_MAX_LEVELS 512
|
||||
|
||||
struct jsonsl_st;
|
||||
typedef struct jsonsl_st *jsonsl_t;
|
||||
|
||||
typedef struct jsonsl_jpr_st* jsonsl_jpr_t;
|
||||
|
||||
/**
|
||||
* This flag is true when AND'd against a type whose value
|
||||
* must be in "quoutes" i.e. T_HKEY and T_STRING
|
||||
*/
|
||||
#define JSONSL_Tf_STRINGY 0xffff00
|
||||
|
||||
/**
|
||||
* Constant representing the special JSON types.
|
||||
* The values are special and aid in speed (the OBJECT and LIST
|
||||
* values are the char literals of their openings).
|
||||
*
|
||||
* Their actual value is a character which attempts to resemble
|
||||
* some mnemonic reference to the actual type.
|
||||
*
|
||||
* If new types are added, they must fit into the ASCII printable
|
||||
* range (so they should be AND'd with 0x7f and yield something
|
||||
* meaningful)
|
||||
*/
|
||||
#define JSONSL_XTYPE \
|
||||
X(STRING, '"'|JSONSL_Tf_STRINGY) \
|
||||
X(HKEY, '#'|JSONSL_Tf_STRINGY) \
|
||||
X(OBJECT, '{') \
|
||||
X(LIST, '[') \
|
||||
X(SPECIAL, '^') \
|
||||
X(UESCAPE, 'u')
|
||||
typedef enum {
|
||||
#define X(o, c) \
|
||||
JSONSL_T_##o = c,
|
||||
JSONSL_XTYPE
|
||||
JSONSL_T_UNKNOWN = '?',
|
||||
/* Abstract 'root' object */
|
||||
JSONSL_T_ROOT = 0
|
||||
#undef X
|
||||
} jsonsl_type_t;
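/* Illustrative only (not part of jsonsl): because T_STRING and T_HKEY both
 * carry the JSONSL_Tf_STRINGY bits, "is this a quoted token?" is a single
 * mask test rather than a comparison against each type. */
static JSONSL_INLINE int is_stringy_type(jsonsl_type_t type)
{
    return (type & JSONSL_Tf_STRINGY) != 0;  /* true only for strings and hash keys */
}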
|
||||
|
||||
/**
|
||||
* Subtypes for T_SPECIAL. We define them as flags
|
||||
* because more than one type can be applied to a
|
||||
* given object.
|
||||
*/
|
||||
|
||||
#define JSONSL_XSPECIAL \
|
||||
X(NONE, 0) \
|
||||
X(SIGNED, 1<<0) \
|
||||
X(UNSIGNED, 1<<1) \
|
||||
X(TRUE, 1<<2) \
|
||||
X(FALSE, 1<<3) \
|
||||
X(NULL, 1<<4) \
|
||||
X(FLOAT, 1<<5) \
|
||||
X(EXPONENT, 1<<6) \
|
||||
X(NONASCII, 1<<7)
|
||||
typedef enum {
|
||||
#define X(o,b) \
|
||||
JSONSL_SPECIALf_##o = b,
|
||||
JSONSL_XSPECIAL
|
||||
#undef X
|
||||
/* Handy flags for checking */
|
||||
|
||||
JSONSL_SPECIALf_UNKNOWN = 1 << 8,
|
||||
|
||||
/** @private Private */
|
||||
JSONSL_SPECIALf_ZERO = 1 << 9 | JSONSL_SPECIALf_UNSIGNED,
|
||||
/** @private */
|
||||
JSONSL_SPECIALf_DASH = 1 << 10,
|
||||
|
||||
/** Type is numeric */
|
||||
JSONSL_SPECIALf_NUMERIC = (JSONSL_SPECIALf_SIGNED| JSONSL_SPECIALf_UNSIGNED),
|
||||
|
||||
/** Type is a boolean */
|
||||
JSONSL_SPECIALf_BOOLEAN = (JSONSL_SPECIALf_TRUE|JSONSL_SPECIALf_FALSE),
|
||||
|
||||
/** Type is an "extended", not integral type (but numeric) */
|
||||
JSONSL_SPECIALf_NUMNOINT = (JSONSL_SPECIALf_FLOAT|JSONSL_SPECIALf_EXPONENT)
|
||||
} jsonsl_special_t;
|
||||
|
||||
|
||||
/**
|
||||
* These are the various types of stack (or other) events
|
||||
* which will trigger a callback.
|
||||
* Like the type constants, this are also mnemonic
|
||||
*/
|
||||
#define JSONSL_XACTION \
|
||||
X(PUSH, '+') \
|
||||
X(POP, '-') \
|
||||
X(UESCAPE, 'U') \
|
||||
X(ERROR, '!')
|
||||
typedef enum {
|
||||
#define X(a,c) \
|
||||
JSONSL_ACTION_##a = c,
|
||||
JSONSL_XACTION
|
||||
JSONSL_ACTION_UNKNOWN = '?'
|
||||
#undef X
|
||||
} jsonsl_action_t;
|
||||
|
||||
|
||||
/**
|
||||
* Various errors which may be thrown while parsing JSON
|
||||
*/
|
||||
#define JSONSL_XERR \
|
||||
/* Trailing garbage characters */ \
|
||||
X(GARBAGE_TRAILING) \
|
||||
/* We were expecting a 'special' (numeric, true, false, null) */ \
|
||||
X(SPECIAL_EXPECTED) \
|
||||
/* The 'special' value was incomplete */ \
|
||||
X(SPECIAL_INCOMPLETE) \
|
||||
/* Found a stray token */ \
|
||||
X(STRAY_TOKEN) \
|
||||
/* We were expecting a token before this one */ \
|
||||
X(MISSING_TOKEN) \
|
||||
/* Cannot insert because the container is not ready */ \
|
||||
X(CANT_INSERT) \
|
||||
/* Found a '\' outside a string */ \
|
||||
X(ESCAPE_OUTSIDE_STRING) \
|
||||
/* Found a ':' outside of a hash */ \
|
||||
X(KEY_OUTSIDE_OBJECT) \
|
||||
/* found a string outside of a container */ \
|
||||
X(STRING_OUTSIDE_CONTAINER) \
|
||||
/* Found a null byte in middle of string */ \
|
||||
X(FOUND_NULL_BYTE) \
|
||||
/* Current level exceeds limit specified in constructor */ \
|
||||
X(LEVELS_EXCEEDED) \
|
||||
/* Got a } as a result of an opening [ or vice versa */ \
|
||||
X(BRACKET_MISMATCH) \
|
||||
/* We expected a key, but got something else instead */ \
|
||||
X(HKEY_EXPECTED) \
|
||||
/* We got an illegal control character (bad whitespace or something) */ \
|
||||
X(WEIRD_WHITESPACE) \
|
||||
/* Found a \u-escape, but there were less than 4 following hex digits */ \
|
||||
X(UESCAPE_TOOSHORT) \
|
||||
/* Invalid two-character escape */ \
|
||||
X(ESCAPE_INVALID) \
|
||||
/* Trailing comma */ \
|
||||
X(TRAILING_COMMA) \
|
||||
/* An invalid number was passed in a numeric field */ \
|
||||
X(INVALID_NUMBER) \
|
||||
/* Value is missing for object */ \
|
||||
X(VALUE_EXPECTED) \
|
||||
/* The following are for JPR Stuff */ \
|
||||
\
|
||||
/* Found a literal '%' but it was only followed by a single valid hex digit */ \
|
||||
X(PERCENT_BADHEX) \
|
||||
/* jsonpointer URI is malformed '/' */ \
|
||||
X(JPR_BADPATH) \
|
||||
/* Duplicate slash */ \
|
||||
X(JPR_DUPSLASH) \
|
||||
/* No leading root */ \
|
||||
X(JPR_NOROOT) \
|
||||
/* Allocation failure */ \
|
||||
X(ENOMEM) \
|
||||
/* Invalid unicode codepoint detected (in case of escapes) */ \
|
||||
X(INVALID_CODEPOINT)
|
||||
|
||||
typedef enum {
|
||||
JSONSL_ERROR_SUCCESS = 0,
|
||||
#define X(e) \
|
||||
JSONSL_ERROR_##e,
|
||||
JSONSL_XERR
|
||||
#undef X
|
||||
JSONSL_ERROR_GENERIC
|
||||
} jsonsl_error_t;
|
||||
|
||||
|
||||
/**
|
||||
* A state is a single level of the stack.
|
||||
* Non-private data (i.e. the 'data' field, see the STATE_GENERIC section)
|
||||
* will remain intact until the item is popped.
|
||||
*
|
||||
* As a result, a parent state object may be accessed from a child
* object (the parent's fields will all be valid). This allows a user to create
|
||||
* an ad-hoc hierarchy on top of the JSON one.
|
||||
*
|
||||
*/
|
||||
struct jsonsl_state_st {
|
||||
/**
|
||||
* The JSON object type
|
||||
*/
|
||||
unsigned int type;
|
||||
|
||||
/**
|
||||
* The position (in terms of number of bytes since the first call to
|
||||
* jsonsl_feed()) at which the state was first pushed. This includes
|
||||
* opening tokens, if applicable.
|
||||
*
|
||||
* @note For strings (i.e. type & JSONSL_Tf_STRINGY is nonzero) this will
|
||||
* be the position of the first quote.
|
||||
*
|
||||
* @see jsonsl_st::pos which contains the _current_ position and can be
|
||||
* used during a POP callback to get the length of the element.
|
||||
*/
|
||||
size_t pos_begin;
|
||||
|
||||
/**FIXME: This is redundant as the same information can be derived from
|
||||
* jsonsl_st::pos at pop-time */
|
||||
size_t pos_cur;
|
||||
|
||||
/** If this element is special, then its extended type is here */
|
||||
unsigned short special_flags;
|
||||
|
||||
/**
|
||||
* Level of recursion into nesting. This is mainly a convenience
|
||||
* variable, as this can technically be deduced from the lexer's
|
||||
* level parameter (though the logic is not that simple)
|
||||
*/
|
||||
unsigned short level;
|
||||
|
||||
|
||||
/**
|
||||
* how many elements in the object/list.
|
||||
* For objects (hashes), an element is either
|
||||
* a key or a value. Thus for one complete pair,
|
||||
* nelem will be 2.
|
||||
*
|
||||
* For special types, this accumulates the numeric value parsed from the digits.
|
||||
* This only holds true for values which are simple signed/unsigned
|
||||
* numbers. Otherwise a special flag is set, and extra handling is not
|
||||
* performed.
|
||||
*/
|
||||
uint32_t nelem;
|
||||
|
||||
|
||||
|
||||
/*TODO: merge this and special_flags into a union */
|
||||
|
||||
|
||||
/**
|
||||
* Useful for an opening nest, this will prevent a callback from being
|
||||
* invoked on this item or any of its children
|
||||
*/
|
||||
int ignore_callback : 1;
|
||||
|
||||
/**
|
||||
* Counter which is incremented each time an escape ('\') is encountered.
|
||||
* This is used internally for non-string types and should only be
|
||||
* inspected by the user if the state actually represents a string
|
||||
* type.
|
||||
*/
|
||||
unsigned int nescapes : 31;
|
||||
|
||||
/**
|
||||
* Put anything you want here. if JSONSL_STATE_USER_FIELDS is here, then
|
||||
* the macro expansion happens here.
|
||||
*
|
||||
* You can use these fields to store hierarchical or 'tagging' information
|
||||
* for specific objects.
|
||||
*
|
||||
* See the documentation above for the lifetime of the state object (i.e.
|
||||
* if the private data points to allocated memory, it should be freed
|
||||
* when the object is popped, as the state object will be re-used)
|
||||
*/
|
||||
#ifndef JSONSL_STATE_GENERIC
|
||||
JSONSL_STATE_USER_FIELDS
|
||||
#else
|
||||
|
||||
/**
|
||||
* Otherwise, this is a simple void * pointer for anything you want
|
||||
*/
|
||||
void *data;
|
||||
#endif /* JSONSL_STATE_USER_FIELDS */
|
||||
};
|
||||
|
||||
/**Gets the number of elements in the list.
|
||||
* @param st The state. Must be of type JSONSL_T_LIST
|
||||
* @return number of elements in the list
|
||||
*/
|
||||
#define JSONSL_LIST_SIZE(st) ((st)->nelem)
|
||||
|
||||
/**Gets the number of key-value pairs in an object
|
||||
* @param st The state. Must be of type JSONSL_T_OBJECT
|
||||
* @return the number of key-value pairs in the object
|
||||
*/
|
||||
#define JSONSL_OBJECT_SIZE(st) ((st)->nelem / 2)
|
||||
|
||||
/**Gets the numeric value.
|
||||
* @param st The state. Must be of type JSONSL_T_SPECIAL and
|
||||
* special_flags must have the JSONSL_SPECIALf_NUMERIC flag
|
||||
* set.
|
||||
* @return the numeric value of the state.
|
||||
*/
|
||||
#define JSONSL_NUMERIC_VALUE(st) ((st)->nelem)
|
||||
|
||||
/*
|
||||
* So now we need some special structure for keeping the
|
||||
* JPR info in sync. Preferably all in a single block
* of memory (there's no need for separate allocations).
|
||||
* So we will define a 'table' with the following layout
|
||||
*
|
||||
* Level nPosbl JPR1_last JPR2_last JPR3_last
|
||||
*
|
||||
* 0 1 NOMATCH POSSIBLE POSSIBLE
|
||||
* 1 0 NOMATCH NOMATCH COMPLETE
|
||||
* [ table ends here because no further path is possible]
|
||||
*
|
||||
* Where the JPR..n corresponds to the number of JPRs
|
||||
* requested, and nPosble is a quick flag to determine
|
||||
*
|
||||
* the number of possibilities. In the future this might
|
||||
* be made into a proper 'jump' table,
|
||||
*
|
||||
* Since we always mark JPRs from the higher levels descending
|
||||
* into the lower ones, a prospective child match would first
|
||||
* look at the parent table to check the possibilities, and then
|
||||
* see which ones were possible..
|
||||
*
|
||||
* Thus, the size of this blob would be (and these are all ints here)
|
||||
* nLevels * nJPR * 2.
|
||||
*
|
||||
* the 'Width' of the table would be nJPR*2, and the 'height' would be
|
||||
* nlevels
|
||||
*/
|
||||
|
||||
/**
|
||||
* This is called when a stack change occurs.
|
||||
*
|
||||
* @param jsn The lexer
|
||||
* @param action The type of action, this can be PUSH or POP
|
||||
* @param state A pointer to the stack currently affected by the action
|
||||
* @param at A pointer to the position of the input buffer which triggered
|
||||
* this action.
|
||||
*/
|
||||
typedef void (*jsonsl_stack_callback)(
|
||||
jsonsl_t jsn,
|
||||
jsonsl_action_t action,
|
||||
struct jsonsl_state_st* state,
|
||||
const jsonsl_char_t *at);
|
||||
|
||||
|
||||
/**
|
||||
* This is called when an error is encountered.
|
||||
* Sometimes it's possible to 'erase' characters (by replacing them
|
||||
* with whitespace). If you think you have corrected the error, you
|
||||
* can return a true value, in which case the parser will backtrack
|
||||
* and try again.
|
||||
*
|
||||
* @param jsn The lexer
|
||||
* @param error The error which was thrown
|
||||
* @param state the current state
|
||||
* @param at A pointer to the position of the input buffer which triggered
|
||||
* the error. Note that this is not const, this is because you have the
|
||||
* possibility of modifying the character in an attempt to correct the
|
||||
* error
|
||||
*
|
||||
* @return zero to bail, nonzero to try again (this only makes sense if
|
||||
* the input buffer has been modified by this callback)
|
||||
*/
|
||||
typedef int (*jsonsl_error_callback)(
|
||||
jsonsl_t jsn,
|
||||
jsonsl_error_t error,
|
||||
struct jsonsl_state_st* state,
|
||||
jsonsl_char_t *at);
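/* Illustrative only: minimal consumer callbacks matching the two typedefs
 * above. Assumes a hosted build where printf() is available (stdio.h is
 * already included at the top of this header). */
static void example_on_pop(jsonsl_t jsn, jsonsl_action_t action,
                           struct jsonsl_state_st *state,
                           const jsonsl_char_t *at)
{
    (void)jsn; (void)action; (void)at;
    if (state->type == JSONSL_T_OBJECT)
        printf("object closed: %lu pairs (opened at byte %lu)\n",
               (unsigned long)JSONSL_OBJECT_SIZE(state),
               (unsigned long)state->pos_begin);
}

static int example_on_error(jsonsl_t jsn, jsonsl_error_t error,
                            struct jsonsl_state_st *state, jsonsl_char_t *at)
{
    (void)jsn; (void)state; (void)at;
    printf("parse error %d\n", (int)error);
    return 0;   /* zero: give up rather than attempt a repair */
}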
|
||||
|
||||
struct jsonsl_st {
|
||||
/** Public, read-only */
|
||||
|
||||
/** This is the current level of the stack */
|
||||
unsigned int level;
|
||||
|
||||
/** Flag set to indicate we should stop processing */
|
||||
unsigned int stopfl;
|
||||
|
||||
/**
|
||||
* This is the current position, relative to the beginning
|
||||
* of the stream.
|
||||
*/
|
||||
size_t pos;
|
||||
|
||||
/** This is the 'bytes' variable passed to feed() */
|
||||
const jsonsl_char_t *base;
|
||||
|
||||
/** Callback invoked for PUSH actions */
|
||||
jsonsl_stack_callback action_callback_PUSH;
|
||||
|
||||
/** Callback invoked for POP actions */
|
||||
jsonsl_stack_callback action_callback_POP;
|
||||
|
||||
/** Default callback for any action, if neither PUSH nor POP callbacks are defined */
|
||||
jsonsl_stack_callback action_callback;
|
||||
|
||||
/**
|
||||
* Do not invoke callbacks for objects deeper than this level.
|
||||
* NOTE: This field establishes the lower bound for ignored callbacks,
|
||||
* and is thus misnamed. `min_ignore_level` would actually make more
|
||||
* sense, but we don't want to break API.
|
||||
*/
|
||||
unsigned int max_callback_level;
|
||||
|
||||
/** The error callback. Invoked when an error happens. Should not be NULL */
|
||||
jsonsl_error_callback error_callback;
|
||||
|
||||
/* These are boolean flags you can modify. A callback will be
|
||||
 * invoked for each of these types if the corresponding
|
||||
* variable is true.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @name Callback Booleans.
|
||||
* These determine whether a callback is to be invoked for certain types of objects
|
||||
* @{*/
|
||||
|
||||
/** Boolean flag to enable or disable the invocation of callbacks for events of this type */
|
||||
int call_SPECIAL;
|
||||
int call_OBJECT;
|
||||
int call_LIST;
|
||||
int call_STRING;
|
||||
int call_HKEY;
|
||||
/*@}*/
|
||||
|
||||
/**
|
||||
* @name u-Escape handling
|
||||
* Special handling for the \\u-f00d type sequences. These are meant
|
||||
* to be translated back into the corresponding octet(s).
|
||||
* A special callback (if set) is invoked with *at=='u'. An application
|
||||
* may wish to temporarily suspend parsing and handle the 'u-' sequence
|
||||
* internally (or not).
|
||||
*/
|
||||
|
||||
/*@{*/
|
||||
|
||||
/** Callback to be invoked for a u-escape */
|
||||
jsonsl_stack_callback action_callback_UESCAPE;
|
||||
|
||||
/** Boolean flag, whether to invoke the callback */
|
||||
int call_UESCAPE;
|
||||
|
||||
/** Boolean flag, whether we should return after encountering a u-escape:
|
||||
* the callback is invoked and then we return if this is true
|
||||
*/
|
||||
int return_UESCAPE;
|
||||
/*@}*/
|
||||
|
||||
struct {
|
||||
int allow_trailing_comma;
|
||||
} options;
|
||||
|
||||
/** Put anything here */
|
||||
void *data;
|
||||
|
||||
/*@{*/
|
||||
/** Private */
|
||||
int in_escape;
|
||||
char expecting;
|
||||
char tok_last;
|
||||
int can_insert;
|
||||
unsigned int levels_max;
|
||||
|
||||
#ifndef JSONSL_NO_JPR
|
||||
size_t jpr_count;
|
||||
jsonsl_jpr_t *jprs;
|
||||
|
||||
/* Root pointer for JPR matching information */
|
||||
size_t *jpr_root;
|
||||
#endif /* JSONSL_NO_JPR */
|
||||
/*@}*/
|
||||
|
||||
/**
|
||||
* This is the stack. Its upper bound is levels_max, or the
|
||||
* nlevels argument passed to jsonsl_new. If you modify this structure,
|
||||
* make sure that this member is last.
|
||||
*/
|
||||
struct jsonsl_state_st stack[1];
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Creates a new lexer object, with capacity for recursion up to nlevels
|
||||
*
|
||||
* @param nlevels maximum recursion depth
|
||||
*/
|
||||
JSONSL_API
|
||||
jsonsl_t jsonsl_new(int nlevels);
|
||||
|
||||
JSONSL_API
|
||||
jsonsl_t jsonsl_init(jsonsl_t jsn, int nlevels);
|
||||
|
||||
JSONSL_API
|
||||
size_t jsonsl_get_size(int nlevels);
|
||||
|
||||
/**
|
||||
* Feeds data into the lexer.
|
||||
*
|
||||
* @param jsn the lexer object
|
||||
* @param bytes new data to be fed
|
||||
* @param nbytes size of new data
|
||||
*/
|
||||
JSONSL_API
|
||||
void jsonsl_feed(jsonsl_t jsn, const jsonsl_char_t *bytes, size_t nbytes);
|
||||
|
||||
/**
|
||||
* Resets the internal parser state. This does not free the parser
|
||||
* but does clean it internally, so that the next time feed() is called,
|
||||
* it will be treated as a new stream
|
||||
*
|
||||
* @param jsn the lexer
|
||||
*/
|
||||
JSONSL_API
|
||||
void jsonsl_reset(jsonsl_t jsn);
|
||||
|
||||
/**
|
||||
* Frees the lexer, cleaning any allocated memory taken
|
||||
*
|
||||
* @param jsn the lexer
|
||||
*/
|
||||
JSONSL_API
|
||||
void jsonsl_destroy(jsonsl_t jsn);
|
||||
|
||||
/**
|
||||
* Gets the 'parent' element, given the current one
|
||||
*
|
||||
* @param jsn the lexer
|
||||
 * @param state the current state, which should be a struct jsonsl_state_st
|
||||
*/
|
||||
static JSONSL_INLINE
|
||||
struct jsonsl_state_st *jsonsl_last_state(const jsonsl_t jsn,
|
||||
const struct jsonsl_state_st *state)
|
||||
{
|
||||
/* Don't complain about overriding array bounds */
|
||||
if (state->level > 1) {
|
||||
return jsn->stack + state->level - 1;
|
||||
} else {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the state of the last fully consumed child of this parent. This is
|
||||
* only valid in the parent's POP callback.
|
||||
*
|
||||
 * @param jsn the lexer
 * @param parent the parent state
|
||||
* @return A pointer to the child.
|
||||
*/
|
||||
static JSONSL_INLINE
|
||||
struct jsonsl_state_st *jsonsl_last_child(const jsonsl_t jsn,
|
||||
const struct jsonsl_state_st *parent)
|
||||
{
|
||||
return jsn->stack + (parent->level + 1);
|
||||
}
|
||||
|
||||
/**Call to instruct the parser to stop parsing and return. This is valid
|
||||
* only from within a callback */
|
||||
static JSONSL_INLINE
|
||||
void jsonsl_stop(jsonsl_t jsn)
|
||||
{
|
||||
jsn->stopfl = 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* This enables receiving callbacks on all events. Doesn't do
|
||||
* anything special but helps avoid some boilerplate.
|
||||
* This does not touch the UESCAPE callbacks or flags.
|
||||
*/
|
||||
static JSONSL_INLINE
|
||||
void jsonsl_enable_all_callbacks(jsonsl_t jsn)
|
||||
{
|
||||
jsn->call_HKEY = 1;
|
||||
jsn->call_STRING = 1;
|
||||
jsn->call_OBJECT = 1;
|
||||
jsn->call_SPECIAL = 1;
|
||||
jsn->call_LIST = 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* A macro which returns true if the current state object can
|
||||
* have children. This means a list type or an object type.
|
||||
*/
|
||||
#define JSONSL_STATE_IS_CONTAINER(state) \
|
||||
(state->type == JSONSL_T_OBJECT || state->type == JSONSL_T_LIST)
|
||||
|
||||
/**
|
||||
 * These two functions dump a string representation
|
||||
* of the error or type, respectively. They will never
|
||||
* return NULL
|
||||
*/
|
||||
JSONSL_API
|
||||
const char* jsonsl_strerror(jsonsl_error_t err);
|
||||
JSONSL_API
|
||||
const char* jsonsl_strtype(jsonsl_type_t jt);
|
||||
|
||||
/**
|
||||
* Dumps global metrics to the screen. This is a noop unless
|
||||
* jsonsl was compiled with JSONSL_USE_METRICS
|
||||
*/
|
||||
JSONSL_API
|
||||
void jsonsl_dump_global_metrics(void);
|
||||
|
||||
/* This macro just here for editors to do code folding */
|
||||
#ifndef JSONSL_NO_JPR
|
||||
|
||||
/**
|
||||
* @name JSON Pointer API
|
||||
*
|
||||
* JSONPointer API. This isn't really related to the lexer (at least not yet)
|
||||
* JSONPointer provides an extremely simple specification for providing
|
||||
* locations within JSON objects. We will extend it a bit and allow for
|
||||
 * providing 'wildcard' characters with which to 'query' the stream.
|
||||
*
|
||||
* See http://tools.ietf.org/html/draft-pbryan-zyp-json-pointer-00
|
||||
*
|
||||
* Currently I'm implementing the 'single query' API which can only use a single
|
||||
* query component. In the future I will integrate my yet-to-be-published
|
||||
* Boyer-Moore-esque prefix searching implementation, in order to allow
|
||||
* multiple paths to be merged into one for quick and efficient searching.
|
||||
*
|
||||
*
|
||||
* JPR (as we'll refer to it within the source) can be used by splitting
|
||||
 * the components into multiple sections, and incrementally 'track' each
|
||||
* component. When JSONSL delivers a 'pop' callback for a string, or a 'push'
|
||||
* callback for an object, we will check to see whether the index matching
|
||||
* the component corresponding to the current level contains a match
|
||||
* for our path.
|
||||
*
|
||||
* In order to do this properly, a structure must be maintained within the
|
||||
* parent indicating whether its children are possible matches. This flag
|
||||
 * will be 'inherited' by all children which may conform to the match
|
||||
* specification, and discarded by all which do not (thereby eliminating
|
||||
* their children from inheriting it).
|
||||
*
|
||||
* A successful match is a complete one. One can provide multiple paths with
|
||||
* multiple levels of matches e.g.
|
||||
* /foo/bar/baz/^/blah
|
||||
*
|
||||
* @{
|
||||
*/
|
||||
|
||||
/** The wildcard character */
|
||||
#ifndef JSONSL_PATH_WILDCARD_CHAR
|
||||
#define JSONSL_PATH_WILDCARD_CHAR '^'
|
||||
#endif /* WILDCARD_CHAR */
|
||||
|
||||
#define JSONSL_XMATCH \
|
||||
X(COMPLETE,1) \
|
||||
X(POSSIBLE,0) \
|
||||
X(NOMATCH,-1) \
|
||||
X(TYPE_MISMATCH, -2)
|
||||
|
||||
typedef enum {
|
||||
|
||||
#define X(T,v) \
|
||||
JSONSL_MATCH_##T = v,
|
||||
JSONSL_XMATCH
|
||||
|
||||
#undef X
|
||||
JSONSL_MATCH_UNKNOWN
|
||||
} jsonsl_jpr_match_t;
|
||||
|
||||
typedef enum {
|
||||
JSONSL_PATH_STRING = 1,
|
||||
JSONSL_PATH_WILDCARD,
|
||||
JSONSL_PATH_NUMERIC,
|
||||
JSONSL_PATH_ROOT,
|
||||
|
||||
/* Special */
|
||||
JSONSL_PATH_INVALID = -1,
|
||||
JSONSL_PATH_NONE = 0
|
||||
} jsonsl_jpr_type_t;
|
||||
|
||||
struct jsonsl_jpr_component_st {
|
||||
/** The string the component points to */
|
||||
char *pstr;
|
||||
/** if this is a numeric type, the number is 'cached' here */
|
||||
unsigned long idx;
|
||||
/** The length of the string */
|
||||
size_t len;
|
||||
/** The type of component (NUMERIC or STRING) */
|
||||
jsonsl_jpr_type_t ptype;
|
||||
|
||||
/** Set this to true to enforce type checking between dict keys and array
|
||||
* indices. jsonsl_jpr_match() will return TYPE_MISMATCH if it detects
|
||||
* that an array index is actually a child of a dictionary. */
|
||||
short is_arridx;
|
||||
|
||||
/* Extra fields (for more advanced searches. Default is empty) */
|
||||
JSONSL_JPR_COMPONENT_USER_FIELDS
|
||||
};
|
||||
|
||||
struct jsonsl_jpr_st {
|
||||
/** Path components */
|
||||
struct jsonsl_jpr_component_st *components;
|
||||
size_t ncomponents;
|
||||
|
||||
/**Type of the match to be expected. If nonzero, will be compared against
|
||||
* the actual type */
|
||||
unsigned match_type;
|
||||
|
||||
/** Base of allocated string for components */
|
||||
char *basestr;
|
||||
|
||||
/** The original match string. Useful for returning to the user */
|
||||
char *orig;
|
||||
size_t norig;
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a new JPR object.
|
||||
*
|
||||
* @param path the JSONPointer path specification.
|
||||
* @param errp a pointer to a jsonsl_error_t. If this function returns NULL,
|
||||
* then more details will be in this variable.
|
||||
*
|
||||
* @return a new jsonsl_jpr_t object, or NULL on error.
|
||||
*/
|
||||
JSONSL_API
|
||||
jsonsl_jpr_t jsonsl_jpr_new(const char *path, jsonsl_error_t *errp);
|
||||
|
||||
/**
|
||||
* Destroy a JPR object
|
||||
*/
|
||||
JSONSL_API
|
||||
void jsonsl_jpr_destroy(jsonsl_jpr_t jpr);
|
||||
|
||||
/**
|
||||
* Match a JSON object against a type and specific level
|
||||
*
|
||||
* @param jpr the JPR object
|
||||
* @param parent_type the type of the parent (should be T_LIST or T_OBJECT)
|
||||
* @param parent_level the level of the parent
|
||||
* @param key the 'key' of the child. If the parent is an array, this should be
|
||||
* empty.
|
||||
* @param nkey - the length of the key. If the parent is an array (T_LIST), then
|
||||
* this should be the current index.
|
||||
*
|
||||
* NOTE: The key of the child means any kind of associative data related to the
|
||||
 * element. Thus: <<< { "foo" : [ >>>,
|
||||
* the opening array's key is "foo".
|
||||
*
|
||||
* @return a status constant. This indicates whether a match was excluded, possible,
|
||||
* or successful.
|
||||
*/
|
||||
JSONSL_API
|
||||
jsonsl_jpr_match_t jsonsl_jpr_match(jsonsl_jpr_t jpr,
|
||||
unsigned int parent_type,
|
||||
unsigned int parent_level,
|
||||
const char *key, size_t nkey);
|
||||
|
||||
/**
|
||||
* Alternate matching algorithm. This matching algorithm does not use
|
||||
* JSONPointer but relies on a more structured searching mechanism. It
|
||||
* assumes that there is a clear distinction between array indices and
|
||||
* object keys. In this case, the jsonsl_path_component_st::ptype should
|
||||
* be set to @ref JSONSL_PATH_NUMERIC for an array index (the
|
||||
 * jsonsl_path_component_st::is_arridx field will be removed in a future
|
||||
* version).
|
||||
*
|
||||
* @param jpr The path
|
||||
* @param parent The parent structure. Can be NULL if this is the root object
|
||||
* @param child The child structure. Should not be NULL
|
||||
* @param key Object key, if an object
|
||||
* @param nkey Length of object key
|
||||
* @return Status constant if successful
|
||||
*
|
||||
* @note
|
||||
* For successful matching, both the key and the path itself should be normalized
|
||||
* to contain 'proper' utf8 sequences rather than utf16 '\uXXXX' escapes. This
|
||||
* should currently be done in the application. Another version of this function
|
||||
* may use a temporary buffer in such circumstances (allocated by the application).
|
||||
*
|
||||
* Since this function also checks the state of the child, it should only
|
||||
* be called on PUSH callbacks, and not POP callbacks
|
||||
*/
|
||||
JSONSL_API
|
||||
jsonsl_jpr_match_t
|
||||
jsonsl_path_match(jsonsl_jpr_t jpr,
|
||||
const struct jsonsl_state_st *parent,
|
||||
const struct jsonsl_state_st *child,
|
||||
const char *key, size_t nkey);
|
||||
|
||||
|
||||
/**
|
||||
* Associate a set of JPR objects with a lexer instance.
|
||||
* This should be called before the lexer has been fed any data (and
|
||||
* behavior is undefined if you don't adhere to this).
|
||||
*
|
||||
* After using this function, you may subsequently call match_state() on
|
||||
* given states (presumably from within the callbacks).
|
||||
*
|
||||
* Note that currently the first JPR is the quickest and comes
|
||||
* pre-allocated with the state structure. Further JPR objects
|
||||
* are chained.
|
||||
*
|
||||
* @param jsn The lexer
|
||||
* @param jprs An array of jsonsl_jpr_t objects
|
||||
* @param njprs How many elements in the jprs array.
|
||||
*/
|
||||
JSONSL_API
|
||||
void jsonsl_jpr_match_state_init(jsonsl_t jsn,
|
||||
jsonsl_jpr_t *jprs,
|
||||
size_t njprs);
|
||||
|
||||
/**
|
||||
* This follows the same semantics as the normal match,
|
||||
* except we infer parent and type information from the relevant state objects.
|
||||
* The match status (for all possible JPR objects) is set in the *out parameter.
|
||||
*
|
||||
* If a match has succeeded, then its JPR object will be returned. In all other
|
||||
 * instances, NULL is returned.
|
||||
*
|
||||
* @param jpr The jsonsl_jpr_t handle
|
||||
* @param state The jsonsl_state_st which is a candidate
|
||||
* @param key The hash key (if applicable, can be NULL if parent is list)
|
||||
* @param nkey Length of hash key (if applicable, can be zero if parent is list)
|
||||
* @param out A pointer to a jsonsl_jpr_match_t. This will be populated with
|
||||
* the match result
|
||||
*
|
||||
* @return If a match was completed in full, then the JPR object containing
|
||||
* the matching path will be returned. Otherwise, the return is NULL (note, this
|
||||
* does not mean matching has failed, it can still be part of the match: check
|
||||
* the out parameter).
|
||||
*/
|
||||
JSONSL_API
|
||||
jsonsl_jpr_t jsonsl_jpr_match_state(jsonsl_t jsn,
|
||||
struct jsonsl_state_st *state,
|
||||
const char *key,
|
||||
size_t nkey,
|
||||
jsonsl_jpr_match_t *out);
|
||||
|
||||
|
||||
/**
|
||||
* Cleanup any memory allocated and any states set by
|
||||
* match_state_init() and match_state()
|
||||
* @param jsn The lexer
|
||||
*/
|
||||
JSONSL_API
|
||||
void jsonsl_jpr_match_state_cleanup(jsonsl_t jsn);
|
||||
|
||||
/**
|
||||
* Return a string representation of the match result returned by match()
|
||||
*/
|
||||
JSONSL_API
|
||||
const char *jsonsl_strmatchtype(jsonsl_jpr_match_t match);
|
||||
|
||||
/* @}*/
|
||||
|
||||
/**
|
||||
* Utility function to convert escape sequences into their original form.
|
||||
*
|
||||
* The decoders I've sampled do not seem to specify a standard behavior of what
|
||||
* to escape/unescape.
|
||||
*
|
||||
 * RFC 4627 mandates only that the quote, backslash, and ASCII control
|
||||
 * characters (0x00-0x1f) be escaped. It is common for applications
|
||||
 * to escape a '/' - however this may also be desired behavior. The JSON
|
||||
* spec is not clear on this, and therefore jsonsl leaves it up to you.
|
||||
*
|
||||
* Additionally, sometimes you may wish to _normalize_ JSON. This is specifically
|
||||
* true when dealing with 'u-escapes' which can be expressed perfectly fine
|
||||
* as utf8. One use case for normalization is JPR string comparison, in which
|
||||
* case two effectively equivalent strings may not match because one is using
|
||||
* u-escapes and the other proper utf8. To normalize u-escapes only, pass in
|
||||
* an empty `toEscape` table, enabling only the `u` index.
|
||||
*
|
||||
* @param in The input string.
|
||||
* @param out An allocated output (should be the same size as in)
|
||||
* @param len the size of the buffer
|
||||
* @param toEscape - A sparse array of characters to unescape. Characters
|
||||
* which are not present in this array, e.g. toEscape['c'] == 0 will be
|
||||
* ignored and passed to the output in their original form.
|
||||
* @param oflags If not null, and a \uXXXX escape expands to a non-ascii byte,
|
||||
* then this variable will have the SPECIALf_NONASCII flag on.
|
||||
*
|
||||
 * @param err A pointer to an error variable. If an error occurs, it will be
|
||||
* set in this variable
|
||||
* @param errat If not null and an error occurs, this will be set to point
|
||||
* to the position within the string at which the offending character was
|
||||
* encountered.
|
||||
*
|
||||
* @return The effective size of the output buffer.
|
||||
*
|
||||
* @note
|
||||
* This function now encodes the UTF8 equivalents of utf16 escapes (i.e.
|
||||
* 'u-escapes'). Previously this would encode the escapes as utf16 literals,
|
||||
* which while still correct in some sense was confusing for many (especially
|
||||
* considering that the inputs were variations of char).
|
||||
*
|
||||
* @note
|
||||
* The output buffer will never be larger than the input buffer, since
|
||||
* standard escape sequences (i.e. '\t') occupy two bytes in the source
|
||||
* but only one byte (when unescaped) in the output. Likewise u-escapes
|
||||
* (i.e. \uXXXX) will occupy six bytes in the source, but at the most
|
||||
 * three bytes when unescaped.
|
||||
*/
|
||||
JSONSL_API
|
||||
size_t jsonsl_util_unescape_ex(const char *in,
|
||||
char *out,
|
||||
size_t len,
|
||||
const int toEscape[128],
|
||||
unsigned *oflags,
|
||||
jsonsl_error_t *err,
|
||||
const char **errat);
|
||||
|
||||
/**
|
||||
* Convenience macro to avoid passing too many parameters
|
||||
*/
|
||||
#define jsonsl_util_unescape(in, out, len, toEscape, err) \
|
||||
jsonsl_util_unescape_ex(in, out, len, toEscape, NULL, err, NULL)
|
||||
|
||||
#endif /* JSONSL_NO_JPR */
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif /* __cplusplus */
|
||||
|
||||
#endif /* JSONSL_H_ */
|
|
@@ -1,52 +1,5 @@
|
|||
# CJSON Module
|
||||
| Since | Origin / Contributor | Maintainer | Source |
|
||||
| :----- | :-------------------- | :---------- | :------ |
|
||||
| 2015-03-16 | [Mark Pulford](http://kyne.com.au/~mark/software/lua-cjson.php), [Zeroday](https://github.com/funshine) | [Zeroday](https://github.com/funshine) | [cjson](../../../app/modules/cjson.c) |
|
||||
|
||||
The JSON support module. Allows encoding and decoding to/from JSON.
|
||||
This module has been replaced by [sjson](sjson.md), which provides a superset of its functionality. All references to `cjson` can be replaced by `sjson`.
|
||||
|
||||
Please note that nested tables can require a lot of memory to encode. To catch out-of-memory errors, use `pcall()`.
|
||||
|
||||
## cjson.encode()
|
||||
|
||||
Encode a Lua table to a JSON string. For details see the [documentation of the original Lua library](http://kyne.com.au/~mark/software/lua-cjson-manual.html#encode).
|
||||
|
||||
####Syntax
|
||||
`cjson.encode(table)`
|
||||
|
||||
####Parameters
|
||||
`table` data to encode
|
||||
|
||||
While it also is possible to encode plain strings and numbers rather than a table, it is not particularly useful to do so.
|
||||
|
||||
####Returns
|
||||
JSON string
|
||||
|
||||
####Example
|
||||
```lua
|
||||
ok, json = pcall(cjson.encode, {key="value"})
|
||||
if ok then
|
||||
print(json)
|
||||
else
|
||||
print("failed to encode!")
|
||||
end
|
||||
```
|
||||
|
||||
## cjson.decode()
|
||||
|
||||
Decode a JSON string to a Lua table. For details see the [documentation of the original Lua library](http://kyne.com.au/~mark/software/lua-cjson-manual.html#_decode).
|
||||
|
||||
####Syntax
|
||||
`cjson.decode(str)`
|
||||
|
||||
####Parameters
|
||||
`str` JSON string to decode
|
||||
|
||||
####Returns
|
||||
Lua table representation of the JSON data
|
||||
|
||||
####Example
|
||||
```lua
|
||||
t = cjson.decode('{"key":"value"}')
|
||||
for k,v in pairs(t) do print(k,v) end
|
||||
```
|
||||
|
|
|
@@ -0,0 +1,234 @@
|
|||
# SJSON Module
|
||||
| Since | Origin / Contributor | Maintainer | Source |
|
||||
| :----- | :-------------------- | :---------- | :------ |
|
||||
| 2017-02-01 | [Philip Gladstone](https://github.com/pjsg) | [Philip Gladstone](https://github.com/pjsg) | [sjson](../../../app/modules/sjson.c) |
|
||||
|
||||
The JSON support module. Allows encoding and decoding to/from JSON.
|
||||
|
||||
Please note that nested tables can require a lot of memory to encode. To catch out-of-memory errors, use `pcall()`.
|
||||
|
||||
This module uses the streaming JSON library [jsonsl](https://github.com/mnunberg/jsonsl) to parse the string.
|
||||
|
||||
This module can be used in two ways. The simpler way is to use it as a direct drop-in for cjson (you can just do `_G.cjson = sjson`).
|
||||
The more advanced approach is to use the streaming interface. This allows encoding and decoding of significantly larger objects.
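A minimal sketch of the drop-in usage mentioned above (assuming the firmware was built with the `sjson` module):

```lua
-- Alias the module so existing cjson-based code runs unchanged.
_G.cjson = sjson

-- Legacy code keeps working:
local t = cjson.decode('{"key":"value"}')
print(t.key)  -- value
```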
|
||||
|
||||
The handling of JSON null is as follows:
|
||||
|
||||
- By default, the decoder represents null as sjson.NULL (which is a userdata object). This is the behavior of cjson.
|
||||
- The encoder always converts any userdata object into null.
|
||||
- Optionally, a single string can be specified in both the encoder and decoder. This string will be used in encoding/decoding to represent json null values. This string should not be used
|
||||
anywhere else in your data structures. A suitable value might be `"\0"`.
|
||||
|
||||
When encoding a lua object, if a function is found, then it is invoked (with no arguments) and the (single) returned value is encoded in the place of the function.
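As a rough illustration of the null handling and function-value behaviour described above, a minimal sketch (the output shown in the comments is indicative):

```lua
-- Decoding: by default a JSON null becomes the sjson.NULL userdata.
local a = sjson.decode('{"x":null}')
print(a.x == sjson.NULL)                                   -- true

-- Decoding with a sentinel string standing in for null.
local b = sjson.decode('{"x":null}', {null="\0"})
print(b.x == "\0")                                         -- true

-- Encoding: a function value is invoked and its single return value is encoded.
print(sjson.encode({answer = function() return 42 end}))   -- {"answer":42}
```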
|
||||
|
||||
## sjson.encoder()
|
||||
|
||||
This creates an encoder object that can convert a Lua object into a JSON encoded string.
|
||||
|
||||
####Syntax
|
||||
`sjson.encoder(table [, opts])`
|
||||
|
||||
####Parameters
|
||||
- `table` data to encode
|
||||
- `opts` an optional table of options. The possible entries are:
|
||||
- `depth` the maximum encoding depth needed to encode the table. The default is 20 which should be enough for nearly all situations.
|
||||
- `null` the string value to treat as null.
|
||||
|
||||
####Returns
|
||||
A `sjson.encoder` object.
|
||||
|
||||
## sjson.encoder:read
|
||||
|
||||
This gets a chunk of JSON encoded data.
|
||||
|
||||
####Syntax
|
||||
`encoder:read([size])`
|
||||
|
||||
####Parameters
|
||||
- `size` an optional value for the number of bytes to return. The default is 1024.
|
||||
|
||||
####Returns
|
||||
A string of up to `size` bytes, or `nil` if the encoding is complete and all data has been returned.
|
||||
|
||||
#### Example
|
||||
The following example prints out (in 64 byte chunks) a JSON encoded string containing the first 4k of every file in the file system. The total string
|
||||
can be bigger than the total amount of memory on the NodeMCU.
|
||||
```lua
|
||||
function files()
|
||||
result = {}
|
||||
for k,v in pairs(file.list()) do
|
||||
result[k] = function() return file.open(k):read(4096) end
|
||||
end
|
||||
return result
|
||||
end
|
||||
|
||||
local encoder = sjson.encoder(files())
|
||||
|
||||
while true do
|
||||
data = encoder:read(64)
|
||||
if not data then
|
||||
break
|
||||
end
|
||||
print(data)
|
||||
end
|
||||
```
|
||||
|
||||
## sjson.encode()
|
||||
|
||||
Encode a Lua table to a JSON string. This is a convenience method provided for backwards compatibility with `cjson`.
|
||||
|
||||
####Syntax
|
||||
`sjson.encode(table [, opts])`
|
||||
|
||||
####Parameters
|
||||
- `table` data to encode
|
||||
- `opts` an optional table of options. The possible entries are:
|
||||
- `depth` the maximum encoding depth needed to encode the table. The default is 20 which should be enough for nearly all situations.
|
||||
- `null` the string value to treat as null.
|
||||
|
||||
####Returns
|
||||
JSON string
|
||||
|
||||
####Example
|
||||
```lua
|
||||
ok, json = pcall(sjson.encode, {key="value"})
|
||||
if ok then
|
||||
print(json)
|
||||
else
|
||||
print("failed to encode!")
|
||||
end
|
||||
```
|
||||
|
||||
## sjson.decoder()
|
||||
|
||||
This makes a decoder object that can parse a JSON encoded string into a lua object. A metatable can be specified for all the newly created lua tables. This allows
|
||||
you to handle each value as it is inserted into each table (by implementing the `__newindex` method).
|
||||
|
||||
####Syntax
|
||||
`sjson.decoder([opts])`
|
||||
|
||||
#### Parameters
|
||||
- `opts` an optional table of options. The possible entries are:
|
||||
- `depth` the maximum nesting depth of the JSON that can be decoded. The default is 20 which should be enough for nearly all situations.
|
||||
- `null` the string value to treat as null.
|
||||
- `metatable` a table to use as the metatable for all the new tables in the returned object.
|
||||
|
||||
#### Returns
|
||||
A `sjson.decoder` object
|
||||
|
||||
####Metatable
|
||||
|
||||
There are two principal methods that are invoked in the metatable (if it is present).
|
||||
|
||||
- `__newindex` this is the standard method invoked whenever a new table element is created.
|
||||
- `checkpath` this is invoked (if defined) whenever a new table is created. It is invoked with two arguments:
|
||||
- `table` this is the newly created table
|
||||
- `path` this is a list of the keys from the root.
|
||||
It must return `true` if this object is wanted in the result, or `false` otherwise.
|
||||
|
||||
For example, when decoding `{ "foo": [1, 2, []] }` the checkpath will be invoked as follows:
|
||||
|
||||
- `checkpath({}, {})` the `table` argument is the object that will correspond with the value of the JSON object.
|
||||
- `checkpath({}, {"foo"})` the `table` argument is the object that will correspond with the value of the outer JSON array.
|
||||
- `checkpath({}, {"foo", 3})` the `table` argument is the object that will correspond to the empty inner JSON array.
|
||||
|
||||
When the `checkpath` method is called, the metatable has already been associated with the new table. Thus the `checkpath` method can replace it
|
||||
if desired. For example, if you are decoding `{ "foo": { "bar": [1,2,3,4], "cat": [5] } }` and, for some reason, you did not want to capture the
|
||||
value of the `"bar"` key, then there are various ways to do this:
|
||||
|
||||
* In the `__newindex` metamethod, just check for the value of the key and skip the `rawset` if the key is `"bar"`. This only works if you want to skip all the
|
||||
`"bar"` keys.
|
||||
|
||||
* In the `checkpath` method, if the path is `["foo"]`, then return `false`.
|
||||
|
||||
* Use the following `checkpath`: `checkpath=function(tab, path) tab['__json_path'] = path return true end` This will save the path in each constructed object. Now the `__newindex` method can perform more sophisticated filtering.
|
||||
|
||||
The reason for being able to filter is that it enables processing of very large JSON responses on a memory constrained platform. Many APIs return lots of information
|
||||
which would exceed the memory budget of the platform. For example, `https://api.github.com/repos/nodemcu/nodemcu-firmware/contents` is over 13kB, and yet, if
|
||||
you only need the `download_url` keys, then the total size is around 600B. This can be handled with a simple `__newindex` method.
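Below is a sketch (not taken from the module itself) of such a filter: the metatable keeps nested tables and `download_url` values and drops everything else. The sample input is a shortened, hypothetical stand-in for the GitHub response mentioned above.

```lua
local decoder = sjson.decoder({metatable = {
  __newindex = function(t, k, v)
    -- Keep sub-tables (so the array structure survives) and the one key we want.
    if k == "download_url" or type(v) == "table" then
      rawset(t, k, v)
    end
  end
}})

-- In practice the JSON would arrive in chunks, e.g. from an HTTP client callback.
decoder:write('[{"name":"README.md","size":4321,')
decoder:write('"download_url":"https://example.org/README.md"}]')

for i, entry in ipairs(decoder:result()) do
  print(i, entry.download_url)   -- only download_url survived the filter
end
```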
|
||||
|
||||
## sjson.decoder:write
|
||||
|
||||
This provides more data to be parsed into the lua object.
|
||||
|
||||
####Syntax
|
||||
`decoder:write(string)`
|
||||
|
||||
####Parameters
|
||||
|
||||
- `string` the next piece of JSON encoded data
|
||||
|
||||
####Returns
|
||||
The constructed lua object or `nil` if the decode is not yet complete.
|
||||
|
||||
####Errors
|
||||
If a parse error occurs during this decode, then an error is thrown and the parse is aborted. The object cannot be used again.
|
||||
|
||||
|
||||
## sjson.decoder:result
|
||||
|
||||
This gets the decoded lua object, or raises an error if the decode is not yet complete. This can be called multiple times and will return the
|
||||
same object each time.
|
||||
|
||||
####Syntax
|
||||
`decoder:result()`
|
||||
|
||||
####Errors
|
||||
If the decode is not complete, then an error is thrown.
|
||||
|
||||
####Example
|
||||
```lua
|
||||
local decoder = sjson.decoder()
|
||||
|
||||
decoder:write("[10, 1")
|
||||
decoder:write("1")
|
||||
decoder:write(", \"foo\"]")
|
||||
|
||||
for k,v in pairs(decoder:result()) do
|
||||
print (k, v)
|
||||
end
|
||||
```
|
||||
|
||||
The next example demonstrates the use of the metatable argument. In this case it just prints out the operations, but it could suppress the assignment
|
||||
altogether if desired.
|
||||
|
||||
```lua
|
||||
local decoder = sjson.decoder({metatable=
|
||||
{__newindex=function(t,k,v) print("Setting '" .. k .. "' = '" .. tostring(v) .."'")
|
||||
rawset(t,k,v) end}})
|
||||
|
||||
decoder:write('[1, 2, {"foo":"bar"}]')
|
||||
|
||||
```
|
||||
|
||||
|
||||
## sjson.decode()
|
||||
|
||||
Decode a JSON string to a Lua table. This is a convenience method provided for backwards compatibility with `cjson`.
|
||||
|
||||
####Syntax
|
||||
`sjson.decode(str[, opts])`
|
||||
|
||||
####Parameters
|
||||
- `str` JSON string to decode
|
||||
- `opts` an optional table of options. The possible entries are:
|
||||
- `depth` the maximum nesting depth of the JSON that can be decoded. The default is 20 which should be enough for nearly all situations.
|
||||
- `null` the string value to treat as null.
|
||||
- `metatable` a table to use as the metatable for all the new tables in the returned object. See the metatable section in the description of `sjson.decoder()` above.
|
||||
|
||||
####Returns
|
||||
Lua table representation of the JSON data
|
||||
|
||||
####Errors
|
||||
If the string is not valid JSON, then an error is thrown.
|
||||
|
||||
####Example
|
||||
```lua
|
||||
t = sjson.decode('{"key":"value"}')
|
||||
for k,v in pairs(t) do print(k,v) end
|
||||
```
|
||||
|
||||
## Constants
|
||||
|
||||
There is one constant -- `sjson.NULL` -- which is used in lua structures to represent the presence of a JSON null.
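A small illustrative sketch, assuming the encoder's userdata-to-null behaviour described earlier:

```lua
-- sjson.NULL survives a decode/encode round trip as a JSON null.
local t = sjson.decode('{"value":null}')
print(t.value == sjson.NULL)   -- true
print(sjson.encode(t))         -- {"value":null}
```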
|
||||
|
|
@@ -5,7 +5,7 @@ MEMORY
|
|||
dport0_0_seg : org = 0x3FF00000, len = 0x10
|
||||
dram0_0_seg : org = 0x3FFE8000, len = 0x14000
|
||||
iram1_0_seg : org = 0x40100000, len = 0x8000
|
||||
irom0_0_seg : org = 0x40210000, len = 0xD0000
|
||||
irom0_0_seg : org = 0x40210000, len = 0xE0000
|
||||
}
|
||||
|
||||
PHDRS
|
||||
|
|
|
@@ -18,7 +18,7 @@ end
|
|||
-- payload(json): {"cmd":xxx,"content":xxx}
|
||||
function topic1func(m,pl)
|
||||
print("get1: "..pl)
|
||||
local pack = cjson.decode(pl)
|
||||
local pack = sjson.decode(pl)
|
||||
if pack.content then
|
||||
if pack.cmd == "open" then file.open(pack.content,"w+")
|
||||
elseif pack.cmd == "write" then file.write(pack.content)
|
||||
|
|
|
@@ -1,4 +1,4 @@
|
|||
-- Somfy module example (beside somfy module requires also CJSON module)
|
||||
-- Somfy module example (besides the somfy module it also requires the SJSON module)
|
||||
-- The rolling code number is stored in the file somfy.cfg. A cached write of the somfy.cfg file is implemented in order to reduce the number of writes to the EEPROM memory. Together with the logic of the file module it should allow long-lasting operation.
|
||||
|
||||
config_file = "somfy."
|
||||
|
@@ -43,7 +43,7 @@ function readconfig()
|
|||
end
|
||||
if not ln then ln = "{}" end
|
||||
print("Configuration: "..ln)
|
||||
config = cjson.decode(ln)
|
||||
config = sjson.decode(ln)
|
||||
config_saved = deepcopy(config)
|
||||
end
|
||||
|
||||
|
@@ -52,7 +52,7 @@ function writeconfighard()
|
|||
file.remove(config_file.."bak")
|
||||
file.rename(config_file.."cfg", config_file.."bak")
|
||||
file.open(config_file.."cfg", "w+")
|
||||
local ok, cfg = pcall(cjson.encode, config)
|
||||
local ok, cfg = pcall(sjson.encode, config)
|
||||
if ok then
|
||||
file.writeline(cfg)
|
||||
else
|
||||
|
@@ -68,8 +68,8 @@ function writeconfig()
|
|||
local savenow = false
|
||||
local savelater = false
|
||||
|
||||
--print("Config: "..cjson.encode(config))
|
||||
--print("Config saved: "..cjson.encode(config))
|
||||
--print("Config: "..sjson.encode(config))
|
||||
--print("Config saved: "..sjson.encode(config))
|
||||
|
||||
local count = 0
|
||||
for _ in pairs(config_saved) do count = count + 1 end
|
||||
|
@@ -134,7 +134,7 @@ end
|
|||
--======================================================================================================--
|
||||
if not config then readconfig() end
|
||||
if #config == 0 then -- somfy.cfg does not exist
|
||||
config = cjson.decode([[{"window1":{"rc":1,"address":123},"window2":{"rc":1,"address":124}}]])
|
||||
config = sjson.decode([[{"window1":{"rc":1,"address":123},"window2":{"rc":1,"address":124}}]])
|
||||
config_saved = deepcopy(config)
|
||||
end
|
||||
down('window1',
|
||||
|
|
|
@@ -72,6 +72,7 @@ pages:
|
|||
- 'rtcmem': 'en/modules/rtcmem.md'
|
||||
- 'rtctime': 'en/modules/rtctime.md'
|
||||
- 'sigma delta': 'en/modules/sigma-delta.md'
|
||||
- 'sjson': 'en/modules/sjson.md'
|
||||
- 'sntp': 'en/modules/sntp.md'
|
||||
- 'somfy': 'en/modules/somfy.md'
|
||||
- 'spi': 'en/modules/spi.md'
|
||||
|
|