Input buildinfo: https://buildinfos.debian.net/buildinfo-pool/l/lua-cjson/lua-cjson_2.1.0+dfsg-2.1_amd64.buildinfo
Use metasnap for getting required timestamps
New buildinfo file: /tmp/lua-cjson-2.1.0+dfsg-2.13kfm0w82/lua-cjson_2.1.0+dfsg-2.1_amd64.buildinfo
Get source package info: lua-cjson=2.1.0+dfsg-2.1
Source URL: http://snapshot.notset.fr/mr/package/lua-cjson/2.1.0+dfsg-2.1/srcfiles?fileinfo=1
env -i PATH=/usr/sbin:/usr/bin:/sbin:/bin TMPDIR=/tmp mmdebstrap --arch=amd64 --include=autoconf=2.69-11 automake=1:1.15.1-3 autopoint=0.19.8.1-4 autotools-dev=20171216.1 base-files=10.1 base-passwd=3.5.44 bash=4.4-5 binutils=2.30-1 binutils-common=2.30-1 binutils-x86-64-linux-gnu=2.30-1 bsdmainutils=11.1.2 bsdutils=1:2.30.2-0.3 build-essential=12.4 bzip2=1.0.6-8.1 coreutils=8.28-1 cpp=4:7.2.0-1d1 cpp-7=7.3.0-1 dash=0.5.8-2.10 dctrl-tools=2.24-2+b1 debconf=1.5.65 debhelper=11.1.4 debianutils=4.8.4 dh-autoreconf=16 dh-lua=24 dh-strip-nondeterminism=0.040-1 diffutils=1:3.6-1 dpkg=1.19.0.5 dpkg-dev=1.19.0.5 e2fslibs=1.43.8-2 e2fsprogs=1.43.8-2 fdisk=2.30.2-0.3 file=1:5.32-1 findutils=4.6.0+git+20170828-2 g++=4:7.2.0-1d1 g++-7=7.3.0-1 gcc=4:7.2.0-1d1 gcc-7=7.3.0-1 gcc-7-base=7.3.0-1 gettext=0.19.8.1-4 gettext-base=0.19.8.1-4 grep=3.1-2 groff-base=1.22.3-9 gzip=1.6-5+b1 hostname=3.20 init-system-helpers=1.51 intltool-debian=0.35.0+20060710.4 libacl1=2.2.52-3+b1 libarchive-zip-perl=1.60-1 libasan4=7.3.0-1 libatomic1=7.3.0-1 libattr1=1:2.4.47-2+b2 libaudit-common=1:2.8.2-1 libaudit1=1:2.8.2-1 libbinutils=2.30-1 libblkid1=2.30.2-0.3 libbsd0=0.8.7-1 libbz2-1.0=1.0.6-8.1 libc-bin=2.26-6 libc-dev-bin=2.26-6 libc6=2.26-6 libc6-dev=2.26-6 libcap-ng0=0.7.7-3.1+b1 libcc1-0=7.3.0-1 libcilkrts5=7.3.0-1 libcomerr2=1.43.8-2 libcroco3=0.6.12-2 libdb5.3=5.3.28-13.1+b1 libdebconfclient0=0.239 libdpkg-perl=1.19.0.5 libfdisk1=2.30.2-0.3 libffi6=3.2.1-8 libfile-find-rule-perl=0.34-1 libfile-stripnondeterminism-perl=0.040-1 libgcc-7-dev=7.3.0-1 libgcc1=1:7.3.0-1 libgcrypt20=1.8.1-4 libgdbm5=1.14.1-2 libglib2.0-0=2.54.3-2 libgmp10=2:6.1.2+dfsg-2 libgomp1=7.3.0-1 libgpg-error0=1.27-5 libicu57=57.1-8 libisl15=0.18-1 libitm1=7.3.0-1 liblsan0=7.3.0-1 liblua5.1-0=5.1.5-8.1+b2 liblua5.1-0-dev=5.1.5-8.1+b2 liblua5.2-0=5.2.4-1.1+b2 liblua5.2-dev=5.2.4-1.1+b2 liblua5.3-0=5.3.3-1 liblua5.3-dev=5.3.3-1 libluasandbox-bin=1.2.1-4 libluasandbox-dev=1.2.1-4 libluasandbox0=1.2.1-4 liblz4-1=0.0~r131-2+b1 liblzma5=5.2.2-1.3 libmagic-mgc=1:5.32-1 libmagic1=1:5.32-1 libmount1=2.30.2-0.3 libmpc3=1.1.0-1 libmpfr6=4.0.0-7 libmpx2=7.3.0-1 libncurses5=6.0+20171125-1 libncursesw5=6.0+20171125-1 libnumber-compare-perl=0.03-1 libpam-modules=1.1.8-3.6 libpam-modules-bin=1.1.8-3.6 libpam-runtime=1.1.8-3.6 libpam0g=1.1.8-3.6 libpcre3=2:8.39-9 libperl5.26=5.26.1-4+b1 libpipeline1=1.5.0-1 libquadmath0=7.3.0-1 libreadline-dev=7.0-3 libreadline7=7.0-3 libseccomp2=2.3.1-2.1 libselinux1=2.7-2+b1 libsigsegv2=2.11-1 libsmartcols1=2.30.2-0.3 libss2=1.43.8-2 libstdc++-7-dev=7.3.0-1 libstdc++6=7.3.0-1 libsystemd0=237-1 libtext-glob-perl=0.10-1 libtimedate-perl=2.3000-2 libtinfo-dev=6.0+20171125-1 libtinfo5=6.0+20171125-1 libtool=2.4.6-2 libtool-bin=2.4.6-2 libtsan0=7.3.0-1 libubsan0=7.3.0-1 libudev1=237-1 libunistring2=0.9.8-1 libuuid1=2.30.2-0.3 libxml2=2.9.4+dfsg1-6.1 linux-libc-dev=4.14.13-1 login=1:4.5-1 lua5.1=5.1.5-8.1+b2 lua5.2=5.2.4-1.1+b2 lua5.3=5.3.3-1 m4=1.4.18-1 make=4.1-9.1 man-db=2.8.0-1 mawk=1.3.3-17+b3 ncurses-base=6.0+20171125-1 ncurses-bin=6.0+20171125-1 patch=2.7.5-1+b2 perl=5.26.1-4+b1 perl-base=5.26.1-4+b1 perl-modules-5.26=5.26.1-4
pkg-config=0.29-4+b1 po-debconf=1.0.20 readline-common=7.0-3 sed=4.4-2 sysvinit-utils=2.88dsf-59.10 tar=1.29b-2 util-linux=2.30.2-0.3 xz-utils=5.2.2-1.3 zlib1g=1:1.2.8.dfsg-5 --variant=apt --aptopt=Acquire::Check-Valid-Until "false" --aptopt=Acquire::http::Dl-Limit "1000"; --aptopt=Acquire::https::Dl-Limit "1000"; --aptopt=Acquire::Retries "5"; --aptopt=APT::Get::allow-downgrades "true"; --keyring=/usr/share/keyrings/ --essential-hook=chroot "$1" sh -c "apt-get --yes install fakeroot util-linux" --essential-hook=copy-in /usr/share/keyrings/debian-archive-bullseye-automatic.gpg /usr/share/keyrings/debian-archive-bullseye-security-automatic.gpg /usr/share/keyrings/debian-archive-bullseye-stable.gpg /usr/share/keyrings/debian-archive-buster-automatic.gpg /usr/share/keyrings/debian-archive-buster-security-automatic.gpg /usr/share/keyrings/debian-archive-buster-stable.gpg /usr/share/keyrings/debian-archive-keyring.gpg /usr/share/keyrings/debian-archive-removed-keys.gpg /usr/share/keyrings/debian-archive-stretch-automatic.gpg /usr/share/keyrings/debian-archive-stretch-security-automatic.gpg /usr/share/keyrings/debian-archive-stretch-stable.gpg /usr/share/keyrings/debian-ports-archive-keyring-removed.gpg /usr/share/keyrings/debian-ports-archive-keyring.gpg /usr/share/keyrings/debian-keyring.gpg /etc/apt/trusted.gpg.d/ --essential-hook=chroot "$1" sh -c "rm /etc/apt/sources.list && echo 'deb http://snapshot.notset.fr/archive/debian/20210814T212851Z/ bookworm main deb-src http://snapshot.notset.fr/archive/debian/20210814T212851Z/ bookworm main deb http://snapshot.notset.fr/archive/debian/20180205T095626Z/ unstable main' >> /etc/apt/sources.list && apt-get update" --customize-hook=chroot "$1" useradd --no-create-home -d /nonexistent -p "" builduser -s /bin/bash --customize-hook=chroot "$1" env sh -c "apt-get source --only-source -d lua-cjson=2.1.0+dfsg-2.1 && mkdir -p /build/lua-cjson-h7mDhj && dpkg-source --no-check -x /*.dsc /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg && chown -R builduser:builduser /build/lua-cjson-h7mDhj" --customize-hook=chroot "$1" env --unset=TMPDIR runuser builduser -c "cd /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg && env DEB_BUILD_OPTIONS="parallel=4" LC_ALL="POSIX" SOURCE_DATE_EPOCH="1516479336" dpkg-buildpackage -uc -a amd64 --build=any" --customize-hook=sync-out /build/lua-cjson-h7mDhj /tmp/lua-cjson-2.1.0+dfsg-2.13kfm0w82 buster /dev/null deb http://snapshot.notset.fr/archive/debian/20180205T095626Z unstable main I: automatically chosen mode: root I: chroot architecture amd64 is equal to the host's architecture I: automatically chosen format: tar I: using /tmp/mmdebstrap.k8neI7XJQK as tempdir I: running apt-get update... I: downloading packages with apt... I: extracting archives... I: installing essential packages... I: running --essential-hook in shell: sh -c 'chroot "$1" sh -c "apt-get --yes install fakeroot util-linux"' exec /tmp/mmdebstrap.k8neI7XJQK Reading package lists... Building dependency tree... util-linux is already the newest version (2.30.2-0.3). The following NEW packages will be installed: fakeroot libfakeroot 0 upgraded, 2 newly installed, 0 to remove and 0 not upgraded. Need to get 132 kB of archives. After this operation, 373 kB of additional disk space will be used. 
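For readability, the --essential-hook in the invocation above that rewrites the chroot's apt configuration is easier to follow unfolded: its single echo argument carries embedded newlines and ends up writing three snapshot entries. A rough equivalent of what the hook does inside the chroot (a sketch, not the literal hook text):

  cat > /etc/apt/sources.list <<'EOF'
  deb http://snapshot.notset.fr/archive/debian/20210814T212851Z/ bookworm main
  deb-src http://snapshot.notset.fr/archive/debian/20210814T212851Z/ bookworm main
  deb http://snapshot.notset.fr/archive/debian/20180205T095626Z/ unstable main
  EOF
  apt-get update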
Get:1 http://snapshot.notset.fr/archive/debian/20180205T095626Z unstable/main amd64 libfakeroot amd64 1.22-2 [45.9 kB] Get:2 http://snapshot.notset.fr/archive/debian/20180205T095626Z unstable/main amd64 fakeroot amd64 1.22-2 [85.7 kB] debconf: delaying package configuration, since apt-utils is not installed Fetched 132 kB in 0s (909 kB/s) Selecting previously unselected package libfakeroot:amd64. (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 4424 files and directories currently installed.) Preparing to unpack .../libfakeroot_1.22-2_amd64.deb ... Unpacking libfakeroot:amd64 (1.22-2) ... Selecting previously unselected package fakeroot. Preparing to unpack .../fakeroot_1.22-2_amd64.deb ... Unpacking fakeroot (1.22-2) ... Processing triggers for libc-bin (2.26-6) ... Setting up libfakeroot:amd64 (1.22-2) ... Setting up fakeroot (1.22-2) ... update-alternatives: using /usr/bin/fakeroot-sysv to provide /usr/bin/fakeroot (fakeroot) in auto mode Processing triggers for libc-bin (2.26-6) ... I: running special hook: copy-in /usr/share/keyrings/debian-archive-bullseye-automatic.gpg /usr/share/keyrings/debian-archive-bullseye-security-automatic.gpg /usr/share/keyrings/debian-archive-bullseye-stable.gpg /usr/share/keyrings/debian-archive-buster-automatic.gpg /usr/share/keyrings/debian-archive-buster-security-automatic.gpg /usr/share/keyrings/debian-archive-buster-stable.gpg /usr/share/keyrings/debian-archive-keyring.gpg /usr/share/keyrings/debian-archive-removed-keys.gpg /usr/share/keyrings/debian-archive-stretch-automatic.gpg /usr/share/keyrings/debian-archive-stretch-security-automatic.gpg /usr/share/keyrings/debian-archive-stretch-stable.gpg /usr/share/keyrings/debian-ports-archive-keyring-removed.gpg /usr/share/keyrings/debian-ports-archive-keyring.gpg /usr/share/keyrings/debian-keyring.gpg /etc/apt/trusted.gpg.d/ I: running --essential-hook in shell: sh -c 'chroot "$1" sh -c "rm /etc/apt/sources.list && echo 'deb http://snapshot.notset.fr/archive/debian/20210814T212851Z/ bookworm main deb-src http://snapshot.notset.fr/archive/debian/20210814T212851Z/ bookworm main deb http://snapshot.notset.fr/archive/debian/20180205T095626Z/ unstable main' >> /etc/apt/sources.list && apt-get update"' exec /tmp/mmdebstrap.k8neI7XJQK Get:1 http://snapshot.notset.fr/archive/debian/20210814T212851Z bookworm InRelease [81.6 kB] Hit:2 http://snapshot.notset.fr/archive/debian/20180205T095626Z unstable InRelease Ign:3 http://snapshot.notset.fr/archive/debian/20210814T212851Z bookworm/main Sources Ign:4 http://snapshot.notset.fr/archive/debian/20210814T212851Z bookworm/main amd64 Packages Ign:3 http://snapshot.notset.fr/archive/debian/20210814T212851Z bookworm/main Sources Ign:4 http://snapshot.notset.fr/archive/debian/20210814T212851Z bookworm/main amd64 Packages Get:3 http://snapshot.notset.fr/archive/debian/20210814T212851Z bookworm/main Sources [11.4 MB] Get:4 http://snapshot.notset.fr/archive/debian/20210814T212851Z bookworm/main amd64 Packages [11.1 MB] Fetched 22.6 MB in 20s (1149 kB/s) Reading package 
lists... I: installing remaining packages inside the chroot... I: running --customize-hook in shell: sh -c 'chroot "$1" useradd --no-create-home -d /nonexistent -p "" builduser -s /bin/bash' exec /tmp/mmdebstrap.k8neI7XJQK I: running --customize-hook in shell: sh -c 'chroot "$1" env sh -c "apt-get source --only-source -d lua-cjson=2.1.0+dfsg-2.1 && mkdir -p /build/lua-cjson-h7mDhj && dpkg-source --no-check -x /*.dsc /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg && chown -R builduser:builduser /build/lua-cjson-h7mDhj"' exec /tmp/mmdebstrap.k8neI7XJQK Reading package lists... NOTICE: 'lua-cjson' packaging is maintained in the 'Git' version control system at: git://anonscm.debian.org/collab-maint/liblua-cjson.git Please use: git clone git://anonscm.debian.org/collab-maint/liblua-cjson.git to retrieve the latest (possibly unreleased) updates to the package. Need to get 82.5 kB of source archives. Get:1 http://snapshot.notset.fr/archive/debian/20210814T212851Z bookworm/main lua-cjson 2.1.0+dfsg-2.1 (dsc) [2077 B] Get:2 http://snapshot.notset.fr/archive/debian/20210814T212851Z bookworm/main lua-cjson 2.1.0+dfsg-2.1 (tar) [76.8 kB] Get:3 http://snapshot.notset.fr/archive/debian/20210814T212851Z bookworm/main lua-cjson 2.1.0+dfsg-2.1 (diff) [3636 B] Fetched 82.5 kB in 0s (654 kB/s) Download complete and in download only mode W: Download is performed unsandboxed as root as file 'lua-cjson_2.1.0+dfsg-2.1.dsc' couldn't be accessed by user '_apt'. - pkgAcquire::Run (13: Permission denied) dpkg-source: info: extracting lua-cjson in /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg dpkg-source: info: unpacking lua-cjson_2.1.0+dfsg.orig.tar.gz dpkg-source: info: unpacking lua-cjson_2.1.0+dfsg-2.1.debian.tar.xz dpkg-source: info: applying disable-utf16-test dpkg-source: info: applying lua5.2-function-names I: running --customize-hook in shell: sh -c 'chroot "$1" env --unset=TMPDIR runuser builduser -c "cd /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg && env DEB_BUILD_OPTIONS="parallel=4" LC_ALL="POSIX" SOURCE_DATE_EPOCH="1516479336" dpkg-buildpackage -uc -a amd64 --build=any"' exec /tmp/mmdebstrap.k8neI7XJQK dpkg-buildpackage: info: source package lua-cjson dpkg-buildpackage: info: source version 2.1.0+dfsg-2.1 dpkg-buildpackage: info: source distribution unstable dpkg-buildpackage: info: source changed by Aurelien Jarno dpkg-source --before-build lua-cjson-2.1.0+dfsg dpkg-buildpackage: info: host architecture amd64 fakeroot debian/rules clean dh clean --buildsystem=lua --with lua dh: Compatibility levels before 9 are deprecated (level 7 in use) dh_auto_clean -O--buildsystem=lua dh_auto_clean: Compatibility levels before 9 are deprecated (level 7 in use) make --no-print-directory -f /usr/share/dh-lua/make/dh-lua.Makefile.multiple clean Making target clean for debian/lua5.1.dh-lua.conf # fix for leftovers of dh-lua < 14 Target clean made Making target clean for debian/lua5.2.dh-lua.conf # fix for leftovers of dh-lua < 14 Target clean made debian/rules override_dh_clean make[1]: Entering directory '/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg' dh_clean dh_clean: Compatibility levels before 9 are deprecated (level 7 in use) rm -f debian/trash make[1]: Leaving directory '/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg' debian/rules build-arch dh build-arch --buildsystem=lua --with lua dh: Compatibility levels before 9 are deprecated (level 7 in use) dh_update_autotools_config -a -O--buildsystem=lua dh_auto_configure -a -O--buildsystem=lua dh_auto_configure: Compatibility levels before 9 are deprecated (level 7 in use) 
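The two --customize-hook commands in the invocation above reduce to a short sequence; condensed here as a standalone sketch, assuming a chroot that already has the pinned package set and snapshot sources from this log (the hook itself unpacks from /*.dsc into /build/lua-cjson-h7mDhj and runs the build as builduser):

  # fetch and unpack the exact source revision being rebuilt
  apt-get source --only-source -d lua-cjson=2.1.0+dfsg-2.1
  dpkg-source --no-check -x lua-cjson_2.1.0+dfsg-2.1.dsc lua-cjson-2.1.0+dfsg
  # rebuild with the environment pinned by the buildinfo
  cd lua-cjson-2.1.0+dfsg
  env DEB_BUILD_OPTIONS="parallel=4" LC_ALL="POSIX" SOURCE_DATE_EPOCH="1516479336" \
      dpkg-buildpackage -uc -a amd64 --build=any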
make --no-print-directory -f /usr/share/dh-lua/make/dh-lua.Makefile.multiple configure Making target configure for debian/lua5.1.dh-lua.conf # .install Filling in debian/lua-cjson.install using /usr/share/dh-lua/template/lib.install.in Adding new line: usr/lib/x86_64-linux-gnu/lua/5.1/cjson.so Adding new line: usr/lib/x86_64-linux-gnu/liblua5.1-cjson.so.* Adding new line: usr/share/lua/5.1/cjson/util.lua Filling in debian/lua-cjson-dev.install using /usr/share/dh-lua/template/dev.install.in Adding new line: usr/lib/x86_64-linux-gnu/liblua5.1-cjson.so Adding new line: usr/lib/x86_64-linux-gnu/liblua5.1-cjson.a Adding new line: usr/lib/x86_64-linux-gnu/pkgconfig/lua5.1-cjson.pc Adding new line: usr/include/lua5.1/lua-cjson.h # lua_versions Filling in debian/lua_versions Adding new line: 5.1 Target configure made Making target configure for debian/lua5.2.dh-lua.conf # .install Filling in debian/lua-cjson.install using /usr/share/dh-lua/template/lib.install.in Adding new line: usr/lib/x86_64-linux-gnu/lua/5.2/cjson.so Adding new line: usr/lib/x86_64-linux-gnu/liblua5.2-cjson.so.* Adding new line: usr/share/lua/5.2/cjson/util.lua Filling in debian/lua-cjson-dev.install using /usr/share/dh-lua/template/dev.install.in Adding new line: usr/lib/x86_64-linux-gnu/liblua5.2-cjson.so Adding new line: usr/lib/x86_64-linux-gnu/liblua5.2-cjson.a Adding new line: usr/lib/x86_64-linux-gnu/pkgconfig/lua5.2-cjson.pc Adding new line: usr/include/lua5.2/lua-cjson.h # lua_versions Filling in debian/lua_versions Adding new line: 5.2 Target configure made dh_auto_build -a -O--buildsystem=lua dh_auto_build: Compatibility levels before 9 are deprecated (level 7 in use) make --no-print-directory -f /usr/share/dh-lua/make/dh-lua.Makefile.multiple build Making target build for debian/lua5.1.dh-lua.conf libtool --silent --tag=CC --mode=compile cc -c -g -O2 -fdebug-prefix-map=/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -I/usr//include/lua5.1 -Wall -Wextra -o /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/lua_cjson.lo lua_cjson.c lua_cjson.c: In function 'json_append_string': lua_cjson.c:477:19: warning: comparison between signed and unsigned integer expressions [-Wsign-compare] for (i = 0; i < len; i++) { ^ In file included from lua_cjson.c:47:0: lua_cjson.c: At top level: fpconv.h:15:20: warning: inline function 'fpconv_init' declared but never defined extern inline void fpconv_init(); ^~~~~~~~~~~ lua_cjson.c: In function 'json_append_data': lua_cjson.c:689:12: warning: this statement may fall through [-Wimplicit-fallthrough=] if (lua_touserdata(l, -1) == NULL) { ^ lua_cjson.c:693:5: note: here default: ^~~~~~~ libtool --silent --tag=CC --mode=compile cc -c -g -O2 -fdebug-prefix-map=/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -I/usr//include/lua5.1 -Wall -Wextra -o /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/fpconv.lo fpconv.c libtool --silent --tag=CC --mode=compile cc -c -g -O2 -fdebug-prefix-map=/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -I/usr//include/lua5.1 -Wall -Wextra -o /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/strbuf.lo strbuf.c libtool --silent --tag=CC --mode=link cc \ -rpath /usr//lib/x86_64-linux-gnu -version-info 0:0:0 -Wl,--no-add-needed \ -o /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/liblua5.1-cjson.la \ /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/lua_cjson.lo /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/fpconv.lo /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/strbuf.lo \ -Wl,-z,relro ar: `u' modifier ignored since `D' is the default (see `U') ldd /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/cjson.so linux-vdso.so.1 (0x00007fffb358a000) libc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x00007f187525e000) /lib64/ld-linux-x86-64.so.2 (0x00007f187581c000) Target build made Making target build for debian/lua5.2.dh-lua.conf libtool --silent --tag=CC --mode=compile cc -c -g -O2 -fdebug-prefix-map=/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -I/usr//include/lua5.2 -Wall -Wextra -o /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/lua_cjson.lo lua_cjson.c lua_cjson.c: In function 'json_append_string': lua_cjson.c:477:19: warning: comparison between signed and unsigned integer expressions [-Wsign-compare] for (i = 0; i < len; i++) { ^ In file included from lua_cjson.c:47:0: lua_cjson.c: At top level: fpconv.h:15:20: warning: inline function 'fpconv_init' declared but never defined extern inline void fpconv_init(); ^~~~~~~~~~~ lua_cjson.c: In function 'json_append_data': lua_cjson.c:689:12: warning: this statement may fall through [-Wimplicit-fallthrough=] if (lua_touserdata(l, -1) == NULL) { ^ lua_cjson.c:693:5: note: here default: ^~~~~~~ libtool --silent --tag=CC --mode=compile cc -c -g -O2 -fdebug-prefix-map=/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -I/usr//include/lua5.2 -Wall -Wextra -o /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/fpconv.lo fpconv.c libtool --silent --tag=CC --mode=compile cc -c -g -O2 -fdebug-prefix-map=/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -I/usr//include/lua5.2 -Wall -Wextra -o /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/strbuf.lo strbuf.c libtool --silent --tag=CC --mode=link cc \ -rpath /usr//lib/x86_64-linux-gnu -version-info 0:0:0 -Wl,--no-add-needed \ -o /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/liblua5.2-cjson.la \ /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/lua_cjson.lo /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/fpconv.lo /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/strbuf.lo \ -Wl,-z,relro ar: `u' modifier ignored since `D' is the default (see `U') ldd /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/cjson.so linux-vdso.so.1 (0x00007ffd590a9000) libc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x00007fd0fbadd000) /lib64/ld-linux-x86-64.so.2 (0x00007fd0fc09b000) Target build made dh_auto_test -a -O--buildsystem=lua dh_auto_test: Compatibility levels before 9 are deprecated (level 7 in use) make --no-print-directory -f /usr/share/dh-lua/make/dh-lua.Makefile.multiple test Making target test for debian/lua5.1.dh-lua.conf # tests Copying lua/cjson/util.lua in /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson for test ********************** lua dynamic (5.1) ********* Test: cd tests/ && @@LUA@@ test.lua ==> Testing Lua CJSON version 2.1.0 ==> Test [1] Check module name, version: PASS [Input] { } [Received:success] { "cjson", "2.1.0" } ==> Test [2] Decode string: PASS [Input] { "\"test string\"" } [Received:success] { "test string" } ==> Test [3] Decode numbers: PASS [Input] { "[ 0.0, -5e3, -1, 0.3e-3, 1023.2, 0e10 ]" } [Received:success] { { 0, -5000, -1, 0.0003, 1023.2, 0 } } ==> Test [4] Decode null: PASS [Input] { "null" } [Received:success] { json.null } ==> Test [5] Decode true: PASS [Input] { "true" } [Received:success] { true } ==> Test [6] Decode false: PASS [Input] { "false" } [Received:success] { false } ==> Test [7] Decode object with numeric keys: PASS [Input] { "{ \"1\": \"one\", \"3\": \"three\" }" } [Received:success] { { ["1"] = "one", ["3"] = "three" } } ==> Test [8] Decode object with string keys: PASS [Input] { "{ \"a\": \"a\", \"b\": \"b\" }" } [Received:success] { { ["a"] = "a", ["b"] = "b" } } ==> Test [9] Decode array: PASS [Input] { "[ \"one\", null, \"three\" ]" } [Received:success] { { "one", json.null, "three" } } ==> Test [10] Decode UTF-16BE [throw error]: PASS [Input] { "\000\"\000\"" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [11] Decode UTF-16LE [throw error]: PASS [Input] { "\"\000\"\000" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [12] Decode UTF-32BE [throw error]: PASS [Input] { "\000\000\000\"" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [13] Decode UTF-32LE [throw error]: PASS [Input] { "\"\000\000\000" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [14] Decode partial JSON [throw error]: PASS [Input] { "{ \"unexpected eof\": " } [Received:error] { "Expected value but found T_END at character 21" } ==> Test [15] Decode with extra comma [throw error]: PASS [Input] { "{ \"extra data\": true }, false" } [Received:error] { "Expected the end but found T_COMMA at character 23" } ==> Test [16] Decode invalid escape code [throw error]: PASS [Input] { " { \"bad escape \\q code\" } " } [Received:error] { "Expected object key string but found invalid escape code at character 16" } ==> Test [17] Decode 
invalid unicode escape [throw error]: PASS [Input] { " { \"bad unicode \\u0f6 escape\" } " } [Received:error] { "Expected object key string but found invalid unicode escape code at character 17" } ==> Test [18] Decode invalid keyword [throw error]: PASS [Input] { " [ \"bad barewood\", test ] " } [Received:error] { "Expected value but found invalid token at character 20" } ==> Test [19] Decode invalid number #1 [throw error]: PASS [Input] { "[ -+12 ]" } [Received:error] { "Expected value but found invalid number at character 3" } ==> Test [20] Decode invalid number #2 [throw error]: PASS [Input] { "-v" } [Received:error] { "Expected value but found invalid number at character 1" } ==> Test [21] Decode invalid number exponent [throw error]: PASS [Input] { "[ 0.4eg10 ]" } [Received:error] { "Expected comma or array end but found invalid token at character 6" } ==> Test [22] Set decode_max_depth(5): PASS [Input] { 5 } [Received:success] { 5 } ==> Test [23] Decode array at nested limit: PASS [Input] { "[[[[[ \"nested\" ]]]]]" } [Received:success] { { { { { { "nested" } } } } } } ==> Test [24] Decode array over nested limit [throw error]: PASS [Input] { "[[[[[[ \"nested\" ]]]]]]" } [Received:error] { "Found too many nested data structures (6) at character 6" } ==> Test [25] Decode object at nested limit: PASS [Input] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":\"nested\"}}}}}" } [Received:success] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = "nested" } } } } } } ==> Test [26] Decode object over nested limit [throw error]: PASS [Input] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":{\"f\":\"nested\"}}}}}}" } [Received:error] { "Found too many nested data structures (6) at character 26" } ==> Test [27] Set decode_max_depth(1000): PASS [Input] { 1000 } [Received:success] { 1000 } ==> Test [28] Decode deeply nested array [throw error]: PASS [Input] { 
"[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[1100]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]" } [Received:error] { "Found too many nested data structures (1001) at character 1001" } ==> Test [29] Set encode_max_depth(5): PASS [Input] { 5 } [Received:success] { 5 } ==> Test [30] Encode nested table as array at nested limit: PASS [Input] { { { { { { "nested" } } } } } } [Received:success] { "[[[[[\"nested\"]]]]]" } ==> Test [31] Encode nested table as array after nested limit [throw error]: PASS [Input] { { { { { { { "nested" } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [32] Encode nested table as object at nested limit: PASS [Input] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = "nested" } } } } } } [Received:success] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":\"nested\"}}}}}" } ==> Test [33] Encode nested table as object over nested limit [throw error]: PASS [Input] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = { ["f"] = "nested" } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [34] Encode table with cycle [throw error]: PASS [Input] { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { Cannot serialise any further: too many nested tables } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } [Received:error] { "Cannot serialise, excessive 
nesting (6)" } ==> Test [35] Set encode_max_depth(1000): PASS [Input] { 1000 } [Received:success] { 1000 } ==> Test [36] Encode deeply nested data [throw error]: PASS [Input] { { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = Cannot serialise any further: too many nested tables, [2] = "string", ["a"] = Cannot serialise any further: too many nested tables } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (1001)" } 
==> Test [37] Encode null: PASS [Input] { json.null } [Received:success] { "null" } ==> Test [38] Encode true: PASS [Input] { true } [Received:success] { "true" } ==> Test [39] Encode false: PASS [Input] { false } [Received:success] { "false" } ==> Test [40] Encode empty object: PASS [Input] { { } } [Received:success] { "{}" } ==> Test [41] Encode integer: PASS [Input] { 10 } [Received:success] { "10" } ==> Test [42] Encode string: PASS [Input] { "hello" } [Received:success] { "\"hello\"" } ==> Test [43] Encode Lua function [throw error]: PASS [Input] { "" } [Received:error] { "Cannot serialise function: type not supported" } ==> Test [44] Set decode_invalid_numbers(true): PASS [Input] { true } [Received:success] { true } ==> Test [45] Decode hexadecimal: PASS [Input] { "0x6.ffp1" } [Received:success] { 13.9921875 } ==> Test [46] Decode numbers with leading zero: PASS [Input] { "[ 0123, 00.33 ]" } [Received:success] { { 123, 0.33 } } ==> Test [47] Decode +-Inf: PASS [Input] { "[ +Inf, Inf, -Inf ]" } [Received:success] { { inf, inf, -inf } } ==> Test [48] Decode +-Infinity: PASS [Input] { "[ +Infinity, Infinity, -Infinity ]" } [Received:success] { { inf, inf, -inf } } ==> Test [49] Decode +-NaN: PASS [Input] { "[ +NaN, NaN, -NaN ]" } [Received:success] { { nan, nan, nan } } ==> Test [50] Decode Infrared (not infinity) [throw error]: PASS [Input] { "Infrared" } [Received:error] { "Expected the end but found invalid token at character 4" } ==> Test [51] Decode Noodle (not NaN) [throw error]: PASS [Input] { "Noodle" } [Received:error] { "Expected value but found invalid token at character 1" } ==> Test [52] Set decode_invalid_numbers(false): PASS [Input] { false } [Received:success] { false } ==> Test [53] Decode hexadecimal [throw error]: PASS [Input] { "0x6" } [Received:error] { "Expected value but found invalid number at character 1" } ==> Test [54] Decode numbers with leading zero [throw error]: PASS [Input] { "[ 0123, 00.33 ]" } [Received:error] { "Expected value but found invalid number at character 3" } ==> Test [55] Decode +-Inf [throw error]: PASS [Input] { "[ +Inf, Inf, -Inf ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [56] Decode +-Infinity [throw error]: PASS [Input] { "[ +Infinity, Infinity, -Infinity ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [57] Decode +-NaN [throw error]: PASS [Input] { "[ +NaN, NaN, -NaN ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [58] Set decode_invalid_numbers("on"): PASS [Input] { "on" } [Received:success] { true } ==> Test [59] Set encode_invalid_numbers(false): PASS [Input] { false } [Received:success] { false } ==> Test [60] Encode NaN [throw error]: PASS [Input] { -nan } [Received:error] { "Cannot serialise number: must not be NaN or Inf" } ==> Test [61] Encode Infinity [throw error]: PASS [Input] { inf } [Received:error] { "Cannot serialise number: must not be NaN or Inf" } ==> Test [62] Set encode_invalid_numbers("null"): PASS [Input] { "null" } [Received:success] { "null" } ==> Test [63] Encode NaN as null: PASS [Input] { -nan } [Received:success] { "null" } ==> Test [64] Encode Infinity as null: PASS [Input] { inf } [Received:success] { "null" } ==> Test [65] Set encode_invalid_numbers(true): PASS [Input] { true } [Received:success] { true } ==> Test [66] Encode NaN: PASS [Input] { -nan } [Received:success] { "nan" } ==> Test [67] Encode Infinity: PASS [Input] { inf } [Received:success] { "inf" } ==> Test [68] 
Set encode_invalid_numbers("off"): PASS [Input] { "off" } [Received:success] { false } ==> Test [69] Set encode_sparse_array(true, 2, 3): PASS [Input] { true, 2, 3 } [Received:success] { true, 2, 3 } ==> Test [70] Encode sparse table as array #1: PASS [Input] { { [3] = "sparse test" } } [Received:success] { "[null,null,\"sparse test\"]" } ==> Test [71] Encode sparse table as array #2: PASS [Input] { { "one", nil, nil, "sparse test" } } [Received:success] { "[\"one\",null,null,\"sparse test\"]" } ==> Test [72] Encode sparse array as object: PASS [Input] { { [1] = "one", [5] = "sparse test" } } [Received:success] { "{\"1\":\"one\",\"5\":\"sparse test\"}" } ==> Test [73] Encode table with numeric string key as object: PASS [Input] { { ["2"] = "numeric string key test" } } [Received:success] { "{\"2\":\"numeric string key test\"}" } ==> Test [74] Set encode_sparse_array(false): PASS [Input] { false } [Received:success] { false, 2, 3 } ==> Test [75] Encode table with incompatible key [throw error]: PASS [Input] { { [false] = "wrong" } } [Received:error] { "Cannot serialise boolean: table key must be a number or string" } ==> Test [76] Encode all octets (8-bit clean): PASS [Input] { "\000 \ \r !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" } [Received:success] { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f !\\\"#$%&'()*+,-.\\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\u007f\"" } ==> Test [77] Decode all escaped octets: PASS [Input] { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f !\\\"#$%&'()*+,-.\\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\u007f\"" } [Received:success] { "\000 \ \r !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" } ==> Test [78] Decode single UTF-16 escape: PASS [Input] { "\"\\uF800\"" } [Received:success] { "" } ==> Test [79] Decode swapped surrogate pair [throw error]: PASS [Input] { "\"\\uDC00\\uD800\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [80] Decode duplicate high surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [81] Decode duplicate low surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [82] Decode missing low surrogate [throw error]: PASS [Input] { "\"\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [83] Decode invalid low surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uD\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Set locale to cs_CZ (comma separator) ==> Test [84] Encode number under comma locale: PASS [Input] { 1.5 } [Received:success] { "1.5" } ==> Test [85] Decode number in array under comma locale: PASS [Input] { "[ 10, \"test\" ]" } [Received:success] { { 10, "test" } } ==> Revert locale to POSIX ==> Test [86] Set encode_keep_buffer(false): PASS [Input] { 
false } [Received:success] { false } ==> Test [87] Set encode_number_precision(3): PASS [Input] { 3 } [Received:success] { 3 } ==> Test [88] Encode number with precision 3: PASS [Input] { 0.33333333333333 } [Received:success] { "0.333" } ==> Test [89] Set encode_number_precision(14): PASS [Input] { 14 } [Received:success] { 14 } ==> Test [90] Set encode_keep_buffer(true): PASS [Input] { true } [Received:success] { true } ==> Test [91] Set encode_number_precision(0) [throw error]: PASS [Input] { 0 } [Received:error] { "bad argument #1 to '?' (expected integer between 1 and 14)" } ==> Test [92] Set encode_number_precision("five") [throw error]: PASS [Input] { "five" } [Received:error] { "bad argument #1 to '?' (number expected, got string)" } ==> Test [93] Set encode_keep_buffer(nil, true) [throw error]: PASS [Input] { nil, true } [Received:error] { "bad argument #2 to '?' (found too many arguments)" } ==> Test [94] Set encode_max_depth("wrong") [throw error]: PASS [Input] { "wrong" } [Received:error] { "bad argument #1 to '?' (number expected, got string)" } ==> Test [95] Set decode_max_depth(0) [throw error]: PASS [Input] { "0" } [Received:error] { "bad argument #1 to '?' (expected integer between 1 and 2147483647)" } ==> Test [96] Set encode_invalid_numbers(-2) [throw error]: PASS [Input] { -2 } [Received:error] { "bad argument #1 to '?' (invalid option '-2')" } ==> Test [97] Set decode_invalid_numbers(true, false) [throw error]: PASS [Input] { true, false } [Received:error] { "bad argument #2 to '?' (found too many arguments)" } ==> Test [98] Set encode_sparse_array("not quite on") [throw error]: PASS [Input] { "not quite on" } [Received:error] { "bad argument #1 to '?' (invalid option 'not quite on')" } ==> Reset Lua CJSON configuration ==> Test [99] Check encode_sparse_array(): PASS [Input] { } [Received:success] { false, 2, 10 } ==> Test [100] Encode (safe) simple value: PASS [Input] { true } [Received:success] { "true" } ==> Test [101] Encode (safe) argument validation [throw error]: PASS [Input] { "arg1", "arg2" } [Received:error] { "bad argument #1 to '?' (expected 1 argument)" } ==> Test [102] Decode (safe) error generation: PASS [Input] { "Oops" } [Received:success] { nil, "Expected value but found invalid token at character 1" } ==> Test [103] Decode (safe) error generation after new(): PASS [Input] { "Oops" } [Received:success] { nil, "Expected value but found invalid token at character 1" } ==> Summary: all tests succeeded ************************************************** libtool --silent --tag=CC --mode=link cc -g -O2 -fdebug-prefix-map=/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -I/usr//include/lua5.1 -Wall -Wextra -Wl,--no-add-needed \ -o /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/app-dynamic -I . 
-I /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/ \ /usr/share/dh-lua/test/5.1/app.c /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/liblua5.1-cjson.la \ -Wl,-z,relro -L/usr//lib/x86_64-linux-gnu -llua5.1 libtool --silent --tag=CC --mode=execute -dlopen /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/liblua5.1-cjson.la \ ldd /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/app-dynamic linux-vdso.so.1 (0x00007fff59d7b000) liblua5.1-cjson.so.0 => /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/.libs/liblua5.1-cjson.so.0 (0x00007f054a782000) liblua5.1.so.0 => /usr//lib/x86_64-linux-gnu/liblua5.1.so.0 (0x00007f054a554000) libc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x00007f054a19e000) libm.so.6 => /lib/x86_64-linux-gnu/libm.so.6 (0x00007f0549e53000) libdl.so.2 => /lib/x86_64-linux-gnu/libdl.so.2 (0x00007f0549c4f000) /lib64/ld-linux-x86-64.so.2 (0x00007f054ab8d000) ********************** app dynamic (5.1) ********* Test: cd tests/ && @@LUA@@ test.lua ==> Testing Lua CJSON version 2.1.0 ==> Test [1] Check module name, version: PASS [Input] { } [Received:success] { "cjson", "2.1.0" } ==> Test [2] Decode string: PASS [Input] { "\"test string\"" } [Received:success] { "test string" } ==> Test [3] Decode numbers: PASS [Input] { "[ 0.0, -5e3, -1, 0.3e-3, 1023.2, 0e10 ]" } [Received:success] { { 0, -5000, -1, 0.0003, 1023.2, 0 } } ==> Test [4] Decode null: PASS [Input] { "null" } [Received:success] { json.null } ==> Test [5] Decode true: PASS [Input] { "true" } [Received:success] { true } ==> Test [6] Decode false: PASS [Input] { "false" } [Received:success] { false } ==> Test [7] Decode object with numeric keys: PASS [Input] { "{ \"1\": \"one\", \"3\": \"three\" }" } [Received:success] { { ["1"] = "one", ["3"] = "three" } } ==> Test [8] Decode object with string keys: PASS [Input] { "{ \"a\": \"a\", \"b\": \"b\" }" } [Received:success] { { ["a"] = "a", ["b"] = "b" } } ==> Test [9] Decode array: PASS [Input] { "[ \"one\", null, \"three\" ]" } [Received:success] { { "one", json.null, "three" } } ==> Test [10] Decode UTF-16BE [throw error]: PASS [Input] { "\000\"\000\"" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [11] Decode UTF-16LE [throw error]: PASS [Input] { "\"\000\"\000" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [12] Decode UTF-32BE [throw error]: PASS [Input] { "\000\000\000\"" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [13] Decode UTF-32LE [throw error]: PASS [Input] { "\"\000\000\000" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [14] Decode partial JSON [throw error]: PASS [Input] { "{ \"unexpected eof\": " } [Received:error] { "Expected value but found T_END at character 21" } ==> Test [15] Decode with extra comma [throw error]: PASS [Input] { "{ \"extra data\": true }, false" } [Received:error] { "Expected the end but found T_COMMA at character 23" } ==> Test [16] Decode invalid escape code [throw error]: PASS [Input] { " { \"bad escape \\q code\" } " } [Received:error] { "Expected object key string but found invalid escape code at character 16" } ==> Test [17] Decode invalid unicode escape [throw error]: PASS [Input] { " { \"bad unicode \\u0f6 escape\" } " } [Received:error] { "Expected object key string but found invalid unicode escape code at character 17" } ==> Test [18] Decode invalid keyword [throw error]: PASS [Input] { " [ \"bad barewood\", test ] " } [Received:error] { "Expected value but found invalid 
token at character 20" } ==> Test [19] Decode invalid number #1 [throw error]: PASS [Input] { "[ -+12 ]" } [Received:error] { "Expected value but found invalid number at character 3" } ==> Test [20] Decode invalid number #2 [throw error]: PASS [Input] { "-v" } [Received:error] { "Expected value but found invalid number at character 1" } ==> Test [21] Decode invalid number exponent [throw error]: PASS [Input] { "[ 0.4eg10 ]" } [Received:error] { "Expected comma or array end but found invalid token at character 6" } ==> Test [22] Set decode_max_depth(5): PASS [Input] { 5 } [Received:success] { 5 } ==> Test [23] Decode array at nested limit: PASS [Input] { "[[[[[ \"nested\" ]]]]]" } [Received:success] { { { { { { "nested" } } } } } } ==> Test [24] Decode array over nested limit [throw error]: PASS [Input] { "[[[[[[ \"nested\" ]]]]]]" } [Received:error] { "Found too many nested data structures (6) at character 6" } ==> Test [25] Decode object at nested limit: PASS [Input] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":\"nested\"}}}}}" } [Received:success] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = "nested" } } } } } } ==> Test [26] Decode object over nested limit [throw error]: PASS [Input] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":{\"f\":\"nested\"}}}}}}" } [Received:error] { "Found too many nested data structures (6) at character 26" } ==> Test [27] Set decode_max_depth(1000): PASS [Input] { 1000 } [Received:success] { 1000 } ==> Test [28] Decode deeply nested array [throw error]: PASS [Input] { "[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[1100]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]
]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]" } [Received:error] { "Found too many nested data structures (1001) at character 1001" } ==> Test [29] Set encode_max_depth(5): PASS [Input] { 5 } [Received:success] { 5 } ==> Test [30] Encode nested table as array at nested limit: PASS [Input] { { { { { { "nested" } } } } } } [Received:success] { "[[[[[\"nested\"]]]]]" } ==> Test [31] Encode nested table as array after nested limit [throw error]: PASS [Input] { { { { { { { "nested" } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [32] Encode nested table as object at nested limit: PASS [Input] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = "nested" } } } } } } [Received:success] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":\"nested\"}}}}}" } ==> Test [33] Encode nested table as object over nested limit [throw error]: PASS [Input] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = { ["f"] = "nested" } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [34] Encode table with cycle [throw error]: PASS [Input] { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { Cannot serialise any further: too many nested tables } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [35] Set encode_max_depth(1000): PASS [Input] { 1000 } [Received:success] { 1000 } ==> Test [36] Encode deeply nested data [throw error]: PASS [Input] { { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, 
false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = Cannot serialise any further: too many nested tables, [2] = "string", ["a"] = Cannot serialise any further: too many nested tables } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (1001)" } ==> Test [37] Encode null: PASS [Input] { json.null } [Received:success] { "null" } ==> Test [38] Encode true: PASS [Input] { true } [Received:success] { "true" } ==> Test [39] Encode false: PASS [Input] { false } [Received:success] { "false" } ==> Test [40] Encode empty object: PASS [Input] { { } } [Received:success] { "{}" } ==> Test [41] Encode integer: PASS [Input] { 10 } [Received:success] { "10" } ==> Test [42] Encode string: PASS [Input] { "hello" } [Received:success] { "\"hello\"" } ==> Test [43] Encode Lua function [throw error]: PASS [Input] { "" } [Received:error] { "Cannot serialise function: type not supported" } ==> Test [44] Set decode_invalid_numbers(true): PASS [Input] { true } [Received:success] { true } ==> Test [45] Decode hexadecimal: PASS [Input] { "0x6.ffp1" } [Received:success] { 13.9921875 } ==> Test [46] Decode numbers with leading zero: PASS [Input] { "[ 0123, 00.33 ]" } [Received:success] { { 123, 0.33 } } ==> Test [47] Decode +-Inf: PASS [Input] { "[ +Inf, Inf, -Inf ]" } [Received:success] { { inf, inf, -inf } } ==> Test [48] Decode +-Infinity: PASS [Input] { "[ +Infinity, Infinity, -Infinity ]" } [Received:success] { { inf, inf, -inf } } ==> Test [49] Decode +-NaN: PASS [Input] { "[ +NaN, NaN, -NaN ]" } [Received:success] { { nan, nan, nan } } ==> Test [50] Decode Infrared (not infinity) [throw error]: PASS [Input] { "Infrared" } [Received:error] { "Expected the end but found invalid token at character 4" } ==> Test [51] Decode Noodle (not NaN) [throw error]: PASS [Input] { "Noodle" } [Received:error] { "Expected value but found invalid token at character 1" } ==> Test [52] Set decode_invalid_numbers(false): PASS [Input] { false } [Received:success] { false } ==> Test [53] Decode hexadecimal [throw error]: PASS [Input] { "0x6" } [Received:error] { "Expected value but found invalid number at character 1" } ==> Test [54] Decode numbers with leading zero [throw error]: PASS [Input] { "[ 0123, 00.33 ]" } [Received:error] { "Expected value but found invalid number at character 3" } ==> 
Test [55] Decode +-Inf [throw error]: PASS [Input] { "[ +Inf, Inf, -Inf ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [56] Decode +-Infinity [throw error]: PASS [Input] { "[ +Infinity, Infinity, -Infinity ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [57] Decode +-NaN [throw error]: PASS [Input] { "[ +NaN, NaN, -NaN ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [58] Set decode_invalid_numbers("on"): PASS [Input] { "on" } [Received:success] { true } ==> Test [59] Set encode_invalid_numbers(false): PASS [Input] { false } [Received:success] { false } ==> Test [60] Encode NaN [throw error]: PASS [Input] { -nan } [Received:error] { "Cannot serialise number: must not be NaN or Inf" } ==> Test [61] Encode Infinity [throw error]: PASS [Input] { inf } [Received:error] { "Cannot serialise number: must not be NaN or Inf" } ==> Test [62] Set encode_invalid_numbers("null"): PASS [Input] { "null" } [Received:success] { "null" } ==> Test [63] Encode NaN as null: PASS [Input] { -nan } [Received:success] { "null" } ==> Test [64] Encode Infinity as null: PASS [Input] { inf } [Received:success] { "null" } ==> Test [65] Set encode_invalid_numbers(true): PASS [Input] { true } [Received:success] { true } ==> Test [66] Encode NaN: PASS [Input] { -nan } [Received:success] { "nan" } ==> Test [67] Encode Infinity: PASS [Input] { inf } [Received:success] { "inf" } ==> Test [68] Set encode_invalid_numbers("off"): PASS [Input] { "off" } [Received:success] { false } ==> Test [69] Set encode_sparse_array(true, 2, 3): PASS [Input] { true, 2, 3 } [Received:success] { true, 2, 3 } ==> Test [70] Encode sparse table as array #1: PASS [Input] { { [3] = "sparse test" } } [Received:success] { "[null,null,\"sparse test\"]" } ==> Test [71] Encode sparse table as array #2: PASS [Input] { { "one", nil, nil, "sparse test" } } [Received:success] { "[\"one\",null,null,\"sparse test\"]" } ==> Test [72] Encode sparse array as object: PASS [Input] { { [1] = "one", [5] = "sparse test" } } [Received:success] { "{\"1\":\"one\",\"5\":\"sparse test\"}" } ==> Test [73] Encode table with numeric string key as object: PASS [Input] { { ["2"] = "numeric string key test" } } [Received:success] { "{\"2\":\"numeric string key test\"}" } ==> Test [74] Set encode_sparse_array(false): PASS [Input] { false } [Received:success] { false, 2, 3 } ==> Test [75] Encode table with incompatible key [throw error]: PASS [Input] { { [false] = "wrong" } } [Received:error] { "Cannot serialise boolean: table key must be a number or string" } ==> Test [76] Encode all octets (8-bit clean): PASS [Input] { "\000 \ \r !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" } [Received:success] { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f !\\\"#$%&'()*+,-.\\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\u007f\"" } ==> Test [77] Decode all escaped octets: PASS [Input] { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f !\\\"#$%&'()*+,-.\\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\u007f\"" } 
[Received:success] { "\000 \ \r !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" } ==> Test [78] Decode single UTF-16 escape: PASS [Input] { "\"\\uF800\"" } [Received:success] { "" } ==> Test [79] Decode swapped surrogate pair [throw error]: PASS [Input] { "\"\\uDC00\\uD800\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [80] Decode duplicate high surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [81] Decode duplicate low surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [82] Decode missing low surrogate [throw error]: PASS [Input] { "\"\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [83] Decode invalid low surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uD\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Set locale to cs_CZ (comma separator) ==> Test [84] Encode number under comma locale: PASS [Input] { 1.5 } [Received:success] { "1.5" } ==> Test [85] Decode number in array under comma locale: PASS [Input] { "[ 10, \"test\" ]" } [Received:success] { { 10, "test" } } ==> Revert locale to POSIX ==> Test [86] Set encode_keep_buffer(false): PASS [Input] { false } [Received:success] { false } ==> Test [87] Set encode_number_precision(3): PASS [Input] { 3 } [Received:success] { 3 } ==> Test [88] Encode number with precision 3: PASS [Input] { 0.33333333333333 } [Received:success] { "0.333" } ==> Test [89] Set encode_number_precision(14): PASS [Input] { 14 } [Received:success] { 14 } ==> Test [90] Set encode_keep_buffer(true): PASS [Input] { true } [Received:success] { true } ==> Test [91] Set encode_number_precision(0) [throw error]: PASS [Input] { 0 } [Received:error] { "bad argument #1 to '?' (expected integer between 1 and 14)" } ==> Test [92] Set encode_number_precision("five") [throw error]: PASS [Input] { "five" } [Received:error] { "bad argument #1 to '?' (number expected, got string)" } ==> Test [93] Set encode_keep_buffer(nil, true) [throw error]: PASS [Input] { nil, true } [Received:error] { "bad argument #2 to '?' (found too many arguments)" } ==> Test [94] Set encode_max_depth("wrong") [throw error]: PASS [Input] { "wrong" } [Received:error] { "bad argument #1 to '?' (number expected, got string)" } ==> Test [95] Set decode_max_depth(0) [throw error]: PASS [Input] { "0" } [Received:error] { "bad argument #1 to '?' (expected integer between 1 and 2147483647)" } ==> Test [96] Set encode_invalid_numbers(-2) [throw error]: PASS [Input] { -2 } [Received:error] { "bad argument #1 to '?' (invalid option '-2')" } ==> Test [97] Set decode_invalid_numbers(true, false) [throw error]: PASS [Input] { true, false } [Received:error] { "bad argument #2 to '?' (found too many arguments)" } ==> Test [98] Set encode_sparse_array("not quite on") [throw error]: PASS [Input] { "not quite on" } [Received:error] { "bad argument #1 to '?' 
(invalid option 'not quite on')" } ==> Reset Lua CJSON configuration ==> Test [99] Check encode_sparse_array(): PASS [Input] { } [Received:success] { false, 2, 10 } ==> Test [100] Encode (safe) simple value: PASS [Input] { true } [Received:success] { "true" } ==> Test [101] Encode (safe) argument validation [throw error]: PASS [Input] { "arg1", "arg2" } [Received:error] { "bad argument #1 to '?' (expected 1 argument)" } ==> Test [102] Decode (safe) error generation: PASS [Input] { "Oops" } [Received:success] { nil, "Expected value but found invalid token at character 1" } ==> Test [103] Decode (safe) error generation after new(): PASS [Input] { "Oops" } [Received:success] { nil, "Expected value but found invalid token at character 1" } ==> Summary: all tests succeeded ************************************************** libtool --silent --tag=CC --mode=link cc -g -O2 -fdebug-prefix-map=/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -I/usr//include/lua5.1 -Wall -Wextra -Wl,--no-add-needed \ -static -o /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/app-static -I . -I /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/ \ /usr/share/dh-lua/test/5.1/app.c /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/liblua5.1-cjson.la \ -Wl,-z,relro -L/usr//lib/x86_64-linux-gnu -llua5.1 -lm -ldl ldd /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/app-static linux-vdso.so.1 (0x00007fff89deb000) liblua5.1.so.0 => /usr/lib/x86_64-linux-gnu/liblua5.1.so.0 (0x00007f44e5115000) libm.so.6 => /lib/x86_64-linux-gnu/libm.so.6 (0x00007f44e4dca000) libdl.so.2 => /lib/x86_64-linux-gnu/libdl.so.2 (0x00007f44e4bc6000) libc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x00007f44e4810000) /lib64/ld-linux-x86-64.so.2 (0x00007f44e554b000) *********************** app static (5.1) ********* Test: cd tests/ && @@LUA@@ test.lua ==> Testing Lua CJSON version 2.1.0 ==> Test [1] Check module name, version: PASS [Input] { } [Received:success] { "cjson", "2.1.0" } ==> Test [2] Decode string: PASS [Input] { "\"test string\"" } [Received:success] { "test string" } ==> Test [3] Decode numbers: PASS [Input] { "[ 0.0, -5e3, -1, 0.3e-3, 1023.2, 0e10 ]" } [Received:success] { { 0, -5000, -1, 0.0003, 1023.2, 0 } } ==> Test [4] Decode null: PASS [Input] { "null" } [Received:success] { json.null } ==> Test [5] Decode true: PASS [Input] { "true" } [Received:success] { true } ==> Test [6] Decode false: PASS [Input] { "false" } [Received:success] { false } ==> Test [7] Decode object with numeric keys: PASS [Input] { "{ \"1\": \"one\", \"3\": \"three\" }" } [Received:success] { { ["1"] = "one", ["3"] = "three" } } ==> Test [8] Decode object with string keys: PASS [Input] { "{ \"a\": \"a\", \"b\": \"b\" }" } [Received:success] { { ["a"] = "a", ["b"] = "b" } } ==> Test [9] Decode array: PASS [Input] { "[ \"one\", null, \"three\" ]" } [Received:success] { { "one", json.null, "three" } } ==> Test [10] Decode UTF-16BE [throw error]: PASS [Input] { "\000\"\000\"" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [11] Decode UTF-16LE [throw error]: PASS [Input] { "\"\000\"\000" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [12] Decode UTF-32BE [throw error]: PASS [Input] { "\000\000\000\"" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [13] Decode UTF-32LE [throw error]: PASS [Input] { "\"\000\000\000" } [Received:error] { "JSON parser does not support 
UTF-16 or UTF-32" } ==> Test [14] Decode partial JSON [throw error]: PASS [Input] { "{ \"unexpected eof\": " } [Received:error] { "Expected value but found T_END at character 21" } ==> Test [15] Decode with extra comma [throw error]: PASS [Input] { "{ \"extra data\": true }, false" } [Received:error] { "Expected the end but found T_COMMA at character 23" } ==> Test [16] Decode invalid escape code [throw error]: PASS [Input] { " { \"bad escape \\q code\" } " } [Received:error] { "Expected object key string but found invalid escape code at character 16" } ==> Test [17] Decode invalid unicode escape [throw error]: PASS [Input] { " { \"bad unicode \\u0f6 escape\" } " } [Received:error] { "Expected object key string but found invalid unicode escape code at character 17" } ==> Test [18] Decode invalid keyword [throw error]: PASS [Input] { " [ \"bad barewood\", test ] " } [Received:error] { "Expected value but found invalid token at character 20" } ==> Test [19] Decode invalid number #1 [throw error]: PASS [Input] { "[ -+12 ]" } [Received:error] { "Expected value but found invalid number at character 3" } ==> Test [20] Decode invalid number #2 [throw error]: PASS [Input] { "-v" } [Received:error] { "Expected value but found invalid number at character 1" } ==> Test [21] Decode invalid number exponent [throw error]: PASS [Input] { "[ 0.4eg10 ]" } [Received:error] { "Expected comma or array end but found invalid token at character 6" } ==> Test [22] Set decode_max_depth(5): PASS [Input] { 5 } [Received:success] { 5 } ==> Test [23] Decode array at nested limit: PASS [Input] { "[[[[[ \"nested\" ]]]]]" } [Received:success] { { { { { { "nested" } } } } } } ==> Test [24] Decode array over nested limit [throw error]: PASS [Input] { "[[[[[[ \"nested\" ]]]]]]" } [Received:error] { "Found too many nested data structures (6) at character 6" } ==> Test [25] Decode object at nested limit: PASS [Input] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":\"nested\"}}}}}" } [Received:success] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = "nested" } } } } } } ==> Test [26] Decode object over nested limit [throw error]: PASS [Input] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":{\"f\":\"nested\"}}}}}}" } [Received:error] { "Found too many nested data structures (6) at character 26" } ==> Test [27] Set decode_max_depth(1000): PASS [Input] { 1000 } [Received:success] { 1000 } ==> Test [28] Decode deeply nested array [throw error]: PASS [Input] { 
"[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[1100]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]" } [Received:error] { "Found too many nested data structures (1001) at character 1001" } ==> Test [29] Set encode_max_depth(5): PASS [Input] { 5 } [Received:success] { 5 } ==> Test [30] Encode nested table as array at nested limit: PASS [Input] { { { { { { "nested" } } } } } } [Received:success] { "[[[[[\"nested\"]]]]]" } ==> Test [31] Encode nested table as array after nested limit [throw error]: PASS [Input] { { { { { { { "nested" } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [32] Encode nested table as object at nested limit: PASS [Input] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = "nested" } } } } } } [Received:success] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":\"nested\"}}}}}" } ==> Test [33] Encode nested table as object over nested limit [throw error]: PASS [Input] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = { ["f"] = "nested" } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [34] Encode table with cycle [throw error]: PASS [Input] { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { Cannot serialise any further: too many nested tables } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } [Received:error] { "Cannot serialise, excessive 
nesting (6)" } ==> Test [35] Set encode_max_depth(1000): PASS [Input] { 1000 } [Received:success] { 1000 } ==> Test [36] Encode deeply nested data [throw error]: PASS [Input] { { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = Cannot serialise any further: too many nested tables, [2] = "string", ["a"] = Cannot serialise any further: too many nested tables } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (1001)" } 
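Tests [22]-[36] above exercise the two depth limits: decode_max_depth bounds how deeply nested a JSON document may be before decoding aborts, and encode_max_depth bounds table nesting on the way out, which also catches reference cycles as in test [34]. A short illustrative sketch follows, again assuming the freshly built cjson module is loadable outside the harness.

local cjson = require "cjson"

cjson.decode_max_depth(5)
print(pcall(cjson.decode, '[[[[[ "nested" ]]]]]'))    -- true plus the decoded table (5 levels, at the limit)
print(pcall(cjson.decode, '[[[[[[ "nested" ]]]]]]'))  -- false, "Found too many nested data structures (6) ..."

cjson.encode_max_depth(5)
local five = { { { { { "nested" } } } } }             -- exactly 5 nested tables
print(cjson.encode(five))                             -- [[[[["nested"]]]]]
print(pcall(cjson.encode, { five }))                  -- false, "Cannot serialise, excessive nesting (6)"

-- A self-referencing table hits the same encoder limit instead of looping forever:
local cycle = {}; cycle[1] = cycle
print(pcall(cjson.encode, cycle))                     -- false, excessive nesting error
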
==> Test [37] Encode null: PASS [Input] { json.null } [Received:success] { "null" } ==> Test [38] Encode true: PASS [Input] { true } [Received:success] { "true" } ==> Test [39] Encode false: PASS [Input] { false } [Received:success] { "false" } ==> Test [40] Encode empty object: PASS [Input] { { } } [Received:success] { "{}" } ==> Test [41] Encode integer: PASS [Input] { 10 } [Received:success] { "10" } ==> Test [42] Encode string: PASS [Input] { "hello" } [Received:success] { "\"hello\"" } ==> Test [43] Encode Lua function [throw error]: PASS [Input] { "" } [Received:error] { "Cannot serialise function: type not supported" } ==> Test [44] Set decode_invalid_numbers(true): PASS [Input] { true } [Received:success] { true } ==> Test [45] Decode hexadecimal: PASS [Input] { "0x6.ffp1" } [Received:success] { 13.9921875 } ==> Test [46] Decode numbers with leading zero: PASS [Input] { "[ 0123, 00.33 ]" } [Received:success] { { 123, 0.33 } } ==> Test [47] Decode +-Inf: PASS [Input] { "[ +Inf, Inf, -Inf ]" } [Received:success] { { inf, inf, -inf } } ==> Test [48] Decode +-Infinity: PASS [Input] { "[ +Infinity, Infinity, -Infinity ]" } [Received:success] { { inf, inf, -inf } } ==> Test [49] Decode +-NaN: PASS [Input] { "[ +NaN, NaN, -NaN ]" } [Received:success] { { nan, nan, nan } } ==> Test [50] Decode Infrared (not infinity) [throw error]: PASS [Input] { "Infrared" } [Received:error] { "Expected the end but found invalid token at character 4" } ==> Test [51] Decode Noodle (not NaN) [throw error]: PASS [Input] { "Noodle" } [Received:error] { "Expected value but found invalid token at character 1" } ==> Test [52] Set decode_invalid_numbers(false): PASS [Input] { false } [Received:success] { false } ==> Test [53] Decode hexadecimal [throw error]: PASS [Input] { "0x6" } [Received:error] { "Expected value but found invalid number at character 1" } ==> Test [54] Decode numbers with leading zero [throw error]: PASS [Input] { "[ 0123, 00.33 ]" } [Received:error] { "Expected value but found invalid number at character 3" } ==> Test [55] Decode +-Inf [throw error]: PASS [Input] { "[ +Inf, Inf, -Inf ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [56] Decode +-Infinity [throw error]: PASS [Input] { "[ +Infinity, Infinity, -Infinity ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [57] Decode +-NaN [throw error]: PASS [Input] { "[ +NaN, NaN, -NaN ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [58] Set decode_invalid_numbers("on"): PASS [Input] { "on" } [Received:success] { true } ==> Test [59] Set encode_invalid_numbers(false): PASS [Input] { false } [Received:success] { false } ==> Test [60] Encode NaN [throw error]: PASS [Input] { -nan } [Received:error] { "Cannot serialise number: must not be NaN or Inf" } ==> Test [61] Encode Infinity [throw error]: PASS [Input] { inf } [Received:error] { "Cannot serialise number: must not be NaN or Inf" } ==> Test [62] Set encode_invalid_numbers("null"): PASS [Input] { "null" } [Received:success] { "null" } ==> Test [63] Encode NaN as null: PASS [Input] { -nan } [Received:success] { "null" } ==> Test [64] Encode Infinity as null: PASS [Input] { inf } [Received:success] { "null" } ==> Test [65] Set encode_invalid_numbers(true): PASS [Input] { true } [Received:success] { true } ==> Test [66] Encode NaN: PASS [Input] { -nan } [Received:success] { "nan" } ==> Test [67] Encode Infinity: PASS [Input] { inf } [Received:success] { "inf" } ==> Test [68] 
Set encode_invalid_numbers("off"): PASS [Input] { "off" } [Received:success] { false } ==> Test [69] Set encode_sparse_array(true, 2, 3): PASS [Input] { true, 2, 3 } [Received:success] { true, 2, 3 } ==> Test [70] Encode sparse table as array #1: PASS [Input] { { [3] = "sparse test" } } [Received:success] { "[null,null,\"sparse test\"]" } ==> Test [71] Encode sparse table as array #2: PASS [Input] { { "one", nil, nil, "sparse test" } } [Received:success] { "[\"one\",null,null,\"sparse test\"]" } ==> Test [72] Encode sparse array as object: PASS [Input] { { [1] = "one", [5] = "sparse test" } } [Received:success] { "{\"1\":\"one\",\"5\":\"sparse test\"}" } ==> Test [73] Encode table with numeric string key as object: PASS [Input] { { ["2"] = "numeric string key test" } } [Received:success] { "{\"2\":\"numeric string key test\"}" } ==> Test [74] Set encode_sparse_array(false): PASS [Input] { false } [Received:success] { false, 2, 3 } ==> Test [75] Encode table with incompatible key [throw error]: PASS [Input] { { [false] = "wrong" } } [Received:error] { "Cannot serialise boolean: table key must be a number or string" } ==> Test [76] Encode all octets (8-bit clean): PASS [Input] { "\000 \ \r !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" } [Received:success] { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f !\\\"#$%&'()*+,-.\\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\u007f\"" } ==> Test [77] Decode all escaped octets: PASS [Input] { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f !\\\"#$%&'()*+,-.\\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\u007f\"" } [Received:success] { "\000 \ \r !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" } ==> Test [78] Decode single UTF-16 escape: PASS [Input] { "\"\\uF800\"" } [Received:success] { "" } ==> Test [79] Decode swapped surrogate pair [throw error]: PASS [Input] { "\"\\uDC00\\uD800\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [80] Decode duplicate high surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [81] Decode duplicate low surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [82] Decode missing low surrogate [throw error]: PASS [Input] { "\"\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [83] Decode invalid low surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uD\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Set locale to cs_CZ (comma separator) ==> Test [84] Encode number under comma locale: PASS [Input] { 1.5 } [Received:success] { "1.5" } ==> Test [85] Decode number in array under comma locale: PASS [Input] { "[ 10, \"test\" ]" } [Received:success] { { 10, "test" } } ==> Revert locale to POSIX ==> Test [86] Set encode_keep_buffer(false): PASS [Input] { 
false } [Received:success] { false } ==> Test [87] Set encode_number_precision(3): PASS [Input] { 3 } [Received:success] { 3 } ==> Test [88] Encode number with precision 3: PASS [Input] { 0.33333333333333 } [Received:success] { "0.333" } ==> Test [89] Set encode_number_precision(14): PASS [Input] { 14 } [Received:success] { 14 } ==> Test [90] Set encode_keep_buffer(true): PASS [Input] { true } [Received:success] { true } ==> Test [91] Set encode_number_precision(0) [throw error]: PASS [Input] { 0 } [Received:error] { "bad argument #1 to '?' (expected integer between 1 and 14)" } ==> Test [92] Set encode_number_precision("five") [throw error]: PASS [Input] { "five" } [Received:error] { "bad argument #1 to '?' (number expected, got string)" } ==> Test [93] Set encode_keep_buffer(nil, true) [throw error]: PASS [Input] { nil, true } [Received:error] { "bad argument #2 to '?' (found too many arguments)" } ==> Test [94] Set encode_max_depth("wrong") [throw error]: PASS [Input] { "wrong" } [Received:error] { "bad argument #1 to '?' (number expected, got string)" } ==> Test [95] Set decode_max_depth(0) [throw error]: PASS [Input] { "0" } [Received:error] { "bad argument #1 to '?' (expected integer between 1 and 2147483647)" } ==> Test [96] Set encode_invalid_numbers(-2) [throw error]: PASS [Input] { -2 } [Received:error] { "bad argument #1 to '?' (invalid option '-2')" } ==> Test [97] Set decode_invalid_numbers(true, false) [throw error]: PASS [Input] { true, false } [Received:error] { "bad argument #2 to '?' (found too many arguments)" } ==> Test [98] Set encode_sparse_array("not quite on") [throw error]: PASS [Input] { "not quite on" } [Received:error] { "bad argument #1 to '?' (invalid option 'not quite on')" } ==> Reset Lua CJSON configuration ==> Test [99] Check encode_sparse_array(): PASS [Input] { } [Received:success] { false, 2, 10 } ==> Test [100] Encode (safe) simple value: PASS [Input] { true } [Received:success] { "true" } ==> Test [101] Encode (safe) argument validation [throw error]: PASS [Input] { "arg1", "arg2" } [Received:error] { "bad argument #1 to '?' 
(expected 1 argument)" } ==> Test [102] Decode (safe) error generation: PASS [Input] { "Oops" } [Received:success] { nil, "Expected value but found invalid token at character 1" } ==> Test [103] Decode (safe) error generation after new(): PASS [Input] { "Oops" } [Received:success] { nil, "Expected value but found invalid token at character 1" } ==> Summary: all tests succeeded ************************************************** Target test made Making target test for debian/lua5.2.dh-lua.conf # tests Copying lua/cjson/util.lua in /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson for test ********************** lua dynamic (5.2) ********* Test: cd tests/ && @@LUA@@ test.lua ==> Testing Lua CJSON version 2.1.0 ==> Test [1] Check module name, version: PASS [Input] { } [Received:success] { "cjson", "2.1.0" } ==> Test [2] Decode string: PASS [Input] { "\"test string\"" } [Received:success] { "test string" } ==> Test [3] Decode numbers: PASS [Input] { "[ 0.0, -5e3, -1, 0.3e-3, 1023.2, 0e10 ]" } [Received:success] { { 0, -5000, -1, 0.0003, 1023.2, 0 } } ==> Test [4] Decode null: PASS [Input] { "null" } [Received:success] { json.null } ==> Test [5] Decode true: PASS [Input] { "true" } [Received:success] { true } ==> Test [6] Decode false: PASS [Input] { "false" } [Received:success] { false } ==> Test [7] Decode object with numeric keys: PASS [Input] { "{ \"1\": \"one\", \"3\": \"three\" }" } [Received:success] { { ["1"] = "one", ["3"] = "three" } } ==> Test [8] Decode object with string keys: PASS [Input] { "{ \"a\": \"a\", \"b\": \"b\" }" } [Received:success] { { ["a"] = "a", ["b"] = "b" } } ==> Test [9] Decode array: PASS [Input] { "[ \"one\", null, \"three\" ]" } [Received:success] { { "one", json.null, "three" } } ==> Test [10] Decode UTF-16BE [throw error]: PASS [Input] { "\0\"\0\"" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [11] Decode UTF-16LE [throw error]: PASS [Input] { "\"\0\"\0" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [12] Decode UTF-32BE [throw error]: PASS [Input] { "\0\0\0\"" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [13] Decode UTF-32LE [throw error]: PASS [Input] { "\"\0\0\0" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [14] Decode partial JSON [throw error]: PASS [Input] { "{ \"unexpected eof\": " } [Received:error] { "Expected value but found T_END at character 21" } ==> Test [15] Decode with extra comma [throw error]: PASS [Input] { "{ \"extra data\": true }, false" } [Received:error] { "Expected the end but found T_COMMA at character 23" } ==> Test [16] Decode invalid escape code [throw error]: PASS [Input] { " { \"bad escape \\q code\" } " } [Received:error] { "Expected object key string but found invalid escape code at character 16" } ==> Test [17] Decode invalid unicode escape [throw error]: PASS [Input] { " { \"bad unicode \\u0f6 escape\" } " } [Received:error] { "Expected object key string but found invalid unicode escape code at character 17" } ==> Test [18] Decode invalid keyword [throw error]: PASS [Input] { " [ \"bad barewood\", test ] " } [Received:error] { "Expected value but found invalid token at character 20" } ==> Test [19] Decode invalid number #1 [throw error]: PASS [Input] { "[ -+12 ]" } [Received:error] { "Expected value but found invalid number at character 3" } ==> Test [20] Decode invalid number #2 [throw error]: PASS [Input] { "-v" } [Received:error] { "Expected value but found invalid number at 
character 1" } ==> Test [21] Decode invalid number exponent [throw error]: PASS [Input] { "[ 0.4eg10 ]" } [Received:error] { "Expected comma or array end but found invalid token at character 6" } ==> Test [22] Set decode_max_depth(5): PASS [Input] { 5 } [Received:success] { 5 } ==> Test [23] Decode array at nested limit: PASS [Input] { "[[[[[ \"nested\" ]]]]]" } [Received:success] { { { { { { "nested" } } } } } } ==> Test [24] Decode array over nested limit [throw error]: PASS [Input] { "[[[[[[ \"nested\" ]]]]]]" } [Received:error] { "Found too many nested data structures (6) at character 6" } ==> Test [25] Decode object at nested limit: PASS [Input] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":\"nested\"}}}}}" } [Received:success] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = "nested" } } } } } } ==> Test [26] Decode object over nested limit [throw error]: PASS [Input] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":{\"f\":\"nested\"}}}}}}" } [Received:error] { "Found too many nested data structures (6) at character 26" } ==> Test [27] Set decode_max_depth(1000): PASS [Input] { 1000 } [Received:success] { 1000 } ==> Test [28] Decode deeply nested array [throw error]: PASS [Input] { "[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[1100]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]" } [Received:error] { "Found too many nested data structures (1001) at character 1001" } ==> Test [29] Set encode_max_depth(5): PASS [Input] { 5 } 
[Received:success] { 5 } ==> Test [30] Encode nested table as array at nested limit: PASS [Input] { { { { { { "nested" } } } } } } [Received:success] { "[[[[[\"nested\"]]]]]" } ==> Test [31] Encode nested table as array after nested limit [throw error]: PASS [Input] { { { { { { { "nested" } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [32] Encode nested table as object at nested limit: PASS [Input] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = "nested" } } } } } } [Received:success] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":\"nested\"}}}}}" } ==> Test [33] Encode nested table as object over nested limit [throw error]: PASS [Input] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = { ["f"] = "nested" } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [34] Encode table with cycle [throw error]: PASS [Input] { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { Cannot serialise any further: too many nested tables } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [35] Set encode_max_depth(1000): PASS [Input] { 1000 } [Received:success] { 1000 } ==> Test [36] Encode deeply nested data [throw error]: PASS [Input] { { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] 
= { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = Cannot serialise any further: too many nested tables, [2] = "string", ["a"] = Cannot serialise any further: too many nested tables } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (1001)" } ==> Test [37] Encode null: PASS [Input] { json.null } [Received:success] { "null" } ==> Test [38] Encode true: PASS [Input] { true } [Received:success] { "true" } ==> Test [39] Encode false: PASS [Input] { false } [Received:success] { "false" } ==> Test [40] Encode empty object: PASS [Input] { { } } [Received:success] { "{}" } ==> Test [41] Encode integer: PASS [Input] { 10 } [Received:success] { "10" } ==> Test [42] Encode string: PASS [Input] { "hello" } [Received:success] { "\"hello\"" } ==> Test [43] Encode Lua function [throw error]: PASS [Input] { "" } [Received:error] { "Cannot serialise function: type not supported" } ==> Test [44] Set decode_invalid_numbers(true): PASS [Input] { true } [Received:success] { true } ==> Test [45] Decode hexadecimal: PASS [Input] { "0x6.ffp1" } [Received:success] { 13.9921875 } ==> Test [46] Decode numbers with leading zero: PASS [Input] { "[ 0123, 00.33 ]" } [Received:success] { { 123, 0.33 } } ==> Test [47] Decode +-Inf: PASS [Input] { "[ +Inf, Inf, -Inf ]" } [Received:success] { { inf, inf, -inf } } ==> Test [48] Decode +-Infinity: PASS [Input] { "[ +Infinity, Infinity, -Infinity ]" } [Received:success] { { inf, inf, -inf } } ==> Test [49] Decode +-NaN: PASS [Input] { "[ +NaN, NaN, -NaN ]" } [Received:success] { { nan, nan, nan } } ==> Test [50] Decode Infrared (not infinity) [throw error]: PASS [Input] { "Infrared" } [Received:error] { "Expected the end but found invalid token at character 4" } ==> Test [51] Decode Noodle (not NaN) [throw error]: PASS [Input] { "Noodle" } [Received:error] { "Expected value but found invalid token at character 1" } ==> Test [52] Set decode_invalid_numbers(false): PASS [Input] { false } [Received:success] { false } ==> Test [53] Decode hexadecimal [throw error]: PASS [Input] { "0x6" } [Received:error] { "Expected value but found invalid number at character 1" } ==> Test [54] Decode numbers with leading zero [throw error]: PASS [Input] { "[ 0123, 00.33 ]" } [Received:error] { "Expected value but found invalid number at character 3" } ==> Test [55] Decode +-Inf [throw error]: PASS [Input] { "[ +Inf, Inf, -Inf ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [56] Decode +-Infinity [throw error]: PASS [Input] { "[ +Infinity, Infinity, -Infinity ]" } [Received:error] { "Expected value but found invalid token 
at character 3" } ==> Test [57] Decode +-NaN [throw error]: PASS [Input] { "[ +NaN, NaN, -NaN ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [58] Set decode_invalid_numbers("on"): PASS [Input] { "on" } [Received:success] { true } ==> Test [59] Set encode_invalid_numbers(false): PASS [Input] { false } [Received:success] { false } ==> Test [60] Encode NaN [throw error]: PASS [Input] { -nan } [Received:error] { "Cannot serialise number: must not be NaN or Inf" } ==> Test [61] Encode Infinity [throw error]: PASS [Input] { inf } [Received:error] { "Cannot serialise number: must not be NaN or Inf" } ==> Test [62] Set encode_invalid_numbers("null"): PASS [Input] { "null" } [Received:success] { "null" } ==> Test [63] Encode NaN as null: PASS [Input] { -nan } [Received:success] { "null" } ==> Test [64] Encode Infinity as null: PASS [Input] { inf } [Received:success] { "null" } ==> Test [65] Set encode_invalid_numbers(true): PASS [Input] { true } [Received:success] { true } ==> Test [66] Encode NaN: PASS [Input] { -nan } [Received:success] { "nan" } ==> Test [67] Encode Infinity: PASS [Input] { inf } [Received:success] { "inf" } ==> Test [68] Set encode_invalid_numbers("off"): PASS [Input] { "off" } [Received:success] { false } ==> Test [69] Set encode_sparse_array(true, 2, 3): PASS [Input] { true, 2, 3 } [Received:success] { true, 2, 3 } ==> Test [70] Encode sparse table as array #1: PASS [Input] { { [3] = "sparse test" } } [Received:success] { "[null,null,\"sparse test\"]" } ==> Test [71] Encode sparse table as array #2: PASS [Input] { { "one", nil, nil, "sparse test" } } [Received:success] { "[\"one\",null,null,\"sparse test\"]" } ==> Test [72] Encode sparse array as object: PASS [Input] { { [1] = "one", [5] = "sparse test" } } [Received:success] { "{\"1\":\"one\",\"5\":\"sparse test\"}" } ==> Test [73] Encode table with numeric string key as object: PASS [Input] { { ["2"] = "numeric string key test" } } [Received:success] { "{\"2\":\"numeric string key test\"}" } ==> Test [74] Set encode_sparse_array(false): PASS [Input] { false } [Received:success] { false, 2, 3 } ==> Test [75] Encode table with incompatible key [throw error]: PASS [Input] { { [false] = "wrong" } } [Received:error] { "Cannot serialise boolean: table key must be a number or string" } ==> Test [76] Encode all octets (8-bit clean): PASS [Input] { "\0\1\2\3\4\5\6\7\8\9\ \11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31 !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\127" } [Received:success] { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f !\\\"#$%&'()*+,-.\\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\u007f\"" } ==> Test [77] Decode all escaped octets: PASS [Input] { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f !\\\"#$%&'()*+,-.\\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\u007f\"" } [Received:success] { "\0\1\2\3\4\5\6\7\8\9\ \11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31 !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\127" } ==> Test [78] Decode single UTF-16 
escape: PASS [Input] { "\"\\uF800\"" } [Received:success] { "" } ==> Test [79] Decode swapped surrogate pair [throw error]: PASS [Input] { "\"\\uDC00\\uD800\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [80] Decode duplicate high surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [81] Decode duplicate low surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [82] Decode missing low surrogate [throw error]: PASS [Input] { "\"\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [83] Decode invalid low surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uD\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Set locale to cs_CZ (comma separator) ==> Test [84] Encode number under comma locale: PASS [Input] { 1.5 } [Received:success] { "1.5" } ==> Test [85] Decode number in array under comma locale: PASS [Input] { "[ 10, \"test\" ]" } [Received:success] { { 10, "test" } } ==> Revert locale to POSIX ==> Test [86] Set encode_keep_buffer(false): PASS [Input] { false } [Received:success] { false } ==> Test [87] Set encode_number_precision(3): PASS [Input] { 3 } [Received:success] { 3 } ==> Test [88] Encode number with precision 3: PASS [Input] { 0.33333333333333 } [Received:success] { "0.333" } ==> Test [89] Set encode_number_precision(14): PASS [Input] { 14 } [Received:success] { 14 } ==> Test [90] Set encode_keep_buffer(true): PASS [Input] { true } [Received:success] { true } ==> Test [91] Set encode_number_precision(0) [throw error]: PASS [Input] { 0 } [Received:error] { "bad argument #1 to 'cjson.encode_number_precision' (expected integer between 1 and 14)" } ==> Test [92] Set encode_number_precision("five") [throw error]: PASS [Input] { "five" } [Received:error] { "bad argument #1 to 'cjson.encode_number_precision' (number expected, got string)" } ==> Test [93] Set encode_keep_buffer(nil, true) [throw error]: PASS [Input] { nil, true } [Received:error] { "bad argument #2 to 'cjson.encode_keep_buffer' (found too many arguments)" } ==> Test [94] Set encode_max_depth("wrong") [throw error]: PASS [Input] { "wrong" } [Received:error] { "bad argument #1 to 'cjson.encode_max_depth' (number expected, got string)" } ==> Test [95] Set decode_max_depth(0) [throw error]: PASS [Input] { "0" } [Received:error] { "bad argument #1 to 'cjson.decode_max_depth' (expected integer between 1 and 2147483647)" } ==> Test [96] Set encode_invalid_numbers(-2) [throw error]: PASS [Input] { -2 } [Received:error] { "bad argument #1 to 'cjson.encode_invalid_numbers' (invalid option '-2')" } ==> Test [97] Set decode_invalid_numbers(true, false) [throw error]: PASS [Input] { true, false } [Received:error] { "bad argument #2 to 'cjson.decode_invalid_numbers' (found too many arguments)" } ==> Test [98] Set encode_sparse_array("not quite on") [throw error]: PASS [Input] { "not quite on" } [Received:error] { "bad argument #1 to 'cjson.encode_sparse_array' (invalid option 'not quite on')" } ==> Reset Lua CJSON configuration ==> Test [99] Check encode_sparse_array(): PASS [Input] { } [Received:success] { false, 2, 10 } ==> Test [100] Encode (safe) simple value: PASS [Input] { true } [Received:success] { "true" } ==> Test [101] Encode (safe) argument 
validation [throw error]: PASS [Input] { "arg1", "arg2" } [Received:error] { "bad argument #1 to '?' (expected 1 argument)" } ==> Test [102] Decode (safe) error generation: PASS [Input] { "Oops" } [Received:success] { nil, "Expected value but found invalid token at character 1" } ==> Test [103] Decode (safe) error generation after new(): PASS [Input] { "Oops" } [Received:success] { nil, "Expected value but found invalid token at character 1" } ==> Summary: all tests succeeded ************************************************** libtool --silent --tag=CC --mode=link cc -g -O2 -fdebug-prefix-map=/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -I/usr//include/lua5.2 -Wall -Wextra -Wl,--no-add-needed \ -o /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/app-dynamic -I . -I /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/ \ /usr/share/dh-lua/test/5.2/app.c /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/liblua5.2-cjson.la \ -Wl,-z,relro -L/usr//lib/x86_64-linux-gnu -llua5.2 libtool --silent --tag=CC --mode=execute -dlopen /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/liblua5.2-cjson.la \ ldd /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/app-dynamic linux-vdso.so.1 (0x00007fff4c2fb000) liblua5.2-cjson.so.0 => /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/.libs/liblua5.2-cjson.so.0 (0x00007f94069c3000) liblua5.2.so.0 => /usr//lib/x86_64-linux-gnu/liblua5.2.so.0 (0x00007f9406790000) libc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x00007f94063da000) libm.so.6 => /lib/x86_64-linux-gnu/libm.so.6 (0x00007f940608f000) libdl.so.2 => /lib/x86_64-linux-gnu/libdl.so.2 (0x00007f9405e8b000) /lib64/ld-linux-x86-64.so.2 (0x00007f9406dce000) ********************** app dynamic (5.2) ********* Test: cd tests/ && @@LUA@@ test.lua ==> Testing Lua CJSON version 2.1.0 ==> Test [1] Check module name, version: PASS [Input] { } [Received:success] { "cjson", "2.1.0" } ==> Test [2] Decode string: PASS [Input] { "\"test string\"" } [Received:success] { "test string" } ==> Test [3] Decode numbers: PASS [Input] { "[ 0.0, -5e3, -1, 0.3e-3, 1023.2, 0e10 ]" } [Received:success] { { 0, -5000, -1, 0.0003, 1023.2, 0 } } ==> Test [4] Decode null: PASS [Input] { "null" } [Received:success] { json.null } ==> Test [5] Decode true: PASS [Input] { "true" } [Received:success] { true } ==> Test [6] Decode false: PASS [Input] { "false" } [Received:success] { false } ==> Test [7] Decode object with numeric keys: PASS [Input] { "{ \"1\": \"one\", \"3\": \"three\" }" } [Received:success] { { ["1"] = "one", ["3"] = "three" } } ==> Test [8] Decode object with string keys: PASS [Input] { "{ \"a\": \"a\", \"b\": \"b\" }" } [Received:success] { { ["a"] = "a", ["b"] = "b" } } ==> Test [9] Decode array: PASS [Input] { "[ \"one\", null, \"three\" ]" } [Received:success] { { "one", json.null, "three" } } ==> Test [10] Decode UTF-16BE [throw error]: PASS [Input] { "\0\"\0\"" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [11] Decode UTF-16LE [throw error]: PASS [Input] { "\"\0\"\0" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [12] Decode UTF-32BE [throw error]: PASS [Input] { "\0\0\0\"" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [13] Decode UTF-32LE [throw error]: PASS [Input] { "\"\0\0\0" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [14] Decode partial JSON [throw error]: PASS 
[Input] { "{ \"unexpected eof\": " } [Received:error] { "Expected value but found T_END at character 21" } ==> Test [15] Decode with extra comma [throw error]: PASS [Input] { "{ \"extra data\": true }, false" } [Received:error] { "Expected the end but found T_COMMA at character 23" } ==> Test [16] Decode invalid escape code [throw error]: PASS [Input] { " { \"bad escape \\q code\" } " } [Received:error] { "Expected object key string but found invalid escape code at character 16" } ==> Test [17] Decode invalid unicode escape [throw error]: PASS [Input] { " { \"bad unicode \\u0f6 escape\" } " } [Received:error] { "Expected object key string but found invalid unicode escape code at character 17" } ==> Test [18] Decode invalid keyword [throw error]: PASS [Input] { " [ \"bad barewood\", test ] " } [Received:error] { "Expected value but found invalid token at character 20" } ==> Test [19] Decode invalid number #1 [throw error]: PASS [Input] { "[ -+12 ]" } [Received:error] { "Expected value but found invalid number at character 3" } ==> Test [20] Decode invalid number #2 [throw error]: PASS [Input] { "-v" } [Received:error] { "Expected value but found invalid number at character 1" } ==> Test [21] Decode invalid number exponent [throw error]: PASS [Input] { "[ 0.4eg10 ]" } [Received:error] { "Expected comma or array end but found invalid token at character 6" } ==> Test [22] Set decode_max_depth(5): PASS [Input] { 5 } [Received:success] { 5 } ==> Test [23] Decode array at nested limit: PASS [Input] { "[[[[[ \"nested\" ]]]]]" } [Received:success] { { { { { { "nested" } } } } } } ==> Test [24] Decode array over nested limit [throw error]: PASS [Input] { "[[[[[[ \"nested\" ]]]]]]" } [Received:error] { "Found too many nested data structures (6) at character 6" } ==> Test [25] Decode object at nested limit: PASS [Input] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":\"nested\"}}}}}" } [Received:success] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = "nested" } } } } } } ==> Test [26] Decode object over nested limit [throw error]: PASS [Input] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":{\"f\":\"nested\"}}}}}}" } [Received:error] { "Found too many nested data structures (6) at character 26" } ==> Test [27] Set decode_max_depth(1000): PASS [Input] { 1000 } [Received:success] { 1000 } ==> Test [28] Decode deeply nested array [throw error]: PASS [Input] { 
"[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[1100]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]" } [Received:error] { "Found too many nested data structures (1001) at character 1001" } ==> Test [29] Set encode_max_depth(5): PASS [Input] { 5 } [Received:success] { 5 } ==> Test [30] Encode nested table as array at nested limit: PASS [Input] { { { { { { "nested" } } } } } } [Received:success] { "[[[[[\"nested\"]]]]]" } ==> Test [31] Encode nested table as array after nested limit [throw error]: PASS [Input] { { { { { { { "nested" } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [32] Encode nested table as object at nested limit: PASS [Input] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = "nested" } } } } } } [Received:success] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":\"nested\"}}}}}" } ==> Test [33] Encode nested table as object over nested limit [throw error]: PASS [Input] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = { ["f"] = "nested" } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [34] Encode table with cycle [throw error]: PASS [Input] { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { Cannot serialise any further: too many nested tables } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } [Received:error] { "Cannot serialise, excessive 
nesting (6)" } ==> Test [35] Set encode_max_depth(1000): PASS [Input] { 1000 } [Received:success] { 1000 } ==> Test [36] Encode deeply nested data [throw error]: PASS [Input] { { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = Cannot serialise any further: too many nested tables, [2] = "string", ["a"] = Cannot serialise any further: too many nested tables } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (1001)" } 
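Each run in this log also steps encode_invalid_numbers through its three modes (tests [59]-[68]): refuse NaN/Inf outright, replace them with JSON null, or emit the non-standard nan/inf tokens. A hedged sketch of those modes, with illustrative values only and the same assumption that the built module is loadable:

local cjson = require "cjson"
local nan, inf = 0/0, math.huge

cjson.encode_invalid_numbers(false)        -- strict (test [59]): NaN/Inf raise an error
print(pcall(cjson.encode, nan))            -- false, "Cannot serialise number: must not be NaN or Inf"

cjson.encode_invalid_numbers("null")       -- substitute (test [62]): emit JSON null instead
print(cjson.encode({ nan, inf }))          -- [null,null]

cjson.encode_invalid_numbers(true)         -- permissive (test [65]): emit non-standard tokens
print(cjson.encode(inf))                   -- inf
print(cjson.encode(nan))                   -- nan (sign of the printed NaN is platform-dependent)
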
==> Test [37] Encode null: PASS [Input] { json.null } [Received:success] { "null" } ==> Test [38] Encode true: PASS [Input] { true } [Received:success] { "true" } ==> Test [39] Encode false: PASS [Input] { false } [Received:success] { "false" } ==> Test [40] Encode empty object: PASS [Input] { { } } [Received:success] { "{}" } ==> Test [41] Encode integer: PASS [Input] { 10 } [Received:success] { "10" } ==> Test [42] Encode string: PASS [Input] { "hello" } [Received:success] { "\"hello\"" } ==> Test [43] Encode Lua function [throw error]: PASS [Input] { "" } [Received:error] { "Cannot serialise function: type not supported" } ==> Test [44] Set decode_invalid_numbers(true): PASS [Input] { true } [Received:success] { true } ==> Test [45] Decode hexadecimal: PASS [Input] { "0x6.ffp1" } [Received:success] { 13.9921875 } ==> Test [46] Decode numbers with leading zero: PASS [Input] { "[ 0123, 00.33 ]" } [Received:success] { { 123, 0.33 } } ==> Test [47] Decode +-Inf: PASS [Input] { "[ +Inf, Inf, -Inf ]" } [Received:success] { { inf, inf, -inf } } ==> Test [48] Decode +-Infinity: PASS [Input] { "[ +Infinity, Infinity, -Infinity ]" } [Received:success] { { inf, inf, -inf } } ==> Test [49] Decode +-NaN: PASS [Input] { "[ +NaN, NaN, -NaN ]" } [Received:success] { { nan, nan, nan } } ==> Test [50] Decode Infrared (not infinity) [throw error]: PASS [Input] { "Infrared" } [Received:error] { "Expected the end but found invalid token at character 4" } ==> Test [51] Decode Noodle (not NaN) [throw error]: PASS [Input] { "Noodle" } [Received:error] { "Expected value but found invalid token at character 1" } ==> Test [52] Set decode_invalid_numbers(false): PASS [Input] { false } [Received:success] { false } ==> Test [53] Decode hexadecimal [throw error]: PASS [Input] { "0x6" } [Received:error] { "Expected value but found invalid number at character 1" } ==> Test [54] Decode numbers with leading zero [throw error]: PASS [Input] { "[ 0123, 00.33 ]" } [Received:error] { "Expected value but found invalid number at character 3" } ==> Test [55] Decode +-Inf [throw error]: PASS [Input] { "[ +Inf, Inf, -Inf ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [56] Decode +-Infinity [throw error]: PASS [Input] { "[ +Infinity, Infinity, -Infinity ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [57] Decode +-NaN [throw error]: PASS [Input] { "[ +NaN, NaN, -NaN ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [58] Set decode_invalid_numbers("on"): PASS [Input] { "on" } [Received:success] { true } ==> Test [59] Set encode_invalid_numbers(false): PASS [Input] { false } [Received:success] { false } ==> Test [60] Encode NaN [throw error]: PASS [Input] { -nan } [Received:error] { "Cannot serialise number: must not be NaN or Inf" } ==> Test [61] Encode Infinity [throw error]: PASS [Input] { inf } [Received:error] { "Cannot serialise number: must not be NaN or Inf" } ==> Test [62] Set encode_invalid_numbers("null"): PASS [Input] { "null" } [Received:success] { "null" } ==> Test [63] Encode NaN as null: PASS [Input] { -nan } [Received:success] { "null" } ==> Test [64] Encode Infinity as null: PASS [Input] { inf } [Received:success] { "null" } ==> Test [65] Set encode_invalid_numbers(true): PASS [Input] { true } [Received:success] { true } ==> Test [66] Encode NaN: PASS [Input] { -nan } [Received:success] { "nan" } ==> Test [67] Encode Infinity: PASS [Input] { inf } [Received:success] { "inf" } ==> Test [68] 
Set encode_invalid_numbers("off"): PASS [Input] { "off" } [Received:success] { false } ==> Test [69] Set encode_sparse_array(true, 2, 3): PASS [Input] { true, 2, 3 } [Received:success] { true, 2, 3 } ==> Test [70] Encode sparse table as array #1: PASS [Input] { { [3] = "sparse test" } } [Received:success] { "[null,null,\"sparse test\"]" } ==> Test [71] Encode sparse table as array #2: PASS [Input] { { "one", nil, nil, "sparse test" } } [Received:success] { "[\"one\",null,null,\"sparse test\"]" } ==> Test [72] Encode sparse array as object: PASS [Input] { { [1] = "one", [5] = "sparse test" } } [Received:success] { "{\"1\":\"one\",\"5\":\"sparse test\"}" } ==> Test [73] Encode table with numeric string key as object: PASS [Input] { { ["2"] = "numeric string key test" } } [Received:success] { "{\"2\":\"numeric string key test\"}" } ==> Test [74] Set encode_sparse_array(false): PASS [Input] { false } [Received:success] { false, 2, 3 } ==> Test [75] Encode table with incompatible key [throw error]: PASS [Input] { { [false] = "wrong" } } [Received:error] { "Cannot serialise boolean: table key must be a number or string" } ==> Test [76] Encode all octets (8-bit clean): PASS [Input] { "\0\1\2\3\4\5\6\7\8\9\ \11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31 !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\127" } [Received:success] { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f !\\\"#$%&'()*+,-.\\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\u007f\"" } ==> Test [77] Decode all escaped octets: PASS [Input] { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f !\\\"#$%&'()*+,-.\\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\u007f\"" } [Received:success] { "\0\1\2\3\4\5\6\7\8\9\ \11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31 !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\127" } ==> Test [78] Decode single UTF-16 escape: PASS [Input] { "\"\\uF800\"" } [Received:success] { "" } ==> Test [79] Decode swapped surrogate pair [throw error]: PASS [Input] { "\"\\uDC00\\uD800\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [80] Decode duplicate high surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [81] Decode duplicate low surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [82] Decode missing low surrogate [throw error]: PASS [Input] { "\"\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [83] Decode invalid low surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uD\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Set locale to cs_CZ (comma separator) ==> Test [84] Encode number under comma locale: PASS [Input] { 1.5 } [Received:success] { "1.5" } ==> Test [85] Decode number in array under comma locale: 
PASS [Input] { "[ 10, \"test\" ]" } [Received:success] { { 10, "test" } } ==> Revert locale to POSIX ==> Test [86] Set encode_keep_buffer(false): PASS [Input] { false } [Received:success] { false } ==> Test [87] Set encode_number_precision(3): PASS [Input] { 3 } [Received:success] { 3 } ==> Test [88] Encode number with precision 3: PASS [Input] { 0.33333333333333 } [Received:success] { "0.333" } ==> Test [89] Set encode_number_precision(14): PASS [Input] { 14 } [Received:success] { 14 } ==> Test [90] Set encode_keep_buffer(true): PASS [Input] { true } [Received:success] { true } ==> Test [91] Set encode_number_precision(0) [throw error]: PASS [Input] { 0 } [Received:error] { "bad argument #1 to '?' (expected integer between 1 and 14)" } ==> Test [92] Set encode_number_precision("five") [throw error]: PASS [Input] { "five" } [Received:error] { "bad argument #1 to '?' (number expected, got string)" } ==> Test [93] Set encode_keep_buffer(nil, true) [throw error]: PASS [Input] { nil, true } [Received:error] { "bad argument #2 to '?' (found too many arguments)" } ==> Test [94] Set encode_max_depth("wrong") [throw error]: PASS [Input] { "wrong" } [Received:error] { "bad argument #1 to '?' (number expected, got string)" } ==> Test [95] Set decode_max_depth(0) [throw error]: PASS [Input] { "0" } [Received:error] { "bad argument #1 to '?' (expected integer between 1 and 2147483647)" } ==> Test [96] Set encode_invalid_numbers(-2) [throw error]: PASS [Input] { -2 } [Received:error] { "bad argument #1 to '?' (invalid option '-2')" } ==> Test [97] Set decode_invalid_numbers(true, false) [throw error]: PASS [Input] { true, false } [Received:error] { "bad argument #2 to '?' (found too many arguments)" } ==> Test [98] Set encode_sparse_array("not quite on") [throw error]: PASS [Input] { "not quite on" } [Received:error] { "bad argument #1 to '?' (invalid option 'not quite on')" } ==> Reset Lua CJSON configuration ==> Test [99] Check encode_sparse_array(): PASS [Input] { } [Received:success] { false, 2, 10 } ==> Test [100] Encode (safe) simple value: PASS [Input] { true } [Received:success] { "true" } ==> Test [101] Encode (safe) argument validation [throw error]: PASS [Input] { "arg1", "arg2" } [Received:error] { "bad argument #1 to '?' (expected 1 argument)" } ==> Test [102] Decode (safe) error generation: PASS [Input] { "Oops" } [Received:success] { nil, "Expected value but found invalid token at character 1" } ==> Test [103] Decode (safe) error generation after new(): PASS [Input] { "Oops" } [Received:success] { nil, "Expected value but found invalid token at character 1" } ==> Summary: all tests succeeded ************************************************** libtool --silent --tag=CC --mode=link cc -g -O2 -fdebug-prefix-map=/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -I/usr//include/lua5.2 -Wall -Wextra -Wl,--no-add-needed \ -static -o /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/app-static -I . 
-I /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/ \ /usr/share/dh-lua/test/5.2/app.c /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/liblua5.2-cjson.la \ -Wl,-z,relro -L/usr//lib/x86_64-linux-gnu -llua5.2 -lm -ldl ldd /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/app-static linux-vdso.so.1 (0x00007ffe221cd000) liblua5.2.so.0 => /usr/lib/x86_64-linux-gnu/liblua5.2.so.0 (0x00007fb3f5ca1000) libm.so.6 => /lib/x86_64-linux-gnu/libm.so.6 (0x00007fb3f5956000) libdl.so.2 => /lib/x86_64-linux-gnu/libdl.so.2 (0x00007fb3f5752000) libc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x00007fb3f539c000) /lib64/ld-linux-x86-64.so.2 (0x00007fb3f60dc000) *********************** app static (5.2) ********* Test: cd tests/ && @@LUA@@ test.lua ==> Testing Lua CJSON version 2.1.0 ==> Test [1] Check module name, version: PASS [Input] { } [Received:success] { "cjson", "2.1.0" } ==> Test [2] Decode string: PASS [Input] { "\"test string\"" } [Received:success] { "test string" } ==> Test [3] Decode numbers: PASS [Input] { "[ 0.0, -5e3, -1, 0.3e-3, 1023.2, 0e10 ]" } [Received:success] { { 0, -5000, -1, 0.0003, 1023.2, 0 } } ==> Test [4] Decode null: PASS [Input] { "null" } [Received:success] { json.null } ==> Test [5] Decode true: PASS [Input] { "true" } [Received:success] { true } ==> Test [6] Decode false: PASS [Input] { "false" } [Received:success] { false } ==> Test [7] Decode object with numeric keys: PASS [Input] { "{ \"1\": \"one\", \"3\": \"three\" }" } [Received:success] { { ["3"] = "three", ["1"] = "one" } } ==> Test [8] Decode object with string keys: PASS [Input] { "{ \"a\": \"a\", \"b\": \"b\" }" } [Received:success] { { ["b"] = "b", ["a"] = "a" } } ==> Test [9] Decode array: PASS [Input] { "[ \"one\", null, \"three\" ]" } [Received:success] { { "one", json.null, "three" } } ==> Test [10] Decode UTF-16BE [throw error]: PASS [Input] { "\0\"\0\"" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [11] Decode UTF-16LE [throw error]: PASS [Input] { "\"\0\"\0" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [12] Decode UTF-32BE [throw error]: PASS [Input] { "\0\0\0\"" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [13] Decode UTF-32LE [throw error]: PASS [Input] { "\"\0\0\0" } [Received:error] { "JSON parser does not support UTF-16 or UTF-32" } ==> Test [14] Decode partial JSON [throw error]: PASS [Input] { "{ \"unexpected eof\": " } [Received:error] { "Expected value but found T_END at character 21" } ==> Test [15] Decode with extra comma [throw error]: PASS [Input] { "{ \"extra data\": true }, false" } [Received:error] { "Expected the end but found T_COMMA at character 23" } ==> Test [16] Decode invalid escape code [throw error]: PASS [Input] { " { \"bad escape \\q code\" } " } [Received:error] { "Expected object key string but found invalid escape code at character 16" } ==> Test [17] Decode invalid unicode escape [throw error]: PASS [Input] { " { \"bad unicode \\u0f6 escape\" } " } [Received:error] { "Expected object key string but found invalid unicode escape code at character 17" } ==> Test [18] Decode invalid keyword [throw error]: PASS [Input] { " [ \"bad barewood\", test ] " } [Received:error] { "Expected value but found invalid token at character 20" } ==> Test [19] Decode invalid number #1 [throw error]: PASS [Input] { "[ -+12 ]" } [Received:error] { "Expected value but found invalid number at character 3" } ==> Test [20] Decode invalid number #2 [throw error]: PASS [Input] { "-v" } 
[Received:error] { "Expected value but found invalid number at character 1" } ==> Test [21] Decode invalid number exponent [throw error]: PASS [Input] { "[ 0.4eg10 ]" } [Received:error] { "Expected comma or array end but found invalid token at character 6" } ==> Test [22] Set decode_max_depth(5): PASS [Input] { 5 } [Received:success] { 5 } ==> Test [23] Decode array at nested limit: PASS [Input] { "[[[[[ \"nested\" ]]]]]" } [Received:success] { { { { { { "nested" } } } } } } ==> Test [24] Decode array over nested limit [throw error]: PASS [Input] { "[[[[[[ \"nested\" ]]]]]]" } [Received:error] { "Found too many nested data structures (6) at character 6" } ==> Test [25] Decode object at nested limit: PASS [Input] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":\"nested\"}}}}}" } [Received:success] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = "nested" } } } } } } ==> Test [26] Decode object over nested limit [throw error]: PASS [Input] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":{\"f\":\"nested\"}}}}}}" } [Received:error] { "Found too many nested data structures (6) at character 26" } ==> Test [27] Set decode_max_depth(1000): PASS [Input] { 1000 } [Received:success] { 1000 } ==> Test [28] Decode deeply nested array [throw error]: PASS [Input] { "[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[1100]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]" } [Received:error] { "Found too many nested data structures (1001) at character 1001" } ==> 
Test [29] Set encode_max_depth(5): PASS [Input] { 5 } [Received:success] { 5 } ==> Test [30] Encode nested table as array at nested limit: PASS [Input] { { { { { { "nested" } } } } } } [Received:success] { "[[[[[\"nested\"]]]]]" } ==> Test [31] Encode nested table as array after nested limit [throw error]: PASS [Input] { { { { { { { "nested" } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [32] Encode nested table as object at nested limit: PASS [Input] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = "nested" } } } } } } [Received:success] { "{\"a\":{\"b\":{\"c\":{\"d\":{\"e\":\"nested\"}}}}}" } ==> Test [33] Encode nested table as object over nested limit [throw error]: PASS [Input] { { ["a"] = { ["b"] = { ["c"] = { ["d"] = { ["e"] = { ["f"] = "nested" } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [34] Encode table with cycle [throw error]: PASS [Input] { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { { Cannot serialise any further: too many nested tables } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (6)" } ==> Test [35] Set encode_max_depth(1000): PASS [Input] { 1000 } [Received:success] { 1000 } ==> Test [36] Encode deeply nested data [throw error]: PASS [Input] { { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, 
false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = { 10, false, true, json.null }, [2] = "string", ["a"] = { [1] = Cannot serialise any further: too many nested tables, [2] = "string", ["a"] = Cannot serialise any further: too many nested tables } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } [Received:error] { "Cannot serialise, excessive nesting (1001)" } ==> Test [37] Encode null: PASS [Input] { json.null } [Received:success] { "null" } ==> Test [38] Encode true: PASS [Input] { true } [Received:success] { "true" } ==> Test [39] Encode false: PASS [Input] { false } [Received:success] { "false" } ==> Test [40] Encode empty object: PASS [Input] { { } } [Received:success] { "{}" } ==> Test [41] Encode integer: PASS [Input] { 10 } [Received:success] { "10" } ==> Test [42] Encode string: PASS [Input] { "hello" } [Received:success] { "\"hello\"" } ==> Test [43] Encode Lua function [throw error]: PASS [Input] { "" } [Received:error] { "Cannot serialise function: type not supported" } ==> Test [44] Set decode_invalid_numbers(true): PASS [Input] { true } [Received:success] { true } ==> Test [45] Decode hexadecimal: PASS [Input] { "0x6.ffp1" } [Received:success] { 13.9921875 } ==> Test [46] Decode numbers with leading zero: PASS [Input] { "[ 0123, 00.33 ]" } [Received:success] { { 123, 0.33 } } ==> Test [47] Decode +-Inf: PASS [Input] { "[ +Inf, Inf, -Inf ]" } [Received:success] { { inf, inf, -inf } } ==> Test [48] Decode +-Infinity: PASS [Input] { "[ +Infinity, Infinity, -Infinity ]" } [Received:success] { { inf, inf, -inf } } ==> Test [49] Decode +-NaN: PASS [Input] { "[ +NaN, NaN, -NaN ]" } [Received:success] { { nan, nan, nan } } ==> Test [50] Decode Infrared (not infinity) [throw error]: PASS [Input] { "Infrared" } [Received:error] { "Expected the end but found invalid token at character 4" } ==> Test [51] Decode Noodle (not NaN) [throw error]: PASS [Input] { "Noodle" } [Received:error] { "Expected value but found invalid token at character 1" } ==> Test [52] Set decode_invalid_numbers(false): PASS [Input] { false } [Received:success] { false } ==> Test [53] Decode hexadecimal [throw error]: PASS [Input] { "0x6" } [Received:error] { "Expected value but found invalid number at character 1" } ==> Test [54] Decode numbers with leading zero [throw error]: PASS [Input] { "[ 0123, 00.33 ]" } [Received:error] { "Expected value but found invalid number at character 3" } ==> Test [55] Decode +-Inf [throw error]: PASS [Input] { "[ +Inf, Inf, -Inf ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [56] Decode +-Infinity [throw error]: PASS [Input] { "[ +Infinity, Infinity, -Infinity ]" } 
[Received:error] { "Expected value but found invalid token at character 3" } ==> Test [57] Decode +-NaN [throw error]: PASS [Input] { "[ +NaN, NaN, -NaN ]" } [Received:error] { "Expected value but found invalid token at character 3" } ==> Test [58] Set decode_invalid_numbers("on"): PASS [Input] { "on" } [Received:success] { true } ==> Test [59] Set encode_invalid_numbers(false): PASS [Input] { false } [Received:success] { false } ==> Test [60] Encode NaN [throw error]: PASS [Input] { -nan } [Received:error] { "Cannot serialise number: must not be NaN or Inf" } ==> Test [61] Encode Infinity [throw error]: PASS [Input] { inf } [Received:error] { "Cannot serialise number: must not be NaN or Inf" } ==> Test [62] Set encode_invalid_numbers("null"): PASS [Input] { "null" } [Received:success] { "null" } ==> Test [63] Encode NaN as null: PASS [Input] { -nan } [Received:success] { "null" } ==> Test [64] Encode Infinity as null: PASS [Input] { inf } [Received:success] { "null" } ==> Test [65] Set encode_invalid_numbers(true): PASS [Input] { true } [Received:success] { true } ==> Test [66] Encode NaN: PASS [Input] { -nan } [Received:success] { "nan" } ==> Test [67] Encode Infinity: PASS [Input] { inf } [Received:success] { "inf" } ==> Test [68] Set encode_invalid_numbers("off"): PASS [Input] { "off" } [Received:success] { false } ==> Test [69] Set encode_sparse_array(true, 2, 3): PASS [Input] { true, 2, 3 } [Received:success] { true, 2, 3 } ==> Test [70] Encode sparse table as array #1: PASS [Input] { { [3] = "sparse test" } } [Received:success] { "[null,null,\"sparse test\"]" } ==> Test [71] Encode sparse table as array #2: PASS [Input] { { "one", nil, nil, "sparse test" } } [Received:success] { "[\"one\",null,null,\"sparse test\"]" } ==> Test [72] Encode sparse array as object: PASS [Input] { { [1] = "one", [5] = "sparse test" } } [Received:success] { "{\"1\":\"one\",\"5\":\"sparse test\"}" } ==> Test [73] Encode table with numeric string key as object: PASS [Input] { { ["2"] = "numeric string key test" } } [Received:success] { "{\"2\":\"numeric string key test\"}" } ==> Test [74] Set encode_sparse_array(false): PASS [Input] { false } [Received:success] { false, 2, 3 } ==> Test [75] Encode table with incompatible key [throw error]: PASS [Input] { { [false] = "wrong" } } [Received:error] { "Cannot serialise boolean: table key must be a number or string" } ==> Test [76] Encode all octets (8-bit clean): PASS [Input] { "\0\1\2\3\4\5\6\7\8\9\ \11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31 !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\127" } [Received:success] { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f !\\\"#$%&'()*+,-.\\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\u007f\"" } ==> Test [77] Decode all escaped octets: PASS [Input] { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f !\\\"#$%&'()*+,-.\\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\u007f\"" } [Received:success] { "\0\1\2\3\4\5\6\7\8\9\ \11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31 
!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\127" } ==> Test [78] Decode single UTF-16 escape: PASS [Input] { "\"\\uF800\"" } [Received:success] { "" } ==> Test [79] Decode swapped surrogate pair [throw error]: PASS [Input] { "\"\\uDC00\\uD800\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [80] Decode duplicate high surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [81] Decode duplicate low surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [82] Decode missing low surrogate [throw error]: PASS [Input] { "\"\\uDB00\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Test [83] Decode invalid low surrogate [throw error]: PASS [Input] { "\"\\uDB00\\uD\"" } [Received:error] { "Expected value but found invalid unicode escape code at character 2" } ==> Set locale to cs_CZ (comma separator) ==> Test [84] Encode number under comma locale: PASS [Input] { 1.5 } [Received:success] { "1.5" } ==> Test [85] Decode number in array under comma locale: PASS [Input] { "[ 10, \"test\" ]" } [Received:success] { { 10, "test" } } ==> Revert locale to POSIX ==> Test [86] Set encode_keep_buffer(false): PASS [Input] { false } [Received:success] { false } ==> Test [87] Set encode_number_precision(3): PASS [Input] { 3 } [Received:success] { 3 } ==> Test [88] Encode number with precision 3: PASS [Input] { 0.33333333333333 } [Received:success] { "0.333" } ==> Test [89] Set encode_number_precision(14): PASS [Input] { 14 } [Received:success] { 14 } ==> Test [90] Set encode_keep_buffer(true): PASS [Input] { true } [Received:success] { true } ==> Test [91] Set encode_number_precision(0) [throw error]: PASS [Input] { 0 } [Received:error] { "bad argument #1 to '?' (expected integer between 1 and 14)" } ==> Test [92] Set encode_number_precision("five") [throw error]: PASS [Input] { "five" } [Received:error] { "bad argument #1 to '?' (number expected, got string)" } ==> Test [93] Set encode_keep_buffer(nil, true) [throw error]: PASS [Input] { nil, true } [Received:error] { "bad argument #2 to '?' (found too many arguments)" } ==> Test [94] Set encode_max_depth("wrong") [throw error]: PASS [Input] { "wrong" } [Received:error] { "bad argument #1 to '?' (number expected, got string)" } ==> Test [95] Set decode_max_depth(0) [throw error]: PASS [Input] { "0" } [Received:error] { "bad argument #1 to '?' (expected integer between 1 and 2147483647)" } ==> Test [96] Set encode_invalid_numbers(-2) [throw error]: PASS [Input] { -2 } [Received:error] { "bad argument #1 to '?' (invalid option '-2')" } ==> Test [97] Set decode_invalid_numbers(true, false) [throw error]: PASS [Input] { true, false } [Received:error] { "bad argument #2 to '?' (found too many arguments)" } ==> Test [98] Set encode_sparse_array("not quite on") [throw error]: PASS [Input] { "not quite on" } [Received:error] { "bad argument #1 to '?' 
(invalid option 'not quite on')" } ==> Reset Lua CJSON configuration ==> Test [99] Check encode_sparse_array(): PASS [Input] { } [Received:success] { false, 2, 10 } ==> Test [100] Encode (safe) simple value: PASS [Input] { true } [Received:success] { "true" } ==> Test [101] Encode (safe) argument validation [throw error]: PASS [Input] { "arg1", "arg2" } [Received:error] { "bad argument #1 to '?' (expected 1 argument)" } ==> Test [102] Decode (safe) error generation: PASS [Input] { "Oops" } [Received:success] { nil, "Expected value but found invalid token at character 1" } ==> Test [103] Decode (safe) error generation after new(): PASS [Input] { "Oops" } [Received:success] { nil, "Expected value but found invalid token at character 1" } ==> Summary: all tests succeeded ************************************************** Target test made fakeroot debian/rules binary-arch dh binary-arch --buildsystem=lua --with lua dh: Compatibility levels before 9 are deprecated (level 7 in use) dh_testroot -a -O--buildsystem=lua dh_prep -a -O--buildsystem=lua dh_auto_install -a -O--buildsystem=lua dh_auto_install: Compatibility levels before 9 are deprecated (level 7 in use) make --no-print-directory -f /usr/share/dh-lua/make/dh-lua.Makefile.multiple install /build/lua-cjson-h7mDhj/lua-cjson-2.1.0\+dfsg/debian/tmp Making target install for debian/lua5.1.dh-lua.conf # .lua Installing lua/cjson/util.lua in debian/tmp/usr//share/lua/5.1 # debian/substvars Filling in debian/lua-cjson.substvars Adding new line: lua:Versions=5.1 5.2 Filling in debian/lua-cjson-dev.substvars Adding new line: lua:Versions=5.1 5.2 Filling in debian/lua-cjson.substvars Adding new line: lua:Provides=lua5.2-cjson, lua5.1-cjson, Filling in debian/lua-cjson-dev.substvars Adding new line: lua:Provides=lua5.2-cjson-dev, lua5.1-cjson-dev, # .so Installing liblua5.1-cjson libtool: warning: remember to run 'libtool --finish /usr//lib/x86_64-linux-gnu' libtool --silent --tag=CC --finish debian/tmp/usr//lib/x86_64-linux-gnu Creating symlink cjson.so # .pc Installing lua5.1-cjson.pc # .h Installing /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.1-cjson/lua-cjson.h Target install made Making target install for debian/lua5.2.dh-lua.conf # .lua Installing lua/cjson/util.lua in debian/tmp/usr//share/lua/5.2 # debian/substvars Filling in debian/lua-cjson.substvars Skipping already existing line: lua:Versions=5.1 5.2 Filling in debian/lua-cjson-dev.substvars Skipping already existing line: lua:Versions=5.1 5.2 Filling in debian/lua-cjson.substvars Skipping already existing line: lua:Provides=lua5.2-cjson, lua5.1-cjson, Filling in debian/lua-cjson-dev.substvars Skipping already existing line: lua:Provides=lua5.2-cjson-dev, lua5.1-cjson-dev, # .so Installing liblua5.2-cjson libtool: warning: remember to run 'libtool --finish /usr//lib/x86_64-linux-gnu' libtool --silent --tag=CC --finish debian/tmp/usr//lib/x86_64-linux-gnu Creating symlink cjson.so # .pc Installing lua5.2-cjson.pc # .h Installing /build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/5.2-cjson/lua-cjson.h Target install made make[1]: Nothing to be done for '/build/lua-cjson-h7mDhj/lua-cjson-2.1.0+dfsg/debian/tmp'. 
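At this point the install target has laid out both flavours of the module: the shared objects (each with a cjson.so symlink), cjson/util.lua under /usr/share/lua/5.1 and 5.2, plus a pkg-config file and the lua-cjson.h header per version, with the substvars advertising lua:Versions=5.1 5.2. Purely as an illustrative sketch (not executed by this build, and assuming the resulting lua-cjson package is installed), loading the packaged module looks like:

    # hypothetical post-install smoke test; lua5.1 behaves the same
    lua5.2 -e '
      local cjson = require "cjson"
      print(cjson._NAME, cjson._VERSION)        -- matches test [1]: cjson  2.1.0
      print(cjson.encode({ hello = "world" }))  -- {"hello":"world"}
    '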
dh_install -a -O--buildsystem=lua dh_install: Compatibility levels before 9 are deprecated (level 7 in use) dh_lua -a -O--buildsystem=lua deduplicating cjson/util.lua dh_installdocs -a -O--buildsystem=lua dh_installdocs: Compatibility levels before 9 are deprecated (level 7 in use) dh_installchangelogs -a -O--buildsystem=lua dh_installchangelogs: Compatibility levels before 9 are deprecated (level 7 in use) dh_perl -a -O--buildsystem=lua dh_link -a -O--buildsystem=lua dh_strip_nondeterminism -a -O--buildsystem=lua dh_compress -X.lua -a -O--buildsystem=lua dh_compress: Compatibility levels before 9 are deprecated (level 7 in use) dh_compress: Compatibility levels before 9 are deprecated (level 7 in use) dh_fixperms -a -O--buildsystem=lua dh_missing -a -O--buildsystem=lua dh_missing: Compatibility levels before 9 are deprecated (level 7 in use) dh_strip -a -O--buildsystem=lua dh_strip: Compatibility levels before 9 are deprecated (level 7 in use) dh_strip: Compatibility levels before 9 are deprecated (level 7 in use) dh_makeshlibs -a -O--buildsystem=lua dh_makeshlibs: Compatibility levels before 9 are deprecated (level 7 in use) dh_shlibdeps -a -O--buildsystem=lua dh_shlibdeps: Compatibility levels before 9 are deprecated (level 7 in use) dh_shlibdeps: Compatibility levels before 9 are deprecated (level 7 in use) dpkg-shlibdeps: warning: symbol lua_next used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.2-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol lua_tonumberx used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.2-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol lua_typename used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.2-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol lua_pushnumber used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.2-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol luaL_argerror used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.2-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol luaL_setfuncs used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.2-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol luaL_checkoption used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.2-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol luaL_checkinteger used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.2-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol lua_setmetatable used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.2-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol lua_pushboolean used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.2-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: 25 other similar warnings have been skipped (use -v to see them all) dpkg-shlibdeps: warning: symbol lua_type used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.1-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol lua_pushlightuserdata used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.1-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol lua_createtable used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.1-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol lua_newuserdata used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.1-cjson.so.0.0.0 found in none of 
the libraries dpkg-shlibdeps: warning: symbol lua_pushstring used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.1-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol lua_tolstring used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.1-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol luaL_checklstring used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.1-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol lua_checkstack used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.1-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol lua_pushvalue used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.1-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: symbol luaL_checkstack used by debian/lua-cjson/usr/lib/x86_64-linux-gnu/liblua5.1-cjson.so.0.0.0 found in none of the libraries dpkg-shlibdeps: warning: 25 other similar warnings have been skipped (use -v to see them all) dh_installdeb -a -O--buildsystem=lua dh_installdeb: Compatibility levels before 9 are deprecated (level 7 in use) dh_gencontrol -a -O--buildsystem=lua dpkg-gencontrol: warning: Depends field of package lua-cjson-dev: unknown substitution variable ${shlibs:Depends} dh_md5sums -a -O--buildsystem=lua dh_builddeb -a -O--buildsystem=lua dpkg-deb: building package 'lua-cjson' in '../lua-cjson_2.1.0+dfsg-2.1_amd64.deb'. dpkg-deb: building package 'lua-cjson-dbgsym' in '../lua-cjson-dbgsym_2.1.0+dfsg-2.1_amd64.deb'. dpkg-deb: building package 'lua-cjson-dev' in '../lua-cjson-dev_2.1.0+dfsg-2.1_amd64.deb'. dpkg-genbuildinfo --build=any dpkg-genchanges --build=any >../lua-cjson_2.1.0+dfsg-2.1_amd64.changes dpkg-genchanges: info: binary-only arch-specific upload (source code and arch-indep packages not included) dpkg-source --after-build lua-cjson-2.1.0+dfsg dpkg-buildpackage: info: binary-only upload (no source included) I: running special hook: sync-out /build/lua-cjson-h7mDhj /tmp/lua-cjson-2.1.0+dfsg-2.13kfm0w82 I: cleaning package lists and apt cache... I: creating tarball... I: done I: removing tempdir /tmp/mmdebstrap.k8neI7XJQK... I: success in 241.2625 seconds md5: lua-cjson-dbgsym_2.1.0+dfsg-2.1_amd64.deb: OK md5: lua-cjson-dev_2.1.0+dfsg-2.1_amd64.deb: OK md5: lua-cjson_2.1.0+dfsg-2.1_amd64.deb: OK sha1: lua-cjson-dbgsym_2.1.0+dfsg-2.1_amd64.deb: OK sha1: lua-cjson-dev_2.1.0+dfsg-2.1_amd64.deb: OK sha1: lua-cjson_2.1.0+dfsg-2.1_amd64.deb: OK sha256: lua-cjson-dbgsym_2.1.0+dfsg-2.1_amd64.deb: OK sha256: lua-cjson-dev_2.1.0+dfsg-2.1_amd64.deb: OK sha256: lua-cjson_2.1.0+dfsg-2.1_amd64.deb: OK Checksums: OK
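The closing check reports OK for the md5, sha1 and sha256 sums of each rebuilt .deb, so the comparison against the reference checksums succeeds. A manual re-check of the sha256 part could be sketched as follows (hypothetical; reference.buildinfo is a placeholder for whichever .buildinfo the rebuild is being compared against, and the directory is the sync-out target from this run):

    # hypothetical manual verification of the rebuilt .debs
    cd /tmp/lua-cjson-2.1.0+dfsg-2.13kfm0w82
    # Checksums-Sha256 stanza lines are "<sha256> <size> <filename>",
    # so reshuffle them into sha256sum's "<sha256>  <filename>" format.
    awk '/^Checksums-Sha256:/ {f=1; next} /^[^ ]/ {f=0} f && NF==3 {print $1 "  " $3}' \
        reference.buildinfo | grep '\.deb$' | sha256sum -c -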